diff --git a/.travis.yml b/.travis.yml
index cdfe9d7173..b5a9b2a876 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -6,9 +6,9 @@ python:
 - 3.4
 - 3.5
 env:
-- INSTALL_DEB_DEPENDECIES=true
-- INSTALL_DEB_DEPENDECIES=false
-- INSTALL_DEB_DEPENDECIES=true DUECREDIT_ENABLE=yes
+- INSTALL_DEB_DEPENDECIES=true NIPYPE_EXTRAS="doc,tests,fmri,profiler"
+- INSTALL_DEB_DEPENDECIES=false NIPYPE_EXTRAS="doc,tests,fmri,profiler"
+- INSTALL_DEB_DEPENDECIES=true NIPYPE_EXTRAS="doc,tests,fmri,profiler,duecredit"
 before_install:
 - function bef_inst {
     wget http://repo.continuum.io/miniconda/Miniconda${TRAVIS_PYTHON_VERSION:0:1}-latest-Linux-x86_64.sh
@@ -28,16 +28,15 @@ before_install:
     export FSLOUTPUTTYPE=NIFTI_GZ; }
 - travis_retry bef_inst
 install:
+# Add install of vtk and mayavi to test mesh (disabled): conda install -y vtk mayavi &&
 - function inst {
     conda config --add channels conda-forge &&
     conda update --yes conda &&
     conda update --all -y python=$TRAVIS_PYTHON_VERSION &&
-    conda install -y nipype matplotlib nitime &&
-    pip install python-coveralls coverage doctest-ignore-unicode &&
-    if [ ! -z "$DUECREDIT_ENABLE"]; then pip install duecredit; fi &&
+    conda install -y nipype &&
     rm -r /home/travis/miniconda/lib/python${TRAVIS_PYTHON_VERSION}/site-packages/nipype* &&
     pip install -r requirements.txt &&
-    pip install -e . &&
+    pip install -e .[$NIPYPE_EXTRAS] &&
     export COVERAGE_PROCESS_START=$(pwd)/.coveragerc &&
     export COVERAGE_DATA_FILE=$(pwd)/.coverage &&
     echo "data_file = ${COVERAGE_DATA_FILE}" >> ${COVERAGE_PROCESS_START}; }
diff --git a/docker/nipype_test/Dockerfile_py27 b/docker/nipype_test/Dockerfile_py27
index 434f785f22..6e38a5bf52 100644
--- a/docker/nipype_test/Dockerfile_py27
+++ b/docker/nipype_test/Dockerfile_py27
@@ -46,6 +46,6 @@ RUN pip install -r /root/src/nipype/requirements.txt
 COPY . /root/src/nipype
 RUN rm -r /usr/local/miniconda/lib/python2.7/site-packages/nipype* && \
     cd /root/src/nipype && \
-    pip install -e .
+    pip install -e .[all]
 
 CMD ["/bin/bash"]
diff --git a/docker/nipype_test/Dockerfile_py34 b/docker/nipype_test/Dockerfile_py34
index cbb1b36098..e0d192ccae 100644
--- a/docker/nipype_test/Dockerfile_py34
+++ b/docker/nipype_test/Dockerfile_py34
@@ -51,6 +51,6 @@ RUN pip install -r /root/src/nipype/requirements.txt
 COPY . /root/src/nipype
 RUN rm -r /usr/local/miniconda/lib/python3.4/site-packages/nipype* && \
     cd /root/src/nipype && \
-    pip install -e .
+    pip install -e .[all]
 
 CMD ["/bin/bash"]
diff --git a/docker/nipype_test/Dockerfile_py35 b/docker/nipype_test/Dockerfile_py35
index 14f3e2d3a1..93007c1e53 100644
--- a/docker/nipype_test/Dockerfile_py35
+++ b/docker/nipype_test/Dockerfile_py35
@@ -30,7 +30,7 @@ FROM nipype/nipype_test:base-0.0.2
 MAINTAINER The nipype developers https://github.com/nipy/nipype
 
 WORKDIR /root
-
+
 COPY docker/circleci/run_* /usr/bin/
 RUN chmod +x /usr/bin/run_*
@@ -49,6 +49,6 @@ RUN pip install -r /root/src/nipype/requirements.txt
 COPY . /root/src/nipype
 RUN rm -r /usr/local/miniconda/lib/python3.5/site-packages/nipype* && \
     cd /root/src/nipype && \
-    pip install -e .
+    pip install -e .[all]
 
 CMD ["/bin/bash"]
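The Docker images now install the checkout with the ``all`` extra, while Travis selects a subset through ``$NIPYPE_EXTRAS``; both forms are resolved against the ``EXTRA_REQUIRES`` table that ``nipype/info.py`` defines below. A minimal sketch, assuming this branch has already been installed (for example with ``pip install -e .[all]``), of checking which extras the built distribution actually advertises::

    # Sketch only: inspect the installed distribution with pkg_resources
    # (bundled with setuptools); assumes nipype from this branch is installed.
    import pkg_resources

    dist = pkg_resources.get_distribution('nipype')
    print(sorted(dist.extras))                  # e.g. ['all', 'doc', 'duecredit', 'fmri', 'profiler', 'tests']
    print(dist.requires(extras=('profiler',)))  # core requirements plus psutil
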
diff --git a/nipype/info.py b/nipype/info.py
index ffab23276d..ca8aaa9ba7 100644
--- a/nipype/info.py
+++ b/nipype/info.py
@@ -48,49 +48,47 @@ def get_nipype_gitversion():
 if gitversion:
     _version_extra = '-' + gitversion + '.dev'
 
-# Format expected by setup.py and doc/source/conf.py: string of form "X.Y.Z"
-__version__ = "%s.%s.%s%s" % (_version_major,
+# Format expected by setup.py and doc/source/conf.py: string of form 'X.Y.Z'
+__version__ = '%s.%s.%s%s' % (_version_major,
                               _version_minor,
                               _version_micro,
                               _version_extra)
 
-CLASSIFIERS = ["Development Status :: 5 - Production/Stable",
-               "Environment :: Console",
-               "Intended Audience :: Science/Research",
-               "License :: OSI Approved :: Apache Software License",
-               "Operating System :: MacOS :: MacOS X",
-               "Operating System :: POSIX :: Linux",
-               "Programming Language :: Python :: 2.7",
-               "Programming Language :: Python :: 3.4",
-               "Programming Language :: Python :: 3.5",
-               "Topic :: Scientific/Engineering"]
+CLASSIFIERS = ['Development Status :: 5 - Production/Stable',
+               'Environment :: Console',
+               'Intended Audience :: Science/Research',
+               'License :: OSI Approved :: Apache Software License',
+               'Operating System :: MacOS :: MacOS X',
+               'Operating System :: POSIX :: Linux',
+               'Programming Language :: Python :: 2.7',
+               'Programming Language :: Python :: 3.4',
+               'Programming Language :: Python :: 3.5',
+               'Topic :: Scientific/Engineering']
 
 description = 'Neuroimaging in Python: Pipelines and Interfaces'
 
 # Note: this long_description is actually a copy/paste from the top-level
 # README.txt, so that it shows up nicely on PyPI. So please remember to edit
 # it only in one place and sync it correctly.
-long_description = \
-    """
-========================================================
+long_description = """========================================================
 NIPYPE: Neuroimaging in Python: Pipelines and Interfaces
 ========================================================
 
-Current neuroimaging software offer users an incredible opportunity to
-analyze data using a variety of different algorithms. However, this has
-resulted in a heterogeneous collection of specialized applications
+Current neuroimaging software offers users an incredible opportunity to \
+analyze data using a variety of different algorithms. However, this has \
+resulted in a heterogeneous collection of specialized applications \
 without transparent interoperability or a uniform operating interface.
 
-*Nipype*, an open-source, community-developed initiative under the
-umbrella of NiPy_, is a Python project that provides a uniform interface
-to existing neuroimaging software and facilitates interaction between
-these packages within a single workflow. Nipype provides an environment
-that encourages interactive exploration of algorithms from different
-packages (e.g., AFNI, ANTS, BRAINS, BrainSuite, Camino, FreeSurfer, FSL, MNE,
-MRtrix, MNE, Nipy, Slicer, SPM), eases the design of workflows within and
-between packages, and reduces the learning curve necessary to use different
-packages. Nipype is creating a collaborative platform for neuroimaging software
-development in a high-level language and addressing limitations of existing
+*Nipype*, an open-source, community-developed initiative under the \
+umbrella of NiPy_, is a Python project that provides a uniform interface \
+to existing neuroimaging software and facilitates interaction between \
+these packages within a single workflow. Nipype provides an environment \
+that encourages interactive exploration of algorithms from different \
+packages (e.g., AFNI, ANTS, BRAINS, BrainSuite, Camino, FreeSurfer, FSL, MNE, \
+MRtrix, Nipy, Slicer, SPM), eases the design of workflows within and \
+between packages, and reduces the learning curve necessary to use different \
+packages. Nipype is creating a collaborative platform for neuroimaging software \
+development in a high-level language and addressing limitations of existing \
 pipeline systems.
 
 *Nipype* allows you to:
@@ -116,33 +114,58 @@ def get_nipype_gitversion():
 PROV_MIN_VERSION = '1.4.0'
 
 NAME = 'nipype'
-MAINTAINER = "nipype developers"
-MAINTAINER_EMAIL = "neuroimaging@python.org"
+MAINTAINER = 'nipype developers'
+MAINTAINER_EMAIL = 'neuroimaging@python.org'
 DESCRIPTION = description
 LONG_DESCRIPTION = long_description
-URL = "http://nipy.org/nipype"
-DOWNLOAD_URL = "http://github.com/nipy/nipype/archives/master"
-LICENSE = "Apache License, 2.0"
+URL = 'http://nipy.org/nipype'
+DOWNLOAD_URL = 'http://github.com/nipy/nipype/archives/master'
+LICENSE = 'Apache License, 2.0'
 CLASSIFIERS = CLASSIFIERS
-AUTHOR = "nipype developers"
-AUTHOR_EMAIL = "neuroimaging@python.org"
-PLATFORMS = "OS Independent"
+AUTHOR = 'nipype developers'
+AUTHOR_EMAIL = 'neuroimaging@python.org'
+PLATFORMS = 'OS Independent'
 MAJOR = _version_major
 MINOR = _version_minor
 MICRO = _version_micro
 ISRELEASE = _version_extra == ''
 VERSION = __version__
 PROVIDES = ['nipype']
-REQUIRES = ["nibabel>=%s" % NIBABEL_MIN_VERSION,
-            "networkx>=%s" % NETWORKX_MIN_VERSION,
-            "numpy>=%s" % NUMPY_MIN_VERSION,
-            "python-dateutil>=%s" % DATEUTIL_MIN_VERSION,
-            "scipy>=%s" % SCIPY_MIN_VERSION,
-            "traits>=%s" % TRAITS_MIN_VERSION,
-            "nose>=%s" % NOSE_MIN_VERSION,
-            "future>=%s" % FUTURE_MIN_VERSION,
-            "simplejson>=%s" % SIMPLEJSON_MIN_VERSION,
-            "prov>=%s" % PROV_MIN_VERSION,
-            "mock",
-            "xvfbwrapper"]
+REQUIRES = [
+    'nibabel>=%s' % NIBABEL_MIN_VERSION,
+    'networkx>=%s' % NETWORKX_MIN_VERSION,
+    'numpy>=%s' % NUMPY_MIN_VERSION,
+    'python-dateutil>=%s' % DATEUTIL_MIN_VERSION,
+    'scipy>=%s' % SCIPY_MIN_VERSION,
+    'traits>=%s' % TRAITS_MIN_VERSION,
+    'future>=%s' % FUTURE_MIN_VERSION,
+    'simplejson>=%s' % SIMPLEJSON_MIN_VERSION,
+    'prov>=%s' % PROV_MIN_VERSION,
+    'xvfbwrapper',
+    'funcsigs'
+]
+
+TESTS_REQUIRES = [
+    'nose>=%s' % NOSE_MIN_VERSION,
+    'mock',
+    'codecov',
+    'doctest-ignore-unicode',
+    'dipy',
+    'nipy',
+    'matplotlib'
+]
+
+EXTRA_REQUIRES = {
+    'doc': ['Sphinx>=0.3', 'matplotlib', 'pydotplus'],
+    'tests': TESTS_REQUIRES,
+    'fmri': ['nitime', 'nilearn', 'dipy', 'nipy', 'matplotlib'],
+    'profiler': ['psutil'],
+    'duecredit': ['duecredit'],
+    # 'mesh': ['mayavi']  # Enable when it works
+}
+
+# Enable a handle to install all extra dependencies at once
+EXTRA_REQUIRES['all'] = [val for _, val in list(EXTRA_REQUIRES.items())]
+
 STATUS = 'stable'
+
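One note on the ``all`` handle: as written it collects the per-extra lists themselves, so ``EXTRA_REQUIRES['all']`` becomes a list of lists, whereas ``extras_require`` values are normally flat lists of requirement strings. A hedged sketch (not part of the patch) of a flattened, de-duplicated equivalent of that assignment, assuming the ``EXTRA_REQUIRES`` dict defined above::

    # Sketch only: flatten every extra's requirement list into one 'all' extra.
    EXTRA_REQUIRES['all'] = sorted(set(
        dep for extra_deps in EXTRA_REQUIRES.values() for dep in extra_deps))
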
diff --git a/setup.py b/setup.py
index 402af61b32..c1426c5e3f 100755
--- a/setup.py
+++ b/setup.py
@@ -12,43 +12,19 @@
 """
 # Build helper
-from __future__ import print_function
-import os
-from os.path import join as pjoin
 import sys
 from glob import glob
-from functools import partial
+import os
+from os.path import join as pjoin
 from io import open
 
-# BEFORE importing distutils, remove MANIFEST. distutils doesn't properly
-# update it when the contents of directories change.
-if os.path.exists('MANIFEST'):
-    os.remove('MANIFEST')
-
-# For some commands, use setuptools.
-if len(set(('develop', 'bdist_egg', 'bdist_rpm', 'bdist', 'bdist_dumb',
-            'install_egg_info', 'egg_info', 'easy_install', 'bdist_wheel',
-            'bdist_mpkg')).intersection(sys.argv)) > 0:
-    # import setuptools setup, thus monkeypatching distutils.
-    import setup_egg
-    from setuptools import setup
-else:
-    from distutils.core import setup
-
 # Commit hash writing, and dependency checking
-''' Distutils / setuptools helpers from nibabel.nisext'''
-from distutils.version import LooseVersion
-from distutils.command.build_py import build_py
-from distutils import log
+from setuptools.command.build_py import build_py
 
-PY3 = sys.version_info[0] >= 3
-if PY3:
-    string_types = (str, bytes)
-else:
-    string_types = (basestring, str, unicode)
+PY3 = sys.version_info[0] >= 3
 
 
-def get_comrec_build(pkg_dir, build_cmd=build_py):
+class BuildWithCommitInfoCommand(build_py):
     """ Return extended build command class for recording commit
 
     The extended command tries to run git to find the current commit, getting
@@ -83,216 +59,41 @@ def get_comrec_build(pkg_dir, build_cmd=build_py):
     information at the terminal.  See the ``pkg_info.py`` module in the nipy
     package for an example.
     """
-    class MyBuildPy(build_cmd):
-        ''' Subclass to write commit data into installation tree '''
-        def run(self):
-            import subprocess
-            try:
-                from configparser import ConfigParser
-            except ImportError:
-                from ConfigParser import ConfigParser
-
-            build_cmd.run(self)
-
-            proc = subprocess.Popen('git rev-parse --short HEAD',
-                                    stdout=subprocess.PIPE,
-                                    stderr=subprocess.PIPE,
-                                    shell=True)
-            repo_commit, _ = proc.communicate()
-            # Fix for python 3
-            repo_commit = '{}'.format(repo_commit)
-            # We write the installation commit even if it's empty
-            cfg_parser = ConfigParser()
-            cfg_parser.read(pjoin(pkg_dir, 'COMMIT_INFO.txt'))
-            cfg_parser.set('commit hash', 'install_hash', repo_commit)
-            out_pth = pjoin(self.build_lib, pkg_dir, 'COMMIT_INFO.txt')
-            if PY3:
-                cfg_parser.write(open(out_pth, 'wt'))
-            else:
-                cfg_parser.write(open(out_pth, 'wb'))
-    return MyBuildPy
-
-
-def _add_append_key(in_dict, key, value):
-    """ Helper for appending dependencies to setuptools args """
-    # If in_dict[key] does not exist, create it
-    # If in_dict[key] is a string, make it len 1 list of strings
-    # Append value to in_dict[key] list
-    if key not in in_dict:
-        in_dict[key] = []
-    elif isinstance(in_dict[key], string_types):
-        in_dict[key] = [in_dict[key]]
-    in_dict[key].append(value)
-
-
-# Dependency checks
-def package_check(pkg_name, version=None,
-                  optional=False,
-                  checker=LooseVersion,
-                  version_getter=None,
-                  messages=None,
-                  setuptools_args=None
-                  ):
-    ''' Check if package `pkg_name` is present and has good enough version
-
-    Has two modes of operation. If `setuptools_args` is None (the default),
-    raise an error for missing non-optional dependencies and log warnings for
-    missing optional dependencies. If `setuptools_args` is a dict, then fill
-    ``install_requires`` key value with any missing non-optional dependencies,
-    and the ``extras_requires`` key value with optional dependencies.
-
-    This allows us to work with and without setuptools. It also means we can
-    check for packages that have not been installed with setuptools to avoid
-    installing them again.
-
-    Parameters
-    ----------
-    pkg_name : str
-       name of package as imported into python
-    version : {None, str}, optional
-       minimum version of the package that we require. If None, we don't
-       check the version. Default is None
-    optional : bool or str, optional
-       If ``bool(optional)`` is False, raise error for absent package or wrong
-       version; otherwise warn. If ``setuptools_args`` is not None, and
-       ``bool(optional)`` is not False, then `optional` should be a string
-       giving the feature name for the ``extras_require`` argument to setup.
-    checker : callable, optional
-       callable with which to return comparable thing from version
-       string. Default is ``distutils.version.LooseVersion``
-    version_getter : {None, callable}:
-       Callable that takes `pkg_name` as argument, and returns the
-       package version string - as in::
+    def run(self):
+        from configparser import ConfigParser
+        import subprocess
+
+        build_py.run(self)
+        proc = subprocess.Popen('git rev-parse --short HEAD',
+                                stdout=subprocess.PIPE,
+                                stderr=subprocess.PIPE,
+                                shell=True)
+        repo_commit, _ = proc.communicate()
+        # Fix for python 3
+        if PY3:
+            repo_commit = repo_commit.decode()
+
+        # We write the installation commit even if it's empty
+        cfg_parser = ConfigParser()
+        cfg_parser.read(pjoin('nipype', 'COMMIT_INFO.txt'))
+        cfg_parser.set('commit hash', 'install_hash', repo_commit.strip())
+        out_pth = pjoin(self.build_lib, 'nipype', 'COMMIT_INFO.txt')
+        if PY3:
+            cfg_parser.write(open(out_pth, 'wt'))
+        else:
+            cfg_parser.write(open(out_pth, 'wb'))
+
+
+def main():
+    from setuptools import setup, find_packages
-            ``version = version_getter(pkg_name)``
-
-       If None, equivalent to::
-
-           mod = __import__(pkg_name); version = mod.__version__``
-    messages : None or dict, optional
-       dictionary giving output messages
-    setuptools_args : None or dict
-       If None, raise errors / warnings for missing non-optional / optional
-       dependencies. If dict fill key values ``install_requires`` and
-       ``extras_require`` for non-optional and optional dependencies.
-    '''
-    setuptools_mode = setuptools_args is not None
-    optional_tf = bool(optional)
-    if version_getter is None:
-        def version_getter(pkg_name):
-            mod = __import__(pkg_name)
-            return mod.__version__
-    if messages is None:
-        messages = {}
-    msgs = {
-        'missing': 'Cannot import package "%s" - is it installed?',
-        'missing opt': 'Missing optional package "%s"',
-        'opt suffix': '; you may get run-time errors',
-        'version too old': 'You have version %s of package "%s"'
-                           ' but we need version >= %s', }
-    msgs.update(messages)
-    status, have_version = _package_status(pkg_name,
-                                           version,
-                                           version_getter,
-                                           checker)
-    if status == 'satisfied':
-        return
-    if not setuptools_mode:
-        if status == 'missing':
-            if not optional_tf:
-                raise RuntimeError(msgs['missing'] % pkg_name)
-            log.warn(msgs['missing opt'] % pkg_name +
-                     msgs['opt suffix'])
-            return
-        elif status == 'no-version':
-            raise RuntimeError('Cannot find version for %s' % pkg_name)
-        assert status == 'low-version'
-        if not optional_tf:
-            raise RuntimeError(msgs['version too old'] % (have_version,
-                                                          pkg_name,
-                                                          version))
-        log.warn(msgs['version too old'] % (have_version,
-                                            pkg_name,
-                                            version) +
-                 msgs['opt suffix'])
-        return
-    # setuptools mode
-    if optional_tf and not isinstance(optional, string_types):
-        raise RuntimeError('Not-False optional arg should be string')
-    dependency = pkg_name
-    if version:
-        dependency += '>=' + version
-    if optional_tf:
-        if 'extras_require' not in setuptools_args:
-            setuptools_args['extras_require'] = {}
-        _add_append_key(setuptools_args['extras_require'],
-                        optional,
-                        dependency)
-        return
-    # add_append_key(setuptools_args, 'install_requires', dependency)
-    return
-
-
-def _package_status(pkg_name, version, version_getter, checker):
-    try:
-        __import__(pkg_name)
-    except ImportError:
-        return 'missing', None
-    if not version:
-        return 'satisfied', None
-    try:
-        have_version = version_getter(pkg_name)
-    except AttributeError:
-        return 'no-version', None
-    if checker(have_version) < checker(version):
-        return 'low-version', have_version
-    return 'satisfied', have_version
-
-cmdclass = {'build_py': get_comrec_build('nipype')}
-
-# Get version and release info, which is all stored in nipype/info.py
-ver_file = os.path.join('nipype', 'info.py')
-exec(open(ver_file).read(), locals())
-
-# Prepare setuptools args
-if 'setuptools' in sys.modules:
-    extra_setuptools_args = dict(
-        tests_require=['nose'],
-        test_suite='nose.collector',
-        zip_safe=False,
-        extras_require=dict(
-            doc='Sphinx>=0.3',
-            test='nose>=0.10.1'),
-    )
-    pkg_chk = partial(package_check, setuptools_args=extra_setuptools_args)
-else:
-    extra_setuptools_args = {}
-    pkg_chk = package_check
-
-# Do dependency checking
-pkg_chk('networkx', NETWORKX_MIN_VERSION)
-pkg_chk('nibabel', NIBABEL_MIN_VERSION)
-pkg_chk('numpy', NUMPY_MIN_VERSION)
-pkg_chk('scipy', SCIPY_MIN_VERSION)
-pkg_chk('traits', TRAITS_MIN_VERSION)
-pkg_chk('nose', NOSE_MIN_VERSION)
-pkg_chk('future', FUTURE_MIN_VERSION)
-pkg_chk('simplejson', SIMPLEJSON_MIN_VERSION)
-pkg_chk('prov', PROV_MIN_VERSION)
-custom_dateutil_messages = {'missing opt': ('Missing optional package "%s"'
-                                            ' provided by package '
-                                            '"python-dateutil"')}
-pkg_chk('dateutil', DATEUTIL_MIN_VERSION,
-        messages=custom_dateutil_messages)
-
-
-def main(**extra_args):
     thispath, _ = os.path.split(__file__)
+
     testdatafiles = [pjoin('testing', 'data', val)
                      for val in os.listdir(pjoin(thispath, 'nipype', 'testing', 'data'))
                      if not os.path.isdir(pjoin(thispath, 'nipype', 'testing', 'data', val))]
-    testdatafiles+=[
+    testdatafiles += [
         pjoin('testing', 'data', 'dicomdir', '*'),
         pjoin('testing', 'data', 'bedpostxout', '*'),
        pjoin('testing', 'data', 'tbss_dir', '*'),
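``BuildWithCommitInfoCommand`` above records the short git hash into the copy of ``COMMIT_INFO.txt`` that lands inside the built package. A minimal sketch (not nipype's own helper; Python 3 imports, and it assumes an installed package whose ``COMMIT_INFO.txt`` was rewritten by that command) of reading the value back::

    # Sketch only: read the hash recorded at build time from the installed tree.
    from configparser import ConfigParser
    from os.path import dirname, join

    import nipype

    cfg = ConfigParser()
    cfg.read(join(dirname(nipype.__file__), 'COMMIT_INFO.txt'))
    print(cfg.get('commit hash', 'install_hash'))
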
@@ -304,151 +105,40 @@ def main(**extra_args):
         pjoin('interfaces', 'tests', 'use_resources'),
     ]
 
-    setup(name=NAME,
-          maintainer=MAINTAINER,
-          maintainer_email=MAINTAINER_EMAIL,
-          description=DESCRIPTION,
-          long_description=LONG_DESCRIPTION,
-          url=URL,
-          download_url=DOWNLOAD_URL,
-          license=LICENSE,
-          classifiers=CLASSIFIERS,
-          author=AUTHOR,
-          author_email=AUTHOR_EMAIL,
-          platforms=PLATFORMS,
-          version=VERSION,
-          install_requires=REQUIRES,
-          provides=PROVIDES,
-          packages=['nipype',
-                    'nipype.algorithms',
-                    'nipype.algorithms.tests',
-                    'nipype.caching',
-                    'nipype.caching.tests',
-                    'nipype.external',
-                    'nipype.fixes',
-                    'nipype.fixes.numpy',
-                    'nipype.fixes.numpy.testing',
-                    'nipype.interfaces',
-                    'nipype.interfaces.afni',
-                    'nipype.interfaces.afni.tests',
-                    'nipype.interfaces.ants',
-                    'nipype.interfaces.ants.tests',
-                    'nipype.interfaces.camino',
-                    'nipype.interfaces.camino.tests',
-                    'nipype.interfaces.camino2trackvis',
-                    'nipype.interfaces.camino2trackvis.tests',
-                    'nipype.interfaces.cmtk',
-                    'nipype.interfaces.cmtk.tests',
-                    'nipype.interfaces.diffusion_toolkit',
-                    'nipype.interfaces.diffusion_toolkit.tests',
-                    'nipype.interfaces.dipy',
-                    'nipype.interfaces.dipy.tests',
-                    'nipype.interfaces.elastix',
-                    'nipype.interfaces.elastix.tests',
-                    'nipype.interfaces.freesurfer',
-                    'nipype.interfaces.freesurfer.tests',
-                    'nipype.interfaces.fsl',
-                    'nipype.interfaces.fsl.tests',
-                    'nipype.interfaces.minc',
-                    'nipype.interfaces.minc.tests',
-                    'nipype.interfaces.mipav',
-                    'nipype.interfaces.mipav.tests',
-                    'nipype.interfaces.mne',
-                    'nipype.interfaces.mne.tests',
-                    'nipype.interfaces.mrtrix',
-                    'nipype.interfaces.mrtrix3',
-                    'nipype.interfaces.mrtrix.tests',
-                    'nipype.interfaces.mrtrix3.tests',
-                    'nipype.interfaces.nipy',
-                    'nipype.interfaces.nipy.tests',
-                    'nipype.interfaces.nitime',
-                    'nipype.interfaces.nitime.tests',
-                    'nipype.interfaces.script_templates',
-                    'nipype.interfaces.semtools',
-                    'nipype.interfaces.semtools.brains',
-                    'nipype.interfaces.semtools.brains.tests',
-                    'nipype.interfaces.semtools.diffusion',
-                    'nipype.interfaces.semtools.diffusion.tests',
-                    'nipype.interfaces.semtools.diffusion.tractography',
-                    'nipype.interfaces.semtools.diffusion.tractography.tests',
-                    'nipype.interfaces.semtools.filtering',
-                    'nipype.interfaces.semtools.filtering.tests',
-                    'nipype.interfaces.semtools.legacy',
-                    'nipype.interfaces.semtools.legacy.tests',
-                    'nipype.interfaces.semtools.registration',
-                    'nipype.interfaces.semtools.registration.tests',
-                    'nipype.interfaces.semtools.segmentation',
-                    'nipype.interfaces.semtools.segmentation.tests',
-                    'nipype.interfaces.semtools.testing',
-                    'nipype.interfaces.semtools.tests',
-                    'nipype.interfaces.semtools.utilities',
-                    'nipype.interfaces.semtools.utilities.tests',
-                    'nipype.interfaces.slicer',
-                    'nipype.interfaces.slicer.diffusion',
-                    'nipype.interfaces.slicer.diffusion.tests',
-                    'nipype.interfaces.slicer.filtering',
-                    'nipype.interfaces.slicer.filtering.tests',
-                    'nipype.interfaces.slicer.legacy',
-                    'nipype.interfaces.slicer.legacy.diffusion',
-                    'nipype.interfaces.slicer.legacy.diffusion.tests',
-                    'nipype.interfaces.slicer.legacy.tests',
-                    'nipype.interfaces.slicer.quantification',
-                    'nipype.interfaces.slicer.quantification.tests',
-                    'nipype.interfaces.slicer.registration',
-                    'nipype.interfaces.slicer.registration.tests',
-                    'nipype.interfaces.slicer.segmentation',
-                    'nipype.interfaces.slicer.segmentation.tests',
-                    'nipype.interfaces.slicer.tests',
-                    'nipype.interfaces.spm',
-                    'nipype.interfaces.spm.tests',
-                    'nipype.interfaces.tests',
-                    'nipype.interfaces.vista',
-                    'nipype.interfaces.vista.tests',
-                    'nipype.pipeline',
-                    'nipype.pipeline.engine',
-                    'nipype.pipeline.engine.tests',
-                    'nipype.pipeline.plugins',
-                    'nipype.pipeline.plugins.tests',
-                    'nipype.testing',
-                    'nipype.testing.data',
-                    'nipype.testing.data.bedpostxout',
-                    'nipype.testing.data.dicomdir',
-                    'nipype.testing.data.tbss_dir',
-                    'nipype.utils',
-                    'nipype.utils.tests',
-                    'nipype.workflows',
-                    'nipype.workflows.data',
-                    'nipype.workflows.dmri',
-                    'nipype.workflows.dmri.camino',
-                    'nipype.workflows.dmri.connectivity',
-                    'nipype.workflows.dmri.dipy',
-                    'nipype.workflows.dmri.fsl',
-                    'nipype.workflows.dmri.fsl.tests',
-                    'nipype.workflows.dmri.mrtrix',
-                    'nipype.workflows.fmri',
-                    'nipype.workflows.fmri.fsl',
-                    'nipype.workflows.fmri.fsl.tests',
-                    'nipype.workflows.fmri.spm',
-                    'nipype.workflows.fmri.spm.tests',
-                    'nipype.workflows.graph',
-                    'nipype.workflows.misc',
-                    'nipype.workflows.rsfmri',
-                    'nipype.workflows.rsfmri.fsl',
-                    'nipype.workflows.smri',
-                    'nipype.workflows.smri.ants',
-                    'nipype.workflows.smri.freesurfer',
-                    'nipype.workflows.warp'],
-          # The package_data spec has no effect for me (on python 2.6) -- even
-          # changing to data_files doesn't get this stuff included in the source
-          # distribution -- not sure if it has something to do with the magic
-          # above, but distutils is surely the worst piece of code in all of
-          # python -- duplicating things into MANIFEST.in but this is admittedly
-          # only a workaround to get things started -- not a solution
-          package_data={'nipype': testdatafiles},
-          scripts=glob('bin/*') + ['nipype/external/fsl_imglob.py'],
-          cmdclass=cmdclass,
-          **extra_args
-          )
+    # Python 3: use a locals dictionary
+    # http://stackoverflow.com/a/1463370/6820620
+    ldict = locals()
+    # Get version and release info, which is all stored in nipype/info.py
+    ver_file = os.path.join(thispath, 'nipype', 'info.py')
+    with open(ver_file) as infofile:
+        exec(infofile.read(), globals(), ldict)
+
+    setup(
+        name=ldict['NAME'],
+        maintainer=ldict['MAINTAINER'],
+        maintainer_email=ldict['MAINTAINER_EMAIL'],
+        description=ldict['DESCRIPTION'],
+        long_description=ldict['LONG_DESCRIPTION'],
+        url=ldict['URL'],
+        download_url=ldict['DOWNLOAD_URL'],
+        license=ldict['LICENSE'],
+        classifiers=ldict['CLASSIFIERS'],
+        author=ldict['AUTHOR'],
+        author_email=ldict['AUTHOR_EMAIL'],
+        platforms=ldict['PLATFORMS'],
+        version=ldict['VERSION'],
+        install_requires=ldict['REQUIRES'],
+        setup_requires=['future', 'configparser'],
+        provides=ldict['PROVIDES'],
+        packages=find_packages(exclude=['*.tests']),
+        package_data={'nipype': testdatafiles},
+        scripts=glob('bin/*'),
+        cmdclass={'build_py': BuildWithCommitInfoCommand},
+        tests_require=ldict['TESTS_REQUIRES'],
+        test_suite='nose.collector',
+        zip_safe=False,
+        extras_require=ldict['EXTRA_REQUIRES']
+    )
 
 
 if __name__ == "__main__":
-    main(**extra_setuptools_args)
+    main()
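``main()`` now exec-s ``nipype/info.py`` into an explicitly passed mapping (``ldict``) and reads the metadata back through that mapping rather than as bare local names, following the Stack Overflow link quoted in the comment. A toy illustration (arbitrary values, not repository code) of why the explicit namespace matters on Python 3::

    # Sketch only: exec() inside a function cannot reliably create new local
    # variables on Python 3, but assignments into an explicit dict remain visible.
    def load_info(source="NAME = 'example'\nVERSION = '0.0.0'"):
        ns = {}
        exec(source, globals(), ns)
        return ns['NAME'], ns['VERSION']

    print(load_info())  # ('example', '0.0.0')

Note also that ``find_packages(exclude=['*.tests'])`` leaves out the ``*.tests`` subpackages that the old explicit ``packages`` list shipped; whether that is intentional is worth confirming during review.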