diff --git a/.circle/tests.sh b/.circle/tests.sh deleted file mode 100644 index 0178ab91dd..0000000000 --- a/.circle/tests.sh +++ /dev/null @@ -1,51 +0,0 @@ -#!/bin/bash -# -# Balance nipype testing workflows across CircleCI build nodes -# - -# Setting # $ help set -set -e # Exit immediately if a command exits with a non-zero status. -set -u # Treat unset variables as an error when substituting. -set -x # Print command traces before executing command. - -if [ "${CIRCLE_NODE_TOTAL:-}" != "4" ]; then - echo "These tests were designed to be run at 4x parallelism." - exit 1 -fi - -# These tests are manually balanced based on previous build timings. -# They may need to be rebalanced in the future. -case ${CIRCLE_NODE_INDEX} in - 0) - docker run --rm=false -it -e CI_SKIP_TEST=1 -e NIPYPE_RESOURCE_MONITOR=1 -e FSL_COURSE_DATA="/data/examples/nipype-fsl_course_data" -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py36 /usr/bin/run_pytests.sh && \ - docker run --rm=false -it -e CI_SKIP_TEST=1 -e NIPYPE_RESOURCE_MONITOR=1 -e FSL_COURSE_DATA="/data/examples/nipype-fsl_course_data" -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py27 /usr/bin/run_pytests.sh && \ - docker run --rm=false -it -v $WORKDIR:/work -w /src/nipype/doc --entrypoint=/usr/bin/run_builddocs.sh nipype/nipype:py36 /usr/bin/run_builddocs.sh && \ - docker run --rm=false -it -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py36 /usr/bin/run_examples.sh test_spm Linear /data/examples/ workflow3d && \ - docker run --rm=false -it -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py36 /usr/bin/run_examples.sh test_spm Linear /data/examples/ workflow4d - exitcode=$? 
- ;; - 1) - docker run --rm=false -it -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py36 /usr/bin/run_examples.sh fmri_spm_dartel Linear /data/examples/ level1 && \ - docker run --rm=false -it -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py36 /usr/bin/run_examples.sh fmri_spm_dartel Linear /data/examples/ l2pipeline - exitcode=$? - ;; - 2) - docker run --rm=false -it -e NIPYPE_NUMBER_OF_CPUS=4 -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py36 /usr/bin/run_examples.sh fmri_spm_nested MultiProc /data/examples/ level1 && \ - docker run --rm=false -it -e NIPYPE_NUMBER_OF_CPUS=4 -e NIPYPE_RESOURCE_MONITOR=1 -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py27 /usr/bin/run_examples.sh fmri_spm_nested MultiProc /data/examples/ l2pipeline - exitcode=$? - ;; - 3) - docker run --rm=false -it -e NIPYPE_NUMBER_OF_CPUS=4 -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py36 /usr/bin/run_examples.sh fmri_spm_nested MultiProc /data/examples/ level1 && \ - docker run --rm=false -it -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py36 /usr/bin/run_examples.sh fmri_fsl_feeds Linear /data/examples/ l1pipeline && \ - docker run --rm=false -it -v $HOME/examples:/data/examples:ro -v $WORKDIR:/work -w /work nipype/nipype:py36 /usr/bin/run_examples.sh fmri_fsl_reuse Linear /data/examples/ level1_workflow - exitcode=$? 
- ;; -esac - -cp ${WORKDIR}/tests/*.xml ${CIRCLE_TEST_REPORTS}/tests/ - -# Exit with error if any of the tests failed -if [ "$exitcode" != "0" ]; then exit 1; fi -codecov -f "coverage*.xml" -s "${WORKDIR}/tests/" -R "${HOME}/nipype/" -F unittests -e CIRCLE_NODE_INDEX -codecov -f "smoketest*.xml" -s "${WORKDIR}/tests/" -R "${HOME}/nipype/" -F smoketests -e CIRCLE_NODE_INDEX - diff --git a/.circleci/config.yml b/.circleci/config.yml new file mode 100644 index 0000000000..bd09e99e84 --- /dev/null +++ b/.circleci/config.yml @@ -0,0 +1,197 @@ +version: 2 +jobs: + + compare_base_dockerfiles: + docker: + - image: docker:17.10.0-ce-git + steps: + - checkout: + path: /home/circleci/nipype + - setup_remote_docker + - run: + name: Generate and prune base Dockerfile in preparation for cache check + working_directory: /home/circleci/nipype/docker + command: | + mkdir -p /tmp/docker + ash ./generate_dockerfiles.sh -b + + # Use the sha256 sum of the pruned Dockerfile as the cache key. + ash prune_dockerfile.sh Dockerfile.base > /tmp/docker/Dockerfile.base-pruned + - restore_cache: + key: dockerfile-cache-v1-master-{{ checksum "/tmp/docker/Dockerfile.base-pruned" }} + - run: + name: Determine how to get base image + command: | + if [ -f /tmp/docker/cache/Dockerfile.base-pruned ]; then + echo "Cache found. Will pull base image." + echo 'export GET_BASE=PULL' > /tmp/docker/get_base_image.sh + else + echo "Cache not found. Will build base image." 
+ echo 'export GET_BASE=BUILD' > /tmp/docker/get_base_image.sh + fi + - persist_to_workspace: + root: /tmp + paths: + - docker/Dockerfile.base-pruned + - docker/get_base_image.sh + + + build_and_test: + parallelism: 4 + machine: + # Ubuntu 14.04 with Docker 17.10.0-ce + image: circleci/classic:201710-02 + working_directory: /home/circleci/nipype + steps: + - checkout: + path: /home/circleci/nipype + - attach_workspace: + at: /tmp + - run: + name: Get test dependencies and generate Dockerfiles + command: | + pip install --no-cache-dir codecov + make gen-dockerfiles + - run: + name: Modify Nipype version if necessary + command: | + if [ "$CIRCLE_TAG" != "" ]; then + sed -i -E "s/(__version__ = )'[A-Za-z0-9.-]+'/\1'$CIRCLE_TAG'/" nipype/info.py + fi + - run: + name: Get base image (pull or build) + no_output_timeout: 60m + command: | + source /tmp/docker/get_base_image.sh + if [ "$GET_BASE" == "PULL" ]; then + echo "Pulling base image ..." + docker pull nipype/nipype:base + elif [ "$GET_BASE" == "BUILD" ]; then + echo "Building base image ..." 
+ docker build -t nipype/nipype:base - < docker/Dockerfile.base + else + echo "Error: method to get base image not understood" + exit 1 + fi + - run: + name: Build main image (py36) + no_output_timeout: 60m + command: | + e=1 && for i in {1..5}; do + docker build \ + --rm=false \ + --tag nipype/nipype:latest \ + --tag nipype/nipype:py36 \ + --build-arg BUILD_DATE="$(date -u +"%Y-%m-%dT%H:%M:%SZ")" \ + --build-arg VCS_REF="$(git rev-parse --short HEAD)" \ + --build-arg VERSION="${CIRCLE_TAG}" /home/circleci/nipype \ + && e=0 && break || sleep 15 + done && [ "$e" -eq "0" ] + - run: + name: Build main image (py27) + no_output_timeout: 60m + command: | + e=1 && for i in {1..5}; do + docker build \ + --rm=false \ + --tag nipype/nipype:py27 \ + --build-arg PYTHON_VERSION_MAJOR=2 \ + --build-arg PYTHON_VERSION_MINOR=7 \ + --build-arg BUILD_DATE="$(date -u +"%Y-%m-%dT%H:%M:%SZ")" \ + --build-arg VCS_REF="$(git rev-parse --short HEAD)" \ + --build-arg VERSION="${CIRCLE_TAG}-py27" /home/circleci/nipype \ + && e=0 && break || sleep 15 + done && [ "$e" -eq "0" ] + - run: + name: Download test data + no_output_timeout: 20m + working_directory: /home/circleci/examples + environment: + OSF_NIPYPE_URL: "https://files.osf.io/v1/resources/nefdp/providers/osfstorage" + command: | + export DATA_NIPYPE_TUTORIAL_URL="${OSF_NIPYPE_URL}/57f4739cb83f6901ed94bf21" + curl -sSL --retry 5 --connect-timeout 15 "$DATA_NIPYPE_TUTORIAL_URL" | tar xj + + export DATA_NIPYPE_FSL_COURSE="${OSF_NIPYPE_URL}/57f472cf9ad5a101f977ecfe" + curl -sSL --retry 5 --connect-timeout 15 "$DATA_NIPYPE_FSL_COURSE" | tar xz + + export DATA_NIPYPE_FSL_FEEDS="${OSF_NIPYPE_URL}/57f473066c613b01f113e7af" + curl -sSL --retry 5 --connect-timeout 15 "$DATA_NIPYPE_FSL_FEEDS" | tar xz + - run: + name: Run tests + no_output_timeout: 4h + environment: + WORKDIR: /home/circleci/work + command: | + mkdir -p "$WORKDIR" + chmod -R 777 "$WORKDIR" + bash /home/circleci/nipype/.circleci/tests.sh + - store_artifacts: + path: 
/home/circleci/work/tests + - run: + name: Save Docker images to workspace + no_output_timeout: 60m + command: | + if [ "$CIRCLE_NODE_INDEX" -eq "0" ] && [ "$CIRCLE_BRANCH" == "master" ]; then + docker save nipype/nipype:base \ + nipype/nipype:latest \ + nipype/nipype:py36 \ + nipype/nipype:py27 | gzip -1 > /tmp/docker/nipype-base-latest-py36-py27.tar.gz + du -h /tmp/docker/nipype-base-latest-py36-py27.tar.gz + else + # Workaround for `persist_to_workspace` to succeed when we are + # not deploying Docker images. + touch /tmp/docker/nipype-base-latest-py36-py27.tar.gz + fi + - persist_to_workspace: + root: /tmp + paths: + - docker/nipype-base-latest-py36-py27.tar.gz + + + deploy: + docker: + - image: docker:17.10.0-ce-git + steps: + - setup_remote_docker + - attach_workspace: + at: /tmp + - run: + name: Load saved Docker images. + no_output_timeout: 60m + command: | + docker load < /tmp/docker/nipype-base-latest-py36-py27.tar.gz + - run: + name: Push to DockerHub + no_output_timeout: 120m + command: | + echo "$DOCKER_PASS" | docker login -u "$DOCKER_USER" --password-stdin + docker push nipype/nipype:base + docker push nipype/nipype:latest + docker push nipype/nipype:py36 + docker push nipype/nipype:py27 + - run: + name: Move pruned Dockerfile to /tmp/docker/cache directory + command: | + mkdir -p /tmp/docker/cache/ + mv /tmp/docker/Dockerfile.base-pruned /tmp/docker/cache/Dockerfile.base-pruned + - save_cache: + paths: + - /tmp/docker/cache/Dockerfile.base-pruned + key: dockerfile-cache-v1-{{ .Branch }}-{{ checksum "/tmp/docker/cache/Dockerfile.base-pruned" }} + + +workflows: + version: 2 + build_test_deploy: + jobs: + - compare_base_dockerfiles + - build_and_test: + requires: + - compare_base_dockerfiles + - deploy: + filters: + branches: + only: master + requires: + - build_and_test diff --git a/.circleci/tests.sh b/.circleci/tests.sh new file mode 100644 index 0000000000..f55a3249d7 --- /dev/null +++ b/.circleci/tests.sh @@ -0,0 +1,54 @@ +#!/bin/bash +# +# Balance 
nipype testing workflows across CircleCI build nodes +# + +# Setting # $ help set +set -e # Exit immediately if a command exits with a non-zero status. +set -u # Treat unset variables as an error when substituting. +set -x # Print command traces before executing command. + +if [ "${CIRCLE_NODE_TOTAL:-}" != "4" ]; then + echo "These tests were designed to be run at 4x parallelism." + exit 1 +fi + +DOCKER_IMAGE="nipype/nipype" + +# These tests are manually balanced based on previous build timings. +# They may need to be rebalanced in the future. +case ${CIRCLE_NODE_INDEX} in + 0) + docker run --rm=false -t -v $WORKDIR:/work -v $HOME/examples:/data/examples:ro -w /work -e CI_SKIP_TEST=1 -e NIPYPE_RESOURCE_MONITOR=1 -e FSL_COURSE_DATA="/data/examples/nipype-fsl_course_data" "${DOCKER_IMAGE}:py36" /usr/bin/run_pytests.sh \ + && docker run --rm=false -t -v $WORKDIR:/work -v $HOME/examples:/data/examples:ro -w /work -e CI_SKIP_TEST=1 -e NIPYPE_RESOURCE_MONITOR=1 -e FSL_COURSE_DATA="/data/examples/nipype-fsl_course_data" "${DOCKER_IMAGE}:py27" /usr/bin/run_pytests.sh \ + && docker run --rm=false -t -v $WORKDIR:/work -v $HOME/examples:/data/examples:ro -w /src/nipype/doc "${DOCKER_IMAGE}:py36" /usr/bin/run_builddocs.sh \ + && docker run --rm=false -t -v $WORKDIR:/work -v $HOME/examples:/data/examples:ro -w /work "${DOCKER_IMAGE}:py36" /usr/bin/run_examples.sh test_spm Linear /data/examples/ workflow3d \ + && docker run --rm=false -t -v $WORKDIR:/work -v $HOME/examples:/data/examples:ro -w /work "${DOCKER_IMAGE}:py36" /usr/bin/run_examples.sh test_spm Linear /data/examples/ workflow4d + exitcode=$? 
+ ;; + 1) + docker run --rm=false -t -v $WORKDIR:/work -v $HOME/examples:/data/examples:ro -w /work "${DOCKER_IMAGE}:py36" /usr/bin/run_examples.sh fmri_spm_dartel Linear /data/examples/ level1 \ + && docker run --rm=false -t -v $WORKDIR:/work -v $HOME/examples:/data/examples:ro -w /work "${DOCKER_IMAGE}:py36" /usr/bin/run_examples.sh fmri_spm_dartel Linear /data/examples/ l2pipeline + exitcode=$? + ;; + 2) + docker run --rm=false -t -v $WORKDIR:/work -v $HOME/examples:/data/examples:ro -w /work -e NIPYPE_NUMBER_OF_CPUS=4 "${DOCKER_IMAGE}:py36" /usr/bin/run_examples.sh fmri_spm_nested MultiProc /data/examples/ level1 \ + && docker run --rm=false -t -v $WORKDIR:/work -v $HOME/examples:/data/examples:ro -w /work -e NIPYPE_NUMBER_OF_CPUS=4 -e NIPYPE_RESOURCE_MONITOR=1 "${DOCKER_IMAGE}:py27" /usr/bin/run_examples.sh fmri_spm_nested MultiProc /data/examples/ l2pipeline + exitcode=$? + ;; + 3) + docker run --rm=false -t -v $WORKDIR:/work -v $HOME/examples:/data/examples:ro -w /work -e NIPYPE_NUMBER_OF_CPUS=4 "${DOCKER_IMAGE}:py36" /usr/bin/run_examples.sh fmri_spm_nested MultiProc /data/examples/ level1 \ + && docker run --rm=false -t -v $WORKDIR:/work -v $HOME/examples:/data/examples:ro -w /work "${DOCKER_IMAGE}:py36" /usr/bin/run_examples.sh fmri_fsl_feeds Linear /data/examples/ l1pipeline \ + && docker run --rm=false -t -v $WORKDIR:/work -v $HOME/examples:/data/examples:ro -w /work "${DOCKER_IMAGE}:py36" /usr/bin/run_examples.sh fmri_fsl_reuse Linear /data/examples/ level1_workflow + exitcode=$? 
+ ;; +esac + +# Exit with error if any of the tests failed +if [ "$exitcode" != "0" ]; then exit 1; fi + +codecov --file "${WORKDIR}/tests/coverage*.xml" \ + --root "${HOME}/nipype/" --flags unittests -e CIRCLE_NODE_INDEX + +codecov --file "${WORKDIR}/tests/smoketest*.xml" \ + --root "${HOME}/nipype/" --flags smoketests -e CIRCLE_NODE_INDEX diff --git a/.zenodo.json b/.zenodo.json index 2fb6b63d61..41497da6d8 100644 --- a/.zenodo.json +++ b/.zenodo.json @@ -533,6 +533,11 @@ "affiliation": "University of Texas at Austin", "name": "De La Vega, Alejandro", "orcid": "0000-0001-9062-3778" + }, + { + "affiliation": "MIT", + "name": "Kaczmarzyk, Jakub", + "orcid": "0000-0002-5544-7577" } ], "keywords": [ diff --git a/Dockerfile b/Dockerfile deleted file mode 100644 index 14a6dec135..0000000000 --- a/Dockerfile +++ /dev/null @@ -1,111 +0,0 @@ -# Copyright (c) 2016, The developers of the Stanford CRN -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are met: -# -# * Redistributions of source code must retain the above copyright notice, this -# list of conditions and the following disclaimer. -# -# * Redistributions in binary form must reproduce the above copyright notice, -# this list of conditions and the following disclaimer in the documentation -# and/or other materials provided with the distribution. -# -# * Neither the name of crn_base nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -# DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - - -# -# Based on https://github.com/poldracklab/fmriprep/blob/9c92a3de9112f8ef1655b876de060a2ad336ffb0/Dockerfile -# -FROM nipype/base:latest -MAINTAINER The nipype developers https://github.com/nipy/nipype - -ARG PYTHON_VERSION_MAJOR=3 - -# Installing and setting up miniconda -RUN curl -sSLO https://repo.continuum.io/miniconda/Miniconda${PYTHON_VERSION_MAJOR}-4.2.12-Linux-x86_64.sh && \ - bash Miniconda${PYTHON_VERSION_MAJOR}-4.2.12-Linux-x86_64.sh -b -p /usr/local/miniconda && \ - rm Miniconda${PYTHON_VERSION_MAJOR}-4.2.12-Linux-x86_64.sh - -ENV PATH=/usr/local/miniconda/bin:$PATH \ - LANG=C.UTF-8 \ - LC_ALL=C.UTF-8 \ - ACCEPT_INTEL_PYTHON_EULA=yes \ - MKL_NUM_THREADS=1 \ - OMP_NUM_THREADS=1 -# MKL/OMP_NUM_THREADS: unless otherwise specified, each process should -# only use one thread - nipype will handle parallelization - -# Installing precomputed python packages -ARG PYTHON_VERSION_MINOR=6 -RUN conda config --add channels conda-forge; sync && \ - conda config --set always_yes yes --set changeps1 no; sync && \ - conda install -y python=${PYTHON_VERSION_MAJOR}.${PYTHON_VERSION_MINOR} \ - mkl \ - numpy \ - scipy \ - scikit-learn \ - matplotlib \ - pandas \ - libxml2 \ - libxslt \ - traits=4.6.0 \ - psutil \ - icu=58.1 && \ - sync; - -# matplotlib cleanups: set default backend, precaching fonts -RUN sed -i 's/\(backend *: \).*$/\1Agg/g' 
/usr/local/miniconda/lib/python${PYTHON_VERSION_MAJOR}.${PYTHON_VERSION_MINOR}/site-packages/matplotlib/mpl-data/matplotlibrc && \ - python -c "from matplotlib import font_manager" - -# Install CI scripts -COPY docker/files/run_* /usr/bin/ -RUN chmod +x /usr/bin/run_* - -# Replace imglob with a Python3 compatible version -COPY nipype/external/fsl_imglob.py /usr/bin/fsl_imglob.py -RUN rm -rf ${FSLDIR}/bin/imglob && \ - chmod +x /usr/bin/fsl_imglob.py && \ - ln -s /usr/bin/fsl_imglob.py ${FSLDIR}/bin/imglob - -# Installing dev requirements (packages that are not in pypi) -WORKDIR /src/ -COPY requirements.txt requirements.txt -RUN pip install -r requirements.txt && \ - rm -rf ~/.cache/pip - -RUN git clone https://github.com/INCF/pybids.git && \ - cd pybids && python setup.py develop - -# Installing nipype -COPY . /src/nipype -RUN cd /src/nipype && \ - pip install -e .[all] && \ - rm -rf ~/.cache/pip - -WORKDIR /work/ - -ARG BUILD_DATE -ARG VCS_REF -ARG VERSION -LABEL org.label-schema.build-date=$BUILD_DATE \ - org.label-schema.name="NIPYPE" \ - org.label-schema.description="NIPYPE - Neuroimaging in Python: Pipelines and Interfaces" \ - org.label-schema.url="http://nipype.readthedocs.io" \ - org.label-schema.vcs-ref=$VCS_REF \ - org.label-schema.vcs-url="https://github.com/nipy/nipype" \ - org.label-schema.version=$VERSION \ - org.label-schema.schema-version="1.0" diff --git a/Makefile b/Makefile index 31f67bf500..0e1e927232 100644 --- a/Makefile +++ b/Makefile @@ -5,7 +5,7 @@ PYTHON ?= python NOSETESTS=`which nosetests` -.PHONY: zipdoc sdist egg upload_to_pypi trailing-spaces clean-pyc clean-so clean-build clean-ctags clean in inplace test-code test-coverage test html specs check-before-commit check +.PHONY: zipdoc sdist egg upload_to_pypi trailing-spaces clean-pyc clean-so clean-build clean-ctags clean in inplace test-code test-coverage test html specs check-before-commit check gen-base-dockerfile gen-main-dockerfile gen-dockerfiles zipdoc: html zip documentation.zip 
doc/_build/html @@ -61,7 +61,7 @@ test-code: in test-coverage: clean-tests in py.test --doctest-modules --cov-config .coveragerc --cov=nipype nipype - + test: tests # just another name tests: clean test-code @@ -79,3 +79,13 @@ check-before-commit: specs trailing-spaces html test @echo "built docs" @echo "ran test" @echo "generated spec tests" + +gen-base-dockerfile: + @echo "Generating base Dockerfile" + bash docker/generate_dockerfiles.sh -b + +gen-main-dockerfile: + @echo "Generating main Dockerfile" + bash docker/generate_dockerfiles.sh -m + +gen-dockerfiles: gen-base-dockerfile gen-main-dockerfile diff --git a/README.rst b/README.rst index 85d34a704d..8831d11b2e 100644 --- a/README.rst +++ b/README.rst @@ -2,7 +2,7 @@ NIPYPE: Neuroimaging in Python: Pipelines and Interfaces ======================================================== -.. image:: https://travis-ci.org/nipy/nipype.png?branch=master +.. image:: https://travis-ci.org/nipy/nipype.svg?branch=master :target: https://travis-ci.org/nipy/nipype .. image:: https://circleci.com/gh/nipy/nipype/tree/master.svg?style=svg @@ -94,4 +94,3 @@ Contributing to the project --------------------------- If you'd like to contribute to the project please read our `guidelines `_. Please also read through our `code of conduct `_. 
- diff --git a/circle.yml b/circle.yml deleted file mode 100644 index 5624dbb7f8..0000000000 --- a/circle.yml +++ /dev/null @@ -1,86 +0,0 @@ -machine: - pre: - - curl -sSL https://s3.amazonaws.com/circle-downloads/install-circleci-docker.sh | bash -s -- 1.10.0 - environment: - OSF_NIPYPE_URL: "https://files.osf.io/v1/resources/nefdp/providers/osfstorage" - DATA_NIPYPE_TUTORIAL_URL: "${OSF_NIPYPE_URL}/57f4739cb83f6901ed94bf21" - DATA_NIPYPE_FSL_COURSE: "${OSF_NIPYPE_URL}/57f472cf9ad5a101f977ecfe" - DATA_NIPYPE_FSL_FEEDS: "${OSF_NIPYPE_URL}/57f473066c613b01f113e7af" - WORKDIR: "$HOME/work" - CODECOV_TOKEN: "ac172a50-8e66-42e5-8822-5373fcf54686" - services: - - docker - -dependencies: - cache_directories: - - "~/docker" - - "~/examples" - - "~/.apt-cache" - - pre: - # Let CircleCI cache the apt archive - - mkdir -p ~/.apt-cache/partial && sudo rm -rf /var/cache/apt/archives && sudo ln -s ~/.apt-cache /var/cache/apt/archives - - sudo apt-get -y update && sudo apt-get install -y wget bzip2 - # Create work folder and force group permissions - - mkdir -p $WORKDIR && sudo setfacl -d -m group:ubuntu:rwx $WORKDIR && sudo setfacl -m group:ubuntu:rwx $WORKDIR - - mkdir -p $HOME/docker $HOME/examples $WORKDIR/tests $WORKDIR/logs $WORKDIR/crashfiles ${CIRCLE_TEST_REPORTS}/tests/ - - if [[ ! -e "$HOME/bin/codecov" ]]; then mkdir -p $HOME/bin; curl -so $HOME/bin/codecov https://codecov.io/bash && chmod 755 $HOME/bin/codecov; fi - - (cd $HOME/docker && gzip -d cache.tar.gz && docker load --input $HOME/docker/cache.tar) || true : - timeout: 6000 - override: - # Get data - - if [[ ! -d ~/examples/nipype-tutorial ]]; then wget --retry-connrefused --waitretry=5 --read-timeout=20 --timeout=15 -t 0 -q -O nipype-tutorial.tar.bz2 "${DATA_NIPYPE_TUTORIAL_URL}" && tar xjf nipype-tutorial.tar.bz2 -C ~/examples/; fi - - if [[ ! 
-d ~/examples/nipype-fsl_course_data ]]; then wget --retry-connrefused --waitretry=5 --read-timeout=20 --timeout=15 -t 0 -q -O nipype-fsl_course_data.tar.gz "${DATA_NIPYPE_FSL_COURSE}" && tar xzf nipype-fsl_course_data.tar.gz -C ~/examples/; fi - - if [[ ! -d ~/examples/feeds ]]; then wget --retry-connrefused --waitretry=5 --read-timeout=20 --timeout=15 -t 0 -q -O fsl-5.0.9-feeds.tar.gz "${DATA_NIPYPE_FSL_FEEDS}" && tar xzf fsl-5.0.9-feeds.tar.gz -C ~/examples/; fi - - if [ "$CIRCLE_TAG" != "" ]; then sed -i -E "s/(__version__ = )'[A-Za-z0-9.-]+'/\1'$CIRCLE_TAG'/" nipype/info.py; fi - # Docker - - docker images - - ? | - e=1 && for i in {1..5}; do - docker build --rm=false -f docker/base.Dockerfile -t nipype/base:latest . && e=0 && break || sleep 15; - done && [ "$e" -eq "0" ] - : - timeout: 21600 - - ? | - e=1 && for i in {1..5}; do - docker build --rm=false -t nipype/nipype:latest -t nipype/nipype:py36 --build-arg BUILD_DATE=`date -u +"%Y-%m-%dT%H:%M:%SZ"` --build-arg VCS_REF=`git rev-parse --short HEAD` --build-arg VERSION=$CIRCLE_TAG . && e=0 && break || sleep 15; - done && [ "$e" -eq "0" ] - : - timeout: 6000 - - ? | - e=1 && for i in {1..5}; do - docker build --rm=false -t nipype/nipype:py27 --build-arg PYTHON_VERSION_MAJOR=2 --build-arg PYTHON_VERSION_MINOR=7 --build-arg BUILD_DATE=`date -u +"%Y-%m-%dT%H:%M:%SZ"` --build-arg VCS_REF=`git rev-parse --short HEAD` --build-arg VERSION=$CIRCLE_TAG-py27 . 
&& e=0 && break || sleep 15; - done && [ "$e" -eq "0" ] - : - timeout: 6000 - - docker save -o $HOME/docker/cache.tar ubuntu:xenial-20161213 nipype/base:latest nipype/nipype:py36 && (cd $HOME/docker && gzip cache.tar) : - timeout: 6000 - -test: - override: - - bash .circle/tests.sh : - timeout: 7200 - parallel: true - -general: - artifacts: - - "~/work/docs" - - "~/work/logs" - - "~/work/tests" - - "~/work/crashfiles" - -deployment: - production: - tag: /.*/ - commands: - # Deploy to docker hub - - if [[ -n "$DOCKER_PASS" ]]; then docker login -e $DOCKER_EMAIL -u $DOCKER_USER -p $DOCKER_PASS && docker push nipype/base:latest; fi : - timeout: 21600 - - if [[ -n "$DOCKER_PASS" ]]; then docker login -e $DOCKER_EMAIL -u $DOCKER_USER -p $DOCKER_PASS && docker push nipype/nipype:latest; fi : - timeout: 21600 - - if [[ -n "$DOCKER_PASS" ]]; then docker login -e $DOCKER_EMAIL -u $DOCKER_USER -p $DOCKER_PASS && docker tag nipype/nipype nipype/nipype:$CIRCLE_TAG && docker push nipype/nipype:$CIRCLE_TAG; fi : - timeout: 21600 - -# Automatic deployment to Pypi: -# - printf "[distutils]\nindex-servers =\n pypi\n\n[pypi]\nusername:$PYPI_USER\npassword:$PYPI_PASS\n" > ~/.pypirc -# - python setup.py sdist upload -r pypi diff --git a/docker/base.Dockerfile b/docker/base.Dockerfile deleted file mode 100644 index 25fbb36401..0000000000 --- a/docker/base.Dockerfile +++ /dev/null @@ -1,151 +0,0 @@ -# Copyright (c) 2016, The developers of the Stanford CRN -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are met: -# -# * Redistributions of source code must retain the above copyright notice, this -# list of conditions and the following disclaimer. -# -# * Redistributions in binary form must reproduce the above copyright notice, -# this list of conditions and the following disclaimer in the documentation -# and/or other materials provided with the distribution. 
-# -# * Neither the name of crn_base nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - - -# -# Based on https://github.com/poldracklab/fmriprep/blob/9c92a3de9112f8ef1655b876de060a2ad336ffb0/Dockerfile -# -FROM ubuntu:xenial-20161213 -MAINTAINER The nipype developers https://github.com/nipy/nipype - -# Set noninteractive -ENV DEBIAN_FRONTEND=noninteractive - -# Installing requirements for freesurfer installation -RUN apt-get update && \ - apt-get install -y --no-install-recommends curl ca-certificates && \ - apt-get clean && \ - rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* - -WORKDIR /opt -# Installing freesurfer -- do it first so that it is cached early -#----------------------------------------------------------------------------- -# 3. 
Install FreeSurfer v6.0 (minimized with reprozip): -# https://github.com/freesurfer/freesurfer/issues/70 -#----------------------------------------------------------------------------- -RUN curl -sSL https://dl.dropbox.com/s/pbaisn6m5qpi9uu/recon-all-freesurfer6-2.min.tgz?dl=0 | tar zx -C /opt -ENV FS_OVERRIDE=0 \ - OS=Linux \ - FSF_OUTPUT_FORMAT=nii.gz \ - FIX_VERTEX_AREA=\ - FREESURFER_HOME=/opt/freesurfer -ENV MNI_DIR=$FREESURFER_HOME/mni \ - SUBJECTS_DIR=$FREESURFER_HOME/subjects -ENV PERL5LIB=$MNI_DIR/share/perl5 \ - MNI_PERL5LIB=$MNI_DIR/share/perl5 \ - MINC_BIN_DIR=$MNI_DIR/bin \ - MINC_LIB_DIR=$MNI_DIR/lib \ - MNI_DATAPATH=$MNI_DIR/data -ENV PATH=$FREESURFER_HOME/bin:$FREESURFER_HOME/tktools:$MINC_BIN_DIR:$PATH -ENV FSL_DIR=/usr/share/fsl/5.0 -RUN echo "cHJpbnRmICJrcnp5c3p0b2YuZ29yZ29sZXdza2lAZ21haWwuY29tXG41MTcyXG4gKkN2dW12RVYzelRmZ1xuRlM1Si8yYzFhZ2c0RVxuIiA+IC9vcHQvZnJlZXN1cmZlci9saWNlbnNlLnR4dAo=" | base64 -d | sh - -# Enable neurodebian -COPY docker/files/neurodebian.gpg /etc/apt/neurodebian.gpg -RUN curl -sSL http://neuro.debian.net/lists/xenial.us-ca.full >> /etc/apt/sources.list.d/neurodebian.sources.list && \ - apt-key add /etc/apt/neurodebian.gpg && \ - apt-key adv --refresh-keys --keyserver hkp://ha.pool.sks-keyservers.net 0xA5D32F012649A5A9 || true - -# Installing general Debian utilities and Neurodebian packages (FSL, AFNI, git) -RUN apt-get update && \ - apt-get install -y --no-install-recommends \ - fsl-core \ - fsl-mni152-templates \ - afni \ - ants \ - bzip2 \ - xvfb \ - git \ - graphviz \ - unzip \ - apt-utils \ - fusefat \ - make \ - file \ - # Added g++ to compile dipy in py3.6 - g++ \ - ruby && \ - apt-get clean && \ - rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* - -ENV FSLDIR=/usr/share/fsl/5.0 \ - FSLOUTPUTTYPE=NIFTI_GZ \ - FSLMULTIFILEQUIT=TRUE \ - POSSUMDIR=/usr/share/fsl/5.0 \ - LD_LIBRARY_PATH=/usr/lib/fsl/5.0:$LD_LIBRARY_PATH \ - FSLTCLSH=/usr/bin/tclsh \ - FSLWISH=/usr/bin/wish \ - AFNI_MODELPATH=/usr/lib/afni/models \ - 
AFNI_IMSAVE_WARNINGS=NO \ - AFNI_TTATLAS_DATASET=/usr/share/afni/atlases \ - AFNI_PLUGINPATH=/usr/lib/afni/plugins \ - ANTSPATH=/usr/lib/ants -ENV PATH=/usr/lib/fsl/5.0:/usr/lib/afni/bin:$ANTSPATH:$PATH - -# Installing and setting up c3d -RUN mkdir -p /opt/c3d && \ - curl -sSL "https://files.osf.io/v1/resources/nefdp/providers/osfstorage/59ca96a9b83f69025d6b8985?action=download&version=1&direct" \ - | tar -xzC /opt/c3d --strip-components 1 - -ENV C3DPATH=/opt/c3d/ -ENV PATH=$C3DPATH/bin:$PATH - -# Install fake-S3 -ENV GEM_HOME /usr/lib/ruby/gems/2.3 -ENV BUNDLE_PATH="$GEM_HOME" \ - BUNDLE_BIN="$GEM_HOME/bin" \ - BUNDLE_SILENCE_ROOT_WARNING=1 \ - BUNDLE_APP_CONFIG="$GEM_HOME" -ENV PATH $BUNDLE_BIN:$PATH -RUN mkdir -p "$GEM_HOME" "$BUNDLE_BIN" && \ - chmod 777 "$GEM_HOME" "$BUNDLE_BIN" - -RUN gem install fakes3 - -# Install Matlab MCR: from the good old install_spm_mcr.sh of @chrisfilo -RUN echo "destinationFolder=/opt/mcr" > mcr_options.txt && \ - echo "agreeToLicense=yes" >> mcr_options.txt && \ - echo "outputFile=/tmp/matlabinstall_log" >> mcr_options.txt && \ - echo "mode=silent" >> mcr_options.txt && \ - mkdir -p matlab_installer && \ - curl -sSL http://www.mathworks.com/supportfiles/downloads/R2015a/deployment_files/R2015a/installers/glnxa64/MCR_R2015a_glnxa64_installer.zip \ - -o matlab_installer/installer.zip && \ - unzip matlab_installer/installer.zip -d matlab_installer/ && \ - matlab_installer/install -inputFile mcr_options.txt && \ - rm -rf matlab_installer mcr_options.txt - -# Install SPM -RUN curl -sSL http://www.fil.ion.ucl.ac.uk/spm/download/restricted/utopia/dev/spm12_r6472_Linux_R2015a.zip -o spm12.zip && \ - unzip spm12.zip && \ - rm -rf spm12.zip - -ENV MATLABCMD="/opt/mcr/v85/toolbox/matlab" \ - SPMMCRCMD="/opt/spm12/run_spm12.sh /opt/mcr/v85/ script" \ - FORCE_SPMMCR=1 - -WORKDIR /work diff --git a/docker/files/run_pytests.sh b/docker/files/run_pytests.sh index 19b6fcab87..76935b42f8 100644 --- a/docker/files/run_pytests.sh +++ 
b/docker/files/run_pytests.sh @@ -4,7 +4,7 @@ set -x set -u -TESTPATH=${1:-/src/nipype/} +TESTPATH=${1:-/src/nipype/nipype} WORKDIR=${WORK:-/work} PYTHON_VERSION=$( python -c "import sys; print('{}{}'.format(sys.version_info[0], sys.version_info[1]))" ) @@ -34,4 +34,3 @@ find ${WORKDIR} -maxdepth 1 -name "crash-*" -exec mv {} ${WORKDIR}/crashfiles/ \ echo "Unit tests finished with exit code ${exit_code}" exit ${exit_code} - diff --git a/docker/generate_dockerfiles.sh b/docker/generate_dockerfiles.sh new file mode 100755 index 0000000000..52eee8a1e6 --- /dev/null +++ b/docker/generate_dockerfiles.sh @@ -0,0 +1,132 @@ +#!/usr/bin/env bash +# +# Generate base and main Dockerfiles for Nipype. + +set -e + +USAGE="usage: $(basename $0) [-h] [-b] [-m]" + +function Help { + cat <&2 + exit 1 + ;; + esac +done + + +# neurodocker version 0.3.1-19-g8d02eb4 +NEURODOCKER_IMAGE="kaczmarj/neurodocker@sha256:6b5f92f413b9710b7581e62293a8f74438b14ce7e4ab1ce68db2a09f7c64375a" + +# neurodebian:stretch-non-free pulled on November 3, 2017 +BASE_IMAGE="neurodebian@sha256:7590552afd0e7a481a33314724ae27f76ccedd05ffd7ac06ec38638872427b9b" + +NIPYPE_BASE_IMAGE="nipype/nipype:base" +PKG_MANAGER="apt" +DIR="$(dirname "$0")" + +function generate_base_dockerfile() { + docker run --rm "$NEURODOCKER_IMAGE" generate \ + --base "$BASE_IMAGE" --pkg-manager "$PKG_MANAGER" \ + --label maintainer="The nipype developers https://github.com/nipy/nipype" \ + --spm version=12 matlab_version=R2017a \ + --afni version=latest install_python2=true \ + --freesurfer version=6.0.0 min=true \ + --run 'echo "cHJpbnRmICJrcnp5c3p0b2YuZ29yZ29sZXdza2lAZ21haWwuY29tXG41MTcyXG4gKkN2dW12RVYzelRmZ1xuRlM1Si8yYzFhZ2c0RVxuIiA+IC9vcHQvZnJlZXN1cmZlci9saWNlbnNlLnR4dAo=" | base64 -d | sh' \ + --install ants apt-utils bzip2 convert3d file fsl-core fsl-mni152-templates \ + fusefat g++ git graphviz make ruby unzip xvfb \ + --add-to-entrypoint "source /etc/fsl/fsl.sh" \ + --env ANTSPATH='/usr/lib/ants' PATH='/usr/lib/ants:$PATH' \ + 
--run "gem install fakes3" \ + --no-check-urls > "$DIR/Dockerfile.base" +} + + +function generate_main_dockerfile() { + docker run --rm "$NEURODOCKER_IMAGE" generate \ + --base "$NIPYPE_BASE_IMAGE" --pkg-manager "$PKG_MANAGER" \ + --label maintainer="The nipype developers https://github.com/nipy/nipype" \ + --env MKL_NUM_THREADS=1 OMP_NUM_THREADS=1 \ + --user neuro \ + --miniconda env_name=neuro \ + activate=true \ + --copy docker/files/run_builddocs.sh docker/files/run_examples.sh \ + docker/files/run_pytests.sh nipype/external/fsl_imglob.py /usr/bin/ \ + --copy . /src/nipype \ + --user root \ + --run 'chown -R neuro /src +&& chmod +x /usr/bin/fsl_imglob.py /usr/bin/run_*.sh +&& . /etc/fsl/fsl.sh +&& ln -sf /usr/bin/fsl_imglob.py ${FSLDIR}/bin/imglob +&& mkdir /work +&& chown neuro /work' \ + --user neuro \ + --arg PYTHON_VERSION_MAJOR=3 PYTHON_VERSION_MINOR=6 BUILD_DATE VCS_REF VERSION \ + --miniconda env_name=neuro \ + conda_install='python=${PYTHON_VERSION_MAJOR}.${PYTHON_VERSION_MINOR} + icu=58.1 libxml2 libxslt matplotlib mkl numpy + pandas psutil scikit-learn scipy traits=4.6.0' \ + pip_opts="-e" \ + pip_install="/src/nipype[all]" \ + --run-bash "mkdir -p /src/pybids + && curl -sSL --retry 5 https://github.com/INCF/pybids/tarball/master + | tar -xz -C /src/pybids --strip-components 1 + && source activate neuro + && pip install --no-cache-dir -e /src/pybids" \ + --workdir /work \ + --label org.label-schema.build-date='$BUILD_DATE' \ + org.label-schema.name="NIPYPE" \ + org.label-schema.description="NIPYPE - Neuroimaging in Python: Pipelines and Interfaces" \ + org.label-schema.url="http://nipype.readthedocs.io" \ + org.label-schema.vcs-ref='$VCS_REF' \ + org.label-schema.vcs-url="https://github.com/nipy/nipype" \ + org.label-schema.version='$VERSION' \ + org.label-schema.schema-version="1.0" \ + --no-check-urls +} + + +if [ "$GENERATE_BASE" == 1 ]; then + generate_base_dockerfile > "$DIR/Dockerfile.base" +fi +if [ "$GENERATE_MAIN" == 1 ]; then + 
generate_main_dockerfile > "$DIR/../Dockerfile" +fi diff --git a/docker/prune_dockerfile.sh b/docker/prune_dockerfile.sh new file mode 100644 index 0000000000..e6b05ebbcf --- /dev/null +++ b/docker/prune_dockerfile.sh @@ -0,0 +1,9 @@ +#!/usr/bin/env bash + +if [ -z "$1" ]; then + echo "Usage: $(basename $0) " + exit 1 +fi + +# Remove empty lines, comments, and timestamp. +sed -e '/\s*#.*$/d' -e '/^\s*$/d' -e '/generation_timestamp/d' "$1"