diff --git a/.circle/codecov.sh b/.circle/codecov.sh new file mode 100644 index 0000000000..c71cf1c6f5 --- /dev/null +++ b/.circle/codecov.sh @@ -0,0 +1,25 @@ +#!/bin/bash +# +# This script pulls all coverage files into the $CIRCLE_TEST_REPORTS folder +# and sends data to codecov. +# + +# Setting # $ help set +set -e # Exit immediately if a command exits with a non-zero status. +set -u # Treat unset variables as an error when substituting. +set -x # Print command traces before executing command. + +mkdir -p ${CIRCLE_TEST_REPORTS}/ +for report in $( ls ~/scratch/*.xml ); do + rname=$( basename $report ) + cp ${report} ${CIRCLE_TEST_REPORTS}/${rname:: -4}_${CIRCLE_NODE_INDEX}.xml +done + +# Send coverage data to codecov.io +curl -so codecov.io https://codecov.io/bash +chmod 755 codecov.io + +find "${CIRCLE_TEST_REPORTS}/" -name 'coverage*.xml' -print0 | \ + xargs -0 -I file ./codecov.io -f file -t "${CODECOV_TOKEN}" -F unittests +find "${CIRCLE_TEST_REPORTS}/" -name 'smoketests*.xml' -print0 | \ + xargs -0 -I file ./codecov.io -f file -t "${CODECOV_TOKEN}" -F smoketests diff --git a/docker/files/tests.sh b/.circle/tests.sh similarity index 79% rename from docker/files/tests.sh rename to .circle/tests.sh index 3b43003294..602dddca8b 100644 --- a/docker/files/tests.sh +++ b/.circle/tests.sh @@ -1,9 +1,12 @@ #!/bin/bash +# +# Balance nipype testing workflows across CircleCI build nodes +# -set -o nounset -set -o xtrace - -export CODECOV_TOKEN=ac172a50-8e66-42e5-8822-5373fcf54686 +# Setting # $ help set +set -e # Exit immediately if a command exits with a non-zero status. +set -u # Treat unset variables as an error when substituting. +set -x # Print command traces before executing command. if [ "${CIRCLE_NODE_TOTAL:-}" != "4" ]; then echo "These tests were designed to be run at 4x parallelism." @@ -14,15 +17,15 @@ fi # They may need to be rebalanced in the future. 
case ${CIRCLE_NODE_INDEX} in 0) - docker run --rm -it -v $HOME/examples:/root/examples:ro -v $SCRATCH:/scratch -w /scratch nipype/nipype_test:py35 /usr/bin/run_examples.sh fmri_spm_dartel Linear /root/examples/ level1 && \ - docker run --rm -it -v $HOME/examples:/root/examples:ro -v $SCRATCH:/scratch -w /scratch nipype/nipype_test:py35 /usr/bin/run_examples.sh fmri_spm_dartel Linear /root/examples/ l2pipeline - ;; - 1) - docker run --rm -it -v $HOME/examples:/root/examples:ro -v $SCRATCH:/scratch -w /scratch nipype/nipype_test:py35 /usr/bin/run_examples.sh test_spm Linear /root/examples/ workflow3d && \ - docker run --rm -it -v $HOME/examples:/root/examples:ro -v $SCRATCH:/scratch -w /scratch nipype/nipype_test:py35 /usr/bin/run_examples.sh test_spm Linear /root/examples/ workflow4d && \ docker run --rm -it -e FSL_COURSE_DATA="/root/examples/nipype-fsl_course_data" -v $HOME/examples:/root/examples:ro -v $SCRATCH:/scratch -w /root/src/nipype nipype/nipype_test:py27 /usr/bin/run_pytests.sh py27 && \ docker run --rm -it -e FSL_COURSE_DATA="/root/examples/nipype-fsl_course_data" -v $HOME/examples:/root/examples:ro -v $SCRATCH:/scratch -w /root/src/nipype nipype/nipype_test:py35 /usr/bin/run_pytests.sh py35 && \ - docker run --rm -it -v $SCRATCH:/scratch -w /root/src/nipype/doc nipype/nipype_test:py35 /usr/bin/run_builddocs.sh + docker run --rm -it -v $SCRATCH:/scratch -w /root/src/nipype/doc nipype/nipype_test:py35 /usr/bin/run_builddocs.sh && \ + docker run --rm -it -v $HOME/examples:/root/examples:ro -v $SCRATCH:/scratch -w /scratch nipype/nipype_test:py35 /usr/bin/run_examples.sh test_spm Linear /root/examples/ workflow3d && \ + docker run --rm -it -v $HOME/examples:/root/examples:ro -v $SCRATCH:/scratch -w /scratch nipype/nipype_test:py35 /usr/bin/run_examples.sh test_spm Linear /root/examples/ workflow4d + ;; + 1) + docker run --rm -it -v $HOME/examples:/root/examples:ro -v $SCRATCH:/scratch -w /scratch nipype/nipype_test:py35 /usr/bin/run_examples.sh 
fmri_spm_dartel Linear /root/examples/ level1 && \ + docker run --rm -it -v $HOME/examples:/root/examples:ro -v $SCRATCH:/scratch -w /scratch nipype/nipype_test:py35 /usr/bin/run_examples.sh fmri_spm_dartel Linear /root/examples/ l2pipeline ;; 2) docker run --rm -it -e NIPYPE_NUMBER_OF_CPUS=4 -v $HOME/examples:/root/examples:ro -v $SCRATCH:/scratch -w /scratch nipype/nipype_test:py27 /usr/bin/run_examples.sh fmri_spm_nested MultiProc /root/examples/ level1 && \ @@ -34,14 +37,3 @@ case ${CIRCLE_NODE_INDEX} in docker run --rm -it -v $HOME/examples:/root/examples:ro -v $SCRATCH:/scratch -w /scratch nipype/nipype_test:py35 /usr/bin/run_examples.sh fmri_fsl_reuse Linear /root/examples/ level1_workflow ;; esac - -# Put the artifacts in place -bash docker/files/teardown.sh - -# Send coverage data to codecov.io -curl -so codecov.io https://codecov.io/bash -chmod 755 codecov.io -find "${CIRCLE_TEST_REPORTS}/pytest" -name 'coverage*.xml' -print0 | \ - xargs -0 -I file ./codecov.io -f file -t "${CODECOV_TOKEN}" -F unittests -find "${CIRCLE_TEST_REPORTS}/pytest" -name 'smoketests*.xml' -print0 | \ - xargs -0 -I file ./codecov.io -f file -t "${CODECOV_TOKEN}" -F smoketests diff --git a/.dockerignore b/.dockerignore index 381de568df..2140bfcb66 100644 --- a/.dockerignore +++ b/.dockerignore @@ -23,11 +23,13 @@ src/ # other docs/**/* docs/ +.circle/**/* +.circle/ +circle.yml .coverage .coveragerc codecov.yml rtd_requirements.txt -circle.yml Vagrantfile .travis.yml .noserc diff --git a/circle.yml b/circle.yml index db67dc81a2..ac27d71a56 100644 --- a/circle.yml +++ b/circle.yml @@ -8,6 +8,7 @@ machine: DATA_NIPYPE_FSL_COURSE: "${OSF_NIPYPE_URL}/57f472cf9ad5a101f977ecfe" DATA_NIPYPE_FSL_FEEDS: "${OSF_NIPYPE_URL}/57f473066c613b01f113e7af" SCRATCH: "$HOME/scratch" + CODECOV_TOKEN: "ac172a50-8e66-42e5-8822-5373fcf54686" services: - docker @@ -32,34 +33,39 @@ dependencies: - if [[ ! 
-d ~/examples/feeds ]]; then wget --retry-connrefused --waitretry=5 --read-timeout=20 --timeout=15 -t 0 -q -O fsl-5.0.9-feeds.tar.gz "${DATA_NIPYPE_FSL_FEEDS}" && tar xzf fsl-5.0.9-feeds.tar.gz -C ~/examples/; fi - docker images - sed -i -E "s/(__version__ = )'[A-Za-z0-9.-]+'/\1'$CIRCLE_TAG'/" nipype/info.py - - e=1 && for i in {1..5}; do docker build -t nipype/nipype:latest --build-arg BUILD_DATE=`date -u +"%Y-%m-%dT%H:%M:%SZ"` --build-arg VCS_REF=`git rev-parse --short HEAD` --build-arg VERSION=$CIRCLE_TAG . && e=0 && break || sleep 15; done && [ "$e" -eq "0" ] : + - e=1 && for i in {1..5}; do docker build --rm=false -t nipype/nipype:latest --build-arg BUILD_DATE=`date -u +"%Y-%m-%dT%H:%M:%SZ"` --build-arg VCS_REF=`git rev-parse --short HEAD` --build-arg VERSION=$CIRCLE_TAG . && e=0 && break || sleep 15; done && [ "$e" -eq "0" ] : timeout: 21600 - - e=1 && for i in {1..5}; do docker build -f docker/Dockerfile_py27 -t nipype/nipype_test:py27 . && e=0 && break || sleep 15; done && [ "$e" -eq "0" ] : + - e=1 && for i in {1..5}; do docker build --rm=false -f docker/Dockerfile_py27 -t nipype/nipype_test:py27 . && e=0 && break || sleep 15; done && [ "$e" -eq "0" ] : timeout: 1600 - - e=1 && for i in {1..5}; do docker build -f docker/Dockerfile_py35 -t nipype/nipype_test:py35 . && e=0 && break || sleep 15; done && [ "$e" -eq "0" ] : + - e=1 && for i in {1..5}; do docker build --rm=false -f docker/Dockerfile_py35 -t nipype/nipype_test:py35 . 
&& e=0 && break || sleep 15; done && [ "$e" -eq "0" ] : timeout: 1600 - docker save -o $HOME/docker/cache.tar nipype/nipype:latest nipype/nipype_test:py27 nipype/nipype_test:py35 : timeout: 6000 test: override: - - bash docker/files/tests.sh : + - bash .circle/tests.sh : timeout: 7200 parallel: true + post: + # Send coverage data to codecov.io + - bash .circle/codecov.sh general: artifacts: - - "~/docs" - - "~/logs" + - "~/scratch/docs" + - "~/scratch/logs" deployment: production: tag: /.*/ commands: + # Deploy to docker hub - if [[ -n "$DOCKER_PASS" ]]; then docker login -e $DOCKER_EMAIL -u $DOCKER_USER -p $DOCKER_PASS && docker push nipype/nipype:latest; fi : timeout: 21600 - if [[ -n "$DOCKER_PASS" ]]; then docker login -e $DOCKER_EMAIL -u $DOCKER_USER -p $DOCKER_PASS && docker tag nipype/nipype nipype/nipype:$CIRCLE_TAG && docker push nipype/nipype:$CIRCLE_TAG; fi : timeout: 21600 + # Automatic deployment to Pypi: # - printf "[distutils]\nindex-servers =\n pypi\n\n[pypi]\nusername:$PYPI_USER\npassword:$PYPI_PASS\n" > ~/.pypirc # - python setup.py sdist upload -r pypi diff --git a/docker/files/teardown.sh b/docker/files/teardown.sh deleted file mode 100644 index 3712b7ad23..0000000000 --- a/docker/files/teardown.sh +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/bash -# -# This script puts all artifacts in place after the smoke tests -# -# -set -u -set -e - -mkdir -p ${CIRCLE_TEST_REPORTS}/pytest -mv ~/scratch/*.xml ${CIRCLE_TEST_REPORTS}/pytest -mkdir -p ~/docs -mv ~/scratch/docs/* ~/docs/ -mkdir -p ~/logs -mv ~/scratch/builddocs.log ~/logs/builddocs.log -mv ~/scratch/logs/* ~/logs/ diff --git a/nipype/interfaces/freesurfer/tests/test_preprocess.py b/nipype/interfaces/freesurfer/tests/test_preprocess.py index 60a51e3a86..deb475a6b4 100644 --- a/nipype/interfaces/freesurfer/tests/test_preprocess.py +++ b/nipype/interfaces/freesurfer/tests/test_preprocess.py @@ -98,9 +98,14 @@ def test_mandatory_outvol(create_files_in_directory): with pytest.raises(ValueError): 
mni.cmdline # test with minimal args - mni.inputs.in_file = filelist[0] - assert mni.cmdline == ('mri_nu_correct.mni --i %s --o %s_output.mgz' - % (filelist[0], filelist[0].replace('.mgz', ''))) + mni.inputs.in_file = filelist[0] + base, ext = os.path.splitext(os.path.basename(filelist[0])) + if ext == '.gz': + base, ext2 = os.path.splitext(base) + ext = ext2 + ext + + assert mni.cmdline == ( + 'mri_nu_correct.mni --i %s --o %s_output%s' % (filelist[0], base, ext)) # test with custom outfile mni.inputs.out_file = 'new_corrected_file.mgz' @@ -108,8 +113,8 @@ def test_mandatory_outvol(create_files_in_directory): % (filelist[0])) # constructor based tests - mni2 = freesurfer.MNIBiasCorrection(in_file=filelist[0], + mni2 = freesurfer.MNIBiasCorrection(in_file=filelist[0], out_file='bias_corrected_output', iterations=4) - assert mni2.cmdline == ('mri_nu_correct.mni --i %s --n 4 --o bias_corrected_output.mgz' + assert mni2.cmdline == ('mri_nu_correct.mni --i %s --n 4 --o bias_corrected_output' % filelist[0])