diff --git a/README.rst b/README.rst
index b8609349b..6c447b818 100644
--- a/README.rst
+++ b/README.rst
@@ -4,7 +4,10 @@
 
 .. _scikit-learn-contrib: https://github.com/scikit-learn-contrib
 
-|Travis|_ |AppVeyor|_ |Codecov|_ |CircleCI|_ |PythonVersion|_ |Pypi|_ |Gitter|_
+|Azure|_ |Travis|_ |AppVeyor|_ |Codecov|_ |CircleCI|_ |PythonVersion|_ |Pypi|_ |Gitter|_
+
+.. |Azure| image:: https://dev.azure.com/imbalanced-learn/imbalanced-learn/_apis/build/status/scikit-learn-contrib.imbalanced-learn?branchName=master
+.. _Azure: https://dev.azure.com/imbalanced-learn/imbalanced-learn/_build
 
 .. |Travis| image:: https://travis-ci.org/scikit-learn-contrib/imbalanced-learn.svg?branch=master
 .. _Travis: https://travis-ci.org/scikit-learn-contrib/imbalanced-learn
diff --git a/azure-pipelines.yml b/azure-pipelines.yml
index 7d405f9ee..49d265978 100644
--- a/azure-pipelines.yml
+++ b/azure-pipelines.yml
@@ -1,36 +1,105 @@
-# Python package
-# Create and test a Python package on multiple Python versions.
-# Add steps that analyze code, save the dist with the build record, publish to a PyPI-compatible index, and more:
-# https://docs.microsoft.com/azure/devops/pipelines/languages/python
+# Adapted from https://github.com/scikit-learn/scikit-learn/blob/master/azure-pipelines.yml
+jobs:
+- job: linting
+  displayName: Linting
+  pool:
+    vmImage: ubuntu-16.04
+  steps:
+  - bash: echo "##vso[task.prependpath]$CONDA/bin"
+    displayName: Add conda to PATH
+  - bash: sudo chown -R $USER $CONDA
+    displayName: Take ownership of conda installation
+  - bash: conda create --name flake8_env --yes flake8
+    displayName: Install flake8
+  - bash: |
+      source activate flake8_env
+      ./build_tools/circle/linting.sh
+    displayName: Run linting
 
-trigger:
-- master
+- template: build_tools/azure/posix.yml
+  parameters:
+    name: Linux
+    vmImage: ubuntu-16.04
+    dependsOn: [linting]
+    matrix:
+      # Linux environment to test that imbalanced-learn can be built against
+      # the versions of numpy and scipy with ATLAS that come with Ubuntu
+      # Xenial 16.04, i.e. numpy 1.11 and scipy 0.17
+      py35_ubuntu_atlas:
+        DISTRIB: 'ubuntu'
+        PYTHON_VERSION: '3.5'
+        JOBLIB_VERSION: '*'
+      # Linux environment to test the latest available dependencies and MKL.
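+      # Variables set to '*' request the latest available version; they are
+      # interpreted by build_tools/azure/install.sh when the environment is
+      # created.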
+      pylatest_pip_openblas_pandas:
+        DISTRIB: 'conda-pip-latest'
+        PYTHON_VERSION: '3.7'
+        COVERAGE: 'true'
+        PANDAS_VERSION: '*'
+        TEST_DOCSTRINGS: 'true'
+        JOBLIB_VERSION: '*'
+        CHECK_WARNINGS: 'true'
+      pylatest_conda_pandas_keras:
+        DISTRIB: 'conda'
+        PYTHON_VERSION: '3.7'
+        INSTALL_MKL: 'true'
+        PANDAS_VERSION: '*'
+        KERAS_VERSION: '*'
+        COVERAGE: 'true'
+        JOBLIB_VERSION: '*'
+        TEST_DOCSTRINGS: 'true'
+      pylatest_conda_pandas_tensorflow:
+        DISTRIB: 'conda'
+        PYTHON_VERSION: '3.7'
+        PANDAS_VERSION: '*'
+        JOBLIB_VERSION: '*'
+        INSTALL_MKL: 'true'
+        TENSORFLOW_VERSION: '*'
+        COVERAGE: 'true'
+        TEST_DOCSTRINGS: 'true'
 
-pool:
-  vmImage: 'ubuntu-latest'
-strategy:
-  matrix:
-    Python27:
-      python.version: '2.7'
-    Python35:
-      python.version: '3.5'
-    Python36:
-      python.version: '3.6'
-    Python37:
-      python.version: '3.7'
+- template: build_tools/azure/posix-32.yml
+  parameters:
+    name: Linux32
+    vmImage: ubuntu-16.04
+    dependsOn: [linting]
+    matrix:
+      py35_ubuntu_atlas_32bit:
+        DISTRIB: 'ubuntu-32'
+        PYTHON_VERSION: '3.5'
+        JOBLIB_VERSION: '*'
+        TEST_DOCSTRINGS: 'true'
 
-steps:
-- task: UsePythonVersion@0
-  inputs:
-    versionSpec: '$(python.version)'
-  displayName: 'Use Python $(python.version)'
+- template: build_tools/azure/posix.yml
+  parameters:
+    name: macOS
+    vmImage: xcode9-macos10.13
+    dependsOn: [linting]
+    matrix:
+      pylatest_conda_mkl:
+        DISTRIB: 'conda'
+        PYTHON_VERSION: '*'
+        INSTALL_MKL: 'true'
+        NUMPY_VERSION: '*'
+        SCIPY_VERSION: '*'
+        PANDAS_VERSION: '*'
+        PYTEST_VERSION: '*'
+        JOBLIB_VERSION: '*'
+        COVERAGE: 'true'
+        TEST_DOCSTRINGS: 'true'
+        CHECK_WARNINGS: 'true'
 
-- script: |
-    python -m pip install --upgrade pip
-    pip install -r requirements.txt
-  displayName: 'Install dependencies'
-
-- script: |
-    pip install pytest pytest-azurepipelines
-    pytest
-  displayName: 'pytest'
+- template: build_tools/azure/windows.yml
+  parameters:
+    name: Windows
+    vmImage: vs2017-win2016
+    dependsOn: [linting]
+    matrix:
+      py37_conda_mkl:
+        PYTHON_VERSION: '3.7'
+        PYTHON_ARCH: '64'
+        PYTEST_VERSION: '*'
+        COVERAGE: 'true'
+        CHECK_WARNINGS: 'true'
+      py35_pip_openblas_32bit:
+        PYTHON_VERSION: '3.5'
+        PYTHON_ARCH: '32'
diff --git a/build_tools/azure/install.cmd b/build_tools/azure/install.cmd
new file mode 100644
index 000000000..607f46b6a
--- /dev/null
+++ b/build_tools/azure/install.cmd
@@ -0,0 +1,41 @@
+@rem https://github.com/numba/numba/blob/master/buildscripts/incremental/setup_conda_environment.cmd
+@rem The cmd /C hack circumvents a regression where conda installs a conda.bat
+@rem script in non-root environments.
+set CONDA_INSTALL=cmd /C conda install -q -y
+set PIP_INSTALL=pip install -q
+
+@echo on
+
+IF "%PYTHON_ARCH%"=="64" (
+    @rem Deactivate any environment
+    call deactivate
+    @rem Clean up any left-over from a previous build
+    conda remove --all -q -y -n %VIRTUALENV%
+    conda create -n %VIRTUALENV% -q -y python=%PYTHON_VERSION% numpy scipy cython wheel joblib git
+
+    call activate %VIRTUALENV%
+
+    IF "%PYTEST_VERSION%"=="*" (
+        pip install pytest
+    ) else (
+        pip install pytest==%PYTEST_VERSION%
+    )
+    pip install pytest-xdist
+) else (
+    pip install numpy scipy cython pytest wheel pillow joblib
+)
+if "%COVERAGE%" == "true" (
+    pip install coverage codecov pytest-cov
+)
+python --version
+pip --version
+
+pip install git+https://github.com/scikit-learn/scikit-learn.git
+
+@rem Build the wheel and the Windows installer for the project.
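+@rem Installing the generated wheel from dist\ (rather than `pip install .`)
+@rem exercises the same artifact a user would install; --no-index keeps pip
+@rem from falling back to a remote index.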
+python setup.py bdist_wheel bdist_wininst
+
+@rem Install the generated wheel package to test it
+pip install --pre --no-index --find-links dist\ imbalanced-learn
+
+if %errorlevel% neq 0 exit /b %errorlevel%
diff --git a/build_tools/azure/install.sh b/build_tools/azure/install.sh
new file mode 100755
index 000000000..04d95282a
--- /dev/null
+++ b/build_tools/azure/install.sh
@@ -0,0 +1,122 @@
+#!/bin/bash
+
+set -e
+
+UNAMESTR=`uname`
+
+make_conda() {
+    TO_INSTALL="$@"
+    conda create -n $VIRTUALENV --yes $TO_INSTALL
+    source activate $VIRTUALENV
+}
+
+version_ge() {
+    # The two version numbers, separated by a newline, are piped to
+    # `sort -rV`. The -V flag activates version-number sorting and -r sorts
+    # in descending order. If the first argument is the top element of the
+    # sort, it is greater than or equal to the second argument.
+    # Example: version_ge "1.11.0" "1.9" succeeds (returns 0).
+    test "$(printf "${1}\n${2}" | sort -rV | head -n 1)" == "$1"
+}
+
+if [[ "$DISTRIB" == "conda" ]]; then
+
+    TO_INSTALL="python=$PYTHON_VERSION pip \
+                numpy=$NUMPY_VERSION scipy=$SCIPY_VERSION \
+                joblib=$JOBLIB_VERSION git"
+
+    if [[ "$INSTALL_MKL" == "true" ]]; then
+        TO_INSTALL="$TO_INSTALL mkl"
+    else
+        TO_INSTALL="$TO_INSTALL nomkl"
+    fi
+
+    make_conda $TO_INSTALL
+    python -m pip install --pre -f https://sklearn-nightly.scdn8.secure.raxcdn.com scikit-learn
+
+    TO_INSTALL=""
+
+    if [[ -n "$PANDAS_VERSION" ]]; then
+        TO_INSTALL="$TO_INSTALL pandas=$PANDAS_VERSION"
+    fi
+
+    if [[ -n "$KERAS_VERSION" ]]; then
+        TO_INSTALL="$TO_INSTALL keras=$KERAS_VERSION tensorflow=1"
+        KERAS_BACKEND=tensorflow
+    fi
+
+    if [[ -n "$TENSORFLOW_VERSION" ]]; then
+        TO_INSTALL="$TO_INSTALL tensorflow=$TENSORFLOW_VERSION"
+    fi
+
+    if [[ "$PYTEST_VERSION" == "*" ]]; then
+        python -m pip install pytest
+    else
+        python -m pip install pytest=="$PYTEST_VERSION"
+    fi
+
+    if [[ "$PYTHON_VERSION" == "*" ]]; then
+        python -m pip install pytest-xdist
+    fi
+
+    if [[ -n "$TO_INSTALL" ]]; then
+        conda install --yes $TO_INSTALL
+    fi
+
+    if [[ -n "$KERAS_VERSION" ]]; then
+        python -c "import keras.backend"
+        sed -i -e 's/"backend":[[:space:]]*"[^"]*/"backend":\ "'$KERAS_BACKEND'/g' ~/.keras/keras.json;
+    fi
+
+elif [[ "$DISTRIB" == "ubuntu" ]]; then
+    sudo add-apt-repository --remove ppa:ubuntu-toolchain-r/test
+    sudo apt-get update
+    sudo apt-get install python3-scipy libatlas3-base libatlas-base-dev libatlas-dev python3-virtualenv git
+    python3 -m virtualenv --system-site-packages --python=python3 $VIRTUALENV
+    source $VIRTUALENV/bin/activate
+    python -m pip install pandas
+    python -m pip install pytest==$PYTEST_VERSION pytest-cov joblib cython
+    python -m pip install git+https://github.com/scikit-learn/scikit-learn.git
+elif [[ "$DISTRIB" == "ubuntu-32" ]]; then
+    apt-get update
+    apt-get install -y python3-dev python3-scipy libatlas3-base libatlas-base-dev libatlas-dev python3-virtualenv git
+    python3 -m virtualenv --system-site-packages --python=python3 $VIRTUALENV
+    source $VIRTUALENV/bin/activate
+    python -m pip install pandas
+    python -m pip install pytest==$PYTEST_VERSION pytest-cov joblib cython
+    python -m pip install git+https://github.com/scikit-learn/scikit-learn.git
elif [[ "$DISTRIB" == "conda-pip-latest" ]]; then
+    # Since the conda main channel usually lags behind the latest releases,
+    # we use PyPI to test against the latest releases of the dependencies.
+    # conda is still used as a convenient way to install Python and pip.
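+    # Only Python itself is requested from conda; everything else, including
+    # scikit-learn from its master branch, is installed below with pip so
+    # imbalanced-learn is tested against the upcoming scikit-learn release.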
+    make_conda "python=$PYTHON_VERSION"
+    python -m pip install -U pip
+    python -m pip install numpy scipy joblib cython
+    python -m pip install git+https://github.com/scikit-learn/scikit-learn.git
+    python -m pip install pytest==$PYTEST_VERSION pytest-cov pytest-xdist
+    python -m pip install pandas
+fi
+
+if [[ "$COVERAGE" == "true" ]]; then
+    python -m pip install coverage codecov pytest-cov
+fi
+
+if [[ "$TEST_DOCSTRINGS" == "true" ]]; then
+    python -m pip install sphinx
+    python -m pip install -U git+https://github.com/numpy/numpydoc.git
+fi
+
+python --version
+python -c "import numpy; print('numpy %s' % numpy.__version__)"
+python -c "import scipy; print('scipy %s' % scipy.__version__)"
+python -c "\
+try:
+    import pandas
+    print('pandas %s' % pandas.__version__)
+except ImportError:
+    print('pandas not installed')
+"
+python -m pip list
+
+# Use setup.py instead of `pip install -e .` to be able to pass the -j flag
+# and speed up the build on multicore CI machines.
+python setup.py develop
diff --git a/build_tools/azure/posix-32.yml b/build_tools/azure/posix-32.yml
new file mode 100644
index 000000000..68e05e347
--- /dev/null
+++ b/build_tools/azure/posix-32.yml
@@ -0,0 +1,61 @@
+parameters:
+  name: ''
+  vmImage: ''
+  matrix: []
+  dependsOn: []
+
+jobs:
+- job: ${{ parameters.name }}
+  dependsOn: ${{ parameters.dependsOn }}
+  pool:
+    vmImage: ${{ parameters.vmImage }}
+  variables:
+    TEST_DIR: '$(Agent.WorkFolder)/tmp_folder'
+    JUNITXML: 'test-data.xml'
+    OMP_NUM_THREADS: '4'
+    PYTEST_VERSION: '5.2.1'
+    OPENBLAS_NUM_THREADS: '4'
+    SKLEARN_SKIP_NETWORK_TESTS: '1'
+  strategy:
+    matrix:
+      ${{ insert }}: ${{ parameters.matrix }}
+
+  steps:
+  # The container is detached and sleeping, which allows later steps to run
+  # commands in it. TEST_DIR is mounted so that the host can access the
+  # JUnit XML file.
+  - script: >
+      docker container run --rm
+      --volume $TEST_DIR:/temp_dir
+      --volume $PWD:/io
+      -w /io
+      --detach
+      --name skcontainer
+      -e DISTRIB=ubuntu-32
+      -e TEST_DIR=/temp_dir
+      -e JUNITXML=$JUNITXML
+      -e VIRTUALENV=testvenv
+      -e JOBLIB_VERSION=$JOBLIB_VERSION
+      -e PYTEST_VERSION=$PYTEST_VERSION
+      -e OMP_NUM_THREADS=$OMP_NUM_THREADS
+      -e OPENBLAS_NUM_THREADS=$OPENBLAS_NUM_THREADS
+      -e SKLEARN_SKIP_NETWORK_TESTS=$SKLEARN_SKIP_NETWORK_TESTS
+      i386/ubuntu:16.04
+      sleep 1000000
+    displayName: 'Start container'
+  - script: >
+      docker exec skcontainer ./build_tools/azure/install.sh
+    displayName: 'Install'
+  - script: >
+      docker exec skcontainer ./build_tools/azure/test_script.sh
+    displayName: 'Test Library'
+  - task: PublishTestResults@2
+    inputs:
+      testResultsFiles: '$(TEST_DIR)/$(JUNITXML)'
+      testRunTitle: ${{ format('{0}-$(Agent.JobName)', parameters.name) }}
+    displayName: 'Publish Test Results'
+    condition: succeededOrFailed()
+  - script: >
+      docker container stop skcontainer
+    displayName: 'Stop container'
+    condition: always()
diff --git a/build_tools/azure/posix.yml b/build_tools/azure/posix.yml
new file mode 100644
index 000000000..ee5b4c351
--- /dev/null
+++ b/build_tools/azure/posix.yml
@@ -0,0 +1,50 @@
+parameters:
+  name: ''
+  vmImage: ''
+  matrix: []
+  dependsOn: []
+
+jobs:
+- job: ${{ parameters.name }}
+  dependsOn: ${{ parameters.dependsOn }}
+  pool:
+    vmImage: ${{ parameters.vmImage }}
+  variables:
+    TEST_DIR: '$(Agent.WorkFolder)/tmp_folder'
+    VIRTUALENV: 'testvenv'
+    JUNITXML: 'test-data.xml'
+    PYTEST_VERSION: '5.2.1'
+    OMP_NUM_THREADS: '4'
+    OPENBLAS_NUM_THREADS: '4'
+  strategy:
+    matrix:
+      ${{ insert }}: ${{ parameters.matrix }}
+
+  steps:
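+  # The steps below reuse the shared shell scripts in build_tools/azure;
+  # DISTRIB, set in the job matrix, selects the code path inside install.sh
+  # and test_script.sh.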
"##vso[task.prependpath]$CONDA/bin" + displayName: Add conda to PATH + condition: startsWith(variables['DISTRIB'], 'conda') + - bash: sudo chown -R $USER $CONDA + displayName: Take ownership of conda installation + condition: startsWith(variables['DISTRIB'], 'conda') + - script: | + build_tools/azure/install.sh + displayName: 'Install' + - script: | + build_tools/azure/test_script.sh + displayName: 'Test Library' + - script: | + build_tools/azure/test_docs.sh + displayName: 'Test Docs' + - task: PublishTestResults@2 + inputs: + testResultsFiles: '$(TEST_DIR)/$(JUNITXML)' + testRunTitle: ${{ format('{0}-$(Agent.JobName)', parameters.name) }} + displayName: 'Publish Test Results' + condition: succeededOrFailed() + - script: | + build_tools/azure/upload_codecov.sh + condition: and(succeeded(), eq(variables['COVERAGE'], 'true'), eq(variables['DISTRIB'], 'conda')) + displayName: 'Upload To Codecov' + env: + CODECOV_TOKEN: $(CODECOV_TOKEN) diff --git a/build_tools/azure/test_docs.sh b/build_tools/azure/test_docs.sh new file mode 100755 index 000000000..f206b4334 --- /dev/null +++ b/build_tools/azure/test_docs.sh @@ -0,0 +1,14 @@ +#!/bin/bash + +set -e + +if [[ "$DISTRIB" =~ ^conda.* ]]; then + source activate $VIRTUALENV +elif [[ "$DISTRIB" == "ubuntu" ]]; then + source $VIRTUALENV/bin/activate +fi + +if [[ "TEST_DOCSTRING" == 'true' ]]; then + make test-doc + pytest -vsl maint_tools/test_docstring.py +fi diff --git a/build_tools/azure/test_script.cmd b/build_tools/azure/test_script.cmd new file mode 100644 index 000000000..eb6c4e7ee --- /dev/null +++ b/build_tools/azure/test_script.cmd @@ -0,0 +1,20 @@ +@echo on + +@rem Only 64 bit uses conda and uses a python newer than 3.5 +IF "%PYTHON_ARCH%"=="64" ( +call activate %VIRTUALENV% +set PYTEST_ARGS=%PYTEST_ARGS% -n2 +) + +mkdir %TMP_FOLDER% +cd %TMP_FOLDER% + +if "%CHECK_WARNINGS%" == "true" ( +set PYTEST_ARGS=%PYTEST_ARGS% -Werror::DeprecationWarning -Werror::FutureWarning +) + +if "%COVERAGE%" == "true" ( +set PYTEST_ARGS=%PYTEST_ARGS% --cov imblearn +) + +pytest --junitxml=%JUNITXML% --showlocals --durations=20 %PYTEST_ARGS% --pyargs imblearn diff --git a/build_tools/azure/test_script.sh b/build_tools/azure/test_script.sh new file mode 100755 index 000000000..37793c529 --- /dev/null +++ b/build_tools/azure/test_script.sh @@ -0,0 +1,45 @@ +#!/bin/bash + +set -e + +if [[ "$DISTRIB" =~ ^conda.* ]]; then + source activate $VIRTUALENV +elif [[ "$DISTRIB" == "ubuntu" ]] || [[ "$DISTRIB" == "ubuntu-32" ]]; then + source $VIRTUALENV/bin/activate +fi + +python --version +python -c "import numpy; print('numpy %s' % numpy.__version__)" +python -c "import scipy; print('scipy %s' % scipy.__version__)" +python -c "\ +try: + import pandas + print('pandas %s' % pandas.__version__) +except ImportError: + print('pandas not installed') +" +python -c "import multiprocessing as mp; print('%d CPUs' % mp.cpu_count())" +pip list + +TEST_CMD="python -m pytest --showlocals --durations=20 --junitxml=$JUNITXML" + +if [[ "$COVERAGE" == "true" ]]; then + export COVERAGE_PROCESS_START="$BUILD_SOURCESDIRECTORY/.coveragerc" + TEST_CMD="$TEST_CMD --cov-config=$COVERAGE_PROCESS_START --cov imblearn" +fi + +if [[ -n "$CHECK_WARNINGS" ]]; then + TEST_CMD="$TEST_CMD -Werror::DeprecationWarning -Werror::FutureWarning" +fi + +if [[ "$PYTHON_VERSION" == "*" ]]; then + TEST_CMD="$TEST_CMD -n2" +fi + +mkdir -p $TEST_DIR +cp setup.cfg $TEST_DIR +cd $TEST_DIR + +set -x +$TEST_CMD --pyargs imblearn +set +x diff --git a/build_tools/azure/upload_codecov.cmd 
diff --git a/build_tools/azure/upload_codecov.cmd b/build_tools/azure/upload_codecov.cmd
new file mode 100644
index 000000000..4c5e8d0cf
--- /dev/null
+++ b/build_tools/azure/upload_codecov.cmd
@@ -0,0 +1,10 @@
+@echo on
+
+@rem Only the 64 bit build uses conda
+IF "%PYTHON_ARCH%"=="64" (
+    call activate %VIRTUALENV%
+)
+
+copy %TMP_FOLDER%\.coverage %BUILD_REPOSITORY_LOCALPATH%
+
+codecov --root %BUILD_REPOSITORY_LOCALPATH% -t %CODECOV_TOKEN%
diff --git a/build_tools/azure/upload_codecov.sh b/build_tools/azure/upload_codecov.sh
new file mode 100755
index 000000000..274106cb1
--- /dev/null
+++ b/build_tools/azure/upload_codecov.sh
@@ -0,0 +1,16 @@
+#!/bin/bash
+
+set -e
+
+# Called when COVERAGE=="true" and DISTRIB=="conda"
+export PATH=$HOME/miniconda3/bin:$PATH
+source activate $VIRTUALENV
+
+# codecov needs to run from a git checkout, so we copy .coverage from
+# TEST_DIR, where pytest has been run, back into the repository.
+pushd $TEST_DIR
+coverage combine --append
+popd
+cp $TEST_DIR/.coverage $BUILD_REPOSITORY_LOCALPATH
+
+codecov --root $BUILD_REPOSITORY_LOCALPATH -t $CODECOV_TOKEN || echo "codecov upload failed"
diff --git a/build_tools/azure/windows.yml b/build_tools/azure/windows.yml
new file mode 100644
index 000000000..24b542b22
--- /dev/null
+++ b/build_tools/azure/windows.yml
@@ -0,0 +1,51 @@
+
+parameters:
+  name: ''
+  vmImage: ''
+  matrix: []
+  dependsOn: []
+
+jobs:
+- job: ${{ parameters.name }}
+  dependsOn: ${{ parameters.dependsOn }}
+  pool:
+    vmImage: ${{ parameters.vmImage }}
+  variables:
+    VIRTUALENV: 'testvenv'
+    JUNITXML: 'test-data.xml'
+    SKLEARN_SKIP_NETWORK_TESTS: '1'
+    PYTEST_VERSION: '5.2.1'
+    TMP_FOLDER: '$(Agent.WorkFolder)\tmp_folder'
+  strategy:
+    matrix:
+      ${{ insert }}: ${{ parameters.matrix }}
+
+  steps:
+  - powershell: Write-Host "##vso[task.prependpath]$env:CONDA\Scripts"
+    displayName: Add conda to PATH for 64 bit Python
+    condition: eq(variables['PYTHON_ARCH'], '64')
+  - task: UsePythonVersion@0
+    inputs:
+      versionSpec: '$(PYTHON_VERSION)'
+      addToPath: true
+      architecture: 'x86'
+    displayName: Use 32 bit System Python
+    condition: eq(variables['PYTHON_ARCH'], '32')
+  - script: |
+      build_tools\\azure\\install.cmd
+    displayName: 'Install'
+  - script: |
+      build_tools\\azure\\test_script.cmd
+    displayName: 'Test Library'
+  - script: |
+      build_tools\\azure\\upload_codecov.cmd
+    condition: and(succeeded(), eq(variables['COVERAGE'], 'true'))
+    displayName: 'Upload To Codecov'
+    env:
+      CODECOV_TOKEN: $(CODECOV_TOKEN)
+  - task: PublishTestResults@2
+    inputs:
+      testResultsFiles: '$(TMP_FOLDER)\$(JUNITXML)'
+      testRunTitle: ${{ format('{0}-$(Agent.JobName)', parameters.name) }}
+    displayName: 'Publish Test Results'
+    condition: succeededOrFailed()
diff --git a/build_tools/circle/linting.sh b/build_tools/circle/linting.sh
new file mode 100755
index 000000000..b5783f513
--- /dev/null
+++ b/build_tools/circle/linting.sh
@@ -0,0 +1,161 @@
+#!/bin/bash
+
+# This script is used in CircleCI to check that PRs do not add obvious
+# flake8 violations. It relies on two things:
+#   - finding the common ancestor between the branch and the
+#     scikit-learn-contrib/imbalanced-learn remote
+#   - running flake8 --diff on the diff between the branch and the
+#     common ancestor
+#
+# Additional features:
+#   - the line numbers reported by Travis match the local branch on the
+#     PR author's machine
+#   - ./build_tools/circle/linting.sh can be run locally for a quick
+#     turn-around
+
+set -e
+# pipefail is necessary to propagate exit codes
+set -o pipefail
+
+PROJECT=scikit-learn-contrib/imbalanced-learn
+PROJECT_URL=https://github.com/$PROJECT.git
+
+# Find the remote with the project name (upstream in most cases)
+REMOTE=$(git remote -v | grep $PROJECT | cut -f1 | head -1 || echo '')
+
+# Add a temporary remote if needed. For example, this is necessary when
+# Travis is configured to run in a fork. In this case 'origin' is the
+# fork and not the reference repo we want to diff against.
+if [[ -z "$REMOTE" ]]; then
+    TMP_REMOTE=tmp_reference_upstream
+    REMOTE=$TMP_REMOTE
+    git remote add $REMOTE $PROJECT_URL
+fi
+
+echo "Remotes:"
+echo '--------------------------------------------------------------------------------'
+git remote --verbose
+
+# Travis does the git clone with a limited depth (50 at the time of
+# writing). This may not be enough to find the common ancestor with
+# $REMOTE/master so we unshallow the git checkout.
+if [[ -a .git/shallow ]]; then
+    echo -e '\nTrying to unshallow the repo:'
+    echo '--------------------------------------------------------------------------------'
+    git fetch --unshallow
+fi
+
+if [[ "$TRAVIS" == "true" ]]; then
+    if [[ "$TRAVIS_PULL_REQUEST" == "false" ]]
+    then
+        # In the main repo, use TRAVIS_COMMIT_RANGE to test the commits
+        # that were pushed into a branch.
+        if [[ "$PROJECT" == "$TRAVIS_REPO_SLUG" ]]; then
+            if [[ -z "$TRAVIS_COMMIT_RANGE" ]]; then
+                echo "New branch, no commit range from Travis so passing this test by convention"
+                exit 0
+            fi
+            COMMIT_RANGE=$TRAVIS_COMMIT_RANGE
+        fi
+    else
+        # We want to fetch the code as it is in the PR branch and not
+        # the result of the merge into master. This way the line numbers
+        # reported by Travis will match the local code.
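+        # For example, for PR 123 the fetch below creates a local ref
+        # travis_pr_123 pointing at the head of the PR branch.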
+        LOCAL_BRANCH_REF=travis_pr_$TRAVIS_PULL_REQUEST
+        # In Travis the PR target is always origin
+        git fetch origin pull/$TRAVIS_PULL_REQUEST/head:refs/$LOCAL_BRANCH_REF
+    fi
+fi
+
+# If not using the commit range from Travis we need to find the common
+# ancestor between $LOCAL_BRANCH_REF and $REMOTE/master
+if [[ -z "$COMMIT_RANGE" ]]; then
+    if [[ -z "$LOCAL_BRANCH_REF" ]]; then
+        LOCAL_BRANCH_REF=$(git rev-parse --abbrev-ref HEAD)
+    fi
+    echo -e "\nLast 2 commits in $LOCAL_BRANCH_REF:"
+    echo '--------------------------------------------------------------------------------'
+    git --no-pager log -2 $LOCAL_BRANCH_REF
+
+    REMOTE_MASTER_REF="$REMOTE/master"
+    # Make sure that $REMOTE_MASTER_REF is a valid reference
+    echo -e "\nFetching $REMOTE_MASTER_REF"
+    echo '--------------------------------------------------------------------------------'
+    git fetch $REMOTE master:refs/remotes/$REMOTE_MASTER_REF
+    LOCAL_BRANCH_SHORT_HASH=$(git rev-parse --short $LOCAL_BRANCH_REF)
+    REMOTE_MASTER_SHORT_HASH=$(git rev-parse --short $REMOTE_MASTER_REF)
+
+    COMMIT=$(git merge-base $LOCAL_BRANCH_REF $REMOTE_MASTER_REF) || \
+        echo "No common ancestor found for $(git show $LOCAL_BRANCH_REF -q) and $(git show $REMOTE_MASTER_REF -q)"
+
+    if [ -z "$COMMIT" ]; then
+        exit 1
+    fi
+
+    COMMIT_SHORT_HASH=$(git rev-parse --short $COMMIT)
+
+    echo -e "\nCommon ancestor between $LOCAL_BRANCH_REF ($LOCAL_BRANCH_SHORT_HASH)"\
+        "and $REMOTE_MASTER_REF ($REMOTE_MASTER_SHORT_HASH) is $COMMIT_SHORT_HASH:"
+    echo '--------------------------------------------------------------------------------'
+    git --no-pager show --no-patch $COMMIT_SHORT_HASH
+
+    COMMIT_RANGE="$COMMIT_SHORT_HASH..$LOCAL_BRANCH_SHORT_HASH"
+
+    if [[ -n "$TMP_REMOTE" ]]; then
+        git remote remove $TMP_REMOTE
+    fi
+
+else
+    echo "Got the commit range from Travis: $COMMIT_RANGE"
+fi
+
+echo -e '\nRunning flake8 on the diff in the range' "$COMMIT_RANGE" \
+    "($(git rev-list $COMMIT_RANGE | wc -l) commit(s)):"
+echo '--------------------------------------------------------------------------------'
+
+# We ignore files from sklearn/externals. Unfortunately there is no
+# way to do it with flake8 directly (the --exclude does not seem to
+# work with --diff). We could use the exclude magic in the git pathspec
+# ':!sklearn/externals' but it is only available on git 1.9 and Travis
+# uses git 1.8.
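+# Instead, the list of modified files is filtered with grep -v below before
+# it is handed to flake8.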
+# We need the following command to exit with 0, hence the echo in case
+# there is no match
+MODIFIED_FILES="$(git diff --name-only $COMMIT_RANGE | grep -v 'sklearn/externals' | \
+    grep -v 'doc/sphinxext' || echo "no_match")"
+
+check_files() {
+    files="$1"
+    shift
+    options="$*"
+    if [ -n "$files" ]; then
+        # Conservative approach: diff without context (--unified=0) so that
+        # code that was not changed does not create failures
+        git diff --unified=0 $COMMIT_RANGE -- $files | flake8 --diff --show-source $options
+    fi
+}
+
+if [[ "$MODIFIED_FILES" == "no_match" ]]; then
+    echo "No file outside sklearn/externals and doc/sphinxext has been modified"
+else
+    check_files "$(echo "$MODIFIED_FILES" | grep -v ^examples)"
+    check_files "$(echo "$MODIFIED_FILES" | grep ^examples)" \
+        --config ./examples/.flake8
+fi
+echo -e "No problem detected by flake8\n"
+
+# For docstrings and warnings of deprecated attributes to be rendered
+# properly, the property decorator must come before the deprecated decorator
+# (otherwise they are treated as functions)
+
+# Do not error when `grep -B1 "@property"` finds nothing
+set +e
+bad_deprecation_property_order=`git grep -A 10 "@property" -- "*.py" | awk '/@property/,/def /' | grep -B1 "@deprecated"`
+
+if [ ! -z "$bad_deprecation_property_order" ]
+then
+    echo "property decorator should come before deprecated decorator"
+    echo "found the following occurrences:"
+    echo $bad_deprecation_property_order
+    exit 1
+fi
diff --git a/imblearn/datasets/tests/test_zenodo.py b/imblearn/datasets/tests/test_zenodo.py
index efd4943c2..45429dab3 100644
--- a/imblearn/datasets/tests/test_zenodo.py
+++ b/imblearn/datasets/tests/test_zenodo.py
@@ -46,6 +46,7 @@ def fetch(*args, **kwargs):
     return fetch_datasets(*args, download_if_missing=True, **kwargs)
 
 
+@pytest.mark.xfail
 def test_fetch():
     try:
         datasets1 = fetch(shuffle=True, random_state=42)
diff --git a/imblearn/ensemble/tests/test_bagging.py b/imblearn/ensemble/tests/test_bagging.py
index 738266147..510fa4c95 100644
--- a/imblearn/ensemble/tests/test_bagging.py
+++ b/imblearn/ensemble/tests/test_bagging.py
@@ -50,7 +50,7 @@ def test_balanced_bagging_classifier():
 
     for base_estimator in [
             None,
-            DummyClassifier(),
+            DummyClassifier(strategy="prior"),
             Perceptron(max_iter=1000, tol=1e-3),
             DecisionTreeClassifier(),
             KNeighborsClassifier(),
diff --git a/imblearn/utils/tests/test_docstring.py b/imblearn/utils/tests/test_docstring.py
index 74b3fe0c1..89746de1c 100644
--- a/imblearn/utils/tests/test_docstring.py
+++ b/imblearn/utils/tests/test_docstring.py
@@ -6,6 +6,8 @@
 import pytest
 
 from imblearn.utils import Substitution
+from imblearn.utils._docstring import _random_state_docstring
+from imblearn.utils._docstring import _n_jobs_docstring
 
 func_docstring = """A function.
 
@@ -60,3 +62,8 @@ def test_docstring_inject(obj, obj_docstring):
     obj_injected_docstring = Substitution(param_1="xxx", param_2="yyy")(obj)
     assert obj_injected_docstring.__doc__ == obj_docstring
+
+
+def test_docstring_template():
+    assert "random_state" in _random_state_docstring
+    assert "n_jobs" in _n_jobs_docstring