diff --git a/.travis.yml b/.travis.yml.back similarity index 100% rename from .travis.yml rename to .travis.yml.back diff --git a/appveyor.yml.back b/appveyor.yml.back new file mode 100644 index 0000000..d40159d --- /dev/null +++ b/appveyor.yml.back @@ -0,0 +1,92 @@ +environment: + global: + REPO_DIR: pandas + PACKAGE_NAME: pandas + BUILD_COMMIT: v1.0.0 + BUILDWHEEL: "True" + BUILD_DEPENDS: "hdf5 numpy>=1.9.3 numexpr>=2.6.2 six cython bzip2" + TEST_DEPENDS: "numpy>=1.9.3 numexpr>=2.6.2 six>=1.9.0 mock" + DISABLE_AVX2: "True" # do not include AVX2 in this build + WHEELHOUSE_UPLOADER_USERNAME: travis-worker + WHEELHOUSE_UPLOADER_SECRET: + secure: + 9s0gdDGnNnTt7hvyNpn0/ZzOMGPdwPp2SewFTfGzYk7uI+rdAN9rFq2D1gAP4NQh + + matrix: + - PYTHON: "C:\\Miniconda35" + PYTHON_VERSION: "3.5" + PYTHON_ARCH: "32" + - PYTHON: "C:\\Miniconda35-x64" + PYTHON_VERSION: "3.5" + PYTHON_ARCH: "64" + - PYTHON: "C:\\Miniconda36" + PYTHON_VERSION: "3.6" + PYTHON_ARCH: "32" + - PYTHON: "C:\\Miniconda36-x64" + PYTHON_VERSION: "3.6" + PYTHON_ARCH: "64" + - PYTHON: "C:\\Miniconda37" + PYTHON_VERSION: "3.7" + PYTHON_ARCH: "32" + - PYTHON: "C:\\Miniconda37-x64" + PYTHON_VERSION: "3.7" + PYTHON_ARCH: "64" + + +# We always use a 64-bit machine, but can build x86 distributions +# with the PYTHON_ARCH variable. 
+platform: + - x64 + +matrix: + fast_finish: true + +install: + # Fetch submodules + - git submodule update --init --recursive + + - SET PATH=%PYTHON%;%PYTHON%\Scripts;%PYTHON%\Library\bin;%PATH% + - conda info + + # Check that we have the expected version and architecture for Python + - python --version + - python -c "import struct; print(struct.calcsize('P') * 8)" + +build_script: + # Install build requirements + - conda create --yes -n build_env python=%PYTHON_VERSION% %BUILD_DEPENDS% + - activate build_env + + # FIXME update Cython for python 3.5 + - pip install -U cython + + # Additional pre install steps: + - set BZIP2_DIR=%CONDA_PREFIX%\Library\ + + # build wheel: + - cd %REPO_DIR% + - git checkout %BUILD_COMMIT% + - python setup.py bdist_wheel + +test_script: + # create test env + - conda create --yes -n test_env python=%PYTHON_VERSION% %TEST_DEPENDS% + - activate test_env + + # install from wheel + - pip install --no-index --find-links dist/ %PACKAGE_NAME% + + - cd .. + - python -m tables.tests.test_all + +artifacts: + - path: "%REPO_DIR%\\dist\\*" + +on_success: + # Upload the generated wheel package to Rackspace + # On Windows, Apache Libcloud cannot find a standard CA cert bundle so we + # disable the ssl checks. 
+ - cd %REPO_DIR% + - pip install wheelhouse-uploader + - python -m wheelhouse_uploader upload + --no-ssl-check --local-folder=dist --no-update-index wheels diff --git a/azure-pipelines.yml b/azure-pipelines.yml new file mode 100644 index 0000000..597b7b7 --- /dev/null +++ b/azure-pipelines.yml @@ -0,0 +1,86 @@ +schedules: +- cron: "27 3 */1 * *" + # 3:27am UTC every day + displayName: Nightly build + branches: + include: + - master + always: true + +jobs: + - template: azure/windows.yml + parameters: + name: windows + vmImage: vs2017-win2016 + matrix: + py_3.6_32: + PYTHON_VERSION: "3.6.x" + PYTHON_ARCH: "x86" + NP_BUILD_DEP: "1.13.3" + py_3.6_64: + PYTHON_VERSION: "3.6.x" + NP_BUILD_DEP: "1.13.3" + py_3.7_32: + PYTHON_VERSION: "3.7.x" + PYTHON_ARCH: "x86" + NP_BUILD_DEP: "1.14.5" + NIGHTLY_BUILD: "true" + py_3.7_64: + PYTHON_VERSION: "3.7.x" + NP_BUILD_DEP: "1.14.5" + NIGHTLY_BUILD: "true" + py_3.8_32: + PYTHON_VERSION: "3.8.x" + PYTHON_ARCH: "x86" + NP_BUILD_DEP: "1.17.3" + NIGHTLY_BUILD: "true" + py_3.8_64: + PYTHON_VERSION: "3.8.x" + NP_BUILD_DEP: "1.17.3" + NIGHTLY_BUILD: "true" + + - template: azure/posix.yml + parameters: + name: linux + vmImage: ubuntu-16.04 + matrix: + py_3.6_32: + MB_PYTHON_VERSION: "3.6" + PLAT: "i686" + py_3.6_64: + MB_PYTHON_VERSION: "3.6" + py_3.7_32: + MB_PYTHON_VERSION: "3.7" + PLAT: "i686" + NP_BUILD_DEP: "numpy==1.14.5" + NIGHTLY_BUILD: "true" + py_3.7_64: + MB_PYTHON_VERSION: "3.7" + NP_BUILD_DEP: "numpy==1.14.5" + NIGHTLY_BUILD: "true" + py_3.8_32: + MB_PYTHON_VERSION: "3.8" + PLAT: "i686" + NP_BUILD_DEP: "numpy==1.17.3" + NIGHTLY_BUILD: "true" + py_3.8_64: + MB_PYTHON_VERSION: "3.8" + NP_BUILD_DEP: "numpy==1.17.3" + NIGHTLY_BUILD: "true" + + - template: azure/posix.yml + parameters: + name: macOS + vmImage: macOS-10.14 + matrix: + py_3.6_64: + MB_PYTHON_VERSION: "3.6" + NP_BUILD_DEP: "numpy==1.13.3" + py_3.7_64: + MB_PYTHON_VERSION: "3.7" + NP_BUILD_DEP: "numpy==1.14.5" + NIGHTLY_BUILD: "true" + py_3.8_64: + MB_PYTHON_VERSION: 
"3.8" + NP_BUILD_DEP: "numpy==1.17.3" + NIGHTLY_BUILD: "true" diff --git a/azure/posix.yml b/azure/posix.yml new file mode 100644 index 0000000..807f3f3 --- /dev/null +++ b/azure/posix.yml @@ -0,0 +1,129 @@ +parameters: + name: "" + vmImage: "" + matrix: [] + +jobs: + - job: ${{ parameters.name }} + pool: + vmImage: ${{ parameters.vmImage }} + variables: + REPO_DIR: "pandas" + BUILD_COMMIT: "v1.0.2" + PLAT: "x86_64" + NP_BUILD_DEP: "numpy==1.13.3" + CYTHON_BUILD_DEP: "cython==0.29.13" + NIGHTLY_BUILD_COMMIT: "master" + NIGHTLY_BUILD: "false" + TEST_DEPENDS: "pytest pytest-xdist hypothesis" + JUNITXML: "test-data.xml" + TEST_DIR: "tmp_for_test" + strategy: + matrix: + ${{ insert }}: ${{ parameters.matrix }} + + steps: + - checkout: self + submodules: true + + - task: UsePythonVersion@0 + inputs: + versionSpec: $(MB_PYTHON_VERSION) + displayName: Set python version + + - bash: | + set -e + + SKIP_BUILD="false" + if [ "$BUILD_REASON" == "Schedule" ]; then + BUILD_COMMIT=$NIGHTLY_BUILD_COMMIT + if [ "$NIGHTLY_BUILD" != "true" ]; then + SKIP_BUILD="true" + fi + fi + echo "Building pandas@$BUILD_COMMIT" + echo "##vso[task.setvariable variable=BUILD_COMMIT]$BUILD_COMMIT" + echo "##vso[task.setvariable variable=SKIP_BUILD]$SKIP_BUILD" + + # Platform variables used in multibuild scripts + if [ `uname` == 'Darwin' ]; then + echo "##vso[task.setvariable variable=TRAVIS_OS_NAME]osx" + echo "##vso[task.setvariable variable=MACOSX_DEPLOYMENT_TARGET]10.9" + else + echo "##vso[task.setvariable variable=TRAVIS_OS_NAME]linux" + fi + + # Store original Python path to be able to create test_venv pointing + # to same Python version. 
+ PYTHON_EXE=`which python` + echo "##vso[task.setvariable variable=PYTHON_EXE]$PYTHON_EXE" + displayName: Define build env variables + + - bash: | + set -e + pip install virtualenv wheel + BUILD_DEPENDS="$NP_BUILD_DEP $CYTHON_BUILD_DEP" + + source multibuild/common_utils.sh + source multibuild/travis_steps.sh + source extra_functions.sh + + # Setup build dependencies + before_install + + clean_code $REPO_DIR $BUILD_COMMIT + build_wheel $REPO_DIR $PLAT + displayName: Build wheel + condition: eq(variables['SKIP_BUILD'], 'false') + + - bash: | + set -xe + source multibuild/common_utils.sh + source multibuild/travis_steps.sh + source extra_functions.sh + setup_test_venv + install_run $PLAT + teardown_test_venv + displayName: Install wheel and test + condition: eq(variables['SKIP_BUILD'], 'false') + + - bash: | + echo "##vso[task.prependpath]$CONDA/bin" + sudo chown -R $USER $CONDA + displayName: Add conda to PATH + condition: and(succeeded(), eq(variables['SKIP_BUILD'], 'false'), ne(variables['Build.Reason'], 'PullRequest')) + + - bash: conda install -q -y anaconda-client + displayName: Install anaconda-client + condition: and(succeeded(), eq(variables['SKIP_BUILD'], 'false'), ne(variables['Build.Reason'], 'PullRequest')) + + - bash: | + set -e + if [ "$BUILD_REASON" == "Schedule" ]; then + ANACONDA_ORG="scipy-wheels-nightly" + TOKEN="$PANDAS_NIGHTLY_UPLOAD_TOKEN" + else + ANACONDA_ORG="multibuild-wheels-staging" + TOKEN="$PANDAS_STAGING_UPLOAD_TOKEN" + fi + if [ "$TOKEN" == "" ]; then + echo "##[warning] Could not find anaconda.org upload token in secret variables" + fi + echo "##vso[task.setvariable variable=TOKEN]$TOKEN" + echo "##vso[task.setvariable variable=ANACONDA_ORG]$ANACONDA_ORG" + displayName: Retrieve secret upload token + condition: and(succeeded(), eq(variables['SKIP_BUILD'], 'false'), ne(variables['Build.Reason'], 'PullRequest')) + env: + # Secret variables need to be mapped to env variables explicitly: + PANDAS_NIGHTLY_UPLOAD_TOKEN: 
$(PANDAS_NIGHTLY_UPLOAD_TOKEN) + PANDAS_STAGING_UPLOAD_TOKEN: $(PANDAS_STAGING_UPLOAD_TOKEN) + + - bash: | + set -e + # The --force option forces a replacement if the remote file already + # exists. + ls wheelhouse/*.whl + anaconda -t $TOKEN upload --force -u $ANACONDA_ORG wheelhouse/*.whl + echo "PyPI-style index: https://pypi.anaconda.org/$ANACONDA_ORG/simple" + displayName: Upload to anaconda.org (only if secret token is retrieved) + condition: ne(variables['TOKEN'], '') diff --git a/azure/windows.yml b/azure/windows.yml new file mode 100644 index 0000000..3e04d4f --- /dev/null +++ b/azure/windows.yml @@ -0,0 +1,132 @@ +parameters: + name: "" + vmImage: "" + matrix: [] + +jobs: + - job: ${{ parameters.name }} + pool: + vmImage: ${{ parameters.vmImage }} + variables: + BUILD_COMMIT: "v1.0.2" + NP_BUILD_DEP: "1.13.3" + CYTHON_BUILD_DEP: "0.29.13" + NIGHTLY_BUILD_COMMIT: "master" + NIGHTLY_BUILD: "false" + PYTHON_ARCH: "x64" + TEST_DEPENDS: "pytest pytest-xdist hypothesis" + JUNITXML: "test-data.xml" + TEST_DIR: '$(Agent.WorkFolder)/tmp_for_test' + strategy: + matrix: + ${{ insert }}: ${{ parameters.matrix }} + steps: + - checkout: self + submodules: true + + - task: UsePythonVersion@0 + inputs: + versionSpec: $(PYTHON_VERSION) + architecture: $(PYTHON_ARCH) + displayName: Set python version + + - bash: | + set -e + echo PYTHON $PYTHON_VERSION $PYTHON_ARCH + echo Build Reason: $BUILD_REASON + python --version + python -c "import struct; print(struct.calcsize('P') * 8)" + pip --version + displayName: Check that we have the expected version and architecture for Python + - bash: | + set -e + SKIP_BUILD="false" + if [ "$BUILD_REASON" == "Schedule" ]; then + BUILD_COMMIT=$NIGHTLY_BUILD_COMMIT + if [ "$NIGHTLY_BUILD" != "true" ]; then + SKIP_BUILD="true" + fi + fi + echo "Building pandas@$BUILD_COMMIT" + echo "##vso[task.setvariable variable=BUILD_COMMIT]$BUILD_COMMIT" + echo "##vso[task.setvariable variable=SKIP_BUILD]$SKIP_BUILD" + # Store original Python path to be 
able to create test_venv pointing + # to same Python version. + PYTHON_EXE=`which python` + echo "##vso[task.setvariable variable=PYTHON_EXE]$PYTHON_EXE" + displayName: Define build env variables + - bash: | + set -e + cd pandas + git checkout $BUILD_COMMIT + git clean -fxd + git reset --hard + displayName: Checkout pandas commit + condition: eq(variables['SKIP_BUILD'], 'false') + - bash: | + set -e + pip install --timeout=60 numpy==$NP_BUILD_DEP + pip install --timeout=60 $TEST_DEPENDS Cython==$CYTHON_BUILD_DEP + pip install twine wheel + pushd pandas + python setup.py build + python setup.py bdist_wheel + ls dist + twine check dist/* + popd + displayName: Build wheel + condition: eq(variables['SKIP_BUILD'], 'false') + - bash: | + set -e + source extra_functions.sh + source config.sh + setup_test_venv + pip install pandas/dist/pandas-*.whl + run_tests + teardown_test_venv + displayName: Install wheel and test + condition: eq(variables['SKIP_BUILD'], 'false') + - task: PublishTestResults@2 + inputs: + testResultsFiles: '$(TEST_DIR)/$(JUNITXML)' + testRunTitle: ${{ format('{0}-$(Agent.JobName)', parameters.name) }} + displayName: 'Publish Test Results' + condition: eq(variables['SKIP_BUILD'], 'false') + + - bash: echo "##vso[task.prependpath]$CONDA/Scripts" + displayName: Add conda to PATH + condition: and(succeeded(), eq(variables['SKIP_BUILD'], 'false'), ne(variables['Build.Reason'], 'PullRequest')) + + - bash: conda install -q -y anaconda-client + displayName: Install anaconda-client + condition: and(succeeded(), eq(variables['SKIP_BUILD'], 'false'), ne(variables['Build.Reason'], 'PullRequest')) + + - bash: | + set -e + if [ "$BUILD_REASON" == "Schedule" ]; then + ANACONDA_ORG="scipy-wheels-nightly" + TOKEN="$PANDAS_NIGHTLY_UPLOAD_TOKEN" + else + ANACONDA_ORG="multibuild-wheels-staging" + TOKEN="$PANDAS_STAGING_UPLOAD_TOKEN" + fi + if [ "$TOKEN" == "" ]; then + echo "##[warning] Could not find anaconda.org upload token in secret variables" + fi + echo 
"##vso[task.setvariable variable=TOKEN]$TOKEN" + echo "##vso[task.setvariable variable=ANACONDA_ORG]$ANACONDA_ORG" + displayName: Retrieve secret upload token + condition: and(succeeded(), eq(variables['SKIP_BUILD'], 'false'), ne(variables['Build.Reason'], 'PullRequest')) + env: + # Secret variables need to mapped to env variables explicitly: + PANDAS_NIGHTLY_UPLOAD_TOKEN: $(PANDAS_NIGHTLY_UPLOAD_TOKEN) + PANDAS_STAGING_UPLOAD_TOKEN: $(PANDAS_STAGING_UPLOAD_TOKEN) + - bash: | + set -e + # The --force option forces a replacement if the remote file already + # exists. + ls pandas/dist/pandas-*.whl + anaconda -t $TOKEN upload --force -u $ANACONDA_ORG pandas/dist/pandas-*.whl + echo "PyPI-style index: https://pypi.anaconda.org/$ANACONDA_ORG/simple" + displayName: Upload to anaconda.org (only if secret token is retrieved) + condition: ne(variables['TOKEN'], '') diff --git a/config.sh b/config.sh index 625234f..675b7fd 100644 --- a/config.sh +++ b/config.sh @@ -25,6 +25,10 @@ function build_wheel { function run_tests { # Runs tests on installed distribution from an empty directory export PYTHONHASHSEED=$(python -c 'import random; print(random.randint(1, 4294967295))') + echo $PATH + which -a python + pip list python -c 'import pandas; pandas.show_versions()' - python -c 'import pandas; pandas.test(extra_args=["-m not clipboard", "--skip-slow", "--skip-network", "--skip-db", "-n=2"])' + # Skip test_maybe_promote_int_with_int: https://github.com/pandas-dev/pandas/issues/31856 + python -c 'import pandas; pandas.test(extra_args=["-m not clipboard", "--skip-slow", "--skip-network", "--skip-db", "-n=2", "-k not test_maybe_promote_int_with_int"])' } diff --git a/extra_functions.sh b/extra_functions.sh new file mode 100644 index 0000000..6e5f1b6 --- /dev/null +++ b/extra_functions.sh @@ -0,0 +1,29 @@ +function setup_test_venv { + # Create a new empty venv dedicated to testing for non-Linux platforms. On + # Linux the tests are run in a Docker container. 
+ if [ $(uname) != "Linux" ]; then + deactivate || echo "" + $PYTHON_EXE -m venv test_venv + if [ $(uname) == "Darwin" ]; then + source test_venv/bin/activate + else + source test_venv/Scripts/activate + fi + # Note: the idiom "python -m pip install ..." is necessary to upgrade + # pip itself on Windows. Otherwise one would get a permission error on + # pip.exe. + python -m pip install --upgrade pip wheel + if [ "$TEST_DEPENDS" != "" ]; then + pip install $TEST_DEPENDS + fi + fi +} + +function teardown_test_venv { + if [ $(uname) != "Linux" ]; then + deactivate || echo "" + if [ $(uname) == "Darwin" ]; then + source venv/bin/activate + fi + fi +} diff --git a/multibuild b/multibuild index 217614c..47f4b24 160000 --- a/multibuild +++ b/multibuild @@ -1 +1 @@ -Subproject commit 217614c7bdc27ac8f5bb29dab0568594b8f4f352 +Subproject commit 47f4b247be212315313ff72e3daafdff0cbc6c33