Travis-CI: Allow network cache opt-in for whitelisted forks #3383

Merged
4 commits merged into from Apr 17, 2013
29 changes: 21 additions & 8 deletions .travis.yml
@@ -2,20 +2,29 @@ language: python

python:
- 2.6
- 3.3

env:
global:
- NOSE_ARGS="not slow" # need at least this so travis page will show env column
- secure: "O04RU5QRKEDL/SrIWEsVe8O+1TxZqZQSa28Sd+Fz48NW/XddhefYyxzqcUXh\nk/NjWMqknJRQhApLolBianVpsE577OTllzlcyKn3nUL6hjOXcoszGaYray7S\niNGKGyO8xrtB/ZQDtmupz0ksK8sLoCTscdiGotFulczbx0zt+4g="
- secure: "PUJ9nC1/v2vpFUtELSoSjI53OHCVXfFTb8+t5lIGIqHtjUBkhiJSNPfCv8Bx\ndsdrx30qP8KsSceYzaa/bog6p8YNU1iih23S0KbjucutvA0LNHBTNvnxmjBR\nSJfKd5FmwnXvizRyghYBzmQ3NmGO7ADw2DBwKOhgGMqCHZ8Tlc8="
- secure: "IDcMrCCW+6pgJtsI3Q163OPc0iec1ogpitaqiRhHcrEBUCXZgVeclOeiZBlw\n/u+uGyW/O0NhHMaFXKB8BdDVwlQEEHv48syN6npS/A5+O6jriWKL4ozttOhE\npOlu+yLhHnEwx6wZVIHRTVn+t1GkOrjlBcjaQi+Z13G3XmDaSG8="
- secure: "Zu9aj0dTGpvMqT/HqBGQgDYl/v5ubC7lFwfE8Fqb0N1UVXqbpjXnNH/7oal1\nUsIT7klO++LWm+LxsP/A1FWENTSgdYe99JQtNyauW+0x5YR1JTuDJ8atDgx9\nSq66CaVpS5t+ov7UVm2bKSUX+1S8+8zGbIDADrMxEzYEMF7WoGM="
- secure: "AfIvLxvCxj22zrqg3ejGf/VePKT2AyGT9erYzlKpBS0H8yi5Pp1MfmJjhaR4\n51zBtzqHPHiIEY6ZdE06o9PioMWkXS+BqJNrxGSbt1ltxgOFrxW5zOpwiFGZ\nZOv1YeFkuPf8PEsWT7615mdydqTQT7B0pqUKK/d6aka4TQ/tg5Q="
- secure: "EM4ySBUusReNu7H1QHXvjnP/J1QowvfpwEBmjysYxJuq7KcG8HhhlfpUF+Gh\nLBzLak9QBA67k4edhum3qtKuJR5cHuja3+zuV8xmx096B/m96liJFTrwZpea\n58op3W6ZULctEpQNgIkyae20bjxl4f99JhZRUlonoPfx/rBIMFc="
- secure: "pgMYS/6MQqDGb58qdzTJesvAMmcJWTUEEM8gf9rVbfqfxceOL4Xpx8siR9B2\nC4U4MW1cHMPP3RFEb4Jy0uK49aHH10snwZY1S84YPPllpH5ZFXVdN68OayNj\nh4k5N/2hhaaQuJ6Uh8v8s783ye4oYTOW5RJUFqQu4QdG4IkTIMs="

- NOSE_ARGS="not slow" UPLOAD=true

matrix:
include:
- python: 2.7
env: NOSE_ARGS="not network" LOCALE_OVERRIDE="zh_CN.GB18030"
env: NOSE_ARGS="slow and not network" LOCALE_OVERRIDE="zh_CN.GB18030" FULL_DEPS=true UPLOAD=false
- python: 2.7
env: NOSE_ARGS="not slow" FULL_DEPS=true
env: NOSE_ARGS="not slow" FULL_DEPS=true UPLOAD=true
- python: 3.2
env: NOSE_ARGS="not slow" FULL_DEPS=true
env: NOSE_ARGS="not slow" FULL_DEPS=true UPLOAD=true
- python: 3.3
env: NOSE_ARGS="not slow" UPLOAD=true

# allow importing from site-packages,
# so apt-get python-x works for system pythons
@@ -26,11 +35,14 @@ virtualenv:
before_install:
- echo "Waldo1"
- echo $VIRTUAL_ENV
- df
- date
- export PIP_ARGS=-q # comment this to debug travis install issues
- export APT_ARGS=-qq # comment this to debug travis install issues
# - export PIP_ARGS=-q # comment this to debug travis install issues
# - export APT_ARGS=-qq # comment this to debug travis install issues
# - set -x # enable this to see bash commands
- source ci/before_install.sh # we need to source this to bring in the env
- export ZIP_FLAGS=-q # comment this to debug travis install issues
- source ci/envars.sh # we need to source this to bring in the envars
- ci/before_install.sh
- python -V

install:
@@ -43,3 +55,4 @@ script:

after_script:
- ci/print_versions.py
- ci/after_script.sh
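
For context, the NOSE_ARGS values in the matrix above are nose attribute expressions. A minimal local sketch, assuming NOSE_ARGS is eventually handed to nose's -A/--eval-attr attribute selector (the ci script that consumes it is not part of this diff, so the exact invocation is illustrative):

    # approximate local equivalents of the Travis jobs above
    nosetests -A "not slow" pandas                # default jobs: skip tests tagged slow
    nosetests -A "slow and not network" pandas    # the zh_CN.GB18030 locale job
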
48 changes: 28 additions & 20 deletions ci/before_install.sh
@@ -1,28 +1,36 @@
#!/bin/bash

# If envars.sh determined we're running in an authorized fork
# and the user opted in to the network cache, and that cached versions
# are available on the cache server, download and deploy the cached
# files to the local filesystem

echo "inside $0"

# overview
if [ ${TRAVIS_PYTHON_VERSION} == "3.3" ]; then
sudo add-apt-repository -y ppa:doko/ppa # we get the py3.3 debs from here
fi

sudo apt-get update $APT_ARGS # run apt-get update for all versions

# # hack for broken 3.3 env
# if [ x"$VIRTUAL_ENV" == x"" ]; then
# VIRTUAL_ENV=~/virtualenv/python$TRAVIS_PYTHON_VERSION_with_system_site_packages;
# fi
if $PLEASE_TRAVIS_FASTER ; then
echo "Faster? well... I'll try."

if $CACHE_FILE_AVAILABLE ; then
echo retrieving "$CACHE_FILE_URL";

wget -q "$CACHE_FILE_URL" -O "/tmp/_$CYTHON_HASH.zip";
unzip $ZIP_FLAGS /tmp/_"$CYTHON_HASH.zip" -d "$BUILD_CACHE_DIR";
rm -f /tmp/_"$CYTHON_HASH.zip"
# copy cythonized c files over
cp -R "$BUILD_CACHE_DIR"/pandas/*.c pandas/
cp -R "$BUILD_CACHE_DIR"/pandas/src/*.c pandas/src/
fi;
echo "VENV_FILE_AVAILABLE=$VENV_FILE_AVAILABLE"
if $VENV_FILE_AVAILABLE ; then
echo "getting venv"
wget -q $VENV_FILE_URL -O "/tmp/venv.zip";
sudo unzip $ZIP_FLAGS -o /tmp/venv.zip -d "/";
sudo chown travis -R "$VIRTUAL_ENV"
rm -f /tmp/venv.zip # clean up the downloaded venv archive
fi;
fi

# # we only recreate the virtualenv for 3.x
# # since the "Detach bug" only affects python3
# # and travis has numpy preinstalled on 2.x which is quicker
# _VENV=$VIRTUAL_ENV # save it
# if [ ${TRAVIS_PYTHON_VERSION:0:1} == "3" ] ; then
# deactivate # pop out of any venv
# sudo pip install virtualenv==1.8.4 --upgrade
# sudo apt-get install $APT_ARGS python3.3 python3.3-dev
# sudo rm -Rf $_VENV
# virtualenv -p python$TRAVIS_PYTHON_VERSION $_VENV --system-site-packages;
# source $_VENV/bin/activate
# fi
true # never fail because bad things happened here
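
The trailing "true" is the important convention here: cache handling is strictly best-effort, so a missing or corrupt cache file must degrade to the normal (slow) build rather than fail the job. The same pattern in miniature, with an illustrative URL and filename (a hedged sketch, not taken verbatim from the script):

    # a cache miss must never break the build
    wget -q "$CACHE_FILE_URL" -O /tmp/cache.zip || echo "cache miss, falling back to a full build"
    true  # keep the script's exit status at 0 either way
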
67 changes: 67 additions & 0 deletions ci/envars.sh
@@ -0,0 +1,67 @@
#!/bin/bash

# This must be sourced by .travis.yml, so any envars exported here will
# be available to the rest of the build stages

# - computes a hash based on the cython files in the codebase
# - retrieves the decrypted key, if any, for all whitelisted forks
# - checks whether the user opted in to use the cache
# - if so, checks for availability of cache files on the server, based on the hash
# - sets envars to control what the following scripts do

# at most one of these will decrypt, so the end result is that $STORE_KEY
# either holds a single key or is empty
export STORE_KEY="$STORE_KEY0""$STORE_KEY1""$STORE_KEY2""$STORE_KEY3""$STORE_KEY4"
export STORE_KEY="$STORE_KEY""$STORE_KEY5""$STORE_KEY6""$STORE_KEY7"

# hash every tracked .pyx/.pxd file individually, then hash the list of hashes;
# append the Python version so 2.x and 3.x caches never collide
export CYTHON_HASH=$(find pandas | grep -P '\.(pyx|pxd)$' | sort \
| while read N; do echo $(tail -n+1 $N | md5sum ) ;done | md5sum| cut -d ' ' -f 1)

export CYTHON_HASH=$CYTHON_HASH-$TRAVIS_PYTHON_VERSION

# where the cache files live on the server
export CACHE_FILE_URL="https://cache27-pypandas.rhcloud.com/static/$STORE_KEY/$CYTHON_HASH.zip"
export VENV_FILE_URL="https://cache27-pypandas.rhcloud.com/static/$STORE_KEY/venv-$TRAVIS_PYTHON_VERSION.zip"
export CACHE_FILE_STORE_URL="https://cache27-pypandas.rhcloud.com/store/$STORE_KEY"

echo "Hashing:"
find pandas | grep -P '\.(pyx|pxd)$'
echo "Key: $CYTHON_HASH"

export CACHE_FILE_AVAILABLE=false
export VENV_FILE_AVAILABLE=false
export PLEASE_TRAVIS_FASTER=false

# check whether the user opted in to use the cache via commit message
if [ x"$(git log --format='%s' -n 1 | grep PLEASE_TRAVIS_FASTER | wc -l)" != x"0" ]; then
export PLEASE_TRAVIS_FASTER=true
fi;
if [ x"$(git log --format='%s' -n 1 | grep PTF | wc -l)" != x"0" ]; then
export PLEASE_TRAVIS_FASTER=true
fi;

if $PLEASE_TRAVIS_FASTER; then

# check whether the files exists on the server
curl -s -f -I "$CACHE_FILE_URL" # silent, don;t expose key
if [ x"$?" == x"0" ] ; then
export CACHE_FILE_AVAILABLE=true;
fi


curl -s -f -I "$VENV_FILE_URL" # silent, don;t expose key
if [ x"$?" == x"0" ] ; then
export VENV_FILE_AVAILABLE=true;
fi

# the pandas build cache machinery needs this set, and the directory created
export BUILD_CACHE_DIR="/tmp/build_cache"
mkdir "$BUILD_CACHE_DIR"
fi;

# debug
echo "PLEASE_TRAVIS_FASTER=$PLEASE_TRAVIS_FASTER"
echo "CACHE_FILE_AVAILABLE=$CACHE_FILE_AVAILABLE"
echo "VENV_FILE_AVAILABLE=$VENV_FILE_AVAILABLE"

true
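
As the grep above shows, the opt-in is read from the subject line of the most recent commit only. A hedged usage example for a contributor on a whitelisted fork (commit message, remote and branch names are illustrative):

    git commit -m "BLD: speed up travis builds (PTF)"   # or spell out PLEASE_TRAVIS_FASTER
    git push myfork my-branch                           # the next Travis build picks up the opt-in
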
157 changes: 105 additions & 52 deletions ci/install.sh
@@ -1,75 +1,128 @@
#!/bin/bash

# There are 2 distinct pieces that get zipped and cached
# - The venv site-packages dir including the installed dependencies
# - The pandas build artifacts, using the build cache support via
# scripts/use_build_cache.py
#
# if the user opted in to use the cache and we're on a whitelisted fork
# - if the server doesn't hold a cached version of venv/pandas build,
# do things the slow way, and put the results on the cache server
# for the next time.
# - if the cache files are available, install what's needed via apt
# (no compiling needed), then go directly to the script stage and collect $200.
#

echo "inside $0"
# Install Dependencies
SITE_PKG_DIR=$VIRTUAL_ENV/lib/python$TRAVIS_PYTHON_VERSION/site-packages
echo "Using SITE_PKG_DIR: $SITE_PKG_DIR"

# workaround for travis ignoring system_site_packages in travis.yml
rm -f $VIRTUAL_ENV/lib/python$TRAVIS_PYTHON_VERSION/no-global-site-packages.txt

# Hard Deps
pip install $PIP_ARGS --use-mirrors cython nose python-dateutil pytz

# try and get numpy as a binary deb
if [ x"$LOCALE_OVERRIDE" != x"" ]; then
# make sure the locale is available
# probably useless, since you would need to relogin
sudo locale-gen "$LOCALE_OVERRIDE"
fi;

# numpy is preinstalled on 2.x
# if [ ${TRAVIS_PYTHON_VERSION} == "2.7" ]; then
# sudo apt-get $APT_ARGS install python-numpy;
# fi
#scipy is not included in the cached venv
if [ x"$FULL_DEPS" == x"true" ] ; then
# for pytables gets the lib as well
sudo apt-get $APT_ARGS install libhdf5-serial-dev;

if [ ${TRAVIS_PYTHON_VERSION} == "3.2" ]; then
sudo apt-get $APT_ARGS install python3-numpy;
elif [ ${TRAVIS_PYTHON_VERSION} == "3.3" ]; then # should be >=3,3
pip $PIP_ARGS install numpy==1.7.0;
else
pip $PIP_ARGS install numpy==1.6.1;
if [ ${TRAVIS_PYTHON_VERSION} == "3.2" ]; then
sudo apt-get $APT_ARGS install python3-scipy
elif [ ${TRAVIS_PYTHON_VERSION} == "2.7" ]; then
sudo apt-get $APT_ARGS install python-scipy
fi
fi

# Optional Deps
if [ x"$FULL_DEPS" == x"true" ]; then
echo "Installing FULL_DEPS"
if [ ${TRAVIS_PYTHON_VERSION} == "2.7" ]; then
sudo apt-get $APT_ARGS install python-scipy;
fi
# Everything installed into site-packages inside this clause
# will be included in the cached venv downloaded from the net
# in PTF mode
if ( ! $VENV_FILE_AVAILABLE ); then
echo "Running full monty"
# Hard Deps
pip install $PIP_ARGS nose python-dateutil pytz
pip install $PIP_ARGS cython

if [ ${TRAVIS_PYTHON_VERSION} == "3.2" ]; then
sudo apt-get $APT_ARGS install python3-scipy;
if [ ${TRAVIS_PYTHON_VERSION} == "3.3" ]; then # should be >=3,3
pip install $PIP_ARGS numpy==1.7.0
elif [ ${TRAVIS_PYTHON_VERSION} == "3.2" ]; then
# sudo apt-get $APT_ARGS install python3-numpy; # 1.6.2 or precise
pip install $PIP_ARGS numpy==1.6.1
else
pip install $PIP_ARGS numpy==1.6.1
fi

if [ ${TRAVIS_PYTHON_VERSION:0:1} == "2" ]; then
sudo apt-get $APT_ARGS install libhdf5-serial-dev;
pip install numexpr
pip install tables
# Optional Deps
if [ x"$FULL_DEPS" == x"true" ]; then
echo "Installing FULL_DEPS"
pip install $PIP_ARGS cython

if [ ${TRAVIS_PYTHON_VERSION:0:1} == "2" ]; then
# installed explicitly above, to get the library as well
# sudo apt-get $APT_ARGS install libhdf5-serial-dev;
pip install numexpr
pip install tables
pip install $PIP_ARGS xlwt
fi

pip install $PIP_ARGS matplotlib
pip install $PIP_ARGS openpyxl
pip install $PIP_ARGS "xlrd>=0.9.0" # quoted so >= isn't parsed as a shell redirect
pip install $PIP_ARGS 'http://downloads.sourceforge.net/project/pytseries/scikits.timeseries/0.91.3/scikits.timeseries-0.91.3.tar.gz?r='
pip install $PIP_ARGS patsy

# fool statsmodels into thinking pandas was already installed
# so it won't refuse to install itself. We want it in the zipped venv

mkdir $SITE_PKG_DIR/pandas
touch $SITE_PKG_DIR/pandas/__init__.py
echo "version='0.10.0-phony'" > $SITE_PKG_DIR/pandas/version.py
pip install $PIP_ARGS git+git://github.com/statsmodels/statsmodels@c9062e43b8a5f7385537ca95#egg=statsmodels

rm -Rf $SITE_PKG_DIR/pandas # scrub phony pandas
fi

pip install $PIP_ARGS --use-mirrors openpyxl matplotlib;
pip install $PIP_ARGS --use-mirrors xlrd xlwt;
pip install $PIP_ARGS 'http://downloads.sourceforge.net/project/pytseries/scikits.timeseries/0.91.3/scikits.timeseries-0.91.3.tar.gz?r='
fi
# pack up the venv and cache it
if [ x"$STORE_KEY" != x"" ] && $UPLOAD; then
VENV_FNAME="venv-$TRAVIS_PYTHON_VERSION.zip"

if [ x"$VBENCH" == x"true" ]; then
if [ ${TRAVIS_PYTHON_VERSION:0:1} == "2" ]; then
sudo apt-get $APT_ARGS install libhdf5-serial-dev;
pip install numexpr
pip install tables
zip $ZIP_FLAGS -r "$HOME/$VENV_FNAME" $SITE_PKG_DIR/
ls -l "$HOME/$VENV_FNAME"
echo "posting venv"
# silent, don't expose key
curl -s --form upload=@"$HOME/$VENV_FNAME" "$CACHE_FILE_STORE_URL/$VENV_FNAME"
fi
pip $PIP_ARGS install sqlalchemy git+git://github.com/pydata/vbench.git;
fi

#build and install pandas
python setup.py build_ext install

#HACK: pandas is a statsmodels dependency
# so we need to install it after pandas
if [ x"$FULL_DEPS" == x"true" ]; then
pip install patsy
# pick recent 0.5dev dec/2012
pip install git+git://github.com/statsmodels/statsmodels@c9062e43b8a5f7385537ca95#egg=statsmodels
fi;

# make sure the desired locale is generated
if [ x"$LOCALE_OVERRIDE" != x"" ]; then
# piggyback this build for plotting tests. oh boy.
pip install $PIP_ARGS --use-mirrors matplotlib;
#build and install pandas
if [ x"$BUILD_CACHE_DIR" != x"" ]; then
scripts/use_build_cache.py -d
python setup.py install;
else
python setup.py build_ext install
fi

sudo locale-gen "$LOCALE_OVERRIDE"
# package pandas build artifacts and send them home
# that's everything the build cache (scripts/use_build_cache.py)
# stored during the build (.so, pyx->.c and 2to3)
if (! $CACHE_FILE_AVAILABLE) ; then
echo "Posting artifacts"
strip "$BUILD_CACHE_DIR/*" &> /dev/null
echo "$BUILD_CACHE_DIR"
cd "$BUILD_CACHE_DIR"/
zip -r $ZIP_FLAGS "$HOME/$CYTHON_HASH".zip *
cd "$TRAVIS_BUILD_DIR"
pwd
zip "$HOME/$CYTHON_HASH".zip $(find pandas | grep -P '\.(pyx|pxd)$' | sed -r 's/.(pyx|pxd)$/.c/')

# silent, don't expose key
curl -s --form upload=@"$HOME/$CYTHON_HASH".zip "$CACHE_FILE_STORE_URL/$CYTHON_HASH.zip"
fi

true
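
Taken together, envars.sh, before_install.sh and install.sh implement a simple round trip against the cache server: probe with a HEAD request, download from /static on a hit, and POST fresh artifacts to /store after a miss. A condensed sketch of that protocol, reusing the variables defined in ci/envars.sh (the server is assumed to accept a multipart "upload" field, as the curl calls above imply):

    # cache miss: pack the build artifacts and post them for next time
    zip -q -r "$HOME/$CYTHON_HASH.zip" "$BUILD_CACHE_DIR"/*
    curl -s --form upload=@"$HOME/$CYTHON_HASH.zip" "$CACHE_FILE_STORE_URL/$CYTHON_HASH.zip"

    # cache hit on a later build with the same cython hash: probe, then reuse
    curl -s -f -I "$CACHE_FILE_URL" && wget -q "$CACHE_FILE_URL" -O /tmp/cache.zip
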