# .travis.yml
sudo: false
language: python
python:
- "2.6"
- "2.7"
- "3.3"
- "3.4"
branches:
only:
- master
services:
- mongodb
# Note: conda is only available for Python 2.7, so we install it under 2.7 and then
# use conda to create an environment with the dependencies for the Python version
# being tested ($TRAVIS_PYTHON_VERSION).
install:
  # Install conda
  - wget http://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh -O miniconda.sh
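  # Run the installer in batch mode (-b) into $HOME/miniconda (-p), put conda on PATH,
  # and tell conda to answer "yes" to prompts without changing the shell prompt.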
  - bash miniconda.sh -b -p $HOME/miniconda
  - export PATH="$HOME/miniconda/bin:$PATH"
  - conda config --set always_yes yes --set changeps1 no
  - conda update conda
  # Install dependencies
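  # Create and activate a conda environment matching the Python version under test
  # ($TRAVIS_PYTHON_VERSION) with the scientific stack blaze needs.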
  - conda create -n test-environment python=$TRAVIS_PYTHON_VERSION pytest numpy sqlalchemy pandas h5py pip flask requests pytables cython bcolz xlrd coverage psutil networkx numba
  - source activate test-environment
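  # Version-specific extras: the unittest2 and unicodecsv backports on Python 2.6,
  # unicodecsv on 2.7, and pyhive + spark from the "blaze" conda channel on 2.7 only.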
  - if [[ $TRAVIS_PYTHON_VERSION == '2.6' ]]; then conda install unittest2; pip install unicodecsv; fi
  - if [[ $TRAVIS_PYTHON_VERSION == '2.7' ]]; then pip install unicodecsv; fi
  - if [[ $TRAVIS_PYTHON_VERSION == '2.7' ]]; then conda install pyhive spark -c blaze; fi
  - pip install psycopg2
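  # datashape and into are installed straight from their GitHub master branches so the
  # build tests against the latest development versions rather than released packages.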
  # Install DataShape
  - pip install git+http://github.com/ContinuumIO/datashape
  # Install into
  - pip install git+http://github.com/ContinuumIO/into
  # Install BLZ
  # - pip install git+http://github.com/ContinuumIO/blz
  # Install PyMongo
  - pip install pymongo
  # Install Spark
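  # (disabled: the from-source Spark build below is kept for reference; on Python 2.7
  #  Spark is installed from the "blaze" conda channel earlier in this section)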
  # - wget http://d3kbcqa49mib13.cloudfront.net/spark-0.9.1.tgz
  # - tar xvfz spark-0.9.1.tgz
  # - mv spark-0.9.1 spark
  # - export SPARK_HOME=$PWD/spark
  # - export PYTHONPATH=$PYTHONPATH:$SPARK_HOME/python
  # - cd spark
  # - ./sbt/sbt assembly
  # - cd ..
  # Install dynd
  - conda install -c mwiebe dynd-python
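  # The commented-out block below is the old from-source build of libdynd and
  # dynd-python, kept for reference; the conda package above replaces it.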
  # - git clone --depth=1 https://github.com/ContinuumIO/libdynd.git
  # - mkdir libdynd/build
  # - pushd libdynd/build
  # - cmake -DDYND_BUILD_TESTS=False ..
  # - make
  # - mkdir ../lib
  # - cp libdynd* ../lib
  # - chmod +x libdynd-config
  # - export PATH=$PATH:$PWD
  # - export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$PWD
  # - popd
  # - git clone --depth=1 https://github.com/ContinuumIO/dynd-python.git
  # - mkdir dynd-python/build
  # - pushd dynd-python/build
  # - cmake -DCMAKE_BUILD_TYPE=RelWithDebInfo -DUSE_SEPARATE_LIBDYND=ON -DCMAKE_INSTALL_PREFIX=${PWD} -DPYTHON_PACKAGE_INSTALL_PREFIX=${PWD} ..
  # - make
  # - make install
  # - export PYTHONPATH=$PYTHONPATH:$PWD
  # - popd
  #
  - pip install git+https://github.com/mrocklin/multipledispatch
  - pip install git+https://github.com/pytoolz/toolz
  - pip install git+https://github.com/pytoolz/cytoolz
  # Install coveralls
  - pip install coveralls
  # Install Blaze
  - python setup.py install
before_script:
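  # Give the mongod service time to finish starting, then turn on the textSearchEnabled
  # server parameter so MongoDB text search is available to the test suite.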
  - sleep 15
  - "mongo admin --eval 'db.runCommand({setParameter: 1, textSearchEnabled: true});'"
script:
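  # Generate a pytest.ini on the fly: run doctests in modules and *.rst docs alongside
  # the regular suite, skip docs/source/scripts, and run py.test under coverage
  # ($(which py.test) gives coverage the full path of the py.test script to execute).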
  - echo '[pytest]' > pytest.ini
  - "echo \"addopts = -v -r sxX --doctest-modules --doctest-glob='*.rst' --pyargs blaze docs\" >> pytest.ini"
  - echo 'norecursedirs = docs/source/scripts' >> pytest.ini
  - coverage run $(which py.test)
  - coverage report --show-missing
after_success:
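  # Upload the coverage report to Coveralls only when the build succeeds.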
  - coveralls
notifications:
  email: false
  flowdock: "b08b3ba4fb86fa48121e90b5f67ccb75"
  on_success: "change"
  on_failure: "always" # "change"