.travis.yml
language: python
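# Travis runs one job per Python version listed below.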
python:
  - "2.7"
  - "3.4"
  - "3.5"
cache:
  directories:
    - $HOME/.cache/spark-versions
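# Each matrix entry pins a Spark version and the URL of its prebuilt binary package.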
env:
  matrix:
    - SPARK_VERSION="2.0.0" SPARK_BUILD="spark-$SPARK_VERSION-bin-hadoop2.7" SPARK_BUILD_URL="http://d3kbcqa49mib13.cloudfront.net/$SPARK_BUILD.tgz"
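# Pre-fetch test dependencies; given SPARK_BUILD_URL above, this presumably
# downloads the Spark binary into the cached directory.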
before_install:
  - ./bin/download_travis_dependencies.sh
# See this page: http://conda.pydata.org/docs/travis.html
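# The install phase follows the conda Travis recipe linked above:
# bootstrap Miniconda, then create an isolated test environment.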
install:
  - sudo apt-get update
  # We do this conditionally because it saves us some downloading if the
  # version is the same.
  - if [[ "$TRAVIS_PYTHON_VERSION" == "2.7" ]]; then
      wget https://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh -O miniconda.sh;
    else
      wget https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O miniconda.sh;
    fi
  - bash miniconda.sh -b -p $HOME/miniconda
  - export PATH="$HOME/miniconda/bin:$PATH"
  - hash -r
  - conda config --set always_yes yes --set changeps1 no
  - conda update -q conda
  # Useful for debugging any issues with conda
  - conda info -a
  # Create the test environment with the pinned dependencies.
  - conda create -q -n test-environment python=$TRAVIS_PYTHON_VERSION scikit-learn==0.17.0 nose=1.3.7 pandas=0.18
  - source activate test-environment
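# Run the Python test suite against the cached Spark build.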
script:
  - SPARK_HOME=$HOME/.cache/spark-versions/$SPARK_BUILD ./python/run-tests.sh