forked from maxpumperla/elephas
.travis.yml
sudo: required
dist: trusty
language: python
python:
- "2.7"
# - "3.4" # Note that hyperopt currently seems to have issues with 3.4
install:
# code below is taken from http://conda.pydata.org/docs/travis.html
# We do this conditionally because it saves us some downloading if the
# version is the same.
- if [[ "$TRAVIS_PYTHON_VERSION" == "2.7" ]]; then
wget https://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh -O miniconda.sh;
else
wget https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O miniconda.sh;
fi
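# Install Miniconda into $HOME and put it on the PATH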
- bash miniconda.sh -b -p $HOME/miniconda
- export PATH="$HOME/miniconda/bin:$PATH"
- hash -r
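# Configure conda for non-interactive use, update it, and print environment info for debugging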
- conda config --set always_yes yes --set changeps1 no
- conda update -q conda
- conda info -a
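# Create an isolated conda environment with the scientific Python stack needed for the tests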
- conda create -q -n test-environment python=$TRAVIS_PYTHON_VERSION numpy scipy matplotlib pandas pytest h5py flask
- source activate test-environment
- pip install pytest-cov python-coveralls
- pip install git+https://github.com/Theano/Theano.git
- pip install keras
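# Install elephas itself from the repository checkout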
- python setup.py install
# Install Spark
- wget https://archive.apache.org/dist/spark/spark-1.5.2/spark-1.5.2-bin-hadoop2.6.tgz -P $HOME
- tar zxvf $HOME/spark-* -C $HOME
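# Point SPARK_HOME at the unpacked distribution and expose spark-submit on the PATH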
- export SPARK_HOME=$HOME/spark-1.5.2-bin-hadoop2.6
- export PATH=$PATH:$SPARK_HOME/bin
# Just run an example for now
script:
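# Sanity-check that the Keras backend can be imported before running the Spark job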
- python -c "import keras.backend"
- spark-submit --driver-memory 2G $PWD/examples/mnist_mlp_spark.py
after_success:
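# Upload coverage results to Coveralls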
- coveralls