# Copyright (c) 2017-2018 Uber Technologies, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
dist: trusty
language: python
python:
- '2.7'
env:
# ARROW_PRE_0_15_IPC_FORMAT is set for pyarrow compatibility
# See https://spark.apache.org/docs/3.0.0-preview/sql-pyspark-pandas-with-arrow.html#compatibiliy-setting-for-pyarrow--0150-and-spark-23x-24x
- PYARROW_VERSION=2.0.0 NUMPY_VERSION=1.19.1 TF_VERSION=1.15.0 DEPLOYABLE=1 PY=3.7 PYSPARK_VERSION=2.4.4 ARROW_PRE_0_15_IPC_FORMAT=1
- PYARROW_VERSION=2.0.0 NUMPY_VERSION=1.19.1 TF_VERSION=1.15.0 DEPLOYABLE=0 PY=3.7 PYSPARK_VERSION=3.0.0 ARROW_PRE_0_15_IPC_FORMAT=0
- PYARROW_VERSION=2.0.0 NUMPY_VERSION=1.19.1 TF_VERSION=2.1.0 DEPLOYABLE=0 PY=3.7 PYSPARK_VERSION=3.0.0 ARROW_PRE_0_15_IPC_FORMAT=0
- PYARROW_VERSION=3.0.0 NUMPY_VERSION=1.20.1 TF_VERSION=2.1.0 DEPLOYABLE=0 PY=3.7 PYSPARK_VERSION=3.0.0 ARROW_PRE_0_15_IPC_FORMAT=0
- PYARROW_VERSION=0.17.1 NUMPY_VERSION=1.19.1 TF_VERSION=2.1.0 DEPLOYABLE=0 PY=3.7 PYSPARK_VERSION=3.0.0 ARROW_PRE_0_15_IPC_FORMAT=0
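# Each line above defines one build job in the matrix. Only the job with
# DEPLOYABLE=1 is eligible for the PyPI deploy at the bottom of this file
# (see the `condition: $DEPLOYABLE = 1` under deploy.on).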
services:
- docker
install:
# We use the selitvin/petastorm_ci_auto image (CI_IMAGE below) as the docker image
# for running on travis-ci. To rebuild the CI image, see docker/Makefile. The image
# ships with all petastorm dependencies installed; we rerun dependency installation
# on each build to refresh the versions of the dependencies if needed.
- export CI_IMAGE=selitvin/petastorm_ci_auto:ci-2020-07-01-00
- docker pull $CI_IMAGE
- docker images
- pip install -U codecov
before_script:
# Run the docker container for an hour; all docker commands below execute inside this container.
- docker run -v `pwd`:/petastorm --name petastorm_ci $CI_IMAGE /bin/sh -c "sleep 3600" &
# wait for docker to start
- sleep 20
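# (Note: the trailing `&` above detaches the `docker run`, and the 20-second
# sleep gives the container time to come up before the first `docker exec` below.)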
# ARROW_PRE_0_15_IPC_FORMAT=1 is required when spark<3.0 and pyarrow>=0.15
- export RUN="docker exec -e ARROW_PRE_0_15_IPC_FORMAT=$ARROW_PRE_0_15_IPC_FORMAT petastorm_ci bash /run_in_venv.sh ${PY}"
- export PYTEST="pytest --timeout=360 -v --color=yes --cov=./ --cov-report xml:coverage.xml"
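# For illustration only (assuming PY=3.7 and a test path of `petastorm`), a step
# such as `$RUN $PYTEST petastorm` expands to roughly:
#   docker exec -e ARROW_PRE_0_15_IPC_FORMAT=$ARROW_PRE_0_15_IPC_FORMAT petastorm_ci \
#     bash /run_in_venv.sh 3.7 pytest --timeout=360 -v --color=yes \
#     --cov=./ --cov-report xml:coverage.xml petastorm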
# Refresh our dependencies' versions, as the ones baked into the CI image may go stale
- $RUN pip install -e /petastorm/[test,tf,torch,docs,opencv]
# Upgrade pip to avoid weird failures, such as failing to install torchvision
# This will use requirements from setup.py and install them into Travis's virtual environment
# [tf] chooses to depend on cpu version of tensorflow (alternatively, could do [tf_gpu])
# pyarrow was compiled against a newer version of numpy than we require so we need to upgrade it
# (or optionally install pyarrow from source instead of through binaries)
- $RUN pip install --upgrade numpy==$NUMPY_VERSION
- $RUN pip install -U pyarrow==${PYARROW_VERSION} tensorflow==${TF_VERSION} pyspark==${PYSPARK_VERSION}
- $RUN pip list
- $RUN mypy petastorm
# We run a bunch of linting in before_script to fail fast and not run unit tests in case of lint failure.
- $RUN flake8 . --count --show-source --statistics
- $RUN flake8 . --count --exit-zero --max-complexity=10 --statistics
- $RUN pylint --rcfile=.pylintrc petastorm examples -f parseable -r n
# enable core dumps
- $RUN ulimit -c unlimited -S
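# (Any core files produced by crashing tests are inspected with gdb in after_failure below.)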
script:
# Build documentation
- $RUN bash -c "cd /petastorm/docs/autodoc && pwd && make html"
# Running pytest twice: first the tests that have to be forked, then the rest.
# `-Y` causes datasets generated for testing to be persisted between runs in the pytest cache.
- $RUN $PYTEST -m "forked" --forked -Y
    --ignore=examples/mnist/tests/test_pytorch_mnist.py
    --ignore=petastorm/tests/test_pytorch_utils.py
    --ignore=petastorm/tests/test_pytorch_dataloader.py
    --ignore=petastorm/tests/test_tf_autograph.py
    petastorm examples
- $RUN $PYTEST -m "not forked" -Y --cov-append
    --ignore=examples/mnist/tests/test_pytorch_mnist.py
    --ignore=petastorm/tests/test_pytorch_utils.py
    --ignore=petastorm/tests/test_pytorch_dataloader.py
    --ignore=petastorm/tests/test_tf_autograph.py
    petastorm examples
# Tensorflow and pytorch tests conflict (segfault). Split into separate runs.
- $RUN $PYTEST --cov-append
    examples/mnist/tests/test_pytorch_mnist.py
    petastorm/tests/test_pytorch_dataloader.py
    petastorm/tests/test_pytorch_utils.py
# Run test_tf_autograph separately because the error message "AutoGraph could not transform..." only shows once, so we need to ensure no other TF tests run before it.
- $RUN $PYTEST -Y --cov-append petastorm/tests/test_tf_autograph.py
after_success:
- codecov --required
- $RUN git clean -dfx
after_failure:
# Only upon failure, install gdb to process core dump
- sudo apt-get install -y gdb
- COREFILE=$(find . -maxdepth 2 -name "core*" | head -n 1)
# NOTE: without debug info, the stack trace won't be too useful.
# If it's possible to run with python2.7-dbg, then gdb hook py-bt will do its magic.
- if [[ -f "$COREFILE" ]]; then gdb -c "$COREFILE" python -ex "thread apply all bt" -ex "set pagination 0" -batch; fi
before_deploy:
# Build a python wheel before deployment
- python setup.py bdist_wheel
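# (The pypi provider's `distributions: sdist bdist_wheel` setting below should
# rebuild these distributions at deploy time, so this step is likely just a
# sanity check that the wheel builds.)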
deploy:
# Disabling pypi deployment. There is an issue with authentication at the moment
- provider: pypi
  user: uber
  password:
    secure: OEyw3NVW1kuSWSJAW+OBSZUk4uSSnkZnQ4njM1EFooTDZ2axAe69y+Z/PRjuLup7bbdxgq8DUXzK+jUWs07vcPYUKp0ek9Gni9gmv08B62P2plupfw35KCjxBTd3RY1cWpkX425ePucuAH1PojY5J0LU4BCPOmyzdpAniLdmcXwJTM1EsQXCub3v/TBK1dxyFMHUyW9WqRStQI1xF8mxhVupnZ/spN+ixvqTMakBczqEttd8AsoSXQtvEUo5ot082/U8LryvrHEsHH52hdbWlSR8TxbC1fK8LgEeXoeqfhOiYNtdOTB1+cGmdJSvjbHCq4OUfl1sbTbBSxrkLNeFAQztAQqIybp1F5PjjVKAr3mHB7HpnMoUcDQiVZ8srfwBpCGZlsj6BsNxYGidiSXvKyKHO1TnTHxNhXIjPzAxjPHXFm7HH9kCXavGEMwXMn06TNq9uP/e6ekd0qE2l+x7xc9i7bdSpIXC9s1uCNqbObiV+/BNXvtDjAVxmp4d+foIPwLx6Qz42IFzPb96lY2Ifow8I8406F/Y5t003xFwnkkgbfI6Ya8gJaJIP6pYGuWJY/lj6/5nElroeiZg1G3rCRBfxwG0RJ2RVhNpuqAcXrQ7SUQ/bbDKUc5psrE920kpF/V3j5tA83pSyJW49IjNQxRABGviPh7aSVWFBeyh0BE=
  distributions: sdist bdist_wheel
  on:
    tags: true
    condition: $DEPLOYABLE = 1
# # Push all python wheels created to Github release
# - provider: releases
#   skip_cleanup: true
#   api_key:
#     secure: I96u6KqgSj0Cqx2NvLLLgw6rx7+gN4/6wdptfBrP1zxppMERu/iaLYLgDwDKRXzgTIrx5LopFIllf9/kPcUJgDFj/AxESo6aukzcYK3tB2OhEYIAxYcZf8Kt3aV+AHp/TIDdZ9sGbVLxypuxSXqiAc5dJw8S0Njja/9Qgxe4jqNXNLjLxEb3qlCrYJFEA9MxxPgH/QsWZ6M43hR5gNrGWrhSaIHSjMXXkzRjQG+bGApFf+XRx7le2M5vIeaw1K/osJ930QEjDTnp0v5hFvkB9F1buC17rZO8Xy0KAAhB1+SaEWtS4N0lFIVYE1b3Ke1ek82kW1d7UDEAcEL6ccICboKtvhFfgb1MXVQkdRq4HlhwHl8n4FJIztbwXaQW1aISR5x+m6RIFwMuI+U/MjcVVt+CEgo8X4HATb2pdgWnFcS649t7lhrx7HCBNZpUNmfoZ/YyNgPdQf+vreCNxWOpqi4UacH890GhLFUFXtijeL7xGXWIg7ugQmlK27CfM1mMN1Vp9NTRAXA0x7HuqckWKY85lCJMT+3IJMgVjz+BdGZDQ5RJI22FmRDNuWrH/Mio77EK+sOsKm/X3Q8/Sb8uwU+Ft1Vb7aikQ2NIl89uQQdVxH4TlQw6/VQSZODacTUwMz5CkB0RJNrlbZpUpPrHiTVa4dRo7/34qe7iGA7XtfY=
#   file_glob: true
#   file: dist/*.whl
#   on:
#     tags: true
#     python: 3.7
#     condition: $DEPLOYABLE = 1