# master.yml

name: master

on:
  push:
    branches:
      - master
  pull_request:
    branches:
      - master

jobs:
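  # Tests Python 3.5 with pip-installed dependencies against Spark 2.3.4.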
  pip_python35:
    name: PIP, Python (3.5)
    runs-on: ubuntu-latest
    env:
      SPARK_VERSION: 2.3.4
      # The name of the directory '.cache' is kept for Travis CI compatibility. Once we remove
      # Travis CI, we should download Spark to a differently named directory to prevent confusion.
      SPARK_CACHE_DIR: /home/runner/.cache/spark-versions
      PANDAS_VERSION: 0.24.2
      PYARROW_VERSION: 0.10.0
      # DISPLAY=0.0 does not work in GitHub Actions with Python 3.5, so we work around it with xvfb-run.
      PYTHON_EXECUTABLE: xvfb-run python
    steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-java@v1
        with:
          java-version: 1.8
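      # Cache pip downloads, keyed on requirements-dev.txt, so unchanged dependencies are reused across runs.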
      - uses: actions/cache@v1
        with:
          path: ~/.cache/pip
          key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements-dev.txt') }}
          restore-keys: |
            ${{ runner.os }}-pip-
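      # Cache the downloaded Spark distribution, keyed on the Spark version under test.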
      - uses: actions/cache@v1
        with:
          path: ${{ env.SPARK_CACHE_DIR }}
          key: ${{ runner.os }}-koalas-spark-${{ env.SPARK_VERSION }}
      # Set up Python 3.5 via `apt-get install`, since the default Python 3.5 from `actions/setup-python`
      # seems to have problems with Tkinter, so we manually install python3.5-tk.
      # For this, we use the manually installed Python, not the one from `actions/setup-python`.
      - name: Setup Python 3.5
        run: |
          sudo add-apt-repository ppa:deadsnakes/ppa
          sudo apt-get install tk-dev python3.5-tk python3.5
          sudo rm /usr/bin/python
          sudo ln -s /usr/bin/python3.5 /usr/bin/python
          # The command below ensures that pip-installed executables are on the PATH.
          echo "::add-path::/home/runner/.local/bin"
      - name: Install dependencies
        run: |
          ./dev/download_travis_dependencies.sh
          sudo apt-get install xclip
          pip install setuptools
          # Black only supports Python 3.6+, so drop it from requirements-dev.txt when
          # installing with Python 3.5. This is hacky, but we will drop Python 3.5 soon,
          # so it's fine.
          sed -i '/black/d' requirements-dev.txt
          pip install -r requirements-dev.txt
          pip install pandas==$PANDAS_VERSION pyarrow==$PYARROW_VERSION
          pip list
      - name: Run tests
        run: |
          export SPARK_HOME="$SPARK_CACHE_DIR/spark-$SPARK_VERSION-bin-hadoop2.7"
          ./dev/lint-python
          ./dev/pytest
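      # Upload coverage results to Codecov.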
      - uses: codecov/codecov-action@v1.0.5
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
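  # Tests Python 3.6 and 3.7 with conda-installed dependencies against Spark 2.4.5.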
  conda_python:
    name: Conda, Python
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: [3.6, 3.7]
        include:
          - python-version: 3.6
            spark-version: 2.4.5
            pandas-version: 0.25.3
            pyarrow-version: 0.13.0
            logger: databricks.koalas.usage_logging.usage_logger
          - python-version: 3.7
            spark-version: 2.4.5
            pandas-version: 1.0.1
            pyarrow-version: 0.14.1
    env:
      PYTHON_VERSION: ${{ matrix.python-version }}
      SPARK_VERSION: ${{ matrix.spark-version }}
      PANDAS_VERSION: ${{ matrix.pandas-version }}
      PYARROW_VERSION: ${{ matrix.pyarrow-version }}
      # `QT_QPA_PLATFORM` works around 'QXcbConnection: Could not connect to display :0.0'.
      DISPLAY: 0.0
      QT_QPA_PLATFORM: offscreen
      KOALAS_USAGE_LOGGER: ${{ matrix.logger }}
      # The name of the directory '.cache' is kept for Travis CI compatibility. Once we remove
      # Travis CI, we should download Spark to a differently named directory to prevent confusion.
      SPARK_CACHE_DIR: /home/runner/.cache/spark-versions
    steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-java@v1
        with:
          java-version: 1.8
      - uses: actions/cache@v1
        with:
          path: ${{ env.SPARK_CACHE_DIR }}
          key: ${{ runner.os }}-koalas-spark-${{ env.SPARK_VERSION }}
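      # Download Spark and bootstrap a Miniconda-based test environment pinned to the matrix versions.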
      - name: Install dependencies
        run: |
          ./dev/download_travis_dependencies.sh
          curl -s https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh > miniconda.sh
          bash miniconda.sh -b -p $HOME/miniconda
          # See also https://github.com/conda/conda/issues/7980
          . $HOME/miniconda/etc/profile.d/conda.sh
          hash -r
          conda config --set always_yes yes --set changeps1 no
          conda update -q conda
          # Useful for debugging any issues with conda
          conda info -a
          # Create an isolated test environment with the Python version from the matrix.
          conda create -c conda-forge -q -n test-environment python=$PYTHON_VERSION
          conda activate test-environment
          conda install -c conda-forge --yes codecov
          conda config --env --add pinned_packages python=$PYTHON_VERSION
          conda config --env --add pinned_packages pandas==$PANDAS_VERSION
          conda config --env --add pinned_packages pyarrow==$PYARROW_VERSION
          conda install -c conda-forge --yes pandas==$PANDAS_VERSION pyarrow==$PYARROW_VERSION
          conda install -c conda-forge --yes --freeze-installed --file requirements-dev.txt
          conda list
      - name: Run tests
        run: |
          # The environment has to be initialized anew between GitHub Actions steps. Also, for some
          # reason we have to add the environment directory manually so that 'test-environment'
          # can be found. See also https://github.com/conda/conda/issues/7980
          . $HOME/miniconda/etc/profile.d/conda.sh
          conda config --prepend envs_dirs $HOME/miniconda/envs
          conda activate test-environment
          export SPARK_HOME="$SPARK_CACHE_DIR/spark-$SPARK_VERSION-bin-hadoop2.7"
          ./dev/lint-python
          ./dev/pytest
      - uses: codecov/codecov-action@v1.0.5
        with:
          token: ${{ secrets.CODECOV_TOKEN }}