name: CI

on:
  push:
    branches: [ master ]
  pull_request:
    branches: [ master ]
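# Triggered on every push to master and on every pull request targeting master.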
jobs:
  lint:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Set up Python 3.9
        uses: actions/setup-python@v2
        with:
          python-version: 3.9
      - name: Install dependencies
        run: |
          pip install --upgrade pip
          pip install .
          pip install -r tests-requirements.txt
      - name: Linter
        run: |
          pylama
      - name: Type checker
        run: |
          mypy --config-file setup.cfg
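  # Both checks above are assumed to read their settings from setup.cfg:
  # mypy is pointed at it explicitly, and pylama picks up a [pylama]
  # section from setup.cfg by default when one is present.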
  build:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: [3.7, 3.8, 3.9]
    steps:
      - uses: actions/checkout@v2
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install dependencies
        run: |
          pip install --upgrade pip
          pip install -e .
          pip install -r tests-requirements.txt
      - name: Tests
        run: |
          pytest -m "not hadoop and not conda" -s tests
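  # The marker expression deselects tests tagged with the "hadoop" and
  # "conda" pytest markers; those are covered by the dedicated jobs below
  # (the conda job runs `pytest -m conda`).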
  standalone_spark3_with_S3:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Build spark-docker
        run: docker build -t spark-docker ./examples/spark-with-S3 --build-arg SPARK_INPUT_VERSION=3.2.2 --build-arg PYTHON_VERSION=3.9.15
      - name: Start the docker-compose stack
        run: |
          export PYTHON_VERSION=3.9
          docker compose -f ./examples/spark-with-S3/docker-compose.yml up -d
      - name: Check running containers
        run: docker ps -a
      - name: Run Spark job
        run: docker exec spark-master ./examples/spark-with-S3/scripts/run_spark_example.sh python3.9 3.2.2
  standalone_spark2_with_S3:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Build spark-docker
        run: docker build -t spark-docker ./examples/spark-with-S3 --build-arg SPARK_INPUT_VERSION=2.4.2 --build-arg PYTHON_VERSION=3.9.15
      - name: Start the docker-compose stack
        run: |
          export PYTHON_VERSION=3.9
          docker compose -f ./examples/spark-with-S3/docker-compose.yml up -d
      - name: Check running containers
        run: docker ps -a
      - name: Run Spark job
        run: docker exec spark-master ./examples/spark-with-S3/scripts/run_spark_example.sh python3.9 2.4.2
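  # The two standalone Spark jobs build the same image and run the same
  # example script; only the SPARK_INPUT_VERSION build arg (3.2.2 vs 2.4.2)
  # and the version passed to run_spark_example.sh differ.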
  # hadoop_hdfs:
  #   runs-on: ubuntu-latest
  #   steps:
  #     - uses: actions/checkout@v2
  #     - name: Set up Python 3.9
  #       uses: actions/setup-python@v2
  #       with:
  #         python-version: 3.9
  #     - name: Install hadoop-test-cluster
  #       run: |
  #         pip install hadoop-test-cluster
  #     - name: Start cluster
  #       run: |
  #         htcluster startup --image cdh5 --mount .:cluster-pack
  #     - name: Start Job
  #       run: |
  #         # for the hack of wrapping the command in `script`, see https://github.com/actions/runner/issues/241#issuecomment-577360161
  #         # the prebuilt image only contains a conda install, so we install Python as well;
  #         # to avoid sharing files on the worker node we copy the Python install script via HDFS to the worker's /tmp folder
  #         script -e -c "htcluster exec -u root -s edge -- chown -R testuser /home/testuser && \
  #           htcluster exec -u root -s edge -- /home/testuser/cluster-pack/tests/integration/install_python.sh && \
  #           htcluster exec -u root -s edge -- hdfs dfs -put /home/testuser/cluster-pack/tests/integration/install_python.sh hdfs:///tmp && \
  #           htcluster exec -u root -s worker -- hdfs dfs -get hdfs:///tmp/install_python.sh /home/testuser && \
  #           htcluster exec -u root -s worker -- chmod +x /home/testuser/install_python.sh && \
  #           htcluster exec -u root -s worker -- /home/testuser/install_python.sh && \
  #           htcluster exec -s edge -- /home/testuser/cluster-pack/tests/integration/hadoop_hdfs_tests.sh"
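  # Note: `script -e -c "<cmd>"` allocates a pseudo-TTY for the wrapped
  # command, which htcluster needs under GitHub Actions (runners provide no
  # TTY by default; see the runner issue linked above).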
  conda:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: [3.9]
    steps:
      - uses: actions/checkout@v2
      - name: Run tests with conda
        run: |
          conda update -y conda
          conda create -n venv -y python=${{ matrix.python-version }}
          # `conda activate` fails in non-interactive shells unless the shell
          # hook is initialized first, see https://github.com/conda/conda/issues/7980
          eval "$(conda shell.bash hook)"
          conda activate venv
          pip install .
          pip install -r tests-requirements.txt
          pytest -m conda -s tests --log-cli-level=INFO