Update Python versions and fix various GitHub Actions #369
name: CI
on:
  push:
    branches: [ master ]
  pull_request:
    branches: [ master ]
jobs:
  lint:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Set up Python 3.9
        uses: actions/setup-python@v2
        with:
          python-version: 3.9
      - name: Install dependencies
        run: |
          pip install --upgrade pip
          pip install .
          pip install -r tests-requirements.txt
      - name: Linter
        run: |
          pylama
      - name: Type checker
        run: |
          mypy --config-file setup.cfg
  build:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: [3.7, 3.8, 3.9]
    steps:
      - uses: actions/checkout@v2
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python-version }}
      - name: Install dependencies
        run: |
          pip install --upgrade pip
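          # editable install so the tests run against the checked-out sources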
          pip install -e .
          pip install -r tests-requirements.txt
      - name: Tests
        run: |
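          # tests marked hadoop or conda are deselected here; the conda job below runs the conda-marked ones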
          pytest -m "not hadoop and not conda" -s tests
  standalone_spark3_with_S3:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
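      # the build args pin the Spark and Python versions baked into the example image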
      - name: Build spark-docker
        run: docker build -t spark-docker ./examples/spark-with-S3 --build-arg SPARK_INPUT_VERSION=3.2.2 --build-arg PYTHON_VERSION=3.9.15
      - name: Build the docker-compose stack
        run: |
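          # assumption: PYTHON_VERSION is substituted into the compose file to select the matching image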
          export PYTHON_VERSION=3.9
          docker compose -f ./examples/spark-with-S3/docker-compose.yml up -d
      - name: Check running containers
        run: docker ps -a
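      # the script arguments (Python binary, Spark version) match the image built above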
      - name: Run Spark job
        run: docker exec spark-master ./examples/spark-with-S3/scripts/run_spark_example.sh python3.9 3.2.2
  # hadoop_hdfs:
  #   runs-on: ubuntu-latest
  #   steps:
  #     - uses: actions/checkout@v2
  #     - name: Set up Python 3.9
  #       uses: actions/setup-python@v2
  #       with:
  #         python-version: 3.9
  #     - name: Install hadoop-test-cluster
  #       run: |
  #         pip install hadoop-test-cluster
  #     - name: Start cluster
  #       run: |
  #         htcluster startup --image cdh5 --mount .:cluster-pack
  #     - name: Start Job
  #       run: |
  #         # for the hack with script, see https://github.com/actions/runner/issues/241#issuecomment-577360161
  #         # the prebuilt image only contains a conda install, so we also install Python
  #         # to avoid sharing files on the worker node, we copy the Python install script via HDFS to the worker's /tmp folder
  #         script -e -c "htcluster exec -u root -s edge -- chown -R testuser /home/testuser && \
  #           htcluster exec -u root -s edge -- /home/testuser/cluster-pack/tests/integration/install_python.sh && \
  #           htcluster exec -u root -s edge -- hdfs dfs -put /home/testuser/cluster-pack/tests/integration/install_python.sh hdfs:///tmp && \
  #           htcluster exec -u root -s worker -- hdfs dfs -get hdfs:///tmp/install_python.sh /home/testuser && \
  #           htcluster exec -u root -s worker -- chmod +x /home/testuser/install_python.sh && \
  #           htcluster exec -u root -s worker -- /home/testuser/install_python.sh && \
  #           htcluster exec -s edge -- /home/testuser/cluster-pack/tests/integration/hadoop_hdfs_tests.sh"
  conda:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: [3.9]
    steps:
      - uses: actions/checkout@v2
      - name: Run tests with conda
        run: |
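          # assumes conda is preinstalled on the ubuntu-latest runner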
          conda update -y conda
          conda create -n venv -y python=${{ matrix.python-version }}
          # https://github.com/conda/conda/issues/7980
          eval "$(conda shell.bash hook)"
          conda activate venv
          pip install .
          pip install -r tests-requirements.txt
          pytest -m conda -s tests --log-cli-level=INFO