Skip to content

Added test workflow with basic python test #16

Added test workflow with basic python test

Added test workflow with basic python test #16

Workflow file for this run

# CI workflow: builds the lakeFS Iceberg package and runs the Python isolation
# test against a lakeFS service container (S3 blockstore, local metadata DB).
name: iceberg-tests

# Run on every pull request, and on pushes to main.
on:
  pull_request:
  push:
    branches:
      - main

## These permissions are needed to interact with GitHub's OIDC Token endpoint.
# permissions:
#   id-token: write
#   contents: read
#   packages: write

jobs:
  # build-lakefs-iceberg:
  #   name: Build lakeFS Iceberg package
  #   runs-on: ubuntu-20.04
  #   steps:
  #     - name: Check-out code
  #       uses: actions/checkout@v3
  #
  #     - name: Set up JDK 11
  #       uses: actions/setup-java@v3
  #       with:
  #         distribution: "temurin"
  #         java-version: "11"
  #         cache: "maven"
  #
  #     - name: Build lakeFS Iceberg
  #       run: mvn clean install --batch-mode --update-snapshots -DskipTests -P\!sign-artifacts
  #       # run: mvn -DfinalName=lakefs-iceberg --batch-mode --update-snapshots package -DskipTests
  #       # run: mvn -Passembly -DfinalName=lakefs-iceberg --batch-mode --update-snapshots package -DskipTests
  #
  #     - name: Store lakeFS Iceberg
  #       uses: actions/upload-artifact@v3
  #       with:
  #         name: lakefs-iceberg
  #         path: target/lakefs-iceberg-1.0-SNAPSHOT.jar

  iceberg-isolation-test:
    name: Test lakeFS isolation with iceberg
    # needs: [ build-lakefs-iceberg ]
    runs-on: ubuntu-20.04
    services:
      # lakeFS server the tests talk to, exposed on localhost:8000.
      lakefs:
        image: "treeverse/lakefs:latest"
        ports:
          # Quoted to avoid YAML's sexagesimal/number parsing of port mappings.
          - "8000:8000"
        env:
          LAKEFS_DATABASE_TYPE: local
          LAKEFS_BLOCKSTORE_TYPE: s3
          LAKEFS_BLOCKSTORE_S3_CREDENTIALS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
          LAKEFS_BLOCKSTORE_S3_CREDENTIALS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          LAKEFS_AUTH_ENCRYPT_SECRET_KEY: some random secret string
          # Quoted: container env values are strings, not YAML booleans.
          LAKEFS_STATS_ENABLED: "false"
    steps:
      - name: Check-out code
        uses: actions/checkout@v3

      - name: Set up JDK 11
        uses: actions/setup-java@v3
        with:
          distribution: "temurin"
          java-version: "11"
          cache: "maven"

      - name: Build lakeFS Iceberg
        run: mvn clean install --batch-mode --update-snapshots -DskipTests -P\!sign-artifacts
        # run: mvn -DfinalName=lakefs-iceberg --batch-mode --update-snapshots package -DskipTests
        # run: mvn -Passembly -DfinalName=lakefs-iceberg --batch-mode --update-snapshots package -DskipTests

      # - name: Download lakeFS Iceberg
      #   uses: actions/download-artifact@v3
      #   with:
      #     name: lakefs-iceberg
      #     path: target/
      # - name: Download Spark App
      #   uses: actions/download-artifact@v3
      #   with:
      #     name: spark-apps
      #     path: test/spark/app/target/

      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          # Quoted: unquoted version numbers are parsed as YAML floats.
          python-version: "3.11"
          cache: "pip"

      - run: pip install -r ./test/requirements.txt

      # Random suffix so concurrent/repeated runs get distinct storage namespaces.
      - name: Generate uniquifying value
        id: unique
        run: echo "value=$RANDOM" >> $GITHUB_OUTPUT

      # - name: Start lakeFS for Spark tests
      #   uses: ./.github/actions/bootstrap-test-lakefs
      #   with:
      #     compose-directory: test/spark
      #   env:
      #     AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
      #     AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
      #     LAKEFS_DATABASE_TYPE: postgres
      #     LAKEFS_BLOCKSTORE_TYPE: s3
      #     LAKEFS_BLOCKSTORE_S3_CREDENTIALS_ACCESS_KEY_ID: ${{ secrets.ESTI_AWS_ACCESS_KEY_ID }}
      #     LAKEFS_BLOCKSTORE_S3_CREDENTIALS_SECRET_ACCESS_KEY: ${{ secrets.ESTI_AWS_SECRET_ACCESS_KEY }}

      - name: Use lakeFS with S3 gateway
        # working-directory: test/
        run: |
          python ./test/run-test.py \
            --storage_namespace s3://iceberg-lakefs-testing/${{ github.run_number }}-s3-gateway/${{ steps.unique.outputs.value }} \
            --repository gateway-test

      # - name: lakeFS Logs on Spark with gateway failure
      #   if: ${{ failure() }}
      #   continue-on-error: true
      #   working-directory: test/
      #   run: docker-compose logs --tail=2500 lakefs