Skip to content

Commit

Permalink
Added test workflow with basic python test
Browse files Browse the repository at this point in the history
  • Loading branch information
lynnro314 committed Jul 10, 2023
1 parent f0d452e commit 13c76f3
Show file tree
Hide file tree
Showing 4 changed files with 13,801 additions and 0 deletions.
107 changes: 107 additions & 0 deletions .github/workflows/tests.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,107 @@
# Workflow name shown in the GitHub Actions UI.
name: iceberg-tests

# Run on every pull request, and on every push to main.
on:
  pull_request:
  push:
    branches:
      - main

## These permissions are needed to interact with GitHub's OIDC Token endpoint.
#permissions:
#  id-token: write
#  contents: read
#  packages: write

jobs:
  # Builds the lakeFS Iceberg jar and publishes it as a workflow artifact
  # for the downstream test job to consume.
  build-lakefs-iceberg:
    name: Build lakeFS Iceberg package
    runs-on: ubuntu-20.04
    steps:
      - name: Check-out code
        uses: actions/checkout@v3

      - name: Set up JDK 11
        uses: actions/setup-java@v3
        with:
          distribution: "temurin"
          java-version: "11"
          cache: "maven"

      - name: Build lakeFS Iceberg
        # -DskipTests: unit tests are not run here; the isolation test job
        # below exercises the built artifact instead.
        run: mvn -DfinalName=lakefs-iceberg --batch-mode --update-snapshots package -DskipTests
        # run: mvn -Passembly -DfinalName=lakefs-iceberg --batch-mode --update-snapshots package -DskipTests

      - name: Store lakeFS Iceberg
        uses: actions/upload-artifact@v3
        with:
          name: lakefs-iceberg
          # NOTE(review): the artifact path hard-codes the Maven project
          # version (1.0-SNAPSHOT) — this step breaks silently if the pom
          # version changes; confirm it is kept in sync.
          path: target/lakefs-iceberg-1.0-SNAPSHOT.jar


iceberg-isolation-test:
name: Test lakeFS isolation with iceberg
needs: [ build-lakefs-iceberg ]
runs-on: ubuntu-20.04
services:
lakefs:
image: "treeverse/lakefs:latest"
ports:
- '8000:8000'
env:
LAKEFS_DATABASE_TYPE: local
LAKEFS_BLOCKSTORE_TYPE: s3
LAKEFS_BLOCKSTORE_S3_CREDENTIALS_ACCESS_KEY_ID: ${{ secrets.ESTI_AWS_ACCESS_KEY_ID }}
LAKEFS_BLOCKSTORE_S3_CREDENTIALS_SECRET_ACCESS_KEY: ${{ secrets.ESTI_AWS_SECRET_ACCESS_KEY }}
LAKEFS_AUTH_ENCRYPT_SECRET_KEY: some random secret string
LAKEFS_STATS_ENABLED: false

steps:
- name: Check-out code
uses: actions/checkout@v3

- name: Download lakeFS Iceberg
uses: actions/download-artifact@v3
with:
name: lakefs-iceberg
path: target/

# - name: Download Spark App
# uses: actions/download-artifact@v3
# with:
# name: spark-apps
# path: test/spark/app/target/

- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: 3.11
cache: 'pip'
- run: pip install -r ./test/requirements.txt

- name: Generate uniquifying value
id: unique
run: echo "value=$RANDOM" >> $GITHUB_OUTPUT

# - name: Start lakeFS for Spark tests
# uses: ./.github/actions/bootstrap-test-lakefs
# with:
# compose-directory: test/spark
# env:
# AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
# AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
# LAKEFS_DATABASE_TYPE: postgres
# LAKEFS_BLOCKSTORE_TYPE: s3
# LAKEFS_BLOCKSTORE_S3_CREDENTIALS_ACCESS_KEY_ID: ${{ secrets.ESTI_AWS_ACCESS_KEY_ID }}
# LAKEFS_BLOCKSTORE_S3_CREDENTIALS_SECRET_ACCESS_KEY: ${{ secrets.ESTI_AWS_SECRET_ACCESS_KEY }}

- name: Use lakeFS with S3 gateway
working-directory: test/
run: |
python ./run-test.py \
--storage_namespace s3://iceberg-lakefs-testing/${{ github.run_number }}-s3-gateway/${{ steps.unique.outputs.value }} \
--repository gateway-test \
# - name: lakeFS Logs on Spark with gateway failure
# if: ${{ failure() }}
# continue-on-error: true
# working-directory: test/
# run: docker-compose logs --tail=2500 lakefs
Loading

0 comments on commit 13c76f3

Please sign in to comment.