---
name: Image build and test

# Image name used by downstream build/push steps.
env:
  IMAGE_NAME: geoscienceaustralia/dea-coastlines

# Trigger on pushes/PRs to develop that touch code, data, tests, packaging
# or this workflow itself, plus on edited/published releases.
on:
  push:
    branches:
      - develop
    paths:
      - 'coastlines/**'
      - 'data/**'
      - 'tests/**'
      - '.github/workflows/dea-coastlines-image.yml'
      - 'Dockerfile'
      - 'requirements.in'
      - 'requirements.txt'
      - 'setup.py'
      - 'codecov.yaml'
  pull_request:
    branches:
      - develop
    paths:
      - 'coastlines/**'
      - 'data/**'
      - 'tests/**'
      - '.github/workflows/dea-coastlines-image.yml'
      - 'Dockerfile'
      - 'requirements.in'
      - 'requirements.txt'
      - 'setup.py'
      - 'codecov.yaml'
  release:
    types: [edited, published]

permissions:
  id-token: write       # Required for requesting JSON web token (OIDC)
  contents: write       # Required for actions/checkout
  pull-requests: write  # Required for validation results comment bot

jobs:
  test:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        # v4 runs on Node 20; v3 (Node 16) is deprecated. Matches the v4
        # generation of the AWS credentials action below.
        uses: actions/checkout@v4
        with:
          fetch-depth: 0  # full history, not a shallow clone

      - name: Configure AWS credentials
        uses: aws-actions/configure-aws-credentials@v4
        with:
          role-to-assume: arn:aws:iam::060378307146:role/github-actions-dea-notebooks--sandbox
          aws-region: ap-southeast-2
          role-duration-seconds: 7200  # 2 hours

      - name: Get database credentials
        run: |
          username_password=$(aws ssm get-parameter --name /dea-sandbox-eks/sandbox_reader/db.creds --with-decryption --query Parameter.Value --output text)
          echo DATACUBE_DB_URL=postgresql://${username_password}@localhost:5432/odc >> $GITHUB_ENV

      - name: Open port forward to RDS
        run: |
          npx basti connect \
            --custom-target-vpc vpc-086904199e505c1f6 \
            --custom-target-host db-aurora-dea-sandbox-eks-1.cos5zfpkso9m.ap-southeast-2.rds.amazonaws.com \
            --custom-target-port 5432 \
            --local-port 5432 &
          # Wait until the connection comes up, but, if it doesn't, don't hang forever.
          npx wait-on --timeout 120000 --interval 1000 tcp:127.0.0.1:5432
          echo "PGPORT=5432" >> $GITHUB_ENV
          echo "PGHOST=localhost" >> $GITHUB_ENV

      # - name: Build DEA Coastlines docker image
      #   timeout-minutes: 30
      #   shell: bash
      #   run: |
      #     docker build -t dea_coastlines .

      # - name: Run integration tests
      #   run: |
      #     # Download tide modelling files and unzip
      #     # TODO: Replace with S3 sync from dea-non-public-data
      #     wget --no-verbose https://www.dropbox.com/s/uemd8ib2vfw5nad/tide_models.zip?dl=1 -O tide_models.zip
      #     unzip -q tide_models.zip
      #     # Run integration tests using Docker, setting up datacube access, AWS configuration and
      #     # adding volumes that provide access to tide model data and allow us to export artifacts
      #     # from the run
      #     docker run \
      #       --net=host \
      #       --env DATACUBE_DB_URL \
      #       --env AWS_REGION \
      #       --env AWS_ACCESS_KEY_ID \
      #       --env AWS_SECRET_ACCESS_KEY \
      #       --env AWS_SESSION_TOKEN \
      #       --volume ${GITHUB_WORKSPACE}:/code \
      #       --volume ${GITHUB_WORKSPACE}/tide_models:/var/share/tide_models \
      #       --volume ${GITHUB_WORKSPACE}/artifacts:/mnt/artifacts \
      #       dea_coastlines pytest -v --cov=coastlines --cov-report=xml tests