name: Image build and test
env:
  IMAGE_NAME: geoscienceaustralia/dea-coastlines
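
# NOTE: IMAGE_NAME is not referenced by the test job below; it is presumably
# consumed by related image build/push steps (an assumption).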

on:
  push:
    branches:
      - develop
    paths:
      - 'coastlines/**'
      - 'data/**'
      - 'tests/**'
      - '.github/workflows/dea-coastlines-image.yml'
      - 'Dockerfile'
      - 'requirements.in'
      - 'requirements.txt'
      - 'setup.py'
      - 'codecov.yaml'
  pull_request:
    branches:
      - develop
    paths:
      - 'coastlines/**'
      - 'data/**'
      - 'tests/**'
      - '.github/workflows/dea-coastlines-image.yml'
      - 'Dockerfile'
      - 'requirements.in'
      - 'requirements.txt'
      - 'setup.py'
      - 'codecov.yaml'
  release:
    types: [edited, published]

permissions:
  id-token: write # Required for requesting the JSON Web Token
  contents: write # Required for actions/checkout
  pull-requests: write # Required for the validation results comment bot

jobs:
  test:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v3
        with:
          fetch-depth: 0
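
      # The AWS role is assumed via GitHub's OIDC token (the reason for
      # id-token: write above), so no long-lived AWS secrets live in the repo.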
      - name: Configure AWS credentials
        uses: aws-actions/configure-aws-credentials@v4
        with:
          role-to-assume: arn:aws:iam::060378307146:role/github-actions-dea-notebooks--sandbox
          aws-region: ap-southeast-2
          role-duration-seconds: 7200 # 2 hours

      - name: Get database credentials
        run: |
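          # The SSM parameter is assumed to hold "username:password", which is
          # spliced directly into the PostgreSQL connection URL below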
          username_password=$(aws ssm get-parameter --name /dea-sandbox-eks/sandbox_reader/db.creds --with-decryption --query Parameter.Value --output text)
          echo DATACUBE_DB_URL=postgresql://${username_password}@localhost:5432/odc >> $GITHUB_ENV

      - name: Open port forward to RDS
        run: |
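          # basti tunnels through a Basti-managed bastion instance (via AWS SSM
          # Session Manager) so the runner can reach the private RDS endpoint,
          # exposing it locally on port 5432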
          npx basti connect \
            --custom-target-vpc vpc-086904199e505c1f6 \
            --custom-target-host db-aurora-dea-sandbox-eks-1.cos5zfpkso9m.ap-southeast-2.rds.amazonaws.com \
            --custom-target-port 5432 \
            --local-port 5432 &

          # Wait until the connection comes up, but, if it doesn't, don't hang forever
          npx wait-on --timeout 120000 --interval 1000 tcp:127.0.0.1:5432
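
          # PGHOST/PGPORT are standard libpq environment variables, so PostgreSQL
          # clients in later steps pick up the forwarded connection automatically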
echo "PGPORT=5432" >> $GITHUB_ENV
echo "PGHOST=localhost" >> $GITHUB_ENV

      - name: Build DEA Coastlines docker image
        timeout-minutes: 30
        shell: bash
        run: |
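          # Build the image from the repository's Dockerfile; the tag is local to
          # this job (nothing is pushed to a registry here)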
          docker build -t dea_coastlines .

      - name: Run integration tests
        run: |
          # Download tide modelling files and unzip
          # TODO: Replace with S3 sync from dea-non-public-data
          wget --no-verbose https://www.dropbox.com/s/uemd8ib2vfw5nad/tide_models.zip?dl=1 -O tide_models.zip
          unzip -q tide_models.zip

          # Run integration tests using Docker, setting up datacube access and AWS
          # configuration, and adding volumes that provide access to tide model
          # data and allow us to export artifacts from the run
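          # (--net=host shares the runner's network namespace, so the container
          # can reach the forwarded database on localhost:5432)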
          docker run \
            --net=host \
            --env DATACUBE_DB_URL \
            --env AWS_REGION \
            --env AWS_ACCESS_KEY_ID \
            --env AWS_SECRET_ACCESS_KEY \
            --env AWS_SESSION_TOKEN \
            --volume ${GITHUB_WORKSPACE}:/code \
            --volume ${GITHUB_WORKSPACE}/tide_models:/var/share/tide_models \
            --volume ${GITHUB_WORKSPACE}/artifacts:/mnt/artifacts \
            dea_coastlines sh -c "
              pytest -v --cov=coastlines --cov-report=xml tests &&
              cp coverage.xml /mnt/artifacts/ &&
              cp data/processed/tests/coastlines_tests.gpkg /mnt/artifacts/coastlines_tests.gpkg &&
              cp data/processed/tests/coastlines_tests.shp.zip /mnt/artifacts/coastlines_tests.shp.zip &&
              cp data/validation/processed/stats_tests.png /mnt/artifacts/stats_tests.png &&
              cp data/validation/processed/stats_tests.csv /mnt/artifacts/stats_tests.csv &&
              cp tests/README.md /mnt/artifacts/README.md
            "

          # Copy out validation outputs produced by the integration tests and place
          # them in the correct locations on the GitHub runner so they can be
          # committed back into the repo:
          # cp ./artifacts/stats_tests.png ./data/validation/processed/stats_tests.png
          # cp ./artifacts/stats_tests.csv ./data/validation/processed/stats_tests.csv
          # cp ./artifacts/README.md ./tests/README.md
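
# A rough sketch (not part of the workflow) of reproducing the test run locally,
# assuming valid AWS credentials in your environment, an accessible ODC database
# in DATACUBE_DB_URL, and tide_models/ unpacked in the working directory:
#
#   docker build -t dea_coastlines .
#   docker run --net=host \
#     --env DATACUBE_DB_URL --env AWS_ACCESS_KEY_ID --env AWS_SECRET_ACCESS_KEY \
#     --volume $(pwd):/code \
#     --volume $(pwd)/tide_models:/var/share/tide_models \
#     dea_coastlines pytest -v --cov=coastlines tests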