
BCDA-8301: Fix large object S3 trigger + update logging #97

# Integration test that triggers an Opt Out import in the test environment and
# verifies the imported data in the test database.
name: opt-out-import test integration

on:
  pull_request:
    paths:
      - .github/workflows/opt-out-import-test-integration.yml
      - .github/workflows/opt-out-import-test-deploy.yml
      - optout/**
      - bcda/suppression/**
      - bcda/lambda/optout/**
  workflow_dispatch:

# Ensure we have only one integration test running at a time
concurrency:
  group: opt-out-import-test-integration
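  # Without cancel-in-progress, an in-progress run finishes and GitHub
  # keeps at most one pending run waiting in this group.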

jobs:
  # Deploy first if triggered by pull_request
  deploy:
    if: ${{ github.event_name == 'pull_request' }}
    uses: ./.github/workflows/opt-out-import-test-deploy.yml
    secrets: inherit
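
  # always() lets this job run even when deploy is skipped (the
  # workflow_dispatch path); with a plain `needs: deploy`, a skipped
  # dependency would skip this job as well.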
  trigger:
    if: ${{ always() }}
    needs: deploy
    permissions:
      contents: read
      id-token: write
    runs-on: ubuntu-latest
    defaults:
      run:
        working-directory: ./optout
    outputs:
      filename: ${{ steps.createfile.outputs.FILENAME }}
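    # FILENAME is exported so the verify job can look the uploaded object
    # up in the suppression_files table.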
    steps:
      - uses: actions/checkout@v4
      - uses: aws-actions/configure-aws-credentials@v4
        with:
          aws-region: ${{ vars.AWS_REGION }}
          role-to-assume: arn:aws:iam::${{ secrets.ACCOUNT_ID }}:role/delegatedadmin/developer/bcda-test-opt-out-import-function
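      # Chain from the BCDA role above into the BFD account: role-chaining
      # reuses the credentials from the previous configure-aws-credentials
      # step to assume the bucket role.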
      - uses: aws-actions/configure-aws-credentials@v4
        with:
          aws-region: ${{ vars.AWS_REGION }}
          # Note that we use the BFD role with access to the bucket
          role-to-assume: arn:aws:iam::${{ secrets.BFD_ACCOUNT_ID }}:role/bfd-test-eft-bcda-bucket-role
          role-chaining: true
          role-skip-session-tagging: true
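      # Copy a synthetic 1-800-MEDICARE file to a unique, timestamped
      # EFT-style key so every run fires a fresh S3 event for the lambda.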
      - name: Upload test file to the BFD bucket to trigger lambda function via SNS message
        id: createfile
        run: |
          fname=T\#EFT.ON.ACO.NGD1800.DPRF.D$(date +'%y%m%d').T$(date +'%H%M%S')1
          echo "FILENAME=$fname" >> "$GITHUB_OUTPUT"
          aws s3 cp ../shared_files/synthetic1800MedicareFiles/test/T\#EFT.ON.ACO.NGD1800.DPRF.D181120.T1000009 \
            s3://bfd-test-eft/bfdeft01/bcda/in/test/$fname

  verify:
    needs: trigger
    runs-on: self-hosted
    env:
      ACTIONS_ALLOW_USE_UNSECURE_NODE_VERSION: "true"
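    # The v3 actions below are presumably pinned because the self-hosted
    # runner cannot host the newer Node runtime; the env var above lets
    # them run on an end-of-life Node version.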
    steps:
      - uses: actions/checkout@v3
      - uses: aws-actions/configure-aws-credentials@v3
        with:
          aws-region: ${{ vars.AWS_REGION }}
          role-to-assume: arn:aws:iam::${{ secrets.ACCOUNT_ID }}:role/delegatedadmin/developer/bcda-test-github-actions
      - name: Install psql
        run: |
          sudo amazon-linux-extras install postgresql14
      - name: Get database credentials
        uses: cmsgov/ab2d-bcda-dpc-platform/actions/aws-params-env-action@main
        env:
          AWS_REGION: ${{ vars.AWS_REGION }}
        with:
          params: |
            CONNECTION_INFO=/bcda/test/api/DATABASE_URL
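      # CONNECTION_INFO now carries the DATABASE_URL pulled from Parameter
      # Store; the script below substitutes the live RDS endpoint into it
      # before querying.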
      - name: Verify suppression file was ingested
        env:
          FILENAME: ${{ needs.trigger.outputs.filename }}
        # CAUTION: if changing the script below, validate that sensitive information is not printed in the workflow
        run: |
          # stderr is folded into the captured variables throughout so that
          # connection details never land in the job log
          HOST=$(aws rds describe-db-instances --db-instance-identifier bcda-test-rds 2>&1 | jq -r '.DBInstances[0].Endpoint.Address' 2>&1)
          # Swap the hostname in the stored DATABASE_URL for the live RDS endpoint
          CONNECTION_URL=$(echo "$CONNECTION_INFO" | sed -E "s/@.*\/bcda/@$HOST\/bcda/" 2>&1)
          SUPPRESSION_FILE=$(psql -t "$CONNECTION_URL" -c "SELECT id FROM suppression_files WHERE name = '$FILENAME' LIMIT 1" 2>&1)
          if [[ $? -ne 0 || -z $SUPPRESSION_FILE ]]; then
            echo "suppression_files query failed or returned no rows"
            exit 1
          fi
          SUPPRESSIONS=$(psql -t "$CONNECTION_URL" -c "SELECT count(mbi) FROM suppressions WHERE file_id = $SUPPRESSION_FILE" 2>&1)
          if [[ $? -ne 0 || -z $SUPPRESSIONS ]]; then
            echo "suppressions query failed or returned no rows"
            exit 1
          fi