Skip to content

Commit

Permalink
BCDA-8208: Fix S3 path parsing (#960)
Browse files Browse the repository at this point in the history
## 🎫 Ticket

https://jira.cms.gov/browse/BCDA-8208

## 🛠 Changes

- Fix S3 path parsing

## ℹ️ Context

I observed failures with the lambdas attempting to list files in S3
within the bfdeft01 prefix. This is not the correct prefix — the
team-specific IAM roles can only access specific subfolders (i.e.
bfdeft01/bcda, bfdeft01/dpc, etc.) so the lambdas _should_ be trying to
list files within those subfolders.

When parsing the S3 prefix from the SQS event, it needs to split from
the last separator instead of the first separator.

## 🧪 Validation

Deployed to Dev and verified a successful import by uploading a file. (I
essentially performed this part of the integration test manually:
https://github.com/CMSgov/bcda-app/blob/fabde15cb4f460d7398dcdfcf61205840258afcd/.github/workflows/opt-out-import-test-integration.yml#L47-L48
but with a corrected filename, since that workflow uses the DPC filename.)

**Successful lambda logs**


![](https://github.com/CMSgov/bcda-app/assets/2308368/c13a4d4c-58da-466e-9df2-38a6e9b1903a)
  • Loading branch information
kyeah committed Jun 24, 2024
1 parent fabde15 commit e758d8f
Show file tree
Hide file tree
Showing 4 changed files with 22 additions and 18 deletions.
12 changes: 12 additions & 0 deletions bcda/aws/parsers.go
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,8 @@ package bcdaaws

import (
"encoding/json"
"fmt"
"strings"

"github.com/aws/aws-lambda-go/events"
"github.com/pkg/errors"
Expand Down Expand Up @@ -34,3 +36,13 @@ func ParseSQSEvent(event events.SQSEvent) (*events.S3Event, error) {

return &s3Event, nil
}

// ParseS3Directory returns the "directory" of an S3 object as
// "<bucket>" when the key has no path separator, or
// "<bucket>/<key prefix>" otherwise. The key is split at its LAST "/"
// so nested prefixes (e.g. "bfdeft01/bcda") are preserved intact,
// which matches the subfolder-scoped IAM permissions the lambdas use.
func ParseS3Directory(bucket, key string) string {
	lastSeparatorIdx := strings.LastIndex(key, "/")
	if lastSeparatorIdx == -1 {
		// Bare filename: the directory is just the bucket itself.
		return bucket
	}
	return fmt.Sprintf("%s/%s", bucket, key[:lastSeparatorIdx])
}
6 changes: 6 additions & 0 deletions bcda/aws/parsers_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -47,3 +47,9 @@ func TestParseSQSEvent(t *testing.T) {
assert.NotNil(t, s3Event)
assert.Equal(t, "demo-bucket", s3Event.Records[0].S3.Bucket.Name)
}

// TestParseS3Directory exercises keys with no separator, one directory
// level, and multiple nested directory levels.
func TestParseS3Directory(t *testing.T) {
	cases := []struct {
		bucket, key, want string
	}{
		{"my-bucket", "some-file", "my-bucket"},
		{"my-bucket", "my-dir/some-file", "my-bucket/my-dir"},
		{"my-bucket", "my-dir/nested/some-file", "my-bucket/my-dir/nested"},
	}
	for _, tc := range cases {
		assert.Equal(t, tc.want, ParseS3Directory(tc.bucket, tc.key))
	}
}
11 changes: 2 additions & 9 deletions bcda/lambda/cclf/main.go
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@ import (
"errors"
"fmt"
"os"
"strings"
"time"

"github.com/aws/aws-lambda-go/events"
Expand Down Expand Up @@ -56,14 +55,8 @@ func cclfImportHandler(ctx context.Context, sqsEvent events.SQSEvent) (string, e
return "", err
}

parts := strings.Split(e.S3.Object.Key, "/")

if len(parts) == 1 {
return handleCclfImport(s3AssumeRoleArn, e.S3.Bucket.Name)
} else {
directory := fmt.Sprintf("%s/%s", e.S3.Bucket.Name, parts[0])
return handleCclfImport(s3AssumeRoleArn, directory)
}
dir := bcdaaws.ParseS3Directory(e.S3.Bucket.Name, e.S3.Object.Key)
return handleCclfImport(s3AssumeRoleArn, dir)
}
}

Expand Down
11 changes: 2 additions & 9 deletions bcda/lambda/optout/main.go
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,6 @@ import (
"context"
"fmt"
"os"
"strings"
"time"

"github.com/aws/aws-lambda-go/events"
Expand Down Expand Up @@ -58,14 +57,8 @@ func optOutImportHandler(ctx context.Context, sqsEvent events.SQSEvent) (string,
return "", err
}

parts := strings.Split(e.S3.Object.Key, "/")

if len(parts) == 1 {
return handleOptOutImport(s3AssumeRoleArn, e.S3.Bucket.Name)
} else {
directory := fmt.Sprintf("%s/%s", e.S3.Bucket.Name, parts[0])
return handleOptOutImport(s3AssumeRoleArn, directory)
}
dir := bcdaaws.ParseS3Directory(e.S3.Bucket.Name, e.S3.Object.Key)
return handleOptOutImport(s3AssumeRoleArn, dir)
}
}

Expand Down

0 comments on commit e758d8f

Please sign in to comment.