From e758d8ff8e2d9259ebbe8045eb689faf5e4a6e06 Mon Sep 17 00:00:00 2001
From: Kevin Yeh
Date: Mon, 24 Jun 2024 16:02:25 -0400
Subject: [PATCH] BCDA-8208: Fix S3 path parsing (#960)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

## 🎫 Ticket

https://jira.cms.gov/browse/BCDA-8208

## 🛠 Changes

- Fix S3 path parsing

## ℹ️ Context

I observed failures when the lambdas attempted to list files in S3 under the bfdeft01 prefix. That is not the correct prefix: the team-specific IAM roles can only access their own subfolders (e.g. bfdeft01/bcda, bfdeft01/dpc), so the lambdas _should_ be listing files within those subfolders.

When parsing the S3 prefix from the SQS event, the object key needs to be split at the last separator instead of the first.

## 🧪 Validation

Deployed to Dev and verified a successful import by uploading a file (I essentially ran this part of the integration test manually: https://github.com/CMSgov/bcda-app/blob/fabde15cb4f460d7398dcdfcf61205840258afcd/.github/workflows/opt-out-import-test-integration.yml#L47-L48, but corrected the filename, since that step uses the DPC filename).

**Successful lambda logs**
![](https://github.com/CMSgov/bcda-app/assets/2308368/c13a4d4c-58da-466e-9df2-38a6e9b1903a)
---
 bcda/aws/parsers.go        | 12 ++++++++++++
 bcda/aws/parsers_test.go   |  6 ++++++
 bcda/lambda/cclf/main.go   | 11 ++---------
 bcda/lambda/optout/main.go | 11 ++---------
 4 files changed, 22 insertions(+), 18 deletions(-)

diff --git a/bcda/aws/parsers.go b/bcda/aws/parsers.go
index 2346d28b7..4042a4368 100644
--- a/bcda/aws/parsers.go
+++ b/bcda/aws/parsers.go
@@ -2,6 +2,8 @@ package bcdaaws
 
 import (
 	"encoding/json"
+	"fmt"
+	"strings"
 
 	"github.com/aws/aws-lambda-go/events"
 	"github.com/pkg/errors"
@@ -34,3 +36,13 @@ func ParseSQSEvent(event events.SQSEvent) (*events.S3Event, error) {
 
 	return &s3Event, nil
 }
+
+func ParseS3Directory(bucket, key string) string {
+	lastSeparatorIdx := strings.LastIndex(key, "/")
+
+	if lastSeparatorIdx == -1 {
+		return bucket
+	} else {
+		return fmt.Sprintf("%s/%s", bucket, key[:lastSeparatorIdx])
+	}
+}
diff --git a/bcda/aws/parsers_test.go b/bcda/aws/parsers_test.go
index 1575b355a..fadb102e7 100644
--- a/bcda/aws/parsers_test.go
+++ b/bcda/aws/parsers_test.go
@@ -47,3 +47,9 @@ func TestParseSQSEvent(t *testing.T) {
 	assert.NotNil(t, s3Event)
 	assert.Equal(t, "demo-bucket", s3Event.Records[0].S3.Bucket.Name)
 }
+
+func TestParseS3Directory(t *testing.T) {
+	assert.Equal(t, "my-bucket", ParseS3Directory("my-bucket", "some-file"))
+	assert.Equal(t, "my-bucket/my-dir", ParseS3Directory("my-bucket", "my-dir/some-file"))
+	assert.Equal(t, "my-bucket/my-dir/nested", ParseS3Directory("my-bucket", "my-dir/nested/some-file"))
+}
diff --git a/bcda/lambda/cclf/main.go b/bcda/lambda/cclf/main.go
index 94f2f7c9f..24272aea5 100644
--- a/bcda/lambda/cclf/main.go
+++ b/bcda/lambda/cclf/main.go
@@ -5,7 +5,6 @@ import (
 	"errors"
 	"fmt"
 	"os"
-	"strings"
 	"time"
 
 	"github.com/aws/aws-lambda-go/events"
@@ -56,14 +55,8 @@ func cclfImportHandler(ctx context.Context, sqsEvent events.SQSEvent) (string, e
 			return "", err
 		}
 
-		parts := strings.Split(e.S3.Object.Key, "/")
-
-		if len(parts) == 1 {
-			return handleCclfImport(s3AssumeRoleArn, e.S3.Bucket.Name)
-		} else {
-			directory := fmt.Sprintf("%s/%s", e.S3.Bucket.Name, parts[0])
-			return handleCclfImport(s3AssumeRoleArn, directory)
-		}
+		dir := bcdaaws.ParseS3Directory(e.S3.Bucket.Name, e.S3.Object.Key)
+		return handleCclfImport(s3AssumeRoleArn, dir)
 	}
 }
diff --git a/bcda/lambda/optout/main.go b/bcda/lambda/optout/main.go
index 8afdeaad2..e0cc2fe46 100644
--- a/bcda/lambda/optout/main.go
+++ b/bcda/lambda/optout/main.go
@@ -4,7 +4,6 @@ import (
 	"context"
 	"fmt"
 	"os"
-	"strings"
 	"time"
 
 	"github.com/aws/aws-lambda-go/events"
@@ -58,14 +57,8 @@ func optOutImportHandler(ctx context.Context, sqsEvent events.SQSEvent) (string,
 			return "", err
 		}
 
-		parts := strings.Split(e.S3.Object.Key, "/")
-
-		if len(parts) == 1 {
-			return handleOptOutImport(s3AssumeRoleArn, e.S3.Bucket.Name)
-		} else {
-			directory := fmt.Sprintf("%s/%s", e.S3.Bucket.Name, parts[0])
-			return handleOptOutImport(s3AssumeRoleArn, directory)
-		}
+		dir := bcdaaws.ParseS3Directory(e.S3.Bucket.Name, e.S3.Object.Key)
+		return handleOptOutImport(s3AssumeRoleArn, dir)
 	}
 }
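
For illustration, here is a minimal, standalone sketch of the directory resolution this patch introduces. The bucket name and object key below are hypothetical placeholders shaped like the bfdeft01/bcda layout described in the Context section, not values from a real event:

```go
package main

import (
	"fmt"
	"strings"
)

// ParseS3Directory mirrors the helper added in bcda/aws/parsers.go: it joins the
// bucket with everything up to the key's last "/", so nested team subfolders
// (e.g. bfdeft01/bcda) are preserved rather than being cut off at the first "/".
func ParseS3Directory(bucket, key string) string {
	lastSeparatorIdx := strings.LastIndex(key, "/")

	if lastSeparatorIdx == -1 {
		return bucket
	}
	return fmt.Sprintf("%s/%s", bucket, key[:lastSeparatorIdx])
}

func main() {
	// Hypothetical bucket and key, shaped like the events the lambdas receive.
	fmt.Println(ParseS3Directory("my-bucket", "bfdeft01/bcda/some-file"))
	// Output: my-bucket/bfdeft01/bcda
	// Splitting at the first separator (the old behavior) would have produced
	// my-bucket/bfdeft01, which the team-scoped IAM role is not allowed to list.
}
```

Splitting at the last separator keeps the full team subfolder in the listing prefix, which matches what the team-specific IAM roles can access.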