Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

BCDA-8548: Update Attribution Integration Test #1015

Merged
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .github/workflows/cclf-import-test-deploy.yml
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ jobs:
CGO_ENABLED: 0
run: |
go build -o bin/bootstrap ./lambda/cclf/main.go
zip -j function.zip bin/bootstrap
zip -j function.zip bin/bootstrap ../conf/configs/dev.yml ../conf/configs/test.yml ../conf/configs/prod.yml
carlpartridge marked this conversation as resolved.
Show resolved Hide resolved
- uses: aws-actions/configure-aws-credentials@v4
with:
aws-region: ${{ vars.AWS_REGION }}
Expand Down
40 changes: 36 additions & 4 deletions .github/workflows/cclf-import-test-integration.yml
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,8 @@ jobs:
run:
working-directory: bcda
outputs:
filename: ${{ steps.createfile.outputs.FILENAME }}
cclffilename: ${{ steps.createfile.outputs.cclffilename }}
csvfilename: ${{ steps.createfile.outputs.csvfilename }}
steps:
- uses: actions/checkout@v4
- uses: aws-actions/configure-aws-credentials@v4
Expand All @@ -53,9 +54,15 @@ jobs:
year=$(date +'%y')
date=$(date +'%y%m%d')
time=$(date +'%H%M%S')

fname=T.BCD.A0001.ZCY24.D${date}.T${time}1
cclf8_fname=T.BCD.A0001.ZC8Y24.D${date}.T${time}1
echo "FILENAME=$cclf8_fname" >> "$GITHUB_OUTPUT"
echo "CCLFFILENAME=$cclf8_fname" >> "$GITHUB_OUTPUT"

csvname=T.PCPB.M${year}11.D${date}.T${time}1
echo "CSVFILENAME=$csvname" >> "$GITHUB_OUTPUT"

mv ../shared_files/cclf/archives/csv/P.PCPB.M2411.D181120.T1000000 ${csvname}

unzip ../shared_files/cclf/archives/valid/T.BCD.A0001.ZCY18.D181120.T1000000

Expand All @@ -70,6 +77,9 @@ jobs:
aws s3 cp --no-progress $fname \
s3://bfd-test-eft/bfdeft01/bcda/in/test/$fname

aws s3 cp --no-progress ${csvname} \
s3://bfd-test-eft/bfdeft01/bcda/in/test/${csvname}

verify:
needs: trigger
runs-on: self-hosted
Expand All @@ -93,15 +103,16 @@ jobs:
CONNECTION_INFO=/bcda/test/api/DATABASE_URL
- name: Verify CCLF file was ingested
env:
FILENAME: ${{needs.trigger.outputs.filename}}
CCLFFILENAME: ${{needs.trigger.outputs.cclffilename}}
CSVFILENAME: ${{needs.trigger.outputs.csvfilename}}
PGSSLMODE: require
# CAUTION: if changing the script below, validate that sensitive information is not printed in the workflow
run: |
HOST=$(aws rds describe-db-instances --db-instance-identifier bcda-test-rds 2>&1 | jq -r '.DBInstances[0].Endpoint.Address' 2>&1)
CONNECTION_URL=$(echo $CONNECTION_INFO 2>&1 | sed -E "s/@.*\/bcda/\@$HOST\/bcda/" 2>&1)

# Verify that we have a record of the CCLF file in the database
CCLF_FILE=`psql -t "$CONNECTION_URL" -c "SELECT id FROM cclf_files WHERE name = '$FILENAME' LIMIT 1" 2>&1`
CCLF_FILE=`psql -t "$CONNECTION_URL" -c "SELECT id FROM cclf_files WHERE name = '$CCLFFILENAME' LIMIT 1" 2>&1`
if [[ $? -ne 0 || -z $CCLF_FILE ]]; then
echo "cclf_file query returned zero results or command failed"
exit 1
Expand All @@ -118,3 +129,24 @@ jobs:
exit 1
fi
fi

# Verify that we have a record of the CSV file in the database
CSV_FILE=`psql -t "$CONNECTION_URL" -c "SELECT id FROM cclf_files WHERE name = '$CSVFILENAME' LIMIT 1" 2>&1`
if [[ $? -ne 0 || -z $CSV_FILE ]]; then
echo "csv_file query returned zero results or command failed"
exit 1
else

# Verify that the correct number of benes were imported into the database.
CSV_BENES=`psql -t "$CONNECTION_URL" -c "SELECT count(mbi) FROM cclf_beneficiaries WHERE file_id = $CSV_FILE" 2>&1`
if [[ $? -ne 0 || -z $CSV_BENES ]]; then
echo "CSV beneficiaries query returned zero results or command failed"
exit 1
fi
if [[ $(echo $CSV_BENES | xargs) != "5" ]]; then
echo "expected 5 beneficiaries imported from file, received $CSV_BENES".
exit 1
fi
fi
carlpartridge marked this conversation as resolved.
Show resolved Hide resolved


2 changes: 1 addition & 1 deletion bcda/cclf/csv_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -109,7 +109,7 @@ func (s *CSVTestSuite) TestImportCSV_Integration() {
}{
{"Import CSV attribution success", filepath.Join(s.basePath, "cclf/archives/csv/P.PCPB.M2411.D181120.T1000000"), 0, []string{"MBI000001", "MBI000002", "MBI000003", "MBI000004", "MBI000005"}, nil},
{"Import CSV attribution that already exists", filepath.Join(s.basePath, "cclf/archives/csv/P.PCPB.M2411.D181121.T1000000"), 0, []string{}, errors.New("already exists")},
{"Import CSV attribution invalid name", filepath.Join(s.basePath, "cclf/archives/csv/P.PC.M2411.D181120.T1000000"), 0, []string{}, errors.New("invalid filename")},
{"Import CSV attribution invalid name", filepath.Join(s.basePath, "cclf/archives/csv/P.PC.M2411.D181120.T1000000"), 0, []string{}, errors.New("Invalid filename")},
{"Import Opt Out failure", filepath.Join(s.basePath, "cclf/archives/csv/T#EFT.ON.ACO.NGD1800.DPRF.D181120.T1000010"), 0, []string{}, errors.New("File is type: opt-out. Skipping attribution import.")},
}

Expand Down
42 changes: 35 additions & 7 deletions bcda/cclf/local_fileprocessor_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -38,10 +38,18 @@ type LocalFileProcessorTestSuite struct {
}

// SetupTest runs before each test in the suite. It delegates to
// setupTestHelper so that individual subtests (e.g. TestCleanUpCSV's
// table-driven cases) can re-run the same per-test setup mid-test.
func (s *LocalFileProcessorTestSuite) SetupTest() {
	setupTestHelper(s)
}

// setupTestHelper copies the shared test fixtures into a fresh temporary
// directory and records the path and cleanup callback on the suite, giving
// each test an isolated, writable copy of ../../shared_files/.
func setupTestHelper(s *LocalFileProcessorTestSuite) {
	s.basePath, s.cleanup = testUtils.CopyToTemporaryDirectory(s.T(), "../../shared_files/")
}

func (s *LocalFileProcessorTestSuite) SetupSuite() {
setupSuiteHelper(s)
}

func setupSuiteHelper(s *LocalFileProcessorTestSuite) {
s.cclfRefDate = conf.GetEnv("CCLF_REF_DATE")
conf.SetEnv(s.T(), "CCLF_REF_DATE", "181201") // Needed to allow our static CCLF files to continue to be processed
dir, err := os.MkdirTemp("", "*")
Expand Down Expand Up @@ -73,10 +81,18 @@ func (s *LocalFileProcessorTestSuite) SetupSuite() {
}

// TearDownTest runs after each test in the suite. It delegates to
// tearDownTestHelper so subtests that re-ran setup mid-test can also
// re-run the matching teardown.
func (s *LocalFileProcessorTestSuite) TearDownTest() {
	tearDownTestHelper(s)
}

// tearDownTestHelper invokes the cleanup callback captured in
// setupTestHelper, removing the test's temporary copy of the fixtures.
func tearDownTestHelper(s *LocalFileProcessorTestSuite) {
	s.cleanup()
}

// TearDownSuite runs once after all tests in the suite. It delegates to
// tearDownSuiteHelper so subtests that re-ran suite setup mid-test can
// also re-run the matching suite teardown.
func (s *LocalFileProcessorTestSuite) TearDownSuite() {
	tearDownSuiteHelper(s)
}

// tearDownSuiteHelper restores the CCLF_REF_DATE env var saved during suite
// setup and deletes the temporary pending-deletion directory.
func tearDownSuiteHelper(s *LocalFileProcessorTestSuite) {
	conf.SetEnv(s.T(), "CCLF_REF_DATE", s.cclfRefDate)
	// NOTE(review): RemoveAll's error is ignored — presumably acceptable for
	// best-effort test cleanup, but worth confirming.
	os.RemoveAll(s.pendingDeletionDir)
}
Expand Down Expand Up @@ -401,22 +417,30 @@ func (s *LocalFileProcessorTestSuite) TestCleanUpCSV() {

tests := []struct {
name string
filepath string
filename string
deliverytime time.Time
imported bool
delFiles int
baseFiles int
}{
{"Not imported and expired", filepath.Join(s.basePath, "cclf/archives/csv/P.PCPB.M2411.D181120.T1000000"), expiredTime, false, 1, 1},
{"Not imported and not expired", filepath.Join(s.basePath, "cclf/archives/csv/P.PCPB.M2411.D181120.T1000000"), time.Now(), false, 1, 1},
{"Successfully imported", filepath.Join(s.basePath, "cclf/archives/csv/P.PCPB.M2411.D181120.T1000000"), time.Now(), true, 1, 1},
{"Not imported and expired", "cclf/archives/csv/P.PCPB.M2411.D181120.T1000000", expiredTime, false, 1, 2},
{"Not imported and not expired", "cclf/archives/csv/P.PCPB.M2411.D181120.T1000000", time.Now(), false, 0, 3},
{"Successfully imported", "cclf/archives/csv/P.PCPB.M2411.D181120.T1000000", time.Now(), true, 1, 2},
}

runcount := 0

for _, test := range tests {

s.T().Run(test.name, func(tt *testing.T) {
if runcount != 0 {
setupSuiteHelper(s)
setupTestHelper(s)
}

file.metadata.deliveryDate = test.deliverytime
file.imported = test.imported
file.filepath = test.filepath
file.filepath = filepath.Join(s.basePath, test.filename)
err := s.csvProcessor.CleanUpCSV(file)
assert.Nil(s.T(), err)
delDir, err := os.ReadDir(conf.GetEnv("PENDING_DELETION_DIR"))
Expand All @@ -432,10 +456,14 @@ func (s *LocalFileProcessorTestSuite) TestCleanUpCSV() {
s.FailNow("failed to read directory: %s", conf.GetEnv("PENDING_DELETION_DIR"), err)
}
assert.Len(s.T(), baseDir, test.baseFiles)
if test.baseFiles == 2 {
assert.Equal(s.T(), file.metadata.name, baseDir[0].Name())
runcount++
tearDownTestHelper(s)
if runcount < 3 {
tearDownSuiteHelper(s)
}

})

}

}
10 changes: 6 additions & 4 deletions bcda/cclf/parser.go
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,10 @@ func GetCSVMetadata(path string) (csvFileMetadata, error) {

acos, err := getACOConfigs()
if err != nil {
return csvFileMetadata{}, err
return csvFileMetadata{}, errors.New("Failed to load ACO configs")
}
if acos == nil {
return csvFileMetadata{}, errors.New("No ACO configs found.")
}

for _, v := range acos {
Expand All @@ -64,16 +67,15 @@ func GetCSVMetadata(path string) (csvFileMetadata, error) {
if len(parts) == v.AttributionFile.MetadataMatches {
metadata, err = validateCSVMetadata(parts)
if err != nil {
return csvFileMetadata{}, nil
return csvFileMetadata{}, err
}
metadata.acoID = v.Model
break
}
}

if metadata == (csvFileMetadata{}) {
err := fmt.Errorf("invalid filename for attribution file.")
return metadata, err
return metadata, errors.New("Invalid filename for csv attribution file")
}

metadata.name = path
Expand Down
15 changes: 11 additions & 4 deletions bcda/cclf/parser_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -66,10 +66,10 @@ func TestGetCSVMetadata(t *testing.T) {
fileType: models.FileTypeDefault,
},
},
{"invalid csv filename", "P.PPB.M2411." + fileDateTime, "invalid filename", csvFileMetadata{}},
{"invalid csv filename - extra digit", "P.PCPB.M24112." + fileDateTime, "invalid filename", csvFileMetadata{}},
{"invalid csv filename - env", "A.PCPB.M24112." + fileDateTime, "invalid filename", csvFileMetadata{}},
{"invalid csv filename - dupe match", "P.PCPBPCPB.M2411." + fileDateTime, "invalid filename", csvFileMetadata{}},
{"invalid csv filename", "P.PPB.M2411." + fileDateTime, "Invalid filename", csvFileMetadata{}},
{"invalid csv filename - extra digit", "P.PCPB.M24112." + fileDateTime, "Invalid filename", csvFileMetadata{}},
{"invalid csv filename - env", "A.PCPB.M24112." + fileDateTime, "Invalid filename", csvFileMetadata{}},
{"invalid csv filename - dupe match", "P.PCPBPCPB.M2411." + fileDateTime, "Invalid filename", csvFileMetadata{}},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
Expand Down Expand Up @@ -109,6 +109,13 @@ func TestValidateCCLFFileName(t *testing.T) {
fileType: models.FileTypeDefault,
},
},
{"valid csv test filename", "T.PCPB.M2411." + fileDateTime, nil, csvFileMetadata{
env: "test",
timestamp: validTime,
perfYear: 24,
fileType: models.FileTypeDefault,
},
},
carlpartridge marked this conversation as resolved.
Show resolved Hide resolved
{"invalid csv - file date too old", "P.PCPB.M2411.D201101.T0000001", errors.New("out of range"), csvFileMetadata{}},
{"invalid csv - file date in the future", "P.PCPB.M2411." + futureTime.Format(dateFormat), errors.New("out of range"), csvFileMetadata{}},
}
Expand Down
15 changes: 13 additions & 2 deletions bcda/lambda/cclf/main.go
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ import (

bcdaaws "github.com/CMSgov/bcda-app/bcda/aws"
"github.com/CMSgov/bcda-app/bcda/cclf"
"github.com/CMSgov/bcda-app/bcda/database"
"github.com/CMSgov/bcda-app/optout"

"github.com/CMSgov/bcda-app/conf"
Expand Down Expand Up @@ -55,6 +56,10 @@ func attributionImportHandler(ctx context.Context, sqsEvent events.SQSEvent) (st
if err != nil {
return "", err
}
err = loadBCDAParams()
if err != nil {
return "", err
}

// Send the entire filepath into the CCLF Importer so we are only
// importing the one file that was sent in the trigger.
Expand All @@ -79,7 +84,8 @@ func handleCSVImport(s3AssumeRoleArn, s3ImportPath string) (string, error) {
logger = logger.WithFields(logrus.Fields{"import_filename": s3ImportPath})

importer := cclf.CSVImporter{
Logger: logger,
Logger: logger,
Database: database.Connection,
FileProcessor: &cclf.S3FileProcessor{
Handler: optout.S3FileHandler{
Logger: logger,
Expand All @@ -88,7 +94,6 @@ func handleCSVImport(s3AssumeRoleArn, s3ImportPath string) (string, error) {
},
},
}

err := importer.ImportCSV(s3ImportPath)

if err != nil {
Expand Down Expand Up @@ -117,6 +122,12 @@ func loadBfdS3Params() (string, error) {
return param, nil
}

// loadBCDAParams loads the environment-specific BCDA configuration for the
// lambda's current ENV. It always returns nil; the error return exists so
// the call site can treat it like the other load* helpers.
func loadBCDAParams() error {
	conf.LoadLambdaEnvVars(conf.GetEnv("ENV"))
	return nil
}

func handleCclfImport(s3AssumeRoleArn, s3ImportPath string) (string, error) {
env := conf.GetEnv("ENV")
appName := conf.GetEnv("APP_NAME")
Expand Down
8 changes: 8 additions & 0 deletions conf/config.go
Original file line number Diff line number Diff line change
Expand Up @@ -94,6 +94,14 @@ func init() {

}

// LoadLambdaEnvVars replaces the package-level configuration with the
// environment-specific YAML file (e.g. "./prod.yml") bundled alongside the
// lambda binary, overwriting whatever init() loaded. The env argument names
// the deployment environment and selects the file to read.
//
// NOTE(review): the path is resolved relative to the process working
// directory — confirm the lambda packaging places the yml files there.
func LoadLambdaEnvVars(env string) {
	// A single path is loaded; no need to build up a slice first.
	envVars, state = loadConfigs(fmt.Sprintf("./%s.yml", env))
}

// This is the private helper function that sets up viper. This function is
// called by the init() function only once during initialization of the package.
func loadConfigs(locations ...string) (config, configStatus) {
Expand Down
6 changes: 6 additions & 0 deletions shared_files/cclf/archives/csv/T.PCPB.M2411.D241206.T1732561
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
MBIs
MBI000001
MBI000002
MBI000003
MBI000004
MBI000005
Loading