BCDA-8548: Update Attribution Integration Test #1015

Merged
2 changes: 1 addition & 1 deletion .github/workflows/cclf-import-test-deploy.yml
@@ -22,7 +22,7 @@ jobs:
CGO_ENABLED: 0
run: |
go build -o bin/bootstrap ./lambda/cclf/main.go
zip -j function.zip bin/bootstrap
zip -j function.zip bin/bootstrap ../conf/configs/dev.yml ../conf/configs/test.yml ../conf/configs/prod.yml
- uses: aws-actions/configure-aws-credentials@v4
with:
aws-region: ${{ vars.AWS_REGION }}
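Because zip -j junks directory paths, bootstrap and the three config YAMLs all land at the root of function.zip, which is what the Lambda code relies on later when it loads ./<env>.yml. The snippet below is a minimal local sanity-check sketch (not part of this PR) that lists the archive entries with Go's standard archive/zip package and confirms the expected flattened names:

// zipcheck.go: standalone sketch (illustration only) that lists the entries in
// function.zip and verifies the names produced by the `zip -j` step above.
package main

import (
    "archive/zip"
    "fmt"
    "log"
)

func main() {
    r, err := zip.OpenReader("function.zip")
    if err != nil {
        log.Fatalf("open function.zip: %v", err)
    }
    defer r.Close()

    // Entries we expect at the archive root because -j strips directory paths.
    want := map[string]bool{
        "bootstrap": false,
        "dev.yml":   false,
        "test.yml":  false,
        "prod.yml":  false,
    }
    for _, f := range r.File {
        fmt.Println(f.Name)
        if _, ok := want[f.Name]; ok {
            want[f.Name] = true
        }
    }
    for name, found := range want {
        if !found {
            log.Fatalf("expected %s at the archive root, not found", name)
        }
    }
    fmt.Println("all expected entries present at the archive root")
}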
40 changes: 36 additions & 4 deletions .github/workflows/cclf-import-test-integration.yml
@@ -33,7 +33,8 @@ jobs:
run:
working-directory: bcda
outputs:
filename: ${{ steps.createfile.outputs.FILENAME }}
cclffilename: ${{ steps.createfile.outputs.cclffilename }}
csvfilename: ${{ steps.createfile.outputs.csvfilename }}
steps:
- uses: actions/checkout@v4
- uses: aws-actions/configure-aws-credentials@v4
@@ -53,9 +54,15 @@
year=$(date +'%y')
date=$(date +'%y%m%d')
time=$(date +'%H%M%S')

fname=T.BCD.A0001.ZCY24.D${date}.T${time}1
cclf8_fname=T.BCD.A0001.ZC8Y24.D${date}.T${time}1
echo "FILENAME=$cclf8_fname" >> "$GITHUB_OUTPUT"
echo "CCLFFILENAME=$cclf8_fname" >> "$GITHUB_OUTPUT"

csvname=T.PCPB.M${year}11.D${date}.T${time}1
echo "CSVFILENAME=$csvname" >> "$GITHUB_OUTPUT"

mv ../shared_files/cclf/archives/csv/P.PCPB.M2411.D181120.T1000000 ${csvname}

unzip ../shared_files/cclf/archives/valid/T.BCD.A0001.ZCY18.D181120.T1000000

@@ -70,6 +77,9 @@
aws s3 cp --no-progress $fname \
s3://bfd-test-eft/bfdeft01/bcda/in/test/$fname

aws s3 cp --no-progress ${csvname} \
s3://bfd-test-eft/bfdeft01/bcda/in/test/${csvname}

verify:
needs: trigger
runs-on: self-hosted
@@ -93,15 +103,16 @@
CONNECTION_INFO=/bcda/test/api/DATABASE_URL
- name: Verify CCLF file was ingested
env:
FILENAME: ${{needs.trigger.outputs.filename}}
CCLFFILENAME: ${{needs.trigger.outputs.cclffilename}}
CSVFILENAME: ${{needs.trigger.outputs.csvfilename}}
PGSSLMODE: require
# CAUTION: if changing the script below, validate that sensitive information is not printed in the workflow
run: |
HOST=$(aws rds describe-db-instances --db-instance-identifier bcda-test-rds 2>&1 | jq -r '.DBInstances[0].Endpoint.Address' 2>&1)
CONNECTION_URL=$(echo $CONNECTION_INFO 2>&1 | sed -E "s/@.*\/bcda/\@$HOST\/bcda/" 2>&1)

# Verify that we have a record of the CCLF file in the database
CCLF_FILE=`psql -t "$CONNECTION_URL" -c "SELECT id FROM cclf_files WHERE name = '$FILENAME' LIMIT 1" 2>&1`
CCLF_FILE=`psql -t "$CONNECTION_URL" -c "SELECT id FROM cclf_files WHERE name = '$CCLFFILENAME' LIMIT 1" 2>&1`
if [[ $? -ne 0 || -z $CCLF_FILE ]]; then
echo "cclf_file query returned zero results or command failed"
exit 1
@@ -118,3 +129,24 @@
exit 1
fi
fi

# Verify that we have a record of the CSV file in the database
CSV_FILE=`psql -t "$CONNECTION_URL" -c "SELECT id FROM cclf_files WHERE name = '$CSVFILENAME' LIMIT 1" 2>&1`
if [[ $? -ne 0 || -z $CSV_FILE ]]; then
echo "csv_file query returned zero results or command failed"
exit 1
else

# Verify that the correct number of benes were imported into the database.
CSV_BENES=`psql -t "$CONNECTION_URL" -c "SELECT count(mbi) FROM cclf_beneficiaries WHERE file_id = $CSV_FILE" 2>&1`
if [[ $? -ne 0 || -z $CSV_BENES ]]; then
echo "CSV beneficiaries query returned zero results or command failed"
exit 1
fi
if [[ $(echo $CSV_BENES | xargs) != "5" ]]; then
echo "expected 5 beneficiaries imported from file, received $CSV_BENES".
exit 1
fi
fi
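For reference, the createfile step above derives two timestamped object names and hands them to the verify job through step outputs: the CCLF8 archive (T.BCD.A0001.ZC8Y24.D<yymmdd>.T<hhmmss>1) and the CSV attribution file (T.PCPB.M<yy>11.D<yymmdd>.T<hhmmss>1), both of which the verify job then looks up in the cclf_files table. The following sketch (illustration only, not part of the PR) rebuilds those names in Go so the expected format is explicit:

// filenames.go: small sketch that mirrors the naming logic of the shell step above.
package main

import (
    "fmt"
    "time"
)

func main() {
    now := time.Now()
    year := now.Format("06")      // two-digit year, matches date +'%y'
    date := now.Format("060102")  // yymmdd, matches date +'%y%m%d'
    clock := now.Format("150405") // HHMMSS, matches date +'%H%M%S'

    // The CCLF8 name hardcodes performance year 24 (ZC8Y24), exactly as the
    // workflow does; the CSV name uses the current two-digit year instead.
    cclf8 := fmt.Sprintf("T.BCD.A0001.ZC8Y24.D%s.T%s1", date, clock)
    csv := fmt.Sprintf("T.PCPB.M%s11.D%s.T%s1", year, date, clock)

    // These are the names the verify job expects to find in cclf_files.
    fmt.Println("CCLF8 name:", cclf8)
    fmt.Println("CSV attribution name:", csv)
}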
16 changes: 12 additions & 4 deletions bcda/cclf/parser.go
@@ -41,8 +41,13 @@ type CSVParser struct {
func getACOConfigs() ([]service.ACOConfig, error) {
configs, err := service.LoadConfig()
if err != nil {
log.API.Errorf("error when fetching configs: %s", err)
return []service.ACOConfig{}, err
}
if configs == nil {
log.API.Errorf("no configs loaded: %s", err)
}
log.API.Infof("configs loaded: %s", configs)
return configs.ACOConfigs, err

}
@@ -55,25 +60,28 @@ func GetCSVMetadata(path string) (csvFileMetadata, error) {

acos, err := getACOConfigs()
if err != nil {
return csvFileMetadata{}, err
return csvFileMetadata{}, errors.New("Failed to load ACO configs")
}
if acos == nil {
return csvFileMetadata{}, errors.New("No ACO configs found.")
}

for _, v := range acos {
filenameRegexp := regexp.MustCompile(v.AttributionFile.NamePattern)
parts := filenameRegexp.FindStringSubmatch(path)
if len(parts) == v.AttributionFile.MetadataMatches {
metadata, err = validateCSVMetadata(parts)
log.API.Error("regex for aco (%s): %s", v.Model, v.AttributionFile.NamePattern)
if err != nil {
return csvFileMetadata{}, nil
return csvFileMetadata{}, err
}
metadata.acoID = v.Model
break
}
}

if metadata == (csvFileMetadata{}) {
err := fmt.Errorf("invalid filename for attribution file.")
return metadata, err
return metadata, errors.New("Invalid filename for csv attribution file")
}

metadata.name = path
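GetCSVMetadata tries each configured AttributionFile.NamePattern against the incoming path and accepts the first pattern whose submatch count equals MetadataMatches, then validates the captured groups into csvFileMetadata. The standalone sketch below illustrates that matching step; the pattern, group layout, and date layout here are assumptions for illustration, since the real patterns and match counts come from the ACO config YAMLs rather than from this code:

// regexsketch.go: standalone sketch of the filename-matching idea in GetCSVMetadata.
// The pattern below is hypothetical and stands in for an AttributionFile.NamePattern.
package main

import (
    "fmt"
    "regexp"
    "time"
)

func main() {
    // Hypothetical pattern for names like T.PCPB.M2411.D241206.T1732561:
    // captures the env prefix, performance year, file date, and file time.
    pattern := regexp.MustCompile(`(T|P)\.PCPB\.M(\d{2})\d{2}\.D(\d{6})\.T(\d{6})\d`)
    const metadataMatches = 5 // full match + 4 groups, analogous to AttributionFile.MetadataMatches

    name := "T.PCPB.M2411.D241206.T1732561"
    parts := pattern.FindStringSubmatch(name)
    if len(parts) != metadataMatches {
        fmt.Println("filename did not match the attribution pattern")
        return
    }

    // Combine the captured date and time, similar to what metadata validation does.
    ts, err := time.Parse("060102 150405", parts[3]+" "+parts[4])
    if err != nil {
        fmt.Println("bad date/time in filename:", err)
        return
    }
    fmt.Printf("env prefix=%s perfYear=%s timestamp=%s\n", parts[1], parts[2], ts.Format(time.RFC3339))
}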
7 changes: 7 additions & 0 deletions bcda/cclf/parser_test.go
@@ -109,6 +109,13 @@ func TestValidateCCLFFileName(t *testing.T) {
fileType: models.FileTypeDefault,
},
},
{"valid csv test filename", "T.PCPB.M2411." + fileDateTime, nil, csvFileMetadata{
env: "test",
timestamp: validTime,
perfYear: 24,
fileType: models.FileTypeDefault,
},
},
{"invalid csv - file date too old", "P.PCPB.M2411.D201101.T0000001", errors.New("out of range"), csvFileMetadata{}},
{"invalid csv - file date in the future", "P.PCPB.M2411." + futureTime.Format(dateFormat), errors.New("out of range"), csvFileMetadata{}},
}
23 changes: 21 additions & 2 deletions bcda/lambda/cclf/main.go
@@ -14,6 +14,7 @@ import (

bcdaaws "github.com/CMSgov/bcda-app/bcda/aws"
"github.com/CMSgov/bcda-app/bcda/cclf"
"github.com/CMSgov/bcda-app/bcda/database"
"github.com/CMSgov/bcda-app/optout"

"github.com/CMSgov/bcda-app/conf"
@@ -55,6 +56,10 @@ func attributionImportHandler(ctx context.Context, sqsEvent events.SQSEvent) (st
if err != nil {
return "", err
}
err = loadBCDAParams()
if err != nil {
return "", err
}

// Send the entire filepath into the CCLF Importer so we are only
// importing the one file that was sent in the trigger.
@@ -79,7 +84,8 @@ func handleCSVImport(s3AssumeRoleArn, s3ImportPath string) (string, error) {
logger = logger.WithFields(logrus.Fields{"import_filename": s3ImportPath})

importer := cclf.CSVImporter{
Logger: logger,
Logger: logger,
Database: database.Connection,
FileProcessor: &cclf.S3FileProcessor{
Handler: optout.S3FileHandler{
Logger: logger,
@@ -88,7 +94,13 @@
},
},
}

logger.Info("bcda lambda api config: ", os.Getenv("BCDA_API_CONFIG_PATH"))
wd, _ := os.Getwd()
logger.Infof("Working directory: %s", wd)
dirs, _ := os.ReadDir("./")
for _, v := range dirs {
logger.Infof("dir: %s, file: %s", wd, (v.Name()))
}
err := importer.ImportCSV(s3ImportPath)

if err != nil {
@@ -117,6 +129,13 @@ func loadBfdS3Params() (string, error) {
return param, nil
}

func loadBCDAParams() error {
env := conf.GetEnv("ENV")
conf.LoadLambdaEnvVars(env)

return nil
}

func handleCclfImport(s3AssumeRoleArn, s3ImportPath string) (string, error) {
env := conf.GetEnv("ENV")
appName := conf.GetEnv("APP_NAME")
8 changes: 8 additions & 0 deletions conf/config.go
@@ -94,6 +94,14 @@ func init() {

}

// LoadLambdaEnvVars loads the environment-specific config file (e.g. ./test.yml)
// from the Lambda's working directory into the package-level configuration.
func LoadLambdaEnvVars(env string) {
envPath := fmt.Sprintf("./%s.yml", env)
envVars, state = loadConfigs(envPath)
}

// This is the private helper function that sets up viper. This function is
// called by the init() function only once during initialization of the package.
func loadConfigs(locations ...string) (config, configStatus) {
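LoadLambdaEnvVars resolves ./<env>.yml relative to the process working directory, which only works inside the Lambda because the deploy workflow now ships dev.yml, test.yml, and prod.yml at the zip root next to bootstrap. The sketch below shows the expected call pattern from the handler side; conf.GetEnv and conf.LoadLambdaEnvVars come from this PR, while the os.Stat existence check and the mustWd helper are illustrative additions, not part of the change:

// Sketch of how the Lambda side is expected to use LoadLambdaEnvVars, with an
// explicit existence check added for illustration only.
package main

import (
    "fmt"
    "os"

    "github.com/CMSgov/bcda-app/conf"
)

func main() {
    env := conf.GetEnv("ENV") // e.g. "test"
    path := fmt.Sprintf("./%s.yml", env)

    // The bundled config is assumed to sit in the working directory because
    // `zip -j` flattened it next to the bootstrap binary at deploy time.
    if _, err := os.Stat(path); err != nil {
        fmt.Printf("config %s not found in %s: %v\n", path, mustWd(), err)
        os.Exit(1)
    }

    // Loads the env-specific YAML into the package-level config, mirroring
    // what loadBCDAParams does in bcda/lambda/cclf/main.go.
    conf.LoadLambdaEnvVars(env)
}

func mustWd() string {
    wd, err := os.Getwd()
    if err != nil {
        return "(unknown working directory)"
    }
    return wd
}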
6 changes: 6 additions & 0 deletions shared_files/cclf/archives/csv/T.PCPB.M2411.D241206.T1732561
@@ -0,0 +1,6 @@
MBIs
MBI000001
MBI000002
MBI000003
MBI000004
MBI000005