diff --git a/internal/alerting/pagerduty.go b/internal/alerting/pagerduty.go
index 9e27554..78d6d7c 100644
--- a/internal/alerting/pagerduty.go
+++ b/internal/alerting/pagerduty.go
@@ -120,7 +120,7 @@ func (pd *PagerDuty) ping() error {
 		return fmt.Errorf("pagerduty list abilities: %v", err)
 	}
 
 	if len(resp.Abilities) <= 0 {
-		return fmt.Errorf("pagerduty: missing abilities")
+		return errors.New("pagerduty: missing abilities")
 	}
 	return nil
diff --git a/internal/alerting/slack.go b/internal/alerting/slack.go
index 2733455..21f417d 100644
--- a/internal/alerting/slack.go
+++ b/internal/alerting/slack.go
@@ -54,7 +54,7 @@ func (s *Slack) AuthTest() error {
 		return fmt.Errorf("slack auth test: %v", err)
 	}
 
 	if resp.UserID == "" {
-		return fmt.Errorf("slack: missing user_id")
+		return errors.New("slack: missing user_id")
 	}
 	return nil
diff --git a/internal/dbtest/sql.go b/internal/dbtest/sql.go
index f77d721..0c9add4 100644
--- a/internal/dbtest/sql.go
+++ b/internal/dbtest/sql.go
@@ -66,13 +66,13 @@ func CreateTestDatabase(t *testing.T, config database.DatabaseConfig) database.D
 	}
 
 	dbName := "test" + base.ID()
-	_, err = rootDb.Exec(fmt.Sprintf("CREATE DATABASE %s", dbName))
+	_, err = rootDb.Exec("CREATE DATABASE " + dbName)
 	if err != nil {
 		t.Fatal(err)
 	}
 
 	t.Cleanup(func() {
-		rootDb.Exec(fmt.Sprintf("DROP DATABASE %s", dbName))
+		rootDb.Exec("DROP DATABASE " + dbName)
 		rootDb.Close()
 	})
diff --git a/internal/environment_test.go b/internal/environment_test.go
index 87ce939..81cc3b6 100644
--- a/internal/environment_test.go
+++ b/internal/environment_test.go
@@ -60,7 +60,7 @@ func TestEnvironment_Startup(t *testing.T) {
 	}
 
 	env, err := NewEnvironment(env)
-	a.Nil(err)
+	a.NoError(err)
 
 	t.Cleanup(env.Shutdown)
 }
diff --git a/internal/incoming/odfi/prenotes.go b/internal/incoming/odfi/prenotes.go
index 7c6b4d4..9f8f579 100644
--- a/internal/incoming/odfi/prenotes.go
+++ b/internal/incoming/odfi/prenotes.go
@@ -21,6 +21,7 @@ import (
 	"context"
 	"fmt"
 	"path/filepath"
+	"strconv"
 	"strings"
 
 	"github.com/moov-io/ach"
@@ -92,12 +93,12 @@ func (pc *prenoteEmitter) Handle(ctx context.Context, logger log.Logger, file Fi
 				"origin":      log.String(file.ACHFile.Header.ImmediateOrigin),
 				"destination": log.String(file.ACHFile.Header.ImmediateDestination),
 			})
-			logger.Log(fmt.Sprintf("odfi: pre-notification traceNumber=%s", entries[j].TraceNumber))
+			logger.Log("odfi: pre-notification traceNumber=" + entries[j].TraceNumber)
 
 			prenoteEntriesProcessed.With(
 				"origin", file.ACHFile.Header.ImmediateOrigin,
 				"destination", file.ACHFile.Header.ImmediateDestination,
-				"transactionCode", fmt.Sprintf("%d", entries[j].TransactionCode),
+				"transactionCode", strconv.Itoa(entries[j].TransactionCode),
 			).Add(1)
 		}
 	}
diff --git a/internal/incoming/odfi/reconciliation_test.go b/internal/incoming/odfi/reconciliation_test.go
index ca38c4f..0013b4b 100644
--- a/internal/incoming/odfi/reconciliation_test.go
+++ b/internal/incoming/odfi/reconciliation_test.go
@@ -65,7 +65,7 @@ func TestCreditReconciliation(t *testing.T) {
 		}
 		batch.AddEntry(&entry)
 	}
-	require.Equal(t, 20, len(file.Batches))
+	require.Len(t, file.Batches, 20)
 
 	// Set ValidateOpts similar to what Processor sets
 	file.SetValidation(&ach.ValidateOpts{
@@ -122,7 +122,7 @@ func TestCreditReconciliation(t *testing.T) {
 			return len(sent) > 0
 		}, 5*time.Second, 100*time.Millisecond)
 
-		require.Equal(t, 499, len(sent)) // one from previous subtest
+		require.Len(t, sent, 499) // one from previous subtest
 
 		foundTraces := make(map[string]bool)
 		for i := range sent {
@@ -132,6 +132,6 @@ func TestCreditReconciliation(t *testing.T) {
 			require.True(t, ok)
 			foundTraces[event.Entry.TraceNumber] = true
 		}
-		require.Equal(t, 499, len(foundTraces)) // 499 unique trace numbers
+		require.Len(t, foundTraces, 499) // 499 unique trace numbers
 	})
 }
diff --git a/internal/incoming/stream/streamtest/streamtest.go b/internal/incoming/stream/streamtest/streamtest.go
index a2f975b..8512735 100644
--- a/internal/incoming/stream/streamtest/streamtest.go
+++ b/internal/incoming/stream/streamtest/streamtest.go
@@ -19,7 +19,7 @@ package streamtest
 
 import (
 	"context"
-	"fmt"
+	"testing"
 
 	"github.com/moov-io/achgateway/internal/incoming/stream"
@@ -36,7 +36,7 @@ func InmemStream(t *testing.T) (stream.Publisher, stream.Subscription) {
 	conf := &service.Config{
 		Inbound: service.Inbound{
 			InMem: &service.InMemory{
-				URL: fmt.Sprintf("mem://%s", t.Name()),
+				URL: "mem://" + t.Name(),
 			},
 		},
 	}
diff --git a/internal/notify/mailslurper_test.go b/internal/notify/mailslurper_test.go
index aff5f36..f9a3f14 100644
--- a/internal/notify/mailslurper_test.go
+++ b/internal/notify/mailslurper_test.go
@@ -5,7 +5,6 @@ package notify
 
 import (
-	"fmt"
 	"net"
 	"testing"
 	"time"
@@ -50,7 +49,7 @@ func spawnMailslurp(t *testing.T) *mailslurpDeployment {
 	err = pool.Retry(func() error {
 		time.Sleep(1 * time.Second)
 
-		conn, err := net.Dial("tcp", fmt.Sprintf("localhost:%s", dep.SMTPPort()))
+		conn, err := net.Dial("tcp", "localhost:"+dep.SMTPPort())
 		if err != nil {
 			return err
 		}
diff --git a/internal/notify/pagerduty.go b/internal/notify/pagerduty.go
index 10edde2..8738859 100644
--- a/internal/notify/pagerduty.go
+++ b/internal/notify/pagerduty.go
@@ -47,7 +47,7 @@ func (pd *PagerDuty) Ping() error {
 		return fmt.Errorf("pagerduty list abilities: %v", err)
 	}
 
 	if len(resp.Abilities) <= 0 {
-		return fmt.Errorf("pagerduty: missing abilities")
+		return errors.New("pagerduty: missing abilities")
 	}
 	return nil
diff --git a/internal/notify/slack.go b/internal/notify/slack.go
index 4e4f3b3..8be99da 100644
--- a/internal/notify/slack.go
+++ b/internal/notify/slack.go
@@ -63,9 +63,9 @@ func marshalSlackMessage(status uploadStatus, msg *Message) string {
 	slackMsg := fmt.Sprintf("%s %s of %s", status, msg.Direction, msg.Filename)
 	if msg.Hostname != "" {
 		if msg.Direction == Upload {
-			slackMsg += fmt.Sprintf(" to %s", msg.Hostname)
+			slackMsg += " to " + msg.Hostname
 		} else {
-			slackMsg += fmt.Sprintf(" from %s", msg.Hostname)
+			slackMsg += " from " + msg.Hostname
 		}
 	}
 	slackMsg += " with ODFI server\n"
diff --git a/internal/pipeline/aggregate.go b/internal/pipeline/aggregate.go
index cf4075b..0bdf8a1 100644
--- a/internal/pipeline/aggregate.go
+++ b/internal/pipeline/aggregate.go
@@ -436,7 +436,7 @@ func formatHolidayMessage(day *schedule.Day, shardName string) string {
 	hostname, _ := os.Hostname()
 
 	if shardName != "" {
-		shardName = fmt.Sprintf("for %s", shardName)
+		shardName = "for " + shardName
 	}
 
 	return strings.TrimSpace(fmt.Sprintf("%s %s so %s will skip processing %s", day.Time.Format("Jan 02"), name, hostname, shardName))
diff --git a/internal/pipeline/events_api_test.go b/internal/pipeline/events_api_test.go
index 65fd669..69ba9ee 100644
--- a/internal/pipeline/events_api_test.go
+++ b/internal/pipeline/events_api_test.go
@@ -169,11 +169,11 @@ func TestEventsAPI_FileUploadedErrors(t *testing.T) {
 		for i := range paths {
 			address := fmt.Sprintf("http://%s/shards/testing/pipeline/%s/file-uploaded", adminServer.BindAddr(), paths[i])
 			req, err := http.NewRequest("PUT", address, nil)
-			require.NoError(t, err, fmt.Sprintf("on address %s", address))
+			require.NoError(t, err, "on address "+address)
 
 			resp, err := http.DefaultClient.Do(req)
 			require.NoError(t, err)
 			defer resp.Body.Close()
-			require.Equal(t, http.StatusNotFound, resp.StatusCode, fmt.Sprintf("on address %s", address))
+			require.Equal(t, http.StatusNotFound, resp.StatusCode, "on address "+address)
 		}
 	})
diff --git a/internal/pipeline/file_receiver_test.go b/internal/pipeline/file_receiver_test.go
index 65add93..3e89527 100644
--- a/internal/pipeline/file_receiver_test.go
+++ b/internal/pipeline/file_receiver_test.go
@@ -81,7 +81,7 @@ func testFileReceiver(t *testing.T) *TestFileReceiver {
 	conf := &service.Config{
 		Inbound: service.Inbound{
 			InMem: &service.InMemory{
-				URL: fmt.Sprintf("mem://%s", t.Name()),
+				URL: "mem://" + t.Name(),
 			},
 		},
 		Sharding: service.Sharding{
diff --git a/internal/pipeline/manual_cutoff_times_test.go b/internal/pipeline/manual_cutoff_times_test.go
index 458d65f..45af418 100644
--- a/internal/pipeline/manual_cutoff_times_test.go
+++ b/internal/pipeline/manual_cutoff_times_test.go
@@ -94,7 +94,7 @@ func TestFileReceiver__ManualCutoffErr(t *testing.T) {
 	require.NoError(t, err)
 
 	require.Len(t, resp.Shards, 1)
-	require.Equal(t, *resp.Shards["testing"], "bad thing")
+	require.Equal(t, "bad thing", *resp.Shards["testing"])
 }
 
 func setupFileReceiver(t *testing.T, waiterResponse error) (*FileReceiver, *sync.WaitGroup) {
diff --git a/internal/pipeline/merging.go b/internal/pipeline/merging.go
index 97f5acf..9024ee7 100644
--- a/internal/pipeline/merging.go
+++ b/internal/pipeline/merging.go
@@ -104,7 +104,7 @@ func (m *filesystemMerging) writeACHFile(ctx context.Context, xfer incoming.ACHF
 	}
 
 	fileID := strings.TrimSuffix(xfer.FileID, ".ach")
-	path := filepath.Join("mergable", m.shard.Name, fmt.Sprintf("%s.ach", fileID))
+	path := filepath.Join("mergable", m.shard.Name, fileID+".ach")
 	if err := m.storage.WriteFile(path, buf.Bytes()); err != nil {
 		return err
 	}
@@ -119,7 +119,7 @@ func (m *filesystemMerging) writeACHFile(ctx context.Context, xfer incoming.ACHF
 		}).Logf("ERROR encoding ValidateOpts: %v", err)
 	}
 
-	path := filepath.Join("mergable", m.shard.Name, fmt.Sprintf("%s.json", fileID))
+	path := filepath.Join("mergable", m.shard.Name, fileID+".json")
 	if err := m.storage.WriteFile(path, buf.Bytes()); err != nil {
 		m.logger.Warn().With(log.Fields{
 			"fileID": log.String(xfer.FileID),
@@ -140,7 +140,7 @@ func (m *filesystemMerging) HandleCancel(ctx context.Context, cancel incoming.Ca
 	defer span.End()
 
 	fileID := strings.TrimSuffix(cancel.FileID, ".ach")
-	path := filepath.Join("mergable", m.shard.Name, fmt.Sprintf("%s.ach", fileID))
+	path := filepath.Join("mergable", m.shard.Name, fileID+".ach")
 
 	// Check if the file exists already
 	originalFile, _ := m.storage.Open(path)
@@ -563,7 +563,7 @@ func (m *filesystemMerging) saveMergedFile(ctx context.Context, dir string, file
 	}
 
 	name := hash(buf.Bytes())
-	path := filepath.Join(dir, fmt.Sprintf("%s.ach", name))
+	path := filepath.Join(dir, name+".ach")
 
 	span.SetAttributes(
 		attribute.String("achgateway.merged_filename", path),
@@ -584,7 +584,7 @@ func (m *filesystemMerging) saveMergedFile(ctx context.Context, dir string, file
 		return fmt.Errorf("marshal of merged ACH file validate opts: %w", err)
 	}
 
-	path = filepath.Join(dir, fmt.Sprintf("%s.json", name))
+	path = filepath.Join(dir, name+".json")
 	err = m.storage.WriteFile(path, buf.Bytes())
 	if err != nil {
 		return fmt.Errorf("writing merged ACH file validate opts: %w", err)
diff --git a/internal/pipeline/merging_test.go b/internal/pipeline/merging_test.go
index f737b60..6b97129 100644
--- a/internal/pipeline/merging_test.go
+++ b/internal/pipeline/merging_test.go
@@ -47,15 +47,15 @@ func TestMerging__getCanceledFiles(t *testing.T) {
 	dir := filepath.Join(root, "test-2024")
 	require.NoError(t, os.MkdirAll(dir, 0777))
 
-	name1 := fmt.Sprintf("%s.ach", base.ID())
+	name1 := base.ID() + ".ach"
 	xfer1 := write(t, filepath.Join(dir, name1), nil)
-	write(t, filepath.Join(dir, fmt.Sprintf("%s.canceled", xfer1)), nil)
+	write(t, filepath.Join(dir, xfer1+".canceled"), nil)
 
-	name2 := fmt.Sprintf("%s.ach", base.ID())
+	name2 := base.ID() + ".ach"
 	write(t, filepath.Join(dir, name2), nil)
 
-	name3 := fmt.Sprintf("%s.ach", base.ID())
-	write(t, filepath.Join(dir, fmt.Sprintf("%s.canceled", name3)), nil)
+	name3 := base.ID() + ".ach"
+	write(t, filepath.Join(dir, name3+".canceled"), nil)
 
 	fs, err := storage.NewFilesystem(root)
 	require.NoError(t, err)
@@ -76,12 +76,12 @@ func TestMerging__getNonCanceledMatches(t *testing.T) {
 	dir := filepath.Join(root, "test-2021")
 	require.NoError(t, os.Mkdir(dir, 0777))
 
-	xfer1 := write(t, filepath.Join(dir, fmt.Sprintf("%s.ach", base.ID())), nil)
+	xfer1 := write(t, filepath.Join(dir, base.ID()+".ach"), nil)
 
-	cancel1 := write(t, filepath.Join(dir, fmt.Sprintf("%s.ach.canceled", base.ID())), nil)
+	cancel1 := write(t, filepath.Join(dir, base.ID()+".ach.canceled"), nil)
 
-	xfer2 := write(t, filepath.Join(dir, fmt.Sprintf("%s.ach", base.ID())), nil)
-	cancel2 := write(t, filepath.Join(dir, fmt.Sprintf("%s.canceled", xfer2)), nil)
+	xfer2 := write(t, filepath.Join(dir, base.ID()+".ach"), nil)
+	cancel2 := write(t, filepath.Join(dir, xfer2+".canceled"), nil)
 
 	fs, err := storage.NewFilesystem(root)
 	require.NoError(t, err)
@@ -122,9 +122,9 @@ func write(t *testing.T, where string, contents []byte) string {
 }
 
 func TestMerging_fileAcceptor(t *testing.T) {
-	name1 := fmt.Sprintf("%s.ach", base.ID())
-	name2 := fmt.Sprintf("%s.ach", base.ID())
-	json1 := fmt.Sprintf("%s.json", base.ID())
+	name1 := base.ID() + ".ach"
+	name2 := base.ID() + ".ach"
+	json1 := base.ID() + ".json"
 
 	output := fileAcceptor(nil)(name1)
 	require.Equal(t, ach.AcceptFile, output)
@@ -245,7 +245,7 @@ func TestMerging_mappings(t *testing.T) {
 	expected := []string{"duplicate-trace.ach", "ppd-debit.ach", "ppd-debit2.ach", "ppd-debit3.ach", "ppd-debit4.ach"}
 	require.ElementsMatch(t, expected, mapped[0].InputFilepaths)
 	require.Equal(t, "MAPPING-0.ach", mapped[0].UploadedFilename)
-	require.Equal(t, 2, len(mapped[0].ACHFile.Batches))
+	require.Len(t, mapped[0].ACHFile.Batches, 2)
 }
 
 func enqueueFile(t *testing.T, merging XferMerging, path string) {
@@ -317,7 +317,7 @@ func TestMerging__writeACHFile(t *testing.T) {
 
 	// Verify the .ach and .json files were written
 	mergableFilenames := getFilenames(t, m.storage, "mergable/testing")
-	expected := []string{fmt.Sprintf("%s.ach", fileID), fmt.Sprintf("%s.json", fileID)}
+	expected := []string{fileID + ".ach", fileID + ".json"}
 	require.ElementsMatch(t, expected, mergableFilenames)
 
 	var mergeConditions ach.Conditions
diff --git a/internal/schedule/cutoff.go b/internal/schedule/cutoff.go
index f585ecc..4e21dc6 100644
--- a/internal/schedule/cutoff.go
+++ b/internal/schedule/cutoff.go
@@ -105,7 +105,7 @@ func (ct *CutoffTimes) register(tz string, timestamp string) error {
 	var zone string
 	var location *time.Location
 	if tz != "" {
-		zone = fmt.Sprintf("CRON_TZ=%s", tz)
+		zone = "CRON_TZ=" + tz
 		l, _ := time.LoadLocation(tz)
 		location = l
 	} else {
diff --git a/internal/server.go b/internal/server.go
index 17191e6..37beb46 100644
--- a/internal/server.go
+++ b/internal/server.go
@@ -100,7 +100,7 @@ func bootAdminServer(errs chan<- error, logger log.Logger, config service.Admin)
 	}
 
 	go func() {
-		logger.Info().Log(fmt.Sprintf("listening on %s", adminServer.BindAddr()))
+		logger.Info().Log("listening on " + adminServer.BindAddr())
 		if err := adminServer.Listen(); err != nil {
 			errs <- logger.Fatal().LogErrorf("problem starting admin http: %w", err).Err()
 		}
diff --git a/internal/service/config_test.go b/internal/service/config_test.go
index ff5cda7..879a364 100644
--- a/internal/service/config_test.go
+++ b/internal/service/config_test.go
@@ -35,7 +35,7 @@ func Test_ConfigLoading(t *testing.T) {
 
 	gc := &service.GlobalConfig{}
 	err := ConfigService.Load(gc)
-	require.Nil(t, err)
+	require.NoError(t, err)
 
 	// Validate config
 	require.NoError(t, gc.ACHGateway.Validate())
diff --git a/internal/service/model_audit_test.go b/internal/service/model_audit_test.go
index 85f4744..29f0528 100644
--- a/internal/service/model_audit_test.go
+++ b/internal/service/model_audit_test.go
@@ -28,5 +28,5 @@ func TestSignerMasking(t *testing.T) {
 	cfg := &Signer{KeyFile: "/foo.pem", KeyPassword: "secret"}
 	bs, err := json.Marshal(cfg)
 	require.NoError(t, err)
-	require.Equal(t, bs, []byte(`{"KeyFile":"/foo.pem","KeyPassword":"s****t"}`))
+	require.JSONEq(t, string(bs), `{"KeyFile":"/foo.pem","KeyPassword":"s****t"}`)
 }
diff --git a/internal/service/model_upload_test.go b/internal/service/model_upload_test.go
index 1b3ff29..513b4cf 100644
--- a/internal/service/model_upload_test.go
+++ b/internal/service/model_upload_test.go
@@ -19,7 +19,6 @@ package service
 
 import (
 	"encoding/json"
-	"strings"
 	"testing"
 
 	"github.com/stretchr/testify/require"
@@ -29,12 +28,12 @@ func TestFTPMasking(t *testing.T) {
 	cfg := &FTP{Password: "secret"}
 	bs, err := json.Marshal(cfg)
 	require.NoError(t, err)
-	require.True(t, strings.Contains(string(bs), `,"Password":"s****t",`))
+	require.Contains(t, string(bs), `,"Password":"s****t",`)
 }
 
 func TestSFTPMasking(t *testing.T) {
 	cfg := &SFTP{Password: "secret"}
 	bs, err := json.Marshal(cfg)
 	require.NoError(t, err)
-	require.True(t, strings.Contains(string(bs), `,"Password":"s****t",`))
+	require.Contains(t, string(bs), `,"Password":"s****t",`)
 }
diff --git a/internal/shards/api_shard_mapping_test.go b/internal/shards/api_shard_mapping_test.go
index 2c153e2..461e74e 100644
--- a/internal/shards/api_shard_mapping_test.go
+++ b/internal/shards/api_shard_mapping_test.go
@@ -3,7 +3,6 @@ package shards_test
 
 import (
-	"fmt"
 	"github.com/moov-io/achgateway/internal/service"
 	"net/http"
 	"testing"
@@ -74,7 +73,7 @@ func clientShardMappingCreate(s ShardMappingTestScope, create *service.ShardMapp
 
 func clientShardMappingGet(s ShardMappingTestScope, shardKey string) (*service.ShardMapping, *http.Response) {
 	i := &service.ShardMapping{}
-	resp := s.MakeCall(s.MakeRequest("GET", fmt.Sprintf("/shard_mappings/%s", shardKey), nil), i)
+	resp := s.MakeCall(s.MakeRequest("GET", "/shard_mappings/"+shardKey, nil), i)
 	return i, resp
 }
diff --git a/internal/shards/service_shard_mapping_test.go b/internal/shards/service_shard_mapping_test.go
index 57d7529..677f4d1 100644
--- a/internal/shards/service_shard_mapping_test.go
+++ b/internal/shards/service_shard_mapping_test.go
@@ -49,7 +49,7 @@ func TestFacilitatorService_List(t *testing.T) {
 	list, err := s.Service.List()
 	require.NoError(t, err)
 
-	require.Equal(t, 3, len(list))
+	require.Len(t, list, 3)
 }
 
 func TestFacilitatorService_Get(t *testing.T) {
diff --git a/internal/storage/filesystem_test.go b/internal/storage/filesystem_test.go
index 0d65811..69a773a 100644
--- a/internal/storage/filesystem_test.go
+++ b/internal/storage/filesystem_test.go
@@ -1,7 +1,6 @@
 package storage
 
 import (
-	"fmt"
 	"os"
 	"os/user"
 	"path/filepath"
@@ -50,7 +49,7 @@ func setupFilesystemGlobTest(tb testing.TB, iterations int) (Chest, string, int)
 		go func(i int) {
 			defer wg.Done()
 
-			where := filepath.Join(sub, fmt.Sprintf("%s.ach", base.ID()))
+			where := filepath.Join(sub, base.ID()+".ach")
 			err := chest.WriteFile(where, contents)
 			require.NoError(tb, err)
 
@@ -81,7 +80,7 @@ func TestFilesystemGlob(t *testing.T) {
 
 	matches, err := chest.Glob(sub + "/*.canceled")
 	require.NoError(t, err)
-	require.Equal(t, canceled, len(matches))
+	require.Len(t, matches, canceled)
 }
 
 func BenchmarkFilesystem_Glob(b *testing.B) {
@@ -105,7 +104,7 @@ func BenchmarkFilesystem_Glob(b *testing.B) {
 
 		matches, err := chest.Glob("/*.canceled")
 		require.NoError(b, err)
-		require.Greater(b, len(matches), 0)
+		require.NotEmpty(b, matches)
 	})
 
 	b.Run("write files", func(b *testing.B) {
@@ -116,6 +115,6 @@ func BenchmarkFilesystem_Glob(b *testing.B) {
 
 		matches, err := chest.Glob(sub + "/*.canceled")
 		require.NoError(b, err)
-		require.Equal(b, canceled, len(matches))
+		require.Len(b, matches, canceled)
 	})
 }
diff --git a/internal/test/cancel_test.go b/internal/test/cancel_test.go
index c9514e8..4feb4cf 100644
--- a/internal/test/cancel_test.go
+++ b/internal/test/cancel_test.go
@@ -118,7 +118,7 @@ func TestCancelFileAPI(t *testing.T) {
 		require.NoError(t, err)
 
 		parent, _ := filepath.Split(where)
-		return slices.Contains(filenames, filepath.Join(parent, fmt.Sprintf("%s.ach", fileID)))
+		return slices.Contains(filenames, filepath.Join(parent, fileID+".ach"))
 	}, 10*time.Second, 1*time.Second)
 
 	// Now cancel that file
diff --git a/internal/test/download_test.go b/internal/test/download_test.go
index 5ccd525..d847fad 100644
--- a/internal/test/download_test.go
+++ b/internal/test/download_test.go
@@ -123,10 +123,10 @@ func TestODFIDownload(t *testing.T) {
 	downloadConf.Inbound.ODFI.Storage.Directory = odfiStorageDir
 	auditDir := t.TempDir()
 	downloadConf.Inbound.ODFI.Audit = &service.AuditTrail{
-		BucketURI: fmt.Sprintf("file://%s", auditDir),
+		BucketURI: "file://" + auditDir,
 	}
 
-	eventTopic := fmt.Sprintf("mem://%s", t.Name())
+	eventTopic := "mem://" + t.Name()
 	downloadConf.Events = &service.EventsConfig{
 		Stream: &service.EventsStream{
 			InMem: &service.InMemory{
diff --git a/internal/test/upload_test.go b/internal/test/upload_test.go
index b4d8522..dd90dba 100644
--- a/internal/test/upload_test.go
+++ b/internal/test/upload_test.go
@@ -216,8 +216,8 @@ func TestUploads(t *testing.T) {
 	require.NoError(t, g.Wait())
 	t.Logf("created %d entries (in %d files) and canceled %d entries (in %d files)", createdEntries, len(createdFileIDs), canceledEntries, len(canceledFileIDs))
 
-	require.Greater(t, createdEntries, 0, "created entries")
-	require.Greater(t, canceledEntries, 0, "canceled entries")
+	require.Positive(t, createdEntries, "created entries")
+	require.Positive(t, canceledEntries, "canceled entries")
 
 	// Pause for long enough that all files get accepted
 	wait := time.Duration(5*iterations) * time.Millisecond // 50k iterations is 4m10s
@@ -256,13 +256,13 @@ func TestUploads(t *testing.T) {
 	time.Sleep(10 * time.Second)
 
 	filenamePrefixCounts := countFilenamePrefixes(t, outboundPath)
-	require.Greater(t, filenamePrefixCounts["BETA"], 0)
-	require.Greater(t, filenamePrefixCounts["PROD"], 0)
+	require.Positive(t, filenamePrefixCounts["BETA"])
+	require.Positive(t, filenamePrefixCounts["PROD"])
 
 	// Verify no files are left in mergable/
 	mergableFiles, err := ach.ReadDir(filepath.Join("storage", "mergable"))
 	require.NoError(t, err)
-	require.Equal(t, 0, len(mergableFiles))
+	require.Empty(t, mergableFiles)
 
 	// Verify each fileID was isolated on disk
 	verifyFilesWereIsolated(t, createdFileIDs)
@@ -377,7 +377,7 @@ func randomTraceNumbers(t *testing.T, file *ach.File) *ach.File {
 			b.AddEntry(entries[i])
 		} else {
 			n, _ := strconv.Atoi(entries[0].TraceNumber)
-			entries[i].TraceNumber = fmt.Sprintf("%d", n+1)
+			entries[i].TraceNumber = strconv.Itoa(n + 1)
 			b.AddEntry(entries[i])
 		}
 	}
@@ -473,7 +473,7 @@ func causeSubscriptionFailure(t *testing.T) error {
 
 func firstDirectory(t *testing.T, fsys fs.FS, prefix string) string {
 	t.Helper()
 
-	matches, err := fs.Glob(fsys, fmt.Sprintf("%s-*", prefix))
+	matches, err := fs.Glob(fsys, prefix+"-*")
 	require.NoError(t, err)
 	require.Len(t, matches, 1)
@@ -487,8 +487,8 @@ func verifyFilesWereIsolated(t *testing.T, fileIDs []string) {
 	beta, prod := firstDirectory(t, fsys, "beta"), firstDirectory(t, fsys, "prod")
 
 	for i := range fileIDs {
-		betaMatches, _ := fs.Glob(fsys, filepath.Join(beta, fmt.Sprintf("%s.*", fileIDs[i])))
-		prodMatches, _ := fs.Glob(fsys, filepath.Join(prod, fmt.Sprintf("%s.*", fileIDs[i])))
+		betaMatches, _ := fs.Glob(fsys, filepath.Join(beta, fileIDs[i]+".*"))
+		prodMatches, _ := fs.Glob(fsys, filepath.Join(prod, fileIDs[i]+".*"))
 
 		total := len(betaMatches) + len(prodMatches)
 		if total == 0 {
diff --git a/internal/upload/filename_template_test.go b/internal/upload/filename_template_test.go
index 2b7f81d..69a5d5f 100644
--- a/internal/upload/filename_template_test.go
+++ b/internal/upload/filename_template_test.go
@@ -54,7 +54,7 @@ func TestFilenameTemplate(t *testing.T) {
 	})
 	require.NoError(t, err)
 
-	expected = fmt.Sprintf("%s.ach", time.Now().Format("20060102"))
+	expected = time.Now().Format("20060102") + ".ach"
 	if filename != expected {
 		t.Errorf("filename=%s", filename)
 	}
diff --git a/internal/upload/network_security_test.go b/internal/upload/network_security_test.go
index 8d11098..decb754 100644
--- a/internal/upload/network_security_test.go
+++ b/internal/upload/network_security_test.go
@@ -5,7 +5,6 @@ package upload
 
 import (
-	"fmt"
 	"net"
 	"testing"
 
@@ -33,13 +32,13 @@ func TestRejectOutboundIPRange(t *testing.T) {
 	}
 
 	// multiple whitelisted, but exact IP match
-	cfg.AllowedIPs = fmt.Sprintf("127.0.0.1/24,%s", addr.String())
+	cfg.AllowedIPs = "127.0.0.1/24," + addr.String()
 	if err := rejectOutboundIPRange(cfg.SplitAllowedIPs(), "moov.io"); err != nil {
 		t.Error(err)
 	}
 
 	// multiple whitelisted, match range (convert IP to /24)
-	cfg.AllowedIPs = fmt.Sprintf("%s/24", addr.Mask(net.IPv4Mask(0xFF, 0xFF, 0xFF, 0x0)).String())
+	cfg.AllowedIPs = addr.Mask(net.IPv4Mask(0xFF, 0xFF, 0xFF, 0x0)).String() + "/24"
 	if err := rejectOutboundIPRange(cfg.SplitAllowedIPs(), "moov.io"); err != nil {
 		t.Error(err)
 	}
diff --git a/internal/upload/sftp_test.go b/internal/upload/sftp_test.go
index d139e0b..fd17117 100644
--- a/internal/upload/sftp_test.go
+++ b/internal/upload/sftp_test.go
@@ -71,7 +71,7 @@ func spawnSFTP(t *testing.T) *sftpDeployment {
 			fmt.Sprintf("demo:password:%d:%d:upload", uid, gid),
 		},
 		Mounts: []string{
-			fmt.Sprintf("%s:/home/demo/upload", dir),
+			dir + ":/home/demo/upload",
 		},
 	})
 	// Force container to shutdown prior to checking if it failed
@@ -83,7 +83,7 @@ func spawnSFTP(t *testing.T) *sftpDeployment {
 	})
 	require.NoError(t, err)
 
-	addr := fmt.Sprintf("localhost:%s", resource.GetPort("22/tcp"))
+	addr := "localhost:" + resource.GetPort("22/tcp")
 
 	var agent *SFTPTransferAgent
 	for i := 0; i < 10; i++ {
@@ -212,7 +212,7 @@ func TestSFTP__readFilesEmpty(t *testing.T) {
 
 	// Upload an empty file
 	ctx := context.Background()
-	filename := fmt.Sprintf("%s.ach", base.ID())
+	filename := base.ID() + ".ach"
 	err = deployment.agent.UploadFile(ctx, File{
 		Filepath: filename,
 		Contents: io.NopCloser(strings.NewReader("")),
@@ -237,7 +237,7 @@ func TestSFTP__readFilesEmpty(t *testing.T) {
 	// read a non-existent directory
 	filepaths, err = deployment.agent.readFilepaths(ctx, "/dev/null")
 	require.NoError(t, err)
-	require.Len(t, filepaths, 0)
+	require.Empty(t, filepaths)
 }
 
 func TestSFTP__uploadFile(t *testing.T) {
diff --git a/pkg/compliance/compliance_test.go b/pkg/compliance/compliance_test.go
index f9f7d01..f953695 100644
--- a/pkg/compliance/compliance_test.go
+++ b/pkg/compliance/compliance_test.go
@@ -54,11 +54,11 @@ func TestCompliance(t *testing.T) {
 
 			encrypted, err := Protect(cfg, evt)
 			require.NoError(t, err)
-			require.Greater(t, len(encrypted), 0)
+			require.NotEmpty(t, encrypted)
 
 			decrypted, err := Reveal(cfg, encrypted)
 			require.NoError(t, err)
-			require.Greater(t, len(decrypted), 0)
+			require.NotEmpty(t, decrypted)
 
 			var uploaded models.FileUploaded
 			require.NoError(t, models.ReadEvent(decrypted, &uploaded))
diff --git a/pkg/compliance/crypt_aes_test.go b/pkg/compliance/crypt_aes_test.go
index 6154002..f555ad5 100644
--- a/pkg/compliance/crypt_aes_test.go
+++ b/pkg/compliance/crypt_aes_test.go
@@ -36,7 +36,7 @@ func TestCryptor__AES(t *testing.T) {
 
 	enc, err := cc.Encrypt([]byte("hello, world"))
 	require.NoError(t, err)
-	require.Greater(t, len(enc), 0)
+	require.NotEmpty(t, enc)
 
 	dec1, err := cc.Decrypt(enc)
 	require.NoError(t, err)
diff --git a/pkg/models/events_test.go b/pkg/models/events_test.go
index 7f81a95..b88ab9b 100644
--- a/pkg/models/events_test.go
+++ b/pkg/models/events_test.go
@@ -117,7 +117,7 @@ func TestRead__InvalidQueueFile(t *testing.T) {
 
 func TestPartialReconciliationFile(t *testing.T) {
 	file, err := ach.ReadFile(filepath.Join("testdata", "partial-recon.ach"))
-	require.NotNil(t, err)
+	require.Error(t, err)
 	require.True(t, base.Has(err, ach.ErrFileHeader))
 
 	var bs bytes.Buffer
diff --git a/pkg/models/model_transform_test.go b/pkg/models/model_transform_test.go
index 2d06fd3..1ebed03 100644
--- a/pkg/models/model_transform_test.go
+++ b/pkg/models/model_transform_test.go
@@ -29,5 +29,5 @@ func TestAESConfigMasking(t *testing.T) {
 	cfg := &AESConfig{Key: strings.Repeat("1", 32)}
 	bs, err := json.Marshal(cfg)
 	require.NoError(t, err)
-	require.Equal(t, bs, []byte(`{"Key":"1*****1"}`))
+	require.JSONEq(t, string(bs), `{"Key":"1*****1"}`)
 }