From 5004088668537543f28fb0fd7c121b8468b11596 Mon Sep 17 00:00:00 2001 From: Geoffrey Ragot Date: Thu, 24 Oct 2024 22:10:47 +0200 Subject: [PATCH 01/12] feat: reverse benchmark output comparison --- test/performance/Earthfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/performance/Earthfile b/test/performance/Earthfile index b12580238..20b0969fe 100644 --- a/test/performance/Earthfile +++ b/test/performance/Earthfile @@ -43,7 +43,7 @@ compare: COPY (+run/benchmark-output.txt --args=$args) /report/benchmark-output-local.txt COPY --allow-privileged (github.com/formancehq/ledger/test/performance:${rev}+run/benchmark-output.txt --args=$args) /report/benchmark-output-remote.txt - RUN benchstat /report/benchmark-output-local.txt /report/benchmark-output-remote.txt > benchmark-comparison.txt + RUN benchstat /report/benchmark-output-remote.txt /report/benchmark-output-local.txt > benchmark-comparison.txt SAVE ARTIFACT benchmark-comparison.txt AS LOCAL benchmark-comparison.txt From c318745d2568c85b0e09b476b89d7b2b589406c3 Mon Sep 17 00:00:00 2001 From: Geoffrey Ragot Date: Sat, 19 Oct 2024 17:10:19 +0200 Subject: [PATCH 02/12] feat: make migrations start asynchronously at application startup --- internal/storage/bucket/bucket_test.go | 5 + internal/storage/bucket/migrations.go | 22 ++- internal/storage/driver/migrations.go | 187 ++++++++++++++----- internal/storage/module.go | 52 ++++++- 4 files changed, 183 insertions(+), 83 deletions(-) diff --git a/internal/storage/bucket/bucket_test.go b/internal/storage/bucket/bucket_test.go index 9720cc6e1..55392b299 100644 --- a/internal/storage/bucket/bucket_test.go +++ b/internal/storage/bucket/bucket_test.go @@ -3,6 +3,7 @@ package bucket_test import ( + "github.com/formancehq/go-libs/v2/bun/bundebug" "github.com/formancehq/ledger/internal/storage/bucket" "github.com/formancehq/ledger/internal/storage/driver" "go.opentelemetry.io/otel/trace/noop" @@ -23,6 +24,10 @@ func TestBuckets(t *testing.T) 
{ db, err := bunconnect.OpenSQLDB(ctx, pgDatabase.ConnectionOptions()) require.NoError(t, err) + if testing.Verbose() { + db.AddQueryHook(bundebug.NewQueryHook()) + } + require.NoError(t, driver.Migrate(ctx, db)) b := bucket.New(db, name) diff --git a/internal/storage/bucket/migrations.go b/internal/storage/bucket/migrations.go index dadd6e8e7..18ec15de1 100644 --- a/internal/storage/bucket/migrations.go +++ b/internal/storage/bucket/migrations.go @@ -2,6 +2,7 @@ package bucket import ( "context" + "database/sql" "embed" "github.com/formancehq/go-libs/v2/migrations" "github.com/uptrace/bun" @@ -13,7 +14,26 @@ var migrationsDir embed.FS func GetMigrator(name string) *migrations.Migrator { migrator := migrations.NewMigrator(migrations.WithSchema(name, true)) - migrator.RegisterMigrationsFromFileSystem(migrationsDir, "migrations") + migrations, err := migrations.CollectMigrationFiles(migrationsDir, "migrations") + if err != nil { + panic(err) + } + + for ind, migration := range migrations[:12] { + originalUp := migration.Up + migration.Up = func(ctx context.Context, db bun.IDB) error { + return db.RunInTx(ctx, &sql.TxOptions{}, func(ctx context.Context, tx bun.Tx) error { + _, err := tx.ExecContext(ctx, "set search_path to '"+name+"'") + if err != nil { + return err + } + return originalUp(ctx, tx) + }) + } + migrations[ind] = migration + } + + migrator.RegisterMigrations(migrations...) 
return migrator } diff --git a/internal/storage/driver/migrations.go b/internal/storage/driver/migrations.go index dfce0341a..6dd51a682 100644 --- a/internal/storage/driver/migrations.go +++ b/internal/storage/driver/migrations.go @@ -4,6 +4,7 @@ import ( "context" "errors" "fmt" + "database/sql" "github.com/formancehq/go-libs/v2/time" "github.com/formancehq/go-libs/v2/platform/postgres" @@ -27,132 +28,155 @@ func GetMigrator() *migrations.Migrator { migrator.RegisterMigrations( migrations.Migration{ Name: "Init schema", - UpWithContext: func(ctx context.Context, tx bun.Tx) error { - _, err := tx.ExecContext(ctx, ` - create table ledgers ( + Up: func(ctx context.Context, db bun.IDB) error { + return db.RunInTx(ctx, &sql.TxOptions{}, func(ctx context.Context, tx bun.Tx) error { + _, err := tx.ExecContext(ctx, ` + create table _system.ledgers ( ledger varchar primary key, addedat timestamp, bucket varchar(255) ) `) - if err != nil { - return err - } - - _, err = tx.NewCreateTable(). - Model((*configuration)(nil)). - Exec(ctx) - return postgres.ResolveError(err) + if err != nil { + return err + } + + _, err = tx.NewCreateTable(). + Model((*configuration)(nil)). 
+ Exec(ctx) + return postgres.ResolveError(err) + }) }, }, migrations.Migration{ Name: "Add ledger, bucket naming constraints 63 chars", - UpWithContext: func(ctx context.Context, tx bun.Tx) error { - _, err := tx.ExecContext(ctx, ` - alter table ledgers - alter column ledger type varchar(63), - alter column bucket type varchar(63); - `) - return err + Up: func(ctx context.Context, db bun.IDB) error { + return db.RunInTx(ctx, &sql.TxOptions{}, func(ctx context.Context, tx bun.Tx) error { + _, err := tx.ExecContext(ctx, ` + alter table _system.ledgers + alter column ledger type varchar(63), + alter column bucket type varchar(63); + `) + return err + }) }, }, migrations.Migration{ Name: "Add ledger metadata", - UpWithContext: func(ctx context.Context, tx bun.Tx) error { - _, err := tx.ExecContext(ctx, ` - alter table ledgers - add column if not exists metadata jsonb; - `) - return err + Up: func(ctx context.Context, db bun.IDB) error { + return db.RunInTx(ctx, &sql.TxOptions{}, func(ctx context.Context, tx bun.Tx) error { + _, err := tx.ExecContext(ctx, ` + alter table _system.ledgers + add column if not exists metadata jsonb; + `) + return err + }) }, }, migrations.Migration{ Name: "Fix empty ledger metadata", - UpWithContext: func(ctx context.Context, tx bun.Tx) error { - _, err := tx.ExecContext(ctx, ` - update ledgers - set metadata = '{}'::jsonb - where metadata is null; - `) - return err + Up: func(ctx context.Context, db bun.IDB) error { + return db.RunInTx(ctx, &sql.TxOptions{}, func(ctx context.Context, tx bun.Tx) error { + _, err := tx.ExecContext(ctx, ` + update _system.ledgers + set metadata = '{}'::jsonb + where metadata is null; + `) + return err + }) }, }, migrations.Migration{ Name: "Add ledger state", - UpWithContext: func(ctx context.Context, tx bun.Tx) error { - _, err := tx.ExecContext(ctx, ` - alter table ledgers - add column if not exists state varchar(255) default 'initializing'; - - update ledgers - set state = 'in-use' - where state = ''; - 
`) - return err + Up: func(ctx context.Context, db bun.IDB) error { + return db.RunInTx(ctx, &sql.TxOptions{}, func(ctx context.Context, tx bun.Tx) error { + _, err := tx.ExecContext(ctx, ` + alter table _system.ledgers + add column if not exists state varchar(255) default 'initializing'; + + update _system.ledgers + set state = 'in-use' + where state = ''; + `) + return err + }) }, }, migrations.Migration{ Name: "Add features column", - UpWithContext: func(ctx context.Context, tx bun.Tx) error { - _, err := tx.ExecContext(ctx, ` - alter table ledgers + Up: func(ctx context.Context, db bun.IDB) error { + return db.RunInTx(ctx, &sql.TxOptions{}, func(ctx context.Context, tx bun.Tx) error { + _, err := tx.ExecContext(ctx, ` + alter table _system.ledgers add column if not exists features jsonb; `) - return err + return err + }) }, }, migrations.Migration{ Name: "Rename ledger column to name", - UpWithContext: func(ctx context.Context, tx bun.Tx) error { - _, err := tx.ExecContext(ctx, ` - alter table ledgers + Up: func(ctx context.Context, db bun.IDB) error { + return db.RunInTx(ctx, &sql.TxOptions{}, func(ctx context.Context, tx bun.Tx) error { + _, err := tx.ExecContext(ctx, ` + alter table _system.ledgers rename column ledger to name; `) - return err + return err + }) }, }, migrations.Migration{ Name: "Add sequential id on ledgers", - UpWithContext: func(ctx context.Context, tx bun.Tx) error { - _, err := tx.ExecContext(ctx, ` - create sequence ledger_sequence; - - alter table ledgers - add column id bigint default nextval('ledger_sequence'); - `) - return err + Up: func(ctx context.Context, db bun.IDB) error { + return db.RunInTx(ctx, &sql.TxOptions{}, func(ctx context.Context, tx bun.Tx) error { + _, err := tx.ExecContext(ctx, ` + create sequence _system.ledger_sequence; + + alter table _system.ledgers + add column id bigint default nextval('_system.ledger_sequence'); + `) + return err + }) }, }, migrations.Migration{ Name: "Add aggregate_objects pg aggregator", 
- UpWithContext: func(ctx context.Context, tx bun.Tx) error { - _, err := tx.ExecContext(ctx, aggregateObjects) - return err + Up: func(ctx context.Context, db bun.IDB) error { + return db.RunInTx(ctx, &sql.TxOptions{}, func(ctx context.Context, tx bun.Tx) error { + _, err := tx.ExecContext(ctx, aggregateObjects) + return err + }) }, }, migrations.Migration{ Name: "Remove ledger state column", - UpWithContext: func(ctx context.Context, tx bun.Tx) error { - _, err := tx.ExecContext(ctx, ` - alter table _system.ledgers - drop column state; - `) - return err + Up: func(ctx context.Context, db bun.IDB) error { + return db.RunInTx(ctx, &sql.TxOptions{}, func(ctx context.Context, tx bun.Tx) error { + _, err := tx.ExecContext(ctx, ` + alter table _system.ledgers + drop column state; + `) + return err + }) }, }, migrations.Migration{ Name: "Remove configuration table", - UpWithContext: func(ctx context.Context, tx bun.Tx) error { - _, err := tx.ExecContext(ctx, ` - drop table _system.configuration; - `) - return err + Up: func(ctx context.Context, db bun.IDB) error { + return db.RunInTx(ctx, &sql.TxOptions{}, func(ctx context.Context, tx bun.Tx) error { + _, err := tx.ExecContext(ctx, ` + drop table _system.configuration; + `) + return err + }) }, }, migrations.Migration{ Name: "Generate addedat of table ledgers", - UpWithContext: func(ctx context.Context, tx bun.Tx) error { - _, err := tx.ExecContext(ctx, ` + Up: func(ctx context.Context, db bun.IDB) error { + return db.RunInTx(ctx, &sql.TxOptions{}, func(ctx context.Context, tx bun.Tx) error { + _, err := tx.ExecContext(ctx, ` alter table _system.ledgers alter column addedat type timestamp without time zone; @@ -162,17 +186,20 @@ func GetMigrator() *migrations.Migrator { alter table _system.ledgers rename column addedat to added_at; `) - return err + return err + }) }, }, migrations.Migration{ Name: "add pgcrypto", - UpWithContext: func(ctx context.Context, tx bun.Tx) error { - _, err := tx.ExecContext(ctx, ` - create 
extension if not exists pgcrypto - with schema public; - `) - return err + Up: func(ctx context.Context, db bun.IDB) error { + return db.RunInTx(ctx, &sql.TxOptions{}, func(ctx context.Context, tx bun.Tx) error { + _, err := tx.ExecContext(ctx, ` + create extension if not exists pgcrypto + with schema public; + `) + return err + }) }, }, ) diff --git a/internal/storage/module.go b/internal/storage/module.go index e9c23ae31..16a45915e 100644 --- a/internal/storage/module.go +++ b/internal/storage/module.go @@ -1,12 +1,60 @@ package storage import ( + "context" + "errors" + "github.com/formancehq/go-libs/v2/logging" "github.com/formancehq/ledger/internal/storage/driver" "go.uber.org/fx" ) func NewFXModule(autoUpgrade bool) fx.Option { - return fx.Options( + ret := []fx.Option{ driver.NewFXModule(autoUpgrade), - ) + } + if autoUpgrade { + ret = append(ret, + fx.Invoke(func(lc fx.Lifecycle, driver *driver.Driver) { + var ( + upgradeContext context.Context + cancelContext func() + upgradeStopped = make(chan struct{}) + ) + lc.Append(fx.Hook{ + OnStart: func(ctx context.Context) error { + upgradeContext, cancelContext = context.WithCancel(logging.ContextWithLogger( + context.Background(), + logging.FromContext(ctx), + )) + go func() { + defer close(upgradeStopped) + + if err := driver.UpgradeAllBuckets(upgradeContext); err != nil { + // Long migrations can be cancelled (app rescheduled for example) + // before fully terminated, handle this gracefully, don't panic, + // the next start will try again. + if errors.Is(err, context.DeadlineExceeded) || + errors.Is(err, context.Canceled) { + return + } + + panic(err) + } + }() + return nil + }, + OnStop: func(ctx context.Context) error { + cancelContext() + select { + case <-ctx.Done(): + return ctx.Err() + case <-upgradeStopped: + return nil + } + }, + }) + }), + ) + } + return fx.Options(ret...) 
} From 66001915c46713c4ba3f1d80112f1e1a1ee90407 Mon Sep 17 00:00:00 2001 From: Geoffrey Ragot Date: Sat, 19 Oct 2024 19:12:25 +0200 Subject: [PATCH 03/12] feat: refine migrations system --- internal/storage/bucket/migrations.go | 94 +++++++++++++++---- .../migration.sql} | 2 + .../migrations/0-init-schema/notes.yaml | 1 + .../migration.sql} | 4 +- .../migrations/1-fix-trigger/notes.yaml | 1 + .../migration.sql} | 2 + .../10-fillfactor-on-moves/notes.yaml | 1 + .../migration.sql} | 4 +- .../migrations/11-make-stateless/notes.yaml | 1 + .../migration.sql} | 5 +- .../2-fix-volumes-aggregation/notes.yaml | 1 + .../migration.sql} | 5 +- .../notes.yaml | 1 + .../migration.sql} | 5 +- .../notes.yaml | 1 + .../migration.sql} | 2 + .../5-add-idempotency-key-index/notes.yaml | 1 + .../migration.sql} | 2 + .../6-add-reference-index/notes.yaml | 1 + .../migration.sql} | 2 + .../7-add-ik-unique-index/notes.yaml | 1 + .../migration.sql} | 2 + .../8-ik-ledger-unique-index/notes.yaml | 1 + .../migration.sql} | 2 + .../notes.yaml | 1 + 25 files changed, 121 insertions(+), 22 deletions(-) rename internal/storage/bucket/migrations/{0-init-schema.sql => 0-init-schema/migration.sql} (99%) create mode 100644 internal/storage/bucket/migrations/0-init-schema/notes.yaml rename internal/storage/bucket/migrations/{1-fix-trigger.sql => 1-fix-trigger/migration.sql} (95%) create mode 100644 internal/storage/bucket/migrations/1-fix-trigger/notes.yaml rename internal/storage/bucket/migrations/{10-fillfactor-on-moves.sql => 10-fillfactor-on-moves/migration.sql} (54%) create mode 100644 internal/storage/bucket/migrations/10-fillfactor-on-moves/notes.yaml rename internal/storage/bucket/migrations/{11-make-stateless.sql => 11-make-stateless/migration.sql} (99%) create mode 100644 internal/storage/bucket/migrations/11-make-stateless/notes.yaml rename internal/storage/bucket/migrations/{2-fix-volumes-aggregation.sql => 2-fix-volumes-aggregation/migration.sql} (92%) create mode 100644 
internal/storage/bucket/migrations/2-fix-volumes-aggregation/notes.yaml rename internal/storage/bucket/migrations/{3-fix-trigger-inserting-backdated-transactions.sql => 3-fix-trigger-inserting-backdated-transactions/migration.sql} (98%) create mode 100644 internal/storage/bucket/migrations/3-fix-trigger-inserting-backdated-transactions/notes.yaml rename internal/storage/bucket/migrations/{4-add-account-first-usage-column.sql => 4-add-account-first-usage-column/migration.sql} (99%) create mode 100644 internal/storage/bucket/migrations/4-add-account-first-usage-column/notes.yaml rename internal/storage/bucket/migrations/{5-add-idempotency-key-index.sql => 5-add-idempotency-key-index/migration.sql} (63%) create mode 100644 internal/storage/bucket/migrations/5-add-idempotency-key-index/notes.yaml rename internal/storage/bucket/migrations/{6-add-reference-index.sql => 6-add-reference-index/migration.sql} (65%) create mode 100644 internal/storage/bucket/migrations/6-add-reference-index/notes.yaml rename internal/storage/bucket/migrations/{7-add-ik-unique-index.sql => 7-add-ik-unique-index/migration.sql} (93%) create mode 100644 internal/storage/bucket/migrations/7-add-ik-unique-index/notes.yaml rename internal/storage/bucket/migrations/{8-ik-ledger-unique-index.sql => 8-ik-ledger-unique-index/migration.sql} (76%) create mode 100644 internal/storage/bucket/migrations/8-ik-ledger-unique-index/notes.yaml rename internal/storage/bucket/migrations/{9-fix-incorrect-volumes-aggregation.sql => 9-fix-incorrect-volumes-aggregation/migration.sql} (97%) create mode 100644 internal/storage/bucket/migrations/9-fix-incorrect-volumes-aggregation/notes.yaml diff --git a/internal/storage/bucket/migrations.go b/internal/storage/bucket/migrations.go index 18ec15de1..356530be3 100644 --- a/internal/storage/bucket/migrations.go +++ b/internal/storage/bucket/migrations.go @@ -1,12 +1,21 @@ package bucket import ( + "bytes" "context" - "database/sql" "embed" + "fmt" + 
"github.com/formancehq/go-libs/v2/logging" "github.com/formancehq/go-libs/v2/migrations" + "github.com/ghodss/yaml" "github.com/uptrace/bun" "go.opentelemetry.io/otel/trace" + "io/fs" + "path/filepath" + "slices" + "strconv" + "strings" + "text/template" ) //go:embed migrations @@ -14,25 +23,10 @@ var migrationsDir embed.FS func GetMigrator(name string) *migrations.Migrator { migrator := migrations.NewMigrator(migrations.WithSchema(name, true)) - migrations, err := migrations.CollectMigrationFiles(migrationsDir, "migrations") + migrations, err := collectMigrations(name) if err != nil { panic(err) } - - for ind, migration := range migrations[:12] { - originalUp := migration.Up - migration.Up = func(ctx context.Context, db bun.IDB) error { - return db.RunInTx(ctx, &sql.TxOptions{}, func(ctx context.Context, tx bun.Tx) error { - _, err := tx.ExecContext(ctx, "set search_path to '"+name+"'") - if err != nil { - return err - } - return originalUp(ctx, tx) - }) - } - migrations[ind] = migration - } - migrator.RegisterMigrations(migrations...) 
return migrator @@ -44,3 +38,69 @@ func Migrate(ctx context.Context, tracer trace.Tracer, db bun.IDB, name string) return GetMigrator(name).Up(ctx, db) } + +type Notes struct { + Name string `yaml:"name"` +} + +func collectMigrations(name string) ([]migrations.Migration, error) { + entries, err := migrationsDir.ReadDir("migrations") + if err != nil { + return nil, err + } + + slices.SortFunc(entries, func(a, b fs.DirEntry) int { + fileAVersionAsString := strings.SplitN(a.Name(), "-", 2)[0] + fileAVersion, err := strconv.ParseInt(fileAVersionAsString, 10, 64) + if err != nil { + panic(err) + } + + fileBVersionAsString := strings.SplitN(b.Name(), "-", 2)[0] + fileBVersion, err := strconv.ParseInt(fileBVersionAsString, 10, 64) + if err != nil { + panic(err) + } + + return int(fileAVersion - fileBVersion) + }) + + ret := make([]migrations.Migration, len(entries)) + for i, entry := range entries { + rawNotes, err := migrationsDir.ReadFile(filepath.Join("migrations", entry.Name(), "notes.yaml")) + if err != nil { + return nil, fmt.Errorf("failed to read notes.yaml: %w", err) + } + + notes := &Notes{} + if err := yaml.Unmarshal(rawNotes, notes); err != nil { + return nil, fmt.Errorf("failed to unmarshal notes.yaml: %w", err) + } + + rawSQL, err := migrationsDir.ReadFile(filepath.Join("migrations", entry.Name(), "migration.sql")) + if err != nil { + return nil, fmt.Errorf("failed to read migration.sql: %w", err) + } + + buf := bytes.NewBuffer(nil) + err = template.Must(template.New("migration"). + Parse(string(rawSQL))). 
+ Execute(buf, map[string]any{ + "Bucket": name, + }) + if err != nil { + return nil, fmt.Errorf("failed to execute template: %w", err) + } + + ret[i] = migrations.Migration{ + Name: notes.Name, + Up: func(ctx context.Context, db bun.IDB) error { + logging.FromContext(ctx).Infof("Applying migration %s", notes.Name) + _, err := db.ExecContext(ctx, buf.String()) + return err + }, + } + } + + return ret, nil +} diff --git a/internal/storage/bucket/migrations/0-init-schema.sql b/internal/storage/bucket/migrations/0-init-schema/migration.sql similarity index 99% rename from internal/storage/bucket/migrations/0-init-schema.sql rename to internal/storage/bucket/migrations/0-init-schema/migration.sql index 51d30bfa1..6a4503d6e 100644 --- a/internal/storage/bucket/migrations/0-init-schema.sql +++ b/internal/storage/bucket/migrations/0-init-schema/migration.sql @@ -1,3 +1,5 @@ +set search_path = '{{.Bucket}}'; + create aggregate aggregate_objects(jsonb) ( sfunc = jsonb_concat, stype = jsonb, diff --git a/internal/storage/bucket/migrations/0-init-schema/notes.yaml b/internal/storage/bucket/migrations/0-init-schema/notes.yaml new file mode 100644 index 000000000..d6552f3e5 --- /dev/null +++ b/internal/storage/bucket/migrations/0-init-schema/notes.yaml @@ -0,0 +1 @@ +name: Initialize schema diff --git a/internal/storage/bucket/migrations/1-fix-trigger.sql b/internal/storage/bucket/migrations/1-fix-trigger/migration.sql similarity index 95% rename from internal/storage/bucket/migrations/1-fix-trigger.sql rename to internal/storage/bucket/migrations/1-fix-trigger/migration.sql index af127449c..58c9a8314 100644 --- a/internal/storage/bucket/migrations/1-fix-trigger.sql +++ b/internal/storage/bucket/migrations/1-fix-trigger/migration.sql @@ -1,3 +1,5 @@ +set search_path = '{{.Bucket}}'; + create or replace function insert_posting(_transaction_seq bigint, _ledger varchar, _insertion_date timestamp without time zone, _effective_date timestamp without time zone, posting jsonb, 
_account_metadata jsonb) returns void @@ -27,4 +29,4 @@ begin posting ->> 'destination', posting ->> 'asset', (posting ->> 'amount')::numeric, false, _destination_exists); end; -$$ set search_path from current; \ No newline at end of file +$$ set search_path from current; diff --git a/internal/storage/bucket/migrations/1-fix-trigger/notes.yaml b/internal/storage/bucket/migrations/1-fix-trigger/notes.yaml new file mode 100644 index 000000000..6643587bc --- /dev/null +++ b/internal/storage/bucket/migrations/1-fix-trigger/notes.yaml @@ -0,0 +1 @@ +name: Fix triggers diff --git a/internal/storage/bucket/migrations/10-fillfactor-on-moves.sql b/internal/storage/bucket/migrations/10-fillfactor-on-moves/migration.sql similarity index 54% rename from internal/storage/bucket/migrations/10-fillfactor-on-moves.sql rename to internal/storage/bucket/migrations/10-fillfactor-on-moves/migration.sql index 689434e0f..c59813710 100644 --- a/internal/storage/bucket/migrations/10-fillfactor-on-moves.sql +++ b/internal/storage/bucket/migrations/10-fillfactor-on-moves/migration.sql @@ -1 +1,3 @@ +set search_path = '{{.Bucket}}'; + alter table moves set (fillfactor = 80); \ No newline at end of file diff --git a/internal/storage/bucket/migrations/10-fillfactor-on-moves/notes.yaml b/internal/storage/bucket/migrations/10-fillfactor-on-moves/notes.yaml new file mode 100644 index 000000000..56b453703 --- /dev/null +++ b/internal/storage/bucket/migrations/10-fillfactor-on-moves/notes.yaml @@ -0,0 +1 @@ +name: Define fill factor of moves table diff --git a/internal/storage/bucket/migrations/11-make-stateless.sql b/internal/storage/bucket/migrations/11-make-stateless/migration.sql similarity index 99% rename from internal/storage/bucket/migrations/11-make-stateless.sql rename to internal/storage/bucket/migrations/11-make-stateless/migration.sql index d928c0c71..1b771a4db 100644 --- a/internal/storage/bucket/migrations/11-make-stateless.sql +++ 
b/internal/storage/bucket/migrations/11-make-stateless/migration.sql @@ -1,3 +1,5 @@ +set search_path = '{{.Bucket}}'; + drop trigger insert_account on accounts; drop trigger update_account on accounts; drop trigger insert_transaction on transactions; @@ -511,4 +513,4 @@ $do$ execute vsql; end loop; END -$do$; \ No newline at end of file +$do$; diff --git a/internal/storage/bucket/migrations/11-make-stateless/notes.yaml b/internal/storage/bucket/migrations/11-make-stateless/notes.yaml new file mode 100644 index 000000000..900e05e50 --- /dev/null +++ b/internal/storage/bucket/migrations/11-make-stateless/notes.yaml @@ -0,0 +1 @@ +name: Make stateless diff --git a/internal/storage/bucket/migrations/2-fix-volumes-aggregation.sql b/internal/storage/bucket/migrations/2-fix-volumes-aggregation/migration.sql similarity index 92% rename from internal/storage/bucket/migrations/2-fix-volumes-aggregation.sql rename to internal/storage/bucket/migrations/2-fix-volumes-aggregation/migration.sql index 334ee11b7..e730101b6 100644 --- a/internal/storage/bucket/migrations/2-fix-volumes-aggregation.sql +++ b/internal/storage/bucket/migrations/2-fix-volumes-aggregation/migration.sql @@ -1,3 +1,5 @@ +set search_path = '{{.Bucket}}'; + create or replace function get_all_account_volumes(_ledger varchar, _account varchar, _before timestamp default null) returns setof volumes_with_asset language sql @@ -20,4 +22,5 @@ with all_assets as (select v.v as asset ) m on true) select moves.asset, moves.post_commit_volumes from moves -$$ set search_path from current; \ No newline at end of file +$$ set search_path from current; + diff --git a/internal/storage/bucket/migrations/2-fix-volumes-aggregation/notes.yaml b/internal/storage/bucket/migrations/2-fix-volumes-aggregation/notes.yaml new file mode 100644 index 000000000..f7a3c9ef2 --- /dev/null +++ b/internal/storage/bucket/migrations/2-fix-volumes-aggregation/notes.yaml @@ -0,0 +1 @@ +name: Fix volumes aggregation diff --git 
a/internal/storage/bucket/migrations/3-fix-trigger-inserting-backdated-transactions.sql b/internal/storage/bucket/migrations/3-fix-trigger-inserting-backdated-transactions/migration.sql similarity index 98% rename from internal/storage/bucket/migrations/3-fix-trigger-inserting-backdated-transactions.sql rename to internal/storage/bucket/migrations/3-fix-trigger-inserting-backdated-transactions/migration.sql index 668771559..0f08e7552 100644 --- a/internal/storage/bucket/migrations/3-fix-trigger-inserting-backdated-transactions.sql +++ b/internal/storage/bucket/migrations/3-fix-trigger-inserting-backdated-transactions/migration.sql @@ -1,3 +1,5 @@ +set search_path = '{{.Bucket}}'; + create or replace function insert_move( _transactions_seq bigint, _ledger varchar, @@ -102,4 +104,5 @@ begin and seq < _seq; end if; end; -$$ set search_path from current; \ No newline at end of file +$$ set search_path from current; + diff --git a/internal/storage/bucket/migrations/3-fix-trigger-inserting-backdated-transactions/notes.yaml b/internal/storage/bucket/migrations/3-fix-trigger-inserting-backdated-transactions/notes.yaml new file mode 100644 index 000000000..e7c2cbbe4 --- /dev/null +++ b/internal/storage/bucket/migrations/3-fix-trigger-inserting-backdated-transactions/notes.yaml @@ -0,0 +1 @@ +name: Fix backdated transactions insertion diff --git a/internal/storage/bucket/migrations/4-add-account-first-usage-column.sql b/internal/storage/bucket/migrations/4-add-account-first-usage-column/migration.sql similarity index 99% rename from internal/storage/bucket/migrations/4-add-account-first-usage-column.sql rename to internal/storage/bucket/migrations/4-add-account-first-usage-column/migration.sql index 34cc49c21..4878d3c0f 100644 --- a/internal/storage/bucket/migrations/4-add-account-first-usage-column.sql +++ b/internal/storage/bucket/migrations/4-add-account-first-usage-column/migration.sql @@ -1,3 +1,5 @@ +set search_path = '{{.Bucket}}'; + alter table accounts add column 
first_usage timestamp without time zone; @@ -222,4 +224,5 @@ set first_usage = ( select accounts.insertion_date limit 1 ) -where first_usage is null; \ No newline at end of file +where first_usage is null; + diff --git a/internal/storage/bucket/migrations/4-add-account-first-usage-column/notes.yaml b/internal/storage/bucket/migrations/4-add-account-first-usage-column/notes.yaml new file mode 100644 index 000000000..37decc1dd --- /dev/null +++ b/internal/storage/bucket/migrations/4-add-account-first-usage-column/notes.yaml @@ -0,0 +1 @@ +name: Add first_usage on accounts diff --git a/internal/storage/bucket/migrations/5-add-idempotency-key-index.sql b/internal/storage/bucket/migrations/5-add-idempotency-key-index/migration.sql similarity index 63% rename from internal/storage/bucket/migrations/5-add-idempotency-key-index.sql rename to internal/storage/bucket/migrations/5-add-idempotency-key-index/migration.sql index b44c5459d..5802ae6a9 100644 --- a/internal/storage/bucket/migrations/5-add-idempotency-key-index.sql +++ b/internal/storage/bucket/migrations/5-add-idempotency-key-index/migration.sql @@ -1 +1,3 @@ +set search_path = '{{.Bucket}}'; + create index logs_idempotency_key on logs (idempotency_key); \ No newline at end of file diff --git a/internal/storage/bucket/migrations/5-add-idempotency-key-index/notes.yaml b/internal/storage/bucket/migrations/5-add-idempotency-key-index/notes.yaml new file mode 100644 index 000000000..1e35330c7 --- /dev/null +++ b/internal/storage/bucket/migrations/5-add-idempotency-key-index/notes.yaml @@ -0,0 +1 @@ +name: Fix missing idempotency key index diff --git a/internal/storage/bucket/migrations/6-add-reference-index.sql b/internal/storage/bucket/migrations/6-add-reference-index/migration.sql similarity index 65% rename from internal/storage/bucket/migrations/6-add-reference-index.sql rename to internal/storage/bucket/migrations/6-add-reference-index/migration.sql index 89b0ed6f8..d764505d9 100644 --- 
a/internal/storage/bucket/migrations/6-add-reference-index.sql +++ b/internal/storage/bucket/migrations/6-add-reference-index/migration.sql @@ -1 +1,3 @@ +set search_path = '{{.Bucket}}'; + create index transactions_reference on transactions (reference); \ No newline at end of file diff --git a/internal/storage/bucket/migrations/6-add-reference-index/notes.yaml b/internal/storage/bucket/migrations/6-add-reference-index/notes.yaml new file mode 100644 index 000000000..b54250392 --- /dev/null +++ b/internal/storage/bucket/migrations/6-add-reference-index/notes.yaml @@ -0,0 +1 @@ +name: Fix missing reference index diff --git a/internal/storage/bucket/migrations/7-add-ik-unique-index.sql b/internal/storage/bucket/migrations/7-add-ik-unique-index/migration.sql similarity index 93% rename from internal/storage/bucket/migrations/7-add-ik-unique-index.sql rename to internal/storage/bucket/migrations/7-add-ik-unique-index/migration.sql index 92ed59085..66dbbd6cf 100644 --- a/internal/storage/bucket/migrations/7-add-ik-unique-index.sql +++ b/internal/storage/bucket/migrations/7-add-ik-unique-index/migration.sql @@ -1,3 +1,5 @@ +set search_path = '{{.Bucket}}'; + update logs set idempotency_key = null where idempotency_key = ''; diff --git a/internal/storage/bucket/migrations/7-add-ik-unique-index/notes.yaml b/internal/storage/bucket/migrations/7-add-ik-unique-index/notes.yaml new file mode 100644 index 000000000..329f19f5e --- /dev/null +++ b/internal/storage/bucket/migrations/7-add-ik-unique-index/notes.yaml @@ -0,0 +1 @@ +name: Add unique index on ik diff --git a/internal/storage/bucket/migrations/8-ik-ledger-unique-index.sql b/internal/storage/bucket/migrations/8-ik-ledger-unique-index/migration.sql similarity index 76% rename from internal/storage/bucket/migrations/8-ik-ledger-unique-index.sql rename to internal/storage/bucket/migrations/8-ik-ledger-unique-index/migration.sql index 1093bf9c0..6d99e2a7d 100644 --- 
a/internal/storage/bucket/migrations/8-ik-ledger-unique-index.sql +++ b/internal/storage/bucket/migrations/8-ik-ledger-unique-index/migration.sql @@ -1,3 +1,5 @@ +set search_path = '{{.Bucket}}'; + drop index logs_idempotency_key; create unique index logs_idempotency_key on logs (ledger, idempotency_key); \ No newline at end of file diff --git a/internal/storage/bucket/migrations/8-ik-ledger-unique-index/notes.yaml b/internal/storage/bucket/migrations/8-ik-ledger-unique-index/notes.yaml new file mode 100644 index 000000000..e12ce2fb5 --- /dev/null +++ b/internal/storage/bucket/migrations/8-ik-ledger-unique-index/notes.yaml @@ -0,0 +1 @@ +name: Add unique index on ledger names diff --git a/internal/storage/bucket/migrations/9-fix-incorrect-volumes-aggregation.sql b/internal/storage/bucket/migrations/9-fix-incorrect-volumes-aggregation/migration.sql similarity index 97% rename from internal/storage/bucket/migrations/9-fix-incorrect-volumes-aggregation.sql rename to internal/storage/bucket/migrations/9-fix-incorrect-volumes-aggregation/migration.sql index 0731ecaef..7bd19ae5b 100644 --- a/internal/storage/bucket/migrations/9-fix-incorrect-volumes-aggregation.sql +++ b/internal/storage/bucket/migrations/9-fix-incorrect-volumes-aggregation/migration.sql @@ -1,3 +1,5 @@ +set search_path = '{{.Bucket}}'; + create or replace function get_aggregated_volumes_for_transaction(_ledger varchar, tx numeric) returns jsonb stable language sql diff --git a/internal/storage/bucket/migrations/9-fix-incorrect-volumes-aggregation/notes.yaml b/internal/storage/bucket/migrations/9-fix-incorrect-volumes-aggregation/notes.yaml new file mode 100644 index 000000000..b92d6d261 --- /dev/null +++ b/internal/storage/bucket/migrations/9-fix-incorrect-volumes-aggregation/notes.yaml @@ -0,0 +1 @@ +name: Fix incorrect volumes aggregation From 395d2675b0c366aea0f0953798619d5421b9c77d Mon Sep 17 00:00:00 2001 From: Geoffrey Ragot Date: Sun, 20 Oct 2024 12:43:34 +0200 Subject: [PATCH 04/12] feat: 
migrate legacy data (null columns) --- go.sum | 38 ++------ internal/storage/bucket/bucket.go | 2 +- internal/storage/bucket/migrations.go | 87 +++++++++++-------- .../migrations/0-init-schema/tests_after.sql | 43 +++++++++ .../0-init-schema/{migration.sql => up.sql} | 0 .../1-fix-trigger/{migration.sql => up.sql} | 0 .../{migration.sql => up.sql} | 0 .../{migration.sql => up.sql} | 0 .../12-moves-fill-transaction-id/notes.yaml | 1 + .../tests_after.sql | 10 +++ .../tests_before.sql | 10 +++ .../12-moves-fill-transaction-id/up.sql | 29 +++++++ .../notes.yaml | 1 + .../tests_after.sql | 10 +++ .../tests_before.sql | 10 +++ .../13-transactions-fill-inserted-at/up.sql | 31 +++++++ .../14-transactions-fill-pcv/notes.yaml | 1 + .../14-transactions-fill-pcv/tests_after.sql | 40 +++++++++ .../14-transactions-fill-pcv/tests_before.sql | 10 +++ .../14-transactions-fill-pcv/up.sql | 44 ++++++++++ .../notes.yaml | 1 + .../tests_after.sql | 26 ++++++ .../tests_before.sql | 26 ++++++ .../15-accounts-volumes-fill-history/up.sql | 40 +++++++++ .../notes.yaml | 1 + .../tests_after.sql | 10 +++ .../tests_before.sql | 10 +++ .../up.sql | 29 +++++++ .../notes.yaml | 1 + .../tests_after.sql | 10 +++ .../tests_before.sql | 10 +++ .../17-accounts-metadata-fill-address/up.sql | 29 +++++++ .../18-logs-fill-memento/notes.yaml | 1 + .../18-logs-fill-memento/tests_after.sql | 10 +++ .../18-logs-fill-memento/tests_before.sql | 10 +++ .../migrations/18-logs-fill-memento/up.sql | 25 ++++++ .../{migration.sql => up.sql} | 0 .../{migration.sql => up.sql} | 0 .../{migration.sql => up.sql} | 0 .../{migration.sql => up.sql} | 0 .../{migration.sql => up.sql} | 0 .../{migration.sql => up.sql} | 0 .../{migration.sql => up.sql} | 0 .../{migration.sql => up.sql} | 0 internal/storage/bucket/migrations_test.go | 82 +++++++++++++++++ 45 files changed, 620 insertions(+), 68 deletions(-) create mode 100644 internal/storage/bucket/migrations/0-init-schema/tests_after.sql rename 
internal/storage/bucket/migrations/0-init-schema/{migration.sql => up.sql} (100%) rename internal/storage/bucket/migrations/1-fix-trigger/{migration.sql => up.sql} (100%) rename internal/storage/bucket/migrations/10-fillfactor-on-moves/{migration.sql => up.sql} (100%) rename internal/storage/bucket/migrations/11-make-stateless/{migration.sql => up.sql} (100%) create mode 100644 internal/storage/bucket/migrations/12-moves-fill-transaction-id/notes.yaml create mode 100644 internal/storage/bucket/migrations/12-moves-fill-transaction-id/tests_after.sql create mode 100644 internal/storage/bucket/migrations/12-moves-fill-transaction-id/tests_before.sql create mode 100644 internal/storage/bucket/migrations/12-moves-fill-transaction-id/up.sql create mode 100644 internal/storage/bucket/migrations/13-transactions-fill-inserted-at/notes.yaml create mode 100644 internal/storage/bucket/migrations/13-transactions-fill-inserted-at/tests_after.sql create mode 100644 internal/storage/bucket/migrations/13-transactions-fill-inserted-at/tests_before.sql create mode 100644 internal/storage/bucket/migrations/13-transactions-fill-inserted-at/up.sql create mode 100644 internal/storage/bucket/migrations/14-transactions-fill-pcv/notes.yaml create mode 100644 internal/storage/bucket/migrations/14-transactions-fill-pcv/tests_after.sql create mode 100644 internal/storage/bucket/migrations/14-transactions-fill-pcv/tests_before.sql create mode 100644 internal/storage/bucket/migrations/14-transactions-fill-pcv/up.sql create mode 100644 internal/storage/bucket/migrations/15-accounts-volumes-fill-history/notes.yaml create mode 100644 internal/storage/bucket/migrations/15-accounts-volumes-fill-history/tests_after.sql create mode 100644 internal/storage/bucket/migrations/15-accounts-volumes-fill-history/tests_before.sql create mode 100644 internal/storage/bucket/migrations/15-accounts-volumes-fill-history/up.sql create mode 100644 
internal/storage/bucket/migrations/16-transactions-metadata-fill-transaction-id/notes.yaml create mode 100644 internal/storage/bucket/migrations/16-transactions-metadata-fill-transaction-id/tests_after.sql create mode 100644 internal/storage/bucket/migrations/16-transactions-metadata-fill-transaction-id/tests_before.sql create mode 100644 internal/storage/bucket/migrations/16-transactions-metadata-fill-transaction-id/up.sql create mode 100644 internal/storage/bucket/migrations/17-accounts-metadata-fill-address/notes.yaml create mode 100644 internal/storage/bucket/migrations/17-accounts-metadata-fill-address/tests_after.sql create mode 100644 internal/storage/bucket/migrations/17-accounts-metadata-fill-address/tests_before.sql create mode 100644 internal/storage/bucket/migrations/17-accounts-metadata-fill-address/up.sql create mode 100644 internal/storage/bucket/migrations/18-logs-fill-memento/notes.yaml create mode 100644 internal/storage/bucket/migrations/18-logs-fill-memento/tests_after.sql create mode 100644 internal/storage/bucket/migrations/18-logs-fill-memento/tests_before.sql create mode 100644 internal/storage/bucket/migrations/18-logs-fill-memento/up.sql rename internal/storage/bucket/migrations/2-fix-volumes-aggregation/{migration.sql => up.sql} (100%) rename internal/storage/bucket/migrations/3-fix-trigger-inserting-backdated-transactions/{migration.sql => up.sql} (100%) rename internal/storage/bucket/migrations/4-add-account-first-usage-column/{migration.sql => up.sql} (100%) rename internal/storage/bucket/migrations/5-add-idempotency-key-index/{migration.sql => up.sql} (100%) rename internal/storage/bucket/migrations/6-add-reference-index/{migration.sql => up.sql} (100%) rename internal/storage/bucket/migrations/7-add-ik-unique-index/{migration.sql => up.sql} (100%) rename internal/storage/bucket/migrations/8-ik-ledger-unique-index/{migration.sql => up.sql} (100%) rename 
internal/storage/bucket/migrations/9-fix-incorrect-volumes-aggregation/{migration.sql => up.sql} (100%) create mode 100644 internal/storage/bucket/migrations_test.go diff --git a/go.sum b/go.sum index b98eb6c98..5f7e744d9 100644 --- a/go.sum +++ b/go.sum @@ -6,8 +6,6 @@ github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161 h1:L/gRVlceqvL25 github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E= github.com/IBM/sarama v1.43.3 h1:Yj6L2IaNvb2mRBop39N7mmJAHBVY3dTPncr3qGVkxPA= github.com/IBM/sarama v1.43.3/go.mod h1:FVIRaLrhK3Cla/9FfRF5X9Zua2KpS3SYIXxhac1H+FQ= -github.com/Masterminds/semver/v3 v3.2.1 h1:RN9w6+7QoMeJVGyfmbcgs28Br8cvmnucEXnY0rYXWg0= -github.com/Masterminds/semver/v3 v3.2.1/go.mod h1:qvl/7zhW3nngYb5+80sSMF+FG2BjYrf8m9wsX0PNOMQ= github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY= github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU= github.com/Nvveen/Gotty v0.0.0-20120604004816-cd527374f1e5 h1:TngWCqHvy9oXAN6lEVMRuU21PR1EtLVZJmdB18Gu3Rw= @@ -26,8 +24,6 @@ github.com/alitto/pond v1.9.2 h1:9Qb75z/scEZVCoSU+osVmQ0I0JOeLfdTDafrbcJ8CLs= github.com/alitto/pond v1.9.2/go.mod h1:xQn3P/sHTYcU/1BR3i86IGIrilcrGC2LiS+E2+CJWsI= github.com/antlr/antlr4/runtime/Go/antlr v1.4.10 h1:yL7+Jz0jTC6yykIK/Wh74gnTJnrGr5AyrNMXuA0gves= github.com/antlr/antlr4/runtime/Go/antlr v1.4.10/go.mod h1:F7bn7fEU90QkQ3tnmaTx3LTKLEDqnwWODIYppRQ5hnY= -github.com/antlr4-go/antlr/v4 v4.13.1 h1:SqQKkuVZ+zWkMMNkjy5FZe5mr5WURWnlpmOuzYWrPrQ= -github.com/antlr4-go/antlr/v4 v4.13.1/go.mod h1:GKmUxMtwp6ZgGwZSva4eWPC5mS6vUAmOABFgjdkM7Nw= github.com/aws/aws-msk-iam-sasl-signer-go v1.0.0 h1:UyjtGmO0Uwl/K+zpzPwLoXzMhcN9xmnR2nrqJoBrg3c= github.com/aws/aws-msk-iam-sasl-signer-go v1.0.0/go.mod h1:TJAXuFs2HcMib3sN5L0gUC+Q01Qvy3DemvA55WuC+iA= github.com/aws/aws-sdk-go-v2 v1.32.2 h1:AkNLZEyYMLnx/Q/mSKkcMqwNFXMAvFto9bNsHqcTduI= @@ -75,8 +71,6 @@ github.com/davecgh/go-spew 
v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSs github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM= github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/dlclark/regexp2 v1.11.4 h1:rPYF9/LECdNymJufQKmri9gV604RvvABwgOA8un7yAo= -github.com/dlclark/regexp2 v1.11.4/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8= github.com/dnwe/otelsarama v0.0.0-20240308230250-9388d9d40bc0 h1:R2zQhFwSCyyd7L43igYjDrH0wkC/i+QBPELuY0HOu84= github.com/dnwe/otelsarama v0.0.0-20240308230250-9388d9d40bc0/go.mod h1:2MqLKYJfjs3UriXXF9Fd0Qmh/lhxi/6tHXkqtXxyIHc= github.com/docker/cli v27.3.1+incompatible h1:qEGdFBF3Xu6SCvCYhc7CzaQTlBmqDuzxPDpigSyeKQQ= @@ -87,8 +81,6 @@ github.com/docker/go-connections v0.5.0 h1:USnMq7hx7gwdVZq1L49hLXaFtUdTADjXGp+uj github.com/docker/go-connections v0.5.0/go.mod h1:ov60Kzw0kKElRwhNs9UlUHAE/F9Fe6GLaXnqyDdmEXc= github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4= github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= -github.com/dop251/goja v0.0.0-20241009100908-5f46f2705ca3 h1:MXsAuToxwsTn5BEEYm2DheqIiC4jWGmkEJ1uy+KFhvQ= -github.com/dop251/goja v0.0.0-20241009100908-5f46f2705ca3/go.mod h1:MxLav0peU43GgvwVgNbLAj1s/bSGboKkhuULvq/7hx4= github.com/eapache/go-resiliency v1.7.0 h1:n3NRTnBn5N0Cbi/IeOHuQn9s2UwVUH7Ga0ZWcP+9JTA= github.com/eapache/go-resiliency v1.7.0/go.mod h1:5yPzW0MIvSe0JDsv0v+DvcjEv2FyD6iZYSs1ZI+iQho= github.com/eapache/go-xerial-snappy v0.0.0-20230731223053-c322873962e3 h1:Oy0F4ALJ04o5Qqpdz8XLIpNA3WM/iSIXqxtqo7UGVws= @@ -103,18 +95,14 @@ github.com/fatih/color v1.17.0 h1:GlRw1BRJxkpqUCBKzKOw098ed57fEsKeNjpTe3cSjK4= github.com/fatih/color v1.17.0/go.mod h1:YZ7TlrGPkiz6ku9fK3TLD/pl3CpsiFyu8N92HLgmosI= github.com/felixge/httpsnoop v1.0.4 
h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= -github.com/formancehq/go-libs/v2 v2.0.1-0.20241023163904-e440de7907c7 h1:x8vIRM5+y01pLs2YqnYcoUsvFJ/6cP5qDtsM248OmWM= -github.com/formancehq/go-libs/v2 v2.0.1-0.20241023163904-e440de7907c7/go.mod h1:LgxayMN6wgAQbkB3ioBDTHOVMKp1rC6Q55M1CvG44xY= -github.com/formancehq/numscript v0.0.9-0.20241009144012-1150c14a1417 h1:LOd5hxnXDIBcehFrpW1OnXk+VSs0yJXeu1iAOO+Hji4= -github.com/formancehq/numscript v0.0.9-0.20241009144012-1150c14a1417/go.mod h1:btuSv05cYwi9BvLRxVs5zrunU+O1vTgigG1T6UsawcY= +github.com/formancehq/go-libs/v2 v2.0.1-0.20241017152835-2c30f563ab46 h1:8wZtnWSIYNV7DwD0Jr4HsbcRgezOrgDJ2Q0w9ABieKc= +github.com/formancehq/go-libs/v2 v2.0.1-0.20241017152835-2c30f563ab46/go.mod h1:LgxayMN6wgAQbkB3ioBDTHOVMKp1rC6Q55M1CvG44xY= +github.com/formancehq/go-libs/v2 v2.0.1-0.20241017153232-1a62cecf1a61 h1:GSIhsdo/YXuZXI4q8xA8IrdOkkjfFp6O+DiNywk8s8U= +github.com/formancehq/go-libs/v2 v2.0.1-0.20241017153232-1a62cecf1a61/go.mod h1:LgxayMN6wgAQbkB3ioBDTHOVMKp1rC6Q55M1CvG44xY= +github.com/formancehq/go-libs/v2 v2.0.1-0.20241021110602-fbd3b37b93f8 h1:q9mP1jT2q2/QxGdEg9mWOcsf/P7NNvC1vMmyDvj5EEY= +github.com/formancehq/go-libs/v2 v2.0.1-0.20241021110602-fbd3b37b93f8/go.mod h1:KO+eOrTVQ5tR3TZUAHapoQ+d7y2+Ie5Tg0QwfZHAK4k= github.com/fortytw2/leaktest v1.3.0 h1:u8491cBMTQ8ft8aeV+adlcytMZylmA5nnwwkRZjI8vw= github.com/fortytw2/leaktest v1.3.0/go.mod h1:jDsjWgpAGjm2CA7WthBh/CdZYEPF31XHquHwclZch5g= -github.com/gkampitakis/ciinfo v0.3.0 h1:gWZlOC2+RYYttL0hBqcoQhM7h1qNkVqvRCV1fOvpAv8= -github.com/gkampitakis/ciinfo v0.3.0/go.mod h1:1NIwaOcFChN4fa/B0hEBdAb6npDlFL8Bwx4dfRLRqAo= -github.com/gkampitakis/go-diff v1.3.2 h1:Qyn0J9XJSDTgnsgHRdz9Zp24RaJeKMUHg2+PDZZdC4M= -github.com/gkampitakis/go-diff v1.3.2/go.mod h1:LLgOrpqleQe26cte8s36HTWcTmMEur6OPYerdAAS9tk= -github.com/gkampitakis/go-snaps v0.5.4 h1:GX+dkKmVsRenz7SoTbdIEL4KQARZctkMiZ8ZKprRwT8= 
-github.com/gkampitakis/go-snaps v0.5.4/go.mod h1:ZABkO14uCuVxBHAXAfKG+bqNz+aa1bGPAg8jkI0Nk8Y= github.com/go-chi/chi v4.1.2+incompatible h1:fGFk2Gmi/YKXk0OmGfBh0WgmN3XB8lVnEyNz34tQRec= github.com/go-chi/chi v4.1.2+incompatible/go.mod h1:eB3wogJHnLi3x/kFX2A+IbTBlXxmMeXJVKy9tTv1XzQ= github.com/go-chi/chi/v5 v5.1.0 h1:acVI1TYaD+hhedDJ3r54HyA6sExp3HfXq7QWEEY/xMw= @@ -131,8 +119,6 @@ github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0= github.com/go-ole/go-ole v1.3.0 h1:Dt6ye7+vXGIKZ7Xtk4s6/xVdGDQynvom7xCFEdWr6uE= github.com/go-ole/go-ole v1.3.0/go.mod h1:5LS6F96DhAwUc7C+1HLexzMXY1xGRSryjyPPKW6zv78= -github.com/go-sourcemap/sourcemap v2.1.3+incompatible h1:W1iEw64niKVGogNgBN3ePyLFfuisuzeidWPMPWmECqU= -github.com/go-sourcemap/sourcemap v2.1.3+incompatible/go.mod h1:F8jJfvm2KbVjc5NqelyYJmf/v5J0dwNLS2mL4sNA1Jg= github.com/go-sql-driver/mysql v1.8.1 h1:LedoTUt/eveggdHS9qUFC1EFSa8bU2+1pZjSRpvNJ1Y= github.com/go-sql-driver/mysql v1.8.1/go.mod h1:wEBSXgmK//2ZFJyE+qWnIsVGmvmEKlqwuVSjsCm7DZg= github.com/go-task/slim-sprig/v3 v3.0.0 h1:sUs3vkvUymDpBKi3qH1YSqBQk9+9D/8M2mN1vB6EwHI= @@ -230,8 +216,6 @@ github.com/lufia/plan9stats v0.0.0-20240909124753-873cd0166683 h1:7UMa6KCCMjZEMD github.com/lufia/plan9stats v0.0.0-20240909124753-873cd0166683/go.mod h1:ilwx/Dta8jXAgpFYFvSWEMwxmbWXyiUHkd5FwyKhb5k= github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= -github.com/maruel/natural v1.1.1 h1:Hja7XhhmvEFhcByqDoHz9QZbkWey+COd9xWfCfn1ioo= -github.com/maruel/natural v1.1.1/go.mod h1:v+Rfd79xlw1AgVBjbO0BEQmptqb5HvL/k9GRHB7ZKEg= github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= github.com/mattn/go-isatty v0.0.20 
h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= @@ -315,14 +299,6 @@ github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= -github.com/tidwall/gjson v1.17.1 h1:wlYEnwqAHgzmhNUFfw7Xalt2JzQvsMx2Se4PcoFCT/U= -github.com/tidwall/gjson v1.17.1/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= -github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA= -github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM= -github.com/tidwall/pretty v1.2.1 h1:qjsOFOWWQl+N3RsoF5/ssm1pHmJJwhjlSbZ51I6wMl4= -github.com/tidwall/pretty v1.2.1/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= -github.com/tidwall/sjson v1.2.5 h1:kLy8mja+1c9jlljvWTlSazM7cKDRfJuR/bOJhcY5NcY= -github.com/tidwall/sjson v1.2.5/go.mod h1:Fvgq9kS/6ociJEDnK0Fk1cpYF4FIW6ZF7LAe+6jwd28= github.com/tklauser/go-sysconf v0.3.14 h1:g5vzr9iPFFz24v2KZXs/pvpvh8/V9Fw6vQK5ZZb78yU= github.com/tklauser/go-sysconf v0.3.14/go.mod h1:1ym4lWMLUOhuBOPGtRcJm7tEGX4SCYNEEEtghGG/8uY= github.com/tklauser/numcpus v0.9.0 h1:lmyCHtANi8aRUgkckBgoDk1nHCux3n2cgkJLXdQGPDo= @@ -426,8 +402,6 @@ golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5y golang.org/x/crypto v0.6.0/go.mod h1:OFC/31mSvZgRz0V1QTNCzfAI1aIRzbiufJtkMIlEp58= golang.org/x/crypto v0.28.0 h1:GBDwsMXVQi34v5CCYUm2jkJvu4cbtru2U4TN2PSyQnw= golang.org/x/crypto v0.28.0/go.mod h1:rmgy+3RHxRZMyY0jjAJShp2zgEdOqj2AO7U0pYmeQ7U= -golang.org/x/exp v0.0.0-20240909161429-701f63a606c0 h1:e66Fs6Z+fZTbFBAxKfP3PALWBtpfqks2bwGcexMxgtk= -golang.org/x/exp v0.0.0-20240909161429-701f63a606c0/go.mod h1:2TbTHSBQa924w8M6Xs1QcRcFwyucIwBGpK1p2f1YFFY= golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= 
golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= diff --git a/internal/storage/bucket/bucket.go b/internal/storage/bucket/bucket.go index 73777c500..866d5a2c2 100644 --- a/internal/storage/bucket/bucket.go +++ b/internal/storage/bucket/bucket.go @@ -19,7 +19,7 @@ type Bucket struct { } func (b *Bucket) Migrate(ctx context.Context, tracer trace.Tracer) error { - return Migrate(ctx, tracer, b.db, b.name) + return migrate(ctx, tracer, b.db, b.name) } func (b *Bucket) IsUpToDate(ctx context.Context) (bool, error) { diff --git a/internal/storage/bucket/migrations.go b/internal/storage/bucket/migrations.go index 356530be3..f8937e0dc 100644 --- a/internal/storage/bucket/migrations.go +++ b/internal/storage/bucket/migrations.go @@ -5,7 +5,6 @@ import ( "context" "embed" "fmt" - "github.com/formancehq/go-libs/v2/logging" "github.com/formancehq/go-libs/v2/migrations" "github.com/ghodss/yaml" "github.com/uptrace/bun" @@ -19,7 +18,7 @@ import ( ) //go:embed migrations -var migrationsDir embed.FS +var MigrationsFS embed.FS func GetMigrator(name string) *migrations.Migrator { migrator := migrations.NewMigrator(migrations.WithSchema(name, true)) @@ -32,19 +31,46 @@ func GetMigrator(name string) *migrations.Migrator { return migrator } -func Migrate(ctx context.Context, tracer trace.Tracer, db bun.IDB, name string) error { +func migrate(ctx context.Context, tracer trace.Tracer, db bun.IDB, name string) error { ctx, span := tracer.Start(ctx, "Migrate bucket") defer span.End() return GetMigrator(name).Up(ctx, db) } -type Notes struct { +type notes struct { Name string `yaml:"name"` } func collectMigrations(name string) ([]migrations.Migration, error) { - entries, err := migrationsDir.ReadDir("migrations") + return WalkMigrations(func(entry fs.DirEntry) (*migrations.Migration, error) { + rawNotes, err := 
MigrationsFS.ReadFile(filepath.Join("migrations", entry.Name(), "notes.yaml")) + if err != nil { + return nil, fmt.Errorf("failed to read notes.yaml: %w", err) + } + + notes := ¬es{} + if err := yaml.Unmarshal(rawNotes, notes); err != nil { + return nil, fmt.Errorf("failed to unmarshal notes.yaml: %w", err) + } + + sqlFile, err := TemplateSQLFile(name, entry.Name(), "up.sql") + if err != nil { + return nil, fmt.Errorf("failed to template sql file: %w", err) + } + + return &migrations.Migration{ + Name: notes.Name, + Up: func(ctx context.Context, db bun.IDB) error { + _, err := db.ExecContext(ctx, sqlFile) + return err + }, + }, nil + }) +} + +func WalkMigrations[T any](transformer func(entry fs.DirEntry) (*T, error)) ([]T, error) { + entries, err := MigrationsFS.ReadDir("migrations") if err != nil { return nil, err } @@ -65,42 +91,33 @@ func collectMigrations(name string) ([]migrations.Migration, error) { return int(fileAVersion - fileBVersion) }) - ret := make([]migrations.Migration, len(entries)) + ret := make([]T, len(entries)) for i, entry := range entries { - rawNotes, err := migrationsDir.ReadFile(filepath.Join("migrations", entry.Name(), "notes.yaml")) + transformed, err := transformer(entry) if err != nil { - return nil, fmt.Errorf("failed to read notes.yaml: %w", err) - } - - notes := &Notes{} - if err := yaml.Unmarshal(rawNotes, notes); err != nil { - return nil, fmt.Errorf("failed to unmarshal notes.yaml: %w", err) + return nil, fmt.Errorf("failed to transform entry: %w", err) } + ret[i] = *transformed + } - rawSQL, err := migrationsDir.ReadFile(filepath.Join("migrations", entry.Name(), "migration.sql")) - if err != nil { - return nil, fmt.Errorf("failed to read migration.sql: %w", err) - } + return ret, nil +} - buf := bytes.NewBuffer(nil) - err = template.Must(template.New("migration"). - Parse(string(rawSQL))). 
- Execute(buf, map[string]any{ - "Bucket": name, - }) - if err != nil { - return nil, fmt.Errorf("failed to execute template: %w", err) - } +func TemplateSQLFile(bucket, migrationDir, file string) (string, error) { + rawSQL, err := MigrationsFS.ReadFile(filepath.Join("migrations", migrationDir, file)) + if err != nil { + return "", fmt.Errorf("failed to read file %s: %w", file, err) + } - ret[i] = migrations.Migration{ - Name: notes.Name, - Up: func(ctx context.Context, db bun.IDB) error { - logging.FromContext(ctx).Infof("Applying migration %s", notes.Name) - _, err := db.ExecContext(ctx, buf.String()) - return err - }, - } + buf := bytes.NewBuffer(nil) + err = template.Must(template.New("migration"). + Parse(string(rawSQL))). + Execute(buf, map[string]any{ + "Bucket": bucket, + }) + if err != nil { + panic(err) } - return ret, nil + return buf.String(), nil } diff --git a/internal/storage/bucket/migrations/0-init-schema/tests_after.sql b/internal/storage/bucket/migrations/0-init-schema/tests_after.sql new file mode 100644 index 000000000..308468b71 --- /dev/null +++ b/internal/storage/bucket/migrations/0-init-schema/tests_after.sql @@ -0,0 +1,43 @@ +set search_path = '{{.Bucket}}'; + +create extension if not exists "uuid-ossp"; + +insert into logs(seq, ledger, id, type, date, data, hash) +select + seq, + 'ledger' || seq % 5, + (seq/5) + (seq % 5), + 'NEW_TRANSACTION', + now(), + ('{' + '"transaction": {' + '"id": ' || (seq/5) + (seq % 5) || ',' + '"timestamp": "' || now() || '",' + '"postings": [' + '{' + '"source": "world",' + '"destination": "orders:' || seq || '",' + '"asset": "USD",' + '"amount": 100' + '},' + '{' + '"destination": "fees",' + '"source": "orders:' || seq || '",' + '"asset": "USD",' + '"amount": 1' + '},' + '{' + '"destination": "sellers:' || (seq % 5) || '",' + '"source": "orders:' || seq || '",' + '"asset": "USD",' + '"amount": 99' + '}' + '],' + '"metadata": { "tax": "1%" }' + '},' + '"accountMetadata": {' + '"orders:' || seq || '": { "tax": 
"1%" }' + '}' + '}')::jsonb, + 'invalid-hash' +from generate_series(0, 100) as seq; \ No newline at end of file diff --git a/internal/storage/bucket/migrations/0-init-schema/migration.sql b/internal/storage/bucket/migrations/0-init-schema/up.sql similarity index 100% rename from internal/storage/bucket/migrations/0-init-schema/migration.sql rename to internal/storage/bucket/migrations/0-init-schema/up.sql diff --git a/internal/storage/bucket/migrations/1-fix-trigger/migration.sql b/internal/storage/bucket/migrations/1-fix-trigger/up.sql similarity index 100% rename from internal/storage/bucket/migrations/1-fix-trigger/migration.sql rename to internal/storage/bucket/migrations/1-fix-trigger/up.sql diff --git a/internal/storage/bucket/migrations/10-fillfactor-on-moves/migration.sql b/internal/storage/bucket/migrations/10-fillfactor-on-moves/up.sql similarity index 100% rename from internal/storage/bucket/migrations/10-fillfactor-on-moves/migration.sql rename to internal/storage/bucket/migrations/10-fillfactor-on-moves/up.sql diff --git a/internal/storage/bucket/migrations/11-make-stateless/migration.sql b/internal/storage/bucket/migrations/11-make-stateless/up.sql similarity index 100% rename from internal/storage/bucket/migrations/11-make-stateless/migration.sql rename to internal/storage/bucket/migrations/11-make-stateless/up.sql diff --git a/internal/storage/bucket/migrations/12-moves-fill-transaction-id/notes.yaml b/internal/storage/bucket/migrations/12-moves-fill-transaction-id/notes.yaml new file mode 100644 index 000000000..4e7ed8eef --- /dev/null +++ b/internal/storage/bucket/migrations/12-moves-fill-transaction-id/notes.yaml @@ -0,0 +1 @@ +name: Fill transaction ids of table moves diff --git a/internal/storage/bucket/migrations/12-moves-fill-transaction-id/tests_after.sql b/internal/storage/bucket/migrations/12-moves-fill-transaction-id/tests_after.sql new file mode 100644 index 000000000..6853e9309 --- /dev/null +++ 
b/internal/storage/bucket/migrations/12-moves-fill-transaction-id/tests_after.sql @@ -0,0 +1,10 @@ +set search_path = '{{.Bucket}}'; + +do $$ +begin + assert ( + select count(*) + from moves + where transactions_id is null + ) = 0, 'Still some rows with null transactions_id'; +end$$; \ No newline at end of file diff --git a/internal/storage/bucket/migrations/12-moves-fill-transaction-id/tests_before.sql b/internal/storage/bucket/migrations/12-moves-fill-transaction-id/tests_before.sql new file mode 100644 index 000000000..fbdbd7e88 --- /dev/null +++ b/internal/storage/bucket/migrations/12-moves-fill-transaction-id/tests_before.sql @@ -0,0 +1,10 @@ +set search_path = '{{.Bucket}}'; + +do $$ +begin + assert ( + select count(*) + from moves + where transactions_id is null + ) > 0, 'Should have some transactions with null transactions_id'; +end$$; \ No newline at end of file diff --git a/internal/storage/bucket/migrations/12-moves-fill-transaction-id/up.sql b/internal/storage/bucket/migrations/12-moves-fill-transaction-id/up.sql new file mode 100644 index 000000000..4305015ca --- /dev/null +++ b/internal/storage/bucket/migrations/12-moves-fill-transaction-id/up.sql @@ -0,0 +1,29 @@ +set search_path = '{{.Bucket}}'; + +do $$ + declare + _batch_size integer := 30; + begin + loop + with _outdated_moves as ( + select * + from moves + where transactions_id is null + limit _batch_size + ) + update moves + set transactions_id = ( + select id + from transactions + where seq = moves.transactions_seq + ) + from _outdated_moves + where moves.seq in (_outdated_moves.seq); + + exit when not found; + end loop; + end +$$; + +alter table moves +alter column transactions_id set not null; \ No newline at end of file diff --git a/internal/storage/bucket/migrations/13-transactions-fill-inserted-at/notes.yaml b/internal/storage/bucket/migrations/13-transactions-fill-inserted-at/notes.yaml new file mode 100644 index 000000000..69c43fb23 --- /dev/null +++ 
b/internal/storage/bucket/migrations/13-transactions-fill-inserted-at/notes.yaml @@ -0,0 +1 @@ +name: Fill inserted_at column of transactions table diff --git a/internal/storage/bucket/migrations/13-transactions-fill-inserted-at/tests_after.sql b/internal/storage/bucket/migrations/13-transactions-fill-inserted-at/tests_after.sql new file mode 100644 index 000000000..097d689ed --- /dev/null +++ b/internal/storage/bucket/migrations/13-transactions-fill-inserted-at/tests_after.sql @@ -0,0 +1,10 @@ +set search_path = '{{.Bucket}}'; + +do $$ +begin + assert ( + select bool_and(log.date = transactions.inserted_at) + from logs log + join transactions on transactions.id = log.id + ), 'Insertion dates of logs and transactions should not match'; +end$$; \ No newline at end of file diff --git a/internal/storage/bucket/migrations/13-transactions-fill-inserted-at/tests_before.sql b/internal/storage/bucket/migrations/13-transactions-fill-inserted-at/tests_before.sql new file mode 100644 index 000000000..e26e8a01a --- /dev/null +++ b/internal/storage/bucket/migrations/13-transactions-fill-inserted-at/tests_before.sql @@ -0,0 +1,10 @@ +set search_path = '{{.Bucket}}'; + +do $$ +begin + assert ( + select not bool_and(log.date = transactions.inserted_at) + from logs log + join transactions on transactions.id = log.id + ), 'Insertion dates of logs and transactions should match'; +end$$; \ No newline at end of file diff --git a/internal/storage/bucket/migrations/13-transactions-fill-inserted-at/up.sql b/internal/storage/bucket/migrations/13-transactions-fill-inserted-at/up.sql new file mode 100644 index 000000000..79349e4e8 --- /dev/null +++ b/internal/storage/bucket/migrations/13-transactions-fill-inserted-at/up.sql @@ -0,0 +1,31 @@ +set search_path = '{{.Bucket}}'; + +do $$ + declare + _batch_size integer := 30; + -- select the date where the "11-make-stateless" migration has been applied + _date timestamp without time zone = ( + select tstamp + from _system.goose_db_version + where 
version_id = 11 + ); + _count integer = ( + select count(*) + from logs + where date <= _date + ); + begin + for i in 0.._count by _batch_size loop + update transactions + set inserted_at = ( + select date + from logs + where transactions.id = (data->'transaction'->>'id')::bigint and transactions.ledger = ledger + ) + where id >= i and id < i + _batch_size; + end loop; + end +$$; + +alter table moves +alter column transactions_id set not null; \ No newline at end of file diff --git a/internal/storage/bucket/migrations/14-transactions-fill-pcv/notes.yaml b/internal/storage/bucket/migrations/14-transactions-fill-pcv/notes.yaml new file mode 100644 index 000000000..4a8274783 --- /dev/null +++ b/internal/storage/bucket/migrations/14-transactions-fill-pcv/notes.yaml @@ -0,0 +1 @@ +name: Fill post_commit_volumes column of transactions table diff --git a/internal/storage/bucket/migrations/14-transactions-fill-pcv/tests_after.sql b/internal/storage/bucket/migrations/14-transactions-fill-pcv/tests_after.sql new file mode 100644 index 000000000..a1ee71115 --- /dev/null +++ b/internal/storage/bucket/migrations/14-transactions-fill-pcv/tests_after.sql @@ -0,0 +1,40 @@ +set search_path = '{{.Bucket}}'; + +do $$ +begin + assert ( + select count(*) + from transactions + where post_commit_volumes is null + ) = 0, 'Post commit volumes should be set on all transactions'; + assert ( + select post_commit_volumes + from transactions + where ledger = 'ledger0' and id = 1 + ) = ('{' + '"fees": {' + '"USD": {' + '"inputs": 2, ' + '"outputs": 0' + '}' + '}, ' + '"world": {' + '"USD": {' + '"inputs": 0, ' + '"outputs": 200' + '}' + '}, ' + '"orders:5": {' + '"USD": {' + '"inputs": 100, ' + '"outputs": 100' + '}' + '}, ' + '"sellers:0": {' + '"USD": {' + '"inputs": 198, ' + '"outputs": 0' + '}' + '}' + '}')::jsonb, 'Post commit volumes should be correct'; +end$$; \ No newline at end of file diff --git a/internal/storage/bucket/migrations/14-transactions-fill-pcv/tests_before.sql 
b/internal/storage/bucket/migrations/14-transactions-fill-pcv/tests_before.sql new file mode 100644 index 000000000..9b29e117d --- /dev/null +++ b/internal/storage/bucket/migrations/14-transactions-fill-pcv/tests_before.sql @@ -0,0 +1,10 @@ +set search_path = '{{.Bucket}}'; + +do $$ +begin + assert ( + select count(*) + from transactions + where post_commit_volumes is null + ) > 0, 'Post commit volumes should be null on all transactions'; +end$$; \ No newline at end of file diff --git a/internal/storage/bucket/migrations/14-transactions-fill-pcv/up.sql b/internal/storage/bucket/migrations/14-transactions-fill-pcv/up.sql new file mode 100644 index 000000000..0e44e9e42 --- /dev/null +++ b/internal/storage/bucket/migrations/14-transactions-fill-pcv/up.sql @@ -0,0 +1,44 @@ +set search_path = '{{.Bucket}}'; + +do $$ + declare + _batch_size integer := 30; + begin + loop + with _outdated_transactions as ( + select id + from transactions + where post_commit_volumes is null + limit _batch_size + ) + update transactions + set post_commit_volumes = ( + select public.aggregate_objects(post_commit_volumes::jsonb) as post_commit_volumes + from ( + select accounts_address, json_build_object(accounts_address, post_commit_volumes) post_commit_volumes + from ( + select accounts_address, json_build_object(asset, post_commit_volumes) as post_commit_volumes + from ( + select distinct on (accounts_address, asset) + accounts_address, + asset, + first_value(post_commit_volumes) over ( + partition by accounts_address, asset + order by seq desc + ) as post_commit_volumes + from moves + where transactions_id = transactions.id and ledger = transactions.ledger + ) moves + ) values + ) values + ) + from _outdated_transactions + where transactions.id in (_outdated_transactions.id); + + exit when not found; + end loop; + end +$$; + +alter table transactions +alter column post_commit_volumes set not null ; \ No newline at end of file diff --git 
a/internal/storage/bucket/migrations/15-accounts-volumes-fill-history/notes.yaml b/internal/storage/bucket/migrations/15-accounts-volumes-fill-history/notes.yaml new file mode 100644 index 000000000..35624b619 --- /dev/null +++ b/internal/storage/bucket/migrations/15-accounts-volumes-fill-history/notes.yaml @@ -0,0 +1 @@ +name: Populate accounts_volumes table with historic data diff --git a/internal/storage/bucket/migrations/15-accounts-volumes-fill-history/tests_after.sql b/internal/storage/bucket/migrations/15-accounts-volumes-fill-history/tests_after.sql new file mode 100644 index 000000000..fdb625fee --- /dev/null +++ b/internal/storage/bucket/migrations/15-accounts-volumes-fill-history/tests_after.sql @@ -0,0 +1,26 @@ +set search_path = '{{.Bucket}}'; + +do $$ +begin + assert ( + select count(*) + from ( + select distinct on (ledger, accounts_address, asset) + ledger, + accounts_address, + asset, + first_value(post_commit_volumes) over ( + partition by ledger, accounts_address, asset + order by seq desc + ) as post_commit_volumes + from moves + where not exists( + select + from accounts_volumes + where ledger = moves.ledger + and asset = moves.asset + and accounts_address = moves.accounts_address + ) + ) v + ) = 0, 'All accounts volumes should be ok'; +end$$; \ No newline at end of file diff --git a/internal/storage/bucket/migrations/15-accounts-volumes-fill-history/tests_before.sql b/internal/storage/bucket/migrations/15-accounts-volumes-fill-history/tests_before.sql new file mode 100644 index 000000000..ffd42520a --- /dev/null +++ b/internal/storage/bucket/migrations/15-accounts-volumes-fill-history/tests_before.sql @@ -0,0 +1,26 @@ +set search_path = '{{.Bucket}}'; + +do $$ +begin + assert ( + select count(*) + from ( + select distinct on (ledger, accounts_address, asset) + ledger, + accounts_address, + asset, + first_value(post_commit_volumes) over ( + partition by ledger, accounts_address, asset + order by seq desc + ) as post_commit_volumes + from moves 
+ where not exists( + select + from accounts_volumes + where ledger = moves.ledger + and asset = moves.asset + and accounts_address = moves.accounts_address + ) + ) v + ) > 0, 'Some accounts volumes should not be set'; +end$$; \ No newline at end of file diff --git a/internal/storage/bucket/migrations/15-accounts-volumes-fill-history/up.sql b/internal/storage/bucket/migrations/15-accounts-volumes-fill-history/up.sql new file mode 100644 index 000000000..5f1b4efd0 --- /dev/null +++ b/internal/storage/bucket/migrations/15-accounts-volumes-fill-history/up.sql @@ -0,0 +1,40 @@ +set search_path = '{{.Bucket}}'; + +do $$ + declare + _missing record; + begin + loop + select distinct on (ledger, accounts_address, asset) + ledger, + accounts_address, + asset, + first_value(post_commit_volumes) over ( + partition by ledger, accounts_address, asset + order by seq desc + ) as post_commit_volumes + into _missing + from moves + where not exists( + select + from accounts_volumes + where ledger = moves.ledger + and asset = moves.asset + and accounts_address = moves.accounts_address + ) + limit 1; + + exit when not found; + + insert into accounts_volumes (ledger, accounts_address, asset, input, output) + values ( + _missing.ledger, + _missing.accounts_address, + _missing.asset, + (_missing.post_commit_volumes).inputs, + (_missing.post_commit_volumes).outputs + ) + on conflict do nothing; -- can be inserted by a concurrent transaction + end loop; + end +$$; \ No newline at end of file diff --git a/internal/storage/bucket/migrations/16-transactions-metadata-fill-transaction-id/notes.yaml b/internal/storage/bucket/migrations/16-transactions-metadata-fill-transaction-id/notes.yaml new file mode 100644 index 000000000..449dcfd17 --- /dev/null +++ b/internal/storage/bucket/migrations/16-transactions-metadata-fill-transaction-id/notes.yaml @@ -0,0 +1 @@ +name: Fill transactions_id column of transactions_metadata table diff --git
a/internal/storage/bucket/migrations/16-transactions-metadata-fill-transaction-id/tests_after.sql b/internal/storage/bucket/migrations/16-transactions-metadata-fill-transaction-id/tests_after.sql new file mode 100644 index 000000000..8edfaa5f2 --- /dev/null +++ b/internal/storage/bucket/migrations/16-transactions-metadata-fill-transaction-id/tests_after.sql @@ -0,0 +1,10 @@ +set search_path = '{{.Bucket}}'; + +do $$ +begin + assert ( + select count(*) + from transactions_metadata + where transactions_id is null + ) = 0, 'Transactions ids on transactions_metadata table should not be null'; +end$$; \ No newline at end of file diff --git a/internal/storage/bucket/migrations/16-transactions-metadata-fill-transaction-id/tests_before.sql b/internal/storage/bucket/migrations/16-transactions-metadata-fill-transaction-id/tests_before.sql new file mode 100644 index 000000000..dc19c16a9 --- /dev/null +++ b/internal/storage/bucket/migrations/16-transactions-metadata-fill-transaction-id/tests_before.sql @@ -0,0 +1,10 @@ +set search_path = '{{.Bucket}}'; + +do $$ +begin + assert ( + select count(*) + from transactions_metadata + where transactions_id is null + ) > 0, 'Transactions ids of transactions_metadata table should be null'; +end$$; \ No newline at end of file diff --git a/internal/storage/bucket/migrations/16-transactions-metadata-fill-transaction-id/up.sql b/internal/storage/bucket/migrations/16-transactions-metadata-fill-transaction-id/up.sql new file mode 100644 index 000000000..eb211aaf0 --- /dev/null +++ b/internal/storage/bucket/migrations/16-transactions-metadata-fill-transaction-id/up.sql @@ -0,0 +1,29 @@ +set search_path = '{{.Bucket}}'; + +do $$ + declare + _batch_size integer := 30; + begin + loop + with _outdated_transactions_metadata as ( + select seq + from transactions_metadata + where transactions_id is null + limit _batch_size + ) + update transactions_metadata + set transactions_id = ( + select id + from transactions + where 
transactions_metadata.transactions_seq = seq + ) + from _outdated_transactions_metadata + where transactions_metadata.seq in (_outdated_transactions_metadata.seq); + + exit when not found; + end loop; + end +$$; + +alter table transactions_metadata +alter column transactions_id set not null ; \ No newline at end of file diff --git a/internal/storage/bucket/migrations/17-accounts-metadata-fill-address/notes.yaml b/internal/storage/bucket/migrations/17-accounts-metadata-fill-address/notes.yaml new file mode 100644 index 000000000..f599539a8 --- /dev/null +++ b/internal/storage/bucket/migrations/17-accounts-metadata-fill-address/notes.yaml @@ -0,0 +1 @@ +name: Fill accounts_address column of accounts_metadata table diff --git a/internal/storage/bucket/migrations/17-accounts-metadata-fill-address/tests_after.sql b/internal/storage/bucket/migrations/17-accounts-metadata-fill-address/tests_after.sql new file mode 100644 index 000000000..cfd59a1e1 --- /dev/null +++ b/internal/storage/bucket/migrations/17-accounts-metadata-fill-address/tests_after.sql @@ -0,0 +1,10 @@ +set search_path = '{{.Bucket}}'; + +do $$ +begin + assert ( + select count(*) + from accounts_metadata + where accounts_address is null + ) = 0, 'Account addresses on accounts_metadata table should not be null'; +end$$; \ No newline at end of file diff --git a/internal/storage/bucket/migrations/17-accounts-metadata-fill-address/tests_before.sql b/internal/storage/bucket/migrations/17-accounts-metadata-fill-address/tests_before.sql new file mode 100644 index 000000000..fa1c7ea30 --- /dev/null +++ b/internal/storage/bucket/migrations/17-accounts-metadata-fill-address/tests_before.sql @@ -0,0 +1,10 @@ +set search_path = '{{.Bucket}}'; + +do $$ +begin + assert ( + select count(*) + from accounts_metadata + where accounts_address is null + ) > 0, 'Account addresses of accounts_metadata table should be null'; +end$$; \ No newline at end of file diff --git 
a/internal/storage/bucket/migrations/17-accounts-metadata-fill-address/up.sql b/internal/storage/bucket/migrations/17-accounts-metadata-fill-address/up.sql new file mode 100644 index 000000000..3e80405ab --- /dev/null +++ b/internal/storage/bucket/migrations/17-accounts-metadata-fill-address/up.sql @@ -0,0 +1,29 @@ +set search_path = '{{.Bucket}}'; + +do $$ + declare + _batch_size integer := 30; + begin + loop + with _outdated_accounts_metadata as ( + select seq + from accounts_metadata + where accounts_address is null + limit _batch_size + ) + update accounts_metadata + set accounts_address = ( + select address + from accounts + where accounts_metadata.accounts_seq = seq + ) + from _outdated_accounts_metadata + where accounts_metadata.seq in (_outdated_accounts_metadata.seq); + + exit when not found; + end loop; + end +$$; + +alter table accounts_metadata +alter column accounts_address set not null ; \ No newline at end of file diff --git a/internal/storage/bucket/migrations/18-logs-fill-memento/notes.yaml b/internal/storage/bucket/migrations/18-logs-fill-memento/notes.yaml new file mode 100644 index 000000000..1f7fd9415 --- /dev/null +++ b/internal/storage/bucket/migrations/18-logs-fill-memento/notes.yaml @@ -0,0 +1 @@ +name: Fill memento column of logs table diff --git a/internal/storage/bucket/migrations/18-logs-fill-memento/tests_after.sql b/internal/storage/bucket/migrations/18-logs-fill-memento/tests_after.sql new file mode 100644 index 000000000..b8f4d0f0e --- /dev/null +++ b/internal/storage/bucket/migrations/18-logs-fill-memento/tests_after.sql @@ -0,0 +1,10 @@ +set search_path = '{{.Bucket}}'; + +do $$ +begin + assert ( + select count(*) + from logs + where memento is null + ) = 0, 'Mementos of logs table should not be null'; +end$$; \ No newline at end of file diff --git a/internal/storage/bucket/migrations/18-logs-fill-memento/tests_before.sql b/internal/storage/bucket/migrations/18-logs-fill-memento/tests_before.sql new file mode 100644 index
000000000..f4bb108ad --- /dev/null +++ b/internal/storage/bucket/migrations/18-logs-fill-memento/tests_before.sql @@ -0,0 +1,10 @@ +set search_path = '{{.Bucket}}'; + +do $$ +begin + assert ( + select count(*) + from logs + where memento is null + ) > 0, 'Mementos of logs table should be null'; +end$$; \ No newline at end of file diff --git a/internal/storage/bucket/migrations/18-logs-fill-memento/up.sql b/internal/storage/bucket/migrations/18-logs-fill-memento/up.sql new file mode 100644 index 000000000..2884998b7 --- /dev/null +++ b/internal/storage/bucket/migrations/18-logs-fill-memento/up.sql @@ -0,0 +1,25 @@ +set search_path = '{{.Bucket}}'; + +do $$ + declare + _batch_size integer := 30; + begin + loop + with _outdated_logs as ( + select seq + from logs + where memento is null + limit _batch_size + ) + update logs + set memento = convert_to(data::varchar, 'LATIN1')::bytea + from _outdated_logs + where logs.seq in (_outdated_logs.seq); + + exit when not found; + end loop; + end +$$; + +alter table logs +alter column memento set not null; \ No newline at end of file diff --git a/internal/storage/bucket/migrations/2-fix-volumes-aggregation/migration.sql b/internal/storage/bucket/migrations/2-fix-volumes-aggregation/up.sql similarity index 100% rename from internal/storage/bucket/migrations/2-fix-volumes-aggregation/migration.sql rename to internal/storage/bucket/migrations/2-fix-volumes-aggregation/up.sql diff --git a/internal/storage/bucket/migrations/3-fix-trigger-inserting-backdated-transactions/migration.sql b/internal/storage/bucket/migrations/3-fix-trigger-inserting-backdated-transactions/up.sql similarity index 100% rename from internal/storage/bucket/migrations/3-fix-trigger-inserting-backdated-transactions/migration.sql rename to internal/storage/bucket/migrations/3-fix-trigger-inserting-backdated-transactions/up.sql diff --git a/internal/storage/bucket/migrations/4-add-account-first-usage-column/migration.sql 
b/internal/storage/bucket/migrations/4-add-account-first-usage-column/up.sql similarity index 100% rename from internal/storage/bucket/migrations/4-add-account-first-usage-column/migration.sql rename to internal/storage/bucket/migrations/4-add-account-first-usage-column/up.sql diff --git a/internal/storage/bucket/migrations/5-add-idempotency-key-index/migration.sql b/internal/storage/bucket/migrations/5-add-idempotency-key-index/up.sql similarity index 100% rename from internal/storage/bucket/migrations/5-add-idempotency-key-index/migration.sql rename to internal/storage/bucket/migrations/5-add-idempotency-key-index/up.sql diff --git a/internal/storage/bucket/migrations/6-add-reference-index/migration.sql b/internal/storage/bucket/migrations/6-add-reference-index/up.sql similarity index 100% rename from internal/storage/bucket/migrations/6-add-reference-index/migration.sql rename to internal/storage/bucket/migrations/6-add-reference-index/up.sql diff --git a/internal/storage/bucket/migrations/7-add-ik-unique-index/migration.sql b/internal/storage/bucket/migrations/7-add-ik-unique-index/up.sql similarity index 100% rename from internal/storage/bucket/migrations/7-add-ik-unique-index/migration.sql rename to internal/storage/bucket/migrations/7-add-ik-unique-index/up.sql diff --git a/internal/storage/bucket/migrations/8-ik-ledger-unique-index/migration.sql b/internal/storage/bucket/migrations/8-ik-ledger-unique-index/up.sql similarity index 100% rename from internal/storage/bucket/migrations/8-ik-ledger-unique-index/migration.sql rename to internal/storage/bucket/migrations/8-ik-ledger-unique-index/up.sql diff --git a/internal/storage/bucket/migrations/9-fix-incorrect-volumes-aggregation/migration.sql b/internal/storage/bucket/migrations/9-fix-incorrect-volumes-aggregation/up.sql similarity index 100% rename from internal/storage/bucket/migrations/9-fix-incorrect-volumes-aggregation/migration.sql rename to 
internal/storage/bucket/migrations/9-fix-incorrect-volumes-aggregation/up.sql diff --git a/internal/storage/bucket/migrations_test.go b/internal/storage/bucket/migrations_test.go new file mode 100644 index 000000000..02df0030b --- /dev/null +++ b/internal/storage/bucket/migrations_test.go @@ -0,0 +1,82 @@ +//go:build it + +package bucket_test + +import ( + "errors" + "github.com/formancehq/go-libs/v2/bun/bunconnect" + "github.com/formancehq/go-libs/v2/logging" + "github.com/formancehq/go-libs/v2/migrations" + "github.com/formancehq/go-libs/v2/pointer" + "github.com/formancehq/ledger/internal/storage/bucket" + "github.com/formancehq/ledger/internal/storage/driver" + "github.com/google/uuid" + _ "github.com/jackc/pgx/v5/stdlib" + "github.com/stretchr/testify/require" + "github.com/uptrace/bun/extra/bundebug" + "io/fs" + "testing" +) + +func TestMigrations(t *testing.T) { + t.Parallel() + + ctx := logging.TestingContext() + pgDatabase := srv.NewDatabase(t) + db, err := bunconnect.OpenSQLDB(ctx, pgDatabase.ConnectionOptions()) + require.NoError(t, err) + + require.NoError(t, driver.Migrate(ctx, db)) + if testing.Verbose() { + db.AddQueryHook(bundebug.NewQueryHook()) + } + + bucketName := uuid.NewString()[:8] + migrator := bucket.GetMigrator(bucketName) + + _, err = bucket.WalkMigrations(func(entry fs.DirEntry) (*struct{}, error) { + before, err := bucket.TemplateSQLFile(bucketName, entry.Name(), "tests_before.sql") + if !errors.Is(err, fs.ErrNotExist) { + require.NoError(t, err) + } + if err == nil { + _, err = db.ExecContext(ctx, before) + require.NoError(t, err, "executing pre migration script: %s", entry.Name()) + } + + if err := migrator.UpByOne(ctx, db); err != nil { + if !errors.Is(err, migrations.ErrAlreadyUpToDate) { + require.Fail(t, err.Error()) + } + } + + after, err := bucket.TemplateSQLFile(bucketName, entry.Name(), "tests_after.sql") + if !errors.Is(err, fs.ErrNotExist) { + require.NoError(t, err) + } + if err == nil { + _, err = db.ExecContext(ctx, 
after) + require.NoErrorf(t, err, "executing post migration script: %s", entry.Name()) + } + + return pointer.For(struct{}{}), nil + }) + require.NoError(t, err) + + //moves := make([]map[string]any, 0) + //err = db.NewSelect(). + // ModelTableExpr(`"`+bucketName+`".moves`). + // Scan(ctx, &moves) + //require.NoError(t, err) + // + //rows, err := db.NewSelect(). + // ModelTableExpr(`"`+bucketName+`".transactions`). + // Column("seq", "id", "post_commit_volumes", "ledger"). + // Order("id desc"). + // Where("ledger = 'ledger0'"). + // Rows(ctx) + //require.NoError(t, err) + // + //data, _ := xsql.Pretty(rows) + //fmt.Println(data) +} From 1280a887321a73023e01f6c8395af720613625db Mon Sep 17 00:00:00 2001 From: Geoffrey Ragot Date: Mon, 21 Oct 2024 14:38:12 +0200 Subject: [PATCH 05/12] feat: lock schema under a specific migration number --- internal/storage/bucket/bucket.go | 14 +- .../{tests_after.sql => up_tests_after.sql} | 0 .../{tests_after.sql => up_tests_after.sql} | 0 .../{tests_before.sql => up_tests_before.sql} | 0 .../13-transactions-fill-inserted-at/up.sql | 2 +- .../{tests_after.sql => up_tests_after.sql} | 0 .../{tests_before.sql => up_tests_before.sql} | 0 .../{tests_after.sql => up_tests_after.sql} | 0 .../{tests_before.sql => up_tests_before.sql} | 0 .../{tests_after.sql => up_tests_after.sql} | 0 .../{tests_before.sql => up_tests_before.sql} | 0 .../{tests_after.sql => up_tests_after.sql} | 0 .../{tests_before.sql => up_tests_before.sql} | 0 .../{tests_after.sql => up_tests_after.sql} | 0 .../{tests_before.sql => up_tests_before.sql} | 0 .../{tests_after.sql => up_tests_after.sql} | 0 .../{tests_before.sql => up_tests_before.sql} | 0 internal/storage/bucket/migrations_test.go | 21 +- internal/storage/driver/driver.go | 22 +- pkg/testserver/server.go | 5 +- test/e2e/lifecycle_test.go | 288 +++++++++++------- 21 files changed, 199 insertions(+), 153 deletions(-) rename internal/storage/bucket/migrations/0-init-schema/{tests_after.sql => 
up_tests_after.sql} (100%) rename internal/storage/bucket/migrations/12-moves-fill-transaction-id/{tests_after.sql => up_tests_after.sql} (100%) rename internal/storage/bucket/migrations/12-moves-fill-transaction-id/{tests_before.sql => up_tests_before.sql} (100%) rename internal/storage/bucket/migrations/13-transactions-fill-inserted-at/{tests_after.sql => up_tests_after.sql} (100%) rename internal/storage/bucket/migrations/13-transactions-fill-inserted-at/{tests_before.sql => up_tests_before.sql} (100%) rename internal/storage/bucket/migrations/14-transactions-fill-pcv/{tests_after.sql => up_tests_after.sql} (100%) rename internal/storage/bucket/migrations/14-transactions-fill-pcv/{tests_before.sql => up_tests_before.sql} (100%) rename internal/storage/bucket/migrations/15-accounts-volumes-fill-history/{tests_after.sql => up_tests_after.sql} (100%) rename internal/storage/bucket/migrations/15-accounts-volumes-fill-history/{tests_before.sql => up_tests_before.sql} (100%) rename internal/storage/bucket/migrations/16-transactions-metadata-fill-transaction-id/{tests_after.sql => up_tests_after.sql} (100%) rename internal/storage/bucket/migrations/16-transactions-metadata-fill-transaction-id/{tests_before.sql => up_tests_before.sql} (100%) rename internal/storage/bucket/migrations/17-accounts-metadata-fill-address/{tests_after.sql => up_tests_after.sql} (100%) rename internal/storage/bucket/migrations/17-accounts-metadata-fill-address/{tests_before.sql => up_tests_before.sql} (100%) rename internal/storage/bucket/migrations/18-logs-fill-memento/{tests_after.sql => up_tests_after.sql} (100%) rename internal/storage/bucket/migrations/18-logs-fill-memento/{tests_before.sql => up_tests_before.sql} (100%) diff --git a/internal/storage/bucket/bucket.go b/internal/storage/bucket/bucket.go index 866d5a2c2..c3f2c21cc 100644 --- a/internal/storage/bucket/bucket.go +++ b/internal/storage/bucket/bucket.go @@ -4,7 +4,6 @@ import ( "bytes" "context" _ "embed" - "errors" "fmt" 
"github.com/formancehq/go-libs/v2/migrations" ledger "github.com/formancehq/ledger/internal" @@ -13,6 +12,9 @@ import ( "text/template" ) +// stateless version (+1 regarding directory name, as migrations start from 1 in the lib) +const MinimalSchemaVersion = 12 + type Bucket struct { name string db bun.IDB @@ -23,11 +25,13 @@ func (b *Bucket) Migrate(ctx context.Context, tracer trace.Tracer) error { } func (b *Bucket) IsUpToDate(ctx context.Context) (bool, error) { - ret, err := GetMigrator(b.name).IsUpToDate(ctx, b.db) - if err != nil && errors.Is(err, migrations.ErrMissingVersionTable) { - return false, nil + migrator := GetMigrator(b.name) + lastVersion, err := migrator.GetLastVersion(ctx, b.db) + if err != nil { + return false, err } - return ret, err + + return lastVersion >= MinimalSchemaVersion, nil } func (b *Bucket) GetMigrationsInfo(ctx context.Context) ([]migrations.Info, error) { diff --git a/internal/storage/bucket/migrations/0-init-schema/tests_after.sql b/internal/storage/bucket/migrations/0-init-schema/up_tests_after.sql similarity index 100% rename from internal/storage/bucket/migrations/0-init-schema/tests_after.sql rename to internal/storage/bucket/migrations/0-init-schema/up_tests_after.sql diff --git a/internal/storage/bucket/migrations/12-moves-fill-transaction-id/tests_after.sql b/internal/storage/bucket/migrations/12-moves-fill-transaction-id/up_tests_after.sql similarity index 100% rename from internal/storage/bucket/migrations/12-moves-fill-transaction-id/tests_after.sql rename to internal/storage/bucket/migrations/12-moves-fill-transaction-id/up_tests_after.sql diff --git a/internal/storage/bucket/migrations/12-moves-fill-transaction-id/tests_before.sql b/internal/storage/bucket/migrations/12-moves-fill-transaction-id/up_tests_before.sql similarity index 100% rename from internal/storage/bucket/migrations/12-moves-fill-transaction-id/tests_before.sql rename to 
internal/storage/bucket/migrations/12-moves-fill-transaction-id/up_tests_before.sql diff --git a/internal/storage/bucket/migrations/13-transactions-fill-inserted-at/up.sql b/internal/storage/bucket/migrations/13-transactions-fill-inserted-at/up.sql index 79349e4e8..9fea51b23 100644 --- a/internal/storage/bucket/migrations/13-transactions-fill-inserted-at/up.sql +++ b/internal/storage/bucket/migrations/13-transactions-fill-inserted-at/up.sql @@ -7,7 +7,7 @@ do $$ _date timestamp without time zone = ( select tstamp from _system.goose_db_version - where version_id = 11 + where version_id = 12 ); _count integer = ( select count(*) diff --git a/internal/storage/bucket/migrations/13-transactions-fill-inserted-at/tests_after.sql b/internal/storage/bucket/migrations/13-transactions-fill-inserted-at/up_tests_after.sql similarity index 100% rename from internal/storage/bucket/migrations/13-transactions-fill-inserted-at/tests_after.sql rename to internal/storage/bucket/migrations/13-transactions-fill-inserted-at/up_tests_after.sql diff --git a/internal/storage/bucket/migrations/13-transactions-fill-inserted-at/tests_before.sql b/internal/storage/bucket/migrations/13-transactions-fill-inserted-at/up_tests_before.sql similarity index 100% rename from internal/storage/bucket/migrations/13-transactions-fill-inserted-at/tests_before.sql rename to internal/storage/bucket/migrations/13-transactions-fill-inserted-at/up_tests_before.sql diff --git a/internal/storage/bucket/migrations/14-transactions-fill-pcv/tests_after.sql b/internal/storage/bucket/migrations/14-transactions-fill-pcv/up_tests_after.sql similarity index 100% rename from internal/storage/bucket/migrations/14-transactions-fill-pcv/tests_after.sql rename to internal/storage/bucket/migrations/14-transactions-fill-pcv/up_tests_after.sql diff --git a/internal/storage/bucket/migrations/14-transactions-fill-pcv/tests_before.sql b/internal/storage/bucket/migrations/14-transactions-fill-pcv/up_tests_before.sql similarity index 
100% rename from internal/storage/bucket/migrations/14-transactions-fill-pcv/tests_before.sql rename to internal/storage/bucket/migrations/14-transactions-fill-pcv/up_tests_before.sql diff --git a/internal/storage/bucket/migrations/15-accounts-volumes-fill-history/tests_after.sql b/internal/storage/bucket/migrations/15-accounts-volumes-fill-history/up_tests_after.sql similarity index 100% rename from internal/storage/bucket/migrations/15-accounts-volumes-fill-history/tests_after.sql rename to internal/storage/bucket/migrations/15-accounts-volumes-fill-history/up_tests_after.sql diff --git a/internal/storage/bucket/migrations/15-accounts-volumes-fill-history/tests_before.sql b/internal/storage/bucket/migrations/15-accounts-volumes-fill-history/up_tests_before.sql similarity index 100% rename from internal/storage/bucket/migrations/15-accounts-volumes-fill-history/tests_before.sql rename to internal/storage/bucket/migrations/15-accounts-volumes-fill-history/up_tests_before.sql diff --git a/internal/storage/bucket/migrations/16-transactions-metadata-fill-transaction-id/tests_after.sql b/internal/storage/bucket/migrations/16-transactions-metadata-fill-transaction-id/up_tests_after.sql similarity index 100% rename from internal/storage/bucket/migrations/16-transactions-metadata-fill-transaction-id/tests_after.sql rename to internal/storage/bucket/migrations/16-transactions-metadata-fill-transaction-id/up_tests_after.sql diff --git a/internal/storage/bucket/migrations/16-transactions-metadata-fill-transaction-id/tests_before.sql b/internal/storage/bucket/migrations/16-transactions-metadata-fill-transaction-id/up_tests_before.sql similarity index 100% rename from internal/storage/bucket/migrations/16-transactions-metadata-fill-transaction-id/tests_before.sql rename to internal/storage/bucket/migrations/16-transactions-metadata-fill-transaction-id/up_tests_before.sql diff --git a/internal/storage/bucket/migrations/17-accounts-metadata-fill-address/tests_after.sql 
b/internal/storage/bucket/migrations/17-accounts-metadata-fill-address/up_tests_after.sql similarity index 100% rename from internal/storage/bucket/migrations/17-accounts-metadata-fill-address/tests_after.sql rename to internal/storage/bucket/migrations/17-accounts-metadata-fill-address/up_tests_after.sql diff --git a/internal/storage/bucket/migrations/17-accounts-metadata-fill-address/tests_before.sql b/internal/storage/bucket/migrations/17-accounts-metadata-fill-address/up_tests_before.sql similarity index 100% rename from internal/storage/bucket/migrations/17-accounts-metadata-fill-address/tests_before.sql rename to internal/storage/bucket/migrations/17-accounts-metadata-fill-address/up_tests_before.sql diff --git a/internal/storage/bucket/migrations/18-logs-fill-memento/tests_after.sql b/internal/storage/bucket/migrations/18-logs-fill-memento/up_tests_after.sql similarity index 100% rename from internal/storage/bucket/migrations/18-logs-fill-memento/tests_after.sql rename to internal/storage/bucket/migrations/18-logs-fill-memento/up_tests_after.sql diff --git a/internal/storage/bucket/migrations/18-logs-fill-memento/tests_before.sql b/internal/storage/bucket/migrations/18-logs-fill-memento/up_tests_before.sql similarity index 100% rename from internal/storage/bucket/migrations/18-logs-fill-memento/tests_before.sql rename to internal/storage/bucket/migrations/18-logs-fill-memento/up_tests_before.sql diff --git a/internal/storage/bucket/migrations_test.go b/internal/storage/bucket/migrations_test.go index 02df0030b..c9071749e 100644 --- a/internal/storage/bucket/migrations_test.go +++ b/internal/storage/bucket/migrations_test.go @@ -35,7 +35,7 @@ func TestMigrations(t *testing.T) { migrator := bucket.GetMigrator(bucketName) _, err = bucket.WalkMigrations(func(entry fs.DirEntry) (*struct{}, error) { - before, err := bucket.TemplateSQLFile(bucketName, entry.Name(), "tests_before.sql") + before, err := bucket.TemplateSQLFile(bucketName, entry.Name(), 
"up_tests_before.sql") if !errors.Is(err, fs.ErrNotExist) { require.NoError(t, err) } @@ -50,7 +50,7 @@ func TestMigrations(t *testing.T) { } } - after, err := bucket.TemplateSQLFile(bucketName, entry.Name(), "tests_after.sql") + after, err := bucket.TemplateSQLFile(bucketName, entry.Name(), "up_tests_after.sql") if !errors.Is(err, fs.ErrNotExist) { require.NoError(t, err) } @@ -62,21 +62,4 @@ func TestMigrations(t *testing.T) { return pointer.For(struct{}{}), nil }) require.NoError(t, err) - - //moves := make([]map[string]any, 0) - //err = db.NewSelect(). - // ModelTableExpr(`"`+bucketName+`".moves`). - // Scan(ctx, &moves) - //require.NoError(t, err) - // - //rows, err := db.NewSelect(). - // ModelTableExpr(`"`+bucketName+`".transactions`). - // Column("seq", "id", "post_commit_volumes", "ledger"). - // Order("id desc"). - // Where("ledger = 'ledger0'"). - // Rows(ctx) - //require.NoError(t, err) - // - //data, _ := xsql.Pretty(rows) - //fmt.Println(data) } diff --git a/internal/storage/driver/driver.go b/internal/storage/driver/driver.go index f2430cfb5..42c28f719 100644 --- a/internal/storage/driver/driver.go +++ b/internal/storage/driver/driver.go @@ -5,7 +5,6 @@ import ( "database/sql" "errors" "fmt" - "github.com/formancehq/go-libs/v2/collectionutils" "github.com/formancehq/go-libs/v2/metadata" "github.com/formancehq/go-libs/v2/platform/postgres" systemcontroller "github.com/formancehq/ledger/internal/controller/system" @@ -225,22 +224,17 @@ func (d *Driver) UpgradeBucket(ctx context.Context, name string) error { func (d *Driver) UpgradeAllBuckets(ctx context.Context) error { - buckets := collectionutils.Set[string]{} - err := bunpaginate.Iterate(ctx, ledgercontroller.NewListLedgersQuery(10), - func(ctx context.Context, q ledgercontroller.ListLedgersQuery) (*bunpaginate.Cursor[ledger.Ledger], error) { - return d.ListLedgers(ctx, q) - }, - func(cursor *bunpaginate.Cursor[ledger.Ledger]) error { - for _, l := range cursor.Data { - buckets.Put(l.Bucket) - } - 
return nil - }) + var buckets []string + err := d.db.NewSelect(). + DistinctOn("bucket"). + Model(&ledger.Ledger{}). + Column("bucket"). + Scan(ctx, &buckets) if err != nil { - return err + return fmt.Errorf("getting buckets: %w", err) } - for _, bucketName := range collectionutils.Keys(buckets) { + for _, bucketName := range buckets { b := bucket.New(d.db, bucketName) logging.FromContext(ctx).Infof("Upgrading bucket '%s'", bucketName) diff --git a/pkg/testserver/server.go b/pkg/testserver/server.go index 2162ccfc1..45b1046d0 100644 --- a/pkg/testserver/server.go +++ b/pkg/testserver/server.go @@ -45,6 +45,7 @@ type Configuration struct { Debug bool OTLPConfig *OTLPConfig ExperimentalFeatures bool + DisableAutoUpgrade bool BulkMaxSize int ExperimentalNumscriptRewrite bool } @@ -68,11 +69,13 @@ func (s *Server) Start() error { args := []string{ "serve", "--" + cmd.BindFlag, ":0", - "--" + cmd.AutoUpgradeFlag, "--" + bunconnect.PostgresURIFlag, s.configuration.PostgresConfiguration.DatabaseSourceName, "--" + bunconnect.PostgresMaxOpenConnsFlag, fmt.Sprint(s.configuration.PostgresConfiguration.MaxOpenConns), "--" + bunconnect.PostgresConnMaxIdleTimeFlag, fmt.Sprint(s.configuration.PostgresConfiguration.ConnMaxIdleTime), } + if !s.configuration.DisableAutoUpgrade { + args = append(args, "--"+cmd.AutoUpgradeFlag) + } if s.configuration.ExperimentalFeatures { args = append( args, diff --git a/test/e2e/lifecycle_test.go b/test/e2e/lifecycle_test.go index 33a38ebcb..10af57c8d 100644 --- a/test/e2e/lifecycle_test.go +++ b/test/e2e/lifecycle_test.go @@ -5,12 +5,15 @@ package test_suite import ( "context" "database/sql" + "github.com/formancehq/go-libs/v2/bun/bunconnect" "github.com/formancehq/go-libs/v2/logging" "github.com/formancehq/go-libs/v2/pointer" "github.com/formancehq/go-libs/v2/time" ledger "github.com/formancehq/ledger/internal" "github.com/formancehq/ledger/pkg/client/models/components" "github.com/formancehq/ledger/pkg/client/models/operations" + 
"github.com/formancehq/ledger/internal/storage/bucket" + "github.com/formancehq/ledger/internal/storage/driver" ledgerevents "github.com/formancehq/ledger/pkg/events" . "github.com/formancehq/ledger/pkg/testserver" "github.com/google/uuid" @@ -27,139 +30,198 @@ var _ = Context("Ledger application lifecycle tests", func() { ctx = logging.TestingContext() ) - testServer := NewTestServer(func() Configuration { - return Configuration{ - PostgresConfiguration: db.GetValue().ConnectionOptions(), - Output: GinkgoWriter, - Debug: debug, - NatsURL: natsServer.GetValue().ClientURL(), - } - }) - var events chan *nats.Msg - BeforeEach(func() { - events = Subscribe(GinkgoT(), testServer.GetValue()) - }) - - When("starting the service", func() { - It("should be ok", func() { - info, err := testServer.GetValue().Client().Ledger.GetInfo(ctx) - Expect(err).NotTo(HaveOccurred()) - Expect(info.V2ConfigInfoResponse.Version).To(Equal("develop")) - }) - }) - When("restarting the service", func() { - BeforeEach(func(ctx context.Context) { - Expect(testServer.GetValue().Restart(ctx)).To(BeNil()) + Context("Pending transaction should be fully processed before stopping or restarting the server", func() { + testServer := NewTestServer(func() Configuration { + return Configuration{ + PostgresConfiguration: db.GetValue().ConnectionOptions(), + Output: GinkgoWriter, + Debug: debug, + NatsURL: natsServer.GetValue().ClientURL(), + } }) - It("should be ok", func() {}) - }) - When("having some in flight transactions on a ledger", func() { - var ( - sqlTx bun.Tx - countTransactions = 80 - serverRestartTimeout = 10 * time.Second - ) + var events chan *nats.Msg BeforeEach(func() { - err := CreateLedger(ctx, testServer.GetValue(), operations.V2CreateLedgerRequest{ - Ledger: "foo", + events = Subscribe(GinkgoT(), testServer.GetValue()) + }) + + When("starting the service", func() { + It("should be ok", func() { + info, err := testServer.GetValue().Client().Ledger.GetInfo(ctx) + 
Expect(err).NotTo(HaveOccurred()) + Expect(info.V2ConfigInfoResponse.Version).To(Equal("develop")) }) - Expect(err).ToNot(HaveOccurred()) + }) + When("restarting the service", func() { + BeforeEach(func(ctx context.Context) { + Expect(testServer.GetValue().Restart(ctx)).To(BeNil()) + }) + It("should be ok", func() {}) + }) + When("having some in flight transactions on a ledger", func() { + var ( + sqlTx bun.Tx + countTransactions = 80 + serverRestartTimeout = 10 * time.Second + ) + BeforeEach(func() { + err := CreateLedger(ctx, testServer.GetValue(), operations.V2CreateLedgerRequest{ + Ledger: "foo", + }) + Expect(err).ToNot(HaveOccurred()) - // lock logs table to block transactions creation requests - // the first tx will block on the log insertion - // the next transaction will block earlier on advisory lock acquirement for accounts - db := ConnectToDatabase(GinkgoT(), testServer.GetValue()) - sqlTx, err = db.BeginTx(ctx, &sql.TxOptions{}) - Expect(err).To(BeNil()) - DeferCleanup(func() { - _ = sqlTx.Rollback() + // lock logs table to block transactions creation requests + // the first tx will block on the log insertion + // the next transaction will block earlier on advisory lock acquirement for accounts + db := ConnectToDatabase(GinkgoT(), testServer.GetValue()) + sqlTx, err = db.BeginTx(ctx, &sql.TxOptions{}) + Expect(err).To(BeNil()) + DeferCleanup(func() { + _ = sqlTx.Rollback() + }) + + _, err = sqlTx.NewRaw("lock table _default.logs").Exec(ctx) + Expect(err).To(BeNil()) + + // Create transactions in go routines + for i := 0; i < countTransactions; i++ { + go func() { + defer GinkgoRecover() + + _, err := CreateTransaction(ctx, testServer.GetValue(), operations.V2CreateTransactionRequest{ + Ledger: "foo", + V2PostTransaction: components.V2PostTransaction{ + Postings: []components.V2Posting{{ + Amount: big.NewInt(100), + Asset: "USD", + Destination: "bank", + Source: "world", + }}, + }, + }) + Expect(err).To(BeNil()) + }() + } + + // check postgres locks + 
Eventually(func(g Gomega) int { + count, err := db.NewSelect(). + Table("pg_stat_activity"). + Where("state <> 'idle' and pid <> pg_backend_pid()"). + Where(`query like 'INSERT INTO "_default".accounts%'`). + Count(ctx) + g.Expect(err).To(BeNil()) + return count + }). + WithTimeout(10 * time.Second). + // Once all the transactions are in pending state, we should have one lock + // for the first tx, trying to write a new log. + // And, we should also have countTransactions-1 pending lock for the 'bank' account + Should(BeNumerically("==", countTransactions-1)) // -1 for the first one }) + When("restarting the service", func() { + BeforeEach(func() { + // We will restart the server in a separate gorouting + // the server should not restart until all pending transactions creation requests are fully completed + restarted := make(chan struct{}) + go func() { + defer GinkgoRecover() + defer func() { + close(restarted) + }() + By("restart server", func() { + ctx, cancel := context.WithTimeout(ctx, serverRestartTimeout) + DeferCleanup(cancel) - _, err = sqlTx.NewRaw("lock table _default.logs").Exec(ctx) - Expect(err).To(BeNil()) + Expect(testServer.GetValue().Restart(ctx)).To(BeNil()) + }) + }() - // Create transactions in go routines - for i := 0; i < countTransactions; i++ { - go func() { - defer GinkgoRecover() + // Once the server is restarting, it should not accept any new connection + Eventually(func() error { + _, err := GetInfo(ctx, testServer.GetValue()) + return err + }).ShouldNot(BeNil()) - _, err := CreateTransaction(ctx, testServer.GetValue(), operations.V2CreateTransactionRequest{ - Ledger: "foo", - V2PostTransaction: components.V2PostTransaction{ - Postings: []components.V2Posting{{ - Amount: big.NewInt(100), - Asset: "USD", - Destination: "bank", - Source: "world", - }}, - }, + // by rollback sql transactions, we allow the blocked routines (which create transactions) to resume. 
+ By("rollback tx", func() { + _ = sqlTx.Rollback() + }) + + Eventually(restarted). + WithTimeout(serverRestartTimeout). + Should(BeClosed()) + }) + It("in flight transactions should be correctly terminated before", func() { + transactions, err := ListTransactions(ctx, testServer.GetValue(), operations.V2ListTransactionsRequest{ + Ledger: "foo", + PageSize: pointer.For(int64(countTransactions)), }) Expect(err).To(BeNil()) - }() - } + Expect(transactions.Data).To(HaveLen(countTransactions)) - // check postgres locks - Eventually(func(g Gomega) int { - count, err := db.NewSelect(). - Table("pg_stat_activity"). - Where("state <> 'idle' and pid <> pg_backend_pid()"). - Where(`query like 'INSERT INTO "_default".accounts%'`). - Count(ctx) - g.Expect(err).To(BeNil()) - return count - }). - WithTimeout(10 * time.Second). - // Once all the transactions are in pending state, we should have one lock - // for the first tx, trying to write a new log. - // And, we should also have countTransactions-1 pending lock for the 'bank' account - Should(BeNumerically("==", countTransactions-1)) // -1 for the first one + By("all events should have been properly sent", func() { + for range countTransactions { + Eventually(events).Should(Receive(Event(ledgerevents.EventTypeCommittedTransactions))) + } + }) + }) + }) }) - When("restarting the service", func() { - BeforeEach(func() { - // We will restart the server in a separate gorouting - // the server should not restart until all pending transactions creation requests are fully completed - restarted := make(chan struct{}) - go func() { - defer GinkgoRecover() - defer func() { - close(restarted) - }() - By("restart server", func() { - ctx, cancel := context.WithTimeout(ctx, serverRestartTimeout) - DeferCleanup(cancel) + }) - Expect(testServer.GetValue().Restart(ctx)).To(BeNil()) - }) - }() + Context("Ledger should respond correctly as well as the minimal schema version is respected", func() { + var ( + ledgerName = "default" + ) + 
BeforeEach(func() { + bunDB, err := bunconnect.OpenSQLDB(ctx, db.GetValue().ConnectionOptions()) + Expect(err).To(BeNil()) - // Once the server is restarting, it should not accept any new connection - Eventually(func() error { - _, err := GetInfo(ctx, testServer.GetValue()) - return err - }).ShouldNot(BeNil()) + Expect(driver.Migrate(ctx, bunDB)).To(BeNil()) - // by rollback sql transactions, we allow the blocked routines (which create transactions) to resume. - By("rollback tx", func() { - _ = sqlTx.Rollback() - }) + _, err = bunDB.NewInsert(). + Model(pointer.For(ledger.MustNewWithDefault(ledgerName))). + Exec(ctx) + Expect(err).To(BeNil()) - Eventually(restarted). - WithTimeout(serverRestartTimeout). - Should(BeClosed()) + migrator := bucket.GetMigrator(ledger.DefaultBucket) + for i := 0; i < bucket.MinimalSchemaVersion; i++ { + Expect(migrator.UpByOne(ctx, bunDB)).To(BeNil()) + } + }) + testServer := NewTestServer(func() Configuration { + return Configuration{ + PostgresConfiguration: db.GetValue().ConnectionOptions(), + Output: GinkgoWriter, + Debug: debug, + NatsURL: natsServer.GetValue().ClientURL(), + DisableAutoUpgrade: true, + } + }) + It("should be ok", func() { + By("we should be able to create a new transaction", func() { + _, err := CreateTransaction(ctx, testServer.GetValue(), operations.V2CreateTransactionRequest{ + Ledger: ledgerName, + V2PostTransaction: components.V2PostTransaction{ + Metadata: map[string]string{}, + Postings: []components.V2Posting{ + { + Amount: big.NewInt(100), + Asset: "USD", + Source: "world", + Destination: "alice", + }, + }, + }, + }) + Expect(err).To(BeNil()) }) - It("in flight transactions should be correctly terminated before", func() { + By("we should be able to list transactions", func() { transactions, err := ListTransactions(ctx, testServer.GetValue(), operations.V2ListTransactionsRequest{ - Ledger: "foo", - PageSize: pointer.For(int64(countTransactions)), + Ledger: ledgerName, }) Expect(err).To(BeNil()) - 
Expect(transactions.Data).To(HaveLen(countTransactions)) - - By("all events should have been properly sent", func() { - for range countTransactions { - Eventually(events).Should(Receive(Event(ledgerevents.EventTypeCommittedTransactions))) - } - }) + Expect(transactions.Data).To(HaveLen(1)) }) }) }) From cb31afdf96f88fa5dd7d14258faea347079843f1 Mon Sep 17 00:00:00 2001 From: Geoffrey Ragot Date: Mon, 21 Oct 2024 17:43:46 +0200 Subject: [PATCH 06/12] feat: optimize storage tests --- go.sum | 2 ++ internal/storage/ledger/main_test.go | 24 +++++++++++++++--------- 2 files changed, 17 insertions(+), 9 deletions(-) diff --git a/go.sum b/go.sum index 5f7e744d9..238d91b57 100644 --- a/go.sum +++ b/go.sum @@ -101,6 +101,8 @@ github.com/formancehq/go-libs/v2 v2.0.1-0.20241017153232-1a62cecf1a61 h1:GSIhsdo github.com/formancehq/go-libs/v2 v2.0.1-0.20241017153232-1a62cecf1a61/go.mod h1:LgxayMN6wgAQbkB3ioBDTHOVMKp1rC6Q55M1CvG44xY= github.com/formancehq/go-libs/v2 v2.0.1-0.20241021110602-fbd3b37b93f8 h1:q9mP1jT2q2/QxGdEg9mWOcsf/P7NNvC1vMmyDvj5EEY= github.com/formancehq/go-libs/v2 v2.0.1-0.20241021110602-fbd3b37b93f8/go.mod h1:KO+eOrTVQ5tR3TZUAHapoQ+d7y2+Ie5Tg0QwfZHAK4k= +github.com/formancehq/go-libs/v2 v2.0.1-0.20241021183239-813f4dc647a1 h1:PnDpgyTBicMbSC/c7PTdYaNZCCHlnKyVXURo4VTJyCc= +github.com/formancehq/go-libs/v2 v2.0.1-0.20241021183239-813f4dc647a1/go.mod h1:LgxayMN6wgAQbkB3ioBDTHOVMKp1rC6Q55M1CvG44xY= github.com/fortytw2/leaktest v1.3.0 h1:u8491cBMTQ8ft8aeV+adlcytMZylmA5nnwwkRZjI8vw= github.com/fortytw2/leaktest v1.3.0/go.mod h1:jDsjWgpAGjm2CA7WthBh/CdZYEPF31XHquHwclZch5g= github.com/go-chi/chi v4.1.2+incompatible h1:fGFk2Gmi/YKXk0OmGfBh0WgmN3XB8lVnEyNz34tQRec= diff --git a/internal/storage/ledger/main_test.go b/internal/storage/ledger/main_test.go index 1c63c7f3c..7670169d9 100644 --- a/internal/storage/ledger/main_test.go +++ b/internal/storage/ledger/main_test.go @@ -4,15 +4,16 @@ package ledger_test import ( "database/sql" + 
"github.com/formancehq/go-libs/v2/bun/bunconnect" . "github.com/formancehq/go-libs/v2/testing/utils" - systemstore "github.com/formancehq/ledger/internal/storage/driver" + "github.com/formancehq/ledger/internal/storage/driver" ledgerstore "github.com/formancehq/ledger/internal/storage/ledger" "go.opentelemetry.io/otel/trace/noop" "math/big" "os" + "sync/atomic" "testing" - "github.com/formancehq/go-libs/v2/bun/bunconnect" "github.com/formancehq/go-libs/v2/bun/bundebug" "github.com/formancehq/go-libs/v2/testing/docker" ledger "github.com/formancehq/ledger/internal" @@ -30,8 +31,9 @@ import ( ) var ( - srv = NewDeferred[*pgtesting.PostgresServer]() - bunDB = NewDeferred[*bun.DB]() + srv = NewDeferred[*pgtesting.PostgresServer]() + bunDB = NewDeferred[*bun.DB]() + ledgerCount = atomic.Int64{} ) func TestMain(m *testing.M) { @@ -43,10 +45,13 @@ func TestMain(m *testing.M) { db, err := sql.Open("pgx", ret.GetDSN()) require.NoError(t, err) - bunDB := bun.NewDB(db, pgdialect.New()) + bunDB := bun.NewDB(db, pgdialect.New(), bun.WithDiscardUnknownColumns()) if os.Getenv("DEBUG") == "true" { bunDB.AddQueryHook(bundebug.NewQueryHook()) } + bunDB.SetMaxOpenConns(100) + + require.NoError(t, driver.Migrate(logging.TestingContext(), bunDB)) return bunDB }) @@ -81,16 +86,17 @@ func newLedgerStore(t T) *ledgerstore.Store { db, err := bunconnect.OpenSQLDB(ctx, pgDatabase.ConnectionOptions(), hooks...) 
require.NoError(t, err) - require.NoError(t, systemstore.Migrate(ctx, db)) + require.NoError(t, driver.Migrate(ctx, db)) l := ledger.MustNewWithDefault(ledgerName) l.Bucket = ledgerName + l.ID = int(ledgerCount.Add(1)) - b := bucket.New(db, ledgerName) + b := bucket.New(bunDB.GetValue(), ledgerName) require.NoError(t, b.Migrate(ctx, noop.Tracer{})) - require.NoError(t, b.AddLedger(ctx, l, db)) + require.NoError(t, b.AddLedger(ctx, l, bunDB.GetValue())) - return ledgerstore.New(db, b, l) + return ledgerstore.New(bunDB.GetValue(), b, l) } func bigIntComparer(v1 *big.Int, v2 *big.Int) bool { From 882e4bc7ade9b0d701a647b338c69b705d6d8104 Mon Sep 17 00:00:00 2001 From: Geoffrey Ragot Date: Tue, 22 Oct 2024 11:00:17 +0200 Subject: [PATCH 07/12] fix: run migration outside sql transactions --- internal/storage/driver/driver.go | 65 ++++++++++++++----------------- 1 file changed, 29 insertions(+), 36 deletions(-) diff --git a/internal/storage/driver/driver.go b/internal/storage/driver/driver.go index 42c28f719..f43df0b63 100644 --- a/internal/storage/driver/driver.go +++ b/internal/storage/driver/driver.go @@ -2,7 +2,7 @@ package driver import ( "context" - "database/sql" + "errors" "errors" "fmt" "github.com/formancehq/go-libs/v2/metadata" @@ -36,41 +36,26 @@ type Driver struct { func (d *Driver) createLedgerStore(ctx context.Context, db bun.IDB, l *ledger.Ledger) (*ledgerstore.Store, error) { - tx, err := db.BeginTx(ctx, &sql.TxOptions{}) - if err != nil { - return nil, fmt.Errorf("begin transaction: %w", err) - } - - b := bucket.New(tx, l.Bucket) + b := bucket.New(d.db, l.Bucket) if err := b.Migrate(ctx, d.tracer); err != nil { return nil, fmt.Errorf("migrating bucket: %w", err) } - ret, err := db.NewInsert(). + _, err := db.NewInsert(). Model(l). - Ignore(). Returning("id, added_at"). 
Exec(ctx) if err != nil { + if errors.Is(postgres.ResolveError(err), postgres.ErrConstraintsFailed{}) { + return nil, systemcontroller.ErrLedgerAlreadyExists + } return nil, postgres.ResolveError(err) } - affected, err := ret.RowsAffected() - if err != nil { - return nil, fmt.Errorf("creating ledger: %w", err) - } - if affected == 0 { - return nil, systemcontroller.ErrLedgerAlreadyExists - } - - if err := b.AddLedger(ctx, *l, tx); err != nil { + if err := b.AddLedger(ctx, *l, d.db); err != nil { return nil, fmt.Errorf("adding ledger to bucket: %w", err) } - if err := tx.Commit(); err != nil { - return nil, fmt.Errorf("committing sql transaction to create ledger and schemas: %w", err) - } - return ledgerstore.New( d.db, b, @@ -82,29 +67,37 @@ func (d *Driver) createLedgerStore(ctx context.Context, db bun.IDB, l *ledger.Le func (d *Driver) CreateLedger(ctx context.Context, l *ledger.Ledger) (*ledgerstore.Store, error) { - // start a transaction because we will need to create the schema and apply ledger migrations - tx, err := d.db.BeginTx(ctx, &sql.TxOptions{}) - if err != nil { - return nil, fmt.Errorf("begin transaction: %w", err) - } - defer func() { - _ = tx.Rollback() - }() - if l.Metadata == nil { l.Metadata = metadata.Metadata{} } - store, err := d.createLedgerStore(ctx, tx, l) + b := bucket.New(d.db, l.Bucket) + if err := b.Migrate(ctx, d.tracer); err != nil { + return nil, fmt.Errorf("migrating bucket: %w", err) + } + + _, err := d.db.NewInsert(). + Model(l). + Returning("id, added_at"). 
+ Exec(ctx) if err != nil { - return nil, err + if errors.Is(postgres.ResolveError(err), postgres.ErrConstraintsFailed{}) { + return nil, systemcontroller.ErrLedgerAlreadyExists + } + return nil, postgres.ResolveError(err) } - if err := tx.Commit(); err != nil { - return nil, fmt.Errorf("committing sql transaction to create ledger schema: %w", err) + if err := b.AddLedger(ctx, *l, d.db); err != nil { + return nil, fmt.Errorf("adding ledger to bucket: %w", err) } - return store, nil + return ledgerstore.New( + d.db, + b, + *l, + ledgerstore.WithMeter(d.meter), + ledgerstore.WithTracer(d.tracer), + ), nil } func (d *Driver) OpenLedger(ctx context.Context, name string) (*ledgerstore.Store, *ledger.Ledger, error) { From cb4b938234b6419c7bd9075253cccf98ee402d53 Mon Sep 17 00:00:00 2001 From: Geoffrey Ragot Date: Tue, 22 Oct 2024 12:12:41 +0200 Subject: [PATCH 08/12] fix: incremental migrations --- .../12-moves-fill-transaction-id/up.sql | 16 +++++---- .../13-transactions-fill-inserted-at/up.sql | 33 +++++++++---------- .../14-transactions-fill-pcv/up.sql | 12 +++---- .../15-accounts-volumes-fill-history/up.sql | 6 ++-- .../up.sql | 10 ++++-- .../17-accounts-metadata-fill-address/up.sql | 10 ++++-- .../migrations/18-logs-fill-memento/up.sql | 11 ++++--- internal/storage/driver/driver.go | 31 ----------------- 8 files changed, 57 insertions(+), 72 deletions(-) diff --git a/internal/storage/bucket/migrations/12-moves-fill-transaction-id/up.sql b/internal/storage/bucket/migrations/12-moves-fill-transaction-id/up.sql index 4305015ca..9d698c8ad 100644 --- a/internal/storage/bucket/migrations/12-moves-fill-transaction-id/up.sql +++ b/internal/storage/bucket/migrations/12-moves-fill-transaction-id/up.sql @@ -1,9 +1,9 @@ -set search_path = '{{.Bucket}}'; - do $$ declare _batch_size integer := 30; begin + set search_path = '{{ .Bucket }}'; + loop with _outdated_moves as ( select * @@ -21,9 +21,13 @@ do $$ where moves.seq in (_outdated_moves.seq); exit when not found; + + raise 
info 'commit batch'; + commit ; end loop; - end -$$; -alter table moves -alter column transactions_id set not null; \ No newline at end of file + alter table moves + alter column transactions_id set not null; + end +$$ +language plpgsql; \ No newline at end of file diff --git a/internal/storage/bucket/migrations/13-transactions-fill-inserted-at/up.sql b/internal/storage/bucket/migrations/13-transactions-fill-inserted-at/up.sql index 9fea51b23..11666f4a3 100644 --- a/internal/storage/bucket/migrations/13-transactions-fill-inserted-at/up.sql +++ b/internal/storage/bucket/migrations/13-transactions-fill-inserted-at/up.sql @@ -1,20 +1,20 @@ -set search_path = '{{.Bucket}}'; - do $$ declare _batch_size integer := 30; - -- select the date where the "11-make-stateless" migration has been applied - _date timestamp without time zone = ( - select tstamp - from _system.goose_db_version - where version_id = 12 - ); - _count integer = ( - select count(*) - from logs - where date <= _date - ); + _date timestamp without time zone; + _count integer; begin + set search_path = '{{ .Bucket }}'; + + -- select the date where the "11-make-stateless" migration has been applied + select tstamp into _date + from _system.goose_db_version + where version_id = 12; + + select count(*) into _count + from logs + where date <= _date; + for i in 0.._count by _batch_size loop update transactions set inserted_at = ( @@ -23,9 +23,8 @@ do $$ where transactions.id = (data->'transaction'->>'id')::bigint and transactions.ledger = ledger ) where id >= i and id < i + _batch_size; + + commit; end loop; end -$$; - -alter table moves -alter column transactions_id set not null; \ No newline at end of file +$$; \ No newline at end of file diff --git a/internal/storage/bucket/migrations/14-transactions-fill-pcv/up.sql b/internal/storage/bucket/migrations/14-transactions-fill-pcv/up.sql index 0e44e9e42..62b182309 100644 --- a/internal/storage/bucket/migrations/14-transactions-fill-pcv/up.sql +++ 
b/internal/storage/bucket/migrations/14-transactions-fill-pcv/up.sql @@ -1,9 +1,9 @@ -set search_path = '{{.Bucket}}'; - do $$ declare _batch_size integer := 30; begin + set search_path = '{{ .Bucket }}'; + loop with _outdated_transactions as ( select id @@ -37,8 +37,8 @@ do $$ exit when not found; end loop; + + alter table transactions + alter column post_commit_volumes set not null; end -$$; - -alter table transactions -alter column post_commit_volumes set not null ; \ No newline at end of file +$$; \ No newline at end of file diff --git a/internal/storage/bucket/migrations/15-accounts-volumes-fill-history/up.sql b/internal/storage/bucket/migrations/15-accounts-volumes-fill-history/up.sql index 5f1b4efd0..8af50929c 100644 --- a/internal/storage/bucket/migrations/15-accounts-volumes-fill-history/up.sql +++ b/internal/storage/bucket/migrations/15-accounts-volumes-fill-history/up.sql @@ -1,9 +1,9 @@ -set search_path = '{{.Bucket}}'; - do $$ declare _missing record; begin + set search_path = '{{ .Bucket }}'; + loop select distinct on (ledger, accounts_address, asset) ledger, @@ -35,6 +35,8 @@ do $$ (_missing.post_commit_volumes).outputs ) on conflict do nothing; -- can be inserted by a concurrent transaction + + commit; end loop; end $$; \ No newline at end of file diff --git a/internal/storage/bucket/migrations/16-transactions-metadata-fill-transaction-id/up.sql b/internal/storage/bucket/migrations/16-transactions-metadata-fill-transaction-id/up.sql index eb211aaf0..2da3aef83 100644 --- a/internal/storage/bucket/migrations/16-transactions-metadata-fill-transaction-id/up.sql +++ b/internal/storage/bucket/migrations/16-transactions-metadata-fill-transaction-id/up.sql @@ -1,9 +1,10 @@ -set search_path = '{{.Bucket}}'; do $$ declare _batch_size integer := 30; begin + set search_path = '{{.Bucket}}'; + loop with _outdated_transactions_metadata as ( select seq @@ -21,9 +22,12 @@ do $$ where transactions_metadata.seq in (_outdated_transactions_metadata.seq); exit when not 
found; + + commit ; end loop; + + alter table transactions_metadata + alter column transactions_id set not null ; end $$; -alter table transactions_metadata -alter column transactions_id set not null ; \ No newline at end of file diff --git a/internal/storage/bucket/migrations/17-accounts-metadata-fill-address/up.sql b/internal/storage/bucket/migrations/17-accounts-metadata-fill-address/up.sql index 3e80405ab..093153309 100644 --- a/internal/storage/bucket/migrations/17-accounts-metadata-fill-address/up.sql +++ b/internal/storage/bucket/migrations/17-accounts-metadata-fill-address/up.sql @@ -1,9 +1,10 @@ -set search_path = '{{.Bucket}}'; do $$ declare _batch_size integer := 30; begin + set search_path = '{{.Bucket}}'; + loop with _outdated_accounts_metadata as ( select seq @@ -21,9 +22,12 @@ do $$ where accounts_metadata.seq in (_outdated_accounts_metadata.seq); exit when not found; + + commit ; end loop; + + alter table accounts_metadata + alter column accounts_address set not null ; end $$; -alter table accounts_metadata -alter column accounts_address set not null ; \ No newline at end of file diff --git a/internal/storage/bucket/migrations/18-logs-fill-memento/up.sql b/internal/storage/bucket/migrations/18-logs-fill-memento/up.sql index 2884998b7..e1969dcf9 100644 --- a/internal/storage/bucket/migrations/18-logs-fill-memento/up.sql +++ b/internal/storage/bucket/migrations/18-logs-fill-memento/up.sql @@ -1,9 +1,9 @@ -set search_path = '{{.Bucket}}'; - do $$ declare _batch_size integer := 30; begin + set search_path = '{{.Bucket}}'; + loop with _outdated_logs as ( select seq @@ -17,9 +17,12 @@ do $$ where logs.seq in (_outdated_logs.seq); exit when not found; + + commit ; end loop; + + alter table logs + alter column memento set not null; end $$; -alter table logs -alter column memento set not null; \ No newline at end of file diff --git a/internal/storage/driver/driver.go b/internal/storage/driver/driver.go index f43df0b63..77c17ea2b 100644 --- 
a/internal/storage/driver/driver.go +++ b/internal/storage/driver/driver.go @@ -34,37 +34,6 @@ type Driver struct { meter metric.Meter } -func (d *Driver) createLedgerStore(ctx context.Context, db bun.IDB, l *ledger.Ledger) (*ledgerstore.Store, error) { - - b := bucket.New(d.db, l.Bucket) - if err := b.Migrate(ctx, d.tracer); err != nil { - return nil, fmt.Errorf("migrating bucket: %w", err) - } - - _, err := db.NewInsert(). - Model(l). - Returning("id, added_at"). - Exec(ctx) - if err != nil { - if errors.Is(postgres.ResolveError(err), postgres.ErrConstraintsFailed{}) { - return nil, systemcontroller.ErrLedgerAlreadyExists - } - return nil, postgres.ResolveError(err) - } - - if err := b.AddLedger(ctx, *l, d.db); err != nil { - return nil, fmt.Errorf("adding ledger to bucket: %w", err) - } - - return ledgerstore.New( - d.db, - b, - *l, - ledgerstore.WithMeter(d.meter), - ledgerstore.WithTracer(d.tracer), - ), nil -} - func (d *Driver) CreateLedger(ctx context.Context, l *ledger.Ledger) (*ledgerstore.Store, error) { if l.Metadata == nil { From bb2cd186ecfd62e27226e56b130a82bab447f7a5 Mon Sep 17 00:00:00 2001 From: Geoffrey Ragot Date: Tue, 22 Oct 2024 12:16:02 +0200 Subject: [PATCH 09/12] fix: tests --- go.mod | 10 +++++++-- go.sum | 13 +++++------- internal/storage/bucket/migrations.go | 2 +- internal/storage/ledger/transactions.go | 20 ------------------ internal/storage/module.go | 26 ++++++++++++++++-------- test/e2e/api_balances_aggregated_test.go | 4 ++-- test/e2e/lifecycle_test.go | 4 +++- test/e2e/suite_test.go | 22 ++++++++++---------- 8 files changed, 48 insertions(+), 53 deletions(-) diff --git a/go.mod b/go.mod index 827eebc33..7dc24df35 100644 --- a/go.mod +++ b/go.mod @@ -21,6 +21,7 @@ require ( github.com/google/go-cmp v0.6.0 github.com/google/uuid v1.6.0 github.com/invopop/jsonschema v0.12.0 + github.com/jackc/pgx/v5 v5.7.1 github.com/jamiealquiza/tachymeter v2.0.0+incompatible github.com/logrusorgru/aurora v2.0.3+incompatible 
github.com/nats-io/nats.go v1.37.0 @@ -28,12 +29,14 @@ require ( github.com/onsi/gomega v1.34.2 github.com/ory/dockertest/v3 v3.11.0 github.com/pborman/uuid v1.2.1 + github.com/pkg/errors v0.9.1 github.com/shomali11/xsql v0.0.0-20190608141458-bf76292144df github.com/spf13/cobra v1.8.1 github.com/spf13/pflag v1.0.5 github.com/stretchr/testify v1.9.0 github.com/uptrace/bun v1.2.3 github.com/uptrace/bun/dialect/pgdialect v1.2.3 + github.com/uptrace/bun/extra/bundebug v1.2.3 github.com/xeipuuv/gojsonschema v1.2.0 github.com/xo/dburl v0.23.2 go.opentelemetry.io/otel v1.31.0 @@ -44,6 +47,7 @@ require ( go.uber.org/mock v0.4.0 golang.org/x/oauth2 v0.23.0 golang.org/x/sync v0.8.0 + gopkg.in/yaml.v3 v3.0.1 ) require ( @@ -89,6 +93,7 @@ require ( github.com/eapache/queue v1.1.0 // indirect github.com/ebitengine/purego v0.8.0 // indirect github.com/ericlagergren/decimal v0.0.0-20240411145413-00de7ca16731 // indirect + github.com/fatih/color v1.17.0 // indirect github.com/felixge/httpsnoop v1.0.4 // indirect github.com/formancehq/numscript v0.0.9-0.20241009144012-1150c14a1417 github.com/go-chi/chi v4.1.2+incompatible // indirect @@ -116,7 +121,7 @@ require ( github.com/inconshreveable/mousetrap v1.1.0 // indirect github.com/jackc/pgpassfile v1.0.0 // indirect github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect - github.com/jackc/pgx/v5 v5.7.1 + github.com/jackc/pgx/v5 v5.7.1 // indirect github.com/jackc/puddle/v2 v2.2.2 // indirect github.com/jcmturner/aescts/v2 v2.0.0 // indirect github.com/jcmturner/dnsutils/v2 v2.0.0 // indirect @@ -128,6 +133,8 @@ require ( github.com/lithammer/shortuuid/v3 v3.0.7 // indirect github.com/lufia/plan9stats v0.0.0-20240909124753-873cd0166683 // indirect github.com/mailru/easyjson v0.7.7 // indirect + github.com/mattn/go-colorable v0.1.13 // indirect + github.com/mattn/go-isatty v0.0.20 // indirect github.com/minio/highwayhash v1.0.3 // indirect github.com/moby/docker-image-spec v1.3.1 // indirect github.com/moby/term 
v0.5.0 // indirect @@ -199,5 +206,4 @@ require ( google.golang.org/protobuf v1.35.1 // indirect gopkg.in/go-jose/go-jose.v2 v2.6.3 // indirect gopkg.in/yaml.v2 v2.4.0 // indirect - gopkg.in/yaml.v3 v3.0.1 // indirect ) diff --git a/go.sum b/go.sum index 238d91b57..f7620f9fc 100644 --- a/go.sum +++ b/go.sum @@ -95,14 +95,8 @@ github.com/fatih/color v1.17.0 h1:GlRw1BRJxkpqUCBKzKOw098ed57fEsKeNjpTe3cSjK4= github.com/fatih/color v1.17.0/go.mod h1:YZ7TlrGPkiz6ku9fK3TLD/pl3CpsiFyu8N92HLgmosI= github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= -github.com/formancehq/go-libs/v2 v2.0.1-0.20241017152835-2c30f563ab46 h1:8wZtnWSIYNV7DwD0Jr4HsbcRgezOrgDJ2Q0w9ABieKc= -github.com/formancehq/go-libs/v2 v2.0.1-0.20241017152835-2c30f563ab46/go.mod h1:LgxayMN6wgAQbkB3ioBDTHOVMKp1rC6Q55M1CvG44xY= -github.com/formancehq/go-libs/v2 v2.0.1-0.20241017153232-1a62cecf1a61 h1:GSIhsdo/YXuZXI4q8xA8IrdOkkjfFp6O+DiNywk8s8U= -github.com/formancehq/go-libs/v2 v2.0.1-0.20241017153232-1a62cecf1a61/go.mod h1:LgxayMN6wgAQbkB3ioBDTHOVMKp1rC6Q55M1CvG44xY= -github.com/formancehq/go-libs/v2 v2.0.1-0.20241021110602-fbd3b37b93f8 h1:q9mP1jT2q2/QxGdEg9mWOcsf/P7NNvC1vMmyDvj5EEY= -github.com/formancehq/go-libs/v2 v2.0.1-0.20241021110602-fbd3b37b93f8/go.mod h1:KO+eOrTVQ5tR3TZUAHapoQ+d7y2+Ie5Tg0QwfZHAK4k= -github.com/formancehq/go-libs/v2 v2.0.1-0.20241021183239-813f4dc647a1 h1:PnDpgyTBicMbSC/c7PTdYaNZCCHlnKyVXURo4VTJyCc= -github.com/formancehq/go-libs/v2 v2.0.1-0.20241021183239-813f4dc647a1/go.mod h1:LgxayMN6wgAQbkB3ioBDTHOVMKp1rC6Q55M1CvG44xY= +github.com/formancehq/go-libs/v2 v2.0.1-0.20241022171306-dcd601f231ec h1:50Ojig/hsng4K8FgoezONfVljawi9+sYQkjj+vf7/Fw= +github.com/formancehq/go-libs/v2 v2.0.1-0.20241022171306-dcd601f231ec/go.mod h1:LgxayMN6wgAQbkB3ioBDTHOVMKp1rC6Q55M1CvG44xY= github.com/fortytw2/leaktest v1.3.0 h1:u8491cBMTQ8ft8aeV+adlcytMZylmA5nnwwkRZjI8vw= 
github.com/fortytw2/leaktest v1.3.0/go.mod h1:jDsjWgpAGjm2CA7WthBh/CdZYEPF31XHquHwclZch5g= github.com/go-chi/chi v4.1.2+incompatible h1:fGFk2Gmi/YKXk0OmGfBh0WgmN3XB8lVnEyNz34tQRec= @@ -220,6 +214,7 @@ github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0 github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= +github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= github.com/minio/highwayhash v1.0.3 h1:kbnuUMoHYyVl7szWjSxJnxw11k2U709jqFPPmIUyD6Q= @@ -437,8 +432,10 @@ golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.21.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo= golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= diff --git a/internal/storage/bucket/migrations.go b/internal/storage/bucket/migrations.go index 
f8937e0dc..d853c38a9 100644 --- a/internal/storage/bucket/migrations.go +++ b/internal/storage/bucket/migrations.go @@ -6,9 +6,9 @@ import ( "embed" "fmt" "github.com/formancehq/go-libs/v2/migrations" - "github.com/ghodss/yaml" "github.com/uptrace/bun" "go.opentelemetry.io/otel/trace" + "gopkg.in/yaml.v3" "io/fs" "path/filepath" "slices" diff --git a/internal/storage/ledger/transactions.go b/internal/storage/ledger/transactions.go index 3ad30d173..5f001ff11 100644 --- a/internal/storage/ledger/transactions.go +++ b/internal/storage/ledger/transactions.go @@ -160,26 +160,6 @@ func (s *Store) selectTransactions(date *time.Time, expandVolumes, expandEffecti Column("transactions_id"). ColumnExpr("aggregate_objects(post_commit_effective_volumes::jsonb) as post_commit_effective_volumes"). Group("transactions_id"), - //s.db.NewSelect(). - // Column("transactions_id"). - // ColumnExpr("aggregate_objects(pcev::jsonb) as post_commit_effective_volumes"). - // TableExpr( - // "(?) data", - // s.db.NewSelect(). - // DistinctOn("transactions_id, accounts_address, asset"). - // ModelTableExpr(s.GetPrefixedRelationName("moves")). - // Column("transactions_id"). - // ColumnExpr(` - // json_build_object( - // moves.accounts_address, - // json_build_object( - // moves.asset, - // first_value(moves.post_commit_effective_volumes) over (partition by (transactions_id, accounts_address, asset) order by seq desc) - // ) - // ) as pcev - // `), - // ). - // Group("transactions_id"), ). 
ColumnExpr("pcev.*") } diff --git a/internal/storage/module.go b/internal/storage/module.go index 16a45915e..093eda91b 100644 --- a/internal/storage/module.go +++ b/internal/storage/module.go @@ -29,17 +29,27 @@ func NewFXModule(autoUpgrade bool) fx.Option { go func() { defer close(upgradeStopped) - if err := driver.UpgradeAllBuckets(upgradeContext); err != nil { - // Long migrations can be cancelled (app rescheduled for example) - // before fully terminated, handle this gracefully, don't panic, - // the next start will try again. - if errors.Is(err, context.DeadlineExceeded) || - errors.Is(err, context.Canceled) { + for { + select { + case <-ctx.Done(): + return + default: + logging.FromContext(ctx).Infof("Upgrading buckets...") + if err := driver.UpgradeAllBuckets(upgradeContext); err != nil { + // Long migrations can be cancelled (app rescheduled for example) + // before fully terminated, handle this gracefully, don't panic, + // the next start will try again. + if errors.Is(err, context.DeadlineExceeded) || + errors.Is(err, context.Canceled) { + return + } + logging.FromContext(ctx).Errorf("Upgrading buckets: %s", err) + continue + } return } - - panic(err) } + }() return nil }, diff --git a/test/e2e/api_balances_aggregated_test.go b/test/e2e/api_balances_aggregated_test.go index 0cc59f4fc..375444698 100644 --- a/test/e2e/api_balances_aggregated_test.go +++ b/test/e2e/api_balances_aggregated_test.go @@ -38,7 +38,7 @@ var _ = Context("Ledger engine tests", func() { }) Expect(err).To(BeNil()) - _, err = CreateBulk(ctx, testServer.GetValue(), operations.V2CreateBulkRequest{ + ret, err := CreateBulk(ctx, testServer.GetValue(), operations.V2CreateBulkRequest{ RequestBody: []components.V2BulkElement{ components.CreateV2BulkElementCreateTransaction(components.V2BulkElementCreateTransaction{ Data: &components.V2PostTransaction{ @@ -99,7 +99,7 @@ var _ = Context("Ledger engine tests", func() { }) Expect(err).To(Succeed()) - firstTransactionsInsertedAt = time.Now() + 
firstTransactionsInsertedAt = ret[2].V2BulkElementResultCreateTransaction.Data.InsertedAt _, err = CreateBulk(ctx, testServer.GetValue(), operations.V2CreateBulkRequest{ RequestBody: []components.V2BulkElement{ diff --git a/test/e2e/lifecycle_test.go b/test/e2e/lifecycle_test.go index 10af57c8d..1255a3d6d 100644 --- a/test/e2e/lifecycle_test.go +++ b/test/e2e/lifecycle_test.go @@ -8,6 +8,7 @@ import ( "github.com/formancehq/go-libs/v2/bun/bunconnect" "github.com/formancehq/go-libs/v2/logging" "github.com/formancehq/go-libs/v2/pointer" + "github.com/formancehq/go-libs/v2/testing/platform/pgtesting" "github.com/formancehq/go-libs/v2/time" ledger "github.com/formancehq/ledger/internal" "github.com/formancehq/ledger/pkg/client/models/components" @@ -26,11 +27,11 @@ import ( var _ = Context("Ledger application lifecycle tests", func() { var ( - db = UseTemplatedDatabase() ctx = logging.TestingContext() ) Context("Pending transaction should be fully processed before stopping or restarting the server", func() { + db := UseTemplatedDatabase() testServer := NewTestServer(func() Configuration { return Configuration{ PostgresConfiguration: db.GetValue().ConnectionOptions(), @@ -172,6 +173,7 @@ var _ = Context("Ledger application lifecycle tests", func() { Context("Ledger should respond correctly as well as the minimal schema version is respected", func() { var ( ledgerName = "default" + db = pgtesting.UsePostgresDatabase(pgServer) ) BeforeEach(func() { bunDB, err := bunconnect.OpenSQLDB(ctx, db.GetValue().ConnectionOptions()) diff --git a/test/e2e/suite_test.go b/test/e2e/suite_test.go index 558df1bd1..069ff41bf 100644 --- a/test/e2e/suite_test.go +++ b/test/e2e/suite_test.go @@ -10,8 +10,6 @@ import ( ledger "github.com/formancehq/ledger/internal" "github.com/formancehq/ledger/internal/storage/bucket" "github.com/formancehq/ledger/internal/storage/driver" - "github.com/stretchr/testify/require" - "go.opentelemetry.io/otel/trace/noop" "os" "testing" @@ -35,7 +33,7 @@ var ( 
debug = os.Getenv("DEBUG") == "true" logger = logging.NewDefaultLogger(GinkgoWriter, debug, false) - DBTemplate = "template1" + DBTemplate = "dbtemplate" ) type ParallelExecutionContext struct { @@ -57,18 +55,20 @@ var _ = SynchronizedBeforeSuite(func() []byte { ) By("Postgres address: " + ret.GetDSN()) - db, err := bunconnect.OpenSQLDB(context.Background(), bunconnect.ConnectionOptions{ - DatabaseSourceName: ret.GetDatabaseDSN(DBTemplate), - }) - require.NoError(GinkgoT(), err) + templateDatabase := ret.NewDatabase(GinkgoT(), WithName(DBTemplate)) - err = driver.Migrate(context.Background(), db) - require.NoError(GinkgoT(), err) + bunDB, err := bunconnect.OpenSQLDB(context.Background(), templateDatabase.ConnectionOptions()) + Expect(err).To(BeNil()) + + err = driver.Migrate(context.Background(), bunDB) + Expect(err).To(BeNil()) // Initialize the _default bucket on the default database // This way, we will be able to clone this database to speed up the tests - err = bucket.Migrate(context.Background(), noop.Tracer{}, db, ledger.DefaultBucket) - require.NoError(GinkgoT(), err) + err = bucket.GetMigrator(ledger.DefaultBucket).Up(context.Background(), bunDB) + Expect(err).To(BeNil()) + + Expect(bunDB.Close()).To(BeNil()) return ret }) From 728eae8cae22f05470d43144180a3805ae2cdc77 Mon Sep 17 00:00:00 2001 From: Geoffrey Ragot Date: Tue, 22 Oct 2024 21:54:51 +0200 Subject: [PATCH 10/12] chore: fix dependencies --- go.mod | 4 +--- go.sum | 34 +++++++++++++++++++++++++-- internal/storage/driver/driver.go | 1 - internal/storage/driver/migrations.go | 8 +++---- 4 files changed, 37 insertions(+), 10 deletions(-) diff --git a/go.mod b/go.mod index 7dc24df35..6d1d57138 100644 --- a/go.mod +++ b/go.mod @@ -14,7 +14,7 @@ require ( github.com/antlr/antlr4/runtime/Go/antlr v1.4.10 github.com/bluele/gcache v0.0.2 github.com/dop251/goja v0.0.0-20241009100908-5f46f2705ca3 - github.com/formancehq/go-libs/v2 v2.0.1-0.20241023163904-e440de7907c7 + github.com/formancehq/go-libs/v2 
v2.0.1-0.20241024102856-de23d0929561 github.com/formancehq/ledger/pkg/client v0.0.0-00010101000000-000000000000 github.com/go-chi/chi/v5 v5.1.0 github.com/go-chi/cors v1.2.1 @@ -121,7 +121,6 @@ require ( github.com/inconshreveable/mousetrap v1.1.0 // indirect github.com/jackc/pgpassfile v1.0.0 // indirect github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 // indirect - github.com/jackc/pgx/v5 v5.7.1 // indirect github.com/jackc/puddle/v2 v2.2.2 // indirect github.com/jcmturner/aescts/v2 v2.0.0 // indirect github.com/jcmturner/dnsutils/v2 v2.0.0 // indirect @@ -149,7 +148,6 @@ require ( github.com/opencontainers/image-spec v1.1.0 // indirect github.com/opencontainers/runc v1.1.14 // indirect github.com/pierrec/lz4/v4 v4.1.21 // indirect - github.com/pkg/errors v0.9.1 github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect github.com/power-devops/perfstat v0.0.0-20240221224432-82ca36839d55 // indirect github.com/puzpuzpuz/xsync/v3 v3.4.0 // indirect diff --git a/go.sum b/go.sum index f7620f9fc..7d08924fc 100644 --- a/go.sum +++ b/go.sum @@ -6,6 +6,8 @@ github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161 h1:L/gRVlceqvL25 github.com/Azure/go-ansiterm v0.0.0-20230124172434-306776ec8161/go.mod h1:xomTg63KZ2rFqZQzSB4Vz2SUXa1BpHTVz9L5PTmPC4E= github.com/IBM/sarama v1.43.3 h1:Yj6L2IaNvb2mRBop39N7mmJAHBVY3dTPncr3qGVkxPA= github.com/IBM/sarama v1.43.3/go.mod h1:FVIRaLrhK3Cla/9FfRF5X9Zua2KpS3SYIXxhac1H+FQ= +github.com/Masterminds/semver/v3 v3.2.1 h1:RN9w6+7QoMeJVGyfmbcgs28Br8cvmnucEXnY0rYXWg0= +github.com/Masterminds/semver/v3 v3.2.1/go.mod h1:qvl/7zhW3nngYb5+80sSMF+FG2BjYrf8m9wsX0PNOMQ= github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY= github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU= github.com/Nvveen/Gotty v0.0.0-20120604004816-cd527374f1e5 h1:TngWCqHvy9oXAN6lEVMRuU21PR1EtLVZJmdB18Gu3Rw= @@ -24,6 +26,8 @@ github.com/alitto/pond v1.9.2 
h1:9Qb75z/scEZVCoSU+osVmQ0I0JOeLfdTDafrbcJ8CLs= github.com/alitto/pond v1.9.2/go.mod h1:xQn3P/sHTYcU/1BR3i86IGIrilcrGC2LiS+E2+CJWsI= github.com/antlr/antlr4/runtime/Go/antlr v1.4.10 h1:yL7+Jz0jTC6yykIK/Wh74gnTJnrGr5AyrNMXuA0gves= github.com/antlr/antlr4/runtime/Go/antlr v1.4.10/go.mod h1:F7bn7fEU90QkQ3tnmaTx3LTKLEDqnwWODIYppRQ5hnY= +github.com/antlr4-go/antlr/v4 v4.13.1 h1:SqQKkuVZ+zWkMMNkjy5FZe5mr5WURWnlpmOuzYWrPrQ= +github.com/antlr4-go/antlr/v4 v4.13.1/go.mod h1:GKmUxMtwp6ZgGwZSva4eWPC5mS6vUAmOABFgjdkM7Nw= github.com/aws/aws-msk-iam-sasl-signer-go v1.0.0 h1:UyjtGmO0Uwl/K+zpzPwLoXzMhcN9xmnR2nrqJoBrg3c= github.com/aws/aws-msk-iam-sasl-signer-go v1.0.0/go.mod h1:TJAXuFs2HcMib3sN5L0gUC+Q01Qvy3DemvA55WuC+iA= github.com/aws/aws-sdk-go-v2 v1.32.2 h1:AkNLZEyYMLnx/Q/mSKkcMqwNFXMAvFto9bNsHqcTduI= @@ -71,6 +75,8 @@ github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSs github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM= github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/dlclark/regexp2 v1.11.4 h1:rPYF9/LECdNymJufQKmri9gV604RvvABwgOA8un7yAo= +github.com/dlclark/regexp2 v1.11.4/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8= github.com/dnwe/otelsarama v0.0.0-20240308230250-9388d9d40bc0 h1:R2zQhFwSCyyd7L43igYjDrH0wkC/i+QBPELuY0HOu84= github.com/dnwe/otelsarama v0.0.0-20240308230250-9388d9d40bc0/go.mod h1:2MqLKYJfjs3UriXXF9Fd0Qmh/lhxi/6tHXkqtXxyIHc= github.com/docker/cli v27.3.1+incompatible h1:qEGdFBF3Xu6SCvCYhc7CzaQTlBmqDuzxPDpigSyeKQQ= @@ -81,6 +87,8 @@ github.com/docker/go-connections v0.5.0 h1:USnMq7hx7gwdVZq1L49hLXaFtUdTADjXGp+uj github.com/docker/go-connections v0.5.0/go.mod h1:ov60Kzw0kKElRwhNs9UlUHAE/F9Fe6GLaXnqyDdmEXc= github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4= 
github.com/docker/go-units v0.5.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= +github.com/dop251/goja v0.0.0-20241009100908-5f46f2705ca3 h1:MXsAuToxwsTn5BEEYm2DheqIiC4jWGmkEJ1uy+KFhvQ= +github.com/dop251/goja v0.0.0-20241009100908-5f46f2705ca3/go.mod h1:MxLav0peU43GgvwVgNbLAj1s/bSGboKkhuULvq/7hx4= github.com/eapache/go-resiliency v1.7.0 h1:n3NRTnBn5N0Cbi/IeOHuQn9s2UwVUH7Ga0ZWcP+9JTA= github.com/eapache/go-resiliency v1.7.0/go.mod h1:5yPzW0MIvSe0JDsv0v+DvcjEv2FyD6iZYSs1ZI+iQho= github.com/eapache/go-xerial-snappy v0.0.0-20230731223053-c322873962e3 h1:Oy0F4ALJ04o5Qqpdz8XLIpNA3WM/iSIXqxtqo7UGVws= @@ -95,10 +103,18 @@ github.com/fatih/color v1.17.0 h1:GlRw1BRJxkpqUCBKzKOw098ed57fEsKeNjpTe3cSjK4= github.com/fatih/color v1.17.0/go.mod h1:YZ7TlrGPkiz6ku9fK3TLD/pl3CpsiFyu8N92HLgmosI= github.com/felixge/httpsnoop v1.0.4 h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= -github.com/formancehq/go-libs/v2 v2.0.1-0.20241022171306-dcd601f231ec h1:50Ojig/hsng4K8FgoezONfVljawi9+sYQkjj+vf7/Fw= -github.com/formancehq/go-libs/v2 v2.0.1-0.20241022171306-dcd601f231ec/go.mod h1:LgxayMN6wgAQbkB3ioBDTHOVMKp1rC6Q55M1CvG44xY= +github.com/formancehq/go-libs/v2 v2.0.1-0.20241024102856-de23d0929561 h1:3GPxp1jm71ZjTK1WKGIDJnjGYc4fpyNTdv8fJmoq6fo= +github.com/formancehq/go-libs/v2 v2.0.1-0.20241024102856-de23d0929561/go.mod h1:LgxayMN6wgAQbkB3ioBDTHOVMKp1rC6Q55M1CvG44xY= +github.com/formancehq/numscript v0.0.9-0.20241009144012-1150c14a1417 h1:LOd5hxnXDIBcehFrpW1OnXk+VSs0yJXeu1iAOO+Hji4= +github.com/formancehq/numscript v0.0.9-0.20241009144012-1150c14a1417/go.mod h1:btuSv05cYwi9BvLRxVs5zrunU+O1vTgigG1T6UsawcY= github.com/fortytw2/leaktest v1.3.0 h1:u8491cBMTQ8ft8aeV+adlcytMZylmA5nnwwkRZjI8vw= github.com/fortytw2/leaktest v1.3.0/go.mod h1:jDsjWgpAGjm2CA7WthBh/CdZYEPF31XHquHwclZch5g= +github.com/gkampitakis/ciinfo v0.3.0 h1:gWZlOC2+RYYttL0hBqcoQhM7h1qNkVqvRCV1fOvpAv8= +github.com/gkampitakis/ciinfo 
v0.3.0/go.mod h1:1NIwaOcFChN4fa/B0hEBdAb6npDlFL8Bwx4dfRLRqAo= +github.com/gkampitakis/go-diff v1.3.2 h1:Qyn0J9XJSDTgnsgHRdz9Zp24RaJeKMUHg2+PDZZdC4M= +github.com/gkampitakis/go-diff v1.3.2/go.mod h1:LLgOrpqleQe26cte8s36HTWcTmMEur6OPYerdAAS9tk= +github.com/gkampitakis/go-snaps v0.5.4 h1:GX+dkKmVsRenz7SoTbdIEL4KQARZctkMiZ8ZKprRwT8= +github.com/gkampitakis/go-snaps v0.5.4/go.mod h1:ZABkO14uCuVxBHAXAfKG+bqNz+aa1bGPAg8jkI0Nk8Y= github.com/go-chi/chi v4.1.2+incompatible h1:fGFk2Gmi/YKXk0OmGfBh0WgmN3XB8lVnEyNz34tQRec= github.com/go-chi/chi v4.1.2+incompatible/go.mod h1:eB3wogJHnLi3x/kFX2A+IbTBlXxmMeXJVKy9tTv1XzQ= github.com/go-chi/chi/v5 v5.1.0 h1:acVI1TYaD+hhedDJ3r54HyA6sExp3HfXq7QWEEY/xMw= @@ -115,6 +131,8 @@ github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0= github.com/go-ole/go-ole v1.3.0 h1:Dt6ye7+vXGIKZ7Xtk4s6/xVdGDQynvom7xCFEdWr6uE= github.com/go-ole/go-ole v1.3.0/go.mod h1:5LS6F96DhAwUc7C+1HLexzMXY1xGRSryjyPPKW6zv78= +github.com/go-sourcemap/sourcemap v2.1.3+incompatible h1:W1iEw64niKVGogNgBN3ePyLFfuisuzeidWPMPWmECqU= +github.com/go-sourcemap/sourcemap v2.1.3+incompatible/go.mod h1:F8jJfvm2KbVjc5NqelyYJmf/v5J0dwNLS2mL4sNA1Jg= github.com/go-sql-driver/mysql v1.8.1 h1:LedoTUt/eveggdHS9qUFC1EFSa8bU2+1pZjSRpvNJ1Y= github.com/go-sql-driver/mysql v1.8.1/go.mod h1:wEBSXgmK//2ZFJyE+qWnIsVGmvmEKlqwuVSjsCm7DZg= github.com/go-task/slim-sprig/v3 v3.0.0 h1:sUs3vkvUymDpBKi3qH1YSqBQk9+9D/8M2mN1vB6EwHI= @@ -212,6 +230,8 @@ github.com/lufia/plan9stats v0.0.0-20240909124753-873cd0166683 h1:7UMa6KCCMjZEMD github.com/lufia/plan9stats v0.0.0-20240909124753-873cd0166683/go.mod h1:ilwx/Dta8jXAgpFYFvSWEMwxmbWXyiUHkd5FwyKhb5k= github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0= github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= +github.com/maruel/natural v1.1.1 h1:Hja7XhhmvEFhcByqDoHz9QZbkWey+COd9xWfCfn1ioo= 
+github.com/maruel/natural v1.1.1/go.mod h1:v+Rfd79xlw1AgVBjbO0BEQmptqb5HvL/k9GRHB7ZKEg= github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= @@ -296,6 +316,14 @@ github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= github.com/stretchr/testify v1.9.0 h1:HtqpIVDClZ4nwg75+f6Lvsy/wHu+3BoSGCbBAcpTsTg= github.com/stretchr/testify v1.9.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= +github.com/tidwall/gjson v1.17.1 h1:wlYEnwqAHgzmhNUFfw7Xalt2JzQvsMx2Se4PcoFCT/U= +github.com/tidwall/gjson v1.17.1/go.mod h1:/wbyibRr2FHMks5tjHJ5F8dMZh3AcwJEMf5vlfC0lxk= +github.com/tidwall/match v1.1.1 h1:+Ho715JplO36QYgwN9PGYNhgZvoUSc9X2c80KVTi+GA= +github.com/tidwall/match v1.1.1/go.mod h1:eRSPERbgtNPcGhD8UCthc6PmLEQXEWd3PRB5JTxsfmM= +github.com/tidwall/pretty v1.2.1 h1:qjsOFOWWQl+N3RsoF5/ssm1pHmJJwhjlSbZ51I6wMl4= +github.com/tidwall/pretty v1.2.1/go.mod h1:ITEVvHYasfjBbM0u2Pg8T2nJnzm8xPwvNhhsoaGGjNU= +github.com/tidwall/sjson v1.2.5 h1:kLy8mja+1c9jlljvWTlSazM7cKDRfJuR/bOJhcY5NcY= +github.com/tidwall/sjson v1.2.5/go.mod h1:Fvgq9kS/6ociJEDnK0Fk1cpYF4FIW6ZF7LAe+6jwd28= github.com/tklauser/go-sysconf v0.3.14 h1:g5vzr9iPFFz24v2KZXs/pvpvh8/V9Fw6vQK5ZZb78yU= github.com/tklauser/go-sysconf v0.3.14/go.mod h1:1ym4lWMLUOhuBOPGtRcJm7tEGX4SCYNEEEtghGG/8uY= github.com/tklauser/numcpus v0.9.0 h1:lmyCHtANi8aRUgkckBgoDk1nHCux3n2cgkJLXdQGPDo= @@ -399,6 +427,8 @@ golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5y golang.org/x/crypto v0.6.0/go.mod h1:OFC/31mSvZgRz0V1QTNCzfAI1aIRzbiufJtkMIlEp58= golang.org/x/crypto v0.28.0 h1:GBDwsMXVQi34v5CCYUm2jkJvu4cbtru2U4TN2PSyQnw= golang.org/x/crypto v0.28.0/go.mod 
h1:rmgy+3RHxRZMyY0jjAJShp2zgEdOqj2AO7U0pYmeQ7U= +golang.org/x/exp v0.0.0-20240909161429-701f63a606c0 h1:e66Fs6Z+fZTbFBAxKfP3PALWBtpfqks2bwGcexMxgtk= +golang.org/x/exp v0.0.0-20240909161429-701f63a606c0/go.mod h1:2TbTHSBQa924w8M6Xs1QcRcFwyucIwBGpK1p2f1YFFY= golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= diff --git a/internal/storage/driver/driver.go b/internal/storage/driver/driver.go index 77c17ea2b..08f6e8a3e 100644 --- a/internal/storage/driver/driver.go +++ b/internal/storage/driver/driver.go @@ -3,7 +3,6 @@ package driver import ( "context" "errors" - "errors" "fmt" "github.com/formancehq/go-libs/v2/metadata" "github.com/formancehq/go-libs/v2/platform/postgres" diff --git a/internal/storage/driver/migrations.go b/internal/storage/driver/migrations.go index 6dd51a682..404a4f516 100644 --- a/internal/storage/driver/migrations.go +++ b/internal/storage/driver/migrations.go @@ -2,9 +2,9 @@ package driver import ( "context" + "database/sql" "errors" "fmt" - "database/sql" "github.com/formancehq/go-libs/v2/time" "github.com/formancehq/go-libs/v2/platform/postgres" @@ -212,7 +212,7 @@ func Migrate(ctx context.Context, db bun.IDB) error { } func detectDowngrades(migrator *migrations.Migrator, ctx context.Context, db bun.IDB) error { - lastVersion, err := migrator.GetDBVersion(ctx, db) + lastVersion, err := migrator.GetLastVersion(ctx, db) if err != nil { if !errors.Is(err, migrations.ErrMissingVersionTable) { return fmt.Errorf("failed to get last version: %w", err) @@ -224,8 +224,8 @@ func detectDowngrades(migrator *migrations.Migrator, ctx context.Context, db bun return fmt.Errorf("failed to get all migrations: %w", err) } - if len(allMigrations) < int(lastVersion) { - return newErrRollbackDetected(int(lastVersion), len(allMigrations)) + if 
len(allMigrations) < lastVersion { + return newErrRollbackDetected(lastVersion, len(allMigrations)) } } From d328f656ed8a357b74907a58eaf5ca5106939594 Mon Sep 17 00:00:00 2001 From: Geoffrey Ragot Date: Thu, 24 Oct 2024 22:10:47 +0200 Subject: [PATCH 11/12] feat: reverse benchmark output comparison --- test/performance/Earthfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/performance/Earthfile b/test/performance/Earthfile index b12580238..20b0969fe 100644 --- a/test/performance/Earthfile +++ b/test/performance/Earthfile @@ -43,7 +43,7 @@ compare: COPY (+run/benchmark-output.txt --args=$args) /report/benchmark-output-local.txt COPY --allow-privileged (github.com/formancehq/ledger/test/performance:${rev}+run/benchmark-output.txt --args=$args) /report/benchmark-output-remote.txt - RUN benchstat /report/benchmark-output-local.txt /report/benchmark-output-remote.txt > benchmark-comparison.txt + RUN benchstat /report/benchmark-output-remote.txt /report/benchmark-output-local.txt > benchmark-comparison.txt SAVE ARTIFACT benchmark-comparison.txt AS LOCAL benchmark-comparison.txt From 0056427b948c00417cfb3ee1739ea0b1cfa18c0a Mon Sep 17 00:00:00 2001 From: Geoffrey Ragot Date: Sat, 19 Oct 2024 17:10:19 +0200 Subject: [PATCH 12/12] feat: migrate old data --- go.sum | 5 --- internal/storage/bucket/migrations.go | 2 +- .../bucket/migrations/1-fix-trigger/up.sql | 2 +- .../12-moves-fill-transaction-id/notes.yaml | 1 + .../12-moves-fill-transaction-id/up.sql | 33 ++++++++++++++ .../up_tests_after.sql | 10 +++++ .../up_tests_before.sql | 10 +++++ .../notes.yaml | 1 + .../13-transactions-fill-inserted-at/up.sql | 30 +++++++++++++ .../up_tests_after.sql | 10 +++++ .../up_tests_before.sql | 10 +++++ .../14-transactions-fill-pcv/notes.yaml | 1 + .../14-transactions-fill-pcv/up.sql | 44 +++++++++++++++++++ .../up_tests_after.sql | 40 +++++++++++++++++ .../up_tests_before.sql | 10 +++++ .../notes.yaml | 1 + .../15-accounts-volumes-fill-history/up.sql | 42 
++++++++++++++++++ .../up_tests_after.sql | 26 +++++++++++ .../up_tests_before.sql | 26 +++++++++++ .../notes.yaml | 1 + .../up.sql | 33 ++++++++++++++ .../up_tests_after.sql | 10 +++++ .../up_tests_before.sql | 10 +++++ .../notes.yaml | 1 + .../17-accounts-metadata-fill-address/up.sql | 33 ++++++++++++++ .../up_tests_after.sql | 10 +++++ .../up_tests_before.sql | 10 +++++ .../18-logs-fill-memento/notes.yaml | 1 + .../migrations/18-logs-fill-memento/up.sql | 28 ++++++++++++ .../18-logs-fill-memento/up_tests_after.sql | 10 +++++ .../18-logs-fill-memento/up_tests_before.sql | 10 +++++ .../2-fix-volumes-aggregation/up.sql | 3 +- 32 files changed, 455 insertions(+), 9 deletions(-) create mode 100644 internal/storage/bucket/migrations/12-moves-fill-transaction-id/notes.yaml create mode 100644 internal/storage/bucket/migrations/12-moves-fill-transaction-id/up.sql create mode 100644 internal/storage/bucket/migrations/12-moves-fill-transaction-id/up_tests_after.sql create mode 100644 internal/storage/bucket/migrations/12-moves-fill-transaction-id/up_tests_before.sql create mode 100644 internal/storage/bucket/migrations/13-transactions-fill-inserted-at/notes.yaml create mode 100644 internal/storage/bucket/migrations/13-transactions-fill-inserted-at/up.sql create mode 100644 internal/storage/bucket/migrations/13-transactions-fill-inserted-at/up_tests_after.sql create mode 100644 internal/storage/bucket/migrations/13-transactions-fill-inserted-at/up_tests_before.sql create mode 100644 internal/storage/bucket/migrations/14-transactions-fill-pcv/notes.yaml create mode 100644 internal/storage/bucket/migrations/14-transactions-fill-pcv/up.sql create mode 100644 internal/storage/bucket/migrations/14-transactions-fill-pcv/up_tests_after.sql create mode 100644 internal/storage/bucket/migrations/14-transactions-fill-pcv/up_tests_before.sql create mode 100644 internal/storage/bucket/migrations/15-accounts-volumes-fill-history/notes.yaml create mode 100644 
internal/storage/bucket/migrations/15-accounts-volumes-fill-history/up.sql create mode 100644 internal/storage/bucket/migrations/15-accounts-volumes-fill-history/up_tests_after.sql create mode 100644 internal/storage/bucket/migrations/15-accounts-volumes-fill-history/up_tests_before.sql create mode 100644 internal/storage/bucket/migrations/16-transactions-metadata-fill-transaction-id/notes.yaml create mode 100644 internal/storage/bucket/migrations/16-transactions-metadata-fill-transaction-id/up.sql create mode 100644 internal/storage/bucket/migrations/16-transactions-metadata-fill-transaction-id/up_tests_after.sql create mode 100644 internal/storage/bucket/migrations/16-transactions-metadata-fill-transaction-id/up_tests_before.sql create mode 100644 internal/storage/bucket/migrations/17-accounts-metadata-fill-address/notes.yaml create mode 100644 internal/storage/bucket/migrations/17-accounts-metadata-fill-address/up.sql create mode 100644 internal/storage/bucket/migrations/17-accounts-metadata-fill-address/up_tests_after.sql create mode 100644 internal/storage/bucket/migrations/17-accounts-metadata-fill-address/up_tests_before.sql create mode 100644 internal/storage/bucket/migrations/18-logs-fill-memento/notes.yaml create mode 100644 internal/storage/bucket/migrations/18-logs-fill-memento/up.sql create mode 100644 internal/storage/bucket/migrations/18-logs-fill-memento/up_tests_after.sql create mode 100644 internal/storage/bucket/migrations/18-logs-fill-memento/up_tests_before.sql diff --git a/go.sum b/go.sum index b0116d52d..af2151831 100644 --- a/go.sum +++ b/go.sum @@ -235,13 +235,8 @@ github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0 github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc= github.com/maruel/natural v1.1.1 h1:Hja7XhhmvEFhcByqDoHz9QZbkWey+COd9xWfCfn1ioo= github.com/maruel/natural v1.1.1/go.mod h1:v+Rfd79xlw1AgVBjbO0BEQmptqb5HvL/k9GRHB7ZKEg= -github.com/mattn/go-colorable 
v0.1.9/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= -github.com/mattn/go-colorable v0.1.12/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4= github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= -github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= -github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= -github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= github.com/minio/highwayhash v1.0.3 h1:kbnuUMoHYyVl7szWjSxJnxw11k2U709jqFPPmIUyD6Q= diff --git a/internal/storage/bucket/migrations.go b/internal/storage/bucket/migrations.go index 6a4147b22..8f5b7fcb5 100644 --- a/internal/storage/bucket/migrations.go +++ b/internal/storage/bucket/migrations.go @@ -27,4 +27,4 @@ func migrate(ctx context.Context, tracer trace.Tracer, db bun.IDB, name string) defer span.End() return GetMigrator(name).Up(ctx, db) -} +} \ No newline at end of file diff --git a/internal/storage/bucket/migrations/1-fix-trigger/up.sql b/internal/storage/bucket/migrations/1-fix-trigger/up.sql index 73866f58c..cbef57036 100644 --- a/internal/storage/bucket/migrations/1-fix-trigger/up.sql +++ b/internal/storage/bucket/migrations/1-fix-trigger/up.sql @@ -29,4 +29,4 @@ begin posting ->> 'destination', posting ->> 'asset', (posting ->> 'amount')::numeric, false, _destination_exists); end; -$$ set search_path from current; +$$ set search_path from current; \ No newline at end of file diff --git a/internal/storage/bucket/migrations/12-moves-fill-transaction-id/notes.yaml b/internal/storage/bucket/migrations/12-moves-fill-transaction-id/notes.yaml new file mode 100644 index 000000000..4e7ed8eef 
--- /dev/null +++ b/internal/storage/bucket/migrations/12-moves-fill-transaction-id/notes.yaml @@ -0,0 +1 @@ +name: Fill transaction ids of table moves diff --git a/internal/storage/bucket/migrations/12-moves-fill-transaction-id/up.sql b/internal/storage/bucket/migrations/12-moves-fill-transaction-id/up.sql new file mode 100644 index 000000000..c481d7674 --- /dev/null +++ b/internal/storage/bucket/migrations/12-moves-fill-transaction-id/up.sql @@ -0,0 +1,33 @@ +do $$ + declare + _batch_size integer := 30; + begin + set search_path = '{{ .Schema }}'; + + loop + with _outdated_moves as ( + select * + from moves + where transactions_id is null + limit _batch_size + ) + update moves + set transactions_id = ( + select id + from transactions + where seq = moves.transactions_seq + ) + from _outdated_moves + where moves.seq in (_outdated_moves.seq); + + exit when not found; + + raise info 'commit batch'; + commit ; + end loop; + + alter table moves + alter column transactions_id set not null; + end +$$ +language plpgsql; \ No newline at end of file diff --git a/internal/storage/bucket/migrations/12-moves-fill-transaction-id/up_tests_after.sql b/internal/storage/bucket/migrations/12-moves-fill-transaction-id/up_tests_after.sql new file mode 100644 index 000000000..19e1e01df --- /dev/null +++ b/internal/storage/bucket/migrations/12-moves-fill-transaction-id/up_tests_after.sql @@ -0,0 +1,10 @@ +set search_path = '{{.Schema}}'; + +do $$ +begin + assert ( + select count(*) + from moves + where transactions_id is null + ) = 0, 'Still some rows with null transactions_id'; +end$$; \ No newline at end of file diff --git a/internal/storage/bucket/migrations/12-moves-fill-transaction-id/up_tests_before.sql b/internal/storage/bucket/migrations/12-moves-fill-transaction-id/up_tests_before.sql new file mode 100644 index 000000000..6def0cb52 --- /dev/null +++ b/internal/storage/bucket/migrations/12-moves-fill-transaction-id/up_tests_before.sql @@ -0,0 +1,10 @@ +set search_path = 
'{{.Schema}}'; + +do $$ +begin + assert ( + select count(*) + from moves + where transactions_id is null + ) > 0, 'Should have some transactions with null transactions_id'; +end$$; \ No newline at end of file diff --git a/internal/storage/bucket/migrations/13-transactions-fill-inserted-at/notes.yaml b/internal/storage/bucket/migrations/13-transactions-fill-inserted-at/notes.yaml new file mode 100644 index 000000000..69c43fb23 --- /dev/null +++ b/internal/storage/bucket/migrations/13-transactions-fill-inserted-at/notes.yaml @@ -0,0 +1 @@ +name: Fill inserted_at column of transactions table diff --git a/internal/storage/bucket/migrations/13-transactions-fill-inserted-at/up.sql b/internal/storage/bucket/migrations/13-transactions-fill-inserted-at/up.sql new file mode 100644 index 000000000..adef673c0 --- /dev/null +++ b/internal/storage/bucket/migrations/13-transactions-fill-inserted-at/up.sql @@ -0,0 +1,30 @@ +do $$ + declare + _batch_size integer := 30; + _date timestamp without time zone; + _count integer; + begin + set search_path = '{{ .Schema }}'; + + -- select the date where the "11-make-stateless" migration has been applied + select tstamp into _date + from _system.goose_db_version + where version_id = 12; + + select count(*) into _count + from logs + where date <= _date; + + for i in 0.._count by _batch_size loop + update transactions + set inserted_at = ( + select date + from logs + where transactions.id = (data->'transaction'->>'id')::bigint and transactions.ledger = ledger + ) + where id >= i and id < i + _batch_size; + + commit; + end loop; + end +$$; \ No newline at end of file diff --git a/internal/storage/bucket/migrations/13-transactions-fill-inserted-at/up_tests_after.sql b/internal/storage/bucket/migrations/13-transactions-fill-inserted-at/up_tests_after.sql new file mode 100644 index 000000000..1216cf43e --- /dev/null +++ b/internal/storage/bucket/migrations/13-transactions-fill-inserted-at/up_tests_after.sql @@ -0,0 +1,10 @@ +set search_path = 
'{{.Schema}}'; + +do $$ +begin + assert ( + select bool_and(log.date = transactions.inserted_at) + from logs log + join transactions on transactions.id = log.id + ), 'Insertion dates of logs and transactions should match'; +end$$; \ No newline at end of file diff --git a/internal/storage/bucket/migrations/13-transactions-fill-inserted-at/up_tests_before.sql b/internal/storage/bucket/migrations/13-transactions-fill-inserted-at/up_tests_before.sql new file mode 100644 index 000000000..e1789bbc4 --- /dev/null +++ b/internal/storage/bucket/migrations/13-transactions-fill-inserted-at/up_tests_before.sql @@ -0,0 +1,10 @@ +set search_path = '{{.Schema}}'; + +do $$ +begin + assert ( + select not bool_and(log.date = transactions.inserted_at) + from logs log + join transactions on transactions.id = log.id + ), 'Insertion dates of logs and transactions should not match'; +end$$; \ No newline at end of file diff --git a/internal/storage/bucket/migrations/14-transactions-fill-pcv/notes.yaml b/internal/storage/bucket/migrations/14-transactions-fill-pcv/notes.yaml new file mode 100644 index 000000000..4a8274783 --- /dev/null +++ b/internal/storage/bucket/migrations/14-transactions-fill-pcv/notes.yaml @@ -0,0 +1 @@ +name: Fill post_commit_volumes column of transactions table diff --git a/internal/storage/bucket/migrations/14-transactions-fill-pcv/up.sql b/internal/storage/bucket/migrations/14-transactions-fill-pcv/up.sql new file mode 100644 index 000000000..cb36ab671 --- /dev/null +++ b/internal/storage/bucket/migrations/14-transactions-fill-pcv/up.sql @@ -0,0 +1,44 @@ +do $$ + declare + _batch_size integer := 30; + begin + set search_path = '{{ .Schema }}'; + + loop + with _outdated_transactions as ( + select id + from transactions + where post_commit_volumes is null + limit _batch_size + ) + update transactions + set post_commit_volumes = ( + select public.aggregate_objects(post_commit_volumes::jsonb) as post_commit_volumes + from ( + select accounts_address, 
json_build_object(accounts_address, post_commit_volumes) post_commit_volumes + from ( + select accounts_address, json_build_object(asset, post_commit_volumes) as post_commit_volumes + from ( + select distinct on (accounts_address, asset) + accounts_address, + asset, + first_value(post_commit_volumes) over ( + partition by accounts_address, asset + order by seq desc + ) as post_commit_volumes + from moves + where transactions_id = transactions.id and ledger = transactions.ledger + ) moves + ) values + ) values + ) + from _outdated_transactions + where transactions.id in (_outdated_transactions.id); + + exit when not found; + end loop; + + alter table transactions + alter column post_commit_volumes set not null; + end +$$; \ No newline at end of file diff --git a/internal/storage/bucket/migrations/14-transactions-fill-pcv/up_tests_after.sql b/internal/storage/bucket/migrations/14-transactions-fill-pcv/up_tests_after.sql new file mode 100644 index 000000000..6af947d92 --- /dev/null +++ b/internal/storage/bucket/migrations/14-transactions-fill-pcv/up_tests_after.sql @@ -0,0 +1,40 @@ +set search_path = '{{.Schema}}'; + +do $$ +begin + assert ( + select count(*) + from transactions + where post_commit_volumes is null + ) = 0, 'Post commit volumes should be set on all transactions'; + assert ( + select post_commit_volumes + from transactions + where ledger = 'ledger0' and id = 1 + ) = ('{' + '"fees": {' + '"USD": {' + '"inputs": 2, ' + '"outputs": 0' + '}' + '}, ' + '"world": {' + '"USD": {' + '"inputs": 0, ' + '"outputs": 200' + '}' + '}, ' + '"orders:5": {' + '"USD": {' + '"inputs": 100, ' + '"outputs": 100' + '}' + '}, ' + '"sellers:0": {' + '"USD": {' + '"inputs": 198, ' + '"outputs": 0' + '}' + '}' + '}')::jsonb, 'Post commit volumes should be correct'; +end$$; \ No newline at end of file diff --git a/internal/storage/bucket/migrations/14-transactions-fill-pcv/up_tests_before.sql b/internal/storage/bucket/migrations/14-transactions-fill-pcv/up_tests_before.sql new 
file mode 100644 index 000000000..06f83e116 --- /dev/null +++ b/internal/storage/bucket/migrations/14-transactions-fill-pcv/up_tests_before.sql @@ -0,0 +1,10 @@ +set search_path = '{{.Schema}}'; + +do $$ +begin + assert ( + select count(*) + from transactions + where post_commit_volumes is null + ) > 0, 'Post commit volumes should be null on some transactions'; +end$$; \ No newline at end of file diff --git a/internal/storage/bucket/migrations/15-accounts-volumes-fill-history/notes.yaml b/internal/storage/bucket/migrations/15-accounts-volumes-fill-history/notes.yaml new file mode 100644 index 000000000..35624b619 --- /dev/null +++ b/internal/storage/bucket/migrations/15-accounts-volumes-fill-history/notes.yaml @@ -0,0 +1 @@ +name: Populate accounts_volumes table with historic data diff --git a/internal/storage/bucket/migrations/15-accounts-volumes-fill-history/up.sql b/internal/storage/bucket/migrations/15-accounts-volumes-fill-history/up.sql new file mode 100644 index 000000000..52de43313 --- /dev/null +++ b/internal/storage/bucket/migrations/15-accounts-volumes-fill-history/up.sql @@ -0,0 +1,42 @@ +do $$ + declare + _missing record; + begin + set search_path = '{{ .Schema }}'; + + loop + select distinct on (ledger, accounts_address, asset) + ledger, + accounts_address, + asset, + first_value(post_commit_volumes) over ( + partition by ledger, accounts_address, asset + order by seq desc + ) as post_commit_volumes + into _missing + from moves + where not exists( + select + from accounts_volumes + where ledger = moves.ledger + and asset = moves.asset + and accounts_address = moves.accounts_address + ) + limit 1; + + exit when not found; + + insert into accounts_volumes (ledger, accounts_address, asset, input, output) + values ( + _missing.ledger, + _missing.accounts_address, + _missing.asset, + (_missing.post_commit_volumes).inputs, + (_missing.post_commit_volumes).outputs + ) + on conflict do nothing; -- can be inserted by a concurrent transaction + + commit; + end 
loop; + end +$$; \ No newline at end of file diff --git a/internal/storage/bucket/migrations/15-accounts-volumes-fill-history/up_tests_after.sql b/internal/storage/bucket/migrations/15-accounts-volumes-fill-history/up_tests_after.sql new file mode 100644 index 000000000..5d1a8fa8e --- /dev/null +++ b/internal/storage/bucket/migrations/15-accounts-volumes-fill-history/up_tests_after.sql @@ -0,0 +1,26 @@ +set search_path = '{{.Schema}}'; + +do $$ +begin + assert ( + select count(*) + from ( + select distinct on (ledger, accounts_address, asset) + ledger, + accounts_address, + asset, + first_value(post_commit_volumes) over ( + partition by ledger, accounts_address, asset + order by seq desc + ) as post_commit_volumes + from moves + where not exists( + select + from accounts_volumes + where ledger = moves.ledger + and asset = moves.asset + and accounts_address = moves.accounts_address + ) + ) v + ) = 0, 'All accounts volumes should be ok'; +end$$; \ No newline at end of file diff --git a/internal/storage/bucket/migrations/15-accounts-volumes-fill-history/up_tests_before.sql b/internal/storage/bucket/migrations/15-accounts-volumes-fill-history/up_tests_before.sql new file mode 100644 index 000000000..a67966b51 --- /dev/null +++ b/internal/storage/bucket/migrations/15-accounts-volumes-fill-history/up_tests_before.sql @@ -0,0 +1,26 @@ +set search_path = '{{.Schema}}'; + +do $$ +begin + assert ( + select count(*) + from ( + select distinct on (ledger, accounts_address, asset) + ledger, + accounts_address, + asset, + first_value(post_commit_volumes) over ( + partition by ledger, accounts_address, asset + order by seq desc + ) as post_commit_volumes + from moves + where not exists( + select + from accounts_volumes + where ledger = moves.ledger + and asset = moves.asset + and accounts_address = moves.accounts_address + ) + ) v + ) > 0, 'Some accounts volumes should not be set'; +end$$; \ No newline at end of file diff --git 
a/internal/storage/bucket/migrations/16-transactions-metadata-fill-transaction-id/notes.yaml b/internal/storage/bucket/migrations/16-transactions-metadata-fill-transaction-id/notes.yaml new file mode 100644 index 000000000..449dcfd17 --- /dev/null +++ b/internal/storage/bucket/migrations/16-transactions-metadata-fill-transaction-id/notes.yaml @@ -0,0 +1 @@ +name: Fill transactions_id column of transactions_metadata table diff --git a/internal/storage/bucket/migrations/16-transactions-metadata-fill-transaction-id/up.sql b/internal/storage/bucket/migrations/16-transactions-metadata-fill-transaction-id/up.sql new file mode 100644 index 000000000..5140958c4 --- /dev/null +++ b/internal/storage/bucket/migrations/16-transactions-metadata-fill-transaction-id/up.sql @@ -0,0 +1,33 @@ + +do $$ + declare + _batch_size integer := 30; + begin + set search_path = '{{.Schema}}'; + + loop + with _outdated_transactions_metadata as ( + select seq + from transactions_metadata + where transactions_id is null + limit _batch_size + ) + update transactions_metadata + set transactions_id = ( + select id + from transactions + where transactions_metadata.transactions_seq = seq + ) + from _outdated_transactions_metadata + where transactions_metadata.seq in (_outdated_transactions_metadata.seq); + + exit when not found; + + commit ; + end loop; + + alter table transactions_metadata + alter column transactions_id set not null ; + end +$$; + diff --git a/internal/storage/bucket/migrations/16-transactions-metadata-fill-transaction-id/up_tests_after.sql b/internal/storage/bucket/migrations/16-transactions-metadata-fill-transaction-id/up_tests_after.sql new file mode 100644 index 000000000..09763ced9 --- /dev/null +++ b/internal/storage/bucket/migrations/16-transactions-metadata-fill-transaction-id/up_tests_after.sql @@ -0,0 +1,10 @@ +set search_path = '{{.Schema}}'; + +do $$ +begin + assert ( + select count(*) + from transactions_metadata + where transactions_id is null + ) = 0, 'Transactions ids 
on transactions_metadata table should not be null'; +end$$; \ No newline at end of file diff --git a/internal/storage/bucket/migrations/16-transactions-metadata-fill-transaction-id/up_tests_before.sql b/internal/storage/bucket/migrations/16-transactions-metadata-fill-transaction-id/up_tests_before.sql new file mode 100644 index 000000000..d79f59b56 --- /dev/null +++ b/internal/storage/bucket/migrations/16-transactions-metadata-fill-transaction-id/up_tests_before.sql @@ -0,0 +1,10 @@ +set search_path = '{{.Schema}}'; + +do $$ +begin + assert ( + select count(*) + from transactions_metadata + where transactions_id is null + ) > 0, 'Transactions ids of transactions_metadata table should be null'; +end$$; \ No newline at end of file diff --git a/internal/storage/bucket/migrations/17-accounts-metadata-fill-address/notes.yaml b/internal/storage/bucket/migrations/17-accounts-metadata-fill-address/notes.yaml new file mode 100644 index 000000000..f599539a8 --- /dev/null +++ b/internal/storage/bucket/migrations/17-accounts-metadata-fill-address/notes.yaml @@ -0,0 +1 @@ +name: Fill accounts_address column of accounts_metadata table diff --git a/internal/storage/bucket/migrations/17-accounts-metadata-fill-address/up.sql b/internal/storage/bucket/migrations/17-accounts-metadata-fill-address/up.sql new file mode 100644 index 000000000..d3b408c0f --- /dev/null +++ b/internal/storage/bucket/migrations/17-accounts-metadata-fill-address/up.sql @@ -0,0 +1,33 @@ + +do $$ + declare + _batch_size integer := 30; + begin + set search_path = '{{.Schema}}'; + + loop + with _outdated_accounts_metadata as ( + select seq + from accounts_metadata + where accounts_address is null + limit _batch_size + ) + update accounts_metadata + set accounts_address = ( + select address + from accounts + where accounts_metadata.accounts_seq = seq + ) + from _outdated_accounts_metadata + where accounts_metadata.seq in (_outdated_accounts_metadata.seq); + + exit when not found; + + commit ; + end loop; + + 
alter table accounts_metadata + alter column accounts_address set not null ; + end +$$; + diff --git a/internal/storage/bucket/migrations/17-accounts-metadata-fill-address/up_tests_after.sql b/internal/storage/bucket/migrations/17-accounts-metadata-fill-address/up_tests_after.sql new file mode 100644 index 000000000..1025ba3cf --- /dev/null +++ b/internal/storage/bucket/migrations/17-accounts-metadata-fill-address/up_tests_after.sql @@ -0,0 +1,10 @@ +set search_path = '{{.Schema}}'; + +do $$ +begin + assert ( + select count(*) + from accounts_metadata + where accounts_address is null + ) = 0, 'Account addresses on accounts_metadata table should not be null'; +end$$; \ No newline at end of file diff --git a/internal/storage/bucket/migrations/17-accounts-metadata-fill-address/up_tests_before.sql b/internal/storage/bucket/migrations/17-accounts-metadata-fill-address/up_tests_before.sql new file mode 100644 index 000000000..d223b1494 --- /dev/null +++ b/internal/storage/bucket/migrations/17-accounts-metadata-fill-address/up_tests_before.sql @@ -0,0 +1,10 @@ +set search_path = '{{.Schema}}'; + +do $$ +begin + assert ( + select count(*) + from accounts_metadata + where accounts_address is null + ) > 0, 'Account addresses of accounts_metadata table should be null'; +end$$; \ No newline at end of file diff --git a/internal/storage/bucket/migrations/18-logs-fill-memento/notes.yaml b/internal/storage/bucket/migrations/18-logs-fill-memento/notes.yaml new file mode 100644 index 000000000..1f7fd9415 --- /dev/null +++ b/internal/storage/bucket/migrations/18-logs-fill-memento/notes.yaml @@ -0,0 +1 @@ +name: Fill memento column of logs table diff --git a/internal/storage/bucket/migrations/18-logs-fill-memento/up.sql b/internal/storage/bucket/migrations/18-logs-fill-memento/up.sql new file mode 100644 index 000000000..21800432e --- /dev/null +++ b/internal/storage/bucket/migrations/18-logs-fill-memento/up.sql @@ -0,0 +1,28 @@ +do $$ + declare + _batch_size integer := 30; + begin + 
set search_path = '{{.Schema}}'; + + loop + with _outdated_logs as ( + select seq + from logs + where memento is null + limit _batch_size + ) + update logs + set memento = convert_to(data::varchar, 'LATIN1')::bytea + from _outdated_logs + where logs.seq in (_outdated_logs.seq); + + exit when not found; + + commit ; + end loop; + + alter table logs + alter column memento set not null; + end +$$; + diff --git a/internal/storage/bucket/migrations/18-logs-fill-memento/up_tests_after.sql b/internal/storage/bucket/migrations/18-logs-fill-memento/up_tests_after.sql new file mode 100644 index 000000000..733216b58 --- /dev/null +++ b/internal/storage/bucket/migrations/18-logs-fill-memento/up_tests_after.sql @@ -0,0 +1,10 @@ +set search_path = '{{.Schema}}'; + +do $$ +begin + assert ( + select count(*) + from logs + where memento is null + ) = 0, 'Mementos of logs table should not be null'; +end$$; \ No newline at end of file diff --git a/internal/storage/bucket/migrations/18-logs-fill-memento/up_tests_before.sql b/internal/storage/bucket/migrations/18-logs-fill-memento/up_tests_before.sql new file mode 100644 index 000000000..3a2957490 --- /dev/null +++ b/internal/storage/bucket/migrations/18-logs-fill-memento/up_tests_before.sql @@ -0,0 +1,10 @@ +set search_path = '{{.Schema}}'; + +do $$ +begin + assert ( + select count(*) + from logs + where memento is null + ) > 0, 'Mementos of logs table should be null'; +end$$; \ No newline at end of file diff --git a/internal/storage/bucket/migrations/2-fix-volumes-aggregation/up.sql b/internal/storage/bucket/migrations/2-fix-volumes-aggregation/up.sql index 9cdc09172..986b42575 100644 --- a/internal/storage/bucket/migrations/2-fix-volumes-aggregation/up.sql +++ b/internal/storage/bucket/migrations/2-fix-volumes-aggregation/up.sql @@ -22,5 +22,4 @@ with all_assets as (select v.v as asset ) m on true) select moves.asset, moves.post_commit_volumes from moves -$$ set search_path from current; - +$$ set search_path from current; \ No
newline at end of file