supabase: compact all migrations, refreshed from production
Minor adjustments to account for drift between migrations and the
production DB.
jgraettinger committed Oct 1, 2024
1 parent 33fe96f commit 0ed270a
Showing 79 changed files with 8,371 additions and 6,523 deletions.
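
A note on the drift mentioned in the commit message: one way to reconcile a compacted migration set with the production database is to compare the live schema against what the migrations build. The query below is an illustrative sketch (plain information_schema, not part of this commit) listing the production columns of the connectors table; the extra recommended column is the kind of drift the test updates below account for.

-- Illustrative drift check, not part of this commit: list the columns of the
-- production connectors table so they can be diffed against the table that
-- the compacted migrations produce.
select column_name, data_type, is_nullable, column_default
from information_schema.columns
where table_schema = 'public'
  and table_name = 'connectors'
order by ordinal_position;
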
4 changes: 2 additions & 2 deletions crates/agent-sql/tests/connector_tags.rs
@@ -15,8 +15,8 @@ async fn resource_path_pointers_cannot_be_changed() {
let row = sqlx::query!(
r#"
with setup_connectors as (
- insert into connectors (image_name, external_url, title, short_description, logo_url)
- values ('foo/image', 'http://test.test', '{"en-US": "foo"}', '{"en-US": "foo"}', '{"en-US": "foo"}')
+ insert into connectors (image_name, external_url, title, short_description, logo_url, recommended)
+ values ('foo/image', 'http://test.test', '{"en-US": "foo"}', '{"en-US": "foo"}', '{"en-US": "foo"}', false)
returning id
)
insert into connector_tags (connector_id, image_tag) select id, ':test' as image_tag from setup_connectors
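
The change above, and the matching ones in publications.rs and harness.rs below, all add a recommended value to test inserts into connectors. This suggests the refreshed schema declares that column as a non-null boolean without a default; a hypothetical sketch of such a definition follows (the authoritative DDL lives in the compacted migrations, not here).

-- Hypothetical, for illustration only: the column shape implied by the
-- updated test inserts. On a table with existing rows this would need a
-- default or a backfill step before the not-null constraint could hold.
alter table public.connectors
    add column recommended boolean not null;
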
4 changes: 2 additions & 2 deletions crates/agent-sql/tests/publications.rs
@@ -20,8 +20,8 @@ async fn test_finding_forbidden_connectors() {
('bb00000000000000', 'testConnectors/Allowed', '{}'::json, 'capture', 'allowed_image', 'bbbbbbbbbbbbbbbb', 'bbbbbbbbbbbbbbbb')
),
p2 as (
- insert into connectors (external_url, image_name, title, short_description, logo_url) values
- ('http://example.com', 'allowed_image', '{"en-US": "foo"}'::json, '{"en-US": "foo"}'::json, '{"en-US": "foo"}'::json)
+ insert into connectors (external_url, image_name, title, short_description, logo_url, recommended) values
+ ('http://example.com', 'allowed_image', '{"en-US": "foo"}'::json, '{"en-US": "foo"}'::json, '{"en-US": "foo"}'::json, false)
)
select 1;
"#,
1 change: 0 additions & 1 deletion crates/agent/src/controllers/mod.rs
@@ -457,7 +457,6 @@ mod test {
use std::collections::{BTreeSet, VecDeque};

use chrono::TimeZone;
- use models::Capture;

use super::*;
use crate::controllers::materialization::SourceCaptureStatus;
8 changes: 4 additions & 4 deletions crates/agent/src/integration_tests/harness.rs
@@ -120,14 +120,14 @@ impl TestHarness {
async fn setup_test_connectors(&mut self) {
sqlx::query!(r##"
with source_image as (
- insert into connectors (external_url, image_name, title, short_description, logo_url)
- values ('http://test.test/', 'source/test', '{"en-US": "test"}', '{"en-US": "test"}', '{"en-US": "http://test.test/"}')
+ insert into connectors (external_url, image_name, title, short_description, logo_url, recommended)
+ values ('http://test.test/', 'source/test', '{"en-US": "test"}', '{"en-US": "test"}', '{"en-US": "http://test.test/"}', false)
on conflict(image_name) do update set title = excluded.title
returning id
),
materialize_image as (
- insert into connectors (external_url, image_name, title, short_description, logo_url)
- values ('http://test.test/', 'materialize/test', '{"en-US": "test"}', '{"en-US": "test"}', '{"en-US": "http://test.test/"}')
+ insert into connectors (external_url, image_name, title, short_description, logo_url, recommended)
+ values ('http://test.test/', 'materialize/test', '{"en-US": "test"}', '{"en-US": "test"}', '{"en-US": "http://test.test/"}', false)
on conflict(image_name) do update set title = excluded.title
returning id
),
4 changes: 1 addition & 3 deletions crates/agent/src/integration_tests/null_bytes.rs
@@ -1,7 +1,5 @@
use super::harness::{draft_catalog, md5_hash, TestHarness};
- use crate::{controllers::ControllerState, publications::JobStatus, ControlPlane};
- use agent_sql::Capability;
- use models::{CatalogType, Id};
+ use crate::publications::JobStatus;
use tables::InferredSchema;

#[tokio::test]
34 changes: 29 additions & 5 deletions crates/agent/src/integration_tests/source_captures.rs
@@ -94,7 +94,13 @@ async fn test_source_captures() {
);
assert_eq!(
"quacks",
- a_model.bindings[0].resource.to_value().pointer("/id").unwrap().as_str().unwrap()
+ a_model.bindings[0]
+ .resource
+ .to_value()
+ .pointer("/id")
+ .unwrap()
+ .as_str()
+ .unwrap()
);
let a_status = a_state.current_status.unwrap_materialization();
assert!(a_status.source_capture.as_ref().unwrap().up_to_date);
@@ -214,15 +220,33 @@ async fn test_source_captures_collection_name() {
);
assert_eq!(
"pond",
- a_model.bindings[0].resource.to_value().pointer("/schema").unwrap().as_str().unwrap()
+ a_model.bindings[0]
+ .resource
+ .to_value()
+ .pointer("/schema")
+ .unwrap()
+ .as_str()
+ .unwrap()
);
assert_eq!(
true,
- a_model.bindings[0].resource.to_value().pointer("/delta").unwrap().as_bool().unwrap()
+ a_model.bindings[0]
+ .resource
+ .to_value()
+ .pointer("/delta")
+ .unwrap()
+ .as_bool()
+ .unwrap()
);
assert_eq!(
"quacks",
- a_model.bindings[0].resource.to_value().pointer("/id").unwrap().as_str().unwrap()
+ a_model.bindings[0]
+ .resource
+ .to_value()
+ .pointer("/id")
+ .unwrap()
+ .as_str()
+ .unwrap()
);
let a_status = a_state.current_status.unwrap_materialization();
assert!(a_status.source_capture.as_ref().unwrap().up_to_date);
@@ -259,7 +283,7 @@ async fn test_source_captures_collection_name() {
#[tokio::test]
#[serial_test::serial]
async fn test_source_capture_no_annotations() {
- let mut harness = TestHarness::init("test_source_capture_no_annotations").await;
+ let harness = TestHarness::init("test_source_capture_no_annotations").await;
let user_id = harness.setup_tenant("sheep").await;

let draft = draft_catalog(serde_json::json!({
2 changes: 1 addition & 1 deletion crates/agent/src/integration_tests/unknown_connectors.rs
@@ -6,7 +6,7 @@ use crate::integration_tests::harness::{draft_catalog, TestHarness};
#[tokio::test]
#[serial_test::serial]
async fn test_forbidden_connector() {
- let mut harness = TestHarness::init("test_forbidden_connector").await;
+ let harness = TestHarness::init("test_forbidden_connector").await;
let user_id = harness.setup_tenant("sheep").await;

let draft = draft_catalog(serde_json::json!({
54 changes: 54 additions & 0 deletions supabase/migrations/00_polyfill.sql
@@ -0,0 +1,54 @@
begin;

-- Roles which are created by supabase: anon, authenticated, supabase_admin, service_role.

create role stats_loader with login password 'stats_loader_password' bypassrls;
create role marketplace_integration;
create role gatsby_reader;
create role github_action_connector_refresh;
create role wgd_automation;
create role reporting_user;

-- Required for postgres to give ownership of catalog_stats to stats_loader.
grant stats_loader to postgres;

-- Required for stats materialization to create flow_checkpoints_v1 and flow_materializations_v2.
grant create on schema public to stats_loader;

-- TODO(johnny): Required for `authenticated` to own `drafts_ext` and `publication_specs_ext`.
-- We should make them owned by postgres and grant usage instead.
grant create on schema public to authenticated;

-- The production database has a Flow materialization of Stripe customer data.
-- This is a partial table which matches the portions we use today.
create schema stripe;

create table stripe.customers (
id text primary key,
address json,
"address/city" text,
"address/country" text,
"address/line1" text,
"address/line2" text,
"address/postal_code" text,
"address/state" text,
balance bigint,
created bigint,
currency text,
default_source text,
delinquent boolean,
description text,
email text,
invoice_prefix text,
invoice_settings json,
"invoice_settings/custom_fields" json,
"invoice_settings/default_payment_method" text,
metadata json,
name text,
phone text,
flow_document json not null
);

grant usage on schema stripe to postgres;

commit;
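
For context on the grants in this polyfill: making postgres a member of stats_loader is what allows the catalog_stats ownership hand-off mentioned in the comment above, and the create grant on public is what lets the stats materialization (connecting as stats_loader) create its own tables. A sketch of the kind of statements those grants enable, illustrative only and not part of this migration; the scratch table name is hypothetical.

-- Illustrative only; not part of this migration.
-- Membership of stats_loader lets postgres transfer ownership later on:
alter table public.catalog_stats owner to stats_loader;
-- The create grant on public lets stats_loader create tables of its own,
-- for example (hypothetical table name):
set role stats_loader;
create table if not exists public.stats_loader_scratch (id bigint primary key);
reset role;
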
96 changes: 0 additions & 96 deletions supabase/migrations/01_json.sql

This file was deleted.

112 changes: 0 additions & 112 deletions supabase/migrations/02_flowid.sql

This file was deleted.
