Skip to content

Commit

Permalink
feat: migration deals schema (#418)
Browse files Browse the repository at this point in the history
  • Loading branch information
hugomrdias authored Sep 17, 2021
1 parent 02975bf commit 922b881
Show file tree
Hide file tree
Showing 29 changed files with 1,173 additions and 199 deletions.
22 changes: 13 additions & 9 deletions .github/workflows/api.yml
Original file line number Diff line number Diff line change
Expand Up @@ -7,25 +7,29 @@ on:
- 'packages/api/**'
- '.github/workflows/api.yml'
pull_request:
branches:
- main
paths:
- 'packages/api/**'
- '.github/workflows/api.yml'
jobs:
test:
runs-on: ubuntu-latest
name: Test
steps:
- uses: actions/checkout@v2
- uses: microsoft/playwright-github-action@v1
- uses: actions/setup-node@v2
with:
node-version: '16'
- uses: bahmutov/npm-install@v1
- run: npx playwright install-deps
- run: yarn test:api
env:
DATABASE_URL: ${{ secrets.DATABASE_URL}}
DATABASE_TOKEN: ${{ secrets.DATABASE_TOKEN}}
DATABASE_URL: ${{secrets.DATABASE_URL}}
DATABASE_TOKEN: ${{secrets.DATABASE_TOKEN}}
DATABASE_CONNECTION: ${{secrets.DATABASE_CONNECTION}}
DAG_CARGO_HOST: ${{secrets.DAG_CARGO_HOST}}
DAG_CARGO_DATABASE: ${{secrets.DAG_CARGO_DATABASE}}
DAG_CARGO_USER: ${{secrets.DAG_CARGO_USER}}
DAG_CARGO_PASSWORD: ${{secrets.DAG_CARGO_PASSWORD}}
deploy-dev:
name: Deploy Dev
if: github.event_name == 'pull_request' && github.ref != 'refs/heads/main'
Expand All @@ -37,9 +41,9 @@ jobs:
- name: Publish app
uses: cloudflare/wrangler-action@1.3.0
env:
SENTRY_TOKEN: ${{ secrets.SENTRY_TOKEN}}
SENTRY_TOKEN: ${{secrets.SENTRY_TOKEN}}
with:
apiToken: ${{ secrets.CF_API_TOKEN }}
apiToken: ${{secrets.CF_API_TOKEN }}
workingDirectory: 'packages/api'
deploy-staging:
name: Deploy Staging
Expand All @@ -52,8 +56,8 @@ jobs:
- name: Publish app
uses: cloudflare/wrangler-action@1.3.0
env:
SENTRY_TOKEN: ${{ secrets.SENTRY_TOKEN}}
SENTRY_TOKEN: ${{secrets.SENTRY_TOKEN}}
with:
apiToken: ${{ secrets.CF_API_TOKEN }}
apiToken: ${{secrets.CF_API_TOKEN }}
workingDirectory: 'packages/api'
environment: 'staging'
4 changes: 2 additions & 2 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -42,14 +42,14 @@
"devDependencies": {
"lint-staged": "^11.0.0",
"npm-run-all": "^4.1.5",
"prettier": "2.3.2",
"prettier": "2.4.0",
"rimraf": "^3.0.2",
"simple-git-hooks": "^2.3.1",
"typescript": "4.3.5"
},
"resolutions": {
"graphql": "^15.5.1",
"prettier": "2.3.2",
"prettier": "2.4.0",
"esbuild": "0.12.21"
}
}
42 changes: 42 additions & 0 deletions packages/api/db/cargo.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,42 @@
-- cargo.sql: mirror the remote dag-cargo tables locally.
-- Imports three tables from the dag_cargo_server FDW server (set up in
-- fdw.sql) into a local "cargo" schema, then exposes them to the API as
-- materialized views in public.
CREATE SCHEMA IF NOT EXISTS cargo;

-- Import dag cargo schema
-- Only the three tables the API reads; anything else on the remote side
-- is deliberately left out.
IMPORT FOREIGN SCHEMA cargo
LIMIT TO (aggregate_entries, aggregates, deals)
FROM SERVER dag_cargo_server
INTO cargo;

-- Create materialized view from cargo "aggregate_entries" table
-- NOTE(review): SELECT * means the view's column set silently tracks the
-- remote schema at creation time — confirm that is intended.
CREATE MATERIALIZED VIEW public.aggregate_entry
AS
SELECT *
FROM cargo.aggregate_entries;

-- Indexes for "aggregate_entries" mat view
-- The unique index also makes REFRESH MATERIALIZED VIEW CONCURRENTLY possible.
CREATE UNIQUE INDEX aggregate_entry_unique_cidv1_aggregate_cid
ON public.aggregate_entry (aggregate_cid, cid_v1);
CREATE INDEX aggregate_entry_cid_v1
ON public.aggregate_entry (cid_v1);

-- Create materialized view from cargo "deals" table
CREATE MATERIALIZED VIEW public.deal
AS
SELECT *
FROM cargo.deals;

-- Indexes for "deals" mat view
CREATE UNIQUE INDEX deal_unique_deal_id
ON public.deal (deal_id);
CREATE INDEX deal_aggregate_cid
ON public.deal (aggregate_cid);

-- Create materialized view from cargo "aggregates" table
CREATE MATERIALIZED VIEW public.aggregate
AS
SELECT *
FROM cargo.aggregates;

-- Indexes for "aggregate" mat view
CREATE UNIQUE INDEX aggregate_unique_aggregate_cid
ON public.aggregate (aggregate_cid);

28 changes: 28 additions & 0 deletions packages/api/db/docker/docker-compose.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
# Local development stack: PostgREST in front of a Supabase-flavoured
# Postgres. NOT for production — every credential and the JWT secret
# below are throwaway dev values.
version: '3.6'
services:
  rest:
    image: postgrest/postgrest:v8.0.0
    depends_on:
      - db
    restart: always
    ports:
      - 3000:3000/tcp
    environment:
      PGRST_DB_URI: postgres://postgres:postgres@db:5432/postgres
      PGRST_DB_SCHEMA: public
      PGRST_DB_ANON_ROLE: postgres
      # Dev-only secret; PostgREST requires it to be at least 32 chars.
      PGRST_JWT_SECRET: super-secret-jwt-token-with-at-least-32-characters-long
  db:
    # Image built from ./postgres/Dockerfile (supabase/postgres plus the
    # 00-initial-schema.sql bootstrap).
    build:
      context: ./postgres
    ports:
      - 5432:5432
    # wal_level=logical is needed for the supabase_realtime publication
    # created in 00-initial-schema.sql.
    command:
      - postgres
      - -c
      - wal_level=logical
    environment:
      POSTGRES_DB: postgres
      POSTGRES_USER: postgres
      POSTGRES_PASSWORD: postgres
      POSTGRES_PORT: 5432
40 changes: 40 additions & 0 deletions packages/api/db/docker/postgres/00-initial-schema.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@


-- Bootstrap schema for the local dev Postgres image.
-- Executed automatically by initdb from /docker-entrypoint-initdb.d
-- (copied in by the postgres Dockerfile). Mirrors a Supabase-style setup.

-- Set up realtime
create publication supabase_realtime for all tables;

-- Extension namespacing: keep extensions out of the public schema
create schema extensions;
create extension if not exists "uuid-ossp" with schema extensions;
create extension if not exists pgcrypto with schema extensions;
create extension if not exists pgjwt with schema extensions;

-- Developer roles
create role anon nologin noinherit;
create role authenticated nologin noinherit; -- "logged in" user: web_user, app_user, etc
create role service_role nologin noinherit bypassrls; -- allow developers to create JWTs that bypass their policies

-- NOTE(review): authenticator is presumably the role an API gateway logs in
-- as before switching into one of the roles above — the bundled
-- docker-compose connects as "postgres" instead; confirm which is intended.
create user authenticator noinherit;
grant anon to authenticator;
grant authenticated to authenticator;
grant service_role to authenticator;

-- Make everything created in public usable by all app roles by default.
grant usage on schema public to postgres, anon, authenticated, service_role;
alter default privileges in schema public grant all on tables to postgres, anon, authenticated, service_role;
alter default privileges in schema public grant all on functions to postgres, anon, authenticated, service_role;
alter default privileges in schema public grant all on sequences to postgres, anon, authenticated, service_role;

-- Create an event trigger function that tells PostgREST to reload its
-- schema cache (PostgREST listens on the "pgrst" channel).
CREATE OR REPLACE FUNCTION public.pgrst_watch() RETURNS event_trigger
LANGUAGE plpgsql
AS $$
BEGIN
NOTIFY pgrst, 'reload schema';
END;
$$;

-- This event trigger will fire after every ddl_command_end event
CREATE EVENT TRIGGER pgrst_watch
ON ddl_command_end
EXECUTE PROCEDURE public.pgrst_watch();

11 changes: 11 additions & 0 deletions packages/api/db/docker/postgres/Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
# Local development database image: Supabase Postgres plus the project
# bootstrap schema, applied automatically on first start.
FROM supabase/postgres:0.13.0

# initdb runs every *.sql in /docker-entrypoint-initdb.d when the data
# directory is empty, so the schema is applied exactly once.
COPY 00-initial-schema.sql /docker-entrypoint-initdb.d/00-initial-schema.sql

# Run time values (dev-only defaults; override at `docker run` if needed)
ENV POSTGRES_DB=postgres
ENV POSTGRES_USER=postgres
ENV POSTGRES_PASSWORD=postgres
ENV POSTGRES_PORT=5432

EXPOSE 5432
22 changes: 22 additions & 0 deletions packages/api/db/fdw.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
-- fdw.sql: connect this database to the remote dag-cargo database via
-- postgres_fdw. Expects the psql variables DAG_CARGO_HOST,
-- DAG_CARGO_DATABASE, DAG_CARGO_USER and DAG_CARGO_PASSWORD to be
-- supplied at run time (e.g. `psql -v DAG_CARGO_HOST=...`).

CREATE EXTENSION IF NOT EXISTS postgres_fdw;

-- Drop and recreate the server so the script is safely re-runnable.
DROP SERVER IF EXISTS dag_cargo_server CASCADE;

CREATE SERVER dag_cargo_server
    FOREIGN DATA WRAPPER postgres_fdw
    OPTIONS (
        host :'DAG_CARGO_HOST',
        dbname :'DAG_CARGO_DATABASE',
        fetch_size '200000'
    );

-- Map the local role onto the remote credentials.
CREATE USER MAPPING FOR current_user
    SERVER dag_cargo_server
    OPTIONS (
        user :'DAG_CARGO_USER',
        password :'DAG_CARGO_PASSWORD'
    );
128 changes: 80 additions & 48 deletions packages/api/db/functions.sql
Original file line number Diff line number Diff line change
@@ -1,70 +1,102 @@
DROP FUNCTION IF EXISTS upload_fn;
DROP FUNCTION IF EXISTS deals_fn;

DROP TYPE IF EXISTS upload_pin_type;

CREATE TYPE upload_pin_type AS (
status pin_status_type,
service service_type
CREATE TYPE upload_pin_type AS
(
status pin_status_type,
service service_type
);

CREATE OR REPLACE FUNCTION upload_fn(data json) RETURNS setof upload
LANGUAGE plpgsql
volatile
PARALLEL UNSAFE
AS
LANGUAGE plpgsql
volatile
PARALLEL UNSAFE
AS
$$
BEGIN

insert into content (cid, dag_size)
values (
data->>'content_cid',
(data->>'dag_size')::BIGINT
)
insert into content (cid, dag_size)
values (data ->> 'content_cid',
(data ->> 'dag_size')::BIGINT)
ON CONFLICT ( cid ) DO NOTHING;

insert into pin (content_cid, status, service)
select data->>'content_cid', status, service
from json_populate_recordset(null::upload_pin_type, (data->>'pins')::json)
insert into pin (content_cid, status, service)
select data ->> 'content_cid', status, service
from json_populate_recordset(null::upload_pin_type, (data ->> 'pins')::json)
on conflict (content_cid, service) do nothing;

insert into upload (
account_id,
key_id,
content_cid,
source_cid,
mime_type,
type,
name,
files,
origins,
meta
) values (
(data->>'account_id')::BIGINT,
(data->>'key_id')::BIGINT,
data->>'content_cid',
data->>'source_cid',
data->>'mime_type',
(data->>'type')::upload_type,
data->>'name',
(data->>'files')::jsonb,
(data->>'origins')::jsonb,
(data->>'meta')::jsonb
)
insert into upload (account_id,
key_id,
content_cid,
source_cid,
mime_type,
type,
name,
files,
origins,
meta)
values ((data ->> 'account_id')::BIGINT,
(data ->> 'key_id')::BIGINT,
data ->> 'content_cid',
data ->> 'source_cid',
data ->> 'mime_type',
(data ->> 'type')::upload_type,
data ->> 'name',
(data ->> 'files')::jsonb,
(data ->> 'origins')::jsonb,
(data ->> 'meta')::jsonb)
ON CONFLICT ( account_id, content_cid ) DO NOTHING;


return query select *
from upload u
where u.account_id = (data ->> 'account_id')::BIGINT
AND u.content_cid = data ->> 'content_cid';

--raise exception 'upload % content % ', inserted_upload.id, inserted_content.cid;

return query select *
from upload u
where u.account_id = (data->>'account_id')::BIGINT AND u.content_cid = data->>'content_cid';

IF NOT FOUND THEN
RAISE EXCEPTION 'No upload found %', data->>'content_cid';
END IF;
IF NOT FOUND THEN
RAISE EXCEPTION 'No upload found %', data ->> 'content_cid';
END IF;

RETURN;
RETURN;

END
$$;


-- deals_fn: return Filecoin deal status rows for a given content CID.
-- cid: CIDv1 string, matched against aggregate_entry.cid_v1.
-- Reads the materialized views created in cargo.sql. An entry whose
-- aggregate has no deal row yet still comes back, with status 'queued'
-- (LEFT JOIN + COALESCE below). Output column names are quoted camelCase
-- so PostgREST serializes them as-is into JSON.
CREATE OR REPLACE FUNCTION deals_fn(cid text)
RETURNS TABLE
(
status text,
"lastChanged" timestamptz,
"chainDealID" bigint,
"datamodelSelector" text,
"statusText" text,
"dealActivation" timestamptz,
"dealExpiration" timestamptz,
miner text,
"pieceCid" text,
"batchRootCid" text
)
LANGUAGE sql
STABLE
PARALLEL SAFE
AS
$$
-- NOTE: the SELECT aliases below fold to lowercase; that is harmless
-- because output names come from the RETURNS TABLE declaration above.
SELECT COALESCE(de.status, 'queued') as status,
de.entry_last_updated as lastChanged,
de.deal_id as chainDealID,
ae.datamodel_selector as datamodelSelector,
de.status_meta as statusText,
de.start_time as dealActivation,
de.end_time as dealExpiration,
de.provider as miner,
a.piece_cid as pieceCid,
ae.aggregate_cid as batchRootCid
FROM public.aggregate_entry ae
join public.aggregate a using (aggregate_cid)
LEFT JOIN public.deal de USING (aggregate_cid)
WHERE ae.cid_v1 = cid
ORDER BY de.entry_last_updated
$$;
8 changes: 7 additions & 1 deletion packages/api/db/reset.sql
Original file line number Diff line number Diff line change
Expand Up @@ -5,4 +5,10 @@ DROP TABLE IF EXISTS upload CASCADE;
DROP TABLE IF EXISTS pin;
DROP TABLE IF EXISTS content;
DROP TABLE IF EXISTS auth_key;
DROP TABLE IF EXISTS account;
DROP TABLE IF EXISTS account;

DROP SCHEMA IF EXISTS cargo CASCADE;
DROP SERVER IF EXISTS dag_cargo_server CASCADE;
DROP MATERIALIZED VIEW IF EXISTS public.aggregate_entry CASCADE;
DROP MATERIALIZED VIEW IF EXISTS public.deal CASCADE;
DROP MATERIALIZED VIEW IF EXISTS public.aggregate CASCADE;
Loading

0 comments on commit 922b881

Please sign in to comment.