[MARXAN-1490] [MARXAN-1166] [MARXAN-1167] minimal seed refactor+remove postgis dependency for apidb #1146

15 changes: 10 additions & 5 deletions Makefile
@@ -111,14 +111,19 @@ generate-geo-test-data: extract-geo-test-data
mv -f -u -Z data/data/processed/test-features.sql api/apps/api/test/fixtures/test-features.sql
rm -rf api/apps/api/test/fixtures/features && mv -f -u -Z data/data/processed/features api/apps/api/test/fixtures/features

# Don't forget to run make clean-slate && make start-api before repopulating the whole db
# This will delete all existing data and create tables/views/etc. through the migrations that
# run when starting up the API service.
# Also, be sure to create a user before importing the geodata, otherwise it will fail with an
# unrelated error message
# Don't forget to run make clean-slate && make start-api before repopulating the
# whole db. This will delete all existing data and create tables/views/etc.
# through the migrations that run when starting up the API service.
#
# No users are needed for this seed operation, strictly speaking, as
# ETL-imported data is "associated" to a dummy userid (UUID zero).
seed-geodb-data:
docker-compose --project-name ${COMPOSE_PROJECT_NAME} -f ./data/docker-compose-data_management.yml up --build marxan-seed-data

# Same as seed-geodb-data, but it also creates a handful of users with hardcoded
# passwords (not recommended for anything else than dev environments).
seed-geodb-data-plus-initial-test-users: seed-api-init-data seed-geodb-data

test-start-services: clean-slate
@echo "$(RED)Mounting docker file:$(NC) docker-compose-test-e2e.yml / docker-compose-test-e2e.local.yml"
# start from clean slate, in case anything was left around from previous runs (mostly relevant locally, not in CI) and spin the instances (geoprocessing, api and the DBs)
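As the updated comment above notes, ETL-imported rows are attributed to a dummy user id (the all-zeroes UUID) rather than to a real account. A minimal SQL sketch of that convention, mirroring the feature inserts in the seed Makefiles further down (all values except created_by are illustrative placeholders):

-- Hedged sketch only: feature_class_name and tag are placeholders; the
-- created_by value is the actual convention used by the geodb seed targets.
INSERT INTO features (feature_class_name, tag, creation_status, created_by)
VALUES ('demo_example_feature', 'bioregional', 'created',
        '00000000-0000-0000-0000-000000000000');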
25 changes: 0 additions & 25 deletions api/apps/api/src/migrations/api/1608149578000-EnablePostgis.ts

This file was deleted.

@@ -35,7 +35,6 @@ export class InitialDataMetadataEntities1611329857558
"name" varchar,
"project_id" uuid NOT NULL REFERENCES "projects" ("id"),
"country_id" varchar(3) NOT NULL,
"extent" geometry NOT NULL,
"wdpa_filter" jsonb default NULL,
"wdpa_threshold" int CHECK (wdpa_threshold BETWEEN 0 AND 100),
"admin_region_id" uuid,
@@ -6,13 +6,11 @@ export class UpdateNullableColumnsForProjectsAndScenarios1613988195000
await queryRunner.query(`
ALTER TABLE projects
ADD COLUMN country_id varchar(3),
ADD COLUMN admin_region_id uuid,
ADD COLUMN extent geometry;
ADD COLUMN admin_region_id uuid;

ALTER TABLE scenarios
DROP COLUMN country_id,
DROP COLUMN admin_region_id,
DROP COLUMN extent;
DROP COLUMN admin_region_id;

ALTER TABLE scenarios
ALTER COLUMN number_of_runs DROP NOT NULL,
@@ -25,13 +23,11 @@ ALTER TABLE scenarios
await queryRunner.query(`
ALTER TABLE scenarios
ADD COLUMN country_id varchar(3),
ADD COLUMN admin_region_id uuid,
ADD COLUMN extent geometry;
ADD COLUMN admin_region_id uuid;

ALTER TABLE projects
DROP COLUMN country_id,
DROP COLUMN admin_region_id,
DROP COLUMN extent;
DROP COLUMN admin_region_id;

ALTER TABLE scenarios
ALTER COLUMN number_of_runs SET NOT NULL,
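With the extent geometry columns dropped here (and the PostGIS-backed migrations removed elsewhere in this PR), the API database should end up with no geometry-typed columns at all. A hedged catalog check one could run against the api db to confirm (standard information_schema query, not part of this PR):

-- Expected: zero rows in the api database once all migrations have run.
SELECT table_name, column_name
FROM information_schema.columns
WHERE udt_name = 'geometry';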
28 changes: 0 additions & 28 deletions api/apps/api/src/migrations/api/1621439031072-AddBboxToProjects.ts
@@ -7,39 +7,11 @@ export class AddBboxToProjects1621439031072 implements MigrationInterface {
-- Generates the new column we need
-----------------------------------------
ALTER TABLE projects ADD COLUMN bbox jsonb;

UPDATE projects SET bbox = jsonb_build_array(ST_XMax(extent), ST_XMin(extent), ST_YMax(extent), ST_YMin(extent)) where extent is not null;

-----------------------------------------
-- tr_GetBbox()
-- Utility func to populate bbox
-----------------------------------------
CREATE OR REPLACE FUNCTION tr_GetBbox()
RETURNS trigger AS $BODY$
BEGIN
IF NEW.extent IS NOT NULL THEN
NEW.bbox := jsonb_build_array(ST_XMax(NEW.extent), ST_XMin(NEW.extent), ST_YMax(NEW.extent), ST_YMin(NEW.extent));
ELSE
NEW.bbox := NULL;
END IF;
RETURN NEW;
END;
$BODY$ LANGUAGE plpgsql;
-----------------------------------------
-- Creates the trigger
-----------------------------------------
DROP TRIGGER IF EXISTS tr_projects_extent ON projects;

CREATE TRIGGER tr_projects_extent
BEFORE INSERT or UPDATE ON projects
FOR EACH ROW EXECUTE
PROCEDURE tr_GetBbox();
`);
}

public async down(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query(`
DROP TRIGGER IF EXISTS tr_projects_extent ON projects;
ALTER TABLE projects
DROP COLUMN bbox;
`);
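The backfill and trigger removed above used PostGIS to derive bbox as jsonb_build_array(ST_XMax, ST_XMin, ST_YMax, ST_YMin); the bbox jsonb column itself is kept. A hedged sketch of a hand-written value under that [xmax, xmin, ymax, ymin] ordering (coordinates are made up; the project name comes from the test fixtures below):

-- Illustrative only: bbox ordering follows the removed backfill.
UPDATE projects
SET bbox = '[45.0, 36.1, 10.5, -3.2]'::jsonb
WHERE name = 'Example Project 2 Org 2';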

This file was deleted.

This file was deleted.

@@ -0,0 +1,20 @@
import { MigrationInterface, QueryRunner } from 'typeorm';

export class DropFeaturesToUsersFk1655841236000 implements MigrationInterface {
public async up(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query(`
ALTER TABLE features
DROP CONSTRAINT features_created_by_fkey;
`);
}

public async down(queryRunner: QueryRunner): Promise<void> {
await queryRunner.query(`
ALTER TABLE features
ADD CONSTRAINT features_created_by_fkey
FOREIGN KEY (created_by)
REFERENCES users(id)
ON DELETE SET NULL;
`);
}
}
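Dropping this foreign key is what allows seed data to set features.created_by to the all-zeroes UUID without a matching users row. A quick hedged check against the api database after up() has run (plain PostgreSQL catalog query, not part of this PR):

-- Expected: zero rows once the constraint has been dropped.
SELECT conname
FROM pg_constraint
WHERE conname = 'features_created_by_fkey';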
1 change: 0 additions & 1 deletion api/apps/api/test/fixtures/test-data.sql
@@ -41,7 +41,6 @@ VALUES
('Example scenario 1 Project 2 Org 2', (select id from projects where name = 'Example Project 2 Org 2'), 'marxan', 30, 100, 1, (SELECT id FROM users WHERE email = 'aa@example.com') ),
('Example scenario 2 Project 2 Org 2', (select id from projects where name = 'Example Project 2 Org 2'), 'marxan', 50, 100, 1, (SELECT id FROM users WHERE email = 'aa@example.com') );


INSERT INTO platform_admins
(user_id)
VALUES
7 changes: 6 additions & 1 deletion data/data_download/Makefile
@@ -1,7 +1,12 @@
.DEFAULT_GOAL := seed-data

# Seed data for production/staging environments
seed-data: seed-gadm seed-wdpa seed-ecosystems

# Seed data for development environments (includes IUCN species data and other
# demo/test data for development convenience)
seed-data-dev: seed-gadm seed-wdpa seed-demo-features-species seed-demo-features-bioregion seed-ecosystems

seed-data: seed-gadm seed-wdpa seed-demo-features-species seed-demo-features-bioregion seed-ecosystems
seed-gadm:
@echo "Starting seeding gadm data... "
@time $(MAKE) -C ./gadm_3.6 import
4 changes: 1 addition & 3 deletions data/data_download/demo_data_features_bioregion/Makefile
@@ -1,7 +1,5 @@
.PHONY: import

MarxanUser:=$(shell psql -X -A -t "postgresql://$$API_POSTGRES_USER:$$API_POSTGRES_PASSWORD@$$API_POSTGRES_HOST:$$API_POSTGRES_PORT/$$API_POSTGRES_DB" -c "select id from users limit 1")

import: data/demo_data_features/features_demos
@for i in $</*.shp; do \
echo "start processing $$i "; \
@@ -22,7 +20,7 @@ import: data/demo_data_features/features_demos
echo "getting the feature_id..."; \
feature_id=`psql -X -A -q -t "postgresql://$$API_POSTGRES_USER:$$API_POSTGRES_PASSWORD@$$API_POSTGRES_HOST:$$API_POSTGRES_PORT/$$API_POSTGRES_DB" \
-c "insert into features (feature_class_name, alias, list_property_keys, property_name, tag, creation_status, created_by) \
VALUES ('demo_$${table_name}', '$${spec_name}','$$list_properties'::jsonb, '$$default_property', 'bioregional','created','$(MarxanUser)') RETURNING id;"`; \
VALUES ('demo_$${table_name}', '$${spec_name}','$$list_properties'::jsonb, '$$default_property', 'bioregional','created','00000000-0000-0000-0000-000000000000') RETURNING id;"`; \
echo "inserting in feature data..."; \
psql "postgresql://$$GEO_POSTGRES_USER:$$GEO_POSTGRES_PASSWORD@$$GEO_POSTGRES_HOST:$$GEO_POSTGRES_PORT/$$GEO_POSTGRES_DB" \
-c "insert into features_data(the_geom, properties, source, feature_id) \
4 changes: 1 addition & 3 deletions data/data_download/demo_data_features_species/Makefile
@@ -1,7 +1,5 @@
.PHONY: import

MarxanUser:=$(shell psql -X -A -t "postgresql://$$API_POSTGRES_USER:$$API_POSTGRES_PASSWORD@$$API_POSTGRES_HOST:$$API_POSTGRES_PORT/$$API_POSTGRES_DB" -c "select id from users limit 1")

import: data/demo_data_features/features_demos
@for i in $</*.shp; do \
echo "start processing $$i "; \
@@ -22,7 +20,7 @@ import: data/demo_data_features/features_demos
echo "getting the feature_id..."; \
feature_id=`psql -X -A -q -t "postgresql://$$API_POSTGRES_USER:$$API_POSTGRES_PASSWORD@$$API_POSTGRES_HOST:$$API_POSTGRES_PORT/$$API_POSTGRES_DB" \
-c "insert into features (feature_class_name, alias, list_property_keys, property_name, tag, creation_status, created_by) \
VALUES ('demo_$${table_name}', '$${spec_name}','$$list_properties'::jsonb, '$$default_property', 'species','created','$(MarxanUser)') RETURNING id;"`; \
VALUES ('demo_$${table_name}', '$${spec_name}','$$list_properties'::jsonb, '$$default_property', 'species','created','00000000-0000-0000-0000-000000000000') RETURNING id;"`; \
echo "inserting in feature data..."; \
psql "postgresql://$$GEO_POSTGRES_USER:$$GEO_POSTGRES_PASSWORD@$$GEO_POSTGRES_HOST:$$GEO_POSTGRES_PORT/$$GEO_POSTGRES_DB" \
-c "insert into features_data(the_geom, properties, source, feature_id) \
3 changes: 1 addition & 2 deletions data/data_download/gadm_3.6/Makefile
@@ -2,7 +2,6 @@
# MAKEFLAGS := --jobs=$(shell nproc)
# MAKEFLAGS += --output-sync=target

MarxanUser:=$(shell psql -X -A -t "postgresql://$$API_POSTGRES_USER:$$API_POSTGRES_PASSWORD@$$API_POSTGRES_HOST:$$API_POSTGRES_PORT/$$API_POSTGRES_DB" -c "select id from users limit 1")
import: data/gadm/gadm36_simp.geojson
ogr2ogr -makevalid \
-update -append \
@@ -12,7 +11,7 @@ import: data/gadm/gadm36_simp.geojson
-t_srs EPSG:4326 -a_srs EPSG:4326 \
-f PostgreSQL PG:"dbname=$$GEO_POSTGRES_DB host=$$GEO_POSTGRES_HOST \
port=$$GEO_POSTGRES_PORT user=$$GEO_POSTGRES_USER password=$$GEO_POSTGRES_PASSWORD" $< \
-sql "select *,'$(MarxanUser)' as created_by from \"$$(basename -s .geojson "$<")\"";
-sql "select *,'00000000-0000-0000-0000-000000000000' as created_by from \"$$(basename -s .geojson "$<")\"";

data/gadm/gadm36_simp.geojson: data/gadm/gadm36_0.shp data/gadm/gadm36_1.shp data/gadm/gadm36_2.shp
mapshaper-xl -i data/gadm/gadm36_0.shp data/gadm/gadm36_1.shp data/gadm/gadm36_2.shp snap combine-files \
4 changes: 1 addition & 3 deletions data/data_download/iucn/Makefile
@@ -4,8 +4,6 @@
# iucn downloads came from https://spatial-data-2020onwards.s3-eu-west-1.amazonaws.com/
# in the future it should come from the API and be ingested through the features pipe.

MarxanUser:=$(shell psql -X -A -t "postgresql://$$API_POSTGRES_USER:$$API_POSTGRES_PASSWORD@$$API_POSTGRES_HOST:$$API_POSTGRES_PORT/$$API_POSTGRES_DB" -c "select id from users limit 1")

import: data/iucn/simp/%
@echo data/iucn/simp/%
@for i in data/iucn/simp/*.json; do \
@@ -19,7 +17,7 @@ import: data/iucn/simp/%
port=$$GEO_POSTGRES_PORT user=$$GEO_POSTGRES_USER password=$$GEO_POSTGRES_PASSWORD" "$$i" ;\
feature_id=`psql -X -A -q -t "postgresql://$$API_POSTGRES_USER:$$API_POSTGRES_PASSWORD@$$API_POSTGRES_HOST:$$API_POSTGRES_PORT/$$API_POSTGRES_DB" \
-c "insert into features (feature_class_name, alias, list_property_keys, property_name, tag, creation_status, created_by) \
VALUES ('iucn_$${table_name}', '$${spec_name}','[\"id_no\",\"binomial\",\"presence\",\"origin\",\"seasonal\",\"compiler\",\"yrcompiled\",\"citation\",\"subspecies\",\"subpop\",\"source\",\"island\",\"tax_comm\",\"dist_comm\",\"generalisd\",\"legend\",\"kingdom\",\"phylum\",\"class\",\"order_\",\"family\",\"genus\",\"category\",\"marine\",\"terrestial\",\"freshwater\",\"SHAPE_Leng\",\"SHAPE_Area\"]'::jsonb, 'binomial', 'species','created','$(MarxanUser)') RETURNING id;"`; \
VALUES ('iucn_$${table_name}', '$${spec_name}','[\"id_no\",\"binomial\",\"presence\",\"origin\",\"seasonal\",\"compiler\",\"yrcompiled\",\"citation\",\"subspecies\",\"subpop\",\"source\",\"island\",\"tax_comm\",\"dist_comm\",\"generalisd\",\"legend\",\"kingdom\",\"phylum\",\"class\",\"order_\",\"family\",\"genus\",\"category\",\"marine\",\"terrestial\",\"freshwater\",\"SHAPE_Leng\",\"SHAPE_Area\"]'::jsonb, 'binomial', 'species','created','00000000-0000-0000-0000-000000000000') RETURNING id;"`; \
psql "postgresql://$$GEO_POSTGRES_USER:$$GEO_POSTGRES_PASSWORD@$$GEO_POSTGRES_HOST:$$GEO_POSTGRES_PORT/$$GEO_POSTGRES_DB" \
-c "insert into features_data(the_geom, properties, source, feature_id) \
(SELECT the_geom, row_to_json(t)::jsonb - '{the_geom}'::text[] as properties, 'iucn' as source, '$$feature_id' as feature_id from (select * from \"$$(basename -s .json "$$i" | tr -d ' \t\n\r' | tr [:upper:] [:lower:])_feature\") t); \
6 changes: 1 addition & 5 deletions data/data_download/wdpa/Makefile
@@ -10,10 +10,6 @@ URL := $(shell curl -sS 'https://www.protectedplanet.net/downloads' \
SIMP_FOLDER := data/wdpa/simp
GEO_FOLDER := data/wdpa/geojson

# @debt
# need to add as the main admin user through auth and calling for it.
MarxanUser:=$(shell psql -X -A -t "postgresql://$$API_POSTGRES_USER:$$API_POSTGRES_PASSWORD@$$API_POSTGRES_HOST:$$API_POSTGRES_PORT/$$API_POSTGRES_DB" -c "select id from users limit 1")

import: simp
for i in $(SIMP_FOLDER)/*.json; do \
ogr2ogr -makevalid -update -append \
@@ -22,7 +18,7 @@ import: simp
-t_srs EPSG:4326 -a_srs EPSG:4326 \
-f PostgreSQL PG:"dbname=$$GEO_POSTGRES_DB host=$$GEO_POSTGRES_HOST \
port=$$GEO_POSTGRES_PORT user=$$GEO_POSTGRES_USER password=$$GEO_POSTGRES_PASSWORD schemas=myshapefiles" "$$i" \
-sql "select *,'$(MarxanUser)' as created_by from \"$$(basename -s .json "$$i")\""; \
-sql "select *,'00000000-0000-0000-0000-000000000000' as created_by from \"$$(basename -s .json "$$i")\""; \
done;
rm -rf $(SIMP_FOLDER)

3 changes: 1 addition & 2 deletions data/data_download/world_terrestrial_ecosystems/Makefile
@@ -1,7 +1,6 @@
.PHONY: import

URL := $(shell curl -i 'https://esri.maps.arcgis.com/sharing/rest/content/items/12230625b4d8409982f1361a1c03270f/data' | grep "location" | head -1 | cut -d ":" -f 2-20 | tr -d ' \t\n\r')
MarxanUser:=$(shell psql -X -A -t "postgresql://$$API_POSTGRES_USER:$$API_POSTGRES_PASSWORD@$$API_POSTGRES_HOST:$$API_POSTGRES_PORT/$$API_POSTGRES_DB" -c "select id from users limit 1")

# @debt
# instead of dn being numeric, it would be more useful to join it with the metadata description of those values
@@ -15,7 +14,7 @@ import: data/ecosystems/World_Ecosystems_simp.geojson
port=$$GEO_POSTGRES_PORT user=$$GEO_POSTGRES_USER password=$$GEO_POSTGRES_PASSWORD" "$<";\
feature_id=`psql -X -A -q -t "postgresql://$$API_POSTGRES_USER:$$API_POSTGRES_PASSWORD@$$API_POSTGRES_HOST:$$API_POSTGRES_PORT/$$API_POSTGRES_DB" \
-c "insert into features (feature_class_name, alias, list_property_keys, property_name, tag, creation_status, created_by) \
VALUES ('$${table_name}', 'World terrestrial ecoregions','[\"dn\", \"w_ecosystm\"]'::jsonb,'w_ecosystm', 'bioregional','created','$(MarxanUser)') RETURNING id;"`; \
VALUES ('$${table_name}', 'World terrestrial ecoregions','[\"dn\", \"w_ecosystm\"]'::jsonb,'w_ecosystm', 'bioregional','created','00000000-0000-0000-0000-000000000000') RETURNING id;"`; \
psql "postgresql://$$GEO_POSTGRES_USER:$$GEO_POSTGRES_PASSWORD@$$GEO_POSTGRES_HOST:$$GEO_POSTGRES_PORT/$$GEO_POSTGRES_DB" \
-c "insert into features_data(the_geom, properties, source, feature_id) \
(SELECT (st_dump(the_geom)).geom as the_geom, row_to_json(t)::jsonb - '{the_geom}'::text[] as properties, 'ecoregions' as source, '$$feature_id' as feature_id from (select * from \"$${table_name}_feature\") t); \
4 changes: 2 additions & 2 deletions docker-compose-test-e2e.yml
@@ -69,7 +69,7 @@ services:
test-e2e-postgresql-api:
build:
context: ./postgresql
dockerfile: Dockerfile
dockerfile: apidb.Dockerfile
container_name: marxan-test-e2e-postgresql-api
volumes:
- "./api/test/fixtures:/opt"
@@ -83,7 +83,7 @@
test-e2e-postgresql-geo-api:
build:
context: ./postgresql
dockerfile: Dockerfile
dockerfile: geodb.Dockerfile
container_name: marxan-test-e2e-postgresql-geo-api
ports:
- "${POSTGRES_GEO_SERVICE_PORT}:5432"
4 changes: 2 additions & 2 deletions docker-compose.yml
@@ -99,7 +99,7 @@ services:
postgresql-api:
build:
context: ./postgresql
dockerfile: Dockerfile
dockerfile: apidb.Dockerfile
container_name: marxan-postgresql-api
ports:
- "${POSTGRES_API_SERVICE_PORT}:5432"
@@ -115,7 +115,7 @@
postgresql-geo-api:
build:
context: ./postgresql
dockerfile: Dockerfile
dockerfile: geodb.Dockerfile
shm_size: 16gb
container_name: marxan-postgresql-geo-api
4 changes: 4 additions & 0 deletions postgresql/apidb.Dockerfile
@@ -0,0 +1,4 @@
FROM postgres:14.4-alpine3.16
LABEL maintainer="hello@vizzuality.com"

CMD ["postgres", "-c", "max_stack_depth=7MB"]
File renamed without changes.