From 5e21bcab93dbf724b64cd049df3a6a2410382d95 Mon Sep 17 00:00:00 2001 From: ts-shimizu <ts-shimizu@apptec.co.jp> Date: Thu, 14 Nov 2024 18:18:44 +0900 Subject: [PATCH 01/14] MF-API Server update and integration (#1) Updates related to database and Provider * Modify database schema file * Create a new provider file * Create a new test file to test the provider file --- .../provider/mf-api-sql/initdb-mobilitydb.sh | 55 + pygeoapi/provider/mf-api-sql/mf-api.sql | 45 + pygeoapi/provider/postgresql_mobilitydb.py | 1301 +++++++++++++++++ tests/test_postgresql_mobilitydb.py | 837 +++++++++++ 4 files changed, 2238 insertions(+) create mode 100644 pygeoapi/provider/mf-api-sql/initdb-mobilitydb.sh create mode 100644 pygeoapi/provider/mf-api-sql/mf-api.sql create mode 100644 pygeoapi/provider/postgresql_mobilitydb.py create mode 100644 tests/test_postgresql_mobilitydb.py diff --git a/pygeoapi/provider/mf-api-sql/initdb-mobilitydb.sh b/pygeoapi/provider/mf-api-sql/initdb-mobilitydb.sh new file mode 100644 index 000000000..63d44c468 --- /dev/null +++ b/pygeoapi/provider/mf-api-sql/initdb-mobilitydb.sh @@ -0,0 +1,55 @@ +#!/bin/bash + +echo "shared_preload_libraries = 'postgis-3.so'" >> $PGDATA/postgresql.conf + +set -e + +# Create the 'mobilitydb' extension in the mobilitydb database +echo "Loading MobilityDB extension into mobilitydb" +psql --user="$POSTGRES_USER" --dbname="mobilitydb" <<- 'EOSQL' + CREATE EXTENSION IF NOT EXISTS PostGIS; + CREATE EXTENSION IF NOT EXISTS mobilitydb CASCADE; + CREATE EXTENSION IF NOT EXISTS "uuid-ossp"; + + -- Table collection + CREATE TABLE public.collection ( + collection_id uuid NOT NULL DEFAULT uuid_generate_v4(), + collection_property jsonb NULL, + PRIMARY KEY (collection_id) + ); + + -- Table MovingFeature + CREATE TABLE public.mfeature ( + collection_id uuid NOT NULL, + mfeature_id uuid NOT NULL DEFAULT uuid_generate_v4(), + mf_geometry geometry NULL, + mf_property jsonb NULL, + lifespan tstzspan NULL, + PRIMARY KEY (collection_id, 
mfeature_id), + FOREIGN KEY (collection_id) REFERENCES collection(collection_id) + ); + + -- Table TemporalGeometry + CREATE TABLE public.tgeometry ( + collection_id uuid NOT NULL, + mfeature_id uuid NOT NULL, + tgeometry_id uuid NOT NULL DEFAULT uuid_generate_v4(), + tgeometry_property tgeompoint NULL, + tgeog_property tgeompoint NULL, + PRIMARY KEY (collection_id, mfeature_id, tgeometry_id), + FOREIGN KEY (collection_id, mfeature_id) REFERENCES mfeature(collection_id, mfeature_id) + ); + + -- Table TemporalProperty + CREATE TABLE public.tproperties ( + collection_id uuid NOT NULL, + mfeature_id uuid NOT NULL, + tproperties_name text NOT NULL, + datetime_group int4 NOT NULL, + tproperty jsonb NULL, + pvalue_float tfloat NULL, + pvalue_text ttext NULL, + PRIMARY KEY (collection_id, mfeature_id, tproperties_name, datetime_group), + FOREIGN KEY (collection_id, mfeature_id) REFERENCES mfeature(collection_id, mfeature_id) + ); +EOSQL diff --git a/pygeoapi/provider/mf-api-sql/mf-api.sql b/pygeoapi/provider/mf-api-sql/mf-api.sql new file mode 100644 index 000000000..70cb140e3 --- /dev/null +++ b/pygeoapi/provider/mf-api-sql/mf-api.sql @@ -0,0 +1,45 @@ +CREATE EXTENSION IF NOT EXISTS PostGIS; +CREATE EXTENSION IF NOT EXISTS mobilitydb CASCADE; +CREATE EXTENSION IF NOT EXISTS "uuid-ossp"; + + -- Table collection +CREATE TABLE public.collection ( + collection_id uuid NOT NULL DEFAULT uuid_generate_v4(), + collection_property jsonb NULL, + PRIMARY KEY (collection_id) +); + + -- Table MovingFeature +CREATE TABLE public.mfeature ( + collection_id uuid NOT NULL, + mfeature_id uuid NOT NULL DEFAULT uuid_generate_v4(), + mf_geometry geometry NULL, + mf_property jsonb NULL, + lifespan tstzspan NULL, + PRIMARY KEY (collection_id, mfeature_id), + FOREIGN KEY (collection_id) REFERENCES collection(collection_id) +); + + -- Table TemporalGeometry +CREATE TABLE public.tgeometry ( + collection_id uuid NOT NULL, + mfeature_id uuid NOT NULL, + tgeometry_id uuid NOT NULL DEFAULT 
uuid_generate_v4(), + tgeometry_property tgeompoint NULL, + tgeog_property tgeompoint NULL, + PRIMARY KEY (collection_id, mfeature_id, tgeometry_id), + FOREIGN KEY (collection_id, mfeature_id) REFERENCES mfeature(collection_id, mfeature_id) +); + + -- Table TemporalProperty +CREATE TABLE public.tproperties ( + collection_id uuid NOT NULL, + mfeature_id uuid NOT NULL, + tproperties_name text NOT NULL, + datetime_group int4 NOT NULL, + tproperty jsonb NULL, + pvalue_float tfloat NULL, + pvalue_text ttext NULL, + PRIMARY KEY (collection_id, mfeature_id, tproperties_name, datetime_group), + FOREIGN KEY (collection_id, mfeature_id) REFERENCES mfeature(collection_id, mfeature_id) +); diff --git a/pygeoapi/provider/postgresql_mobilitydb.py b/pygeoapi/provider/postgresql_mobilitydb.py new file mode 100644 index 000000000..d763febb3 --- /dev/null +++ b/pygeoapi/provider/postgresql_mobilitydb.py @@ -0,0 +1,1301 @@ +import json +import datetime +import psycopg2 +from functools import partial +from dateutil.parser import parse as dateparse +import pytz +from pymeos import (Temporal, TFloatSeq, TFloatSeqSet, pymeos_initialize) +from pygeoapi.util import format_datetime +from pymeos_cffi import (tfloat_from_mfjson, ttext_from_mfjson, + tgeompoint_from_mfjson) +# from mobilitydb.psycopg import register + + +class PostgresMobilityDB: + host = '127.0.0.1' + port = 5432 + db = 'mobilitydb' + user = 'docker' + password = 'docker' + connection = None + + # Local WSL environment test + # host = '172.20.241.18' + # port = 5432 + # db = 'mobility' + # user = 'postgres' + # password = 'postgres' + # connection = None + + def __init__(self, datasource=None): + """ + PostgresMobilityDB Class constructor + + :param datasource: datasource definition (default None) + host - database host address + port - connection port number + db - table name + user - user name used to authenticate + password - password used to authenticate + """ + + self.connection = None + if datasource is not None: + 
self.host = datasource['host'] + self.port = int(datasource['port']) + self.db = datasource['dbname'] + self.user = datasource['user'] + self.password = datasource['password'] + + def connect(self): + """ + Connection of database + """ + + # Set the connection parameters to PostgreSQL + self.connection = psycopg2.connect(host=self.host, + database=self.db, + user=self.user, + password=self.password, + port=self.port) + self.connection.autocommit = True + # Register MobilityDB data types (old library 'python-mobilitydb') + # register(self.connection) + + def disconnect(self): + """ + Close the connection + """ + + if self.connection: + self.connection.close() + + def get_collections_list(self): + """ + Query moving features collection list + GET /collections + + :returns: JSON FeatureCollection + """ + with self.connection.cursor() as cur: + select_query = "SELECT collection_id FROM collection" + cur.execute(select_query) + result = cur.fetchall() + return result + + def get_collections(self): + """ + Query moving features collections + + :returns: JSON FeatureCollections + """ + with self.connection.cursor() as cur: + select_query = """select collection.collection_id, + collection.collection_property, extentLifespan, + extentTGeometry from (select collection.collection_id, + collection.collection_property, + extent(mfeature.lifespan) as extentLifespan, + extent(tgeometry.tgeometry_property) as extentTGeometry + from collection + left outer join mfeature + on collection.collection_id = mfeature.collection_id + left outer join tgeometry + on mfeature.collection_id = tgeometry.collection_id + and mfeature.mfeature_id = tgeometry.mfeature_id + group by collection.collection_id, collection.collection_property) + collection """ + + cur.execute(select_query) + result = cur.fetchall() + return result + + def get_collection(self, collection_id): + """ + Query specific moving features collection + GET /collections/{collectionId} + + :param collection_id: local identifier of 
a collection + + :returns: JSON FeatureCollection + """ + with self.connection.cursor() as cur: + select_query = ("""select collection.collection_id, + collection.collection_property, extentLifespan, + extentTGeometry from (select collection.collection_id, + collection.collection_property, + extent(mfeature.lifespan) as extentLifespan, + extent(tgeometry.tgeometry_property) as extentTGeometry + from collection + left outer join mfeature + on collection.collection_id = mfeature.collection_id + left outer join tgeometry + on mfeature.collection_id = tgeometry.collection_id + and mfeature.mfeature_id = tgeometry.mfeature_id + where collection.collection_id ='{0}' + group by collection.collection_id, + collection.collection_property) + collection """ + .format(collection_id)) + + cur.execute(select_query) + result = cur.fetchall() + return result + + def get_features_list(self): + """ + Query all moving features + + :returns: JSON MovingFeatures + """ + + with self.connection.cursor() as cur: + select_query = "SELECT collection_id, mfeature_id FROM mfeature" + cur.execute(select_query) + result = cur.fetchall() + return result + + def get_features( + self, collection_id, bbox='', datetime='', limit=10, offset=0, + sub_trajectory=False): + """ + Retrieve the moving feature collection to access + the static information of the moving feature + /collections/{collectionId}/items + + :param collection_id: local identifier of a collection + :param bbox: bounding box [lowleft1,lowleft2,min(optional), + upright1,upright2,max(optional)] + :param datetime: either a date-time or an interval(datestamp or extent) + :param limit: number of items (default 10) [optional] + :param offset: starting record to return (default 0) + :param sub_trajectory: If specified true, This operation returns only a + subsequence of temporal geometry within a time + interval contained in the + datetime parameter (default False)[optional] + + :returns: JSON MovingFeatures + """ + + with 
self.connection.cursor() as cur: + bbox_restriction = "" + if bbox != '' and bbox is not None: + s_bbox = ','.join(str(x) for x in bbox) + if len(bbox) == 4: + bbox_restriction = " and box2d(stboxx(" + \ + s_bbox + ")) &&& box2d(extentTGeometry) " + elif len(bbox) == 6: + bbox_restriction = " and box3d(stboxz(" + \ + s_bbox + ")) &&& box3d(extentTGeometry) " + + datetime_restriction = "" + if datetime != '' and datetime is not None: + if sub_trajectory is False or sub_trajectory == "false": + datetime_restriction = ( + """ and((lifespan && tstzspan('[{0}]')) + or (extentTPropertiesValueFloat::tstzspan && + tstzspan('[{0}]')) or + (extentTPropertiesValueText::tstzspan && + tstzspan('[{0}]')) or + (extentTGeometry::tstzspan && tstzspan('[{0}]')))""" + .format(datetime)) + limit_restriction = " LIMIT " + \ + str(limit) + " OFFSET " + str(offset) + + # sub_trajectory is false + select_query = ( + """select mfeature.collection_id, mfeature.mfeature_id, + st_asgeojson(mfeature.mf_geometry) as mf_geometry, + mfeature.mf_property, mfeature.lifespan, extentTGeometry, + extentTPropertiesValueFloat, extentTPropertiesValueText + from (select mfeature.collection_id, mfeature.mfeature_id, + mfeature.mf_geometry, mfeature.mf_property, mfeature.lifespan, + extent(tgeometry.tgeometry_property) as extentTGeometry + from mfeature left outer join tgeometry + on mfeature.collection_id = tgeometry.collection_id + and mfeature.mfeature_id = tgeometry.mfeature_id + where mfeature.collection_id ='{0}' + group by mfeature.collection_id, mfeature.mfeature_id, + mfeature.mf_geometry, mfeature.mf_property, mfeature.lifespan) + mfeature left outer join + (select mfeature.collection_id, mfeature.mfeature_id, + extent(tproperties.pvalue_float) + as extentTPropertiesValueFloat, + extent(tproperties.pvalue_text) as extentTPropertiesValueText + from mfeature left outer join tproperties + on mfeature.collection_id = tproperties.collection_id + and mfeature.mfeature_id = tproperties.mfeature_id + 
where mfeature.collection_id ='{0}' + group by mfeature.collection_id, mfeature.mfeature_id) + tproperties ON + mfeature.collection_id = tproperties.collection_id + and mfeature.mfeature_id = tproperties.mfeature_id + where 1=1 {1} {2}""" .format( + collection_id, bbox_restriction, datetime_restriction)) + + cur.execute(select_query) + result = cur.fetchall() + number_matched = len(result) + + select_query += limit_restriction + cur.execute(select_query) + result = cur.fetchall() + number_returned = len(result) + + if sub_trajectory or sub_trajectory == "true": + sub_trajectory_field = ("""atTime(tgeometry.tgeometry_property, + tstzspan('[{0}]'))""" + .format(datetime)) + # sub_trajectory is true + select_geometry_query = ( + """select mfeature.collection_id, + mfeature.mfeature_id, mfeature.mf_geometry, + mfeature.mf_property, mfeature.lifespan, + extentTGeometry, tgeometry.tgeometry_id, + tgeometry_property from (select mfeature.collection_id, + mfeature.mfeature_id, st_asgeojson(mfeature.mf_geometry) + as mf_geometry, mfeature.mf_property, mfeature.lifespan, + extentTGeometry from (select mfeature.collection_id, + mfeature.mfeature_id, mfeature.mf_geometry, + mfeature.mf_property, mfeature.lifespan, + extent(tgeometry.tgeometry_property) + as extentTGeometry from mfeature left outer join tgeometry + on mfeature.collection_id = tgeometry.collection_id + and mfeature.mfeature_id = tgeometry.mfeature_id + where mfeature.collection_id ='{0}' + group by mfeature.collection_id, mfeature.mfeature_id, + mfeature.mf_geometry, mfeature.mf_property, mfeature.lifespan) + mfeature where 1=1 {1} {2}) mfeature + left outer join (select tgeometry.collection_id, + tgeometry.mfeature_id, tgeometry.tgeometry_id, {3} + as tgeometry_property from tgeometry + where tgeometry.collection_id ='{0}' and {3} is not null) + tgeometry ON mfeature.collection_id = tgeometry.collection_id + and mfeature.mfeature_id = tgeometry.mfeature_id where 1=1 """. 
+ format( + collection_id, bbox_restriction, + limit_restriction, + sub_trajectory_field)) + + cur.execute(select_geometry_query) + result = cur.fetchall() + + return result, number_matched, number_returned + + def get_feature(self, collection_id, mfeature_id): + """ + Access the static data of the moving feature + + :param collection_id: local identifier of a collection + :param mfeature_id: local identifier of a moving feature + + :returns: JSON MovingFeature + """ + with self.connection.cursor() as cur: + cur = self.connection.cursor() + select_query = ( + """select mfeature.collection_id, mfeature.mfeature_id, + st_asgeojson(mfeature.mf_geometry) as mf_geometry, + mfeature.mf_property, mfeature.lifespan, extentTGeometry + from (select mfeature.collection_id, mfeature.mfeature_id, + mfeature.mf_geometry, mfeature.mf_property, mfeature.lifespan, + extent(tgeometry.tgeometry_property) as extentTGeometry + from mfeature left outer join tgeometry + on mfeature.collection_id = tgeometry.collection_id + and mfeature.mfeature_id = tgeometry.mfeature_id + where mfeature.collection_id ='{0}' + AND mfeature.mfeature_id='{1}' + group by mfeature.collection_id, mfeature.mfeature_id, + mfeature.mf_geometry, mfeature.mf_property, + mfeature.lifespan) mfeature """ .format( + collection_id, mfeature_id)) + cur.execute(select_query) + result = cur.fetchall() + return result + + def get_temporalgeometries( + self, collection_id, mfeature_id, bbox='', leaf='', datetime='', + limit=10, offset=0, sub_trajectory=False): + """ + Retrieve only the movement data of a moving feature + + :param collection_id: local identifier of a collection + :param mfeature_id: local identifier of a moving feature + :param bbox: bounding box [lowleft1,lowleft2,min(optional), + upright1,upright2,max(optional)] + :param leaf: only features that have a temporal geometry and + property that intersects the given + date-time are selected [optional] + :param datetime: either a date-time or an 
interval(datestamp or extent) + :param limit: number of items (default 10) [optional] + :param offset: starting record to return (default 0) + :param sub_trajectory: If specified true, This operation returns only a + subsequence of temporal geometry within a time + interval contained in the + datetime parameter (default False) [optional] + + :returns: JSON TemporalGeometry + """ + with self.connection.cursor() as cur: + tgeometry_property = 'null' + + bbox_restriction = "" + if bbox != '' and bbox is not None: + s_bbox = ','.join(str(x) for x in bbox) + if len(bbox) == 4: + bbox_restriction = " and box2d(stboxx(" + s_bbox + \ + ")) &&& box2d(stbox(tgeometry_property))" + elif len(bbox) == 6: + bbox_restriction = " and box3d(stboxz(" + s_bbox + \ + ")) &&& box3d(stbox(tgeometry_property))" + + datetime_restriction = "" + if datetime != '' and datetime is not None: + datetime_restriction = (""" and atTime(tgeometry_property, + tstzspan('[{0}]')) is not null """ + .format(datetime)) + + if leaf != '' and leaf is not None: + tgeometry_property = ("""atTime(tgeometry_property, + tstzset('{0}'))""".format('{' + leaf + '}')) + elif sub_trajectory or sub_trajectory == "true": + tgeometry_property = ("""atTime(tgeometry_property, + tstzspan('[{0}]'))""".format(datetime)) + + select_query = ( + """SELECT collection_id, mfeature_id, tgeometry_id, + tgeometry_property, {0} + FROM tgeometry WHERE collection_id ='{1}' + AND mfeature_id='{2}' {3} {4}""" + .format(tgeometry_property, collection_id, + mfeature_id, bbox_restriction, + datetime_restriction)) + + cur.execute(select_query) + result = cur.fetchall() + number_matched = len(result) + + select_query += " LIMIT " + str(limit) + " OFFSET " + str(offset) + cur.execute(select_query) + result = cur.fetchall() + number_returned = len(result) + + return result, number_matched, number_returned + + def get_tProperties_name_list(self): + """ + Query all tProperties name list + + :returns: MF-JSON tProperties + """ + with 
self.connection.cursor() as cur: + select_query = """SELECT collection_id, mfeature_id, + tproperties_name FROM tproperties""" + cur.execute(select_query) + result = cur.fetchall() + return result + + def get_temporalproperties( + self, collection_id, mfeature_id, datetime='', limit=10, + offset=0, subTemporalValue=False): + """ + Retrieve the static information of the temporal property data + that included a single moving feature + + :param collection_id: local identifier of a collection + :param mfeature_id: local identifier of a moving feature + :param datetime: either a date-time or an interval(datestamp or extent) + :param limit: number of items (default 10) [optional] + :param offset: starting record to return (default 0) + :param subTemporalValue: only features with a temporal property + intersecting the given time interval + will return (default False) [optional] + + :returns: MF-JSON TemporalProperties or temporalProperty + """ + with self.connection.cursor() as cur: + datetime_restriction = '' + if datetime != '' and datetime is not None: + if subTemporalValue is False or subTemporalValue == "false": + datetime_restriction = (""" and (atTime(pvalue_float, + tstzspan('[{0}]')) is not null + or atTime(pvalue_text, tstzspan('[{0}]')) is not null)""" + .format(datetime)) + + limit_restriction = " LIMIT " + \ + str(limit) + " OFFSET " + str(offset) + select_query = ("""select distinct on (tproperties.collection_id, + tproperties.mfeature_id, tproperties.tproperties_name) + tproperties.collection_id, tproperties.mfeature_id, + tproperties.tproperties_name, tproperties.tproperty + from tproperties WHERE tproperties.collection_id ='{0}' + AND tproperties.mfeature_id='{1}' {2}""". 
format( + collection_id, mfeature_id, datetime_restriction)) + + cur.execute(select_query) + result = cur.fetchall() + number_matched = len(result) + + select_query += limit_restriction + cur.execute(select_query) + result = cur.fetchall() + number_returned = len(result) + + if subTemporalValue or subTemporalValue == "true": + subTemporalValue_float_field = ( + """atTime(tproperties.pvalue_float, + tstzspan('[{0}]'))""" .format(datetime)) + subTemporalValue_text_field = ( + """atTime(tproperties.pvalue_text, + tstzspan('[{0}]'))""" .format(datetime)) + + select_temporalvalue_query = ( + """select tproperties.collection_id, + tproperties.mfeature_id, tproperties.tproperties_name, + tproperties.tproperty, datetime_group, pvalue_float, pvalue_text + from (select distinct on (tproperties.collection_id, + tproperties.mfeature_id, tproperties.tproperties_name) + tproperties.collection_id, tproperties.mfeature_id, + tproperties.tproperties_name, tproperties.tproperty + from tproperties where tproperties.collection_id ='{0}' + AND tproperties.mfeature_id='{1}' {2} {3}) tproperties + left outer join (select tproperties.collection_id, + tproperties.mfeature_id, tproperties.tproperties_name, + tproperties.datetime_group, {4} as pvalue_float, + {5} as pvalue_text from tproperties + where tproperties.collection_id ='{0}' + AND tproperties.mfeature_id='{1}' and ({4} is not null + or {5} is not null)) tpropertiesvalue + on tproperties.collection_id = tpropertiesvalue.collection_id + and tproperties.mfeature_id = tpropertiesvalue.mfeature_id + and tproperties.tproperties_name = tpropertiesvalue.tproperties_name + where 1=1 order by datetime_group""". 
+ format( + collection_id, mfeature_id, + datetime_restriction, + limit_restriction, + subTemporalValue_float_field, + subTemporalValue_text_field)) + + cur.execute(select_temporalvalue_query) + result = cur.fetchall() + + return result, number_matched, number_returned + + def get_temporalproperties_value( + self, collection_id, mfeature_id, tProperty_name, datetime='', + leaf='', subTemporalValue=False): + """ + Retrieve temporal values with a specified name + {tPropertyName} of temporal property. + + :param collection_id: local identifier of a collection + :param mfeature_id: local identifier of a moving feature + :param tProperty_name: local identifier of a temporal property + :param datetime: either a date-time or an interval(datestamp or extent) + :param leaf: only features that have a temporal geometry and + property that intersects the given + date-time are selected [optional] + + :param subTemporalValue: only features with a temporal property + intersecting the given time interval + will return (default False) [optional] + + :returns: JSON TemporalPropertyValue + """ + with self.connection.cursor() as cur: + datetime_restriction = "" + if datetime != '' and datetime is not None: + datetime_restriction = ( + """ and (atTime(tproperties.pvalue_float, + tstzspan('[{0}]')) is not null + or atTime(tproperties.pvalue_text, + tstzspan('[{0}]')) is not null) """ .format(datetime)) + float_field = 'pvalue_float' + text_field = 'pvalue_text' + if leaf != '' and leaf is not None: + float_field = "atTime(tproperties.pvalue_float, \ + tstzset('{" + leaf + "}'))" + text_field = "atTime(tproperties.pvalue_text, \ + tstzset('{" + leaf + "}'))" + elif subTemporalValue or subTemporalValue == "true": + float_field = "atTime(tproperties.pvalue_float, \ + tstzspan('[" + datetime + "]'))" + text_field = "atTime(tproperties.pvalue_text, \ + tstzspan('[" + datetime + "]'))" + + select_query = ( + """select tproperties.collection_id, tproperties.mfeature_id, + 
tproperties.tproperties_name, tproperties.tproperty, + datetime_group, pvalue_float, pvalue_text + from (select distinct on (tproperties.collection_id, + tproperties.mfeature_id, tproperties.tproperties_name) + tproperties.collection_id, tproperties.mfeature_id, + tproperties.tproperties_name, tproperties.tproperty + from tproperties where tproperties.collection_id ='{0}' + AND tproperties.mfeature_id='{1}' + AND tproperties.tproperties_name='{2}') tproperties + left outer join (select tproperties.collection_id, + tproperties.mfeature_id, tproperties.tproperties_name, + tproperties.datetime_group, {3} as pvalue_float, + {4} as pvalue_text from tproperties + where tproperties.collection_id ='{0}' + AND tproperties.mfeature_id='{1}' + AND tproperties.tproperties_name='{2}' {5}) tpropertiesvalue + on tproperties.collection_id = tpropertiesvalue.collection_id + and tproperties.mfeature_id = tpropertiesvalue.mfeature_id + and tproperties.tproperties_name = tpropertiesvalue.tproperties_name + where 1=1 order by datetime_group""" + .format(collection_id, mfeature_id, tProperty_name, + float_field, text_field, datetime_restriction)) + cur.execute(select_query) + result = cur.fetchall() + return result + + def post_collection(self, collection_property): + """ + Register metadata about a collection of moving features + + :param collection_property: metadata about a collection + title - human readable title of the collection + updateFrequency - a time interval of sampling location + description - any description + itemType - indicator about the type of the items in the + moving features collection (default "movingfeature") + + :returns: Collection ID + """ + with self.connection.cursor() as cur: + cur.execute( + "INSERT INTO collection(collection_property) \ + VALUES ('{0}') RETURNING collection_id". 
+ format(json.dumps(collection_property))) + + collection_id = cur.fetchone()[0] + return collection_id + + def post_movingfeature(self, collection_id, movingfeature): + """ + Insert a set of moving features or a moving feature into a collection + + :param collection_id: local identifier of a collection + :param movingfeature: MovingFeature object or + MovingFeatureCollection object + + :returns: MovingFeature ID + """ + with self.connection.cursor() as cur: + g_movingfeature = dict(movingfeature) + lifespan = g_movingfeature.pop("time", None) + if lifespan is not None: + lifespan = "'[" + self.validate_lifespan(lifespan) + "]'" + else: + lifespan = "NULL" + temporal_geometries = g_movingfeature.pop("temporalGeometry", None) + temporal_properties = g_movingfeature.pop( + "temporalProperties", None) + + if 'geometry' in g_movingfeature: + geometry = g_movingfeature.pop("geometry", None) + cur.execute( + """INSERT INTO mfeature(collection_id, mf_geometry, + mf_property, lifespan) VALUES ('{0}', + ST_GeomFromGeoJSON('{1}'), '{2}', {3}) + RETURNING mfeature_id""" + .format(collection_id, json.dumps(geometry), + json.dumps(g_movingfeature), lifespan)) + else: + cur.execute( + """INSERT INTO mfeature(collection_id, + mf_property, lifespan) + VALUES ('{0}', '{1}', {2}) RETURNING mfeature_id""" + .format( + collection_id, json.dumps(g_movingfeature), lifespan)) + mfeature_id = cur.fetchone()[0] + + if temporal_geometries is not None: + temporal_geometries = [temporal_geometries] if not isinstance( + temporal_geometries, list) else temporal_geometries + for temporal_geometry in temporal_geometries: + self.post_temporalgeometry( + collection_id, mfeature_id, temporal_geometry) + + if temporal_properties is not None: + temporal_properties = [temporal_properties] if not isinstance( + temporal_properties, list) else temporal_properties + for temporal_property in temporal_properties: + self.post_temporalproperties( + collection_id, mfeature_id, temporal_property) + + return 
mfeature_id + + def post_temporalgeometry( + self, collection_id, mfeature_id, temporal_geometry): + """ + Add movement data into the moving feature + + :param collection_id: local identifier of a collection + :param mfeature_id: local identifier of a moving feature + :param temporal_geometry: TemporalPrimitiveGeometry object + in the OGC MF-JSON + :returns: TemporalGeometry ID + """ + + with self.connection.cursor() as cur: + # pymeos of python + pymeos_initialize() + temporal_geometry = self.convert_temporalgeometry_to_new_version( + temporal_geometry) + value = Temporal._factory( + tgeompoint_from_mfjson(json.dumps(temporal_geometry))) + cur.execute( + """INSERT INTO tgeometry(collection_id, mfeature_id, + tgeometry_property, tgeog_property) + VALUES ('{0}', '{1}', '{2}', '{3}') RETURNING tgeometry_id""" + .format(collection_id, mfeature_id, str(value), str(value))) + + tgeometry_id = cur.fetchone()[0] + + return tgeometry_id + + def post_temporalproperties( + self, collection_id, mfeature_id, temporal_property): + """ + Add temporal property data into a moving feature + + :param collection_id: local identifier of a collection + :param mfeature_id: local identifier of a moving feature + :param temporalProperty: TemporalProperties object in the OGC MF-JSON + + :returns: TemporalProperty Name + """ + g_temporal_property = dict(temporal_property) + datetimes = [] + if 'datetimes' in g_temporal_property: + datetimes = g_temporal_property.pop("datetimes", None) + datetime_group = self.get_temporalproperties_group( + collection_id, mfeature_id, datetimes) + for tproperties_name in g_temporal_property: + with self.connection.cursor() as cur: + if 'values' in g_temporal_property[tproperties_name] \ + and 'interpolation' in g_temporal_property[ + tproperties_name]: + values = g_temporal_property[tproperties_name].pop( + "values", None) + interpolation = g_temporal_property[tproperties_name].pop( + "interpolation", None) + + temporal_value = 
self.create_temporalproperty_value( + datetimes, values, interpolation) + + dataType = temporal_value["type"] + pvalue_column = "" + value = None + + pymeos_initialize() + if dataType == 'MovingFloat': + pvalue_column = "pValue_float" + value = Temporal._factory( + tfloat_from_mfjson(json.dumps(temporal_value))) + else: + pvalue_column = "pValue_text" + value = Temporal._factory( + ttext_from_mfjson(json.dumps(temporal_value))) + + insert_query = ( + """INSERT INTO tproperties(collection_id, mfeature_id, + tproperties_name, datetime_group, tproperty, {0}) + VALUES ('{1}', '{2}', '{3}', {4}, '{5}', '{6}')""" + .format( + pvalue_column, collection_id, mfeature_id, + tproperties_name, datetime_group, json.dumps( + temporal_property[tproperties_name]), + str(value))) + cur.execute(insert_query) + else: + insert_query = ("""INSERT INTO tproperties(collection_id, + mfeature_id, tproperties_name, datetime_group, tproperty) + VALUES ('{0}', '{1}', '{2}', {3}, '{4}')""".format( + collection_id, mfeature_id, tproperties_name, + datetime_group, json.dumps( + temporal_property[tproperties_name]))) + cur.execute(insert_query) + + # TODO replace g_temporal_property + return tproperties_name + + def post_temporalvalue( + self, collection_id, mfeature_id, tproperties_name, + temporal_value_data): + """ + Add more temporal values data into a temporal property + + :param collection_id: local identifier of a collection + :param mfeature_id: local identifier of a moving feature + :param tproperties_name: local identifier of a temporal property + :param temporal_value_data: temporal primitive value + datetimes - array of strings <date-time> + values - number or string or boolean + interpolation - Enum: "Discrete" "Step" "Linear" "Regression" + + :returns: Temporal Primitive Value + """ + + with self.connection.cursor() as cur: + + datetimes = temporal_value_data['datetimes'] + values = temporal_value_data['values'] + interpolation = temporal_value_data['interpolation'] + temporal_value 
# NOTE(review): SOURCE is a git patch; the following PostgresMobilityDB
# methods are reproduced as clean Python.  The truncated tail of
# post_temporalvalue (whose definition starts before this chunk) cannot be
# reconstructed from the visible text and is intentionally not guessed at.

def put_collection(self, collection_id, collection_property):
    """
    Replace metadata about the collection

    :param collection_id: local identifier of a collection
    :param collection_property: metadata about a collection
        title - human readable title of the collection
        updateFrequency - a time interval of sampling location
        description - any description
        itemType - indicator about the type of the items in the
        moving features collection (default "movingfeature")
    """
    with self.connection.cursor() as cur:
        # Parameterized query: collection_property is caller-supplied
        # JSON and must never be spliced into the SQL text directly
        # (the original used str.format, which is injectable).
        cur.execute(
            "UPDATE collection SET collection_property = %s "
            "WHERE collection_id = %s",
            (json.dumps(collection_property), collection_id))


def delete_collection(self, restriction):
    """
    Delete records associated with a collection id

    :param restriction: SQL fragment restricting the delete, e.g.
        "AND collection_id = '...'".
        NOTE(review): the fragment is concatenated verbatim into the
        statement, so it must come from trusted code, never user input.
    """
    with self.connection.cursor() as cur:
        # Delete children before parents so FK constraints hold.
        for table in ('tgeometry', 'tproperties', 'mfeature', 'collection'):
            cur.execute(
                "DELETE FROM {0} WHERE 1=1 {1}".format(table, restriction))


def delete_movingfeature(self, restriction):
    """
    Delete records associated with a moving feature id

    :param restriction: SQL fragment restricting the delete (trusted
        code only — it is concatenated verbatim)
    """
    with self.connection.cursor() as cur:
        # Delete children before the mfeature row itself.
        for table in ('tproperties', 'tgeometry', 'mfeature'):
            cur.execute(
                "DELETE FROM {0} WHERE 1=1 {1}".format(table, restriction))


def delete_temporalgeometry(self, restriction):
    """
    Delete the temporal geometry record with the given restriction.

    :param restriction: SQL fragment restricting the delete (trusted
        code only)
    """
    with self.connection.cursor() as cur:
        cur.execute(
            "DELETE FROM tgeometry WHERE 1=1 {0}".format(restriction))


def delete_temporalproperties(self, restriction):
    """
    Delete the temporal properties record with the given restriction.

    :param restriction: SQL fragment restricting the delete (trusted
        code only)
    """
    with self.connection.cursor() as cur:
        cur.execute(
            "DELETE FROM tproperties WHERE 1=1 {0}".format(restriction))


def convert_temporalgeometry_to_new_version(self, temporal_geometry):
    """
    Convert temporal geometry to the new version: strip the trailing
    'Z' from each datetime and default the inclusive-bound flags.
    The input dict is updated in place and returned (callers rely on
    the in-place update).

    :param temporal_geometry: MF-JSON TemporalPrimitiveGeometry (object)
        or MF-JSON TemporalComplexGeometry

    :returns: temporalGeometry object
    """
    if 'datetimes' in temporal_geometry:
        temporal_geometry['datetimes'] = [
            dt.replace('Z', '') for dt in temporal_geometry['datetimes']]

    temporal_geometry.setdefault('lower_inc', True)
    temporal_geometry.setdefault('upper_inc', True)
    return temporal_geometry


def convert_temporalgeometry_to_old_version(self, temporal_geometry):
    """
    Convert temporal geometry to the old version: drop any timezone
    offset, append 'Z', and remove the inclusive-bound flags.
    The input dict is updated in place and returned.

    :param temporal_geometry: MF-JSON TemporalPrimitiveGeometry (object)
        or MF-JSON TemporalComplexGeometry

    :returns: temporalGeometry object
    """
    if 'datetimes' in temporal_geometry:
        temporal_geometry['datetimes'] = [
            dt.split('+')[0] + 'Z' for dt in temporal_geometry['datetimes']]

    temporal_geometry.pop('lower_inc', None)
    temporal_geometry.pop('upper_inc', None)
    return temporal_geometry


def create_temporalproperty_value(self, datetimes, values, interpolation):
    """
    Create temporal property value

    :param datetimes: array of strings <date-time> or epoch
        milliseconds (int)
    :param values: numbers or strings or booleans
    :param interpolation: Enum: "Discrete" "Step" "Linear" "Regression"

    :returns: temporalValue object
    """
    # Bug fix: normalize into a new list instead of mutating the
    # caller's `datetimes` argument in place.
    normalized = []
    for dt in datetimes:
        if isinstance(dt, int):
            # Epoch milliseconds -> string.  NOTE(review): the '/'
            # date separator matches the format used consistently
            # elsewhere in this module — confirm the DB accepts it.
            normalized.append(datetime.datetime.fromtimestamp(
                dt / 1e3).strftime("%Y/%m/%dT%H:%M:%S.%f"))
        else:
            normalized.append(dt.replace('Z', ''))

    # NOTE(review): bool is a subclass of int, so boolean values are
    # classified as MovingFloat here — confirm that is intended.
    if all(isinstance(item, (int, float)) for item in values):
        data_type = 'MovingFloat'
    else:
        data_type = 'MovingText'

    return {
        "type": data_type,
        "lower_inc": True,
        "upper_inc": True,
        'datetimes': normalized,
        'values': values,
        'interpolation': interpolation
    }
# NOTE(review): methods of PostgresMobilityDB, reproduced as clean Python
# from the patch text.  The tail of
# convert_temporalproperty_value_to_base_version (cut at this chunk's
# boundary) is not reproducible here.

def validate_lifespan(self, datetime_=None) -> str:
    """
    Validate datetime lifespan

    :param datetime_: either a date-time or an interval. (default None)

    :returns: "begin,end" datetime string, or '' when the input is
        absent or the interval is inverted
    """
    datetime_for_return = ''
    if datetime_ is not None and datetime_ != []:
        # Open-ended bounds default to min/max datetime.
        dateparse_begin = partial(dateparse, default=datetime.datetime.min)
        dateparse_end = partial(dateparse, default=datetime.datetime.max)

        datetime_begin = dateparse_begin(datetime_[0])
        datetime_end = dateparse_end(datetime_[-1])

        # Treat naive datetimes as UTC so the comparison is well defined.
        if datetime_begin.tzinfo is None:
            datetime_begin = datetime_begin.replace(tzinfo=pytz.UTC)
        if datetime_end.tzinfo is None:
            datetime_end = datetime_end.replace(tzinfo=pytz.UTC)

        if datetime_begin <= datetime_end:
            datetime_for_return = (
                datetime_begin.strftime('%Y-%m-%d %H:%M:%S.%f') + ',' +
                datetime_end.strftime('%Y-%m-%d %H:%M:%S.%f'))
    return datetime_for_return


def check_temporalproperty_can_post(
        self, collection_id, mfeature_id, temporal_properties,
        tproperties_name=None):
    """
    Check temporalProperties object can be POSTed, i.e. none of its
    datetimes intersect values already stored for the same property.

    :param collection_id: local identifier of a collection
    :param mfeature_id: local identifier of a moving feature
    :param temporal_properties: temporalProperties object
    :param tproperties_name: temporal property name (default None)

    :returns: True or False
    """
    with self.connection.cursor() as cur:
        for temporal_property in temporal_properties:
            g_temporal_property = dict(temporal_property)
            # Bug fix: the original used a stale (or undefined)
            # `datetimes` from a previous iteration when a document had
            # no 'datetimes' key — such documents cannot intersect
            # anything, so skip them.
            if 'datetimes' not in g_temporal_property:
                continue

            # Normalize into a new list instead of mutating the nested
            # list shared with the caller (dict() is a shallow copy).
            datetimes = []
            for dt in g_temporal_property["datetimes"]:
                if isinstance(dt, int):
                    datetimes.append(datetime.datetime.fromtimestamp(
                        dt / 1e3).strftime("%Y/%m/%dT%H:%M:%S.%f"))
                else:
                    datetimes.append(dt.replace('Z', ''))

            if tproperties_name is not None:
                tproperties_name_list = [tproperties_name]
            else:
                # Bug fix: property names are every key EXCEPT
                # 'datetimes'; the original included 'datetimes' itself
                # and clobbered the tproperties_name parameter.
                tproperties_name_list = [
                    key for key in g_temporal_property
                    if key != 'datetimes']

            # NOTE(review): identifiers come from the route/path; this
            # interpolation should eventually be parameterized too.
            select_query = (
                """select collection_id, mfeature_id, tproperties_name,
                count(datetime_group) as intersect_count
                from tproperties where collection_id ='{0}'
                and mfeature_id='{1}' and tproperties_name in ({2})
                and ((pvalue_float::tstzspan && tstzset('{3}')::tstzspan)
                or (pvalue_text::tstzspan && tstzset('{3}')::tstzspan))
                group by collection_id, mfeature_id, tproperties_name"""
                .format(collection_id, mfeature_id,
                        "'" + "', '".join(tproperties_name_list) + "'",
                        "{" + ", ".join(datetimes) + "}"))
            cur.execute(select_query)

            for row in cur.fetchall():
                if int(row[3]) > 0:
                    # At least one stored instant overlaps: reject.
                    return False
    return True


def get_temporalproperties_group(
        self, collection_id, mfeature_id, datetimes):
    """
    Get temporal properties group

    :param collection_id: local identifier of a collection
    :param mfeature_id: local identifier of a moving feature
    :param datetimes: array of strings <date-time> or epoch ms (int)

    :returns: ID of the group that summarizes same datetime in
        tproperty (max + 1 when no existing group matches, 1 when the
        feature has no tproperties rows at all)
    """
    with self.connection.cursor() as cur:
        # Normalize into a new list; do not mutate the caller's list.
        normalized = []
        for dt in datetimes:
            if isinstance(dt, int):
                normalized.append(datetime.datetime.fromtimestamp(
                    dt / 1e3).strftime("%Y/%m/%dT%H:%M:%S.%f"))
            else:
                normalized.append(dt.replace('Z', ''))

        select_query = (
            """select temp1.collection_id, temp1.mfeature_id,
            COALESCE(temp2.datetime_group, temp3.max_datetime_group)
            from (select collection_id, mfeature_id from tproperties
            where collection_id ='{0}' and mfeature_id='{1}') temp1
            left outer join (select collection_id, mfeature_id,
            datetime_group from tproperties
            where collection_id ='{0}' and mfeature_id='{1}'
            and (timestamps(getTime(pvalue_float)) = tstzset('{2}')
            or timestamps(getTime(pvalue_text)) = tstzset('{2}'))) temp2
            on temp1.collection_id = temp2.collection_id
            and temp1.mfeature_id = temp2.mfeature_id
            left outer join (select collection_id, mfeature_id,
            COALESCE(max(datetime_group), 0) + 1 as max_datetime_group
            from tproperties where collection_id ='{0}'
            and mfeature_id='{1}'
            group by collection_id, mfeature_id ) temp3
            on temp1.collection_id = temp3.collection_id
            and temp1.mfeature_id = temp3.mfeature_id """
            .format(collection_id, mfeature_id,
                    "{" + ", ".join(normalized) + "}"))
        cur.execute(select_query)
        result = cur.fetchall()
    if len(result) > 0:
        return result[0][2]
    return 1


def get_velocity(
        self, collection_id, mfeature_id, tgeometry_id, datetime=None):
    """
    Get temporal property of velocity

    :param collection_id: local identifier of a collection
    :param mfeature_id: local identifier of a moving feature
    :param tgeometry_id: local identifier of a geometry
    :param datetime: timestamp string (default None); when None the
        whole speed series is returned, otherwise only the value at
        that instant.
        NOTE(review): the parameter name shadows the datetime module —
        kept for interface compatibility.

    :returns: TemporalProperty of velocity
    """
    form = "MTS"
    name = "velocity"

    with self.connection.cursor() as cur:
        if datetime is None:
            select_query = f"""SELECT speed(tgeog_property) AS speed
                FROM tgeometry
                WHERE collection_id = '{collection_id}'
                and mfeature_id = '{mfeature_id}'
                and tgeometry_id = '{tgeometry_id}'"""
        else:
            select_query = \
                f"""SELECT valueAtTimestamp(speed(tgeog_property),
                '{datetime}') AS speed, interp(speed(tgeog_property))
                AS interp
                FROM tgeometry
                WHERE collection_id = '{collection_id}'
                and mfeature_id = '{mfeature_id}'
                and tgeometry_id = '{tgeometry_id}'"""
        cur.execute(select_query)
        result = cur.fetchall()

    return self.to_tproperties(result, name, form, datetime)
# NOTE(review): methods of PostgresMobilityDB, reproduced as clean Python
# from the patch text.  The tail of get_distance (cut at this chunk's
# boundary) is not reproducible here.

def get_acceleration(
        self, collection_id, mfeature_id, tgeometry_id, datetime=None):
    """
    Get temporal property of acceleration, derived from the stored
    speed series by finite differences.

    :param collection_id: local identifier of a collection
    :param mfeature_id: local identifier of a moving feature
    :param tgeometry_id: local identifier of a geometry
    :param datetime: timestamp string (default None)

    :returns: TemporalProperty of acceleration
    """
    tProperty = {
        "name": "acceleration",
        "type": "TReal",
        "form": "MTS",
        "valueSequence": []
    }
    with self.connection.cursor() as cur:
        select_query = f"""SELECT speed(tgeog_property) AS speed
            FROM tgeometry WHERE collection_id = '{collection_id}'
            and mfeature_id = '{mfeature_id}'
            and tgeometry_id = '{tgeometry_id}'"""
        cur.execute(select_query)
        result = cur.fetchall()

    pymeos_initialize()
    for each_row in result:
        speed_seq = TFloatSeqSet(each_row[0])
        interpolation = speed_seq.interpolation().to_string()
        instants = speed_seq.instants()

        each_time = [
            inst.time().start_timestamp().strftime(
                '%Y-%m-%dT%H:%M:%S.%fZ')
            for inst in instants]
        if interpolation == "Step":
            # Stepwise speed is constant between instants, so its
            # derivative is zero everywhere.
            each_values = [0 for _ in instants]
        else:
            each_values = [inst.value() for inst in instants]

        value_sequence = self.calculate_acceleration(
            each_values, each_time, datetime)
        if value_sequence.get("values"):
            if datetime is not None:
                value_sequence["interpolation"] = "Discrete"
            elif interpolation == "Linear":
                # Differentiating a piecewise-linear series yields a
                # stepwise one.
                value_sequence["interpolation"] = "Step"
            else:
                value_sequence["interpolation"] = interpolation
        tProperty["valueSequence"].append(value_sequence)
    return tProperty


def to_tproperties(self, results, name, form, datetime):
    """
    Convert query rows into a TemporalProperty object
    (typo fix: was "Temoral").

    :param results: temporal property rows returned by the query
    :param name: temporal property name
    :param form: a unit of measurement
    :param datetime: timestamp string or None

    :returns: TemporalProperty object
    """
    tProperty = {
        "name": name,
        "type": "TReal",
        "form": form,
        "valueSequence": []
    }
    pymeos_initialize()
    for each_row in results:
        if datetime is None:
            # Velocity rows hold a sequence set; others a sequence.
            if name == "velocity":
                converted = TFloatSeqSet(each_row[0])
            else:
                converted = TFloatSeq(each_row[0])
            instants = converted.instants()
            value_sequence = {
                "datetimes": [
                    inst.time().start_timestamp().strftime(
                        '%Y-%m-%dT%H:%M:%S.%fZ')
                    for inst in instants],
                "values": [inst.value() for inst in instants],
                "interpolation": converted.interpolation().to_string()
            }
        else:
            # valueAtTimestamp already produced a single scalar.
            value_sequence = {
                "datetimes": [format_datetime(datetime)],
                "values": [each_row[0]],
                "interpolation": "Discrete"
            }
        tProperty["valueSequence"].append(value_sequence)
    return tProperty


def calculate_acceleration(self, velocities, times, chk_dtime):
    """
    Calculate acceleration by finite differences over a speed series.

    :param velocities: speed values
    :param times: timestamps ('%Y-%m-%dT%H:%M:%S.%fZ') matching values
    :param chk_dtime: single timestamp ('%Y-%m-%d %H:%M:%S.%f') or
        None; when given only the interval containing it is evaluated

    :returns: valueSequence object (empty dict when chk_dtime matches
        no interval)
    """
    value_sequence = {}
    time_format = '%Y-%m-%d %H:%M:%S.%f'
    time_format2 = '%Y-%m-%dT%H:%M:%S.%fZ'
    if chk_dtime is not None:
        chk_time = datetime.datetime.strptime(chk_dtime, time_format)

        for i in range(1, len(velocities)):
            time1 = datetime.datetime.strptime(times[i - 1], time_format2)
            time2 = datetime.datetime.strptime(times[i], time_format2)
            if time1 <= chk_time <= time2:
                delta_t = (time2 - time1).total_seconds()
                if delta_t == 0:
                    # Bug fix: guard against duplicate timestamps,
                    # which previously raised ZeroDivisionError.
                    continue
                acceleration = (velocities[i] - velocities[i - 1]) / delta_t
                value_sequence["values"] = [acceleration]
                value_sequence["datetimes"] = [format_datetime(chk_dtime)]
                break
    else:
        value_sequence["values"] = []
        value_sequence["datetimes"] = []
        for i in range(1, len(velocities)):
            time1 = datetime.datetime.strptime(times[i - 1], time_format2)
            time2 = datetime.datetime.strptime(times[i], time_format2)
            delta_t = (time2 - time1).total_seconds()
            if delta_t == 0:
                # Bug fix: skip zero-length intervals instead of
                # raising ZeroDivisionError.
                continue
            value_sequence["values"].append(
                (velocities[i] - velocities[i - 1]) / delta_t)
            value_sequence["datetimes"].append(times[i])

    return value_sequence


# --- tests/test_postgresql_mobilitydb.py (new file in this patch) ---
# Bug fix: this patch creates the provider at
# pygeoapi/provider/postgresql_mobilitydb.py, and the API module (patch 02)
# imports pygeoapi.provider.postgresql_mobilitydb — the original test
# imported the non-existent pygeoapi.pmdb_provider package.
from pygeoapi.provider.postgresql_mobilitydb import PostgresMobilityDB
import pytest


@pytest.fixture(scope="session")
def context():
    """Mutable dict shared across the ordered test sequence."""
    return {}


@pytest.fixture()
def collection_property():
    return {
        "title": "moving_feature_collection_sample",
        "updateFrequency": 1000,
        "description": "example"
    }


@pytest.fixture()
def update_collection_property():
    return {
        "title": "moving_feature_collection_sample",
        "updateFrequency": 1000,
        "description": "test_update"
    }
# Fixtures and integration tests for PostgresMobilityDB.  The tests are
# order-dependent: POST tests store ids in the session-scoped `context`
# fixture, later tests read them, and the DELETE tests clean up.

@pytest.fixture()
def update_collection_property():
    return {
        "title": "moving_feature_collection_sample",
        "updateFrequency": 1000,
        "description": "test_update"
    }


def _sample_coordinates():
    """Trajectory vertices shared by the geometry fixtures."""
    return [
        [139.757083, 35.627701, 0.5],
        [139.757399, 35.627701, 2],
        [139.757555, 35.627688, 4],
        [139.757651, 35.627596, 4],
        [139.757716, 35.627483, 4],
    ]


def _sample_orientations():
    """Orientation entries shared by the geometry fixtures."""
    return [
        {"scales": [1, 1, 1], "angles": list(angles)}
        for angles in ((0, 0, 0), (0, 355, 0), (0, 0, 330),
                       (0, 0, 300), (0, 0, 270))
    ]


@pytest.fixture()
def movingfeature():
    coordinates = _sample_coordinates()
    return {
        "type": "Feature",
        "crs": {
            "type": "Name",
            "properties": {"name": "urn:ogc:def:crs:OGC:1.3:CRS84"}
        },
        "trs": {
            "type": "Link",
            "properties": {
                "type": "OGCDEF",
                "href": "http://www.opengis.net/def/uom/ISO-8601/0/Gregorian"  # noqa
            }
        },
        "temporalGeometry": {
            "type": "MovingPoint",
            "datetimes": [f"2011-07-14T22:01:0{i}Z" for i in range(1, 6)],
            "coordinates": coordinates,
            "interpolation": "Linear",
            "base": {
                "type": "glTF",
                "href": "http://www.opengis.net/spec/movingfeatures/json/1.0/prism/example/car3dmodel.gltf"  # noqa
            },
            "orientations": _sample_orientations()
        },
        "temporalProperties": [
            {
                "datetimes": ["2011-07-14T22:01:01.450Z",
                              "2011-07-14T23:01:01.450Z",
                              "2011-07-15T00:01:01.450Z"],
                "length": {
                    "type": "Measure",
                    "form": "http://www.qudt.org/qudt/owl/1.0.0/quantity/Length",  # noqa
                    "values": [1, 2.4, 1],
                    "interpolation": "Linear"
                },
                "discharge": {
                    "type": "Measure",
                    "form": "MQS",
                    "values": [3, 4, 5],
                    "interpolation": "Step"
                }
            },
            {
                "datetimes": [1465621816590, 1465711526300],
                "camera": {
                    "type": "Image",
                    "values": [
                        "http://www.opengis.net/spec/movingfeatures/json/1.0/prism/example/image1",  # noqa
                        "iVBORw0KGgoAAAANSUhEU......"
                    ],
                    "interpolation": "Discrete"
                },
                "labels": {
                    "type": "Text",
                    "values": ["car", "human"],
                    "interpolation": "Discrete"
                }
            }
        ],
        "geometry": {
            "type": "LineString",
            "coordinates": _sample_coordinates()
        },
        "properties": {
            "name": "car1",
            "state": "test1",
            "video": "http://www.opengis.net/spec/movingfeatures/json/1.0/prism/example/video.mpeg"  # noqa
        },
        "bbox": [139.757083, 35.627483, 0, 139.757716, 35.627701, 4.5],
        "time": ["2011-07-14T22:01:01Z", "2011-07-15T01:11:22Z"],
        "id": "mf-1"
    }


@pytest.fixture()
def temporalgeometry():
    return {
        "type": "MovingPoint",
        "datetimes": [f"2011-07-14T22:01:{i:02d}Z" for i in range(6, 11)],
        "coordinates": _sample_coordinates(),
        "interpolation": "Linear",
        "base": {
            "type": "glTF",
            "href": "https://www.opengis.net/spec/movingfeatures/json/1.0/prism/example/car3dmodel.gltf"  # noqa
        },
        "orientations": _sample_orientations()
    }


@pytest.fixture()
def temporalproperties():
    return [
        {
            "datetimes": ["2011-07-16T22:01:01.450Z",
                          "2011-07-16T23:01:01.450Z",
                          "2011-07-17T00:01:01.450Z"],
            "length": {
                "type": "Measure",
                "form": "http://www.qudt.org/qudt/owl/1.0.0/quantity/Length",
                "values": [1, 2.4, 1],
                "interpolation": "Linear"
            },
            "discharge": {
                "type": "Measure",
                "form": "MQS",
                "values": [3, 4, 5],
                "interpolation": "Step"
            }
        },
        {
            "datetimes": ["2011-07-16T22:01:01.450Z",
                          "2011-07-16T23:01:01.450Z"],
            "camera": {
                "type": "Image",
                "values": [
                    "http://www.opengis.net/spec/movingfeatures/json/1.0/prism/example/image1",  # noqa
                    "iVBORw0KGgoAAAANSUhEU......"
                ],
                "interpolation": "Discrete"
            },
            "labels": {
                "type": "Text",
                "values": ["car", "human"],
                "interpolation": "Discrete"
            }
        }
    ]


@pytest.fixture()
def temporalvalue_data():
    return {
        "datetimes": ["2011-07-18T08:00:00Z",
                      "2011-07-18T08:00:01Z",
                      "2011-07-18T08:00:02Z"],
        "values": [0, 20, 50],
        "interpolation": "Linear"
    }


def _connected_provider():
    """Return a PostgresMobilityDB instance with an open connection."""
    provider = PostgresMobilityDB()
    provider.connect()
    return provider


def _assert_not_none(record, count):
    """Assert the first `count` fields of a row are all populated."""
    for idx in range(count):
        assert record[idx] is not None


def _assert_tproperty_doc(doc):
    """Assert a TemporalProperty document has its mandatory members."""
    assert doc
    for key in ('name', 'type', 'form', 'valueSequence'):
        assert doc.get(key) is not None


def test_query_post_collection(context, collection_property):
    provider = _connected_provider()
    collection_id = provider.post_collection(collection_property)

    context['collection_id'] = collection_id
    assert collection_id


def test_query_post_movingfeature(context, movingfeature):
    provider = _connected_provider()
    mfeature_id = provider.post_movingfeature(
        context.get('collection_id'), movingfeature)

    context['mfeature_id'] = mfeature_id
    assert mfeature_id


def test_query_post_temporalgeometry(context, temporalgeometry):
    provider = _connected_provider()
    tgeometry_id = provider.post_temporalgeometry(
        context.get('collection_id'), context.get('mfeature_id'),
        temporalgeometry)

    context['tgeometry_id'] = tgeometry_id
    assert tgeometry_id


def test_query_post_temporalproperties(context, temporalproperties):
    provider = _connected_provider()
    l_temporal_properties = (
        temporalproperties if isinstance(temporalproperties, list)
        else [temporalproperties])

    canPost = provider.check_temporalproperty_can_post(
        context.get('collection_id'),
        context.get('mfeature_id'),
        l_temporal_properties)

    if canPost:
        for temporal_property in l_temporal_properties:
            tProperty_name = provider.post_temporalproperties(
                context.get('collection_id'),
                context.get('mfeature_id'),
                temporal_property)

    context['tProperty_name'] = tProperty_name
    assert tProperty_name


def test_query_post_temporalvalue(context, temporalvalue_data):
    provider = _connected_provider()
    provider.post_temporalvalue(context.get('collection_id'),
                                context.get('mfeature_id'),
                                context.get('tProperty_name'),
                                temporalvalue_data)

    assert True


def test_query_put_collection(context, update_collection_property):
    provider = _connected_provider()
    provider.put_collection(context.get('collection_id'),
                            update_collection_property)

    result = provider.get_collection(context.get('collection_id'))
    assert result[0][1].get('description') == 'test_update'


def test_query_get_collections_list():
    provider = _connected_provider()
    result = provider.get_collections_list()

    assert result and len(result) > 0
    _assert_not_none(result[0], 1)


def test_query_get_collections():
    provider = _connected_provider()
    result = provider.get_collections()

    assert result and len(result) > 0
    # collection_id, collection_property
    _assert_not_none(result[0], 2)


def test_query_get_collection(context):
    provider = _connected_provider()
    result = provider.get_collection(context.get('collection_id'))

    assert result and len(result) == 1
    # collection_id, collection_property, extent lifespan, extent tgeometry
    _assert_not_none(result[0], 4)


def test_query_get_features_list():
    provider = _connected_provider()
    result = provider.get_features_list()

    assert result and len(result) > 0
    # collection_id, mfeature_id
    _assert_not_none(result[0], 2)


def test_query_get_tProperties_name_list():
    provider = _connected_provider()
    result = provider.get_tProperties_name_list()

    assert result and len(result) > 0
    # collection_id, mfeature_id, tproperties_name
    _assert_not_none(result[0], 3)


def test_query_get_features(
        context,
        bbox=[100, 30, 0, 200, 400, 10],
        datetime='2011-07-14 22:01:01.000,2011-07-14 22:01:01.000',
        limit=10, offset=0, sub_trajectory=False):
    provider = _connected_provider()
    result, number_matched, number_returned = provider.get_features(
        context.get('collection_id'), bbox, datetime, limit, offset,
        sub_trajectory)

    assert result and number_matched and number_returned
    assert len(result) > 0
    # collection_id, mfeature_id, geometry, property, lifespan,
    # extent_tGeometry, extent_tProperties float, extent_tProperties text
    _assert_not_none(result[0], 8)


def test_query_get_feature(context):
    provider = _connected_provider()
    result = provider.get_feature(context.get('collection_id'),
                                  context.get('mfeature_id'))

    assert result and len(result) > 0
    # collection_id, mfeature_id, geometry, property, lifespan,
    # extent_tGeometry
    _assert_not_none(result[0], 6)


def test_query_get_temporalgeometries(
        context,
        bbox=[100, 30, 0, 200, 400, 10],
        leaf='2011-07-14 22:01:01.000',
        datetime='2011-07-14 22:01:01.000,2011-07-14 22:01:01.000',
        limit=10,
        offset=0,
        sub_trajectory=False):
    provider = _connected_provider()
    result, number_matched, number_returned = \
        provider.get_temporalgeometries(
            context.get('collection_id'), context.get('mfeature_id'),
            bbox, leaf, datetime, limit, offset, sub_trajectory)

    assert result and number_matched and number_returned
    assert len(result) > 0
    # collection_id, mfeature_id, tgeometry_id
    _assert_not_none(result[0], 3)


def test_query_get_temporalproperties(
        context,
        datetime='2011-07-14 22:01:01.450,2011-07-14 22:01:01.450',
        limit=10,
        offset=0,
        sub_temporal_value=True):
    provider = _connected_provider()
    result, number_matched, number_returned = \
        provider.get_temporalproperties(
            context.get('collection_id'), context.get('mfeature_id'),
            datetime, limit, offset, sub_temporal_value)

    assert result and number_matched and number_returned
    assert len(result) > 0
    # collection_id, mfeature_id, tproperties_name, tproperty
    _assert_not_none(result[0], 4)


def test_query_get_temporalproperties_value(
        context,
        datetime='2011-07-16 22:01:01.450,2011-07-16 22:01:01.450',
        leaf='2011-07-16 22:01:01.450',
        sub_temporal_value=False):
    provider = _connected_provider()
    result = provider.get_temporalproperties_value(
        context.get('collection_id'), context.get('mfeature_id'),
        context.get('tProperty_name'), datetime, leaf, sub_temporal_value)

    assert result and len(result) > 0
    row = result[0]
    # collection_id, mfeature_id, tproperties_name, tproperty,
    # datetime_group
    _assert_not_none(row, 5)
    # Exactly one of the typed value columns should be populated.
    assert row[5] is not None or row[6] is not None


def test_query_get_velocity(context,
                            datetime='2011-07-14 22:01:01.450'):
    provider = _connected_provider()
    tProperties = provider.get_velocity(
        context.get('collection_id'), context.get('mfeature_id'),
        context.get('tgeometry_id'), datetime)

    _assert_tproperty_doc(tProperties)


def test_query_get_distance(context,
                            datetime='2011-07-14 22:01:01.450'):
    provider = _connected_provider()
    tProperties = provider.get_distance(
        context.get('collection_id'), context.get('mfeature_id'),
        context.get('tgeometry_id'), datetime)

    _assert_tproperty_doc(tProperties)


def test_query_get_acceleration(context,
                                datetime='2011-07-14 22:01:01.450'):
    provider = _connected_provider()
    tProperties = provider.get_acceleration(
        context.get('collection_id'), context.get('mfeature_id'),
        context.get('tgeometry_id'), datetime)

    _assert_tproperty_doc(tProperties)


def test_query_delete_temporalproperties(context):
    restriction = "AND tproperties_name ='{0}'".format(
        context.get('tProperty_name'))

    provider = _connected_provider()
    provider.delete_temporalproperties(restriction)

    assert True


def test_query_delete_temporalgeometry(context):
    restriction = "AND tgeometry_id ='{0}'".format(
        context.get('tgeometry_id'))

    provider = _connected_provider()
    provider.delete_temporalgeometry(restriction)

    assert True


def test_query_delete_movingfeature(context):
    restriction = "AND mfeature_id ='{0}'".format(context.get('mfeature_id'))

    provider = _connected_provider()
    provider.delete_movingfeature(restriction)

    result = provider.get_feature(context.get('collection_id'),
                                  context.get('mfeature_id'))
    assert len(result) == 0


def test_query_delete_collection(context):
    restriction = "AND collection_id ='{0}'".format(
        context.get('collection_id'))

    provider = _connected_provider()
    provider.delete_collection(restriction)

    result = provider.get_collection(context.get('collection_id'))
    assert len(result) == 0
obtaining a copy of this software and associated documentation
+# files (the "Software"), to deal in the Software without
+# restriction, including without limitation the rights to use,
+# copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the
+# Software is furnished to do so, subject to the following
+# conditions:
+#
+# The above copyright notice and this permission notice shall be
+# included in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+# OTHER DEALINGS IN THE SOFTWARE.
+#
+# =================================================================
+""" OGC API - Moving Features endpoints for pygeoapi: parses requests from the web framework,
+queries the MobilityDB provider and builds the HTTP responses.
+""" + +from copy import deepcopy +from datetime import datetime +from functools import partial +import json +import logging +import re +from typing import Any, Tuple, Union +import urllib.parse + +from dateutil.parser import parse as dateparse +import pytz +from http import HTTPStatus + +from pygeoapi import l10n +from pygeoapi.log import setup_logger +from pygeoapi.linked_data import (jsonldify) +from pygeoapi.plugin import PLUGINS +from pygeoapi.process.manager.base import get_manager + +from pymeos import (STBox, TsTzSpan, TTextSeq, TFloatSeq, + TGeomPointSeq, Temporal, pymeos_initialize) +import psycopg2 +from pygeoapi.provider.postgresql_mobilitydb import PostgresMobilityDB +from pygeoapi.api import ( + pre_process, gzip, APIRequest, SYSTEM_LOCALE, CHARSET, + TEMPLATES, FORMAT_TYPES, F_JSON, F_HTML, F_GZIP) +from pygeoapi.util import ( + UrlPrefetcher, get_api_rules, get_base_url, render_j2_template, + to_json) +LOGGER = logging.getLogger(__name__) + + +class MOVING_FEATURES: + def __init__(self, config, openapi): + """ + constructor + + :param config: configuration dict + :param openapi: openapi dict + + :returns: `pygeoapi.API` instance + """ + + self.config = config + self.openapi = openapi + self.api_headers = get_api_rules(self.config).response_headers + self.base_url = get_base_url(self.config) + self.prefetcher = UrlPrefetcher() + + CHARSET[0] = config['server'].get('encoding', 'utf-8') + if config['server'].get('gzip'): + FORMAT_TYPES[F_GZIP] = 'application/gzip' + FORMAT_TYPES.move_to_end(F_JSON) + + # Process language settings (first locale is default!) 
+ self.locales = l10n.get_locales(config) + self.default_locale = self.locales[0] + + if 'templates' not in self.config['server']: + self.config['server']['templates'] = {'path': TEMPLATES} + + if 'pretty_print' not in self.config['server']: + self.config['server']['pretty_print'] = False + + self.pretty_print = self.config['server']['pretty_print'] + + setup_logger(self.config['logging']) + + # Create config clone for HTML templating with modified base URL + self.tpl_config = deepcopy(self.config) + self.tpl_config['server']['url'] = self.base_url + + self.manager = get_manager(self.config) + LOGGER.info('Process manager plugin loaded') + + @gzip + @pre_process + @jsonldify + def manage_collection(self, request: Union[APIRequest, Any], + action, dataset=None) -> Tuple[dict, int, str]: + """ + Adds a collection + + :param request: A request object + :param dataset: dataset name + + :returns: tuple of headers, status code, content + """ + + headers = request.get_response_headers(SYSTEM_LOCALE) + pmdb_provider = PostgresMobilityDB() + collection_id = str(dataset) + if action in ['create', 'update']: + data = request.data + if not data: + # TODO not all processes require input, e.g. 
time-dependent or + # random value generators + msg = 'missing request data' + return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'MissingParameterValue', msg) + + try: + # Parse bytes data, if applicable + data = data.decode() + LOGGER.debug(data) + except (UnicodeDecodeError, AttributeError): + pass + + try: + data = json.loads(data) + except (json.decoder.JSONDecodeError, TypeError) as err: + # Input does not appear to be valid JSON + LOGGER.error(err) + msg = 'invalid request data' + return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + if action == 'create': + try: + pmdb_provider.connect() + collection_id = pmdb_provider.post_collection(data) + except (Exception, psycopg2.Error) as error: + msg = str(error) + return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'ConnectingError', msg) + finally: + pmdb_provider.disconnect() + + url = '{}/{}'.format(self.get_collections_url(), collection_id) + + headers['Location'] = url + return headers, HTTPStatus.CREATED, '' + + if action == 'update': + LOGGER.debug('Updating item') + try: + pmdb_provider.connect() + pmdb_provider.put_collection(collection_id, data) + except (Exception, psycopg2.Error) as error: + msg = str(error) + return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'ConnectingError', msg) + finally: + pmdb_provider.disconnect() + + return headers, HTTPStatus.NO_CONTENT, '' + + if action == 'delete': + LOGGER.debug('Deleting item') + try: + pmdb_provider.connect() + pmdb_provider.delete_collection( + "AND collection_id ='{0}'".format(collection_id)) + + except (Exception, psycopg2.Error) as error: + msg = str(error) + return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'ConnectingError', msg) + finally: + pmdb_provider.disconnect() + + return headers, HTTPStatus.NO_CONTENT, '' + + @gzip + @pre_process + @jsonldify + def 
get_collection(self, request: Union[APIRequest, Any], + dataset=None) -> Tuple[dict, int, str]: + """ + Queries collection + + :param request: A request object + :param dataset: dataset name + + :returns: tuple of headers, status code, content + """ + pmdb_provider = PostgresMobilityDB() + collection_id = str(dataset) + if not request.is_valid(): + return self.get_format_exception(request) + headers = request.get_response_headers() + + try: + pmdb_provider.connect() + result = pmdb_provider.get_collection(collection_id) + if len(result) > 0: + row = result[0] + else: + msg = 'Collection not found' + LOGGER.error(msg) + return self.get_exception( + HTTPStatus.NOT_FOUND, + headers, request.format, 'NotFound', msg) + except (Exception, psycopg2.Error) as error: + msg = str(error) + return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'ConnectingError', msg) + + collection = {} + if row is not None: + collection_id = row[0] + collection = row[1] + collection['itemType'] = 'movingfeature' + collection['id'] = collection_id + + crs = None + trs = None + if 'crs' in collection: + crs = collection.pop('crs', None) + if 'trs' in collection: + trs = collection.pop('trs', None) + + extend_stbox = STBox(row[3]) if row[3] is not None else None + lifespan = TsTzSpan(row[2]) if row[2] is not None else None + + bbox = [] + if extend_stbox is not None: + bbox.append(extend_stbox.xmin()) + bbox.append(extend_stbox.ymin()) + if extend_stbox.zmin() is not None: + bbox.append(extend_stbox.zmin()) + bbox.append(extend_stbox.xmax()) + bbox.append(extend_stbox.ymax()) + if extend_stbox.zmax() is not None: + bbox.append(extend_stbox.zmax()) + + if crs is None: + if extend_stbox.srid() == 4326: + if extend_stbox.zmax() is not None: + crs = 'http://www.opengis.net/def/crs/OGC/0/CRS84h' + else: + crs = 'http://www.opengis.net/def/crs/\ + OGC/1.3/CRS84' + + if crs is None: + crs = 'http://www.opengis.net/def/crs/OGC/1.3/CRS84' + if trs is None: + trs = 
'http://www.opengis.net/def/uom/ISO-8601/0/Gregorian' + + time = [] + if lifespan is not None: + time.append(lifespan.lower().strftime("%Y-%m-%dT%H:%M:%SZ")) + time.append(lifespan.upper().strftime("%Y-%m-%dT%H:%M:%SZ")) + else: + if extend_stbox is not None: + if extend_stbox.tmin() is not None: + time.append(extend_stbox.tmin().strftime( + "%Y-%m-%dT%H:%M:%SZ")) + time.append(extend_stbox.tmax().strftime( + "%Y-%m-%dT%H:%M:%SZ")) + + collection['extent'] = { + 'spatial': { + 'bbox': bbox, + 'crs': crs + }, + 'temporal': { + 'interval': time, + 'trs': trs + } + } + + collection['links'] = [] + collection['links'].append({ + 'href': '{}/{}'.format( + self.get_collections_url(), collection_id), + 'rel': request.get_linkrel(F_JSON), + 'type': FORMAT_TYPES[F_JSON] + }) + + return headers, HTTPStatus.OK, to_json(collection, self.pretty_print) + + @gzip + @pre_process + def get_collection_items( + self, request: Union[APIRequest, Any], + dataset) -> Tuple[dict, int, str]: + """ + Queries collection + + :param request: A request object + :param dataset: dataset name + + :returns: tuple of headers, status code, content + """ + + # Set Content-Language to system locale until provider locale + # has been determined + if not request.is_valid(): + return self.get_format_exception(request) + headers = request.get_response_headers(SYSTEM_LOCALE) + + excuted, collections = get_list_of_collections_id() + collection_id = dataset + if excuted is False: + msg = str(collections) + return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'ConnectingError', msg) + + if collection_id not in collections: + msg = 'Collection not found' + LOGGER.error(msg) + return self.get_exception( + HTTPStatus.NOT_FOUND, + headers, request.format, 'NotFound', msg) + LOGGER.debug('Processing query parameters') + + LOGGER.debug('Processing offset parameter') + try: + offset = int(request.params.get('offset')) + if offset < 0: + msg = 'offset value should be positive or zero' + 
return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + except TypeError as err: + LOGGER.warning(err) + offset = 0 + except ValueError: + msg = 'offset value should be an integer' + return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + LOGGER.debug('Processing limit parameter') + try: + limit = int(request.params.get('limit')) + # TODO: We should do more validation, against the min and max + # allowed by the server configuration + if limit <= 0: + msg = 'limit value should be strictly positive' + return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + if limit > 10000: + msg = 'limit value should be less than or equal to 10000' + return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + except TypeError as err: + LOGGER.warning(err) + limit = int(self.config['server']['limit']) + except ValueError: + msg = 'limit value should be an integer' + return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + LOGGER.debug('Processing bbox parameter') + + bbox = request.params.get('bbox') + + if bbox is None: + bbox = [] + else: + try: + bbox = validate_bbox(bbox) + except ValueError as err: + msg = str(err) + return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + LOGGER.debug('Processing datetime parameter') + datetime_ = request.params.get('datetime') + try: + datetime_ = validate_datetime(datetime_) + except ValueError as err: + msg = str(err) + return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + sub_trajectory = request.params.get('subTrajectory') + if sub_trajectory is None: + sub_trajectory = False + + LOGGER.debug('Querying provider') + LOGGER.debug('offset: 
{}'.format(offset)) + LOGGER.debug('limit: {}'.format(limit)) + LOGGER.debug('bbox: {}'.format(bbox)) + LOGGER.debug('datetime: {}'.format(datetime_)) + + pmdb_provider = PostgresMobilityDB() + content = { + "type": "FeatureCollection", + "features": [], + "crs": {}, + "trs": {}, + "links": [] + } + + try: + pmdb_provider.connect() + result, number_matched, number_returned = \ + pmdb_provider.get_features(collection_id=collection_id, + bbox=bbox, datetime=datetime_, + limit=limit, offset=offset, + sub_trajectory=sub_trajectory) + except (Exception, psycopg2.Error) as error: + msg = str(error) + return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'ConnectingError', msg) + + mfeatures = [] + crs = None + trs = None + + split_mfeature = {} + for i in range(len(result)): + mfeature_id = str(result[i][1]) + if mfeature_id not in split_mfeature: + split_mfeature[mfeature_id] = [] + split_mfeature[mfeature_id].append(i) + + pymeos_initialize() + for key, mfeature_row_index in split_mfeature.items(): + row = result[mfeature_row_index[0]] + + mfeature_id = row[1] + mfeature = row[3] + mfeature['id'] = mfeature_id + mfeature['type'] = 'Feature' + + if 'crs' in mfeature and crs is None: + crs = mfeature['crs'] + if 'trs' in mfeature and trs is None: + trs = mfeature['trs'] + + if row[2] is not None: + mfeature['geometry'] = json.loads(row[2]) + else: + mfeature['geometry'] = None + + if 'properties' not in mfeature: + mfeature['properties'] = None + + if sub_trajectory or sub_trajectory == "true": + prisms = [] + for row_index in mfeature_row_index: + row_tgeometory = result[int(row_index)] + if row_tgeometory[7] is not None: + mfeature_check = row_tgeometory[1] + if mfeature_check == mfeature_id: + temporal_geometry = json.loads( + Temporal.as_mfjson( + TGeomPointSeq( + str(row_tgeometory[7]).replace( + "'", "")), + False)) + if 'crs' in temporal_geometry and crs is None: + crs = temporal_geometry['crs'] + if 'trs' in temporal_geometry and trs is 
None: + trs = temporal_geometry['trs'] + temporal_geometry = \ + pmdb_provider.\ + convert_temporalgeometry_to_old_version( + temporal_geometry) + temporal_geometry['id'] = row_tgeometory[6] + prisms.append(temporal_geometry) + mfeature['temporalGeometry'] = prisms + + extend_stbox = STBox(row[5]) if row[5] is not None else None + lifespan = TsTzSpan(row[4]) if row[4] is not None else None + + bbox = [] + if extend_stbox is not None: + bbox.append(extend_stbox.xmin()) + bbox.append(extend_stbox.ymin()) + if extend_stbox.zmin() is not None: + bbox.append(extend_stbox.zmin()) + bbox.append(extend_stbox.xmax()) + bbox.append(extend_stbox.ymax()) + if extend_stbox.zmax() is not None: + bbox.append(extend_stbox.zmax()) + mfeature['bbox'] = bbox + + time = [] + if lifespan is not None: + time.append(lifespan.lower().strftime("%Y-%m-%dT%H:%M:%SZ")) + time.append(lifespan.upper().strftime("%Y-%m-%dT%H:%M:%SZ")) + else: + if extend_stbox is not None: + if extend_stbox.tmin() is not None: + time.append(extend_stbox.tmin().strftime( + "%Y-%m-%dT%H:%M:%SZ")) + time.append(extend_stbox.tmax().strftime( + "%Y-%m-%dT%H:%M:%SZ")) + mfeature['time'] = time + + if 'crs' not in mfeature: + mfeature['crs'] = { + "type": "Name", + "properties": "urn:ogc:def:crs:OGC:1.3:CRS84" + } + if 'trs' not in mfeature: + mfeature['trs'] = { + "type": "Name", + "properties": "urn:ogc:data:time:iso8601" + } + mfeatures.append(mfeature) + + content['features'] = mfeatures + if crs is not None: + content['crs'] = crs + else: + content['crs'] = { + "type": "Name", + "properties": "urn:ogc:def:crs:OGC:1.3:CRS84" + } + + if trs is not None: + content['trs'] = trs + else: + content['trs'] = { + "type": "Name", + "properties": "urn:ogc:data:time:iso8601" + } + + # TODO: translate titles + uri = '{}/{}/items'.format(self.get_collections_url(), collection_id) + + serialized_query_params = '' + for k, v in request.params.items(): + if k not in ('f', 'offset'): + serialized_query_params += '&' + 
serialized_query_params += urllib.parse.quote(k, safe='') + serialized_query_params += '=' + serialized_query_params += urllib.parse.quote(str(v), safe=',') + + content['links'] = [ + {'href': '{}?offset={}{}'.format( + uri, offset, serialized_query_params), + 'rel': request.get_linkrel(F_JSON), + 'type': FORMAT_TYPES[F_JSON]}] + + if len(content['features']) == limit: + next_ = offset + limit + content['links'].append( + {'href': '{}?offset={}{}'.format( + uri, next_, serialized_query_params), + 'type': 'application/geo+json', 'rel': 'next'}) + + content['timeStamp'] = datetime.utcnow().strftime( + '%Y-%m-%dT%H:%M:%S.%fZ') + + content['numberMatched'] = number_matched + content['numberReturned'] = number_returned + return headers, HTTPStatus.OK, to_json(content, self.pretty_print) + + @gzip + @pre_process + def manage_collection_item( + self, request: Union[APIRequest, Any], + action, dataset, identifier=None) -> Tuple[dict, int, str]: + """ + Adds an item to a collection + + :param request: A request object + :param dataset: dataset name + + :returns: tuple of headers, status code, content + """ + + if not request.is_valid(PLUGINS['formatter'].keys()): + return self.get_format_exception(request) + + # Set Content-Language to system locale until provider locale + # has been determined + headers = request.get_response_headers(SYSTEM_LOCALE) + + pmdb_provider = PostgresMobilityDB() + excuted, collections = get_list_of_collections_id() + + if excuted is False: + msg = str(collections) + return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'ConnectingError', msg) + + if dataset not in collections: + msg = 'Collection not found' + LOGGER.error(msg) + return self.get_exception( + HTTPStatus.NOT_FOUND, + headers, request.format, 'NotFound', msg) + + collection_id = dataset + mfeature_id = identifier + if action == 'create': + if not request.data: + msg = 'No data found' + LOGGER.error(msg) + return self.get_exception( + HTTPStatus.BAD_REQUEST, 
+ headers, request.format, 'InvalidParameterValue', msg) + + data = request.data + try: + # Parse bytes data, if applicable + data = data.decode() + LOGGER.debug(data) + except (UnicodeDecodeError, AttributeError): + pass + + try: + data = json.loads(data) + except (json.decoder.JSONDecodeError, TypeError) as err: + # Input does not appear to be valid JSON + LOGGER.error(err) + msg = 'invalid request data' + return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + if check_required_field_feature(data) is False: + # TODO not all processes require input + msg = 'The required tag (e.g., type,temporalgeometry) \ + is missing from the request data.' + return self.get_exception( + HTTPStatus.NOT_IMPLEMENTED, + headers, request.format, 'MissingParameterValue', msg) + + LOGGER.debug('Creating item') + try: + pmdb_provider.connect() + if data['type'] == 'FeatureCollection': + for feature in data['features']: + if check_required_field_feature(feature) is False: + # TODO not all processes require input + msg = 'The required tag \ + (e.g., type,temporalgeometry) \ + is missing from the request data.' 
+ return self.get_exception( + HTTPStatus.NOT_IMPLEMENTED, + headers, request.format, + 'MissingParameterValue', msg) + mfeature_id = pmdb_provider.post_movingfeature( + collection_id, feature) + else: + mfeature_id = pmdb_provider.post_movingfeature( + collection_id, data) + except (Exception, psycopg2.Error) as error: + msg = str(error) + return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'ConnectingError', msg) + finally: + pmdb_provider.disconnect() + + headers['Location'] = '{}/{}/items/{}'.format( + self.get_collections_url(), dataset, mfeature_id) + + return headers, HTTPStatus.CREATED, '' + + if action == 'delete': + LOGGER.debug('Deleting item') + + try: + pmdb_provider.connect() + pmdb_provider.delete_movingfeature( + "AND mfeature_id ='{0}'".format(mfeature_id)) + + except (Exception, psycopg2.Error) as error: + msg = str(error) + return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'ConnectingError', msg) + finally: + pmdb_provider.disconnect() + + return headers, HTTPStatus.NO_CONTENT, '' + + @gzip + @pre_process + def get_collection_item(self, request: Union[APIRequest, Any], + dataset, identifier) -> Tuple[dict, int, str]: + """ + Get a single collection item + + :param request: A request object + :param dataset: dataset name + :param identifier: item identifier + + :returns: tuple of headers, status code, content + """ + + pmdb_provider = PostgresMobilityDB() + collection_id = str(dataset) + mfeature_id = str(identifier) + if not request.is_valid(): + return self.get_format_exception(request) + headers = request.get_response_headers() + + try: + pmdb_provider.connect() + result = pmdb_provider.get_feature(collection_id, mfeature_id) + if len(result) > 0: + row = result[0] + else: + msg = 'Feature not found' + LOGGER.error(msg) + return self.get_exception( + HTTPStatus.NOT_FOUND, + headers, request.format, 'NotFound', msg) + except (Exception, psycopg2.Error) as error: + msg = str(error) + 
return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'ConnectingError', msg) + + mfeature = {} + if row is not None: + mfeature_id = row[1] + mfeature = row[3] + mfeature['id'] = mfeature_id + mfeature['type'] = 'Feature' + + if row[2] is not None: + mfeature['geometry'] = json.loads(row[2]) + + extend_stbox = STBox(row[5]) if row[5] is not None else None + lifespan = TsTzSpan(row[4]) if row[4] is not None else None + + bbox = [] + if extend_stbox is not None: + bbox.append(extend_stbox.xmin()) + bbox.append(extend_stbox.ymin()) + if extend_stbox.zmin() is not None: + bbox.append(extend_stbox.zmin()) + bbox.append(extend_stbox.xmax()) + bbox.append(extend_stbox.ymax()) + if extend_stbox.zmax() is not None: + bbox.append(extend_stbox.zmax()) + mfeature['bbox'] = bbox + + print(lifespan) + time = [] + if lifespan is not None: + time.append(lifespan.lower().strftime("%Y-%m-%dT%H:%M:%SZ")) + time.append(lifespan.upper().strftime("%Y-%m-%dT%H:%M:%SZ")) + else: + if extend_stbox is not None: + if extend_stbox.tmin() is not None: + time.append(extend_stbox.tmin().strftime( + "%Y-%m-%dT%H:%M:%SZ")) + time.append(extend_stbox.tmax().strftime( + "%Y-%m-%dT%H:%M:%SZ")) + mfeature['time'] = time + + if 'crs' not in mfeature: + mfeature['crs'] = { + "type": "Name", + "properties": "urn:ogc:def:crs:OGC:1.3:CRS84" + } + if 'trs' not in mfeature: + mfeature['trs'] = { + "type": "Name", + "properties": "urn:ogc:data:time:iso8601" + } + mfeature['links'] = [] + mfeature['links'].append({ + 'href': '{}/{}/items/{}'.format( + self.get_collections_url(), collection_id, mfeature_id), + 'rel': request.get_linkrel(F_JSON), + 'type': FORMAT_TYPES[F_JSON] + }) + return headers, HTTPStatus.OK, to_json(mfeature, self.pretty_print) + + @gzip + @pre_process + def get_collection_items_tGeometry(self, + request: Union[APIRequest, Any], + dataset, identifier) \ + -> Tuple[dict, int, str]: + """ + Get temporal Geometry of collection item + + :param request: A request 
object + :param dataset: dataset name + :param identifier: item identifier + + :returns: tuple of headers, status code, content + """ + + if not request.is_valid(): + return self.get_format_exception(request) + headers = request.get_response_headers(SYSTEM_LOCALE) + + excuted, feature_list = get_list_of_features_id() + if excuted is False: + msg = str(feature_list) + return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'ConnectingError', msg) + + if [dataset, identifier] not in feature_list: + msg = 'Feature not found' + LOGGER.error(msg) + return self.get_exception( + HTTPStatus.NOT_FOUND, + headers, request.format, 'NotFound', msg) + + collection_id = dataset + mfeature_id = identifier + LOGGER.debug('Processing query parameters') + + LOGGER.debug('Processing offset parameter') + try: + offset = int(request.params.get('offset')) + if offset < 0: + msg = 'offset value should be positive or zero' + return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + except TypeError as err: + LOGGER.warning(err) + offset = 0 + except ValueError: + msg = 'offset value should be an integer' + return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + LOGGER.debug('Processing limit parameter') + try: + limit = int(request.params.get('limit')) + # TODO: We should do more validation, against the min and max + # allowed by the server configuration + if limit <= 0: + msg = 'limit value should be strictly positive' + return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + if limit > 10000: + msg = 'limit value should be less than or equal to 10000' + return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + except TypeError as err: + LOGGER.warning(err) + limit = int(self.config['server']['limit']) + except ValueError: + msg = 'limit value 
should be an integer' + return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + LOGGER.debug('Processing bbox parameter') + + bbox = request.params.get('bbox') + + if bbox is None: + bbox = [] + else: + try: + bbox = validate_bbox(bbox) + except ValueError as err: + msg = str(err) + return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + leaf_ = request.params.get('leaf') + LOGGER.debug('Processing leaf parameter') + try: + leaf_ = validate_leaf(leaf_) + except ValueError as err: + msg = str(err) + return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + sub_trajectory = request.params.get('subTrajectory') + if sub_trajectory is None: + sub_trajectory = False + + if (leaf_ != '' and leaf_ is not None) \ + and (sub_trajectory or sub_trajectory == 'true'): + msg = 'Cannot use both parameter `subTrajectory` \ + and `leaf` at the same time' + return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + LOGGER.debug('Processing datetime parameter') + datetime_ = request.params.get('datetime') + try: + datetime_ = validate_datetime(datetime_) + except ValueError as err: + msg = str(err) + return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + LOGGER.debug('Querying provider') + LOGGER.debug('offset: {}'.format(offset)) + LOGGER.debug('limit: {}'.format(limit)) + LOGGER.debug('bbox: {}'.format(bbox)) + LOGGER.debug('leaf: {}'.format(leaf_)) + LOGGER.debug('datetime: {}'.format(datetime_)) + + pmdb_provider = PostgresMobilityDB() + content = { + "type": "TemporalGeometrySequence", + "geometrySequence": [], + "crs": {}, + "trs": {}, + "links": [], + } + + crs = None + trs = None + try: + pmdb_provider.connect() + result, number_matched, number_returned = pmdb_provider.\ + 
get_temporalgeometries(collection_id=collection_id, + mfeature_id=mfeature_id, + bbox=bbox, + leaf=leaf_, + datetime=datetime_, + limit=limit, + offset=offset, + sub_trajectory=sub_trajectory) + pymeos_initialize() + prisms = [] + for row in result: + temporal_geometry = json.loads(Temporal.as_mfjson( + TGeomPointSeq(str(row[3]).replace("'", "")), False)) + if 'crs' in temporal_geometry and crs is None: + crs = temporal_geometry['crs'] + if 'trs' in temporal_geometry and trs is None: + trs = temporal_geometry['trs'] + temporal_geometry = pmdb_provider\ + .convert_temporalgeometry_to_old_version( + temporal_geometry) + temporal_geometry['id'] = row[2] + + if (leaf_ != '' and leaf_ is not None) or \ + (sub_trajectory or sub_trajectory == 'true'): + if row[4] is not None: + temporal_geometry_filter = json.loads( + Temporal.as_mfjson( + TGeomPointSeq(str(row[4]).replace("'", "")), + False)) + temporal_geometry['datetimes'] = \ + temporal_geometry_filter['datetimes'] + temporal_geometry['coordinates'] = \ + temporal_geometry_filter['coordinates'] + else: + continue + # temporalGeometry['datetimes'] = [] + # temporalGeometry['coordinates'] = [] + prisms.append(temporal_geometry) + content["geometrySequence"] = prisms + except (Exception, psycopg2.Error) as error: + msg = str(error) + return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'ConnectingError', msg) + + if crs is not None: + content['crs'] = crs + else: + content['crs'] = { + "type": "Name", + "properties": "urn:ogc:def:crs:OGC:1.3:CRS84" + } + + if trs is not None: + content['trs'] = trs + else: + content['trs'] = { + "type": "Name", + "properties": "urn:ogc:data:time:iso8601" + } + + # TODO: translate titles + uri = '{}/{}/items/{}/tgsequence'.format( + self.get_collections_url(), collection_id, mfeature_id) + + serialized_query_params = '' + for k, v in request.params.items(): + if k not in ('f', 'offset'): + serialized_query_params += '&' + serialized_query_params += 
urllib.parse.quote(k, safe='') + serialized_query_params += '=' + serialized_query_params += urllib.parse.quote(str(v), safe=',') + + content['links'] = [ + {'href': '{}?offset={}{}'.format( + uri, offset, serialized_query_params), + 'rel': request.get_linkrel(F_JSON), + 'type': FORMAT_TYPES[F_JSON]}] + + if len(content['geometrySequence']) == limit: + next_ = offset + limit + content['links'].append( + {'href': '{}?offset={}{}'.format( + uri, next_, serialized_query_params), + 'type': 'application/geo+json', 'rel': 'next'}) + + content['timeStamp'] = datetime.utcnow().strftime( + '%Y-%m-%dT%H:%M:%S.%fZ') + + content['numberMatched'] = number_matched + content['numberReturned'] = len(content["geometrySequence"]) + return headers, HTTPStatus.OK, to_json(content, self.pretty_print) + + @gzip + @pre_process + def manage_collection_item_tGeometry( + self, request: Union[APIRequest, Any], + action, dataset, identifier, + tGeometry=None) -> Tuple[dict, int, str]: + """ + Adds Temporal Geometry item to a moving feature + + :param request: A request object + :param dataset: dataset name + :param identifier: moving feature's id + :param tGeometry: Temporal Geometry's id + + :returns: tuple of headers, status code, content + """ + + if not request.is_valid(PLUGINS['formatter'].keys()): + return self.get_format_exception(request) + + # Set Content-Language to system locale until provider locale + # has been determined + headers = request.get_response_headers(SYSTEM_LOCALE) + + pmdb_provider = PostgresMobilityDB() + excuted, feature_list = get_list_of_features_id() + + if excuted is False: + msg = str(feature_list) + return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'ConnectingError', msg) + + if [dataset, identifier] not in feature_list: + msg = 'Feature not found' + LOGGER.error(msg) + return self.get_exception( + HTTPStatus.NOT_FOUND, + headers, request.format, 'NotFound', msg) + + collection_id = dataset + mfeature_id = identifier + 
tGeometry_id = tGeometry + if action == 'create': + if not request.data: + msg = 'No data found' + LOGGER.error(msg) + return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + data = request.data + try: + # Parse bytes data, if applicable + data = data.decode() + LOGGER.debug(data) + except (UnicodeDecodeError, AttributeError): + pass + + try: + data = json.loads(data) + except (json.decoder.JSONDecodeError, TypeError) as err: + # Input does not appear to be valid JSON + LOGGER.error(err) + msg = 'invalid request data' + return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + if check_required_field_temporal_geometries(data) is False: + # TODO not all processes require input + msg = 'The required tag (e.g., type,prisms) \ + is missing from the request data.' + return self.get_exception( + HTTPStatus.NOT_IMPLEMENTED, + headers, request.format, 'MissingParameterValue', msg) + + LOGGER.debug('Creating item') + try: + pmdb_provider.connect() + if data['type'] == 'MovingGeometryCollection': + for tGeometry in data['prisms']: + tGeometry_id = pmdb_provider.\ + post_temporalgeometry( + collection_id, mfeature_id, tGeometry) + + else: + tGeometry_id = pmdb_provider.post_temporalgeometry( + collection_id, mfeature_id, data) + except (Exception, psycopg2.Error) as error: + msg = str(error) + return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'ConnectingError', msg) + finally: + pmdb_provider.disconnect() + + headers['Location'] = '{}/{}/items/{}/tgsequence/{}'.format( + self.get_collections_url(), dataset, mfeature_id, tGeometry_id) + + return headers, HTTPStatus.CREATED, '' + + if action == 'delete': + LOGGER.debug('Deleting item') + + try: + pmdb_provider.connect() + pmdb_provider.delete_temporalgeometry( + "AND tgeometry_id ='{0}'".format(tGeometry_id)) + + except (Exception, psycopg2.Error) as error: + msg = str(error) + return 
self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'ConnectingError', msg) + finally: + pmdb_provider.disconnect() + + return headers, HTTPStatus.NO_CONTENT, '' + + @gzip + @pre_process + def get_collection_items_tGeometry_velocity(self, + request: + Union[APIRequest, Any], + dataset, identifier, + tGeometry) \ + -> Tuple[dict, int, str]: + + headers = request.get_response_headers(SYSTEM_LOCALE) + datetime_ = request.params.get('date-time') + collection_id = dataset + mfeature_id = identifier + tgeometry_id = tGeometry + pmdb_provider = PostgresMobilityDB() + try: + datetime_ = validate_datetime(datetime_, return_type=False) + except ValueError as err: + msg = str(err) + return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + try: + pmdb_provider.connect() + print(datetime_) + content = pmdb_provider.get_velocity( + collection_id, mfeature_id, tgeometry_id, datetime_) + except (Exception, psycopg2.Error) as error: + msg = str(error) + return self.get_exception( + HTTPStatus.INTERNAL_SERVER_ERROR, + headers, request.format, 'Server Internal Error', msg) + finally: + pmdb_provider.disconnect() + + return headers, HTTPStatus.OK, content + + @gzip + @pre_process + def get_collection_items_tGeometry_distance(self, + request: + Union[APIRequest, Any], + dataset, identifier, + tGeometry) \ + -> Tuple[dict, int, str]: + + headers = request.get_response_headers(SYSTEM_LOCALE) + datetime_ = request.params.get('date-time') + collection_id = str(dataset) + mfeature_id = str(identifier) + tgeometry_id = str(tGeometry) + pmdb_provider = PostgresMobilityDB() + try: + datetime_ = validate_datetime(datetime_, return_type=False) + except ValueError as err: + msg = str(err) + return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + try: + pmdb_provider.connect() + content = pmdb_provider.get_distance( + collection_id, mfeature_id, tgeometry_id, 
datetime_) + except (Exception, psycopg2.Error) as error: + msg = str(error) + return self.get_exception( + HTTPStatus.INTERNAL_SERVER_ERROR, + headers, request.format, 'ConnectingError', msg) + finally: + pmdb_provider.disconnect() + + return headers, HTTPStatus.OK, content + + @gzip + @pre_process + def get_collection_items_tGeometry_acceleration(self, + request: + Union[APIRequest, Any], + dataset, identifier, + tGeometry) \ + -> Tuple[dict, + int, str]: + + headers = request.get_response_headers(SYSTEM_LOCALE) + datetime_ = request.params.get('date-time') + collection_id = dataset + mfeature_id = identifier + tgeometry_id = tGeometry + pmdb_provider = PostgresMobilityDB() + try: + datetime_ = validate_datetime(datetime_, return_type=False) + except ValueError as err: + msg = str(err) + return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + try: + pmdb_provider.connect() + content = pmdb_provider.get_acceleration( + collection_id, mfeature_id, tgeometry_id, datetime_) + except (Exception, psycopg2.Error) as error: + msg = str(error) + return self.get_exception( + HTTPStatus.INTERNAL_SERVER_ERROR, + headers, request.format, 'ConnectingError', msg) + finally: + pmdb_provider.disconnect() + + return headers, HTTPStatus.OK, content + + @gzip + @pre_process + def get_collection_items_tProperty(self, request: Union[APIRequest, Any], + dataset, + identifier) -> Tuple[dict, int, str]: + """ + Get temporal Properties of collection item + + :param request: A request object + :param dataset: dataset name + :param identifier: item identifier + + :returns: tuple of headers, status code, content + """ + + if not request.is_valid(): + return self.get_format_exception(request) + headers = request.get_response_headers(SYSTEM_LOCALE) + + excuted, feature_list = get_list_of_features_id() + if excuted is False: + msg = str(feature_list) + return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 
'ConnectingError', msg) + + if [dataset, identifier] not in feature_list: + msg = 'Feature not found' + LOGGER.error(msg) + return self.get_exception( + HTTPStatus.NOT_FOUND, + headers, request.format, 'NotFound', msg) + + collection_id = dataset + mfeature_id = identifier + LOGGER.debug('Processing query parameters') + + LOGGER.debug('Processing offset parameter') + try: + offset = int(request.params.get('offset')) + if offset < 0: + msg = 'offset value should be positive or zero' + return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + except TypeError as err: + LOGGER.warning(err) + offset = 0 + except ValueError: + msg = 'offset value should be an integer' + return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + LOGGER.debug('Processing limit parameter') + try: + limit = int(request.params.get('limit')) + # TODO: We should do more validation, against the min and max + # allowed by the server configuration + if limit <= 0: + msg = 'limit value should be strictly positive' + return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + if limit > 10000: + msg = 'limit value should be less than or equal to 10000' + return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + except TypeError as err: + LOGGER.warning(err) + limit = int(self.config['server']['limit']) + except ValueError: + msg = 'limit value should be an integer' + return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + LOGGER.debug('Processing datetime parameter') + datetime_ = request.params.get('datetime') + try: + datetime_ = validate_datetime(datetime_) + except ValueError as err: + msg = str(err) + return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + 
sub_temporal_value = request.params.get('subTemporalValue') + if sub_temporal_value is None: + sub_temporal_value = False + + LOGGER.debug('Querying provider') + LOGGER.debug('offset: {}'.format(offset)) + LOGGER.debug('limit: {}'.format(limit)) + LOGGER.debug('datetime: {}'.format(datetime_)) + + pmdb_provider = PostgresMobilityDB() + content = { + "temporalProperties": [], + "links": [] + } + + try: + pmdb_provider.connect() + result, number_matched, number_returned = pmdb_provider.\ + get_temporalproperties(collection_id=collection_id, + mfeature_id=mfeature_id, + datetime=datetime_, + limit=limit, offset=offset, + sub_temporal_value=sub_temporal_value) + + temporal_properties = [] + if sub_temporal_value is False or sub_temporal_value == "false": + for row in result: + temporal_property = row[3] if row[3] is not None else {} + temporal_property['name'] = row[2] + + temporal_properties.append(temporal_property) + else: + split_groups = {} + for i in range(len(result)): + group_id = str(result[i][4]) + if group_id not in split_groups: + split_groups[group_id] = [] + split_groups[group_id].append(i) + pymeos_initialize() + for key, group_row_index in split_groups.items(): + group = {} + group["datetimes"] = [] + for row_index in group_row_index: + row = result[int(row_index)] + tproperties_name = row[2] + group[tproperties_name] \ + = row[3] if row[3] is not None else {} + if row[5] is not None or row[6] is not None: + temporal_property_value = Temporal.as_mfjson( + TFloatSeq(str(row[5]).replace("'", "")), + False) if row[5] \ + is not None else Temporal.as_mfjson( + TTextSeq(str(row[6]).replace("'", "")), + False) + temporal_property_value = pmdb_provider.\ + convert_temporalproperty_value_to_base_version( + json.loads(temporal_property_value)) + + if 'datetimes' in temporal_property_value: + group["datetimes"] = \ + temporal_property_value.pop( + "datetimes", None) + group[tproperties_name].update( + temporal_property_value) + temporal_properties.append(group) + 
content["temporalProperties"] = temporal_properties + except (Exception, psycopg2.Error) as error: + msg = str(error) + return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'ConnectingError', msg) + + # TODO: translate titles + uri = '{}/{}/items/{}/tProperties'.format( + self.get_collections_url(), collection_id, mfeature_id) + + serialized_query_params = '' + for k, v in request.params.items(): + if k not in ('f', 'offset'): + serialized_query_params += '&' + serialized_query_params += urllib.parse.quote(k, safe='') + serialized_query_params += '=' + serialized_query_params += urllib.parse.quote(str(v), safe=',') + + content['links'] = [ + {'href': '{}?offset={}{}'.format( + uri, offset, serialized_query_params), + 'rel': request.get_linkrel(F_JSON), + 'type': FORMAT_TYPES[F_JSON]}] + + if len(content['temporalProperties']) == limit: + next_ = offset + limit + content['links'].append( + {'href': '{}?offset={}{}'.format( + uri, next_, serialized_query_params), + 'type': 'application/geo+json', 'rel': 'next', }) + + content['timeStamp'] = datetime.utcnow().strftime( + '%Y-%m-%dT%H:%M:%S.%fZ') + + content['numberMatched'] = number_matched + content['numberReturned'] = number_returned + return headers, HTTPStatus.OK, to_json(content, self.pretty_print) + + @gzip + @pre_process + def manage_collection_item_tProperty( + self, request: Union[APIRequest, Any], + action, dataset, identifier, + tProperty=None) -> Tuple[dict, int, str]: + """ + Adds Temporal Property item to a moving feature + + :param request: A request object + :param dataset: dataset name + :param identifier: moving feature's id + :param tProperty: Temporal Property's id + + :returns: tuple of headers, status code, content + """ + + if not request.is_valid(PLUGINS['formatter'].keys()): + return self.get_format_exception(request) + + # Set Content-Language to system locale until provider locale + # has been determined + headers = request.get_response_headers(SYSTEM_LOCALE) + + 
pmdb_provider = PostgresMobilityDB() + excuted, feature_list = get_list_of_features_id() + + if excuted is False: + msg = str(feature_list) + return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'ConnectingError', msg) + + if [dataset, identifier] not in feature_list: + msg = 'Feature not found' + LOGGER.error(msg) + return self.get_exception( + HTTPStatus.NOT_FOUND, + headers, request.format, 'NotFound', msg) + + collection_id = dataset + mfeature_id = identifier + tProperties_name = tProperty + if action == 'create': + if not request.data: + msg = 'No data found' + LOGGER.error(msg) + return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + data = request.data + try: + # Parse bytes data, if applicable + data = data.decode() + LOGGER.debug(data) + except (UnicodeDecodeError, AttributeError): + pass + + try: + if not isinstance(data, list): + data = json.loads(data) + else: + for d in data: + _ = json.loads(d) + except (json.decoder.JSONDecodeError, TypeError) as err: + # Input does not appear to be valid JSON + LOGGER.error(err) + msg = 'invalid request data' + return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + if check_required_field_temporal_property(data) is False: + # TODO not all processes require input + msg = 'The required tag (e.g., datetimes,interpolation) \ + is missing from the request data.' 
+ return self.get_exception( + HTTPStatus.NOT_IMPLEMENTED, + headers, request.format, 'MissingParameterValue', msg) + + LOGGER.debug('Creating item') + try: + pmdb_provider.connect() + # temporalProperties = data['temporalProperties'] + temporal_properties = data + temporal_properties = [temporal_properties] if not isinstance( + temporal_properties, list) else temporal_properties + + can_post = pmdb_provider.check_temporalproperty_can_post( + collection_id, mfeature_id, temporal_properties) + tProperties_name_list = [] + if can_post: + for temporalProperty in temporal_properties: + tProperties_name_list.extend( + pmdb_provider. post_temporalproperties( + collection_id, mfeature_id, temporalProperty)) + else: + return headers, HTTPStatus.BAD_REQUEST, '' + except (Exception, psycopg2.Error) as error: + msg = str(error) + return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'ConnectingError', msg) + finally: + pmdb_provider.disconnect() + + location_list = [] + for tProperties_name in tProperties_name_list: + location_list.append('{}/{}/items/{}/tProperties/{}'.format( + self.get_collections_url(), dataset, mfeature_id, + tProperties_name)) + headers['Locations'] = location_list + + return headers, HTTPStatus.CREATED, '' + + if action == 'delete': + LOGGER.debug('Deleting item') + + try: + pmdb_provider.connect() + pmdb_provider.delete_temporalproperties( + "AND tproperties_name ='{0}'".format(tProperties_name)) + + except (Exception, psycopg2.Error) as error: + msg = str(error) + return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'ConnectingError', msg) + finally: + pmdb_provider.disconnect() + + return headers, HTTPStatus.NO_CONTENT, '' + + @gzip + @pre_process + def get_collection_items_tProperty_value(self, + request: Union[APIRequest, Any], + dataset, + identifier, + tProperty) \ + -> Tuple[dict, int, str]: + """ + Get temporal Properties of collection item + + :param request: A request object + :param 
dataset: dataset name + :param identifier: item identifier + :param tProperty: Temporal Property + + :returns: tuple of headers, status code, content + """ + + if not request.is_valid(): + return self.get_format_exception(request) + headers = request.get_response_headers(SYSTEM_LOCALE) + + excuted, tproperty_list = get_list_of_tproperties_name() + if excuted is False: + msg = str(tproperty_list) + return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'ConnectingError', msg) + + if [dataset, identifier, tProperty] not in tproperty_list: + msg = 'Temporal Property not found' + LOGGER.error(msg) + return self.get_exception( + HTTPStatus.NOT_FOUND, + headers, request.format, 'NotFound', msg) + + collection_id = dataset + mfeature_id = identifier + tProperty_name = tProperty + LOGGER.debug('Processing query parameters') + + LOGGER.debug('Processing offset parameter') + try: + offset = int(request.params.get('offset')) + if offset < 0: + msg = 'offset value should be positive or zero' + return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + except TypeError as err: + LOGGER.warning(err) + offset = 0 + except ValueError: + msg = 'offset value should be an integer' + return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + LOGGER.debug('Processing limit parameter') + try: + limit = int(request.params.get('limit')) + # TODO: We should do more validation, against the min and max + # allowed by the server configuration + if limit <= 0: + msg = 'limit value should be strictly positive' + return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + if limit > 10000: + msg = 'limit value should be less than or equal to 10000' + return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + except TypeError as err: + LOGGER.warning(err) + limit 
= int(self.config['server']['limit']) + except ValueError: + msg = 'limit value should be an integer' + return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + LOGGER.debug('Processing leaf parameter') + leaf_ = request.params.get('leaf') + try: + leaf_ = validate_leaf(leaf_) + except ValueError as err: + msg = str(err) + return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + sub_temporal_value = request.params.get('subTemporalValue') + if sub_temporal_value is None: + sub_temporal_value = False + + if (leaf_ != '' and leaf_ is not None) and \ + (sub_temporal_value or sub_temporal_value == 'true'): + msg = 'Cannot use both parameter `subTemporalValue` \ + and `leaf` at the same time' + return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + LOGGER.debug('Processing datetime parameter') + datetime_ = request.params.get('datetime') + try: + datetime_ = validate_datetime(datetime_) + except ValueError as err: + msg = str(err) + return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + LOGGER.debug('Querying provider') + LOGGER.debug('offset: {}'.format(offset)) + LOGGER.debug('limit: {}'.format(limit)) + LOGGER.debug('leaf: {}'.format(leaf_)) + LOGGER.debug('datetime: {}'.format(datetime_)) + + pmdb_provider = PostgresMobilityDB() + content = {} + + try: + pmdb_provider.connect() + result = pmdb_provider.get_temporalproperties_value( + collection_id=collection_id, mfeature_id=mfeature_id, + tProperty_name=tProperty_name, + datetime=datetime_, leaf=leaf_, + sub_temporal_value=sub_temporal_value) + pymeos_initialize() + value_sequence = [] + for row in result: + content = row[3] + if row[5] is not None or row[6] is not None: + temporal_property_value = Temporal.as_mfjson( + TFloatSeq(str(row[5]).replace("'", "")), + False) if row[5] is not 
None else Temporal.as_mfjson( + TTextSeq(str(row[6]).replace("'", "")), + False) + value_sequence.append( + pmdb_provider. + convert_temporalproperty_value_to_base_version( + json.loads( + temporal_property_value))) + content["valueSequence"] = value_sequence + except (Exception, psycopg2.Error) as error: + msg = str(error) + return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'ConnectingError', msg) + + # TODO: translate titles + return headers, HTTPStatus.OK, to_json(content, self.pretty_print) + + @gzip + @pre_process + def manage_collection_item_tProperty_value( + self, request: Union[APIRequest, Any], + action, dataset, identifier, + tProperty=None) -> Tuple[dict, int, str]: + """ + Adds Temporal Property Value item to a Temporal Property + + :param request: A request object + :param dataset: dataset name + :param identifier: moving feature's id + :param tProperty: Temporal Property's id + + :returns: tuple of headers, status code, content + """ + + if not request.is_valid(PLUGINS['formatter'].keys()): + return self.get_format_exception(request) + + # Set Content-Language to system locale until provider locale + # has been determined + headers = request.get_response_headers(SYSTEM_LOCALE) + + pmdb_provider = PostgresMobilityDB() + excuted, tproperty_list = get_list_of_tproperties_name() + if excuted is False: + msg = str(tproperty_list) + return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'ConnectingError', msg) + + if [dataset, identifier, tProperty] not in tproperty_list: + msg = 'Temporal Property not found' + LOGGER.error(msg) + return self.get_exception( + HTTPStatus.NOT_FOUND, + headers, request.format, 'NotFound', msg) + + collection_id = dataset + mfeature_id = identifier + tProperty_name = tProperty + if action == 'create': + if not request.data: + msg = 'No data found' + LOGGER.error(msg) + return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', 
msg) + + data = request.data + try: + # Parse bytes data, if applicable + data = data.decode() + LOGGER.debug(data) + except (UnicodeDecodeError, AttributeError): + pass + + try: + data = json.loads(data) + except (json.decoder.JSONDecodeError, TypeError) as err: + # Input does not appear to be valid JSON + LOGGER.error(err) + msg = 'invalid request data' + return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + if check_required_field_temporal_value(data) is False: + # TODO not all processes require input + msg = 'The required tag (e.g., datetimes,value) \ + is missing from the request data.' + return self.get_exception( + HTTPStatus.NOT_IMPLEMENTED, + headers, request.format, 'MissingParameterValue', msg) + + LOGGER.debug('Creating item') + try: + pmdb_provider.connect() + can_post = pmdb_provider.check_temporalproperty_can_post( + collection_id, mfeature_id, [data], tProperty_name) + if can_post: + pValue_id = pmdb_provider.post_temporalvalue( + collection_id, mfeature_id, tProperty_name, data) + else: + return headers, HTTPStatus.BAD_REQUEST, '' + except (Exception, psycopg2.Error) as error: + msg = str(error) + return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'ConnectingError', msg) + finally: + pmdb_provider.disconnect() + headers['Location'] = '{}/{}/items/{}/tProperties/{}/pvalue/{}'\ + .format(self.get_collections_url(), dataset, mfeature_id, + tProperty_name, pValue_id) + + return headers, HTTPStatus.CREATED, '' + + def get_exception(self, status, headers, format_, code, + description) -> Tuple[dict, int, str]: + """ + Exception handler + + :param status: HTTP status code + :param headers: dict of HTTP response headers + :param format_: format string + :param code: OGC API exception code + :param description: OGC API exception code + + :returns: tuple of headers, status, and message + """ + + LOGGER.error(description) + exception = { + 'code': code, + 'description': 
description + } + + if format_ == F_HTML: + headers['Content-Type'] = FORMAT_TYPES[F_HTML] + content = render_j2_template( + self.config, 'exception.html', exception, SYSTEM_LOCALE) + else: + content = to_json(exception, self.pretty_print) + + return headers, status, content + + def get_format_exception(self, request) -> Tuple[dict, int, str]: + """ + Returns a format exception. + + :param request: An APIRequest instance. + + :returns: tuple of (headers, status, message) + """ + + # Content-Language is in the system locale (ignore language settings) + headers = request.get_response_headers(SYSTEM_LOCALE) + msg = f'Invalid format: {request.format}' + return self.get_exception( + HTTPStatus.BAD_REQUEST, headers, + request.format, 'InvalidParameterValue', msg) + + def get_collections_url(self): + return '{}/collections'.format(self.config['server']['url']) + + +def validate_bbox(value=None) -> list: + """ + Helper function to validate bbox parameter + + :param value: `list` of minx, miny, maxx, maxy + + :returns: bbox as `list` of `float` values + """ + + if value is None: + LOGGER.debug('bbox is empty') + return [] + + bbox = value.split(',') + + if len(bbox) != 4 and len(bbox) != 6: + msg = 'bbox should be 4 values (minx,miny,maxx,maxy) or \ + 6 values (minx,miny,minz,maxx,maxy,maxz)' + LOGGER.debug(msg) + raise ValueError(msg) + + try: + bbox = [float(c) for c in bbox] + except ValueError as err: + msg = 'bbox values must be numbers' + err.args = (msg,) + LOGGER.debug(msg) + raise + + if len(bbox) == 4: + if bbox[1] > bbox[3]: + msg = 'miny should be less than maxy' + LOGGER.debug(msg) + raise ValueError(msg) + + if bbox[0] > bbox[2]: + msg = 'minx is greater than maxx (possibly antimeridian bbox)' + LOGGER.debug(msg) + raise ValueError(msg) + + if len(bbox) == 6: + if bbox[2] > bbox[5]: + msg = 'minz should be less than maxz' + LOGGER.debug(msg) + raise ValueError(msg) + + if bbox[1] > bbox[4]: + msg = 'miny should be less than maxy' + LOGGER.debug(msg) + raise 
def validate_bbox(value=None) -> list:
    """
    Helper function to validate bbox parameter

    :param value: `str` of comma-separated coordinates:
                  minx,miny,maxx,maxy or minx,miny,minz,maxx,maxy,maxz
                  (the original docstring wrongly described it as a `list`)

    :returns: bbox as `list` of `float` values ([] when value is None)

    :raises ValueError: on wrong arity, non-numeric values or
                        inverted min/max pairs
    """

    if value is None:
        LOGGER.debug('bbox is empty')
        return []

    bbox = value.split(',')

    if len(bbox) != 4 and len(bbox) != 6:
        msg = 'bbox should be 4 values (minx,miny,maxx,maxy) or \
            6 values (minx,miny,minz,maxx,maxy,maxz)'
        LOGGER.debug(msg)
        raise ValueError(msg)

    try:
        bbox = [float(c) for c in bbox]
    except ValueError as err:
        msg = 'bbox values must be numbers'
        err.args = (msg,)
        LOGGER.debug(msg)
        raise

    if len(bbox) == 4:
        if bbox[1] > bbox[3]:
            msg = 'miny should be less than maxy'
            LOGGER.debug(msg)
            raise ValueError(msg)

        if bbox[0] > bbox[2]:
            msg = 'minx is greater than maxx (possibly antimeridian bbox)'
            LOGGER.debug(msg)
            raise ValueError(msg)

    if len(bbox) == 6:
        if bbox[2] > bbox[5]:
            msg = 'minz should be less than maxz'
            LOGGER.debug(msg)
            raise ValueError(msg)

        if bbox[1] > bbox[4]:
            msg = 'miny should be less than maxy'
            LOGGER.debug(msg)
            raise ValueError(msg)

        if bbox[0] > bbox[3]:
            msg = 'minx is greater than maxx (possibly antimeridian bbox)'
            LOGGER.debug(msg)
            raise ValueError(msg)

    return bbox


def validate_leaf(leaf_=None) -> str:
    """
    Helper function to validate the `leaf` parameter

    :param leaf_: `str` of comma-separated datetimes; must be strictly
                  increasing (the original docstring was copied from
                  validate_datetime and documented nonexistent params)

    :returns: normalized `str` of comma-separated
              '%Y-%m-%d %H:%M:%S.%f' instants (None when input is None)

    :raises ValueError: when the instants are not strictly increasing
    """

    # TODO: pass datetime to query as a `datetime` object
    # we would need to ensure partial dates work accordingly
    # as well as setting '..' values to `None` so that underlying
    # providers can just assume a `datetime.datetime` object
    #
    # NOTE: needs testing when passing partials from API to backend

    unix_epoch = datetime(1970, 1, 1, 0, 0, 0)
    dateparse_ = partial(dateparse, default=unix_epoch)

    leaf_invalid = False

    if leaf_ is not None:
        LOGGER.debug('detected leaf_')
        LOGGER.debug('Validating time windows')
        leaf_list = leaf_.split(',')

        leaf_ = ''
        if (len(leaf_list) > 0):
            datetime_ = dateparse_(leaf_list[0])
            leaf_ = datetime_.strftime('%Y-%m-%d %H:%M:%S.%f')

        for i in range(1, len(leaf_list)):
            datetime_pre = dateparse_(leaf_list[i - 1])
            datetime_ = dateparse_(leaf_list[i])

            # naive instants are assumed UTC
            if datetime_pre != '..':
                if datetime_pre.tzinfo is None:
                    datetime_pre = datetime_pre.replace(tzinfo=pytz.UTC)

            if datetime_ != '..':
                if datetime_.tzinfo is None:
                    datetime_ = datetime_.replace(tzinfo=pytz.UTC)

            if datetime_pre >= datetime_:
                leaf_invalid = True
                break
            leaf_ += ',' + datetime_.strftime('%Y-%m-%d %H:%M:%S.%f')

    if leaf_invalid:
        msg = 'invalid leaf'
        LOGGER.debug(msg)
        raise ValueError(msg)
    return leaf_
def validate_datetime(datetime_=None, return_type=True) -> str:
    """
    Helper function to validate the `datetime` parameter

    :param datetime_: `str` of datetime parameter — a single instant or
                      a 'begin/end' interval with '..' for open ends
    :param return_type: when True an instant is expanded to a
                        'begin,end' pair; when False a bare instant
                        string is returned

    :returns: normalized `str` (input returned unchanged when empty)

    :raises ValueError: when the interval is out of range
    """

    # TODO: pass datetime to query as a `datetime` object
    # we would need to ensure partial dates work accordingly
    # as well as setting '..' values to `None` so that underlying
    # providers can just assume a `datetime.datetime` object
    #
    # NOTE: needs testing when passing partials from API to backend

    datetime_invalid = False

    if datetime_ is not None and datetime_ != '':
        dateparse_begin = partial(dateparse, default=datetime.min)
        dateparse_end = partial(dateparse, default=datetime.max)
        unix_epoch = datetime(1970, 1, 1, 0, 0, 0)
        dateparse_ = partial(dateparse, default=unix_epoch)

        if '/' in datetime_:  # envelope
            LOGGER.debug('detected time range')
            LOGGER.debug('Validating time windows')

            # normalize "" to ".." (actually changes datetime_)
            datetime_ = re.sub(r'^/', '../', datetime_)
            datetime_ = re.sub(r'/$', '/..', datetime_)

            datetime_begin, datetime_end = datetime_.split('/')
            if datetime_begin != '..':
                datetime_begin = dateparse_begin(datetime_begin)
                if datetime_begin.tzinfo is None:
                    datetime_begin = datetime_begin.replace(
                        tzinfo=pytz.UTC)
            else:
                # open start -> earliest representable instant
                datetime_begin = datetime(1, 1, 1, 0, 0, 0).replace(
                    tzinfo=pytz.UTC)

            if datetime_end != '..':
                datetime_end = dateparse_end(datetime_end)
                if datetime_end.tzinfo is None:
                    datetime_end = datetime_end.replace(tzinfo=pytz.UTC)
            else:
                # open end -> far-future sentinel
                datetime_end = datetime(9999, 1, 1, 0, 0, 0).replace(
                    tzinfo=pytz.UTC)

            datetime_invalid = datetime_begin > datetime_end
            datetime_ = datetime_begin.strftime(
                '%Y-%m-%d %H:%M:%S.%f') + ',' + \
                datetime_end.strftime('%Y-%m-%d %H:%M:%S.%f')
        else:  # time instant
            LOGGER.debug('detected time instant')
            instant = dateparse_(datetime_)
            if instant != '..':
                if instant.tzinfo is None:
                    instant = instant.replace(tzinfo=pytz.UTC)
            datetime_invalid = instant == '..'
            if return_type:
                datetime_ = instant.strftime(
                    '%Y-%m-%d %H:%M:%S.%f') + ',' + \
                    instant.strftime('%Y-%m-%d %H:%M:%S.%f')
            else:
                datetime_ = instant.strftime('%Y-%m-%d %H:%M:%S.%f')

    if datetime_invalid:
        msg = 'datetime parameter out of range'
        LOGGER.debug(msg)
        raise ValueError(msg)
    return datetime_


def get_list_of_collections_id():
    """Return (True, [collection_id, ...]) or (False, error)."""
    pmdb_provider = PostgresMobilityDB()
    try:
        pmdb_provider.connect()
        rows = pmdb_provider.get_collections_list()
        return True, [row[0] for row in rows]
    except (Exception, psycopg2.Error) as error:
        return False, error
    finally:
        pmdb_provider.disconnect()


def get_list_of_features_id():
    """Return (True, [[collection_id, mfeature_id], ...]) or (False, error)."""
    pmdb_provider = PostgresMobilityDB()
    try:
        pmdb_provider.connect()
        rows = pmdb_provider.get_features_list()
        return True, [[row[0], row[1]] for row in rows]
    except (Exception, psycopg2.Error) as error:
        return False, error
    finally:
        pmdb_provider.disconnect()


def get_list_of_tproperties_name():
    """Return (True, [[collection_id, mfeature_id, name], ...]) or
    (False, error)."""
    pmdb_provider = PostgresMobilityDB()
    try:
        pmdb_provider.connect()
        rows = pmdb_provider.get_tProperties_name_list()
        return True, [[row[0], row[1], row[2]] for row in rows]
    except (Exception, psycopg2.Error) as error:
        return False, error
    finally:
        pmdb_provider.disconnect()


def check_required_field_feature(feature):
    """Validate that a moving-feature document carries the mandatory
    members, or is a FeatureCollection."""
    if feature.get('type') == 'FeatureCollection':
        return True
    if 'type' not in feature or 'temporalGeometry' not in feature:
        return False
    if not check_required_field_temporal_geometries(
            feature['temporalGeometry']):
        return False
    if 'temporalProperties' in feature and not \
            check_required_field_temporal_property(
                feature['temporalProperties']):
        return False
    if 'geometry' in feature and not check_required_field_geometries(
            feature['geometry']):
        return False
    if 'crs' in feature and not check_required_field_crs(feature['crs']):
        return False
    if 'trs' in feature and not check_required_field_trs(feature['trs']):
        return False
    return True
def check_required_field_geometries(geometry):
    """Return True if *geometry* is a valid single geometry or a valid
    geometry collection."""
    return (check_required_field_geometry_array(geometry)
            or check_required_field_geometry_single(geometry))


def check_required_field_geometry_array(geometry):
    """Return True if *geometry* is a collection whose members are all
    valid single geometries."""
    if 'type' not in geometry or 'geometries' not in geometry:
        return False
    geometries = geometry['geometries']
    if not isinstance(geometries, list):
        geometries = [geometries]
    return all(check_required_field_geometry_single(g) for g in geometries)


def check_required_field_geometry_single(geometry):
    """Return True if *geometry* has the mandatory 'type' and
    'coordinates' members."""
    return 'type' in geometry and 'coordinates' in geometry


def check_required_field_temporal_geometries(temporal_geometries):
    """Return True if *temporal_geometries* is a valid temporal geometry
    collection or a valid single temporal geometry."""
    return (check_required_field_temporal_geometry_array(temporal_geometries)
            or check_required_field_temporal_geometry_single(
                temporal_geometries))


def check_required_field_temporal_geometry_array(temporal_geometries):
    """Return True if *temporal_geometries* is a valid
    MovingGeometryCollection (mandatory 'type' and 'prisms', every prism
    valid, optional collection-level crs/trs well-formed)."""
    if ('type' not in temporal_geometries
            or 'prisms' not in temporal_geometries):
        return False
    prisms = temporal_geometries['prisms']
    if not isinstance(prisms, list):
        prisms = [prisms]
    for temporal_geometry in prisms:
        if check_required_field_temporal_geometry_single(
                temporal_geometry) is False:
            return False
    # Bug fix: validate the collection-level crs/trs. The original
    # indexed the loop variable (`temporal_geometry['crs']`), i.e. the
    # LAST prism, raising KeyError whenever the collection declared a
    # crs/trs the last prism lacked.
    if 'crs' in temporal_geometries:
        if check_required_field_crs(temporal_geometries['crs']) is False:
            return False
    if 'trs' in temporal_geometries:
        if check_required_field_trs(temporal_geometries['trs']) is False:
            return False
    return True


def check_required_field_temporal_geometry_single(temporal_geometry):
    """Return True if *temporal_geometry* has the mandatory 'type',
    'datetimes' and 'coordinates' members and any optional crs/trs are
    well-formed."""
    if ('type' not in temporal_geometry
            or 'datetimes' not in temporal_geometry
            or 'coordinates' not in temporal_geometry):
        return False
    if 'crs' in temporal_geometry:
        if check_required_field_crs(temporal_geometry['crs']) is False:
            return False
    if 'trs' in temporal_geometry:
        if check_required_field_trs(temporal_geometry['trs']) is False:
            return False
    return True


def check_required_field_temporal_property(temporal_properties):
    """Return True if every temporal property object has 'datetimes' and
    each named property carries 'values' and 'interpolation'."""
    if not isinstance(temporal_properties, list):
        temporal_properties = [temporal_properties]
    for temporal_property in temporal_properties:
        if 'datetimes' not in temporal_property:
            return False
        for tproperties_name, body in temporal_property.items():
            if tproperties_name == 'datetimes':
                continue
            if 'values' not in body or 'interpolation' not in body:
                return False
    return True


def check_required_field_temporal_value(temporalValue):
    """Return True if *temporalValue* has the mandatory 'datetimes',
    'values' and 'interpolation' members."""
    return ('datetimes' in temporalValue
            and 'values' in temporalValue
            and 'interpolation' in temporalValue)


def check_required_field_crs(crs):
    """Return True if *crs* has the mandatory 'type' and 'properties'."""
    return 'type' in crs and 'properties' in crs


def check_required_field_trs(trs):
    """Return True if *trs* has the mandatory 'type' and 'properties'."""
    return 'type' in trs and 'properties' in trs
http import HTTPStatus +from pygeoapi.util import yaml_load + +from tests.util import get_test_file_path, mock_request + + +@pytest.fixture() +def config(): + with open(get_test_file_path('example-config.yml')) as fh: + return yaml_load(fh) + + +@pytest.fixture() +def openapi(): + with open(get_test_file_path('example-openapi.yml')) as fh: + return yaml_load(fh) + +# @pytest.fixture(scope="session") +# def context(): +# return { +# 'collection_id':'f81e0521-cf63-4cc5-b690-3daf1f326104', +# 'mfeature_id':'196695e8-b79b-4655-a1e0-b90de887f205', +# 'tgeometry_id':'1d2edbdc-717a-4fcb-94ad-19a00ee208e0', +# 'tProperty_name':'labels' +# } + + +@pytest.fixture(scope="session") +def context(): + return {} + + +@pytest.fixture() +def collection_property(): + return { + "title": "moving_feature_collection_sample", + "updateFrequency": 1000, + "description": "example" + } + + +@pytest.fixture() +def update_collection_property(): + return { + "title": "moving_feature_collection_sample", + "updateFrequency": 1000, + "description": "test_update" + } + + +@pytest.fixture() +def movingfeature(): + return { + "type": "Feature", + "crs": { + "type": "Name", + "properties": { + "name": "urn:ogc:def:crs:OGC:1.3:CRS84" + } + }, + "trs": { + "type": "Link", + "properties": { + "type": "OGCDEF", + "href": "http://www.opengis.net/def/uom/ISO-8601/0/Gregorian" + } + }, + "temporalGeometry": { + "type": "MovingPoint", + "datetimes": [ + "2011-07-14T22:01:01Z", + "2011-07-14T22:01:02Z", + "2011-07-14T22:01:03Z", + "2011-07-14T22:01:04Z", + "2011-07-14T22:01:05Z" + ], + "coordinates": [ + [ + 139.757083, + 35.627701, + 0.5 + ], + [ + 139.757399, + 35.627701, + 2 + ], + [ + 139.757555, + 35.627688, + 4 + ], + [ + 139.757651, + 35.627596, + 4 + ], + [ + 139.757716, + 35.627483, + 4 + ] + ], + "interpolation": "Linear", + "base": { + "type": "glTF", + "href": "http://www.opengis.net/spec/movingfeatures/json/1.0/prism/example/car3dmodel.gltf" # noqa + }, + "orientations": [ + { + "scales": [ + 1, 
+ 1, + 1 + ], + "angles": [ + 0, + 0, + 0 + ] + }, + { + "scales": [ + 1, + 1, + 1 + ], + "angles": [ + 0, + 355, + 0 + ] + }, + { + "scales": [ + 1, + 1, + 1 + ], + "angles": [ + 0, + 0, + 330 + ] + }, + { + "scales": [ + 1, + 1, + 1 + ], + "angles": [ + 0, + 0, + 300 + ] + }, + { + "scales": [ + 1, + 1, + 1 + ], + "angles": [ + 0, + 0, + 270 + ] + } + ] + }, + "temporalProperties": [ + { + "datetimes": [ + "2011-07-14T22:01:01.450Z", + "2011-07-14T23:01:01.450Z", + "2011-07-15T00:01:01.450Z" + ], + "length": { + "type": "Measure", + "form": "http://www.qudt.org/qudt/owl/1.0.0/quantity/Length", # noqa + "values": [ + 1, + 2.4, + 1 + ], + "interpolation": "Linear" + }, + "discharge": { + "type": "Measure", + "form": "MQS", + "values": [ + 3, + 4, + 5 + ], + "interpolation": "Step" + } + }, + { + "datetimes": [ + 1465621816590, + 1465711526300 + ], + "camera": { + "type": "Image", + "values": [ + "http://www.opengis.net/spec/movingfeatures/json/1.0/prism/example/image1", # noqa + "iVBORw0KGgoAAAANSUhEU......" 
+ ], + "interpolation": "Discrete" + }, + "labels": { + "type": "Text", + "values": [ + "car", + "human" + ], + "interpolation": "Discrete" + } + } + ], + "geometry": { + "type": "LineString", + "coordinates": [ + [ + 139.757083, + 35.627701, + 0.5 + ], + [ + 139.757399, + 35.627701, + 2 + ], + [ + 139.757555, + 35.627688, + 4 + ], + [ + 139.757651, + 35.627596, + 4 + ], + [ + 139.757716, + 35.627483, + 4 + ] + ] + }, + "properties": { + "name": "car1", + "state": "test1", + "video": "http://www.opengis.net/spec/movingfeatures/json/1.0/prism/example/video.mpeg" # noqa + }, + "bbox": [ + 139.757083, + 35.627483, + 0, + 139.757716, + 35.627701, + 4.5 + ], + "time": [ + "2011-07-14T22:01:01Z", + "2011-07-15T01:11:22Z" + ], + "id": "mf-1" + } + + +@pytest.fixture() +def temporalgeometry(): + return { + "type": "MovingPoint", + "datetimes": [ + "2011-07-14T22:01:06Z", + "2011-07-14T22:01:07Z", + "2011-07-14T22:01:08Z", + "2011-07-14T22:01:09Z", + "2011-07-14T22:01:10Z" + ], + "coordinates": [ + [ + 139.757083, + 35.627701, + 0.5 + ], + [ + 139.757399, + 35.627701, + 2 + ], + [ + 139.757555, + 35.627688, + 4 + ], + [ + 139.757651, + 35.627596, + 4 + ], + [ + 139.757716, + 35.627483, + 4 + ] + ], + "interpolation": "Linear", + "base": { + "type": "glTF", + "href": "https://www.opengis.net/spec/movingfeatures/json/1.0/prism/example/car3dmodel.gltf" # noqa + }, + "orientations": [ + { + "scales": [ + 1, + 1, + 1 + ], + "angles": [ + 0, + 0, + 0 + ] + }, + { + "scales": [ + 1, + 1, + 1 + ], + "angles": [ + 0, + 355, + 0 + ] + }, + { + "scales": [ + 1, + 1, + 1 + ], + "angles": [ + 0, + 0, + 330 + ] + }, + { + "scales": [ + 1, + 1, + 1 + ], + "angles": [ + 0, + 0, + 300 + ] + }, + { + "scales": [ + 1, + 1, + 1 + ], + "angles": [ + 0, + 0, + 270 + ] + } + ] + } + + +@pytest.fixture() +def temporalproperties(): + return [ + { + "datetimes": [ + "2011-07-16T22:01:01.450Z", + "2011-07-16T23:01:01.450Z", + "2011-07-17T00:01:01.450Z" + ], + "length": { + "type": "Measure", + 
"form": "http://www.qudt.org/qudt/owl/1.0.0/quantity/Length", + "values": [ + 1, + 2.4, + 1 + ], + "interpolation": "Linear" + }, + "discharge": { + "type": "Measure", + "form": "MQS", + "values": [ + 3, + 4, + 5 + ], + "interpolation": "Step" + } + }, + { + "datetimes": [ + "2011-07-16T22:01:01.450Z", + "2011-07-16T23:01:01.450Z" + ], + "camera": { + "type": "Image", + "values": [ + "http://www.opengis.net/spec/movingfeatures/json/1.0/prism/example/image1", # noqa + "iVBORw0KGgoAAAANSUhEU......" + ], + "interpolation": "Discrete" + }, + "labels": { + "type": "Text", + "values": [ + "car", + "human" + ], + "interpolation": "Discrete" + } + } + ] + + +@pytest.fixture() +def temporalvalue_data(): + return { + "datetimes": [ + "2011-07-18T08:00:00Z", + "2011-07-18T08:00:01Z", + "2011-07-18T08:00:02Z" + ], + "values": [ + 0, + 20, + 50 + ], + "interpolation": "Linear" + } + + +def test_manage_collection_create( + config, + openapi, + collection_property, + context): + mf = MOVING_FEATURES(config, openapi) + + # missing request data + req = mock_request() + rsp_headers, code, response = mf.manage_collection(req, 'create') + assert code == HTTPStatus.BAD_REQUEST + + # invalid request data + req = mock_request() + req.data = 'Invalid data. 
Valid parameter is JSON' + rsp_headers, code, response = mf.manage_collection(req, 'create') + assert code == HTTPStatus.BAD_REQUEST + + # successful request data + req = mock_request() + req.data = json.dumps(collection_property) + rsp_headers, code, response = mf.manage_collection(req, 'create') + assert code == HTTPStatus.CREATED + assert response == '' + assert rsp_headers['Content-Type'] == 'application/json' + assert 'Location' in rsp_headers + + location = rsp_headers['Location'] + collection_id = location.split('/')[-1] + assert collection_id is not None + context['collection_id'] = collection_id + + +def test_manage_collection_item_create( + config, openapi, movingfeature, context): + mf = MOVING_FEATURES(config, openapi) + + # collection not found + req = mock_request() + rsp_headers, code, response = mf.manage_collection_item( + req, 'create', '00000000-0000-0000-0000-000000000000') + assert code == HTTPStatus.NOT_FOUND + + # no data found + req = mock_request() + rsp_headers, code, response = mf.manage_collection_item( + req, 'create', context['collection_id']) + assert code == HTTPStatus.BAD_REQUEST + + # invalid request data + req = mock_request() + req.data = 'data' + rsp_headers, code, response = mf.manage_collection_item( + req, 'create', context['collection_id']) + assert code == HTTPStatus.BAD_REQUEST + + # The required tag (e.g., type,temporalgeometry) + # is missing from the request data. 
+ missing_data = dict(movingfeature) + del missing_data['temporalGeometry'] + + req = mock_request() + req.data = json.dumps(missing_data) + rsp_headers, code, response = mf.manage_collection_item( + req, 'create', context['collection_id']) + assert code == HTTPStatus.NOT_IMPLEMENTED + + # successful request data + req = mock_request() + req.data = json.dumps(movingfeature) + rsp_headers, code, response = mf.manage_collection_item( + req, 'create', context['collection_id']) + + assert code == HTTPStatus.CREATED + assert response == '' + assert rsp_headers['Content-Type'] == 'application/json' + assert 'Location' in rsp_headers + + location = rsp_headers['Location'] + mfeature_id = location.split('/')[-1] + assert mfeature_id is not None + context['mfeature_id'] = mfeature_id + + +def test_manage_collection_item_tGeometry_create( + config, openapi, temporalgeometry, context): + mf = MOVING_FEATURES(config, openapi) + + # feature not found + req = mock_request() + rsp_headers, code, response = mf.manage_collection_item_tGeometry( + req, 'create', '00000000-0000-0000-0000-000000000000', + '00000000-0000-0000-0000-000000000000') + assert code == HTTPStatus.NOT_FOUND + + # no data found + req = mock_request() + rsp_headers, code, response = mf.manage_collection_item_tGeometry( + req, 'create', context['collection_id'], context['mfeature_id']) + assert code == HTTPStatus.BAD_REQUEST + + # invalid request data + req = mock_request() + req.data = 'data' + rsp_headers, code, response = mf.manage_collection_item_tGeometry( + req, 'create', context['collection_id'], context['mfeature_id']) + assert code == HTTPStatus.BAD_REQUEST + + # The required tag (e.g., type,prisms) + # is missing from the request data. 
+ missing_data = dict(temporalgeometry) + del missing_data['type'] + + req = mock_request() + req.data = json.dumps(missing_data) + rsp_headers, code, response = mf.manage_collection_item_tGeometry( + req, 'create', context['collection_id'], context['mfeature_id']) + assert code == HTTPStatus.NOT_IMPLEMENTED + + # successful request data + req = mock_request() + req.data = json.dumps(temporalgeometry) + rsp_headers, code, response = mf.manage_collection_item_tGeometry( + req, 'create', context['collection_id'], context['mfeature_id']) + + assert code == HTTPStatus.CREATED + assert response == '' + assert rsp_headers['Content-Type'] == 'application/json' + assert 'Location' in rsp_headers + + location = rsp_headers['Location'] + tgeometry_id = location.split('/')[-1] + assert tgeometry_id is not None + context['tgeometry_id'] = tgeometry_id + + +def test_manage_collection_item_tProperty_create( + config, openapi, temporalproperties, context): + mf = MOVING_FEATURES(config, openapi) + + # feature not found + req = mock_request() + rsp_headers, code, response = mf.manage_collection_item_tProperty( + req, 'create', '00000000-0000-0000-0000-000000000000', + '00000000-0000-0000-0000-000000000000') + assert code == HTTPStatus.NOT_FOUND + + # no data found + req = mock_request() + rsp_headers, code, response = mf.manage_collection_item_tProperty( + req, 'create', context['collection_id'], context['mfeature_id']) + assert code == HTTPStatus.BAD_REQUEST + + # invalid request data + req = mock_request() + req.data = 'data' + rsp_headers, code, response = mf.manage_collection_item_tProperty( + req, 'create', context['collection_id'], context['mfeature_id']) + assert code == HTTPStatus.BAD_REQUEST + + # The required tag (e.g., datetimes,interpolation) + # is missing from the request data. 
+ missing_data = [] + for temporalproperty in temporalproperties: + missing_data.append(dict(temporalproperty)) + del missing_data[0]['datetimes'] + + req = mock_request() + req.data = json.dumps(missing_data, indent=2) + rsp_headers, code, response = mf.manage_collection_item_tProperty( + req, 'create', context['collection_id'], context['mfeature_id']) + assert code == HTTPStatus.NOT_IMPLEMENTED + + # successful request data + req = mock_request() + req.data = json.dumps(temporalproperties, indent=2) + rsp_headers, code, response = mf.manage_collection_item_tProperty( + req, 'create', context['collection_id'], context['mfeature_id']) + + assert code == HTTPStatus.CREATED + assert response == '' + assert rsp_headers['Content-Type'] == 'application/json' + assert 'Locations' in rsp_headers + + location = rsp_headers['Locations'] + assert len(location) == 4 + tProperty_name = location[-1].split('/')[-1] + assert tProperty_name is not None + context['tProperty_name'] = tProperty_name + + +def test_manage_collection_item_tProperty_value_create( + config, openapi, temporalvalue_data, context): + mf = MOVING_FEATURES(config, openapi) + + # temporal property not found + req = mock_request() + rsp_headers, code, response = mf.manage_collection_item_tProperty_value( + req, 'create', '00000000-0000-0000-0000-000000000000', + '00000000-0000-0000-0000-000000000000', '') + assert code == HTTPStatus.NOT_FOUND + + # no data found + req = mock_request() + rsp_headers, code, response = mf.manage_collection_item_tProperty_value( + req, 'create', context['collection_id'], context['mfeature_id'], + context['tProperty_name']) + assert code == HTTPStatus.BAD_REQUEST + + # invalid request data + req = mock_request() + req.data = 'data' + rsp_headers, code, response = mf.manage_collection_item_tProperty_value( + req, 'create', context['collection_id'], context['mfeature_id'], + context['tProperty_name']) + assert code == HTTPStatus.BAD_REQUEST + + # The required tag (e.g., 
datetimes,interpolation) + # is missing from the request data. + missing_data = dict(temporalvalue_data) + del missing_data['datetimes'] + + req = mock_request() + req.data = json.dumps(missing_data) + rsp_headers, code, response = mf.manage_collection_item_tProperty_value( + req, 'create', context['collection_id'], context['mfeature_id'], + context['tProperty_name']) + assert code == HTTPStatus.NOT_IMPLEMENTED + + # successful request data + req = mock_request() + req.data = json.dumps(temporalvalue_data) + rsp_headers, code, response = mf.manage_collection_item_tProperty_value( + req, 'create', context['collection_id'], context['mfeature_id'], + context['tProperty_name']) + + assert code == HTTPStatus.CREATED + assert response == '' + assert rsp_headers['Content-Type'] == 'application/json' + assert 'Location' in rsp_headers + + +def test_manage_collection_update( + config, + openapi, + update_collection_property, + context): + mf = MOVING_FEATURES(config, openapi) + + # missing request data + req = mock_request() + rsp_headers, code, response = mf.manage_collection( + req, 'update', context['collection_id']) + assert code == HTTPStatus.BAD_REQUEST + + # invalid request data + req = mock_request() + req.data = 'data' + rsp_headers, code, response = mf.manage_collection( + req, 'update', context['collection_id']) + assert code == HTTPStatus.BAD_REQUEST + + # successful request data + req = mock_request() + req.data = json.dumps(update_collection_property) + rsp_headers, code, response = mf.manage_collection( + req, 'update', context['collection_id']) + + assert code == HTTPStatus.NO_CONTENT + assert response == '' + + +def test_get_collection_items(config, openapi, context): + mf = MOVING_FEATURES(config, openapi) + + # not found + req = mock_request() + rsp_headers, code, response = mf.get_collection_items( + req, '00000000-0000-0000-0000-000000000000') + assert code == HTTPStatus.NOT_FOUND + + # offset value should be positive or zero + req = 
mock_request({'offset': -1}) + rsp_headers, code, response = mf.get_collection_items( + req, context['collection_id']) + assert code == HTTPStatus.BAD_REQUEST + + # offset value should be an integer + req = mock_request({'offset': 'one'}) + rsp_headers, code, response = mf.get_collection_items( + req, context['collection_id']) + assert code == HTTPStatus.BAD_REQUEST + + # limit value should be strictly positive + req = mock_request({'offset': 0, 'limit': 0}) + rsp_headers, code, response = mf.get_collection_items( + req, context['collection_id']) + assert code == HTTPStatus.BAD_REQUEST + + # limit value should be less than or equal to 10000 + req = mock_request({'offset': 0, 'limit': 10001}) + rsp_headers, code, response = mf.get_collection_items( + req, context['collection_id']) + assert code == HTTPStatus.BAD_REQUEST + + # limit value should be an integer + req = mock_request({'offset': 0, 'limit': 'one'}) + rsp_headers, code, response = mf.get_collection_items( + req, context['collection_id']) + assert code == HTTPStatus.BAD_REQUEST + + # bbox values must be numbers + req = mock_request( + {'offset': 0, 'limit': 10, 'bbox': 'one,two,three,four'}) + rsp_headers, code, response = mf.get_collection_items( + req, context['collection_id']) + assert code == HTTPStatus.BAD_REQUEST + + # bbox should be 4 values (minx,miny,maxx,maxy) or 6 values + # (minx,miny,minz,maxx,maxy,maxz) + req = mock_request({'offset': 0, 'limit': 10, 'bbox': '100,30,0,200,40'}) + rsp_headers, code, response = mf.get_collection_items( + req, context['collection_id']) + assert code == HTTPStatus.BAD_REQUEST + + # minx is greater than maxx (possibly antimeridian bbox) + req = mock_request( + {'offset': 0, 'limit': 10, 'bbox': '200,30,0,100,40,10'}) + rsp_headers, code, response = mf.get_collection_items( + req, context['collection_id']) + assert code == HTTPStatus.BAD_REQUEST + + # datetime parameter out of range + req = mock_request({'offset': 0, + 'limit': 10, + 'bbox': '100,30,0,200,40,10', + 
'datetime': '2011-07-14T23:01:01.000Z/2011-07-14T22:01:01.000Z'}) # noqa + rsp_headers, code, response = mf.get_collection_items( + req, context['collection_id']) + assert code == HTTPStatus.BAD_REQUEST + + req = mock_request({'offset': 0, + 'limit': 10, + 'bbox': '100,30,0,200,40,10', + 'datetime': '2011-07-14T22:01:01.000Z/2011-07-14T23:01:01.000Z', # noqa + 'subTrajectory': 'true'}) + rsp_headers, code, response = mf.get_collection_items( + req, context['collection_id']) + assert code == HTTPStatus.OK + + assert rsp_headers['Content-Type'] == 'application/json' + collection = json.loads(response) + + # check response data + assert 'type' in collection + assert 'features' in collection + assert len(collection['features']) == 1 + + mfeature = collection['features'][0] + assert 'id' in mfeature + assert 'type' in mfeature + assert mfeature['type'] == 'Feature' + assert 'properties' in mfeature + + assert 'geometry' in mfeature + assert 'type' in mfeature['geometry'] + assert 'coordinates' in mfeature['geometry'] + + assert 'temporalGeometry' in mfeature + assert len(mfeature['temporalGeometry']) == 2 + temporal_geometry = mfeature['temporalGeometry'][0] + assert 'type' in temporal_geometry + assert temporal_geometry['type'] == 'MovingPoint' + assert 'datetimes' in temporal_geometry + assert 'interpolation' in temporal_geometry + assert 'id' in temporal_geometry + + assert 'bbox' in mfeature + assert mfeature['bbox'] == [ + 139.757083, + 35.627483, + 0.5, + 139.757716, + 35.627701, + 4] + assert 'time' in mfeature + assert mfeature['time'] == ["2011-07-14T22:01:01Z", "2011-07-15T01:11:22Z"] + + assert 'crs' in collection + assert 'trs' in collection + + assert 'links' in collection + assert len(collection['links']) == 1 + + assert 'timeStamp' in collection + assert 'numberMatched' in collection + assert collection['numberMatched'] == 1 + assert 'numberReturned' in collection + assert collection['numberReturned'] == 1 + + +def test_get_collection(config, openapi, 
context): + mf = MOVING_FEATURES(config, openapi) + + # not found + req = mock_request() + rsp_headers, code, response = mf.get_collection( + req, '00000000-0000-0000-0000-000000000000') + assert code == HTTPStatus.NOT_FOUND + + # successful data + req = mock_request() + rsp_headers, code, response = mf.get_collection( + req, context['collection_id']) + assert code == HTTPStatus.OK + + assert rsp_headers['Content-Type'] == 'application/json' + collection = json.loads(response) + + assert 'id' in collection + assert 'itemType' in collection + assert collection['itemType'] == 'movingfeature' + + assert 'title' in collection + assert collection['title'] == 'moving_feature_collection_sample' + assert 'updateFrequency' in collection + assert collection['updateFrequency'] == 1000 + assert 'description' in collection + assert collection['description'] == 'test_update' + + assert 'extent' in collection + assert collection['extent']['spatial']['bbox'] == [ + 139.757083, 35.627483, 0.5, 139.757716, 35.627701, 4] + assert collection['extent']['spatial']['crs'] == \ + 'http://www.opengis.net/def/crs/OGC/1.3/CRS84' + assert collection['extent']['temporal']['interval'] == \ + ["2011-07-14T22:01:01Z", "2011-07-15T01:11:22Z"] + assert collection['extent']['temporal']['trs'] == \ + 'http://www.opengis.net/def/uom/ISO-8601/0/Gregorian' + + assert 'links' in collection + assert len(collection['links']) == 1 + + +def test_get_collection_item(config, openapi, context): + mf = MOVING_FEATURES(config, openapi) + + # not found + req = mock_request() + rsp_headers, code, response = mf.get_collection_item( + req, '00000000-0000-0000-0000-000000000000', + '00000000-0000-0000-0000-000000000000') + assert code == HTTPStatus.NOT_FOUND + + # successful data + rsp_headers, code, response = mf.get_collection_item( + req, context['collection_id'], context['mfeature_id']) + assert code == HTTPStatus.OK + + assert rsp_headers['Content-Type'] == 'application/json' + mfeature = json.loads(response) + + 
assert 'id' in mfeature + assert 'type' in mfeature + assert mfeature['type'] == 'Feature' + assert 'properties' in mfeature + + assert 'geometry' in mfeature + assert 'type' in mfeature['geometry'] + assert 'coordinates' in mfeature['geometry'] + + assert 'crs' in mfeature + assert 'trs' in mfeature + + assert 'bbox' in mfeature + assert mfeature['bbox'] == [ + 139.757083, + 35.627483, + 0.5, + 139.757716, + 35.627701, + 4] + assert 'time' in mfeature + assert mfeature['time'] == ["2011-07-14T22:01:01Z", "2011-07-15T01:11:22Z"] + + assert 'links' in mfeature + assert len(mfeature['links']) == 1 + + +def test_get_collection_items_tGeometry(config, openapi, context): + mf = MOVING_FEATURES(config, openapi) + + # not found + req = mock_request() + rsp_headers, code, response = mf.get_collection_items_tGeometry( + req, '00000000-0000-0000-0000-000000000000', + '00000000-0000-0000-0000-000000000000') + assert code == HTTPStatus.NOT_FOUND + + # offset value should be positive or zero + req = mock_request({'offset': -1}) + rsp_headers, code, response = mf.get_collection_items_tGeometry( + req, context['collection_id'], context['mfeature_id']) + assert code == HTTPStatus.BAD_REQUEST + + # offset value should be an integer + req = mock_request({'offset': 'one'}) + rsp_headers, code, response = mf.get_collection_items_tGeometry( + req, context['collection_id'], context['mfeature_id']) + assert code == HTTPStatus.BAD_REQUEST + + # limit value should be strictly positive + req = mock_request({'offset': 0, 'limit': 0}) + rsp_headers, code, response = mf.get_collection_items_tGeometry( + req, context['collection_id'], context['mfeature_id']) + assert code == HTTPStatus.BAD_REQUEST + + # limit value should be less than or equal to 10000 + req = mock_request({'offset': 0, 'limit': 10001}) + rsp_headers, code, response = mf.get_collection_items_tGeometry( + req, context['collection_id'], context['mfeature_id']) + assert code == HTTPStatus.BAD_REQUEST + + # limit value should be an 
integer + req = mock_request({'offset': 0, 'limit': 'one'}) + rsp_headers, code, response = mf.get_collection_items_tGeometry( + req, context['collection_id'], context['mfeature_id']) + assert code == HTTPStatus.BAD_REQUEST + + # bbox values must be numbers + req = mock_request( + {'offset': 0, 'limit': 10, 'bbox': 'one,two,three,four'}) + rsp_headers, code, response = mf.get_collection_items_tGeometry( + req, context['collection_id'], context['mfeature_id']) + assert code == HTTPStatus.BAD_REQUEST + + # bbox should be 4 values (minx,miny,maxx,maxy) or 6 values + # (minx,miny,minz,maxx,maxy,maxz) + req = mock_request({'offset': 0, 'limit': 10, 'bbox': '100,30,0,200,40'}) + rsp_headers, code, response = mf.get_collection_items_tGeometry( + req, context['collection_id'], context['mfeature_id']) + assert code == HTTPStatus.BAD_REQUEST + + # minx is greater than maxx (possibly antimeridian bbox) + req = mock_request( + {'offset': 0, 'limit': 10, 'bbox': '200,30,0,100,40,10'}) + rsp_headers, code, response = mf.get_collection_items_tGeometry( + req, context['collection_id'], context['mfeature_id']) + assert code == HTTPStatus.BAD_REQUEST + + # invalid leaf + req = mock_request({'offset': 0, + 'limit': 10, + 'bbox': '100,30,0,200,40,10', + 'leaf': '2011-07-14T22:01:01.000Z,2011-07-14T22:01:01.000Z'}) # noqa + rsp_headers, code, response = mf.get_collection_items_tGeometry( + req, context['collection_id'], context['mfeature_id']) + + assert code == HTTPStatus.BAD_REQUEST + + # cannot use both parameter `subTrajectory` and `leaf` at the same time + req = mock_request({'offset': 0, + 'limit': 10, + 'bbox': '100,30,0,200,40,10', + 'leaf': '2011-07-14T22:01:01.000Z', + 'subTrajectory': True}) + rsp_headers, code, response = mf.get_collection_items_tGeometry( + req, context['collection_id'], context['mfeature_id']) + assert code == HTTPStatus.BAD_REQUEST + + # datetime parameter out of range + req = mock_request({'offset': 0, + 'limit': 10, + 'bbox': '100,30,0,200,40,10', + 
'leaf': '2011-07-14T22:01:01.000Z', + 'datetime': '2011-07-14T23:01:01.000Z/2011-07-14T22:01:01.000Z'}) # noqa + rsp_headers, code, response = mf.get_collection_items_tGeometry( + req, context['collection_id'], context['mfeature_id']) + assert code == HTTPStatus.BAD_REQUEST + + # successful data + req = mock_request({'offset': 0, + 'limit': 10, + 'bbox': '100,30,0,200,40,10', + 'leaf': '2011-07-14T22:01:01.000Z', + 'datetime': '2011-07-14T22:01:01.000Z/2011-07-14T23:01:01.000Z'}) # noqa + rsp_headers, code, response = mf.get_collection_items_tGeometry( + req, context['collection_id'], context['mfeature_id']) + + assert code == HTTPStatus.OK + + assert rsp_headers['Content-Type'] == 'application/json' + temporal_geometries = json.loads(response) + + assert 'geometrySequence' in temporal_geometries + assert len(temporal_geometries["geometrySequence"]) == 1 + + temporal_geometry = temporal_geometries['geometrySequence'][0] + assert 'id' in temporal_geometry + assert 'datetimes' in temporal_geometry + assert temporal_geometry['datetimes'] == ["2011-07-14T22:01:01+09"] + assert 'coordinates' in temporal_geometry + assert temporal_geometry['coordinates'] == [[139.757083, 35.627701, 0.5]] + assert 'type' in temporal_geometry + assert temporal_geometry['type'] == 'MovingPoint' + assert 'interpolation' in temporal_geometry + assert temporal_geometry['interpolation'] == 'Linear' + + assert 'crs' in temporal_geometries + assert 'trs' in temporal_geometries + assert 'links' in temporal_geometries + assert len(temporal_geometries['links']) == 1 + + assert 'timeStamp' in temporal_geometries + assert 'numberMatched' in temporal_geometries + assert temporal_geometries['numberMatched'] == 2 + assert 'numberReturned' in temporal_geometries + assert temporal_geometries['numberReturned'] == 1 + + +def test_get_collection_items_tGeometry_velocity(config, openapi, context): + mf = MOVING_FEATURES(config, openapi) + + # successful data + req = mock_request({'date-time': 
'2011-07-14T22:01:08Z'})
+    rsp_headers, code, response = mf.get_collection_items_tGeometry_velocity(
+        req, context['collection_id'], context['mfeature_id'],
+        context['tgeometry_id'])
+    assert code == HTTPStatus.OK
+
+    assert rsp_headers['Content-Type'] == 'application/json'
+    temporal_properties = response
+
+    assert 'name' in temporal_properties
+    assert temporal_properties['name'] == 'velocity'
+    assert 'type' in temporal_properties
+    assert temporal_properties['type'] == 'TReal'
+    assert 'form' in temporal_properties
+    assert temporal_properties['form'] == 'MTS'
+
+    assert 'valueSequence' in temporal_properties
+    assert len(temporal_properties['valueSequence']) == 1
+    value_sequence = temporal_properties['valueSequence'][0]
+
+    assert 'datetimes' in value_sequence
+    assert value_sequence['datetimes'] == ["2011-07-14T22:01:08.000000Z"]
+    assert 'values' in value_sequence
+    assert value_sequence['values'] == [0.00013296616111996862]
+    assert 'interpolation' in value_sequence
+    assert value_sequence['interpolation'] == "Discrete"
+
+
+def test_get_collection_items_tGeometry_distance(config, openapi, context):
+    mf = MOVING_FEATURES(config, openapi)
+
+    # successful data
+    req = mock_request({'date-time': '2011-07-14T22:01:08Z'})
+    rsp_headers, code, response = mf.get_collection_items_tGeometry_distance(
+        req, context['collection_id'], context['mfeature_id'],
+        context['tgeometry_id'])
+    assert code == HTTPStatus.OK
+
+    assert rsp_headers['Content-Type'] == 'application/json'
+    temporal_properties = response
+
+    assert 'name' in temporal_properties
+    assert temporal_properties['name'] == 'distance'
+    assert 'type' in temporal_properties
+    assert temporal_properties['type'] == 'TReal'
+    assert 'form' in temporal_properties
+    assert temporal_properties['form'] == 'MTR'
+
+    assert 'valueSequence' in temporal_properties
+    assert len(temporal_properties['valueSequence']) == 1
+    value_sequence = temporal_properties['valueSequence'][0]
+
+    assert 'datetimes' in 
value_sequence
+    assert value_sequence['datetimes'] == ["2011-07-14T22:01:08.000000Z"]
+    assert 'values' in value_sequence
+    assert value_sequence['values'] == [3.5000000394115824]
+    assert 'interpolation' in value_sequence
+    assert value_sequence['interpolation'] == "Discrete"
+
+
+def test_get_collection_items_tGeometry_acceleration(config, openapi, context):
+    mf = MOVING_FEATURES(config, openapi)
+
+    # successful data
+    req = mock_request({'date-time': '2011-07-14T22:01:08Z'})
+    rsp_headers, code, response = \
+        mf.get_collection_items_tGeometry_acceleration(
+            req, context['collection_id'], context['mfeature_id'],
+            context['tgeometry_id'])
+    assert code == HTTPStatus.OK
+
+    assert rsp_headers['Content-Type'] == 'application/json'
+    temporal_properties = response
+
+    assert 'name' in temporal_properties
+    assert temporal_properties['name'] == 'acceleration'
+    assert 'type' in temporal_properties
+    assert temporal_properties['type'] == 'TReal'
+    assert 'form' in temporal_properties
+    assert temporal_properties['form'] == 'MTS'
+
+    assert 'valueSequence' in temporal_properties
+    assert len(temporal_properties['valueSequence']) == 1
+    value_sequence = temporal_properties['valueSequence'][0]
+
+    assert 'datetimes' in value_sequence
+    assert value_sequence['datetimes'] == ["2011-07-14T22:01:08.000000Z"]
+    assert 'values' in value_sequence
+    assert value_sequence['values'] == [0]
+    assert 'interpolation' in value_sequence
+    assert value_sequence['interpolation'] == "Discrete"
+
+
+def test_get_collection_items_tProperty(config, openapi, context):
+    mf = MOVING_FEATURES(config, openapi)
+
+    # not found
+    req = mock_request()
+    rsp_headers, code, response = mf.get_collection_items_tProperty(
+        req, '00000000-0000-0000-0000-000000000000',
+        '00000000-0000-0000-0000-000000000000')
+    assert code == HTTPStatus.NOT_FOUND
+
+    # offset value should be positive or zero
+    req = mock_request({'offset': -1})
+    rsp_headers, code, response = 
mf.get_collection_items_tProperty( + req, context['collection_id'], context['mfeature_id']) + assert code == HTTPStatus.BAD_REQUEST + + # offset value should be an integer + req = mock_request({'offset': 'one'}) + rsp_headers, code, response = mf.get_collection_items_tProperty( + req, context['collection_id'], context['mfeature_id']) + assert code == HTTPStatus.BAD_REQUEST + + # limit value should be strictly positive + req = mock_request({'offset': 0, 'limit': 0}) + rsp_headers, code, response = mf.get_collection_items_tProperty( + req, context['collection_id'], context['mfeature_id']) + assert code == HTTPStatus.BAD_REQUEST + + # limit value should be less than or equal to 10000 + req = mock_request({'offset': 0, 'limit': 10001}) + rsp_headers, code, response = mf.get_collection_items_tProperty( + req, context['collection_id'], context['mfeature_id']) + assert code == HTTPStatus.BAD_REQUEST + + # limit value should be an integer + req = mock_request({'offset': 0, 'limit': 'one'}) + rsp_headers, code, response = mf.get_collection_items_tProperty( + req, context['collection_id'], context['mfeature_id']) + assert code == HTTPStatus.BAD_REQUEST + + # datetime parameter out of range + req = mock_request({'offset': 0, 'limit': 10, + 'datetime': '2011-07-17T22:01:01.450Z/2011-07-16T00:01:01.450Z'}) # noqa + rsp_headers, code, response = mf.get_collection_items_tProperty( + req, context['collection_id'], context['mfeature_id']) + assert code == HTTPStatus.BAD_REQUEST + + # successful data + req = mock_request({'offset': 0, + 'limit': 10, + 'datetime': '2011-07-16T22:01:01.450Z/2011-07-17T00:01:01.450Z', # noqa + 'subTemporalValue': 'true'}) + rsp_headers, code, response = mf.get_collection_items_tProperty( + req, context['collection_id'], context['mfeature_id']) + assert code == HTTPStatus.OK + + assert rsp_headers['Content-Type'] == 'application/json' + result = json.loads(response) + + assert 'temporalProperties' in result + temporal_properties = 
result['temporalProperties'] + assert len(temporal_properties) == 2 + + temporal_property = temporal_properties[0] + assert 'datetimes' in temporal_property + assert 'discharge' in temporal_property + assert 'form' in temporal_property['discharge'] + assert temporal_property['discharge']['form'] == 'MQS' + assert 'type' in temporal_property['discharge'] + assert temporal_property['discharge']['type'] == 'Measure' + assert 'values' in temporal_property['discharge'] + assert temporal_property['discharge']['values'] == [3, 4, 5] + + assert 'length' in temporal_property + assert 'form' in temporal_property['length'] + assert temporal_property['length']['form'] == \ + 'http://www.qudt.org/qudt/owl/1.0.0/quantity/Length' + assert 'type' in temporal_property['length'] + assert temporal_property['length']['type'] == 'Measure' + assert 'values' in temporal_property['length'] + assert temporal_property['length']['values'] == [1, 2.4, 1] + + assert 'links' in result + assert len(result['links']) == 1 + + assert 'timeStamp' in result + assert 'numberMatched' in result + assert result['numberMatched'] == 4 + assert 'numberReturned' in result + assert result['numberReturned'] == 4 + + +def test_get_collection_items_tProperty_value(config, openapi, context): + mf = MOVING_FEATURES(config, openapi) + + # not found + req = mock_request() + rsp_headers, code, response = mf.get_collection_items_tProperty_value( + req, '00000000-0000-0000-0000-000000000000', + '00000000-0000-0000-0000-000000000000', '') + assert code == HTTPStatus.NOT_FOUND + + # offset value should be positive or zero + req = mock_request({'offset': -1}) + rsp_headers, code, response = mf.get_collection_items_tProperty_value( + req, context['collection_id'], context['mfeature_id'], + context['tProperty_name']) + assert code == HTTPStatus.BAD_REQUEST + + # offset value should be an integer + req = mock_request({'offset': 'one'}) + rsp_headers, code, response = mf.get_collection_items_tProperty_value( + req, 
context['collection_id'], context['mfeature_id'], + context['tProperty_name']) + assert code == HTTPStatus.BAD_REQUEST + + # limit value should be strictly positive + req = mock_request({'offset': 0, 'limit': 0}) + rsp_headers, code, response = mf.get_collection_items_tProperty_value( + req, context['collection_id'], context['mfeature_id'], + context['tProperty_name']) + assert code == HTTPStatus.BAD_REQUEST + + # limit value should be less than or equal to 10000 + req = mock_request({'offset': 0, 'limit': 10001}) + rsp_headers, code, response = mf.get_collection_items_tProperty_value( + req, context['collection_id'], context['mfeature_id'], + context['tProperty_name']) + assert code == HTTPStatus.BAD_REQUEST + + # limit value should be an integer + req = mock_request({'offset': 0, 'limit': 'one'}) + rsp_headers, code, response = mf.get_collection_items_tProperty_value( + req, context['collection_id'], context['mfeature_id'], + context['tProperty_name']) + assert code == HTTPStatus.BAD_REQUEST + + # invalid leaf + req = mock_request({'offset': 0, 'limit': 10, + 'leaf': '2011-07-14T22:01:01.000Z,2011-07-14T22:01:01.000Z'}) # noqa + rsp_headers, code, response = mf.get_collection_items_tProperty_value( + req, context['collection_id'], context['mfeature_id'], + context['tProperty_name']) + assert code == HTTPStatus.BAD_REQUEST + + # cannot use both parameter `subTemporalValue` + # and `leaf` at the same time + req = mock_request({'offset': 0, + 'limit': 10, + 'leaf': '2011-07-16T22:01:01.450Z', + 'subTemporalValue': True}) + rsp_headers, code, response = mf.get_collection_items_tProperty_value( + req, context['collection_id'], context['mfeature_id'], + context['tProperty_name']) + assert code == HTTPStatus.BAD_REQUEST + + # datetime parameter out of range + req = mock_request({'offset': 0, + 'limit': 10, + 'leaf': '2011-07-16T22:01:01.450Z', + 'datetime': '2011-07-17T22:01:01.450Z/2011-07-16T00:01:01.450Z'}) # noqa + rsp_headers, code, response = 
mf.get_collection_items_tProperty_value( + req, context['collection_id'], context['mfeature_id'], + context['tProperty_name']) + assert code == HTTPStatus.BAD_REQUEST + + # successful data + req = mock_request({'offset': 0, + 'limit': 10, + 'leaf': '2011-07-16T22:01:01.450Z', + 'datetime': '2011-07-16T22:01:01.450Z/2011-07-17T00:01:01.450Z'}) # noqa + rsp_headers, code, response = mf.get_collection_items_tProperty_value( + req, context['collection_id'], context['mfeature_id'], + context['tProperty_name']) + + assert code == HTTPStatus.OK + + assert rsp_headers['Content-Type'] == 'application/json' + tProperty_value = json.loads(response) + + assert 'type' in tProperty_value + assert tProperty_value['type'] == 'Text' + assert 'valueSequence' in tProperty_value + assert len(tProperty_value["valueSequence"]) == 1 + + valueSequence = tProperty_value['valueSequence'][0] + assert 'values' in valueSequence + assert valueSequence['values'] == ["car"] + assert 'datetimes' in valueSequence + assert valueSequence['datetimes'] == ["2011-07-16T22:01:01.45Z"] + assert 'interpolation' in valueSequence + assert valueSequence['interpolation'] == 'Discrete' + + +def test_manage_collection_item_tProperty_delete( + config, openapi, context): + mf = MOVING_FEATURES(config, openapi) + + # feature not found + req = mock_request() + rsp_headers, code, response = mf.manage_collection_item_tProperty( + req, 'delete', '00000000-0000-0000-0000-000000000000', + '00000000-0000-0000-0000-000000000000', '') + assert code == HTTPStatus.NOT_FOUND + + # successful delete + req = mock_request() + rsp_headers, code, response = mf.manage_collection_item_tProperty( + req, 'delete', context['collection_id'], context['mfeature_id'], + context['tProperty_name']) + + assert code == HTTPStatus.NO_CONTENT + assert response == '' + assert rsp_headers['Content-Type'] == 'application/json' + + +def test_manage_collection_item_tGeometry_delete( + config, openapi, context): + mf = MOVING_FEATURES(config, openapi) 
+ + # feature not found + req = mock_request() + rsp_headers, code, response = mf.manage_collection_item_tGeometry( + req, 'delete', '00000000-0000-0000-0000-000000000000', + '00000000-0000-0000-0000-000000000000', + '00000000-0000-0000-0000-000000000000') + assert code == HTTPStatus.NOT_FOUND + + # successful delete + req = mock_request() + rsp_headers, code, response = mf.manage_collection_item_tGeometry( + req, 'delete', context['collection_id'], context['mfeature_id'], + context['tgeometry_id']) + + assert code == HTTPStatus.NO_CONTENT + assert response == '' + assert rsp_headers['Content-Type'] == 'application/json' + + +def test_manage_collection_item_delete( + config, openapi, context): + mf = MOVING_FEATURES(config, openapi) + + # collection not found + req = mock_request() + rsp_headers, code, response = mf.manage_collection_item( + req, 'delete', '00000000-0000-0000-0000-000000000000', + '00000000-0000-0000-0000-000000000000') + assert code == HTTPStatus.NOT_FOUND + + # successful delete + req = mock_request() + rsp_headers, code, response = mf.manage_collection_item( + req, 'delete', context['collection_id'], context['mfeature_id']) + assert code == HTTPStatus.NO_CONTENT + assert response == '' + assert rsp_headers['Content-Type'] == 'application/json' + + # check feature + pmdb_provider = PostgresMobilityDB() + pmdb_provider.connect() + result = pmdb_provider.get_feature(context.get('collection_id'), + context.get('mfeature_id')) + assert len(result) == 0 + + +def test_manage_collection_delete( + config, + openapi, + context): + mf = MOVING_FEATURES(config, openapi) + + # successful delete + req = mock_request() + rsp_headers, code, response = mf.manage_collection( + req, 'delete', context['collection_id']) + assert code == HTTPStatus.NO_CONTENT + assert response == '' + assert rsp_headers['Content-Type'] == 'application/json' + + # check collection + pmdb_provider = PostgresMobilityDB() + pmdb_provider.connect() + result = 
pmdb_provider.get_collection(context.get('collection_id')) + assert len(result) == 0 From 303540f6dd15b4d090c11964aa877900c92a2606 Mon Sep 17 00:00:00 2001 From: ts-shimizu <ts-shimizu@apptec.co.jp> Date: Tue, 19 Nov 2024 15:46:19 +0900 Subject: [PATCH 03/14] MF-API Server update and integration (#1) revision related to database and Provider * Correcting of test check judgments * Refactoring of parameter names --- pygeoapi/provider/postgresql_mobilitydb.py | 21 +++++++++------- tests/test_postgresql_mobilitydb.py | 29 ++++++++++++---------- 2 files changed, 28 insertions(+), 22 deletions(-) diff --git a/pygeoapi/provider/postgresql_mobilitydb.py b/pygeoapi/provider/postgresql_mobilitydb.py index d763febb3..91a6d77db 100644 --- a/pygeoapi/provider/postgresql_mobilitydb.py +++ b/pygeoapi/provider/postgresql_mobilitydb.py @@ -396,7 +396,7 @@ def get_tProperties_name_list(self): def get_temporalproperties( self, collection_id, mfeature_id, datetime='', limit=10, - offset=0, subTemporalValue=False): + offset=0, sub_temporal_value=False): """ Retrieve the static information of the temporal property data that included a single moving feature @@ -406,7 +406,7 @@ def get_temporalproperties( :param datetime: either a date-time or an interval(datestamp or extent) :param limit: number of items (default 10) [optional] :param offset: starting record to return (default 0) - :param subTemporalValue: only features with a temporal property + :param sub_temporal_value: only features with a temporal property intersecting the given time interval will return (default False) [optional] @@ -415,7 +415,8 @@ def get_temporalproperties( with self.connection.cursor() as cur: datetime_restriction = '' if datetime != '' and datetime is not None: - if subTemporalValue is False or subTemporalValue == "false": + if sub_temporal_value is False \ + or sub_temporal_value == "false": datetime_restriction = (""" and (atTime(pvalue_float, tstzspan('[{0}]')) is not null or atTime(pvalue_text, 
tstzspan('[{0}]')) is not null)""" @@ -440,7 +441,7 @@ def get_temporalproperties( result = cur.fetchall() number_returned = len(result) - if subTemporalValue or subTemporalValue == "true": + if sub_temporal_value or sub_temporal_value == "true": subTemporalValue_float_field = ( """atTime(tproperties.pvalue_float, tstzspan('[{0}]'))""" .format(datetime)) @@ -483,7 +484,7 @@ def get_temporalproperties( def get_temporalproperties_value( self, collection_id, mfeature_id, tProperty_name, datetime='', - leaf='', subTemporalValue=False): + leaf='', sub_temporal_value=False): """ Retrieve temporal values with a specified name {tPropertyName} of temporal property. @@ -496,7 +497,7 @@ def get_temporalproperties_value( property that intersects the given date-time are selected [optional] - :param subTemporalValue: only features with a temporal property + :param sub_temporal_value: only features with a temporal property intersecting the given time interval will return (default False) [optional] @@ -517,7 +518,7 @@ def get_temporalproperties_value( tstzset('{" + leaf + "}'))" text_field = "atTime(tproperties.pvalue_text, \ tstzset('{" + leaf + "}'))" - elif subTemporalValue or subTemporalValue == "true": + elif sub_temporal_value or sub_temporal_value == "true": float_field = "atTime(tproperties.pvalue_float, \ tstzspan('[" + datetime + "]'))" text_field = "atTime(tproperties.pvalue_text, \ @@ -674,6 +675,7 @@ def post_temporalproperties( datetimes = g_temporal_property.pop("datetimes", None) datetime_group = self.get_temporalproperties_group( collection_id, mfeature_id, datetimes) + tproperties_name_list = [] for tproperties_name in g_temporal_property: with self.connection.cursor() as cur: if 'values' in g_temporal_property[tproperties_name] \ @@ -720,8 +722,9 @@ def post_temporalproperties( temporal_property[tproperties_name]))) cur.execute(insert_query) - # TODO replace g_temporal_property - return tproperties_name + tproperties_name_list.append(tproperties_name) + + return 
tproperties_name_list def post_temporalvalue( self, collection_id, mfeature_id, tproperties_name, diff --git a/tests/test_postgresql_mobilitydb.py b/tests/test_postgresql_mobilitydb.py index e3cf7bdc4..92126c05d 100644 --- a/tests/test_postgresql_mobilitydb.py +++ b/tests/test_postgresql_mobilitydb.py @@ -1,4 +1,4 @@ -from pygeoapi.pmdb_provider.postgresql_mobilitydb import PostgresMobilityDB +from pygeoapi.provider.postgresql_mobilitydb import PostgresMobilityDB import pytest @@ -433,8 +433,8 @@ def test_query_post_collection(context, collection_property): pmdb_provider.connect() collection_id = pmdb_provider.post_collection(collection_property) + assert collection_id is not None context['collection_id'] = collection_id - assert collection_id def test_query_post_movingfeature(context, movingfeature): @@ -444,8 +444,8 @@ def test_query_post_movingfeature(context, movingfeature): pmdb_provider.post_movingfeature(context.get('collection_id'), movingfeature) + assert mfeature_id is not None context['mfeature_id'] = mfeature_id - assert mfeature_id def test_query_post_temporalgeometry(context, temporalgeometry): @@ -457,8 +457,8 @@ def test_query_post_temporalgeometry(context, temporalgeometry): context.get('mfeature_id'), temporalgeometry) + assert tgeometry_id is not None context['tgeometry_id'] = tgeometry_id - assert tgeometry_id def test_query_post_temporalproperties(context, temporalproperties): @@ -472,16 +472,19 @@ def test_query_post_temporalproperties(context, temporalproperties): context.get('mfeature_id'), l_temporal_properties) + tProperty_name_list = [] if canPost: for temporal_property in l_temporal_properties: - tProperty_name = pmdb_provider.\ - post_temporalproperties( - context.get('collection_id'), - context.get('mfeature_id'), - temporal_property) + tProperty_name_list.extend(pmdb_provider. 
+ post_temporalproperties( + context.get('collection_id'), + context.get('mfeature_id'), + temporal_property)) - context['tProperty_name'] = tProperty_name - assert tProperty_name + assert len(tProperty_name_list) == 4 + tProperty_name = tProperty_name_list[-1] + assert tProperty_name is not None + context['tProperty_name'] = tProperty_name def test_query_post_temporalvalue(context, temporalvalue_data): @@ -588,7 +591,7 @@ def test_query_get_features( 30, 0, 200, - 400, + 40, 10], datetime='2011-07-14 22:01:01.000,2011-07-14 22:01:01.000', limit=10, offset=0, sub_trajectory=False): @@ -651,7 +654,7 @@ def test_query_get_temporalgeometries( 30, 0, 200, - 400, + 40, 10], leaf='2011-07-14 22:01:01.000', datetime='2011-07-14 22:01:01.000,2011-07-14 22:01:01.000', From 04d29c842fd710980f6a2a46699150be99c506f6 Mon Sep 17 00:00:00 2001 From: ts-shimizu <ts-shimizu@apptec.co.jp> Date: Tue, 19 Nov 2024 15:49:37 +0900 Subject: [PATCH 04/14] MF-API Server update and integration (#1) modify service interface and entry points * Modifying service entry point * Modifying the MF-API service interface --- pygeoapi/api/__init__.py | 579 +++++++++++++-------------------------- pygeoapi/flask_app.py | 260 +++++++++++++++--- 2 files changed, 415 insertions(+), 424 deletions(-) diff --git a/pygeoapi/api/__init__.py b/pygeoapi/api/__init__.py index b47541f23..03948ae2b 100644 --- a/pygeoapi/api/__init__.py +++ b/pygeoapi/api/__init__.py @@ -40,6 +40,13 @@ Returns content from plugins and sets responses. 
""" +from pygeoapi.util import (CrsTransformSpec, TEMPLATES, UrlPrefetcher, + get_api_rules, get_base_url, get_provider_by_type, + get_typed_value, get_crs_from_uri, + get_supported_crs_list, render_j2_template, to_json) +from pymeos import STBox, TsTzSpan, pymeos_initialize +import psycopg2 +from pygeoapi.provider.postgresql_mobilitydb import PostgresMobilityDB import asyncio from collections import OrderedDict from copy import deepcopy @@ -61,14 +68,7 @@ from pygeoapi.plugin import load_plugin from pygeoapi.process.manager.base import get_manager from pygeoapi.provider.base import ( - ProviderConnectionError, ProviderGenericError, ProviderTypeError) - -from pygeoapi.util import ( - CrsTransformSpec, TEMPLATES, UrlPrefetcher, dategetter, - filter_dict_by_key_value, get_api_rules, get_base_url, - get_provider_by_type, get_provider_default, get_typed_value, - get_crs_from_uri, get_supported_crs_list, render_j2_template, to_json -) + ProviderGenericError, ProviderTypeError) LOGGER = logging.getLogger(__name__) @@ -112,6 +112,54 @@ 'http://www.opengis.net/spec/ogcapi-common-1/1.0/conf/oas30' ] +CONFORMANCE = { + 'common': [ + 'http://www.opengis.net/spec/ogcapi-common-1/1.0/conf/core', + 'http://www.opengis.net/spec/ogcapi-common-2/1.0/conf/collections' + ], + 'feature': [ + 'http://www.opengis.net/spec/ogcapi-features-1/1.0/conf/core', + 'http://www.opengis.net/spec/ogcapi-features-1/1.0/conf/oas30', + 'http://www.opengis.net/spec/ogcapi-features-1/1.0/conf/html', + 'http://www.opengis.net/spec/ogcapi-features-1/1.0/conf/geojson', + 'http://www.opengis.net/spec/ogcapi-features-4/1.0/conf/create-replace-delete' # noqa + ], + 'coverage': [ + 'http://www.opengis.net/spec/ogcapi-coverages-1/1.0/conf/core', + 'http://www.opengis.net/spec/ogcapi-coverages-1/1.0/conf/oas30', + 'http://www.opengis.net/spec/ogcapi-coverages-1/1.0/conf/html', + 'http://www.opengis.net/spec/ogcapi-coverages-1/1.0/conf/geodata-coverage', # noqa + 
'http://www.opengis.net/spec/ogcapi-coverages-1/1.0/conf/coverage-subset', # noqa + 'http://www.opengis.net/spec/ogcapi-coverages-1/1.0/conf/coverage-rangesubset', # noqa + 'http://www.opengis.net/spec/ogcapi-coverages-1/1.0/conf/coverage-bbox', # noqa + 'http://www.opengis.net/spec/ogcapi-coverages-1/1.0/conf/coverage-datetime' # noqa + ], + 'tile': [ + 'http://www.opengis.net/spec/ogcapi-tiles-1/1.0/conf/core' + ], + 'record': [ + 'http://www.opengis.net/spec/ogcapi-records-1/1.0/conf/core', + 'http://www.opengis.net/spec/ogcapi-records-1/1.0/conf/sorting', + 'http://www.opengis.net/spec/ogcapi-records-1/1.0/conf/opensearch', + 'http://www.opengis.net/spec/ogcapi-records-1/1.0/conf/json', + 'http://www.opengis.net/spec/ogcapi-records-1/1.0/conf/html' + ], + 'process': [ + 'http://www.opengis.net/spec/ogcapi-processes-1/1.0/conf/ogc-process-description', # noqa + 'http://www.opengis.net/spec/ogcapi-processes-1/1.0/conf/core', + 'http://www.opengis.net/spec/ogcapi-processes-1/1.0/conf/json', + 'http://www.opengis.net/spec/ogcapi-processes-1/1.0/conf/oas30' + ], + 'edr': [ + 'http://www.opengis.net/spec/ogcapi-edr-1/1.0/conf/core' + ], + 'movingfeatures': [ + "http://www.opengis.net/spec/ogcapi-movingfeatures-1/1.0/conf/common", + "http://www.opengis.net/spec/ogcapi-movingfeatures-1/1.0/conf/mf-collection", # noqa + "http://www.opengis.net/spec/ogcapi-movingfeatures-1/1.0/conf/movingfeatures" # noqa + ] +} + OGC_RELTYPES_BASE = 'http://www.opengis.net/def/rel/ogc/1.0' DEFAULT_CRS_LIST = [ @@ -142,7 +190,7 @@ def all_apis() -> dict: 'map': maps, 'process': processes, 'tile': tiles, - 'stac': stac + 'stac': stac, } @@ -292,6 +340,7 @@ def example_method(self, request: Union[APIRequest, Any], custom_arg): :param request: The web platform specific Request instance. :param supported_locales: List or set of supported Locale instances. 
""" + def __init__(self, request, supported_locales): # Set default request data self._data = b'' @@ -454,7 +503,7 @@ def _get_format(self, headers) -> Union[str, None]: # Format not specified: get from Accept headers (MIME types) # e.g. format_ = 'text/html' - h = headers.get('accept', headers.get('Accept', '')).strip() # noqa + h = headers.get('accept', headers.get('Accept', '')).strip() # noqa (fmts, mimes) = zip(*FORMAT_TYPES.items()) # basic support for complex types (i.e. with "q=0.x") for type_ in (t.split(';')[0].strip() for t in h.split(',') if t): @@ -787,18 +836,6 @@ def landing_page(self, fcm['stac'] = False fcm['collection'] = False - if filter_dict_by_key_value(self.config['resources'], - 'type', 'process'): - fcm['processes'] = True - - if filter_dict_by_key_value(self.config['resources'], - 'type', 'stac-collection'): - fcm['stac'] = True - - if filter_dict_by_key_value(self.config['resources'], - 'type', 'collection'): - fcm['collection'] = True - content = render_j2_template(self.tpl_config, 'landing_page.html', fcm, request.locale) return headers, HTTPStatus.OK, content @@ -812,7 +849,7 @@ def landing_page(self, @gzip @pre_process def openapi_(self, request: Union[APIRequest, Any]) -> Tuple[ - dict, int, str]: + dict, int, str]: """ Provide OpenAPI document @@ -860,28 +897,12 @@ def conformance(self, :returns: tuple of headers, status code, content """ - apis_dict = all_apis() - if not request.is_valid(): return self.get_format_exception(request) conformance_list = CONFORMANCE_CLASSES - - for key, value in self.config['resources'].items(): - if value['type'] == 'process': - conformance_list.extend( - apis_dict['process'].CONFORMANCE_CLASSES) - else: - for provider in value['providers']: - if provider['type'] in apis_dict: - conformance_list.extend( - apis_dict[provider['type']].CONFORMANCE_CLASSES) - if provider['type'] == 'feature': - conformance_list.extend( - apis_dict['itemtypes'].CONFORMANCE_CLASSES_FEATURES) # noqa - if provider['type'] == 
'record': - conformance_list.extend( - apis_dict['itemtypes'].CONFORMANCE_CLASSES_RECORDS) + conformance_list.extend( + CONFORMANCE['movingfeatures']) conformance = { 'conformsTo': sorted(list(set(conformance_list))) @@ -898,395 +919,185 @@ def conformance(self, @gzip @pre_process @jsonldify - def describe_collections(self, request: Union[APIRequest, Any], - dataset=None) -> Tuple[dict, int, str]: + def describe_collections( + self, request: Union[APIRequest, Any]) -> Tuple[dict, int, str]: """ - Provide collection metadata + Queries collection :param request: A request object - :param dataset: name of collection :returns: tuple of headers, status code, content """ - if not request.is_valid(): return self.get_format_exception(request) - headers = request.get_response_headers(**self.api_headers) + headers = request.get_response_headers() + pmdb_provider = PostgresMobilityDB() fcm = { 'collections': [], 'links': [] } - collections = filter_dict_by_key_value(self.config['resources'], - 'type', 'collection') - - if all([dataset is not None, dataset not in collections.keys()]): - msg = 'Collection not found' + try: + pmdb_provider.connect() + result = pmdb_provider.get_collections() + except (Exception, psycopg2.Error) as error: + msg = str(error) return self.get_exception( - HTTPStatus.NOT_FOUND, headers, request.format, 'NotFound', msg) - - if dataset is not None: - collections_dict = { - k: v for k, v in collections.items() if k == dataset - } - else: - collections_dict = collections - - LOGGER.debug('Creating collections') - for k, v in collections_dict.items(): - if v.get('visibility', 'default') == 'hidden': - LOGGER.debug(f'Skipping hidden layer: {k}') - continue - collection_data = get_provider_default(v['providers']) - collection_data_type = collection_data['type'] - - collection_data_format = None - - if 'format' in collection_data: - collection_data_format = collection_data['format'] - - is_vector_tile = (collection_data_type == 'tile' and - 
collection_data_format['name'] not - in [F_PNG, F_JPEG]) - - collection = { - 'id': k, - 'title': l10n.translate(v['title'], request.locale), - 'description': l10n.translate(v['description'], request.locale), # noqa - 'keywords': l10n.translate(v['keywords'], request.locale), - 'links': [] - } + HTTPStatus.BAD_REQUEST, + headers, + request.format, + 'ConnectingError', + msg) + + pymeos_initialize() + collections = [] + for row in result: + collection_id = row[0] + collection = row[1] + collection['itemType'] = 'movingfeature' + collection['id'] = collection_id + + crs = None + trs = None + if 'crs' in collection: + crs = collection.pop('crs', None) + if 'trs' in collection: + trs = collection.pop('trs', None) + + extend_stbox = STBox(row[3]) if row[3] is not None else None + lifespan = TsTzSpan(row[2]) if row[2] is not None else None + + bbox = [] + if extend_stbox is not None: + bbox.append(extend_stbox.xmin()) + bbox.append(extend_stbox.ymin()) + if extend_stbox.zmin() is not None: + bbox.append(extend_stbox.zmin()) + bbox.append(extend_stbox.xmax()) + bbox.append(extend_stbox.ymax()) + if extend_stbox.zmax() is not None: + bbox.append(extend_stbox.zmax()) + + if crs is None: + if extend_stbox.srid() == 4326: + if extend_stbox.zmax() is not None: + crs = 'http://www.opengis.net/def/crs/OGC/0/CRS84h' + else: + crs = 'http://www.opengis.net/def/\ + crs/OGC/1.3/CRS84' + if crs is None: + crs = 'http://www.opengis.net/def/crs/OGC/1.3/CRS84' + if trs is None: + trs = 'http://www.opengis.net/def/uom/ISO-8601/0/Gregorian' + + time = [] + if lifespan is not None: + time.append(lifespan.lower().strftime("%Y-%m-%dT%H:%M:%SZ")) + time.append(lifespan.upper().strftime("%Y-%m-%dT%H:%M:%SZ")) + else: + if extend_stbox is not None: + if extend_stbox.tmin() is not None: + time.append(extend_stbox.tmin().strftime( + "%Y-%m-%dT%H:%M:%SZ")) + time.append(extend_stbox.tmax().strftime( + "%Y-%m-%dT%H:%M:%SZ")) - bbox = v['extents']['spatial']['bbox'] - # The output should be an array 
of bbox, so if the user only - # provided a single bbox, wrap it in a array. - if not isinstance(bbox[0], list): - bbox = [bbox] collection['extent'] = { 'spatial': { - 'bbox': bbox + 'bbox': bbox, + 'crs': crs + }, + 'temporal': { + 'interval': time, + 'trs': trs } } - if 'crs' in v['extents']['spatial']: - collection['extent']['spatial']['crs'] = \ - v['extents']['spatial']['crs'] - - t_ext = v.get('extents', {}).get('temporal', {}) - if t_ext: - begins = dategetter('begin', t_ext) - ends = dategetter('end', t_ext) - collection['extent']['temporal'] = { - 'interval': [[begins, ends]] - } - if 'trs' in t_ext: - collection['extent']['temporal']['trs'] = t_ext['trs'] - - LOGGER.debug('Processing configured collection links') - for link in l10n.translate(v.get('links', []), request.locale): - lnk = { - 'type': link['type'], - 'rel': link['rel'], - 'title': l10n.translate(link['title'], request.locale), - 'href': l10n.translate(link['href'], request.locale), - } - if 'hreflang' in link: - lnk['hreflang'] = l10n.translate( - link['hreflang'], request.locale) - content_length = link.get('length', 0) - - if lnk['rel'] == 'enclosure' and content_length == 0: - # Issue HEAD request for enclosure links without length - lnk_headers = self.prefetcher.get_headers(lnk['href']) - content_length = int(lnk_headers.get('content-length', 0)) - content_type = lnk_headers.get('content-type', lnk['type']) - if content_length == 0: - # Skip this (broken) link - LOGGER.debug(f"Enclosure {lnk['href']} is invalid") - continue - if content_type != lnk['type']: - # Update content type if different from specified - lnk['type'] = content_type - LOGGER.debug( - f"Fixed media type for enclosure {lnk['href']}") - - if content_length > 0: - lnk['length'] = content_length - - collection['links'].append(lnk) + + collection['links'] = [] # TODO: provide translations LOGGER.debug('Adding JSON and HTML link relations') - collection['links'].append({ - 'type': FORMAT_TYPES[F_JSON], - 'rel': 'root', - 
'title': l10n.translate('The landing page of this server as JSON', request.locale), # noqa - 'href': f"{self.base_url}?f={F_JSON}" - }) - collection['links'].append({ - 'type': FORMAT_TYPES[F_HTML], - 'rel': 'root', - 'title': l10n.translate('The landing page of this server as HTML', request.locale), # noqa - 'href': f"{self.base_url}?f={F_HTML}" - }) + collection['links'].append({ 'type': FORMAT_TYPES[F_JSON], 'rel': request.get_linkrel(F_JSON), 'title': l10n.translate('This document as JSON', request.locale), # noqa - 'href': f'{self.get_collections_url()}/{k}?f={F_JSON}' + 'href': f'{self.get_collections_url()}/{collection_id}?f={F_JSON}' # noqa }) collection['links'].append({ 'type': FORMAT_TYPES[F_JSONLD], 'rel': request.get_linkrel(F_JSONLD), 'title': l10n.translate('This document as RDF (JSON-LD)', request.locale), # noqa - 'href': f'{self.get_collections_url()}/{k}?f={F_JSONLD}' + 'href': f'{self.get_collections_url()}/{collection_id}?f={F_JSONLD}' # noqa }) collection['links'].append({ 'type': FORMAT_TYPES[F_HTML], 'rel': request.get_linkrel(F_HTML), 'title': l10n.translate('This document as HTML', request.locale), # noqa - 'href': f'{self.get_collections_url()}/{k}?f={F_HTML}' + 'href': f'{self.get_collections_url()}/{collection_id}?f={F_HTML}' # noqa }) - if collection_data_type in ['feature', 'coverage', 'record']: - collection['links'].append({ - 'type': FORMAT_TYPES[F_JSON], - 'rel': f'{OGC_RELTYPES_BASE}/schema', - 'title': l10n.translate('Schema of collection in JSON', request.locale), # noqa - 'href': f'{self.get_collections_url()}/{k}/schema?f={F_JSON}' # noqa - }) - collection['links'].append({ - 'type': FORMAT_TYPES[F_HTML], - 'rel': f'{OGC_RELTYPES_BASE}/schema', - 'title': l10n.translate('Schema of collection in HTML', request.locale), # noqa - 'href': f'{self.get_collections_url()}/{k}/schema?f={F_HTML}' # noqa - }) - - if is_vector_tile or collection_data_type in ['feature', 'record']: - # TODO: translate - collection['itemType'] = 
collection_data_type - LOGGER.debug('Adding feature/record based links') - collection['links'].append({ - 'type': 'application/schema+json', - 'rel': 'http://www.opengis.net/def/rel/ogc/1.0/queryables', - 'title': l10n.translate('Queryables for this collection as JSON', request.locale), # noqa - 'href': f'{self.get_collections_url()}/{k}/queryables?f={F_JSON}' # noqa - }) - collection['links'].append({ - 'type': FORMAT_TYPES[F_HTML], - 'rel': 'http://www.opengis.net/def/rel/ogc/1.0/queryables', - 'title': l10n.translate('Queryables for this collection as HTML', request.locale), # noqa - 'href': f'{self.get_collections_url()}/{k}/queryables?f={F_HTML}' # noqa - }) - collection['links'].append({ - 'type': 'application/geo+json', - 'rel': 'items', - 'title': l10n.translate('Items as GeoJSON', request.locale), # noqa - 'href': f'{self.get_collections_url()}/{k}/items?f={F_JSON}' # noqa - }) - collection['links'].append({ - 'type': FORMAT_TYPES[F_JSONLD], - 'rel': 'items', - 'title': l10n.translate('Items as RDF (GeoJSON-LD)', request.locale), # noqa - 'href': f'{self.get_collections_url()}/{k}/items?f={F_JSONLD}' # noqa - }) - collection['links'].append({ - 'type': FORMAT_TYPES[F_HTML], - 'rel': 'items', - 'title': l10n.translate('Items as HTML', request.locale), # noqa - 'href': f'{self.get_collections_url()}/{k}/items?f={F_HTML}' # noqa - }) - - # OAPIF Part 2 - list supported CRSs and StorageCRS - if collection_data_type == 'feature': - collection['crs'] = get_supported_crs_list(collection_data, DEFAULT_CRS_LIST) # noqa - collection['storageCRS'] = collection_data.get('storage_crs', DEFAULT_STORAGE_CRS) # noqa - if 'storage_crs_coordinate_epoch' in collection_data: - collection['storageCrsCoordinateEpoch'] = collection_data.get('storage_crs_coordinate_epoch') # noqa - - elif collection_data_type == 'coverage': - # TODO: translate - LOGGER.debug('Adding coverage based links') - collection['links'].append({ - 'type': 'application/prs.coverage+json', - 'rel': 
f'{OGC_RELTYPES_BASE}/coverage', - 'title': l10n.translate('Coverage data', request.locale), - 'href': f'{self.get_collections_url()}/{k}/coverage?f={F_JSON}' # noqa - }) - if collection_data_format is not None: - title_ = l10n.translate('Coverage data as', request.locale) # noqa - title_ = f"{title_} {collection_data_format['name']}" - collection['links'].append({ - 'type': collection_data_format['mimetype'], - 'rel': f'{OGC_RELTYPES_BASE}/coverage', - 'title': title_, - 'href': f"{self.get_collections_url()}/{k}/coverage?f={collection_data_format['name']}" # noqa - }) - if dataset is not None: - LOGGER.debug('Creating extended coverage metadata') - try: - provider_def = get_provider_by_type( - self.config['resources'][k]['providers'], - 'coverage') - p = load_plugin('provider', provider_def) - except ProviderConnectionError: - msg = 'connection error (check logs)' - return self.get_exception( - HTTPStatus.INTERNAL_SERVER_ERROR, - headers, request.format, - 'NoApplicableCode', msg) - except ProviderTypeError: - pass - else: - collection['extent']['spatial']['grid'] = [{ - 'cellsCount': p._coverage_properties['width'], - 'resolution': p._coverage_properties['resx'] - }, { - 'cellsCount': p._coverage_properties['height'], - 'resolution': p._coverage_properties['resy'] - }] - - try: - tile = get_provider_by_type(v['providers'], 'tile') - p = load_plugin('provider', tile) - except ProviderConnectionError: - msg = 'connection error (check logs)' - return self.get_exception( - HTTPStatus.INTERNAL_SERVER_ERROR, - headers, request.format, - 'NoApplicableCode', msg) - except ProviderTypeError: - tile = None - - if tile: - # TODO: translate - - LOGGER.debug('Adding tile links') - collection['links'].append({ - 'type': FORMAT_TYPES[F_JSON], - 'rel': f'http://www.opengis.net/def/rel/ogc/1.0/tilesets-{p.tile_type}', # noqa - 'title': l10n.translate('Tiles as JSON', request.locale), - 'href': f'{self.get_collections_url()}/{k}/tiles?f={F_JSON}' # noqa - }) - 
collection['links'].append({ - 'type': FORMAT_TYPES[F_HTML], - 'rel': f'http://www.opengis.net/def/rel/ogc/1.0/tilesets-{p.tile_type}', # noqa - 'title': l10n.translate('Tiles as HTML', request.locale), - 'href': f'{self.get_collections_url()}/{k}/tiles?f={F_HTML}' # noqa - }) - - try: - map_ = get_provider_by_type(v['providers'], 'map') - except ProviderTypeError: - map_ = None - - if map_: - LOGGER.debug('Adding map links') - - map_mimetype = map_['format']['mimetype'] - map_format = map_['format']['name'] - - title_ = l10n.translate('Map as', request.locale) - title_ = f"{title_} {map_format}" - - collection['links'].append({ - 'type': map_mimetype, - 'rel': 'http://www.opengis.net/def/rel/ogc/1.0/map', - 'title': title_, - 'href': f"{self.get_collections_url()}/{k}/map?f={map_format}" # noqa - }) - - try: - edr = get_provider_by_type(v['providers'], 'edr') - p = load_plugin('provider', edr) - except ProviderConnectionError: - msg = 'connection error (check logs)' - return self.get_exception( - HTTPStatus.INTERNAL_SERVER_ERROR, headers, - request.format, 'NoApplicableCode', msg) - except ProviderTypeError: - edr = None - - if edr: - # TODO: translate - LOGGER.debug('Adding EDR links') - collection['data_queries'] = {} - parameters = p.get_fields() - if parameters: - collection['parameter_names'] = {} - for key, value in parameters.items(): - collection['parameter_names'][key] = { - 'id': key, - 'type': 'Parameter', - 'name': value['title'], - 'unit': { - 'label': { - 'en': value['title'] - }, - 'symbol': { - 'value': value['x-ogc-unit'], - 'type': 'http://www.opengis.net/def/uom/UCUM/' # noqa - } - } - } - - for qt in p.get_query_types(): - data_query = { - 'link': { - 'href': f'{self.get_collections_url()}/{k}/{qt}', - 'rel': 'data' - } - } - collection['data_queries'][qt] = data_query - - title1 = l10n.translate('query for this collection as JSON', request.locale) # noqa - title1 = f'{qt} {title1}' - title2 = l10n.translate('query for this collection as HTML', 
request.locale) # noqa - title2 = f'{qt} {title2}' - - collection['links'].append({ - 'type': 'application/json', - 'rel': 'data', - 'title': title1, - 'href': f'{self.get_collections_url()}/{k}/{qt}?f={F_JSON}' # noqa - }) - collection['links'].append({ - 'type': FORMAT_TYPES[F_HTML], - 'rel': 'data', - 'title': title2, - 'href': f'{self.get_collections_url()}/{k}/{qt}?f={F_HTML}' # noqa - }) - - if dataset is not None and k == dataset: - fcm = collection - break - - fcm['collections'].append(collection) - - if dataset is None: - # TODO: translate - fcm['links'].append({ - 'type': FORMAT_TYPES[F_JSON], - 'rel': request.get_linkrel(F_JSON), - 'title': l10n.translate('This document as JSON', request.locale), # noqa - 'href': f'{self.get_collections_url()}?f={F_JSON}' + collection['links'].append({ + 'type': 'application/geo+json', + 'rel': 'items', + 'title': l10n.translate('Items as GeoJSON', request.locale), # noqa + 'href': f'{self.get_collections_url()}/{collection_id}/items?f={F_JSON}' # noqa }) - fcm['links'].append({ + collection['links'].append({ 'type': FORMAT_TYPES[F_JSONLD], - 'rel': request.get_linkrel(F_JSONLD), - 'title': l10n.translate('This document as RDF (JSON-LD)', request.locale), # noqa - 'href': f'{self.get_collections_url()}?f={F_JSONLD}' + 'rel': 'items', + 'title': l10n.translate('Items as RDF (GeoJSON-LD)', request.locale), # noqa + 'href': f'{self.get_collections_url()}/{collection_id}/items?f={F_JSONLD}' # noqa }) - fcm['links'].append({ + collection['links'].append({ 'type': FORMAT_TYPES[F_HTML], - 'rel': request.get_linkrel(F_HTML), - 'title': l10n.translate('This document as HTML', request.locale), # noqa - 'href': f'{self.get_collections_url()}?f={F_HTML}' + 'rel': 'items', + 'title': l10n.translate('Items as HTML', request.locale), # noqa + 'href': f'{self.get_collections_url()}/{collection_id}/items?f={F_HTML}' # noqa }) + collections.append(collection) + fcm['collections'] = collections + # fcm['links'].append({ + # 'href': 
'{}'.format( + # self.get_collections_url()), + # 'rel': request.get_linkrel(F_JSON), + # 'type': FORMAT_TYPES[F_JSON] + # }) + fcm['links'].append({ + 'type': FORMAT_TYPES[F_JSON], + 'rel': 'root', + 'title': l10n.translate('The landing page of this server as JSON', request.locale), # noqa + 'href': f"{self.base_url}?f={F_JSON}" + }) + fcm['links'].append({ + 'type': FORMAT_TYPES[F_HTML], + 'rel': 'root', + 'title': l10n.translate('The landing page of this server as HTML', request.locale), # noqa + 'href': f"{self.base_url}?f={F_HTML}" + }) + fcm['links'].append({ + 'type': FORMAT_TYPES[F_JSON], + 'rel': request.get_linkrel(F_JSON), + 'title': l10n.translate('This document as JSON', request.locale), # noqa + 'href': f'{self.get_collections_url()}?f={F_JSON}' + }) + fcm['links'].append({ + 'type': FORMAT_TYPES[F_JSONLD], + 'rel': request.get_linkrel(F_JSONLD), + 'title': l10n.translate('This document as RDF (JSON-LD)', request.locale), # noqa + 'href': f'{self.get_collections_url()}?f={F_JSONLD}' + }) + fcm['links'].append({ + 'type': FORMAT_TYPES[F_HTML], + 'rel': request.get_linkrel(F_HTML), + 'title': l10n.translate('This document as HTML', request.locale), # noqa + 'href': f'{self.get_collections_url()}?f={F_HTML}' + }) if request.format == F_HTML: # render fcm['collections_path'] = self.get_collections_url() - if dataset is not None: + if len(result) > 0: content = render_j2_template(self.tpl_config, 'collections/collection.html', fcm, request.locale) @@ -1299,7 +1110,7 @@ def describe_collections(self, request: Union[APIRequest, Any], if request.format == F_JSONLD: jsonld = self.fcmld.copy() - if dataset is not None: + if len(result) > 0: jsonld['dataset'] = jsonldify_collection(self, fcm, request.locale) else: diff --git a/pygeoapi/flask_app.py b/pygeoapi/flask_app.py index 4e4f0097e..252334f53 100644 --- a/pygeoapi/flask_app.py +++ b/pygeoapi/flask_app.py @@ -38,6 +38,7 @@ send_from_directory, Response, Request) from pygeoapi.api import API, APIRequest, 
apply_gzip +from pygeoapi.api.movingfeatures import MOVING_FEATURES import pygeoapi.api.coverages as coverages_api import pygeoapi.api.environmental_data_retrieval as edr_api import pygeoapi.api.itemtypes as itemtypes_api @@ -85,6 +86,7 @@ 'pretty_print', True) api_ = API(CONFIG, OPENAPI) +movingFeatures = MOVING_FEATURES(CONFIG, OPENAPI) OGC_SCHEMAS_LOCATION = CONFIG['server'].get('ogc_schemas_location') @@ -179,6 +181,16 @@ def openapi(): """ OpenAPI endpoint + :returns: HTTP response + """ + return get_response(api_.openapi_(request)) + + +@BLUEPRINT.route('/api') +def api(): + """ + OpenAPI endpoint + :returns: HTTP response """ @@ -221,8 +233,10 @@ def get_tilematrix_sets(): return execute_from_flask(tiles_api.tilematrixsets, request) -@BLUEPRINT.route('/collections') -@BLUEPRINT.route('/collections/<path:collection_id>') +@BLUEPRINT.route('/collections', + methods=['GET', 'POST']) +@BLUEPRINT.route('/collections/<path:collection_id>', + methods=['GET', 'PUT', 'DELETE']) def collections(collection_id=None): """ OGC API collections endpoint @@ -232,7 +246,24 @@ def collections(collection_id=None): :returns: HTTP response """ - return get_response(api_.describe_collections(request, collection_id)) + if collection_id is None: + if request.method == 'GET': # list items + return get_response(api_.describe_collections(request)) + elif request.method == 'POST': # filter or manage items + return get_response(movingFeatures.manage_collection(request, + 'create')) + + elif request.method == 'DELETE': + return get_response( + movingFeatures.manage_collection(request, 'delete', + collection_id)) + elif request.method == 'PUT': + return get_response( + movingFeatures.manage_collection(request, 'update', + collection_id)) + else: + return get_response( + movingFeatures.get_collection(request, collection_id)) @BLUEPRINT.route('/collections/<path:collection_id>/schema') @@ -258,16 +289,14 @@ def collection_queryables(collection_id=None): :returns: HTTP response """ - return 
execute_from_flask(itemtypes_api.get_collection_queryables, request, - collection_id) + return execute_from_flask(itemtypes_api.get_collection_queryables, + request, collection_id) @BLUEPRINT.route('/collections/<path:collection_id>/items', - methods=['GET', 'POST', 'OPTIONS'], - provide_automatic_options=False) + methods=['GET', 'POST']) @BLUEPRINT.route('/collections/<path:collection_id>/items/<path:item_id>', - methods=['GET', 'PUT', 'DELETE', 'OPTIONS'], - provide_automatic_options=False) + methods=['GET', 'DELETE'],) def collection_items(collection_id, item_id=None): """ OGC API collections items endpoint @@ -277,43 +306,24 @@ def collection_items(collection_id, item_id=None): :returns: HTTP response """ - if item_id is None: if request.method == 'GET': # list items - return execute_from_flask(itemtypes_api.get_collection_items, - request, collection_id, - skip_valid_check=True) + return get_response( + movingFeatures.get_collection_items(request, collection_id)) elif request.method == 'POST': # filter or manage items - if request.content_type is not None: - if request.content_type == 'application/geo+json': - return execute_from_flask( - itemtypes_api.manage_collection_item, - request, 'create', collection_id, - skip_valid_check=True) - else: - return execute_from_flask( - itemtypes_api.post_collection_items, request, - collection_id, skip_valid_check=True) - elif request.method == 'OPTIONS': - return execute_from_flask( - itemtypes_api.manage_collection_item, request, 'options', - collection_id, skip_valid_check=True) + return get_response( + movingFeatures.manage_collection_item(request, + 'create', collection_id)) elif request.method == 'DELETE': - return execute_from_flask(itemtypes_api.manage_collection_item, - request, 'delete', collection_id, item_id, - skip_valid_check=True) - elif request.method == 'PUT': - return execute_from_flask(itemtypes_api.manage_collection_item, - request, 'update', collection_id, item_id, - skip_valid_check=True) - elif 
request.method == 'OPTIONS': - return execute_from_flask(itemtypes_api.manage_collection_item, - request, 'options', collection_id, item_id, - skip_valid_check=True) + return get_response( + movingFeatures.manage_collection_item(request, + 'delete', collection_id, + item_id)) else: - return execute_from_flask(itemtypes_api.get_collection_item, request, - collection_id, item_id) + return get_response( + movingFeatures.get_collection_item(request, + collection_id, item_id)) @BLUEPRINT.route('/collections/<path:collection_id>/coverage') @@ -547,6 +557,176 @@ def stac_catalog_path(path): return execute_from_flask(stac_api.get_stac_path, request, path) +@BLUEPRINT.route( + '/collections/<path:collection_id>/items/<path:item_id>/tgsequence', + methods=['GET', 'POST']) +@BLUEPRINT.route( + '/collections/<path:collection_id>/items/<path:item_id>/tgsequence/<path:tGeometry_id>', # noqa + methods=['DELETE']) +def collection_items_tgeometries(collection_id, item_id, tGeometry_id=None): + """ + OGC API collections items endpoint + + :param collection_id: collection identifier + :param item_id: item identifier + + :returns: HTTP response + """ + + if tGeometry_id is None: + if request.method == 'GET': # list items + return get_response( + movingFeatures.get_collection_items_tGeometry(request, + collection_id, + item_id)) + elif request.method == 'POST': # filter or manage items + return get_response( + movingFeatures.manage_collection_item_tGeometry(request, + 'create', + collection_id, + item_id)) + + elif request.method == 'DELETE': + return get_response( + movingFeatures.manage_collection_item_tGeometry(request, + 'delete', + collection_id, + item_id, + tGeometry_id)) + + +@BLUEPRINT.route( + '/collections/<path:collection_id>/items/<path:item_id>/tgsequence/<path:tGeometry_id>/velocity', # noqa + methods=['GET']) +def collection_items_tgeometries_velocity( + collection_id, item_id, tGeometry_id): + """ + OGC API collections items endpoint + + :param collection_id: 
collection identifier + :param item_id: item identifier + + :returns: HTTP response + """ + + if request.method == 'GET': # list items + return get_response( + movingFeatures + .get_collection_items_tGeometry_velocity(request, + collection_id, + item_id, + tGeometry_id)) + + +@BLUEPRINT.route( + '/collections/<path:collection_id>/items/<path:item_id>/tgsequence/<path:tGeometry_id>/distance', # noqa + methods=['GET']) +def collection_items_tgeometries_distance( + collection_id, item_id, tGeometry_id): + """ + OGC API collections items endpoint + + :param collection_id: collection identifier + :param item_id: item identifier + + :returns: HTTP response + """ + + if request.method == 'GET': # list items + return get_response( + movingFeatures + .get_collection_items_tGeometry_distance(request, + collection_id, + item_id, + tGeometry_id)) + + +@BLUEPRINT.route( + '/collections/<path:collection_id>/items/<path:item_id>/tgsequence/<path:tGeometry_id>/acceleration', # noqa + methods=['GET']) +def collection_items_tgeometries_acceleration(collection_id, item_id, + tGeometry_id): + """ + OGC API collections items endpoint + + :param collection_id: collection identifier + :param item_id: item identifier + + :returns: HTTP response + """ + + if request.method == 'GET': # list items + return get_response( + movingFeatures + .get_collection_items_tGeometry_acceleration(request, + collection_id, + item_id, + tGeometry_id)) + + +@BLUEPRINT.route( + '/collections/<path:collection_id>/items/<path:item_id>/tproperties', + methods=['GET', 'POST']) +def collection_items_tproperties(collection_id, item_id): + """ + OGC API collections items endpoint + + :param collection_id: collection identifier + :param item_id: item identifier + + :returns: HTTP response + """ + + if request.method == 'GET': # list items + return get_response( + movingFeatures.get_collection_items_tProperty(request, + collection_id, + item_id)) + elif request.method == 'POST': # filter or manage items + return 
get_response( + movingFeatures.manage_collection_item_tProperty(request, + 'create', + collection_id, + item_id)) + + +@BLUEPRINT.route( + '/collections/<path:collection_id>/items/<path:item_id>/tproperties/<path:tProperty_id>', # noqa + methods=['GET', 'POST', 'DELETE']) +def collection_items_tproperties_values(collection_id, item_id, tProperty_id): + """ + OGC API collections items endpoint + + :param collection_id: collection identifier + :param item_id: item identifier + + :returns: HTTP response + """ + + if request.method == 'GET': # list items + return get_response( + movingFeatures.get_collection_items_tProperty_value(request, + collection_id, + item_id, + tProperty_id)) + elif request.method == 'POST': # filter or manage items + return get_response( + movingFeatures + .manage_collection_item_tProperty_value(request, + 'create', + collection_id, + item_id, + tProperty_id)) + elif request.method == 'DELETE': # filter or manage items + return get_response( + movingFeatures + .manage_collection_item_tProperty(request, + 'delete', + collection_id, + item_id, + tProperty_id)) + + @ADMIN_BLUEPRINT.route('/admin/config', methods=['GET', 'PUT', 'PATCH']) def admin_config(): """ From fbfdc7306000f43cdf6c89ebeb703c7dadc05b01 Mon Sep 17 00:00:00 2001 From: ts-shimizu <ts-shimizu@apptec.co.jp> Date: Fri, 22 Nov 2024 14:59:24 +0900 Subject: [PATCH 05/14] MF-API Server update and integration (#1) modify movingfeature api * Remove class(Same as other APIs) * Created function 'get_oas_30' as OpenAPI for MovingFeatures (Same as other APIs) --- pygeoapi/api/movingfeatures.py | 4401 +++++++++++++++++++------------ tests/api/test_movingfeature.py | 671 +++-- 2 files changed, 3040 insertions(+), 2032 deletions(-) diff --git a/pygeoapi/api/movingfeatures.py b/pygeoapi/api/movingfeatures.py index 9bab3d061..7aeb1eb48 100644 --- a/pygeoapi/api/movingfeatures.py +++ b/pygeoapi/api/movingfeatures.py @@ -36,1881 +36,1766 @@ Returns content from plugins and sets responses. 
""" -from copy import deepcopy from datetime import datetime from functools import partial import json import logging import re -from typing import Any, Tuple, Union +from typing import Tuple import urllib.parse from dateutil.parser import parse as dateparse import pytz from http import HTTPStatus -from pygeoapi import l10n -from pygeoapi.log import setup_logger -from pygeoapi.linked_data import (jsonldify) from pygeoapi.plugin import PLUGINS -from pygeoapi.process.manager.base import get_manager from pymeos import (STBox, TsTzSpan, TTextSeq, TFloatSeq, TGeomPointSeq, Temporal, pymeos_initialize) import psycopg2 from pygeoapi.provider.postgresql_mobilitydb import PostgresMobilityDB -from pygeoapi.api import ( - pre_process, gzip, APIRequest, SYSTEM_LOCALE, CHARSET, - TEMPLATES, FORMAT_TYPES, F_JSON, F_HTML, F_GZIP) -from pygeoapi.util import ( - UrlPrefetcher, get_api_rules, get_base_url, render_j2_template, - to_json) +from . import (API, APIRequest, SYSTEM_LOCALE, + FORMAT_TYPES, F_JSON) +from pygeoapi.util import (to_json) + LOGGER = logging.getLogger(__name__) -class MOVING_FEATURES: - def __init__(self, config, openapi): - """ - constructor - - :param config: configuration dict - :param openapi: openapi dict - - :returns: `pygeoapi.API` instance - """ - - self.config = config - self.openapi = openapi - self.api_headers = get_api_rules(self.config).response_headers - self.base_url = get_base_url(self.config) - self.prefetcher = UrlPrefetcher() - - CHARSET[0] = config['server'].get('encoding', 'utf-8') - if config['server'].get('gzip'): - FORMAT_TYPES[F_GZIP] = 'application/gzip' - FORMAT_TYPES.move_to_end(F_JSON) - - # Process language settings (first locale is default!) 
- self.locales = l10n.get_locales(config) - self.default_locale = self.locales[0] - - if 'templates' not in self.config['server']: - self.config['server']['templates'] = {'path': TEMPLATES} - - if 'pretty_print' not in self.config['server']: - self.config['server']['pretty_print'] = False - - self.pretty_print = self.config['server']['pretty_print'] - - setup_logger(self.config['logging']) - - # Create config clone for HTML templating with modified base URL - self.tpl_config = deepcopy(self.config) - self.tpl_config['server']['url'] = self.base_url - - self.manager = get_manager(self.config) - LOGGER.info('Process manager plugin loaded') - - @gzip - @pre_process - @jsonldify - def manage_collection(self, request: Union[APIRequest, Any], - action, dataset=None) -> Tuple[dict, int, str]: - """ - Adds a collection - - :param request: A request object - :param dataset: dataset name - - :returns: tuple of headers, status code, content - """ - - headers = request.get_response_headers(SYSTEM_LOCALE) - pmdb_provider = PostgresMobilityDB() - collection_id = str(dataset) - if action in ['create', 'update']: - data = request.data - if not data: - # TODO not all processes require input, e.g. 
time-dependent or - # random value generators - msg = 'missing request data' - return self.get_exception( - HTTPStatus.BAD_REQUEST, - headers, request.format, 'MissingParameterValue', msg) - - try: - # Parse bytes data, if applicable - data = data.decode() - LOGGER.debug(data) - except (UnicodeDecodeError, AttributeError): - pass - - try: - data = json.loads(data) - except (json.decoder.JSONDecodeError, TypeError) as err: - # Input does not appear to be valid JSON - LOGGER.error(err) - msg = 'invalid request data' - return self.get_exception( - HTTPStatus.BAD_REQUEST, - headers, request.format, 'InvalidParameterValue', msg) - - if action == 'create': - try: - pmdb_provider.connect() - collection_id = pmdb_provider.post_collection(data) - except (Exception, psycopg2.Error) as error: - msg = str(error) - return self.get_exception( - HTTPStatus.BAD_REQUEST, - headers, request.format, 'ConnectingError', msg) - finally: - pmdb_provider.disconnect() - - url = '{}/{}'.format(self.get_collections_url(), collection_id) - - headers['Location'] = url - return headers, HTTPStatus.CREATED, '' - - if action == 'update': - LOGGER.debug('Updating item') - try: - pmdb_provider.connect() - pmdb_provider.put_collection(collection_id, data) - except (Exception, psycopg2.Error) as error: - msg = str(error) - return self.get_exception( - HTTPStatus.BAD_REQUEST, - headers, request.format, 'ConnectingError', msg) - finally: - pmdb_provider.disconnect() - - return headers, HTTPStatus.NO_CONTENT, '' - - if action == 'delete': - LOGGER.debug('Deleting item') - try: - pmdb_provider.connect() - pmdb_provider.delete_collection( - "AND collection_id ='{0}'".format(collection_id)) - - except (Exception, psycopg2.Error) as error: - msg = str(error) - return self.get_exception( - HTTPStatus.BAD_REQUEST, - headers, request.format, 'ConnectingError', msg) - finally: - pmdb_provider.disconnect() - - return headers, HTTPStatus.NO_CONTENT, '' - - @gzip - @pre_process - @jsonldify - def 
get_collection(self, request: Union[APIRequest, Any], - dataset=None) -> Tuple[dict, int, str]: - """ - Queries collection - - :param request: A request object - :param dataset: dataset name - - :returns: tuple of headers, status code, content - """ - pmdb_provider = PostgresMobilityDB() - collection_id = str(dataset) - if not request.is_valid(): - return self.get_format_exception(request) - headers = request.get_response_headers() +CONFORMANCE_CLASSES_MOVINGFEATURES = [ + "http://www.opengis.net/spec/ogcapi-movingfeatures-1/1.0/conf/common", + "http://www.opengis.net/spec/ogcapi-movingfeatures-1/1.0/conf/mf-collection", # noqa + "http://www.opengis.net/spec/ogcapi-movingfeatures-1/1.0/conf/movingfeatures" # noqa +] + + +def manage_collection(api: API, request: APIRequest, + action, dataset=None) -> Tuple[dict, int, str]: + """ + Adds a collection + + :param request: A request object + :param dataset: dataset name + + :returns: tuple of headers, status code, content + """ + + headers = request.get_response_headers(SYSTEM_LOCALE) + pmdb_provider = PostgresMobilityDB() + collection_id = str(dataset) + if action in ['create', 'update']: + data = request.data + if not data: + # TODO not all processes require input, e.g. 
time-dependent or + # random value generators + msg = 'missing request data' + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'MissingParameterValue', msg) + + try: + # Parse bytes data, if applicable + data = data.decode() + LOGGER.debug(data) + except (UnicodeDecodeError, AttributeError): + pass + + try: + data = json.loads(data) + except (json.decoder.JSONDecodeError, TypeError) as err: + # Input does not appear to be valid JSON + LOGGER.error(err) + msg = 'invalid request data' + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + if action == 'create': try: pmdb_provider.connect() - result = pmdb_provider.get_collection(collection_id) - if len(result) > 0: - row = result[0] - else: - msg = 'Collection not found' - LOGGER.error(msg) - return self.get_exception( - HTTPStatus.NOT_FOUND, - headers, request.format, 'NotFound', msg) + collection_id = pmdb_provider.post_collection(data) except (Exception, psycopg2.Error) as error: msg = str(error) - return self.get_exception( + return api.get_exception( HTTPStatus.BAD_REQUEST, headers, request.format, 'ConnectingError', msg) + finally: + pmdb_provider.disconnect() - collection = {} - if row is not None: - collection_id = row[0] - collection = row[1] - collection['itemType'] = 'movingfeature' - collection['id'] = collection_id + url = '{}/{}'.format(api.get_collections_url(), collection_id) - crs = None - trs = None - if 'crs' in collection: - crs = collection.pop('crs', None) - if 'trs' in collection: - trs = collection.pop('trs', None) + headers['Location'] = url + return headers, HTTPStatus.CREATED, '' - extend_stbox = STBox(row[3]) if row[3] is not None else None - lifespan = TsTzSpan(row[2]) if row[2] is not None else None + if action == 'update': + LOGGER.debug('Updating item') + try: + pmdb_provider.connect() + pmdb_provider.put_collection(collection_id, data) + except (Exception, psycopg2.Error) as error: + msg = 
str(error) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'ConnectingError', msg) + finally: + pmdb_provider.disconnect() - bbox = [] - if extend_stbox is not None: - bbox.append(extend_stbox.xmin()) - bbox.append(extend_stbox.ymin()) - if extend_stbox.zmin() is not None: - bbox.append(extend_stbox.zmin()) - bbox.append(extend_stbox.xmax()) - bbox.append(extend_stbox.ymax()) - if extend_stbox.zmax() is not None: - bbox.append(extend_stbox.zmax()) - - if crs is None: - if extend_stbox.srid() == 4326: - if extend_stbox.zmax() is not None: - crs = 'http://www.opengis.net/def/crs/OGC/0/CRS84h' - else: - crs = 'http://www.opengis.net/def/crs/\ - OGC/1.3/CRS84' + return headers, HTTPStatus.NO_CONTENT, '' - if crs is None: - crs = 'http://www.opengis.net/def/crs/OGC/1.3/CRS84' - if trs is None: - trs = 'http://www.opengis.net/def/uom/ISO-8601/0/Gregorian' - - time = [] - if lifespan is not None: - time.append(lifespan.lower().strftime("%Y-%m-%dT%H:%M:%SZ")) - time.append(lifespan.upper().strftime("%Y-%m-%dT%H:%M:%SZ")) - else: - if extend_stbox is not None: - if extend_stbox.tmin() is not None: - time.append(extend_stbox.tmin().strftime( - "%Y-%m-%dT%H:%M:%SZ")) - time.append(extend_stbox.tmax().strftime( - "%Y-%m-%dT%H:%M:%SZ")) - - collection['extent'] = { - 'spatial': { - 'bbox': bbox, - 'crs': crs - }, - 'temporal': { - 'interval': time, - 'trs': trs - } - } + if action == 'delete': + LOGGER.debug('Deleting item') + try: + pmdb_provider.connect() + pmdb_provider.delete_collection( + "AND collection_id ='{0}'".format(collection_id)) - collection['links'] = [] - collection['links'].append({ - 'href': '{}/{}'.format( - self.get_collections_url(), collection_id), - 'rel': request.get_linkrel(F_JSON), - 'type': FORMAT_TYPES[F_JSON] - }) - - return headers, HTTPStatus.OK, to_json(collection, self.pretty_print) - - @gzip - @pre_process - def get_collection_items( - self, request: Union[APIRequest, Any], - dataset) -> Tuple[dict, int, str]: - 
""" - Queries collection - - :param request: A request object - :param dataset: dataset name - - :returns: tuple of headers, status code, content - """ - - # Set Content-Language to system locale until provider locale - # has been determined - if not request.is_valid(): - return self.get_format_exception(request) - headers = request.get_response_headers(SYSTEM_LOCALE) - - excuted, collections = get_list_of_collections_id() - collection_id = dataset - if excuted is False: - msg = str(collections) - return self.get_exception( + except (Exception, psycopg2.Error) as error: + msg = str(error) + return api.get_exception( HTTPStatus.BAD_REQUEST, headers, request.format, 'ConnectingError', msg) + finally: + pmdb_provider.disconnect() + + return headers, HTTPStatus.NO_CONTENT, '' + + +def get_collection(api: API, request: APIRequest, + dataset=None) -> Tuple[dict, int, str]: + """ + Queries collection + + :param request: A request object + :param dataset: dataset name + + :returns: tuple of headers, status code, content + """ + pmdb_provider = PostgresMobilityDB() + collection_id = str(dataset) + if not request.is_valid(): + return api.get_format_exception(request) + headers = request.get_response_headers() - if collection_id not in collections: + try: + pmdb_provider.connect() + result = pmdb_provider.get_collection(collection_id) + if len(result) > 0: + row = result[0] + else: msg = 'Collection not found' LOGGER.error(msg) - return self.get_exception( + return api.get_exception( HTTPStatus.NOT_FOUND, headers, request.format, 'NotFound', msg) - LOGGER.debug('Processing query parameters') + except (Exception, psycopg2.Error) as error: + msg = str(error) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'ConnectingError', msg) - LOGGER.debug('Processing offset parameter') - try: - offset = int(request.params.get('offset')) - if offset < 0: - msg = 'offset value should be positive or zero' - return self.get_exception( - HTTPStatus.BAD_REQUEST, 
- headers, request.format, 'InvalidParameterValue', msg) - except TypeError as err: - LOGGER.warning(err) - offset = 0 - except ValueError: - msg = 'offset value should be an integer' - return self.get_exception( - HTTPStatus.BAD_REQUEST, - headers, request.format, 'InvalidParameterValue', msg) + collection = {} + if row is not None: + pymeos_initialize() + collection_id = row[0] + collection = row[1] + collection['itemType'] = 'movingfeature' + collection['id'] = collection_id - LOGGER.debug('Processing limit parameter') - try: - limit = int(request.params.get('limit')) - # TODO: We should do more validation, against the min and max - # allowed by the server configuration - if limit <= 0: - msg = 'limit value should be strictly positive' - return self.get_exception( - HTTPStatus.BAD_REQUEST, - headers, request.format, 'InvalidParameterValue', msg) - if limit > 10000: - msg = 'limit value should be less than or equal to 10000' - return self.get_exception( - HTTPStatus.BAD_REQUEST, - headers, request.format, 'InvalidParameterValue', msg) - except TypeError as err: - LOGGER.warning(err) - limit = int(self.config['server']['limit']) - except ValueError: - msg = 'limit value should be an integer' - return self.get_exception( - HTTPStatus.BAD_REQUEST, - headers, request.format, 'InvalidParameterValue', msg) + crs = None + trs = None + if 'crs' in collection: + crs = collection.pop('crs', None) + if 'trs' in collection: + trs = collection.pop('trs', None) + + extend_stbox = STBox(row[3]) if row[3] is not None else None + lifespan = TsTzSpan(row[2]) if row[2] is not None else None + + bbox = [] + if extend_stbox is not None: + bbox.append(extend_stbox.xmin()) + bbox.append(extend_stbox.ymin()) + if extend_stbox.zmin() is not None: + bbox.append(extend_stbox.zmin()) + bbox.append(extend_stbox.xmax()) + bbox.append(extend_stbox.ymax()) + if extend_stbox.zmax() is not None: + bbox.append(extend_stbox.zmax()) - LOGGER.debug('Processing bbox parameter') + if crs is None: + if 
extend_stbox.srid() == 4326: + if extend_stbox.zmax() is not None: + crs = 'http://www.opengis.net/def/crs/OGC/0/CRS84h' + else: + crs = 'http://www.opengis.net/def/crs/\ + OGC/1.3/CRS84' + + if crs is None: + crs = 'http://www.opengis.net/def/crs/OGC/1.3/CRS84' + if trs is None: + trs = 'http://www.opengis.net/def/uom/ISO-8601/0/Gregorian' + + time = [] + if lifespan is not None: + time.append(lifespan.lower().strftime("%Y-%m-%dT%H:%M:%SZ")) + time.append(lifespan.upper().strftime("%Y-%m-%dT%H:%M:%SZ")) + else: + if extend_stbox is not None: + if extend_stbox.tmin() is not None: + time.append(extend_stbox.tmin().strftime( + "%Y-%m-%dT%H:%M:%SZ")) + time.append(extend_stbox.tmax().strftime( + "%Y-%m-%dT%H:%M:%SZ")) + + collection['extent'] = { + 'spatial': { + 'bbox': bbox, + 'crs': crs + }, + 'temporal': { + 'interval': time, + 'trs': trs + } + } - bbox = request.params.get('bbox') + collection['links'] = [] + collection['links'].append({ + 'href': '{}/{}'.format( + api.get_collections_url(), collection_id), + 'rel': request.get_linkrel(F_JSON), + 'type': FORMAT_TYPES[F_JSON] + }) - if bbox is None: - bbox = [] - else: - try: - bbox = validate_bbox(bbox) - except ValueError as err: - msg = str(err) - return self.get_exception( - HTTPStatus.BAD_REQUEST, - headers, request.format, 'InvalidParameterValue', msg) - - LOGGER.debug('Processing datetime parameter') - datetime_ = request.params.get('datetime') + return headers, HTTPStatus.OK, to_json(collection, api.pretty_print) + + +def get_collection_items( + api: API, request: APIRequest, + dataset) -> Tuple[dict, int, str]: + """ + Queries collection + + :param request: A request object + :param dataset: dataset name + + :returns: tuple of headers, status code, content + """ + + # Set Content-Language to system locale until provider locale + # has been determined + if not request.is_valid(): + return api.get_format_exception(request) + headers = request.get_response_headers(SYSTEM_LOCALE) + + excuted, collections = 
get_list_of_collections_id() + collection_id = dataset + if excuted is False: + msg = str(collections) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'ConnectingError', msg) + + if collection_id not in collections: + msg = 'Collection not found' + LOGGER.error(msg) + return api.get_exception( + HTTPStatus.NOT_FOUND, + headers, request.format, 'NotFound', msg) + LOGGER.debug('Processing query parameters') + + LOGGER.debug('Processing offset parameter') + try: + offset = int(request.params.get('offset')) + if offset < 0: + msg = 'offset value should be positive or zero' + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + except TypeError as err: + LOGGER.warning(err) + offset = 0 + except ValueError: + msg = 'offset value should be an integer' + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + LOGGER.debug('Processing limit parameter') + try: + limit = int(request.params.get('limit')) + # TODO: We should do more validation, against the min and max + # allowed by the server configuration + if limit <= 0: + msg = 'limit value should be strictly positive' + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + if limit > 10000: + msg = 'limit value should be less than or equal to 10000' + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + except TypeError as err: + LOGGER.warning(err) + limit = int(api.config['server']['limit']) + except ValueError: + msg = 'limit value should be an integer' + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + LOGGER.debug('Processing bbox parameter') + + bbox = request.params.get('bbox') + + if bbox is None: + bbox = [] + else: try: - datetime_ = validate_datetime(datetime_) + bbox = validate_bbox(bbox) 
except ValueError as err: msg = str(err) - return self.get_exception( + return api.get_exception( HTTPStatus.BAD_REQUEST, headers, request.format, 'InvalidParameterValue', msg) - sub_trajectory = request.params.get('subTrajectory') - if sub_trajectory is None: - sub_trajectory = False - - LOGGER.debug('Querying provider') - LOGGER.debug('offset: {}'.format(offset)) - LOGGER.debug('limit: {}'.format(limit)) - LOGGER.debug('bbox: {}'.format(bbox)) - LOGGER.debug('datetime: {}'.format(datetime_)) - - pmdb_provider = PostgresMobilityDB() - content = { - "type": "FeatureCollection", - "features": [], - "crs": {}, - "trs": {}, - "links": [] - } - - try: - pmdb_provider.connect() - result, number_matched, number_returned = \ - pmdb_provider.get_features(collection_id=collection_id, - bbox=bbox, datetime=datetime_, - limit=limit, offset=offset, - sub_trajectory=sub_trajectory) - except (Exception, psycopg2.Error) as error: - msg = str(error) - return self.get_exception( - HTTPStatus.BAD_REQUEST, - headers, request.format, 'ConnectingError', msg) - - mfeatures = [] - crs = None - trs = None + LOGGER.debug('Processing datetime parameter') + datetime_ = request.params.get('datetime') + try: + datetime_ = validate_datetime(datetime_) + except ValueError as err: + msg = str(err) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) - split_mfeature = {} - for i in range(len(result)): - mfeature_id = str(result[i][1]) - if mfeature_id not in split_mfeature: - split_mfeature[mfeature_id] = [] - split_mfeature[mfeature_id].append(i) + sub_trajectory = request.params.get('subTrajectory') + if sub_trajectory is None: + sub_trajectory = False - pymeos_initialize() - for key, mfeature_row_index in split_mfeature.items(): - row = result[mfeature_row_index[0]] + LOGGER.debug('Querying provider') + LOGGER.debug('offset: {}'.format(offset)) + LOGGER.debug('limit: {}'.format(limit)) + LOGGER.debug('bbox: {}'.format(bbox)) + 
LOGGER.debug('datetime: {}'.format(datetime_)) - mfeature_id = row[1] - mfeature = row[3] - mfeature['id'] = mfeature_id - mfeature['type'] = 'Feature' + pmdb_provider = PostgresMobilityDB() + content = { + "type": "FeatureCollection", + "features": [], + "crs": {}, + "trs": {}, + "links": [] + } - if 'crs' in mfeature and crs is None: - crs = mfeature['crs'] - if 'trs' in mfeature and trs is None: - trs = mfeature['trs'] + try: + pmdb_provider.connect() + result, number_matched, number_returned = \ + pmdb_provider.get_features(collection_id=collection_id, + bbox=bbox, datetime=datetime_, + limit=limit, offset=offset, + sub_trajectory=sub_trajectory) + except (Exception, psycopg2.Error) as error: + msg = str(error) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'ConnectingError', msg) + + mfeatures = [] + crs = None + trs = None + + split_mfeature = {} + for i in range(len(result)): + mfeature_id = str(result[i][1]) + if mfeature_id not in split_mfeature: + split_mfeature[mfeature_id] = [] + split_mfeature[mfeature_id].append(i) + + pymeos_initialize() + for key, mfeature_row_index in split_mfeature.items(): + row = result[mfeature_row_index[0]] + + mfeature_id = row[1] + mfeature = row[3] + mfeature['id'] = mfeature_id + mfeature['type'] = 'Feature' + + if 'crs' in mfeature and crs is None: + crs = mfeature['crs'] + if 'trs' in mfeature and trs is None: + trs = mfeature['trs'] + + if row[2] is not None: + mfeature['geometry'] = json.loads(row[2]) + else: + mfeature['geometry'] = None - if row[2] is not None: - mfeature['geometry'] = json.loads(row[2]) - else: - mfeature['geometry'] = None - - if 'properties' not in mfeature: - mfeature['properties'] = None - - if sub_trajectory or sub_trajectory == "true": - prisms = [] - for row_index in mfeature_row_index: - row_tgeometory = result[int(row_index)] - if row_tgeometory[7] is not None: - mfeature_check = row_tgeometory[1] - if mfeature_check == mfeature_id: - temporal_geometry = 
json.loads( - Temporal.as_mfjson( - TGeomPointSeq( - str(row_tgeometory[7]).replace( - "'", "")), - False)) - if 'crs' in temporal_geometry and crs is None: - crs = temporal_geometry['crs'] - if 'trs' in temporal_geometry and trs is None: - trs = temporal_geometry['trs'] - temporal_geometry = \ - pmdb_provider.\ - convert_temporalgeometry_to_old_version( - temporal_geometry) - temporal_geometry['id'] = row_tgeometory[6] - prisms.append(temporal_geometry) - mfeature['temporalGeometry'] = prisms - - extend_stbox = STBox(row[5]) if row[5] is not None else None - lifespan = TsTzSpan(row[4]) if row[4] is not None else None - - bbox = [] - if extend_stbox is not None: - bbox.append(extend_stbox.xmin()) - bbox.append(extend_stbox.ymin()) - if extend_stbox.zmin() is not None: - bbox.append(extend_stbox.zmin()) - bbox.append(extend_stbox.xmax()) - bbox.append(extend_stbox.ymax()) - if extend_stbox.zmax() is not None: - bbox.append(extend_stbox.zmax()) - mfeature['bbox'] = bbox - - time = [] - if lifespan is not None: - time.append(lifespan.lower().strftime("%Y-%m-%dT%H:%M:%SZ")) - time.append(lifespan.upper().strftime("%Y-%m-%dT%H:%M:%SZ")) - else: - if extend_stbox is not None: - if extend_stbox.tmin() is not None: - time.append(extend_stbox.tmin().strftime( - "%Y-%m-%dT%H:%M:%SZ")) - time.append(extend_stbox.tmax().strftime( - "%Y-%m-%dT%H:%M:%SZ")) - mfeature['time'] = time - - if 'crs' not in mfeature: - mfeature['crs'] = { - "type": "Name", - "properties": "urn:ogc:def:crs:OGC:1.3:CRS84" - } - if 'trs' not in mfeature: - mfeature['trs'] = { - "type": "Name", - "properties": "urn:ogc:data:time:iso8601" - } - mfeatures.append(mfeature) + if 'properties' not in mfeature: + mfeature['properties'] = None - content['features'] = mfeatures - if crs is not None: - content['crs'] = crs + if sub_trajectory or sub_trajectory == "true": + prisms = [] + for row_index in mfeature_row_index: + row_tgeometory = result[int(row_index)] + if row_tgeometory[7] is not None: + 
mfeature_check = row_tgeometory[1] + if mfeature_check == mfeature_id: + temporal_geometry = json.loads( + Temporal.as_mfjson( + TGeomPointSeq( + str(row_tgeometory[7]).replace( + "'", "")), + False)) + if 'crs' in temporal_geometry and crs is None: + crs = temporal_geometry['crs'] + if 'trs' in temporal_geometry and trs is None: + trs = temporal_geometry['trs'] + temporal_geometry = \ + pmdb_provider.\ + convert_temporalgeometry_to_old_version( + temporal_geometry) + temporal_geometry['id'] = row_tgeometory[6] + prisms.append(temporal_geometry) + mfeature['temporalGeometry'] = prisms + + extend_stbox = STBox(row[5]) if row[5] is not None else None + lifespan = TsTzSpan(row[4]) if row[4] is not None else None + + bbox = [] + if extend_stbox is not None: + bbox.append(extend_stbox.xmin()) + bbox.append(extend_stbox.ymin()) + if extend_stbox.zmin() is not None: + bbox.append(extend_stbox.zmin()) + bbox.append(extend_stbox.xmax()) + bbox.append(extend_stbox.ymax()) + if extend_stbox.zmax() is not None: + bbox.append(extend_stbox.zmax()) + mfeature['bbox'] = bbox + + time = [] + if lifespan is not None: + time.append(lifespan.lower().strftime("%Y-%m-%dT%H:%M:%SZ")) + time.append(lifespan.upper().strftime("%Y-%m-%dT%H:%M:%SZ")) else: - content['crs'] = { + if extend_stbox is not None: + if extend_stbox.tmin() is not None: + time.append(extend_stbox.tmin().strftime( + "%Y-%m-%dT%H:%M:%SZ")) + time.append(extend_stbox.tmax().strftime( + "%Y-%m-%dT%H:%M:%SZ")) + mfeature['time'] = time + + if 'crs' not in mfeature: + mfeature['crs'] = { "type": "Name", "properties": "urn:ogc:def:crs:OGC:1.3:CRS84" } - - if trs is not None: - content['trs'] = trs - else: - content['trs'] = { + if 'trs' not in mfeature: + mfeature['trs'] = { "type": "Name", "properties": "urn:ogc:data:time:iso8601" } + mfeatures.append(mfeature) + + content['features'] = mfeatures + if crs is not None: + content['crs'] = crs + else: + content['crs'] = { + "type": "Name", + "properties": 
"urn:ogc:def:crs:OGC:1.3:CRS84" + } - # TODO: translate titles - uri = '{}/{}/items'.format(self.get_collections_url(), collection_id) - - serialized_query_params = '' - for k, v in request.params.items(): - if k not in ('f', 'offset'): - serialized_query_params += '&' - serialized_query_params += urllib.parse.quote(k, safe='') - serialized_query_params += '=' - serialized_query_params += urllib.parse.quote(str(v), safe=',') + if trs is not None: + content['trs'] = trs + else: + content['trs'] = { + "type": "Name", + "properties": "urn:ogc:data:time:iso8601" + } - content['links'] = [ + # TODO: translate titles + uri = '{}/{}/items'.format(api.get_collections_url(), collection_id) + + serialized_query_params = '' + for k, v in request.params.items(): + if k not in ('f', 'offset'): + serialized_query_params += '&' + serialized_query_params += urllib.parse.quote(k, safe='') + serialized_query_params += '=' + serialized_query_params += urllib.parse.quote(str(v), safe=',') + + content['links'] = [ + {'href': '{}?offset={}{}'.format( + uri, offset, serialized_query_params), + 'rel': request.get_linkrel(F_JSON), + 'type': FORMAT_TYPES[F_JSON]}] + + if len(content['features']) == limit: + next_ = offset + limit + content['links'].append( {'href': '{}?offset={}{}'.format( - uri, offset, serialized_query_params), - 'rel': request.get_linkrel(F_JSON), - 'type': FORMAT_TYPES[F_JSON]}] - - if len(content['features']) == limit: - next_ = offset + limit - content['links'].append( - {'href': '{}?offset={}{}'.format( - uri, next_, serialized_query_params), - 'type': 'application/geo+json', 'rel': 'next'}) - - content['timeStamp'] = datetime.utcnow().strftime( - '%Y-%m-%dT%H:%M:%S.%fZ') - - content['numberMatched'] = number_matched - content['numberReturned'] = number_returned - return headers, HTTPStatus.OK, to_json(content, self.pretty_print) - - @gzip - @pre_process - def manage_collection_item( - self, request: Union[APIRequest, Any], - action, dataset, identifier=None) -> 
Tuple[dict, int, str]: - """ - Adds an item to a collection - - :param request: A request object - :param dataset: dataset name - - :returns: tuple of headers, status code, content - """ - - if not request.is_valid(PLUGINS['formatter'].keys()): - return self.get_format_exception(request) - - # Set Content-Language to system locale until provider locale - # has been determined - headers = request.get_response_headers(SYSTEM_LOCALE) - - pmdb_provider = PostgresMobilityDB() - excuted, collections = get_list_of_collections_id() - - if excuted is False: - msg = str(collections) - return self.get_exception( - HTTPStatus.BAD_REQUEST, - headers, request.format, 'ConnectingError', msg) + uri, next_, serialized_query_params), + 'type': 'application/geo+json', 'rel': 'next'}) - if dataset not in collections: - msg = 'Collection not found' + content['timeStamp'] = datetime.utcnow().strftime( + '%Y-%m-%dT%H:%M:%S.%fZ') + + content['numberMatched'] = number_matched + content['numberReturned'] = number_returned + return headers, HTTPStatus.OK, to_json(content, api.pretty_print) + + +def manage_collection_item( + api: API, request: APIRequest, + action, dataset, identifier=None) -> Tuple[dict, int, str]: + """ + Adds an item to a collection + + :param request: A request object + :param dataset: dataset name + + :returns: tuple of headers, status code, content + """ + + if not request.is_valid(PLUGINS['formatter'].keys()): + return api.get_format_exception(request) + + # Set Content-Language to system locale until provider locale + # has been determined + headers = request.get_response_headers(SYSTEM_LOCALE) + + pmdb_provider = PostgresMobilityDB() + excuted, collections = get_list_of_collections_id() + + if excuted is False: + msg = str(collections) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'ConnectingError', msg) + + if dataset not in collections: + msg = 'Collection not found' + LOGGER.error(msg) + return api.get_exception( + 
HTTPStatus.NOT_FOUND, + headers, request.format, 'NotFound', msg) + + collection_id = dataset + mfeature_id = identifier + if action == 'create': + if not request.data: + msg = 'No data found' LOGGER.error(msg) - return self.get_exception( - HTTPStatus.NOT_FOUND, - headers, request.format, 'NotFound', msg) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) - collection_id = dataset - mfeature_id = identifier - if action == 'create': - if not request.data: - msg = 'No data found' - LOGGER.error(msg) - return self.get_exception( - HTTPStatus.BAD_REQUEST, - headers, request.format, 'InvalidParameterValue', msg) - - data = request.data - try: - # Parse bytes data, if applicable - data = data.decode() - LOGGER.debug(data) - except (UnicodeDecodeError, AttributeError): - pass - - try: - data = json.loads(data) - except (json.decoder.JSONDecodeError, TypeError) as err: - # Input does not appear to be valid JSON - LOGGER.error(err) - msg = 'invalid request data' - return self.get_exception( - HTTPStatus.BAD_REQUEST, - headers, request.format, 'InvalidParameterValue', msg) - - if check_required_field_feature(data) is False: - # TODO not all processes require input - msg = 'The required tag (e.g., type,temporalgeometry) \ - is missing from the request data.' - return self.get_exception( - HTTPStatus.NOT_IMPLEMENTED, - headers, request.format, 'MissingParameterValue', msg) - - LOGGER.debug('Creating item') - try: - pmdb_provider.connect() - if data['type'] == 'FeatureCollection': - for feature in data['features']: - if check_required_field_feature(feature) is False: - # TODO not all processes require input - msg = 'The required tag \ - (e.g., type,temporalgeometry) \ - is missing from the request data.' 
- return self.get_exception( - HTTPStatus.NOT_IMPLEMENTED, - headers, request.format, - 'MissingParameterValue', msg) - mfeature_id = pmdb_provider.post_movingfeature( - collection_id, feature) - else: - mfeature_id = pmdb_provider.post_movingfeature( - collection_id, data) - except (Exception, psycopg2.Error) as error: - msg = str(error) - return self.get_exception( - HTTPStatus.BAD_REQUEST, - headers, request.format, 'ConnectingError', msg) - finally: - pmdb_provider.disconnect() - - headers['Location'] = '{}/{}/items/{}'.format( - self.get_collections_url(), dataset, mfeature_id) - - return headers, HTTPStatus.CREATED, '' - - if action == 'delete': - LOGGER.debug('Deleting item') - - try: - pmdb_provider.connect() - pmdb_provider.delete_movingfeature( - "AND mfeature_id ='{0}'".format(mfeature_id)) - - except (Exception, psycopg2.Error) as error: - msg = str(error) - return self.get_exception( - HTTPStatus.BAD_REQUEST, - headers, request.format, 'ConnectingError', msg) - finally: - pmdb_provider.disconnect() - - return headers, HTTPStatus.NO_CONTENT, '' - - @gzip - @pre_process - def get_collection_item(self, request: Union[APIRequest, Any], - dataset, identifier) -> Tuple[dict, int, str]: - """ - Get a single collection item - - :param request: A request object - :param dataset: dataset name - :param identifier: item identifier - - :returns: tuple of headers, status code, content - """ - - pmdb_provider = PostgresMobilityDB() - collection_id = str(dataset) - mfeature_id = str(identifier) - if not request.is_valid(): - return self.get_format_exception(request) - headers = request.get_response_headers() + data = request.data + try: + # Parse bytes data, if applicable + data = data.decode() + LOGGER.debug(data) + except (UnicodeDecodeError, AttributeError): + pass + + try: + data = json.loads(data) + except (json.decoder.JSONDecodeError, TypeError) as err: + # Input does not appear to be valid JSON + LOGGER.error(err) + msg = 'invalid request data' + return 
api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + if check_required_field_feature(data) is False: + # TODO not all processes require input + msg = 'The required tag (e.g., type,temporalgeometry) \ + is missing from the request data.' + return api.get_exception( + HTTPStatus.NOT_IMPLEMENTED, + headers, request.format, 'MissingParameterValue', msg) + LOGGER.debug('Creating item') try: pmdb_provider.connect() - result = pmdb_provider.get_feature(collection_id, mfeature_id) - if len(result) > 0: - row = result[0] + if data['type'] == 'FeatureCollection': + for feature in data['features']: + if check_required_field_feature(feature) is False: + # TODO not all processes require input + msg = 'The required tag \ + (e.g., type,temporalgeometry) \ + is missing from the request data.' + return api.get_exception( + HTTPStatus.NOT_IMPLEMENTED, + headers, request.format, + 'MissingParameterValue', msg) + mfeature_id = pmdb_provider.post_movingfeature( + collection_id, feature) else: - msg = 'Feature not found' - LOGGER.error(msg) - return self.get_exception( - HTTPStatus.NOT_FOUND, - headers, request.format, 'NotFound', msg) + mfeature_id = pmdb_provider.post_movingfeature( + collection_id, data) except (Exception, psycopg2.Error) as error: msg = str(error) - return self.get_exception( + return api.get_exception( HTTPStatus.BAD_REQUEST, headers, request.format, 'ConnectingError', msg) + finally: + pmdb_provider.disconnect() - mfeature = {} - if row is not None: - mfeature_id = row[1] - mfeature = row[3] - mfeature['id'] = mfeature_id - mfeature['type'] = 'Feature' + headers['Location'] = '{}/{}/items/{}'.format( + api.get_collections_url(), dataset, mfeature_id) - if row[2] is not None: - mfeature['geometry'] = json.loads(row[2]) + return headers, HTTPStatus.CREATED, '' - extend_stbox = STBox(row[5]) if row[5] is not None else None - lifespan = TsTzSpan(row[4]) if row[4] is not None else None + if action == 'delete': + 
LOGGER.debug('Deleting item') - bbox = [] - if extend_stbox is not None: - bbox.append(extend_stbox.xmin()) - bbox.append(extend_stbox.ymin()) - if extend_stbox.zmin() is not None: - bbox.append(extend_stbox.zmin()) - bbox.append(extend_stbox.xmax()) - bbox.append(extend_stbox.ymax()) - if extend_stbox.zmax() is not None: - bbox.append(extend_stbox.zmax()) - mfeature['bbox'] = bbox - - print(lifespan) - time = [] - if lifespan is not None: - time.append(lifespan.lower().strftime("%Y-%m-%dT%H:%M:%SZ")) - time.append(lifespan.upper().strftime("%Y-%m-%dT%H:%M:%SZ")) - else: - if extend_stbox is not None: - if extend_stbox.tmin() is not None: - time.append(extend_stbox.tmin().strftime( - "%Y-%m-%dT%H:%M:%SZ")) - time.append(extend_stbox.tmax().strftime( - "%Y-%m-%dT%H:%M:%SZ")) - mfeature['time'] = time - - if 'crs' not in mfeature: - mfeature['crs'] = { - "type": "Name", - "properties": "urn:ogc:def:crs:OGC:1.3:CRS84" - } - if 'trs' not in mfeature: - mfeature['trs'] = { - "type": "Name", - "properties": "urn:ogc:data:time:iso8601" - } - mfeature['links'] = [] - mfeature['links'].append({ - 'href': '{}/{}/items/{}'.format( - self.get_collections_url(), collection_id, mfeature_id), - 'rel': request.get_linkrel(F_JSON), - 'type': FORMAT_TYPES[F_JSON] - }) - return headers, HTTPStatus.OK, to_json(mfeature, self.pretty_print) - - @gzip - @pre_process - def get_collection_items_tGeometry(self, - request: Union[APIRequest, Any], - dataset, identifier) \ - -> Tuple[dict, int, str]: - """ - Get temporal Geometry of collection item - - :param request: A request object - :param dataset: dataset name - :param identifier: item identifier - - :returns: tuple of headers, status code, content - """ - - if not request.is_valid(): - return self.get_format_exception(request) - headers = request.get_response_headers(SYSTEM_LOCALE) - - excuted, feature_list = get_list_of_features_id() - if excuted is False: - msg = str(feature_list) - return self.get_exception( + try: + 
pmdb_provider.connect() + pmdb_provider.delete_movingfeature( + "AND mfeature_id ='{0}'".format(mfeature_id)) + + except (Exception, psycopg2.Error) as error: + msg = str(error) + return api.get_exception( HTTPStatus.BAD_REQUEST, headers, request.format, 'ConnectingError', msg) + finally: + pmdb_provider.disconnect() + + return headers, HTTPStatus.NO_CONTENT, '' + + +def get_collection_item(api: API, request: APIRequest, + dataset, identifier) -> Tuple[dict, int, str]: + """ + Get a single collection item + + :param request: A request object + :param dataset: dataset name + :param identifier: item identifier + + :returns: tuple of headers, status code, content + """ + + pmdb_provider = PostgresMobilityDB() + collection_id = str(dataset) + mfeature_id = str(identifier) + if not request.is_valid(): + return api.get_format_exception(request) + headers = request.get_response_headers() - if [dataset, identifier] not in feature_list: + try: + pmdb_provider.connect() + result = pmdb_provider.get_feature(collection_id, mfeature_id) + if len(result) > 0: + row = result[0] + else: msg = 'Feature not found' LOGGER.error(msg) - return self.get_exception( + return api.get_exception( HTTPStatus.NOT_FOUND, headers, request.format, 'NotFound', msg) + except (Exception, psycopg2.Error) as error: + msg = str(error) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'ConnectingError', msg) + + mfeature = {} + if row is not None: + mfeature_id = row[1] + mfeature = row[3] + mfeature['id'] = mfeature_id + mfeature['type'] = 'Feature' + + if row[2] is not None: + mfeature['geometry'] = json.loads(row[2]) + + extend_stbox = STBox(row[5]) if row[5] is not None else None + lifespan = TsTzSpan(row[4]) if row[4] is not None else None + + bbox = [] + if extend_stbox is not None: + bbox.append(extend_stbox.xmin()) + bbox.append(extend_stbox.ymin()) + if extend_stbox.zmin() is not None: + bbox.append(extend_stbox.zmin()) + bbox.append(extend_stbox.xmax()) + 
bbox.append(extend_stbox.ymax()) + if extend_stbox.zmax() is not None: + bbox.append(extend_stbox.zmax()) + mfeature['bbox'] = bbox + + time = [] + if lifespan is not None: + time.append(lifespan.lower().strftime("%Y-%m-%dT%H:%M:%SZ")) + time.append(lifespan.upper().strftime("%Y-%m-%dT%H:%M:%SZ")) + else: + if extend_stbox is not None: + if extend_stbox.tmin() is not None: + time.append(extend_stbox.tmin().strftime( + "%Y-%m-%dT%H:%M:%SZ")) + time.append(extend_stbox.tmax().strftime( + "%Y-%m-%dT%H:%M:%SZ")) + mfeature['time'] = time + + if 'crs' not in mfeature: + mfeature['crs'] = { + "type": "Name", + "properties": "urn:ogc:def:crs:OGC:1.3:CRS84" + } + if 'trs' not in mfeature: + mfeature['trs'] = { + "type": "Name", + "properties": "urn:ogc:data:time:iso8601" + } + mfeature['links'] = [] + mfeature['links'].append({ + 'href': '{}/{}/items/{}'.format( + api.get_collections_url(), collection_id, mfeature_id), + 'rel': request.get_linkrel(F_JSON), + 'type': FORMAT_TYPES[F_JSON] + }) + return headers, HTTPStatus.OK, to_json(mfeature, api.pretty_print) + + +def get_collection_items_tGeometry(api: API, request: APIRequest, + dataset, identifier) \ + -> Tuple[dict, int, str]: + """ + Get temporal Geometry of collection item - collection_id = dataset - mfeature_id = identifier - LOGGER.debug('Processing query parameters') + :param request: A request object + :param dataset: dataset name + :param identifier: item identifier - LOGGER.debug('Processing offset parameter') - try: - offset = int(request.params.get('offset')) - if offset < 0: - msg = 'offset value should be positive or zero' - return self.get_exception( - HTTPStatus.BAD_REQUEST, - headers, request.format, 'InvalidParameterValue', msg) - except TypeError as err: - LOGGER.warning(err) - offset = 0 - except ValueError: - msg = 'offset value should be an integer' - return self.get_exception( - HTTPStatus.BAD_REQUEST, - headers, request.format, 'InvalidParameterValue', msg) + :returns: tuple of headers, status 
code, content + """ - LOGGER.debug('Processing limit parameter') - try: - limit = int(request.params.get('limit')) - # TODO: We should do more validation, against the min and max - # allowed by the server configuration - if limit <= 0: - msg = 'limit value should be strictly positive' - return self.get_exception( - HTTPStatus.BAD_REQUEST, - headers, request.format, 'InvalidParameterValue', msg) - if limit > 10000: - msg = 'limit value should be less than or equal to 10000' - return self.get_exception( - HTTPStatus.BAD_REQUEST, - headers, request.format, 'InvalidParameterValue', msg) - except TypeError as err: - LOGGER.warning(err) - limit = int(self.config['server']['limit']) - except ValueError: - msg = 'limit value should be an integer' - return self.get_exception( + if not request.is_valid(): + return api.get_format_exception(request) + headers = request.get_response_headers(SYSTEM_LOCALE) + + excuted, feature_list = get_list_of_features_id() + if excuted is False: + msg = str(feature_list) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'ConnectingError', msg) + + if [dataset, identifier] not in feature_list: + msg = 'Feature not found' + LOGGER.error(msg) + return api.get_exception( + HTTPStatus.NOT_FOUND, + headers, request.format, 'NotFound', msg) + + collection_id = dataset + mfeature_id = identifier + LOGGER.debug('Processing query parameters') + + LOGGER.debug('Processing offset parameter') + try: + offset = int(request.params.get('offset')) + if offset < 0: + msg = 'offset value should be positive or zero' + return api.get_exception( HTTPStatus.BAD_REQUEST, headers, request.format, 'InvalidParameterValue', msg) - - LOGGER.debug('Processing bbox parameter') - - bbox = request.params.get('bbox') - - if bbox is None: - bbox = [] - else: - try: - bbox = validate_bbox(bbox) - except ValueError as err: - msg = str(err) - return self.get_exception( - HTTPStatus.BAD_REQUEST, - headers, request.format, 'InvalidParameterValue', 
msg) - - leaf_ = request.params.get('leaf') - LOGGER.debug('Processing leaf parameter') - try: - leaf_ = validate_leaf(leaf_) - except ValueError as err: - msg = str(err) - return self.get_exception( + except TypeError as err: + LOGGER.warning(err) + offset = 0 + except ValueError: + msg = 'offset value should be an integer' + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + LOGGER.debug('Processing limit parameter') + try: + limit = int(request.params.get('limit')) + # TODO: We should do more validation, against the min and max + # allowed by the server configuration + if limit <= 0: + msg = 'limit value should be strictly positive' + return api.get_exception( HTTPStatus.BAD_REQUEST, headers, request.format, 'InvalidParameterValue', msg) - - sub_trajectory = request.params.get('subTrajectory') - if sub_trajectory is None: - sub_trajectory = False - - if (leaf_ != '' and leaf_ is not None) \ - and (sub_trajectory or sub_trajectory == 'true'): - msg = 'Cannot use both parameter `subTrajectory` \ - and `leaf` at the same time' - return self.get_exception( + if limit > 10000: + msg = 'limit value should be less than or equal to 10000' + return api.get_exception( HTTPStatus.BAD_REQUEST, headers, request.format, 'InvalidParameterValue', msg) - - LOGGER.debug('Processing datetime parameter') - datetime_ = request.params.get('datetime') + except TypeError as err: + LOGGER.warning(err) + limit = int(api.config['server']['limit']) + except ValueError: + msg = 'limit value should be an integer' + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + LOGGER.debug('Processing bbox parameter') + + bbox = request.params.get('bbox') + + if bbox is None: + bbox = [] + else: try: - datetime_ = validate_datetime(datetime_) + bbox = validate_bbox(bbox) except ValueError as err: msg = str(err) - return self.get_exception( + return api.get_exception( 
HTTPStatus.BAD_REQUEST, headers, request.format, 'InvalidParameterValue', msg) - LOGGER.debug('Querying provider') - LOGGER.debug('offset: {}'.format(offset)) - LOGGER.debug('limit: {}'.format(limit)) - LOGGER.debug('bbox: {}'.format(bbox)) - LOGGER.debug('leaf: {}'.format(leaf_)) - LOGGER.debug('datetime: {}'.format(datetime_)) - - pmdb_provider = PostgresMobilityDB() - content = { - "type": "TemporalGeometrySequence", - "geometrySequence": [], - "crs": {}, - "trs": {}, - "links": [], + leaf_ = request.params.get('leaf') + LOGGER.debug('Processing leaf parameter') + try: + leaf_ = validate_leaf(leaf_) + except ValueError as err: + msg = str(err) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + sub_trajectory = request.params.get('subTrajectory') + if sub_trajectory is None: + sub_trajectory = False + + if (leaf_ != '' and leaf_ is not None) \ + and (sub_trajectory or sub_trajectory == 'true'): + msg = 'Cannot use both parameter `subTrajectory` \ + and `leaf` at the same time' + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + LOGGER.debug('Processing datetime parameter') + datetime_ = request.params.get('datetime') + try: + datetime_ = validate_datetime(datetime_) + except ValueError as err: + msg = str(err) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + LOGGER.debug('Querying provider') + LOGGER.debug('offset: {}'.format(offset)) + LOGGER.debug('limit: {}'.format(limit)) + LOGGER.debug('bbox: {}'.format(bbox)) + LOGGER.debug('leaf: {}'.format(leaf_)) + LOGGER.debug('datetime: {}'.format(datetime_)) + + pmdb_provider = PostgresMobilityDB() + content = { + "type": "TemporalGeometrySequence", + "geometrySequence": [], + "crs": {}, + "trs": {}, + "links": [], + } + + crs = None + trs = None + try: + pmdb_provider.connect() + result, number_matched, number_returned = 
pmdb_provider.\ + get_temporalgeometries(collection_id=collection_id, + mfeature_id=mfeature_id, + bbox=bbox, + leaf=leaf_, + datetime=datetime_, + limit=limit, + offset=offset, + sub_trajectory=sub_trajectory) + pymeos_initialize() + prisms = [] + for row in result: + temporal_geometry = json.loads(Temporal.as_mfjson( + TGeomPointSeq(str(row[3]).replace("'", "")), False)) + if 'crs' in temporal_geometry and crs is None: + crs = temporal_geometry['crs'] + if 'trs' in temporal_geometry and trs is None: + trs = temporal_geometry['trs'] + temporal_geometry = pmdb_provider\ + .convert_temporalgeometry_to_old_version( + temporal_geometry) + temporal_geometry['id'] = row[2] + + if (leaf_ != '' and leaf_ is not None) or \ + (sub_trajectory or sub_trajectory == 'true'): + if row[4] is not None: + temporal_geometry_filter = json.loads( + Temporal.as_mfjson( + TGeomPointSeq(str(row[4]).replace("'", "")), + False)) + temporal_geometry['datetimes'] = \ + temporal_geometry_filter['datetimes'] + temporal_geometry['coordinates'] = \ + temporal_geometry_filter['coordinates'] + else: + continue + # temporalGeometry['datetimes'] = [] + # temporalGeometry['coordinates'] = [] + prisms.append(temporal_geometry) + content["geometrySequence"] = prisms + except (Exception, psycopg2.Error) as error: + msg = str(error) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'ConnectingError', msg) + + if crs is not None: + content['crs'] = crs + else: + content['crs'] = { + "type": "Name", + "properties": "urn:ogc:def:crs:OGC:1.3:CRS84" } - crs = None - trs = None - try: - pmdb_provider.connect() - result, number_matched, number_returned = pmdb_provider.\ - get_temporalgeometries(collection_id=collection_id, - mfeature_id=mfeature_id, - bbox=bbox, - leaf=leaf_, - datetime=datetime_, - limit=limit, - offset=offset, - sub_trajectory=sub_trajectory) - pymeos_initialize() - prisms = [] - for row in result: - temporal_geometry = json.loads(Temporal.as_mfjson( - 
TGeomPointSeq(str(row[3]).replace("'", "")), False)) - if 'crs' in temporal_geometry and crs is None: - crs = temporal_geometry['crs'] - if 'trs' in temporal_geometry and trs is None: - trs = temporal_geometry['trs'] - temporal_geometry = pmdb_provider\ - .convert_temporalgeometry_to_old_version( - temporal_geometry) - temporal_geometry['id'] = row[2] - - if (leaf_ != '' and leaf_ is not None) or \ - (sub_trajectory or sub_trajectory == 'true'): - if row[4] is not None: - temporal_geometry_filter = json.loads( - Temporal.as_mfjson( - TGeomPointSeq(str(row[4]).replace("'", "")), - False)) - temporal_geometry['datetimes'] = \ - temporal_geometry_filter['datetimes'] - temporal_geometry['coordinates'] = \ - temporal_geometry_filter['coordinates'] - else: - continue - # temporalGeometry['datetimes'] = [] - # temporalGeometry['coordinates'] = [] - prisms.append(temporal_geometry) - content["geometrySequence"] = prisms - except (Exception, psycopg2.Error) as error: - msg = str(error) - return self.get_exception( - HTTPStatus.BAD_REQUEST, - headers, request.format, 'ConnectingError', msg) + if trs is not None: + content['trs'] = trs + else: + content['trs'] = { + "type": "Name", + "properties": "urn:ogc:data:time:iso8601" + } - if crs is not None: - content['crs'] = crs - else: - content['crs'] = { - "type": "Name", - "properties": "urn:ogc:def:crs:OGC:1.3:CRS84" - } + # TODO: translate titles + uri = '{}/{}/items/{}/tgsequence'.format( + api.get_collections_url(), collection_id, mfeature_id) + + serialized_query_params = '' + for k, v in request.params.items(): + if k not in ('f', 'offset'): + serialized_query_params += '&' + serialized_query_params += urllib.parse.quote(k, safe='') + serialized_query_params += '=' + serialized_query_params += urllib.parse.quote(str(v), safe=',') + + content['links'] = [ + {'href': '{}?offset={}{}'.format( + uri, offset, serialized_query_params), + 'rel': request.get_linkrel(F_JSON), + 'type': FORMAT_TYPES[F_JSON]}] + + if 
len(content['geometrySequence']) == limit: + next_ = offset + limit + content['links'].append( + {'href': '{}?offset={}{}'.format( + uri, next_, serialized_query_params), + 'type': 'application/geo+json', 'rel': 'next'}) - if trs is not None: - content['trs'] = trs - else: - content['trs'] = { - "type": "Name", - "properties": "urn:ogc:data:time:iso8601" - } + content['timeStamp'] = datetime.utcnow().strftime( + '%Y-%m-%dT%H:%M:%S.%fZ') - # TODO: translate titles - uri = '{}/{}/items/{}/tgsequence'.format( - self.get_collections_url(), collection_id, mfeature_id) - - serialized_query_params = '' - for k, v in request.params.items(): - if k not in ('f', 'offset'): - serialized_query_params += '&' - serialized_query_params += urllib.parse.quote(k, safe='') - serialized_query_params += '=' - serialized_query_params += urllib.parse.quote(str(v), safe=',') - - content['links'] = [ - {'href': '{}?offset={}{}'.format( - uri, offset, serialized_query_params), - 'rel': request.get_linkrel(F_JSON), - 'type': FORMAT_TYPES[F_JSON]}] - - if len(content['geometrySequence']) == limit: - next_ = offset + limit - content['links'].append( - {'href': '{}?offset={}{}'.format( - uri, next_, serialized_query_params), - 'type': 'application/geo+json', 'rel': 'next'}) - - content['timeStamp'] = datetime.utcnow().strftime( - '%Y-%m-%dT%H:%M:%S.%fZ') - - content['numberMatched'] = number_matched - content['numberReturned'] = len(content["geometrySequence"]) - return headers, HTTPStatus.OK, to_json(content, self.pretty_print) - - @gzip - @pre_process - def manage_collection_item_tGeometry( - self, request: Union[APIRequest, Any], - action, dataset, identifier, - tGeometry=None) -> Tuple[dict, int, str]: - """ - Adds Temporal Geometry item to a moving feature - - :param request: A request object - :param dataset: dataset name - :param identifier: moving feature's id - :param tGeometry: Temporal Geometry's id - - :returns: tuple of headers, status code, content - """ - - if not 
request.is_valid(PLUGINS['formatter'].keys()): - return self.get_format_exception(request) - - # Set Content-Language to system locale until provider locale - # has been determined - headers = request.get_response_headers(SYSTEM_LOCALE) - - pmdb_provider = PostgresMobilityDB() - excuted, feature_list = get_list_of_features_id() - - if excuted is False: - msg = str(feature_list) - return self.get_exception( - HTTPStatus.BAD_REQUEST, - headers, request.format, 'ConnectingError', msg) + content['numberMatched'] = number_matched + content['numberReturned'] = len(content["geometrySequence"]) + return headers, HTTPStatus.OK, to_json(content, api.pretty_print) - if [dataset, identifier] not in feature_list: - msg = 'Feature not found' - LOGGER.error(msg) - return self.get_exception( - HTTPStatus.NOT_FOUND, - headers, request.format, 'NotFound', msg) - collection_id = dataset - mfeature_id = identifier - tGeometry_id = tGeometry - if action == 'create': - if not request.data: - msg = 'No data found' - LOGGER.error(msg) - return self.get_exception( - HTTPStatus.BAD_REQUEST, - headers, request.format, 'InvalidParameterValue', msg) - - data = request.data - try: - # Parse bytes data, if applicable - data = data.decode() - LOGGER.debug(data) - except (UnicodeDecodeError, AttributeError): - pass - - try: - data = json.loads(data) - except (json.decoder.JSONDecodeError, TypeError) as err: - # Input does not appear to be valid JSON - LOGGER.error(err) - msg = 'invalid request data' - return self.get_exception( - HTTPStatus.BAD_REQUEST, - headers, request.format, 'InvalidParameterValue', msg) - - if check_required_field_temporal_geometries(data) is False: - # TODO not all processes require input - msg = 'The required tag (e.g., type,prisms) \ - is missing from the request data.' 
- return self.get_exception( - HTTPStatus.NOT_IMPLEMENTED, - headers, request.format, 'MissingParameterValue', msg) - - LOGGER.debug('Creating item') - try: - pmdb_provider.connect() - if data['type'] == 'MovingGeometryCollection': - for tGeometry in data['prisms']: - tGeometry_id = pmdb_provider.\ - post_temporalgeometry( - collection_id, mfeature_id, tGeometry) +def manage_collection_item_tGeometry( + api: API, request: APIRequest, + action, dataset, identifier, + tGeometry=None) -> Tuple[dict, int, str]: + """ + Adds Temporal Geometry item to a moving feature - else: - tGeometry_id = pmdb_provider.post_temporalgeometry( - collection_id, mfeature_id, data) - except (Exception, psycopg2.Error) as error: - msg = str(error) - return self.get_exception( - HTTPStatus.BAD_REQUEST, - headers, request.format, 'ConnectingError', msg) - finally: - pmdb_provider.disconnect() - - headers['Location'] = '{}/{}/items/{}/tgsequence/{}'.format( - self.get_collections_url(), dataset, mfeature_id, tGeometry_id) - - return headers, HTTPStatus.CREATED, '' - - if action == 'delete': - LOGGER.debug('Deleting item') - - try: - pmdb_provider.connect() - pmdb_provider.delete_temporalgeometry( - "AND tgeometry_id ='{0}'".format(tGeometry_id)) - - except (Exception, psycopg2.Error) as error: - msg = str(error) - return self.get_exception( - HTTPStatus.BAD_REQUEST, - headers, request.format, 'ConnectingError', msg) - finally: - pmdb_provider.disconnect() - - return headers, HTTPStatus.NO_CONTENT, '' - - @gzip - @pre_process - def get_collection_items_tGeometry_velocity(self, - request: - Union[APIRequest, Any], - dataset, identifier, - tGeometry) \ - -> Tuple[dict, int, str]: - - headers = request.get_response_headers(SYSTEM_LOCALE) - datetime_ = request.params.get('date-time') - collection_id = dataset - mfeature_id = identifier - tgeometry_id = tGeometry - pmdb_provider = PostgresMobilityDB() - try: - datetime_ = validate_datetime(datetime_, return_type=False) - except ValueError as err: - 
msg = str(err) - return self.get_exception( + :param request: A request object + :param dataset: dataset name + :param identifier: moving feature's id + :param tGeometry: Temporal Geometry's id + + :returns: tuple of headers, status code, content + """ + + if not request.is_valid(PLUGINS['formatter'].keys()): + return api.get_format_exception(request) + + # Set Content-Language to system locale until provider locale + # has been determined + headers = request.get_response_headers(SYSTEM_LOCALE) + + pmdb_provider = PostgresMobilityDB() + excuted, feature_list = get_list_of_features_id() + + if excuted is False: + msg = str(feature_list) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'ConnectingError', msg) + + if [dataset, identifier] not in feature_list: + msg = 'Feature not found' + LOGGER.error(msg) + return api.get_exception( + HTTPStatus.NOT_FOUND, + headers, request.format, 'NotFound', msg) + + collection_id = dataset + mfeature_id = identifier + tGeometry_id = tGeometry + if action == 'create': + if not request.data: + msg = 'No data found' + LOGGER.error(msg) + return api.get_exception( HTTPStatus.BAD_REQUEST, headers, request.format, 'InvalidParameterValue', msg) - try: - pmdb_provider.connect() - print(datetime_) - content = pmdb_provider.get_velocity( - collection_id, mfeature_id, tgeometry_id, datetime_) - except (Exception, psycopg2.Error) as error: - msg = str(error) - return self.get_exception( - HTTPStatus.INTERNAL_SERVER_ERROR, - headers, request.format, 'Server Internal Error', msg) - finally: - pmdb_provider.disconnect() - return headers, HTTPStatus.OK, content + data = request.data + try: + # Parse bytes data, if applicable + data = data.decode() + LOGGER.debug(data) + except (UnicodeDecodeError, AttributeError): + pass - @gzip - @pre_process - def get_collection_items_tGeometry_distance(self, - request: - Union[APIRequest, Any], - dataset, identifier, - tGeometry) \ - -> Tuple[dict, int, str]: - - headers = 
request.get_response_headers(SYSTEM_LOCALE) - datetime_ = request.params.get('date-time') - collection_id = str(dataset) - mfeature_id = str(identifier) - tgeometry_id = str(tGeometry) - pmdb_provider = PostgresMobilityDB() try: - datetime_ = validate_datetime(datetime_, return_type=False) - except ValueError as err: - msg = str(err) - return self.get_exception( + data = json.loads(data) + except (json.decoder.JSONDecodeError, TypeError) as err: + # Input does not appear to be valid JSON + LOGGER.error(err) + msg = 'invalid request data' + return api.get_exception( HTTPStatus.BAD_REQUEST, headers, request.format, 'InvalidParameterValue', msg) + + if check_required_field_temporal_geometries(data) is False: + # TODO not all processes require input + msg = 'The required tag (e.g., type,prisms) \ + is missing from the request data.' + return api.get_exception( + HTTPStatus.NOT_IMPLEMENTED, + headers, request.format, 'MissingParameterValue', msg) + + LOGGER.debug('Creating item') try: pmdb_provider.connect() - content = pmdb_provider.get_distance( - collection_id, mfeature_id, tgeometry_id, datetime_) + if data['type'] == 'MovingGeometryCollection': + for tGeometry in data['prisms']: + tGeometry_id = pmdb_provider.\ + post_temporalgeometry( + collection_id, mfeature_id, tGeometry) + + else: + tGeometry_id = pmdb_provider.post_temporalgeometry( + collection_id, mfeature_id, data) except (Exception, psycopg2.Error) as error: msg = str(error) - return self.get_exception( - HTTPStatus.INTERNAL_SERVER_ERROR, + return api.get_exception( + HTTPStatus.BAD_REQUEST, headers, request.format, 'ConnectingError', msg) finally: pmdb_provider.disconnect() - return headers, HTTPStatus.OK, content - - @gzip - @pre_process - def get_collection_items_tGeometry_acceleration(self, - request: - Union[APIRequest, Any], - dataset, identifier, - tGeometry) \ - -> Tuple[dict, - int, str]: - - headers = request.get_response_headers(SYSTEM_LOCALE) - datetime_ = request.params.get('date-time') - 
collection_id = dataset - mfeature_id = identifier - tgeometry_id = tGeometry - pmdb_provider = PostgresMobilityDB() - try: - datetime_ = validate_datetime(datetime_, return_type=False) - except ValueError as err: - msg = str(err) - return self.get_exception( - HTTPStatus.BAD_REQUEST, - headers, request.format, 'InvalidParameterValue', msg) + headers['Location'] = '{}/{}/items/{}/tgsequence/{}'.format( + api.get_collections_url(), dataset, mfeature_id, tGeometry_id) + + return headers, HTTPStatus.CREATED, '' + + if action == 'delete': + LOGGER.debug('Deleting item') + try: pmdb_provider.connect() - content = pmdb_provider.get_acceleration( - collection_id, mfeature_id, tgeometry_id, datetime_) + pmdb_provider.delete_temporalgeometry( + "AND tgeometry_id ='{0}'".format(tGeometry_id)) + except (Exception, psycopg2.Error) as error: msg = str(error) - return self.get_exception( - HTTPStatus.INTERNAL_SERVER_ERROR, + return api.get_exception( + HTTPStatus.BAD_REQUEST, headers, request.format, 'ConnectingError', msg) finally: pmdb_provider.disconnect() - return headers, HTTPStatus.OK, content + return headers, HTTPStatus.NO_CONTENT, '' - @gzip - @pre_process - def get_collection_items_tProperty(self, request: Union[APIRequest, Any], - dataset, - identifier) -> Tuple[dict, int, str]: - """ - Get temporal Properties of collection item - :param request: A request object - :param dataset: dataset name - :param identifier: item identifier +def get_collection_items_tGeometry_velocity(api: API, request: APIRequest, + dataset, identifier, + tGeometry) \ + -> Tuple[dict, int, str]: - :returns: tuple of headers, status code, content - """ + headers = request.get_response_headers(SYSTEM_LOCALE) + datetime_ = request.params.get('date-time') + collection_id = dataset + mfeature_id = identifier + tgeometry_id = tGeometry + pmdb_provider = PostgresMobilityDB() + try: + datetime_ = validate_datetime(datetime_, return_type=False) + except ValueError as err: + msg = str(err) + return 
api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + try: + pmdb_provider.connect() + content = pmdb_provider.get_velocity( + collection_id, mfeature_id, tgeometry_id, datetime_) + except (Exception, psycopg2.Error) as error: + msg = str(error) + return api.get_exception( + HTTPStatus.INTERNAL_SERVER_ERROR, + headers, request.format, 'Server Internal Error', msg) + finally: + pmdb_provider.disconnect() - if not request.is_valid(): - return self.get_format_exception(request) - headers = request.get_response_headers(SYSTEM_LOCALE) + return headers, HTTPStatus.OK, content - excuted, feature_list = get_list_of_features_id() - if excuted is False: - msg = str(feature_list) - return self.get_exception( - HTTPStatus.BAD_REQUEST, - headers, request.format, 'ConnectingError', msg) - if [dataset, identifier] not in feature_list: - msg = 'Feature not found' - LOGGER.error(msg) - return self.get_exception( - HTTPStatus.NOT_FOUND, - headers, request.format, 'NotFound', msg) +def get_collection_items_tGeometry_distance(api: API, request: APIRequest, + dataset, identifier, + tGeometry) \ + -> Tuple[dict, int, str]: + + headers = request.get_response_headers(SYSTEM_LOCALE) + datetime_ = request.params.get('date-time') + collection_id = str(dataset) + mfeature_id = str(identifier) + tgeometry_id = str(tGeometry) + pmdb_provider = PostgresMobilityDB() + try: + datetime_ = validate_datetime(datetime_, return_type=False) + except ValueError as err: + msg = str(err) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + try: + pmdb_provider.connect() + content = pmdb_provider.get_distance( + collection_id, mfeature_id, tgeometry_id, datetime_) + except (Exception, psycopg2.Error) as error: + msg = str(error) + return api.get_exception( + HTTPStatus.INTERNAL_SERVER_ERROR, + headers, request.format, 'ConnectingError', msg) + finally: + pmdb_provider.disconnect() - collection_id = 
dataset - mfeature_id = identifier - LOGGER.debug('Processing query parameters') + return headers, HTTPStatus.OK, content - LOGGER.debug('Processing offset parameter') - try: - offset = int(request.params.get('offset')) - if offset < 0: - msg = 'offset value should be positive or zero' - return self.get_exception( - HTTPStatus.BAD_REQUEST, - headers, request.format, 'InvalidParameterValue', msg) - except TypeError as err: - LOGGER.warning(err) - offset = 0 - except ValueError: - msg = 'offset value should be an integer' - return self.get_exception( + +def get_collection_items_tGeometry_acceleration(api: API, request: APIRequest, + dataset, identifier, + tGeometry) \ + -> Tuple[dict, + int, str]: + + headers = request.get_response_headers(SYSTEM_LOCALE) + datetime_ = request.params.get('date-time') + collection_id = dataset + mfeature_id = identifier + tgeometry_id = tGeometry + pmdb_provider = PostgresMobilityDB() + try: + datetime_ = validate_datetime(datetime_, return_type=False) + except ValueError as err: + msg = str(err) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + try: + pmdb_provider.connect() + content = pmdb_provider.get_acceleration( + collection_id, mfeature_id, tgeometry_id, datetime_) + except (Exception, psycopg2.Error) as error: + msg = str(error) + return api.get_exception( + HTTPStatus.INTERNAL_SERVER_ERROR, + headers, request.format, 'ConnectingError', msg) + finally: + pmdb_provider.disconnect() + + return headers, HTTPStatus.OK, content + + +def get_collection_items_tProperty(api: API, request: APIRequest, + dataset, + identifier) -> Tuple[dict, int, str]: + """ + Get temporal Properties of collection item + + :param request: A request object + :param dataset: dataset name + :param identifier: item identifier + + :returns: tuple of headers, status code, content + """ + + if not request.is_valid(): + return api.get_format_exception(request) + headers = 
request.get_response_headers(SYSTEM_LOCALE) + + excuted, feature_list = get_list_of_features_id() + if excuted is False: + msg = str(feature_list) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'ConnectingError', msg) + + if [dataset, identifier] not in feature_list: + msg = 'Feature not found' + LOGGER.error(msg) + return api.get_exception( + HTTPStatus.NOT_FOUND, + headers, request.format, 'NotFound', msg) + + collection_id = dataset + mfeature_id = identifier + LOGGER.debug('Processing query parameters') + + LOGGER.debug('Processing offset parameter') + try: + offset = int(request.params.get('offset')) + if offset < 0: + msg = 'offset value should be positive or zero' + return api.get_exception( HTTPStatus.BAD_REQUEST, headers, request.format, 'InvalidParameterValue', msg) - - LOGGER.debug('Processing limit parameter') - try: - limit = int(request.params.get('limit')) - # TODO: We should do more validation, against the min and max - # allowed by the server configuration - if limit <= 0: - msg = 'limit value should be strictly positive' - return self.get_exception( - HTTPStatus.BAD_REQUEST, - headers, request.format, 'InvalidParameterValue', msg) - if limit > 10000: - msg = 'limit value should be less than or equal to 10000' - return self.get_exception( - HTTPStatus.BAD_REQUEST, - headers, request.format, 'InvalidParameterValue', msg) - except TypeError as err: - LOGGER.warning(err) - limit = int(self.config['server']['limit']) - except ValueError: - msg = 'limit value should be an integer' - return self.get_exception( + except TypeError as err: + LOGGER.warning(err) + offset = 0 + except ValueError: + msg = 'offset value should be an integer' + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + LOGGER.debug('Processing limit parameter') + try: + limit = int(request.params.get('limit')) + # TODO: We should do more validation, against the min and max + # allowed by the 
server configuration + if limit <= 0: + msg = 'limit value should be strictly positive' + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + if limit > 10000: + msg = 'limit value should be less than or equal to 10000' + return api.get_exception( HTTPStatus.BAD_REQUEST, headers, request.format, 'InvalidParameterValue', msg) + except TypeError as err: + LOGGER.warning(err) + limit = int(api.config['server']['limit']) + except ValueError: + msg = 'limit value should be an integer' + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + LOGGER.debug('Processing datetime parameter') + datetime_ = request.params.get('datetime') + try: + datetime_ = validate_datetime(datetime_) + except ValueError as err: + msg = str(err) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) - LOGGER.debug('Processing datetime parameter') - datetime_ = request.params.get('datetime') - try: - datetime_ = validate_datetime(datetime_) - except ValueError as err: - msg = str(err) - return self.get_exception( + sub_temporal_value = request.params.get('subTemporalValue') + if sub_temporal_value is None: + sub_temporal_value = False + + LOGGER.debug('Querying provider') + LOGGER.debug('offset: {}'.format(offset)) + LOGGER.debug('limit: {}'.format(limit)) + LOGGER.debug('datetime: {}'.format(datetime_)) + + pmdb_provider = PostgresMobilityDB() + content = { + "temporalProperties": [], + "links": [] + } + + try: + pmdb_provider.connect() + result, number_matched, number_returned = pmdb_provider.\ + get_temporalproperties(collection_id=collection_id, + mfeature_id=mfeature_id, + datetime=datetime_, + limit=limit, offset=offset, + sub_temporal_value=sub_temporal_value) + + temporal_properties = [] + if sub_temporal_value is False or sub_temporal_value == "false": + for row in result: + temporal_property = row[3] if row[3] is 
not None else {} + temporal_property['name'] = row[2] + + temporal_properties.append(temporal_property) + else: + split_groups = {} + for i in range(len(result)): + group_id = str(result[i][4]) + if group_id not in split_groups: + split_groups[group_id] = [] + split_groups[group_id].append(i) + pymeos_initialize() + for key, group_row_index in split_groups.items(): + group = {} + group["datetimes"] = [] + for row_index in group_row_index: + row = result[int(row_index)] + tproperties_name = row[2] + group[tproperties_name] \ + = row[3] if row[3] is not None else {} + if row[5] is not None or row[6] is not None: + temporal_property_value = Temporal.as_mfjson( + TFloatSeq(str(row[5]).replace("'", "")), + False) if row[5] \ + is not None else Temporal.as_mfjson( + TTextSeq(str(row[6]).replace("'", "")), + False) + temporal_property_value = pmdb_provider.\ + convert_temporalproperty_value_to_base_version( + json.loads(temporal_property_value)) + + if 'datetimes' in temporal_property_value: + group["datetimes"] = \ + temporal_property_value.pop( + "datetimes", None) + group[tproperties_name].update( + temporal_property_value) + temporal_properties.append(group) + content["temporalProperties"] = temporal_properties + except (Exception, psycopg2.Error) as error: + msg = str(error) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'ConnectingError', msg) + + # TODO: translate titles + uri = '{}/{}/items/{}/tProperties'.format( + api.get_collections_url(), collection_id, mfeature_id) + + serialized_query_params = '' + for k, v in request.params.items(): + if k not in ('f', 'offset'): + serialized_query_params += '&' + serialized_query_params += urllib.parse.quote(k, safe='') + serialized_query_params += '=' + serialized_query_params += urllib.parse.quote(str(v), safe=',') + + content['links'] = [ + {'href': '{}?offset={}{}'.format( + uri, offset, serialized_query_params), + 'rel': request.get_linkrel(F_JSON), + 'type': FORMAT_TYPES[F_JSON]}] + 
+ if len(content['temporalProperties']) == limit: + next_ = offset + limit + content['links'].append( + {'href': '{}?offset={}{}'.format( + uri, next_, serialized_query_params), + 'type': 'application/geo+json', 'rel': 'next', }) + + content['timeStamp'] = datetime.utcnow().strftime( + '%Y-%m-%dT%H:%M:%S.%fZ') + + content['numberMatched'] = number_matched + content['numberReturned'] = number_returned + return headers, HTTPStatus.OK, to_json(content, api.pretty_print) + + +def manage_collection_item_tProperty( + api: API, request: APIRequest, + action, dataset, identifier, + tProperty=None) -> Tuple[dict, int, str]: + """ + Adds Temporal Property item to a moving feature + + :param request: A request object + :param dataset: dataset name + :param identifier: moving feature's id + :param tProperty: Temporal Property's id + + :returns: tuple of headers, status code, content + """ + + if not request.is_valid(PLUGINS['formatter'].keys()): + return api.get_format_exception(request) + + # Set Content-Language to system locale until provider locale + # has been determined + headers = request.get_response_headers(SYSTEM_LOCALE) + + pmdb_provider = PostgresMobilityDB() + excuted, feature_list = get_list_of_features_id() + + if excuted is False: + msg = str(feature_list) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'ConnectingError', msg) + + if [dataset, identifier] not in feature_list: + msg = 'Feature not found' + LOGGER.error(msg) + return api.get_exception( + HTTPStatus.NOT_FOUND, + headers, request.format, 'NotFound', msg) + + collection_id = dataset + mfeature_id = identifier + tProperties_name = tProperty + if action == 'create': + if not request.data: + msg = 'No data found' + LOGGER.error(msg) + return api.get_exception( HTTPStatus.BAD_REQUEST, headers, request.format, 'InvalidParameterValue', msg) - sub_temporal_value = request.params.get('subTemporalValue') - if sub_temporal_value is None: - sub_temporal_value = False + data = 
request.data + try: + # Parse bytes data, if applicable + data = data.decode() + LOGGER.debug(data) + except (UnicodeDecodeError, AttributeError): + pass - LOGGER.debug('Querying provider') - LOGGER.debug('offset: {}'.format(offset)) - LOGGER.debug('limit: {}'.format(limit)) - LOGGER.debug('datetime: {}'.format(datetime_)) + try: + if not isinstance(data, list): + data = json.loads(data) + else: + for d in data: + _ = json.loads(d) + except (json.decoder.JSONDecodeError, TypeError) as err: + # Input does not appear to be valid JSON + LOGGER.error(err) + msg = 'invalid request data' + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) - pmdb_provider = PostgresMobilityDB() - content = { - "temporalProperties": [], - "links": [] - } + if check_required_field_temporal_property(data) is False: + # TODO not all processes require input + msg = 'The required tag (e.g., datetimes,interpolation) \ + is missing from the request data.' + return api.get_exception( + HTTPStatus.NOT_IMPLEMENTED, + headers, request.format, 'MissingParameterValue', msg) + LOGGER.debug('Creating item') try: pmdb_provider.connect() - result, number_matched, number_returned = pmdb_provider.\ - get_temporalproperties(collection_id=collection_id, - mfeature_id=mfeature_id, - datetime=datetime_, - limit=limit, offset=offset, - sub_temporal_value=sub_temporal_value) - - temporal_properties = [] - if sub_temporal_value is False or sub_temporal_value == "false": - for row in result: - temporal_property = row[3] if row[3] is not None else {} - temporal_property['name'] = row[2] - - temporal_properties.append(temporal_property) + # temporalProperties = data['temporalProperties'] + temporal_properties = data + temporal_properties = [temporal_properties] if not isinstance( + temporal_properties, list) else temporal_properties + + can_post = pmdb_provider.check_temporalproperty_can_post( + collection_id, mfeature_id, temporal_properties) + 
tProperties_name_list = [] + if can_post: + for temporalProperty in temporal_properties: + tProperties_name_list.extend( + pmdb_provider. post_temporalproperties( + collection_id, mfeature_id, temporalProperty)) else: - split_groups = {} - for i in range(len(result)): - group_id = str(result[i][4]) - if group_id not in split_groups: - split_groups[group_id] = [] - split_groups[group_id].append(i) - pymeos_initialize() - for key, group_row_index in split_groups.items(): - group = {} - group["datetimes"] = [] - for row_index in group_row_index: - row = result[int(row_index)] - tproperties_name = row[2] - group[tproperties_name] \ - = row[3] if row[3] is not None else {} - if row[5] is not None or row[6] is not None: - temporal_property_value = Temporal.as_mfjson( - TFloatSeq(str(row[5]).replace("'", "")), - False) if row[5] \ - is not None else Temporal.as_mfjson( - TTextSeq(str(row[6]).replace("'", "")), - False) - temporal_property_value = pmdb_provider.\ - convert_temporalproperty_value_to_base_version( - json.loads(temporal_property_value)) - - if 'datetimes' in temporal_property_value: - group["datetimes"] = \ - temporal_property_value.pop( - "datetimes", None) - group[tproperties_name].update( - temporal_property_value) - temporal_properties.append(group) - content["temporalProperties"] = temporal_properties + return headers, HTTPStatus.BAD_REQUEST, '' except (Exception, psycopg2.Error) as error: msg = str(error) - return self.get_exception( + return api.get_exception( HTTPStatus.BAD_REQUEST, headers, request.format, 'ConnectingError', msg) + finally: + pmdb_provider.disconnect() - # TODO: translate titles - uri = '{}/{}/items/{}/tProperties'.format( - self.get_collections_url(), collection_id, mfeature_id) + location_list = [] + for tProperties_name in tProperties_name_list: + location_list.append('{}/{}/items/{}/tProperties/{}'.format( + api.get_collections_url(), dataset, mfeature_id, + tProperties_name)) + headers['Locations'] = location_list - 
serialized_query_params = '' - for k, v in request.params.items(): - if k not in ('f', 'offset'): - serialized_query_params += '&' - serialized_query_params += urllib.parse.quote(k, safe='') - serialized_query_params += '=' - serialized_query_params += urllib.parse.quote(str(v), safe=',') + return headers, HTTPStatus.CREATED, '' - content['links'] = [ - {'href': '{}?offset={}{}'.format( - uri, offset, serialized_query_params), - 'rel': request.get_linkrel(F_JSON), - 'type': FORMAT_TYPES[F_JSON]}] - - if len(content['temporalProperties']) == limit: - next_ = offset + limit - content['links'].append( - {'href': '{}?offset={}{}'.format( - uri, next_, serialized_query_params), - 'type': 'application/geo+json', 'rel': 'next', }) - - content['timeStamp'] = datetime.utcnow().strftime( - '%Y-%m-%dT%H:%M:%S.%fZ') - - content['numberMatched'] = number_matched - content['numberReturned'] = number_returned - return headers, HTTPStatus.OK, to_json(content, self.pretty_print) - - @gzip - @pre_process - def manage_collection_item_tProperty( - self, request: Union[APIRequest, Any], - action, dataset, identifier, - tProperty=None) -> Tuple[dict, int, str]: - """ - Adds Temporal Property item to a moving feature - - :param request: A request object - :param dataset: dataset name - :param identifier: moving feature's id - :param tProperty: Temporal Property's id - - :returns: tuple of headers, status code, content - """ - - if not request.is_valid(PLUGINS['formatter'].keys()): - return self.get_format_exception(request) - - # Set Content-Language to system locale until provider locale - # has been determined - headers = request.get_response_headers(SYSTEM_LOCALE) - - pmdb_provider = PostgresMobilityDB() - excuted, feature_list = get_list_of_features_id() - - if excuted is False: - msg = str(feature_list) - return self.get_exception( - HTTPStatus.BAD_REQUEST, - headers, request.format, 'ConnectingError', msg) + if action == 'delete': + LOGGER.debug('Deleting item') - if [dataset, 
identifier] not in feature_list: - msg = 'Feature not found' - LOGGER.error(msg) - return self.get_exception( - HTTPStatus.NOT_FOUND, - headers, request.format, 'NotFound', msg) + try: + pmdb_provider.connect() + pmdb_provider.delete_temporalproperties( + """AND collection_id ='{0}' AND mfeature_id ='{1}' + AND tproperties_name ='{2}'""".format( + collection_id, mfeature_id, tProperties_name)) - collection_id = dataset - mfeature_id = identifier - tProperties_name = tProperty - if action == 'create': - if not request.data: - msg = 'No data found' - LOGGER.error(msg) - return self.get_exception( - HTTPStatus.BAD_REQUEST, - headers, request.format, 'InvalidParameterValue', msg) - - data = request.data - try: - # Parse bytes data, if applicable - data = data.decode() - LOGGER.debug(data) - except (UnicodeDecodeError, AttributeError): - pass - - try: - if not isinstance(data, list): - data = json.loads(data) - else: - for d in data: - _ = json.loads(d) - except (json.decoder.JSONDecodeError, TypeError) as err: - # Input does not appear to be valid JSON - LOGGER.error(err) - msg = 'invalid request data' - return self.get_exception( - HTTPStatus.BAD_REQUEST, - headers, request.format, 'InvalidParameterValue', msg) - - if check_required_field_temporal_property(data) is False: - # TODO not all processes require input - msg = 'The required tag (e.g., datetimes,interpolation) \ - is missing from the request data.' 
- return self.get_exception( - HTTPStatus.NOT_IMPLEMENTED, - headers, request.format, 'MissingParameterValue', msg) - - LOGGER.debug('Creating item') - try: - pmdb_provider.connect() - # temporalProperties = data['temporalProperties'] - temporal_properties = data - temporal_properties = [temporal_properties] if not isinstance( - temporal_properties, list) else temporal_properties - - can_post = pmdb_provider.check_temporalproperty_can_post( - collection_id, mfeature_id, temporal_properties) - tProperties_name_list = [] - if can_post: - for temporalProperty in temporal_properties: - tProperties_name_list.extend( - pmdb_provider. post_temporalproperties( - collection_id, mfeature_id, temporalProperty)) - else: - return headers, HTTPStatus.BAD_REQUEST, '' - except (Exception, psycopg2.Error) as error: - msg = str(error) - return self.get_exception( - HTTPStatus.BAD_REQUEST, - headers, request.format, 'ConnectingError', msg) - finally: - pmdb_provider.disconnect() - - location_list = [] - for tProperties_name in tProperties_name_list: - location_list.append('{}/{}/items/{}/tProperties/{}'.format( - self.get_collections_url(), dataset, mfeature_id, - tProperties_name)) - headers['Locations'] = location_list - - return headers, HTTPStatus.CREATED, '' - - if action == 'delete': - LOGGER.debug('Deleting item') - - try: - pmdb_provider.connect() - pmdb_provider.delete_temporalproperties( - "AND tproperties_name ='{0}'".format(tProperties_name)) - - except (Exception, psycopg2.Error) as error: - msg = str(error) - return self.get_exception( - HTTPStatus.BAD_REQUEST, - headers, request.format, 'ConnectingError', msg) - finally: - pmdb_provider.disconnect() - - return headers, HTTPStatus.NO_CONTENT, '' - - @gzip - @pre_process - def get_collection_items_tProperty_value(self, - request: Union[APIRequest, Any], - dataset, - identifier, - tProperty) \ - -> Tuple[dict, int, str]: - """ - Get temporal Properties of collection item - - :param request: A request object - :param 
dataset: dataset name - :param identifier: item identifier - :param tProperty: Temporal Property - - :returns: tuple of headers, status code, content - """ - - if not request.is_valid(): - return self.get_format_exception(request) - headers = request.get_response_headers(SYSTEM_LOCALE) - - excuted, tproperty_list = get_list_of_tproperties_name() - if excuted is False: - msg = str(tproperty_list) - return self.get_exception( + except (Exception, psycopg2.Error) as error: + msg = str(error) + return api.get_exception( HTTPStatus.BAD_REQUEST, headers, request.format, 'ConnectingError', msg) + finally: + pmdb_provider.disconnect() - if [dataset, identifier, tProperty] not in tproperty_list: - msg = 'Temporal Property not found' - LOGGER.error(msg) - return self.get_exception( - HTTPStatus.NOT_FOUND, - headers, request.format, 'NotFound', msg) + return headers, HTTPStatus.NO_CONTENT, '' - collection_id = dataset - mfeature_id = identifier - tProperty_name = tProperty - LOGGER.debug('Processing query parameters') - LOGGER.debug('Processing offset parameter') - try: - offset = int(request.params.get('offset')) - if offset < 0: - msg = 'offset value should be positive or zero' - return self.get_exception( - HTTPStatus.BAD_REQUEST, - headers, request.format, 'InvalidParameterValue', msg) - except TypeError as err: - LOGGER.warning(err) - offset = 0 - except ValueError: - msg = 'offset value should be an integer' - return self.get_exception( +def get_collection_items_tProperty_value(api: API, request: APIRequest, + dataset, + identifier, + tProperty) \ + -> Tuple[dict, int, str]: + """ + Get temporal Properties of collection item + + :param request: A request object + :param dataset: dataset name + :param identifier: item identifier + :param tProperty: Temporal Property + + :returns: tuple of headers, status code, content + """ + + if not request.is_valid(): + return api.get_format_exception(request) + headers = request.get_response_headers(SYSTEM_LOCALE) + + excuted, 
tproperty_list = get_list_of_tproperties_name() + if excuted is False: + msg = str(tproperty_list) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'ConnectingError', msg) + + if [dataset, identifier, tProperty] not in tproperty_list: + msg = 'Temporal Property not found' + LOGGER.error(msg) + return api.get_exception( + HTTPStatus.NOT_FOUND, + headers, request.format, 'NotFound', msg) + + collection_id = dataset + mfeature_id = identifier + tProperty_name = tProperty + LOGGER.debug('Processing query parameters') + + LOGGER.debug('Processing offset parameter') + try: + offset = int(request.params.get('offset')) + if offset < 0: + msg = 'offset value should be positive or zero' + return api.get_exception( HTTPStatus.BAD_REQUEST, headers, request.format, 'InvalidParameterValue', msg) - - LOGGER.debug('Processing limit parameter') - try: - limit = int(request.params.get('limit')) - # TODO: We should do more validation, against the min and max - # allowed by the server configuration - if limit <= 0: - msg = 'limit value should be strictly positive' - return self.get_exception( - HTTPStatus.BAD_REQUEST, - headers, request.format, 'InvalidParameterValue', msg) - if limit > 10000: - msg = 'limit value should be less than or equal to 10000' - return self.get_exception( - HTTPStatus.BAD_REQUEST, - headers, request.format, 'InvalidParameterValue', msg) - except TypeError as err: - LOGGER.warning(err) - limit = int(self.config['server']['limit']) - except ValueError: - msg = 'limit value should be an integer' - return self.get_exception( + except TypeError as err: + LOGGER.warning(err) + offset = 0 + except ValueError: + msg = 'offset value should be an integer' + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + LOGGER.debug('Processing limit parameter') + try: + limit = int(request.params.get('limit')) + # TODO: We should do more validation, against the min and max + # allowed by 
the server configuration + if limit <= 0: + msg = 'limit value should be strictly positive' + return api.get_exception( HTTPStatus.BAD_REQUEST, headers, request.format, 'InvalidParameterValue', msg) - - LOGGER.debug('Processing leaf parameter') - leaf_ = request.params.get('leaf') - try: - leaf_ = validate_leaf(leaf_) - except ValueError as err: - msg = str(err) - return self.get_exception( + if limit > 10000: + msg = 'limit value should be less than or equal to 10000' + return api.get_exception( HTTPStatus.BAD_REQUEST, headers, request.format, 'InvalidParameterValue', msg) + except TypeError as err: + LOGGER.warning(err) + limit = int(api.config['server']['limit']) + except ValueError: + msg = 'limit value should be an integer' + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + LOGGER.debug('Processing leaf parameter') + leaf_ = request.params.get('leaf') + try: + leaf_ = validate_leaf(leaf_) + except ValueError as err: + msg = str(err) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + sub_temporal_value = request.params.get('subTemporalValue') + if sub_temporal_value is None: + sub_temporal_value = False + + if (leaf_ != '' and leaf_ is not None) and \ + (sub_temporal_value or sub_temporal_value == 'true'): + msg = 'Cannot use both parameter `subTemporalValue` \ + and `leaf` at the same time' + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + LOGGER.debug('Processing datetime parameter') + datetime_ = request.params.get('datetime') + try: + datetime_ = validate_datetime(datetime_) + except ValueError as err: + msg = str(err) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + LOGGER.debug('Querying provider') + LOGGER.debug('offset: {}'.format(offset)) + LOGGER.debug('limit: {}'.format(limit)) + 
LOGGER.debug('leaf: {}'.format(leaf_)) + LOGGER.debug('datetime: {}'.format(datetime_)) + + pmdb_provider = PostgresMobilityDB() + content = {} + + try: + pmdb_provider.connect() + result = pmdb_provider.get_temporalproperties_value( + collection_id=collection_id, mfeature_id=mfeature_id, + tProperty_name=tProperty_name, + datetime=datetime_, leaf=leaf_, + sub_temporal_value=sub_temporal_value) + pymeos_initialize() + value_sequence = [] + for row in result: + content = row[3] + if row[5] is not None or row[6] is not None: + temporal_property_value = Temporal.as_mfjson( + TFloatSeq(str(row[5]).replace("'", "")), + False) if row[5] is not None else Temporal.as_mfjson( + TTextSeq(str(row[6]).replace("'", "")), + False) + value_sequence.append( + pmdb_provider. + convert_temporalproperty_value_to_base_version( + json.loads( + temporal_property_value))) + content["valueSequence"] = value_sequence + except (Exception, psycopg2.Error) as error: + msg = str(error) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'ConnectingError', msg) + + # TODO: translate titles + return headers, HTTPStatus.OK, to_json(content, api.pretty_print) + + +def manage_collection_item_tProperty_value( + api: API, request: APIRequest, + action, dataset, identifier, + tProperty=None) -> Tuple[dict, int, str]: + """ + Adds Temporal Property Value item to a Temporal Property + + :param request: A request object + :param dataset: dataset name + :param identifier: moving feature's id + :param tProperty: Temporal Property's id + + :returns: tuple of headers, status code, content + """ - sub_temporal_value = request.params.get('subTemporalValue') - if sub_temporal_value is None: - sub_temporal_value = False + if not request.is_valid(PLUGINS['formatter'].keys()): + return api.get_format_exception(request) - if (leaf_ != '' and leaf_ is not None) and \ - (sub_temporal_value or sub_temporal_value == 'true'): - msg = 'Cannot use both parameter `subTemporalValue` \ - and 
`leaf` at the same time' - return self.get_exception( + # Set Content-Language to system locale until provider locale + # has been determined + headers = request.get_response_headers(SYSTEM_LOCALE) + + pmdb_provider = PostgresMobilityDB() + excuted, tproperty_list = get_list_of_tproperties_name() + if excuted is False: + msg = str(tproperty_list) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'ConnectingError', msg) + + if [dataset, identifier, tProperty] not in tproperty_list: + msg = 'Temporal Property not found' + LOGGER.error(msg) + return api.get_exception( + HTTPStatus.NOT_FOUND, + headers, request.format, 'NotFound', msg) + + collection_id = dataset + mfeature_id = identifier + tProperty_name = tProperty + if action == 'create': + if not request.data: + msg = 'No data found' + LOGGER.error(msg) + return api.get_exception( HTTPStatus.BAD_REQUEST, headers, request.format, 'InvalidParameterValue', msg) - LOGGER.debug('Processing datetime parameter') - datetime_ = request.params.get('datetime') + data = request.data try: - datetime_ = validate_datetime(datetime_) - except ValueError as err: - msg = str(err) - return self.get_exception( + # Parse bytes data, if applicable + data = data.decode() + LOGGER.debug(data) + except (UnicodeDecodeError, AttributeError): + pass + + try: + data = json.loads(data) + except (json.decoder.JSONDecodeError, TypeError) as err: + # Input does not appear to be valid JSON + LOGGER.error(err) + msg = 'invalid request data' + return api.get_exception( HTTPStatus.BAD_REQUEST, headers, request.format, 'InvalidParameterValue', msg) - LOGGER.debug('Querying provider') - LOGGER.debug('offset: {}'.format(offset)) - LOGGER.debug('limit: {}'.format(limit)) - LOGGER.debug('leaf: {}'.format(leaf_)) - LOGGER.debug('datetime: {}'.format(datetime_)) - - pmdb_provider = PostgresMobilityDB() - content = {} + if check_required_field_temporal_value(data) is False: + # TODO not all processes require input + msg = 'The 
required tag (e.g., datetimes,value) \ + is missing from the request data.' + return api.get_exception( + HTTPStatus.NOT_IMPLEMENTED, + headers, request.format, 'MissingParameterValue', msg) + LOGGER.debug('Creating item') try: pmdb_provider.connect() - result = pmdb_provider.get_temporalproperties_value( - collection_id=collection_id, mfeature_id=mfeature_id, - tProperty_name=tProperty_name, - datetime=datetime_, leaf=leaf_, - sub_temporal_value=sub_temporal_value) - pymeos_initialize() - value_sequence = [] - for row in result: - content = row[3] - if row[5] is not None or row[6] is not None: - temporal_property_value = Temporal.as_mfjson( - TFloatSeq(str(row[5]).replace("'", "")), - False) if row[5] is not None else Temporal.as_mfjson( - TTextSeq(str(row[6]).replace("'", "")), - False) - value_sequence.append( - pmdb_provider. - convert_temporalproperty_value_to_base_version( - json.loads( - temporal_property_value))) - content["valueSequence"] = value_sequence + can_post = pmdb_provider.check_temporalproperty_can_post( + collection_id, mfeature_id, [data], tProperty_name) + if can_post: + pValue_id = pmdb_provider.post_temporalvalue( + collection_id, mfeature_id, tProperty_name, data) + else: + return headers, HTTPStatus.BAD_REQUEST, '' except (Exception, psycopg2.Error) as error: msg = str(error) - return self.get_exception( - HTTPStatus.BAD_REQUEST, - headers, request.format, 'ConnectingError', msg) - - # TODO: translate titles - return headers, HTTPStatus.OK, to_json(content, self.pretty_print) - - @gzip - @pre_process - def manage_collection_item_tProperty_value( - self, request: Union[APIRequest, Any], - action, dataset, identifier, - tProperty=None) -> Tuple[dict, int, str]: - """ - Adds Temporal Property Value item to a Temporal Property - - :param request: A request object - :param dataset: dataset name - :param identifier: moving feature's id - :param tProperty: Temporal Property's id - - :returns: tuple of headers, status code, content - """ - - if 
not request.is_valid(PLUGINS['formatter'].keys()): - return self.get_format_exception(request) - - # Set Content-Language to system locale until provider locale - # has been determined - headers = request.get_response_headers(SYSTEM_LOCALE) - - pmdb_provider = PostgresMobilityDB() - excuted, tproperty_list = get_list_of_tproperties_name() - if excuted is False: - msg = str(tproperty_list) - return self.get_exception( + return api.get_exception( HTTPStatus.BAD_REQUEST, headers, request.format, 'ConnectingError', msg) + finally: + pmdb_provider.disconnect() + headers['Location'] = '{}/{}/items/{}/tProperties/{}/pvalue/{}'\ + .format(api.get_collections_url(), dataset, mfeature_id, + tProperty_name, pValue_id) - if [dataset, identifier, tProperty] not in tproperty_list: - msg = 'Temporal Property not found' - LOGGER.error(msg) - return self.get_exception( - HTTPStatus.NOT_FOUND, - headers, request.format, 'NotFound', msg) - - collection_id = dataset - mfeature_id = identifier - tProperty_name = tProperty - if action == 'create': - if not request.data: - msg = 'No data found' - LOGGER.error(msg) - return self.get_exception( - HTTPStatus.BAD_REQUEST, - headers, request.format, 'InvalidParameterValue', msg) - - data = request.data - try: - # Parse bytes data, if applicable - data = data.decode() - LOGGER.debug(data) - except (UnicodeDecodeError, AttributeError): - pass - - try: - data = json.loads(data) - except (json.decoder.JSONDecodeError, TypeError) as err: - # Input does not appear to be valid JSON - LOGGER.error(err) - msg = 'invalid request data' - return self.get_exception( - HTTPStatus.BAD_REQUEST, - headers, request.format, 'InvalidParameterValue', msg) - - if check_required_field_temporal_value(data) is False: - # TODO not all processes require input - msg = 'The required tag (e.g., datetimes,value) \ - is missing from the request data.' 
- return self.get_exception( - HTTPStatus.NOT_IMPLEMENTED, - headers, request.format, 'MissingParameterValue', msg) - - LOGGER.debug('Creating item') - try: - pmdb_provider.connect() - can_post = pmdb_provider.check_temporalproperty_can_post( - collection_id, mfeature_id, [data], tProperty_name) - if can_post: - pValue_id = pmdb_provider.post_temporalvalue( - collection_id, mfeature_id, tProperty_name, data) - else: - return headers, HTTPStatus.BAD_REQUEST, '' - except (Exception, psycopg2.Error) as error: - msg = str(error) - return self.get_exception( - HTTPStatus.BAD_REQUEST, - headers, request.format, 'ConnectingError', msg) - finally: - pmdb_provider.disconnect() - headers['Location'] = '{}/{}/items/{}/tProperties/{}/pvalue/{}'\ - .format(self.get_collections_url(), dataset, mfeature_id, - tProperty_name, pValue_id) - - return headers, HTTPStatus.CREATED, '' - - def get_exception(self, status, headers, format_, code, - description) -> Tuple[dict, int, str]: - """ - Exception handler - - :param status: HTTP status code - :param headers: dict of HTTP response headers - :param format_: format string - :param code: OGC API exception code - :param description: OGC API exception code - - :returns: tuple of headers, status, and message - """ - - LOGGER.error(description) - exception = { - 'code': code, - 'description': description - } - - if format_ == F_HTML: - headers['Content-Type'] = FORMAT_TYPES[F_HTML] - content = render_j2_template( - self.config, 'exception.html', exception, SYSTEM_LOCALE) - else: - content = to_json(exception, self.pretty_print) - - return headers, status, content - - def get_format_exception(self, request) -> Tuple[dict, int, str]: - """ - Returns a format exception. - - :param request: An APIRequest instance. 
- - :returns: tuple of (headers, status, message) - """ - - # Content-Language is in the system locale (ignore language settings) - headers = request.get_response_headers(SYSTEM_LOCALE) - msg = f'Invalid format: {request.format}' - return self.get_exception( - HTTPStatus.BAD_REQUEST, headers, - request.format, 'InvalidParameterValue', msg) - - def get_collections_url(self): - return '{}/collections'.format(self.config['server']['url']) + return headers, HTTPStatus.CREATED, '' def validate_bbox(value=None) -> list: @@ -2299,3 +2184,1157 @@ def check_required_field_trs(trs): or 'properties' not in trs): return False return True + +# fmt: off +def get_oas_30(cfg: dict, locale: str) -> tuple[list[dict[str, str]], dict[str, dict]]: # noqa + """ + Get OpenAPI fragments + + :param cfg: `dict` of configuration + :param locale: `str` of locale + + :returns: `tuple` of `list` of tag objects, and `dict` of path objects + """ + from pygeoapi.openapi import OPENAPI_YAML + + paths = {} + collections_collectionId_path = '/collections/{collectionId}' + paths[collections_collectionId_path] = { + "get": { + "operationId": "accessMetadata", + "summary": "Access metadata about the collection", + "description": "A user can access metadata with id `collectionId`.\n", # noqa + "tags": [ + "MovingFeatureCollection" + ], + "parameters": [ + { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId" # noqa + } + ], + "responses": { + "200": { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/Collection" # noqa + }, + "404": { + "description": "A collection with the specified id was not found." 
# noqa + }, + "500": { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError" # noqa + } + } + }, + "delete": { + "operationId": "deleteCollection", + "summary": "Delete the collection", + "description": "The collection catalog with id `collectionId` and including metadata and moving features SHOULD be deleted.\n", # noqa + "tags": [ + "MovingFeatureCollection" + ], + "parameters": [ + { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId" # noqa + } + ], + "responses": { + "204": { + "description": "Successfully deleted." + }, + "404": { + "description": "A collection with the specified name was not found." # noqa + }, + "500": { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError" # noqa + } + } + }, + "put": { + "operationId": "replaceMetadata", + "summary": "Replace metadata about the collection", + "description": "A user SHOULD replace metadata with id `collectionId`.\n\nThe request body schema is the same the POST's one. \n\nHowever, `updateFrequency` property is NOT updated.\n", # noqa + "tags": [ + "MovingFeatureCollection" + ], + "parameters": [ + { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId" # noqa + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/schemas/collection-body" # noqa + }, + "example": { + "title": "moving_feature_collection_sample", + "updateFrequency": 1000, + "description": "example", + "itemType": "movingfeature" + } + } + } + }, + "responses": { + "204": { + "description": "Successfully replaced." + }, + "404": { + "description": "A collection with the specified name was not found." 
# noqa + }, + "500": { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError" # noqa + } + } + } + } + + collections_collectionId_items_path = '/collections/{collectionId}/items' + paths[collections_collectionId_items_path] = { + "get": { + "operationId": "retrieveMovingFeatures", + "summary": "Retrieve moving feature collection", + "description": "A user can retrieve moving feature collection to access the static information of the moving feature by simple filtering and a limit.\n\nSpecifically, if the `subTrajectory` parameter is \"true\", it will return the temporal geometry within the time interval specified by `datetime` parameter.\n", # noqa + "tags": [ + "MovingFeatures" + ], + "parameters": [ + { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId" # noqa + }, + { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/bbox" # noqa + }, + { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/datetime" # noqa + }, + { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/limit" # noqa + }, + { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/subtrajectory" # noqa + } + ], + "responses": { + "200": { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/MovingFeatures" # noqa + }, + "404": { + "description": "A collection with the specified id was not found." 
# noqa + }, + "500": { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError" # noqa + } + } + }, + "post": { + "operationId": "insertMovingFeatures", + "summary": "Insert moving features", + "description": "A user SHOULD insert a set of moving features or a moving feature into a collection with id `collectionId`.\n\nThe request body schema SHALL follows the [MovingFeature object](https://docs.opengeospatial.org/is/19-045r3/19-045r3.html#mfeature) or \n[MovingFeatureCollection object](https://docs.opengeospatial.org/is/19-045r3/19-045r3.html#mfeaturecollection) in the OGC MF-JSON.\n", # noqa + "tags": [ + "MovingFeatures" + ], + "parameters": [ + { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId" # noqa + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "oneOf": [ + { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/schemas/movingFeature-mfjson" # noqa + }, + { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/schemas/movingFeatureCollection" # noqa + } + ] + }, + "example": { + "type": "Feature", + "crs": { + "type": "Name", + "properties": { + "name": "urn:ogc:def:crs:OGC:1.3:CRS84" # noqa + } + }, + "trs": { + "type": "Link", + "properties": { + "type": "OGCDEF", + "href": "http://www.opengis.net/def/uom/ISO-8601/0/Gregorian" # noqa + } + }, + "temporalGeometry": { + "type": "MovingPoint", + "datetimes": [ + "2011-07-14T22:01:01Z", + "2011-07-14T22:01:02Z", + "2011-07-14T22:01:03Z", + "2011-07-14T22:01:04Z", + "2011-07-14T22:01:05Z" + ], + "coordinates": [ + [ + 139.757083, + 35.627701, + 0.5 + ], + [ + 139.757399, + 35.627701, + 2 + ], + [ + 139.757555, + 35.627688, + 4 + ], + [ + 139.757651, + 35.627596, + 4 + ], + [ + 139.757716, + 35.627483, + 4 + ] + ], + "interpolation": "Linear", + "base": { + "type": "glTF", + "href": "http://www.opengis.net/spec/movingfeatures/json/1.0/prism/example/car3dmodel.gltf" # noqa + }, + "orientations": [ + { + "scales": [ + 1, 
+ 1, + 1 + ], + "angles": [ + 0, + 0, + 0 + ] + }, + { + "scales": [ + 1, + 1, + 1 + ], + "angles": [ + 0, + 355, + 0 + ] + }, + { + "scales": [ + 1, + 1, + 1 + ], + "angles": [ + 0, + 0, + 330 + ] + }, + { + "scales": [ + 1, + 1, + 1 + ], + "angles": [ + 0, + 0, + 300 + ] + }, + { + "scales": [ + 1, + 1, + 1 + ], + "angles": [ + 0, + 0, + 270 + ] + } + ] + }, + "temporalProperties": [ + { + "datetimes": [ + "2011-07-14T22:01:01.450Z", + "2011-07-14T23:01:01.450Z", + "2011-07-15T00:01:01.450Z" + ], + "length": { + "type": "Measure", + "form": "http://qudt.org/vocab/quantitykind/Length", # noqa + "values": [ + 1, + 2.4, + 1 + ], + "interpolation": "Linear" + }, + "discharge": { + "type": "Measure", + "form": "MQS", + "values": [ + 3, + 4, + 5 + ], + "interpolation": "Step" + } + }, + { + "datetimes": [ + 1465621816590, + 1465711526300 + ], + "camera": { + "type": "Image", + "values": [ + "http://www.opengis.net/spec/movingfeatures/json/1.0/prism/example/image1", # noqa + "iVBORw0KGgoAAAANSUhEU......" 
+ ], + "interpolation": "Discrete" + }, + "labels": { + "type": "Text", + "values": [ + "car", + "human" + ], + "interpolation": "Discrete" + } + } + ], + "geometry": { + "type": "LineString", + "coordinates": [ + [ + 139.757083, + 35.627701, + 0.5 + ], + [ + 139.757399, + 35.627701, + 2 + ], + [ + 139.757555, + 35.627688, + 4 + ], + [ + 139.757651, + 35.627596, + 4 + ], + [ + 139.757716, + 35.627483, + 4 + ] + ] + }, + "properties": { + "name": "car1", + "state": "test1", + "video": "http://www.opengis.net/spec/movingfeatures/json/1.0/prism/example/video.mpeg" # noqa + }, + "bbox": [ + 139.757083, + 35.627483, + 0, + 139.757716, + 35.627701, + 4.5 + ], + "time": [ + "2011-07-14T22:01:01Z", + "2011-07-15T01:11:22Z" + ], + "id": "mf-1" + } + } + } + }, + "responses": { + "201": { + "description": "Successful create a set of moving features or a moving feature into a specific collection.\n", # noqa + "headers": { + "Locations": { + "description": "A list of URI of the newly added resources", # noqa + "schema": { + "type": "array", + "items": { + "type": "string" + }, + "example": [ + "https://data.example.org/collections/mfc-1/items/mf-1", # noqa + "https://data.example.org/collections/mfc-1/items/109301273" # noqa + ] + } + } + } + }, + "400": { + "description": "A query parameter was not validly used." + }, + "404": { + "description": "A collection with the specified id was not found." 
# noqa + }, + "500": { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError" # noqa + } + } + } + } + + collections_collectionId_items_mFeatureId_path = '/collections/{collectionId}/items/{mFeatureId}' # noqa + paths[collections_collectionId_items_mFeatureId_path] = { + "get": { + "operationId": "accessMovingFeature", + "summary": "Access the static data of the moving feature", + "description": "A user can access a static data of a moving feature with id `mFeatureId`.\n\nThe static data of a moving feature is not included temporal geometries and temporal properties.\n", # noqa + "tags": [ + "MovingFeatures" + ], + "parameters": [ + { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId" # noqa + }, + { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/mFeatureId" # noqa + } + ], + "responses": { + "200": { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/MovingFeature" # noqa + }, + "404": { + "description": "- A collection with the specified id was not found.\n- Or a moving feature with the specified id was not found.\n" # noqa + }, + "500": { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError" # noqa + } + } + }, + "delete": { + "operationId": "deleteMovingFeature", + "summary": "Delete a single moving feature", + "description": "The moving feature with id `mFeatureId` and including temporal geometries and properties SHOULD be deleted.\n", # noqa + "tags": [ + "MovingFeatures" + ], + "parameters": [ + { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId" # noqa + }, + { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/mFeatureId" # noqa + } + ], + "responses": { + "204": { + "description": "Successfully deleted." 
+ }, + "404": { + "description": "- A collection with the specified id was not found.\n- Or a moving feature with the specified id was not found.\n" # noqa + }, + "500": { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError" # noqa + } + } + } + } + collections_collectionId_items_mFeatureId_tgsequence_path = '/collections/{collectionId}/items/{mFeatureId}/tgsequence' # noqa + paths[collections_collectionId_items_mFeatureId_tgsequence_path] = { + "get": { + "operationId": "retrieveTemporalGeometrySequence", + "summary": "Retrieve the movement data of the single moving feature", # noqa + "description": "A user can retrieve only the movement data of a moving feature with id `mFeatureId` by simple filtering and a limit.\n", # noqa + "tags": [ + "TemporalGeometry" + ], + "parameters": [ + { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId" # noqa + }, + { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/mFeatureId" # noqa + }, + { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/bbox" # noqa + }, + { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/datetime" # noqa + }, + { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/limit" # noqa + }, + { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/leaf" # noqa + }, + { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/subtrajectory" # noqa + } + ], + "responses": { + "200": { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/TemporalGeometrySequence" # noqa + }, + "500": { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError" # noqa + } + } + }, + "post": { + "operationId": "insertTemporalPrimitiveGeometry", + "summary": "Add movement data into the moving feature", + "description": "A user SHOULD add more movement data into a moving feature with id `mFeatureId`.\n\nThe request body schema SHALL follows the 
[TemporalPrimitiveGeometry object](https://docs.ogc.org/is/19-045r3/19-045r3.html#tprimitive) in the OGC MF-JSON.\n", # noqa + "tags": [ + "TemporalGeometry" + ], + "parameters": [ + { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId" # noqa + }, + { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/mFeatureId" # noqa + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/schemas/temporalPrimitiveGeometry" # noqa + }, + "example": { + "type": "MovingPoint", + "datetimes": [ + "2011-07-14T22:01:06Z", + "2011-07-14T22:01:07Z", + "2011-07-14T22:01:08Z", + "2011-07-14T22:01:09Z", + "2011-07-14T22:01:10Z" + ], + "coordinates": [ + [ + 139.757083, + 35.627701, + 0.5 + ], + [ + 139.757399, + 35.627701, + 2 + ], + [ + 139.757555, + 35.627688, + 4 + ], + [ + 139.757651, + 35.627596, + 4 + ], + [ + 139.757716, + 35.627483, + 4 + ] + ], + "interpolation": "Linear", + "base": { + "type": "glTF", + "href": "https://www.opengis.net/spec/movingfeatures/json/1.0/prism/example/car3dmodel.gltf" # noqa + }, + "orientations": [ + { + "scales": [ + 1, + 1, + 1 + ], + "angles": [ + 0, + 0, + 0 + ] + }, + { + "scales": [ + 1, + 1, + 1 + ], + "angles": [ + 0, + 355, + 0 + ] + }, + { + "scales": [ + 1, + 1, + 1 + ], + "angles": [ + 0, + 0, + 330 + ] + }, + { + "scales": [ + 1, + 1, + 1 + ], + "angles": [ + 0, + 0, + 300 + ] + }, + { + "scales": [ + 1, + 1, + 1 + ], + "angles": [ + 0, + 0, + 270 + ] + } + ] + } + } + } + }, + "responses": { + "201": { + "description": "Successful add more movement data into a specified moving feature.\n", # noqa + "headers": { + "Location": { + "description": "A URI of the newly added resource", # noqa + "schema": { + "type": "string", + "example": "https://data.example.org/collections/mfc-1/items/mf-1/tgsequence/tg-2" # noqa + } + } + } + }, + "400": { + "description": "A query parameter was not validly used." 
# noqa
+ },
+ "404": {
+ "description": "- A collection with the specified id was not found.\n- Or a moving feature with the specified id was not found.\n" # noqa
+ },
+ "500": {
+ "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError" # noqa
+ }
+ }
+ }
+ }
+
+ collections_collectionId_items_mFeatureId_tgsequence_tGeometryId_path = '/collections/{collectionId}/items/{mFeatureId}/tgsequence/{tGeometryId}' # noqa
+ paths[collections_collectionId_items_mFeatureId_tgsequence_tGeometryId_path] = { # noqa
+ "delete": {
+ "operationId": "deleteTemporalPrimitiveGeometry",
+ "summary": "Delete a single temporal primitive geometry",
+ "description": "The temporal primitive geometry with id `tGeometryId` SHOULD be deleted.\n", # noqa
+ "tags": [
+ "TemporalGeometry"
+ ],
+ "parameters": [
+ {
+ "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId" # noqa
+ },
+ {
+ "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/mFeatureId" # noqa
+ },
+ {
+ "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/tGeometryId" # noqa
+ }
+ ],
+ "responses": {
+ "204": {
+ "description": "Successfully deleted." 
+ }, + "404": { + "description": "- A collection with the specified id was not found.\n- Or a moving feature with the specified id was not found.\n- Or a temporal primitive geometry with the specified id was not found.\n" # noqa + }, + "500": { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError" # noqa + } + } + } + } + + collections_collectionId_items_mFeatureId_tgsequence_tGeometryId_distance_path = '/collections/{collectionId}/items/{mFeatureId}/tgsequence/{tGeometryId}/distance' # noqa + paths[collections_collectionId_items_mFeatureId_tgsequence_tGeometryId_distance_path] = { # noqa + "get": { + "operationId": "getDistanceOfTemporalPrimitiveGeometry", + "summary": "Get a time-to-distance curve of a temporal primitive geometry", # noqa + "description": "A user can get time-to-distance curve of a temporal primitive geometry with id `tGeometryId`.\n", # noqa + "tags": [ + "TemporalGeometryQuery" + ], + "parameters": [ + { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId" # noqa + }, + { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/mFeatureId" # noqa + }, + { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/tGeometryId" # noqa + }, + { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/datetime" # noqa + }, + { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/leaf" # noqa + }, + { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/subtemporalvalue" # noqa + } + ], + "responses": { + "200": { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/DistanceQuery" # noqa + }, + "400": { + "description": "A query parameter was not validly used." 
# noqa + }, + "500": { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError" # noqa + } + } + } + } + + collections_collectionId_items_mFeatureId_tgsequence_tGeometryId_velocity_path = '/collections/{collectionId}/items/{mFeatureId}/tgsequence/{tGeometryId}/velocity' # noqa + paths[collections_collectionId_items_mFeatureId_tgsequence_tGeometryId_velocity_path] = { # noqa + "get": { + "operationId": "getVelocityOfTemporalPrimitiveGeometry", + "summary": "Get a time-to-velocity curve of a temporal primitive geometry", # noqa + "description": "A user can get time-to-velocity curve of a temporal primitive geometry with id `tGeometryId`.\n", # noqa + "tags": [ + "TemporalGeometryQuery" + ], + "parameters": [ + { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId" # noqa + }, + { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/mFeatureId" # noqa + }, + { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/tGeometryId" # noqa + }, + { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/datetime" # noqa + }, + { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/leaf" # noqa + }, + { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/subtemporalvalue" # noqa + } + ], + "responses": { + "200": { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/VelocityQuery" # noqa + }, + "400": { + "description": "A query parameter was not validly used." 
+ }, + "500": { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError" # noqa + } + } + } + } + + collections_collectionId_items_mFeatureId_tgsequence_tGeometryId_acceleration_path = '/collections/{collectionId}/items/{mFeatureId}/tgsequence/{tGeometryId}/acceleration' # noqa + paths[collections_collectionId_items_mFeatureId_tgsequence_tGeometryId_acceleration_path] = { # noqa + "get": { + "operationId": "getAccelerationOfTemporalPrimitiveGeometry", + "summary": "Get a time-to-acceleration curve of a temporal primitive geometry", # noqa + "description": "A user can get time-to-acceleration curve of a temporal primitive geometry with id `tGeometryId`.\n", # noqa + "tags": [ + "TemporalGeometryQuery" + ], + "parameters": [ + { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId" # noqa + }, + { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/mFeatureId" # noqa + }, + { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/tGeometryId" # noqa + }, + { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/datetime" # noqa + }, + { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/leaf" # noqa + }, + { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/subtemporalvalue" # noqa + } + ], + "responses": { + "200": { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/AccelerationQuery" # noqa + }, + "400": { + "description": "A query parameter was not validly used." 
# noqa + }, + "500": { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError" # noqa + } + } + } + } + + collections_collectionId_items_mFeatureId_tproperties_path = '/collections/{collectionId}/items/{mFeatureId}/tproperties' # noqa + paths[collections_collectionId_items_mFeatureId_tproperties_path] = { # noqa + "get": { + "operationId": "retrieveTemporalProperties", + "summary": "Retrieve a set of the temporal property data", + "description": "A user can retrieve the static information of the temporal property data that included a single moving feature with id `mFeatureId`.\n\nThe static data of a temporal property is not included temporal values (property `valueSequence`).\n\nAlso a user can retrieve the sub sequence of the temporal information of the temporal property data for the specified time interval with `subTemporalValue` query parameter. \nIn this case, `temporalProperties` property schema SHALL follows the [TemporalProperties object](https://docs.ogc.org/is/19-045r3/19-045r3.html#tproperties) in the OGC MF-JSON.\n", # noqa + "tags": [ + "TemporalProperty" + ], + "parameters": [ + { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId" # noqa + }, + { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/mFeatureId" # noqa + }, + { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/datetime" # noqa + }, + { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/limit" # noqa + }, + { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/subtemporalvalue" # noqa + } + ], + "responses": { + "200": { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/TemporalProperties" # noqa + }, + "400": { + "description": "A query parameter was not validly used." 
# noqa + }, + "500": { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError" # noqa + } + } + }, + "post": { + "operationId": "insertTemporalProperty", + "summary": "Add temporal property data", + "description": "A user SHOULD add new temporal property data into a moving feature with id `mFeatureId`.\n\nThe request body schema SHALL follows the [TemporalProperties object](https://docs.opengeospatial.org/is/19-045r3/19-045r3.html#tproperties) in the OGC MF-JSON.\n", # noqa + "tags": [ + "TemporalProperty" + ], + "parameters": [ + { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId" # noqa + }, + { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/mFeatureId" # noqa + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/schemas/temporalProperties-mfjson" # noqa + }, + "example": [ + { + "datetimes": [ + "2011-07-14T22:01:01.450Z", + "2011-07-14T23:01:01.450Z", + "2011-07-15T00:01:01.450Z" + ], + "length": { + "type": "Measure", + "form": "http://qudt.org/vocab/quantitykind/Length", # noqa + "values": [ + 1, + 2.4, + 1 + ], + "interpolation": "Linear" + }, + "discharge": { + "type": "Measure", + "form": "MQS", + "values": [ + 3, + 4, + 5 + ], + "interpolation": "Step" + } + }, + { + "datetimes": [ + "2011-07-14T22:01:01.450Z", + "2011-07-14T23:01:01.450Z" + ], + "camera": { + "type": "Image", + "values": [ + "http://www.opengis.net/spec/movingfeatures/json/1.0/prism/example/image1", # noqa + "iVBORw0KGgoAAAANSUhEU......" 
+ ], + "interpolation": "Discrete" + }, + "labels": { + "type": "Text", + "values": [ + "car", + "human" + ], + "interpolation": "Discrete" + } + } + ] + } + } + }, + "responses": { + "201": { + "description": "Successful add more temporal property into a specified moving feature.\n", # noqa + "headers": { + "Locations": { + "description": "A list of URI of the newly added resources", # noqa + "schema": { + "type": "array", + "items": { + "type": "string" + }, + "example": [ + "https://data.example.org/collections/mfc-1/items/mf-1/tproperties/length", # noqa + "https://data.example.org/collections/mfc-1/items/mf-1/tproperties/discharge", # noqa + "https://data.example.org/collections/mfc-1/items/mf-1/tproperties/camera", # noqa + "https://data.example.org/collections/mfc-1/items/mf-1/tproperties/labels" # noqa + ] + } + } + } + }, + "404": { + "description": "- A collection with the specified id was not found.\n- Or a moving feature with the specified id was not found.\n" # noqa + }, + "500": { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError" # noqa + } + } + } + } + + collections_collectionId_items_mFeatureId_tproperties_tPropertyName_path = '/collections/{collectionId}/items/{mFeatureId}/tproperties/{tPropertyName}' # noqa + paths[collections_collectionId_items_mFeatureId_tproperties_tPropertyName_path] = { # noqa + "get": { + "operationId": "retrieveTemporalProperty", + "summary": "Retrieve a temporal property", + "description": "A user can retrieve only the temporal values with a specified name `tPropertyName` of temporal property.\n", # noqa + "tags": [ + "TemporalProperty" + ], + "parameters": [ + { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId" # noqa + }, + { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/mFeatureId" # noqa + }, + { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/tPropertyName" # noqa + }, + { + "$ref": 
f"{OPENAPI_YAML['movingfeature']}#/components/parameters/datetime" # noqa + }, + { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/leaf" # noqa + }, + { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/subtemporalvalue" # noqa + } + ], + "responses": { + "200": { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/TemporalProperty" # noqa + }, + "400": { + "description": "A query parameter was not validly used." # noqa + }, + "500": { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError" # noqa + } + } + }, + "post": { + "operationId": "insertTemporalPrimitiveValue", + "summary": "Add temporal primitive value data", + "description": "A user SHOULD add more temporal primitive value data into a temporal property with id `tPropertyName`.\n", # noqa + "tags": [ + "TemporalProperty" + ], + "parameters": [ + { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId" # noqa + }, + { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/mFeatureId" # noqa + }, + { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/tPropertyName" # noqa + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/schemas/temporalPrimitiveValue" # noqa + }, + "example": { + "datetimes": [ + "2011-07-15T08:00:00Z", + "2011-07-15T08:00:01Z", + "2011-07-15T08:00:02Z" + ], + "values": [ + 0, + 20, + 50 + ], + "interpolation": "Linear" + } + } + } + }, + "responses": { + "201": { + "description": "Successful add more temporal primitive value data into a specified temporal property.\n", # noqa + "headers": { + "Location": { + "description": "A URI of the newly added resource", + "schema": { + "type": "string", + "example": "https://data.example.org/collections/mfc-1/items/mf-1/tproperties/tvalue/tpv-1" # noqa + } + } + } + }, + "404": { + "description": "- A collection with the specified id was 
not found.\n- Or a moving feature with the specified id was not found.\n- Or a temporal property with the specified id was not found.\n" # noqa
+ },
+ "500": {
+ "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError" # noqa
+ }
+ }
+ },
+ "delete": {
+ "operationId": "deleteTemporalProperty",
+ "summary": "Delete a specified temporal property",
+ "description": "The temporal property with id `tPropertyName` SHOULD be deleted.\n", # noqa
+ "tags": [
+ "TemporalProperty"
+ ],
+ "parameters": [
+ {
+ "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId" # noqa
+ },
+ {
+ "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/mFeatureId" # noqa
+ },
+ {
+ "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/tPropertyName" # noqa
+ }
+ ],
+ "responses": {
+ "204": {
+ "description": "Successfully deleted."
+ },
+ "404": {
+ "description": "- A collection with the specified id was not found.\n- Or a moving feature with the specified id was not found.\n- Or a temporal property with the specified id was not found.\n" # noqa
+ },
+ "500": {
+ "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError" # noqa
+ }
+ }
+ }
+ }
+
+ collections_collectionId_items_mFeatureId_tproperties_tPropertyName_tValueId_path = '/collections/{collectionId}/items/{mFeatureId}/tproperties/{tPropertyName}/{tValueId}' # noqa
+ paths[collections_collectionId_items_mFeatureId_tproperties_tPropertyName_tValueId_path] = { # noqa
+ "delete": {
+ "operationId": "deleteTemporalPrimitiveValue",
+ "summary": "Delete a single temporal primitive value",
+ "description": "The temporal primitive value with id `tValueId` SHOULD be deleted.\n", # noqa
+ "tags": [
+ "TemporalProperty"
+ ],
+ "parameters": [
+ {
+ "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId" # noqa
+ },
+ {
+ "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/mFeatureId" # noqa
+ },
+ {
+ "$ref": 
f"{OPENAPI_YAML['movingfeature']}#/components/parameters/tPropertyName" # noqa
+ },
+ {
+ "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/tValueId" # noqa
+ }
+ ],
+ "responses": {
+ "204": {
+ "description": "Successfully deleted."
+ },
+ "404": {
+ "description": "- A collection with the specified id was not found.\n- Or a moving feature with the specified id was not found.\n- Or a temporal property with the specified id was not found.\n- Or a temporal primitive value with the specified id was not found.\n" # noqa
+ },
+ "500": {
+ "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError" # noqa
+ }
+ }
+ }
+ }
+
+ return [{'name': 'MovingFeatureCollection'}], {'paths': paths}
+# fmt: on
diff --git a/tests/api/test_movingfeature.py b/tests/api/test_movingfeature.py
index acf986e9e..2883a0b07 100644
--- a/tests/api/test_movingfeature.py
+++ b/tests/api/test_movingfeature.py
@@ -1,32 +1,37 @@
-from pygeoapi.api.movingfeatures import MOVING_FEATURES
 from pygeoapi.provider.postgresql_mobilitydb import PostgresMobilityDB
 import pytest
 import json
 from http import HTTPStatus
 from pygeoapi.util import yaml_load
-from tests.util import get_test_file_path, mock_request
+from tests.util import get_test_file_path, mock_api_request
+from pygeoapi.api.movingfeatures import (
+ manage_collection,
+ manage_collection_item,
+ manage_collection_item_tGeometry,
+ manage_collection_item_tProperty,
+ manage_collection_item_tProperty_value,
+ get_collection_items,
+ get_collection,
+ get_collection_item,
+ get_collection_items_tGeometry,
+ get_collection_items_tGeometry_velocity,
+ get_collection_items_tGeometry_distance,
+ get_collection_items_tGeometry_acceleration,
+ get_collection_items_tProperty,
+ get_collection_items_tProperty_value)
-@pytest.fixture()
-def config():
- with open(get_test_file_path('example-config.yml')) as fh:
- return yaml_load(fh)
+from pygeoapi.api import API
 @pytest.fixture()
-def openapi():
+def api_():
+ with 
open(get_test_file_path('example-config.yml')) as fh: + config = yaml_load(fh) with open(get_test_file_path('example-openapi.yml')) as fh: - return yaml_load(fh) - -# @pytest.fixture(scope="session") -# def context(): -# return { -# 'collection_id':'f81e0521-cf63-4cc5-b690-3daf1f326104', -# 'mfeature_id':'196695e8-b79b-4655-a1e0-b90de887f205', -# 'tgeometry_id':'1d2edbdc-717a-4fcb-94ad-19a00ee208e0', -# 'tProperty_name':'labels' -# } + openapi = yaml_load(fh) + return API(config, openapi) @pytest.fixture(scope="session") @@ -456,27 +461,23 @@ def temporalvalue_data(): def test_manage_collection_create( - config, - openapi, + api_, collection_property, context): - mf = MOVING_FEATURES(config, openapi) # missing request data - req = mock_request() - rsp_headers, code, response = mf.manage_collection(req, 'create') + req = mock_api_request() + rsp_headers, code, response = manage_collection(api_, req, 'create') assert code == HTTPStatus.BAD_REQUEST # invalid request data - req = mock_request() - req.data = 'Invalid data. Valid parameter is JSON' - rsp_headers, code, response = mf.manage_collection(req, 'create') + req = mock_api_request(data='Invalid data. 
Valid data is JSON') + rsp_headers, code, response = manage_collection(api_, req, 'create') assert code == HTTPStatus.BAD_REQUEST # successful request data - req = mock_request() - req.data = json.dumps(collection_property) - rsp_headers, code, response = mf.manage_collection(req, 'create') + req = mock_api_request(data=json.dumps(collection_property)) + rsp_headers, code, response = manage_collection(api_, req, 'create') assert code == HTTPStatus.CREATED assert response == '' assert rsp_headers['Content-Type'] == 'application/json' @@ -489,26 +490,24 @@ def test_manage_collection_create( def test_manage_collection_item_create( - config, openapi, movingfeature, context): - mf = MOVING_FEATURES(config, openapi) + api_, movingfeature, context): # collection not found - req = mock_request() - rsp_headers, code, response = mf.manage_collection_item( - req, 'create', '00000000-0000-0000-0000-000000000000') + req = mock_api_request() + rsp_headers, code, response = manage_collection_item( + api_, req, 'create', '00000000-0000-0000-0000-000000000000') assert code == HTTPStatus.NOT_FOUND # no data found - req = mock_request() - rsp_headers, code, response = mf.manage_collection_item( - req, 'create', context['collection_id']) + req = mock_api_request() + rsp_headers, code, response = manage_collection_item( + api_, req, 'create', context['collection_id']) assert code == HTTPStatus.BAD_REQUEST # invalid request data - req = mock_request() - req.data = 'data' - rsp_headers, code, response = mf.manage_collection_item( - req, 'create', context['collection_id']) + req = mock_api_request(data='Invalid data. 
Valid data is JSON') + rsp_headers, code, response = manage_collection_item( + api_, req, 'create', context['collection_id']) assert code == HTTPStatus.BAD_REQUEST # The required tag (e.g., type,temporalgeometry) @@ -516,17 +515,15 @@ def test_manage_collection_item_create( missing_data = dict(movingfeature) del missing_data['temporalGeometry'] - req = mock_request() - req.data = json.dumps(missing_data) - rsp_headers, code, response = mf.manage_collection_item( - req, 'create', context['collection_id']) + req = mock_api_request(data=json.dumps(missing_data)) + rsp_headers, code, response = manage_collection_item( + api_, req, 'create', context['collection_id']) assert code == HTTPStatus.NOT_IMPLEMENTED # successful request data - req = mock_request() - req.data = json.dumps(movingfeature) - rsp_headers, code, response = mf.manage_collection_item( - req, 'create', context['collection_id']) + req = mock_api_request(data=json.dumps(movingfeature)) + rsp_headers, code, response = manage_collection_item( + api_, req, 'create', context['collection_id']) assert code == HTTPStatus.CREATED assert response == '' @@ -540,27 +537,25 @@ def test_manage_collection_item_create( def test_manage_collection_item_tGeometry_create( - config, openapi, temporalgeometry, context): - mf = MOVING_FEATURES(config, openapi) + api_, temporalgeometry, context): # feature not found - req = mock_request() - rsp_headers, code, response = mf.manage_collection_item_tGeometry( - req, 'create', '00000000-0000-0000-0000-000000000000', + req = mock_api_request() + rsp_headers, code, response = manage_collection_item_tGeometry( + api_, req, 'create', '00000000-0000-0000-0000-000000000000', '00000000-0000-0000-0000-000000000000') assert code == HTTPStatus.NOT_FOUND # no data found - req = mock_request() - rsp_headers, code, response = mf.manage_collection_item_tGeometry( - req, 'create', context['collection_id'], context['mfeature_id']) + req = mock_api_request() + rsp_headers, code, response = 
manage_collection_item_tGeometry( + api_, req, 'create', context['collection_id'], context['mfeature_id']) assert code == HTTPStatus.BAD_REQUEST # invalid request data - req = mock_request() - req.data = 'data' - rsp_headers, code, response = mf.manage_collection_item_tGeometry( - req, 'create', context['collection_id'], context['mfeature_id']) + req = mock_api_request(data='Invalid data. Valid data is JSON') + rsp_headers, code, response = manage_collection_item_tGeometry( + api_, req, 'create', context['collection_id'], context['mfeature_id']) assert code == HTTPStatus.BAD_REQUEST # The required tag (e.g., type,prisms) @@ -568,17 +563,15 @@ def test_manage_collection_item_tGeometry_create( missing_data = dict(temporalgeometry) del missing_data['type'] - req = mock_request() - req.data = json.dumps(missing_data) - rsp_headers, code, response = mf.manage_collection_item_tGeometry( - req, 'create', context['collection_id'], context['mfeature_id']) + req = mock_api_request(data=json.dumps(missing_data)) + rsp_headers, code, response = manage_collection_item_tGeometry( + api_, req, 'create', context['collection_id'], context['mfeature_id']) assert code == HTTPStatus.NOT_IMPLEMENTED # successful request data - req = mock_request() - req.data = json.dumps(temporalgeometry) - rsp_headers, code, response = mf.manage_collection_item_tGeometry( - req, 'create', context['collection_id'], context['mfeature_id']) + req = mock_api_request(data=json.dumps(temporalgeometry)) + rsp_headers, code, response = manage_collection_item_tGeometry( + api_, req, 'create', context['collection_id'], context['mfeature_id']) assert code == HTTPStatus.CREATED assert response == '' @@ -592,27 +585,25 @@ def test_manage_collection_item_tGeometry_create( def test_manage_collection_item_tProperty_create( - config, openapi, temporalproperties, context): - mf = MOVING_FEATURES(config, openapi) + api_, temporalproperties, context): # feature not found - req = mock_request() - rsp_headers, code, 
response = mf.manage_collection_item_tProperty( - req, 'create', '00000000-0000-0000-0000-000000000000', + req = mock_api_request() + rsp_headers, code, response = manage_collection_item_tProperty( + api_, req, 'create', '00000000-0000-0000-0000-000000000000', '00000000-0000-0000-0000-000000000000') assert code == HTTPStatus.NOT_FOUND # no data found - req = mock_request() - rsp_headers, code, response = mf.manage_collection_item_tProperty( - req, 'create', context['collection_id'], context['mfeature_id']) + req = mock_api_request() + rsp_headers, code, response = manage_collection_item_tProperty( + api_, req, 'create', context['collection_id'], context['mfeature_id']) assert code == HTTPStatus.BAD_REQUEST # invalid request data - req = mock_request() - req.data = 'data' - rsp_headers, code, response = mf.manage_collection_item_tProperty( - req, 'create', context['collection_id'], context['mfeature_id']) + req = mock_api_request(data='Invalid data. Valid data is JSON') + rsp_headers, code, response = manage_collection_item_tProperty( + api_, req, 'create', context['collection_id'], context['mfeature_id']) assert code == HTTPStatus.BAD_REQUEST # The required tag (e.g., datetimes,interpolation) @@ -622,17 +613,15 @@ def test_manage_collection_item_tProperty_create( missing_data.append(dict(temporalproperty)) del missing_data[0]['datetimes'] - req = mock_request() - req.data = json.dumps(missing_data, indent=2) - rsp_headers, code, response = mf.manage_collection_item_tProperty( - req, 'create', context['collection_id'], context['mfeature_id']) + req = mock_api_request(data=json.dumps(missing_data, indent=2)) + rsp_headers, code, response = manage_collection_item_tProperty( + api_, req, 'create', context['collection_id'], context['mfeature_id']) assert code == HTTPStatus.NOT_IMPLEMENTED # successful request data - req = mock_request() - req.data = json.dumps(temporalproperties, indent=2) - rsp_headers, code, response = mf.manage_collection_item_tProperty( - req, 
'create', context['collection_id'], context['mfeature_id']) + req = mock_api_request(data=json.dumps(temporalproperties, indent=2)) + rsp_headers, code, response = manage_collection_item_tProperty( + api_, req, 'create', context['collection_id'], context['mfeature_id']) assert code == HTTPStatus.CREATED assert response == '' @@ -647,28 +636,26 @@ def test_manage_collection_item_tProperty_create( def test_manage_collection_item_tProperty_value_create( - config, openapi, temporalvalue_data, context): - mf = MOVING_FEATURES(config, openapi) + api_, temporalvalue_data, context): # temporal property not found - req = mock_request() - rsp_headers, code, response = mf.manage_collection_item_tProperty_value( - req, 'create', '00000000-0000-0000-0000-000000000000', + req = mock_api_request() + rsp_headers, code, response = manage_collection_item_tProperty_value( + api_, req, 'create', '00000000-0000-0000-0000-000000000000', '00000000-0000-0000-0000-000000000000', '') assert code == HTTPStatus.NOT_FOUND # no data found - req = mock_request() - rsp_headers, code, response = mf.manage_collection_item_tProperty_value( - req, 'create', context['collection_id'], context['mfeature_id'], + req = mock_api_request() + rsp_headers, code, response = manage_collection_item_tProperty_value( + api_, req, 'create', context['collection_id'], context['mfeature_id'], context['tProperty_name']) assert code == HTTPStatus.BAD_REQUEST # invalid request data - req = mock_request() - req.data = 'data' - rsp_headers, code, response = mf.manage_collection_item_tProperty_value( - req, 'create', context['collection_id'], context['mfeature_id'], + req = mock_api_request(data='Invalid data. 
Valid data is JSON') + rsp_headers, code, response = manage_collection_item_tProperty_value( + api_, req, 'create', context['collection_id'], context['mfeature_id'], context['tProperty_name']) assert code == HTTPStatus.BAD_REQUEST @@ -677,18 +664,16 @@ def test_manage_collection_item_tProperty_value_create( missing_data = dict(temporalvalue_data) del missing_data['datetimes'] - req = mock_request() - req.data = json.dumps(missing_data) - rsp_headers, code, response = mf.manage_collection_item_tProperty_value( - req, 'create', context['collection_id'], context['mfeature_id'], + req = mock_api_request(data=json.dumps(missing_data)) + rsp_headers, code, response = manage_collection_item_tProperty_value( + api_, req, 'create', context['collection_id'], context['mfeature_id'], context['tProperty_name']) assert code == HTTPStatus.NOT_IMPLEMENTED # successful request data - req = mock_request() - req.data = json.dumps(temporalvalue_data) - rsp_headers, code, response = mf.manage_collection_item_tProperty_value( - req, 'create', context['collection_id'], context['mfeature_id'], + req = mock_api_request(data=json.dumps(temporalvalue_data)) + rsp_headers, code, response = manage_collection_item_tProperty_value( + api_, req, 'create', context['collection_id'], context['mfeature_id'], context['tProperty_name']) assert code == HTTPStatus.CREATED @@ -698,111 +683,107 @@ def test_manage_collection_item_tProperty_value_create( def test_manage_collection_update( - config, - openapi, + api_, update_collection_property, context): - mf = MOVING_FEATURES(config, openapi) # missing request data - req = mock_request() - rsp_headers, code, response = mf.manage_collection( - req, 'update', context['collection_id']) + req = mock_api_request() + rsp_headers, code, response = manage_collection( + api_, req, 'update', context['collection_id']) assert code == HTTPStatus.BAD_REQUEST # invalid request data - req = mock_request() - req.data = 'data' - rsp_headers, code, response = 
mf.manage_collection( - req, 'update', context['collection_id']) + req = mock_api_request(data='Invalid data. Valid data is JSON') + rsp_headers, code, response = manage_collection( + api_, req, 'update', context['collection_id']) assert code == HTTPStatus.BAD_REQUEST # successful request data - req = mock_request() - req.data = json.dumps(update_collection_property) - rsp_headers, code, response = mf.manage_collection( - req, 'update', context['collection_id']) + req = mock_api_request(data=json.dumps(update_collection_property)) + rsp_headers, code, response = manage_collection( + api_, req, 'update', context['collection_id']) assert code == HTTPStatus.NO_CONTENT assert response == '' -def test_get_collection_items(config, openapi, context): - mf = MOVING_FEATURES(config, openapi) +def test_get_collection_items(api_, context): # not found - req = mock_request() - rsp_headers, code, response = mf.get_collection_items( - req, '00000000-0000-0000-0000-000000000000') + req = mock_api_request() + rsp_headers, code, response = get_collection_items( + api_, req, '00000000-0000-0000-0000-000000000000') assert code == HTTPStatus.NOT_FOUND # offset value should be positive or zero - req = mock_request({'offset': -1}) - rsp_headers, code, response = mf.get_collection_items( - req, context['collection_id']) + req = mock_api_request({'offset': -1}) + rsp_headers, code, response = get_collection_items( + api_, req, context['collection_id']) assert code == HTTPStatus.BAD_REQUEST # offset value should be an integer - req = mock_request({'offset': 'one'}) - rsp_headers, code, response = mf.get_collection_items( - req, context['collection_id']) + req = mock_api_request({'offset': 'one'}) + rsp_headers, code, response = get_collection_items( + api_, req, context['collection_id']) assert code == HTTPStatus.BAD_REQUEST # limit value should be strictly positive - req = mock_request({'offset': 0, 'limit': 0}) - rsp_headers, code, response = mf.get_collection_items( - req, 
context['collection_id']) + req = mock_api_request({'offset': 0, 'limit': 0}) + rsp_headers, code, response = get_collection_items( + api_, req, context['collection_id']) assert code == HTTPStatus.BAD_REQUEST # limit value should be less than or equal to 10000 - req = mock_request({'offset': 0, 'limit': 10001}) - rsp_headers, code, response = mf.get_collection_items( - req, context['collection_id']) + req = mock_api_request({'offset': 0, 'limit': 10001}) + rsp_headers, code, response = get_collection_items( + api_, req, context['collection_id']) assert code == HTTPStatus.BAD_REQUEST # limit value should be an integer - req = mock_request({'offset': 0, 'limit': 'one'}) - rsp_headers, code, response = mf.get_collection_items( - req, context['collection_id']) + req = mock_api_request({'offset': 0, 'limit': 'one'}) + rsp_headers, code, response = get_collection_items( + api_, req, context['collection_id']) assert code == HTTPStatus.BAD_REQUEST # bbox values must be numbers - req = mock_request( + req = mock_api_request( {'offset': 0, 'limit': 10, 'bbox': 'one,two,three,four'}) - rsp_headers, code, response = mf.get_collection_items( - req, context['collection_id']) + rsp_headers, code, response = get_collection_items( + api_, req, context['collection_id']) assert code == HTTPStatus.BAD_REQUEST # bbox should be 4 values (minx,miny,maxx,maxy) or 6 values # (minx,miny,minz,maxx,maxy,maxz) - req = mock_request({'offset': 0, 'limit': 10, 'bbox': '100,30,0,200,40'}) - rsp_headers, code, response = mf.get_collection_items( - req, context['collection_id']) + req = mock_api_request( + {'offset': 0, 'limit': 10, 'bbox': '100,30,0,200,40'}) + rsp_headers, code, response = get_collection_items( + api_, req, context['collection_id']) assert code == HTTPStatus.BAD_REQUEST # minx is greater than maxx (possibly antimeridian bbox) - req = mock_request( + req = mock_api_request( {'offset': 0, 'limit': 10, 'bbox': '200,30,0,100,40,10'}) - rsp_headers, code, response = 
mf.get_collection_items( - req, context['collection_id']) + rsp_headers, code, response = get_collection_items( + api_, req, context['collection_id']) assert code == HTTPStatus.BAD_REQUEST # datetime parameter out of range - req = mock_request({'offset': 0, - 'limit': 10, - 'bbox': '100,30,0,200,40,10', - 'datetime': '2011-07-14T23:01:01.000Z/2011-07-14T22:01:01.000Z'}) # noqa - rsp_headers, code, response = mf.get_collection_items( - req, context['collection_id']) + req = mock_api_request({'offset': 0, + 'limit': 10, + 'bbox': '100,30,0,200,40,10', + 'datetime': '2011-07-14T23:01:01.000Z/2011-07-14T22:01:01.000Z'}) # noqa + rsp_headers, code, response = get_collection_items( + api_, req, context['collection_id']) assert code == HTTPStatus.BAD_REQUEST - req = mock_request({'offset': 0, - 'limit': 10, - 'bbox': '100,30,0,200,40,10', - 'datetime': '2011-07-14T22:01:01.000Z/2011-07-14T23:01:01.000Z', # noqa - 'subTrajectory': 'true'}) - rsp_headers, code, response = mf.get_collection_items( - req, context['collection_id']) + req = mock_api_request({'offset': 0, + 'limit': 10, + 'bbox': '100,30,0,200,40,10', + 'datetime': '2011-07-14T22:01:01.000Z/2011-07-14T23:01:01.000Z', # noqa + 'subTrajectory': 'true'}) + rsp_headers, code, response = get_collection_items( + api_, req, context['collection_id']) assert code == HTTPStatus.OK assert rsp_headers['Content-Type'] == 'application/json' @@ -856,19 +837,18 @@ def test_get_collection_items(config, openapi, context): assert collection['numberReturned'] == 1 -def test_get_collection(config, openapi, context): - mf = MOVING_FEATURES(config, openapi) +def test_get_collection(api_, context): # not found - req = mock_request() - rsp_headers, code, response = mf.get_collection( - req, '00000000-0000-0000-0000-000000000000') + req = mock_api_request() + rsp_headers, code, response = get_collection( + api_, req, '00000000-0000-0000-0000-000000000000') assert code == HTTPStatus.NOT_FOUND # successful data - req = mock_request() - 
rsp_headers, code, response = mf.get_collection( - req, context['collection_id']) + req = mock_api_request() + rsp_headers, code, response = get_collection( + api_, req, context['collection_id']) assert code == HTTPStatus.OK assert rsp_headers['Content-Type'] == 'application/json' @@ -899,19 +879,18 @@ def test_get_collection(config, openapi, context): assert len(collection['links']) == 1 -def test_get_collection_item(config, openapi, context): - mf = MOVING_FEATURES(config, openapi) +def test_get_collection_item(api_, context): # not found - req = mock_request() - rsp_headers, code, response = mf.get_collection_item( - req, '00000000-0000-0000-0000-000000000000', + req = mock_api_request() + rsp_headers, code, response = get_collection_item( + api_, req, '00000000-0000-0000-0000-000000000000', '00000000-0000-0000-0000-000000000000') assert code == HTTPStatus.NOT_FOUND # successful data - rsp_headers, code, response = mf.get_collection_item( - req, context['collection_id'], context['mfeature_id']) + rsp_headers, code, response = get_collection_item( + api_, req, context['collection_id'], context['mfeature_id']) assert code == HTTPStatus.OK assert rsp_headers['Content-Type'] == 'application/json' @@ -944,105 +923,105 @@ def test_get_collection_item(config, openapi, context): assert len(mfeature['links']) == 1 -def test_get_collection_items_tGeometry(config, openapi, context): - mf = MOVING_FEATURES(config, openapi) +def test_get_collection_items_tGeometry(api_, context): # not found - req = mock_request() - rsp_headers, code, response = mf.get_collection_items_tGeometry( - req, '00000000-0000-0000-0000-000000000000', + req = mock_api_request() + rsp_headers, code, response = get_collection_items_tGeometry( + api_, req, '00000000-0000-0000-0000-000000000000', '00000000-0000-0000-0000-000000000000') assert code == HTTPStatus.NOT_FOUND # offset value should be positive or zero - req = mock_request({'offset': -1}) - rsp_headers, code, response = 
mf.get_collection_items_tGeometry( - req, context['collection_id'], context['mfeature_id']) + req = mock_api_request({'offset': -1}) + rsp_headers, code, response = get_collection_items_tGeometry( + api_, req, context['collection_id'], context['mfeature_id']) assert code == HTTPStatus.BAD_REQUEST # offset value should be an integer - req = mock_request({'offset': 'one'}) - rsp_headers, code, response = mf.get_collection_items_tGeometry( - req, context['collection_id'], context['mfeature_id']) + req = mock_api_request({'offset': 'one'}) + rsp_headers, code, response = get_collection_items_tGeometry( + api_, req, context['collection_id'], context['mfeature_id']) assert code == HTTPStatus.BAD_REQUEST # limit value should be strictly positive - req = mock_request({'offset': 0, 'limit': 0}) - rsp_headers, code, response = mf.get_collection_items_tGeometry( - req, context['collection_id'], context['mfeature_id']) + req = mock_api_request({'offset': 0, 'limit': 0}) + rsp_headers, code, response = get_collection_items_tGeometry( + api_, req, context['collection_id'], context['mfeature_id']) assert code == HTTPStatus.BAD_REQUEST # limit value should be less than or equal to 10000 - req = mock_request({'offset': 0, 'limit': 10001}) - rsp_headers, code, response = mf.get_collection_items_tGeometry( - req, context['collection_id'], context['mfeature_id']) + req = mock_api_request({'offset': 0, 'limit': 10001}) + rsp_headers, code, response = get_collection_items_tGeometry( + api_, req, context['collection_id'], context['mfeature_id']) assert code == HTTPStatus.BAD_REQUEST # limit value should be an integer - req = mock_request({'offset': 0, 'limit': 'one'}) - rsp_headers, code, response = mf.get_collection_items_tGeometry( - req, context['collection_id'], context['mfeature_id']) + req = mock_api_request({'offset': 0, 'limit': 'one'}) + rsp_headers, code, response = get_collection_items_tGeometry( + api_, req, context['collection_id'], context['mfeature_id']) assert code == 
HTTPStatus.BAD_REQUEST # bbox values must be numbers - req = mock_request( + req = mock_api_request( {'offset': 0, 'limit': 10, 'bbox': 'one,two,three,four'}) - rsp_headers, code, response = mf.get_collection_items_tGeometry( - req, context['collection_id'], context['mfeature_id']) + rsp_headers, code, response = get_collection_items_tGeometry( + api_, req, context['collection_id'], context['mfeature_id']) assert code == HTTPStatus.BAD_REQUEST # bbox should be 4 values (minx,miny,maxx,maxy) or 6 values # (minx,miny,minz,maxx,maxy,maxz) - req = mock_request({'offset': 0, 'limit': 10, 'bbox': '100,30,0,200,40'}) - rsp_headers, code, response = mf.get_collection_items_tGeometry( - req, context['collection_id'], context['mfeature_id']) + req = mock_api_request( + {'offset': 0, 'limit': 10, 'bbox': '100,30,0,200,40'}) + rsp_headers, code, response = get_collection_items_tGeometry( + api_, req, context['collection_id'], context['mfeature_id']) assert code == HTTPStatus.BAD_REQUEST # minx is greater than maxx (possibly antimeridian bbox) - req = mock_request( + req = mock_api_request( {'offset': 0, 'limit': 10, 'bbox': '200,30,0,100,40,10'}) - rsp_headers, code, response = mf.get_collection_items_tGeometry( - req, context['collection_id'], context['mfeature_id']) + rsp_headers, code, response = get_collection_items_tGeometry( + api_, req, context['collection_id'], context['mfeature_id']) assert code == HTTPStatus.BAD_REQUEST # invalid leaf - req = mock_request({'offset': 0, - 'limit': 10, - 'bbox': '100,30,0,200,40,10', - 'leaf': '2011-07-14T22:01:01.000Z,2011-07-14T22:01:01.000Z'}) # noqa - rsp_headers, code, response = mf.get_collection_items_tGeometry( - req, context['collection_id'], context['mfeature_id']) + req = mock_api_request({'offset': 0, + 'limit': 10, + 'bbox': '100,30,0,200,40,10', + 'leaf': '2011-07-14T22:01:01.000Z,2011-07-14T22:01:01.000Z'}) # noqa + rsp_headers, code, response = get_collection_items_tGeometry( + api_, req, context['collection_id'], 
context['mfeature_id']) assert code == HTTPStatus.BAD_REQUEST # cannot use both parameter `subTrajectory` and `leaf` at the same time - req = mock_request({'offset': 0, - 'limit': 10, - 'bbox': '100,30,0,200,40,10', - 'leaf': '2011-07-14T22:01:01.000Z', - 'subTrajectory': True}) - rsp_headers, code, response = mf.get_collection_items_tGeometry( - req, context['collection_id'], context['mfeature_id']) + req = mock_api_request({'offset': 0, + 'limit': 10, + 'bbox': '100,30,0,200,40,10', + 'leaf': '2011-07-14T22:01:01.000Z', + 'subTrajectory': True}) + rsp_headers, code, response = get_collection_items_tGeometry( + api_, req, context['collection_id'], context['mfeature_id']) assert code == HTTPStatus.BAD_REQUEST # datetime parameter out of range - req = mock_request({'offset': 0, - 'limit': 10, - 'bbox': '100,30,0,200,40,10', - 'leaf': '2011-07-14T22:01:01.000Z', - 'datetime': '2011-07-14T23:01:01.000Z/2011-07-14T22:01:01.000Z'}) # noqa - rsp_headers, code, response = mf.get_collection_items_tGeometry( - req, context['collection_id'], context['mfeature_id']) + req = mock_api_request({'offset': 0, + 'limit': 10, + 'bbox': '100,30,0,200,40,10', + 'leaf': '2011-07-14T22:01:01.000Z', + 'datetime': '2011-07-14T23:01:01.000Z/2011-07-14T22:01:01.000Z'}) # noqa + rsp_headers, code, response = get_collection_items_tGeometry( + api_, req, context['collection_id'], context['mfeature_id']) assert code == HTTPStatus.BAD_REQUEST # successful data - req = mock_request({'offset': 0, - 'limit': 10, - 'bbox': '100,30,0,200,40,10', - 'leaf': '2011-07-14T22:01:01.000Z', - 'datetime': '2011-07-14T22:01:01.000Z/2011-07-14T23:01:01.000Z'}) # noqa - rsp_headers, code, response = mf.get_collection_items_tGeometry( - req, context['collection_id'], context['mfeature_id']) + req = mock_api_request({'offset': 0, + 'limit': 10, + 'bbox': '100,30,0,200,40,10', + 'leaf': '2011-07-14T22:01:01.000Z', + 'datetime': '2011-07-14T22:01:01.000Z/2011-07-14T23:01:01.000Z'}) # noqa + rsp_headers, code, 
response = get_collection_items_tGeometry( + api_, req, context['collection_id'], context['mfeature_id']) assert code == HTTPStatus.OK @@ -1075,13 +1054,12 @@ def test_get_collection_items_tGeometry(config, openapi, context): assert temporal_geometries['numberReturned'] == 1 -def test_get_collection_items_tGeometry_velocity(config, openapi, context): - mf = MOVING_FEATURES(config, openapi) +def test_get_collection_items_tGeometry_velocity(api_, context): # successful data - req = mock_request({'date-time': '2011-07-14T22:01:08Z'}) - rsp_headers, code, response = mf.get_collection_items_tGeometry_velocity( - req, context['collection_id'], context['mfeature_id'], + req = mock_api_request({'date-time': '2011-07-14T22:01:08Z'}) + rsp_headers, code, response = get_collection_items_tGeometry_velocity( + api_, req, context['collection_id'], context['mfeature_id'], context['tgeometry_id']) assert code == HTTPStatus.OK @@ -1107,13 +1085,12 @@ def test_get_collection_items_tGeometry_velocity(config, openapi, context): assert value_sequence['interpolation'], 1 == "Discrete" -def test_get_collection_items_tGeometry_distance(config, openapi, context): - mf = MOVING_FEATURES(config, openapi) +def test_get_collection_items_tGeometry_distance(api_, context): # successful data - req = mock_request({'date-time': '2011-07-14T22:01:08Z'}) - rsp_headers, code, response = mf.get_collection_items_tGeometry_distance( - req, context['collection_id'], context['mfeature_id'], + req = mock_api_request({'date-time': '2011-07-14T22:01:08Z'}) + rsp_headers, code, response = get_collection_items_tGeometry_distance( + api_, req, context['collection_id'], context['mfeature_id'], context['tgeometry_id']) assert code == HTTPStatus.OK @@ -1139,14 +1116,13 @@ def test_get_collection_items_tGeometry_distance(config, openapi, context): assert value_sequence['interpolation'], 1 == "Discrete" -def test_get_collection_items_tGeometry_acceleration(config, openapi, context): - mf = MOVING_FEATURES(config, 
openapi) +def test_get_collection_items_tGeometry_acceleration(api_, context): # successful data - req = mock_request({'date-time': '2011-07-14T22:01:08Z'}) + req = mock_api_request({'date-time': '2011-07-14T22:01:08Z'}) rsp_headers, code, response = \ - mf.get_collection_items_tGeometry_acceleration( - req, context['collection_id'], context['mfeature_id'], + get_collection_items_tGeometry_acceleration( + api_, req, context['collection_id'], context['mfeature_id'], context['tgeometry_id']) assert code == HTTPStatus.OK @@ -1172,60 +1148,59 @@ def test_get_collection_items_tGeometry_acceleration(config, openapi, context): assert value_sequence['interpolation'], 1 == "Discrete" -def test_get_collection_items_tProperty(config, openapi, context): - mf = MOVING_FEATURES(config, openapi) +def test_get_collection_items_tProperty(api_, context): # not found - req = mock_request() - rsp_headers, code, response = mf.get_collection_items_tProperty( - req, '00000000-0000-0000-0000-000000000000', + req = mock_api_request() + rsp_headers, code, response = get_collection_items_tProperty( + api_, req, '00000000-0000-0000-0000-000000000000', '00000000-0000-0000-0000-000000000000') assert code == HTTPStatus.NOT_FOUND # offset value should be positive or zero - req = mock_request({'offset': -1}) - rsp_headers, code, response = mf.get_collection_items_tProperty( - req, context['collection_id'], context['mfeature_id']) + req = mock_api_request({'offset': -1}) + rsp_headers, code, response = get_collection_items_tProperty( + api_, req, context['collection_id'], context['mfeature_id']) assert code == HTTPStatus.BAD_REQUEST # offset value should be an integer - req = mock_request({'offset': 'one'}) - rsp_headers, code, response = mf.get_collection_items_tProperty( - req, context['collection_id'], context['mfeature_id']) + req = mock_api_request({'offset': 'one'}) + rsp_headers, code, response = get_collection_items_tProperty( + api_, req, context['collection_id'], context['mfeature_id']) 
assert code == HTTPStatus.BAD_REQUEST # limit value should be strictly positive - req = mock_request({'offset': 0, 'limit': 0}) - rsp_headers, code, response = mf.get_collection_items_tProperty( - req, context['collection_id'], context['mfeature_id']) + req = mock_api_request({'offset': 0, 'limit': 0}) + rsp_headers, code, response = get_collection_items_tProperty( + api_, req, context['collection_id'], context['mfeature_id']) assert code == HTTPStatus.BAD_REQUEST # limit value should be less than or equal to 10000 - req = mock_request({'offset': 0, 'limit': 10001}) - rsp_headers, code, response = mf.get_collection_items_tProperty( - req, context['collection_id'], context['mfeature_id']) + req = mock_api_request({'offset': 0, 'limit': 10001}) + rsp_headers, code, response = get_collection_items_tProperty( + api_, req, context['collection_id'], context['mfeature_id']) assert code == HTTPStatus.BAD_REQUEST # limit value should be an integer - req = mock_request({'offset': 0, 'limit': 'one'}) - rsp_headers, code, response = mf.get_collection_items_tProperty( - req, context['collection_id'], context['mfeature_id']) + req = mock_api_request({'offset': 0, 'limit': 'one'}) + rsp_headers, code, response = get_collection_items_tProperty( + api_, req, context['collection_id'], context['mfeature_id']) assert code == HTTPStatus.BAD_REQUEST # datetime parameter out of range - req = mock_request({'offset': 0, 'limit': 10, + req = mock_api_request({'offset': 0, 'limit': 10, 'datetime': '2011-07-17T22:01:01.450Z/2011-07-16T00:01:01.450Z'}) # noqa - rsp_headers, code, response = mf.get_collection_items_tProperty( - req, context['collection_id'], context['mfeature_id']) + rsp_headers, code, response = get_collection_items_tProperty( + api_, req, context['collection_id'], context['mfeature_id']) assert code == HTTPStatus.BAD_REQUEST # successful data - req = mock_request({'offset': 0, - 'limit': 10, - 'datetime': '2011-07-16T22:01:01.450Z/2011-07-17T00:01:01.450Z', # noqa - 
'subTemporalValue': 'true'}) - rsp_headers, code, response = mf.get_collection_items_tProperty( - req, context['collection_id'], context['mfeature_id']) + req = mock_api_request({'offset': 0, + 'limit': 10, + 'datetime': '2011-07-16T22:01:01.450Z/2011-07-17T00:01:01.450Z', # noqa + 'subTemporalValue': 'true'}) + rsp_headers, code, response = get_collection_items_tProperty( + api_, req, context['collection_id'], context['mfeature_id']) assert code == HTTPStatus.OK assert rsp_headers['Content-Type'] == 'application/json' @@ -1264,87 +1239,86 @@ def test_get_collection_items_tProperty(config, openapi, context): assert result['numberReturned'] == 4 -def test_get_collection_items_tProperty_value(config, openapi, context): - mf = MOVING_FEATURES(config, openapi) +def test_get_collection_items_tProperty_value(api_, context): # not found - req = mock_request() - rsp_headers, code, response = mf.get_collection_items_tProperty_value( - req, '00000000-0000-0000-0000-000000000000', + req = mock_api_request() + rsp_headers, code, response = get_collection_items_tProperty_value( + api_, req, '00000000-0000-0000-0000-000000000000', '00000000-0000-0000-0000-000000000000', '') assert code == HTTPStatus.NOT_FOUND # offset value should be positive or zero - req = mock_request({'offset': -1}) - rsp_headers, code, response = mf.get_collection_items_tProperty_value( - req, context['collection_id'], context['mfeature_id'], + req = mock_api_request({'offset': -1}) + rsp_headers, code, response = get_collection_items_tProperty_value( + api_, req, context['collection_id'], context['mfeature_id'], context['tProperty_name']) assert code == HTTPStatus.BAD_REQUEST # offset value should be an integer - req = mock_request({'offset': 'one'}) - rsp_headers, code, response = mf.get_collection_items_tProperty_value( - req, context['collection_id'], context['mfeature_id'], + req = mock_api_request({'offset': 'one'}) + rsp_headers, code, response = get_collection_items_tProperty_value( + api_, req, 
context['collection_id'], context['mfeature_id'], context['tProperty_name']) assert code == HTTPStatus.BAD_REQUEST # limit value should be strictly positive - req = mock_request({'offset': 0, 'limit': 0}) - rsp_headers, code, response = mf.get_collection_items_tProperty_value( - req, context['collection_id'], context['mfeature_id'], + req = mock_api_request({'offset': 0, 'limit': 0}) + rsp_headers, code, response = get_collection_items_tProperty_value( + api_, req, context['collection_id'], context['mfeature_id'], context['tProperty_name']) assert code == HTTPStatus.BAD_REQUEST # limit value should be less than or equal to 10000 - req = mock_request({'offset': 0, 'limit': 10001}) - rsp_headers, code, response = mf.get_collection_items_tProperty_value( - req, context['collection_id'], context['mfeature_id'], + req = mock_api_request({'offset': 0, 'limit': 10001}) + rsp_headers, code, response = get_collection_items_tProperty_value( + api_, req, context['collection_id'], context['mfeature_id'], context['tProperty_name']) assert code == HTTPStatus.BAD_REQUEST # limit value should be an integer - req = mock_request({'offset': 0, 'limit': 'one'}) - rsp_headers, code, response = mf.get_collection_items_tProperty_value( - req, context['collection_id'], context['mfeature_id'], + req = mock_api_request({'offset': 0, 'limit': 'one'}) + rsp_headers, code, response = get_collection_items_tProperty_value( + api_, req, context['collection_id'], context['mfeature_id'], context['tProperty_name']) assert code == HTTPStatus.BAD_REQUEST # invalid leaf - req = mock_request({'offset': 0, 'limit': 10, - 'leaf': '2011-07-14T22:01:01.000Z,2011-07-14T22:01:01.000Z'}) # noqa - rsp_headers, code, response = mf.get_collection_items_tProperty_value( - req, context['collection_id'], context['mfeature_id'], + req = mock_api_request({'offset': 0, 'limit': 10, + 'leaf': '2011-07-14T22:01:01.000Z,2011-07-14T22:01:01.000Z'}) # noqa + rsp_headers, code, response = 
get_collection_items_tProperty_value( + api_, req, context['collection_id'], context['mfeature_id'], context['tProperty_name']) assert code == HTTPStatus.BAD_REQUEST # cannot use both parameter `subTemporalValue` # and `leaf` at the same time - req = mock_request({'offset': 0, - 'limit': 10, - 'leaf': '2011-07-16T22:01:01.450Z', - 'subTemporalValue': True}) - rsp_headers, code, response = mf.get_collection_items_tProperty_value( - req, context['collection_id'], context['mfeature_id'], + req = mock_api_request({'offset': 0, + 'limit': 10, + 'leaf': '2011-07-16T22:01:01.450Z', + 'subTemporalValue': True}) + rsp_headers, code, response = get_collection_items_tProperty_value( + api_, req, context['collection_id'], context['mfeature_id'], context['tProperty_name']) assert code == HTTPStatus.BAD_REQUEST # datetime parameter out of range - req = mock_request({'offset': 0, - 'limit': 10, - 'leaf': '2011-07-16T22:01:01.450Z', - 'datetime': '2011-07-17T22:01:01.450Z/2011-07-16T00:01:01.450Z'}) # noqa - rsp_headers, code, response = mf.get_collection_items_tProperty_value( - req, context['collection_id'], context['mfeature_id'], + req = mock_api_request({'offset': 0, + 'limit': 10, + 'leaf': '2011-07-16T22:01:01.450Z', + 'datetime': '2011-07-17T22:01:01.450Z/2011-07-16T00:01:01.450Z'}) # noqa + rsp_headers, code, response = get_collection_items_tProperty_value( + api_, req, context['collection_id'], context['mfeature_id'], context['tProperty_name']) assert code == HTTPStatus.BAD_REQUEST # successful data - req = mock_request({'offset': 0, - 'limit': 10, - 'leaf': '2011-07-16T22:01:01.450Z', - 'datetime': '2011-07-16T22:01:01.450Z/2011-07-17T00:01:01.450Z'}) # noqa - rsp_headers, code, response = mf.get_collection_items_tProperty_value( - req, context['collection_id'], context['mfeature_id'], + req = mock_api_request({'offset': 0, + 'limit': 10, + 'leaf': '2011-07-16T22:01:01.450Z', + 'datetime': '2011-07-16T22:01:01.450Z/2011-07-17T00:01:01.450Z'}) # noqa + rsp_headers, code, 
response = get_collection_items_tProperty_value( + api_, req, context['collection_id'], context['mfeature_id'], context['tProperty_name']) assert code == HTTPStatus.OK @@ -1367,20 +1341,19 @@ def test_get_collection_items_tProperty_value(config, openapi, context): def test_manage_collection_item_tProperty_delete( - config, openapi, context): - mf = MOVING_FEATURES(config, openapi) + api_, context): # feature not found - req = mock_request() - rsp_headers, code, response = mf.manage_collection_item_tProperty( - req, 'delete', '00000000-0000-0000-0000-000000000000', + req = mock_api_request() + rsp_headers, code, response = manage_collection_item_tProperty( + api_, req, 'delete', '00000000-0000-0000-0000-000000000000', '00000000-0000-0000-0000-000000000000', '') assert code == HTTPStatus.NOT_FOUND # successful delete - req = mock_request() - rsp_headers, code, response = mf.manage_collection_item_tProperty( - req, 'delete', context['collection_id'], context['mfeature_id'], + req = mock_api_request() + rsp_headers, code, response = manage_collection_item_tProperty( + api_, req, 'delete', context['collection_id'], context['mfeature_id'], context['tProperty_name']) assert code == HTTPStatus.NO_CONTENT @@ -1389,21 +1362,20 @@ def test_manage_collection_item_tProperty_delete( def test_manage_collection_item_tGeometry_delete( - config, openapi, context): - mf = MOVING_FEATURES(config, openapi) + api_, context): # feature not found - req = mock_request() - rsp_headers, code, response = mf.manage_collection_item_tGeometry( - req, 'delete', '00000000-0000-0000-0000-000000000000', + req = mock_api_request() + rsp_headers, code, response = manage_collection_item_tGeometry( + api_, req, 'delete', '00000000-0000-0000-0000-000000000000', '00000000-0000-0000-0000-000000000000', '00000000-0000-0000-0000-000000000000') assert code == HTTPStatus.NOT_FOUND # successful delete - req = mock_request() - rsp_headers, code, response = mf.manage_collection_item_tGeometry( - req, 'delete', 
context['collection_id'], context['mfeature_id'], + req = mock_api_request() + rsp_headers, code, response = manage_collection_item_tGeometry( + api_, req, 'delete', context['collection_id'], context['mfeature_id'], context['tgeometry_id']) assert code == HTTPStatus.NO_CONTENT @@ -1412,20 +1384,19 @@ def test_manage_collection_item_tGeometry_delete( def test_manage_collection_item_delete( - config, openapi, context): - mf = MOVING_FEATURES(config, openapi) + api_, context): # collection not found - req = mock_request() - rsp_headers, code, response = mf.manage_collection_item( - req, 'delete', '00000000-0000-0000-0000-000000000000', + req = mock_api_request() + rsp_headers, code, response = manage_collection_item( + api_, req, 'delete', '00000000-0000-0000-0000-000000000000', '00000000-0000-0000-0000-000000000000') assert code == HTTPStatus.NOT_FOUND # successful delete - req = mock_request() - rsp_headers, code, response = mf.manage_collection_item( - req, 'delete', context['collection_id'], context['mfeature_id']) + req = mock_api_request() + rsp_headers, code, response = manage_collection_item( + api_, req, 'delete', context['collection_id'], context['mfeature_id']) assert code == HTTPStatus.NO_CONTENT assert response == '' assert rsp_headers['Content-Type'] == 'application/json' @@ -1439,15 +1410,13 @@ def test_manage_collection_item_delete( def test_manage_collection_delete( - config, - openapi, + api_, context): - mf = MOVING_FEATURES(config, openapi) # successful delete - req = mock_request() - rsp_headers, code, response = mf.manage_collection( - req, 'delete', context['collection_id']) + req = mock_api_request() + rsp_headers, code, response = manage_collection( + api_, req, 'delete', context['collection_id']) assert code == HTTPStatus.NO_CONTENT assert response == '' assert rsp_headers['Content-Type'] == 'application/json' From 9a727df63edb5a83efb13ffebf64dea7c9ab2e0c Mon Sep 17 00:00:00 2001 From: ts-shimizu <ts-shimizu@apptec.co.jp> Date: Fri, 22 Nov 
2024 15:01:32 +0900 Subject: [PATCH 06/14] MF-API Server update and integration (#1) modify service interface * Modified to process MovingFeatures in the same way as other APIs * Change YAML file references to the Web --- pygeoapi/api/__init__.py | 696 +++++++++++++++++++++++++++++---------- pygeoapi/flask_app.py | 260 +++++++++------ pygeoapi/openapi.py | 65 +++- 3 files changed, 729 insertions(+), 292 deletions(-) diff --git a/pygeoapi/api/__init__.py b/pygeoapi/api/__init__.py index 03948ae2b..138f87e39 100644 --- a/pygeoapi/api/__init__.py +++ b/pygeoapi/api/__init__.py @@ -42,8 +42,9 @@ from pygeoapi.util import (CrsTransformSpec, TEMPLATES, UrlPrefetcher, get_api_rules, get_base_url, get_provider_by_type, - get_typed_value, get_crs_from_uri, - get_supported_crs_list, render_j2_template, to_json) + get_typed_value, get_crs_from_uri, dategetter, + get_supported_crs_list, render_j2_template, to_json, + get_provider_default, filter_dict_by_key_value) from pymeos import STBox, TsTzSpan, pymeos_initialize import psycopg2 from pygeoapi.provider.postgresql_mobilitydb import PostgresMobilityDB @@ -68,7 +69,7 @@ from pygeoapi.plugin import load_plugin from pygeoapi.process.manager.base import get_manager from pygeoapi.provider.base import ( - ProviderGenericError, ProviderTypeError) + ProviderConnectionError, ProviderGenericError, ProviderTypeError) LOGGER = logging.getLogger(__name__) @@ -112,54 +113,6 @@ 'http://www.opengis.net/spec/ogcapi-common-1/1.0/conf/oas30' ] -CONFORMANCE = { - 'common': [ - 'http://www.opengis.net/spec/ogcapi-common-1/1.0/conf/core', - 'http://www.opengis.net/spec/ogcapi-common-2/1.0/conf/collections' - ], - 'feature': [ - 'http://www.opengis.net/spec/ogcapi-features-1/1.0/conf/core', - 'http://www.opengis.net/spec/ogcapi-features-1/1.0/conf/oas30', - 'http://www.opengis.net/spec/ogcapi-features-1/1.0/conf/html', - 'http://www.opengis.net/spec/ogcapi-features-1/1.0/conf/geojson', - 
'http://www.opengis.net/spec/ogcapi-features-4/1.0/conf/create-replace-delete' # noqa - ], - 'coverage': [ - 'http://www.opengis.net/spec/ogcapi-coverages-1/1.0/conf/core', - 'http://www.opengis.net/spec/ogcapi-coverages-1/1.0/conf/oas30', - 'http://www.opengis.net/spec/ogcapi-coverages-1/1.0/conf/html', - 'http://www.opengis.net/spec/ogcapi-coverages-1/1.0/conf/geodata-coverage', # noqa - 'http://www.opengis.net/spec/ogcapi-coverages-1/1.0/conf/coverage-subset', # noqa - 'http://www.opengis.net/spec/ogcapi-coverages-1/1.0/conf/coverage-rangesubset', # noqa - 'http://www.opengis.net/spec/ogcapi-coverages-1/1.0/conf/coverage-bbox', # noqa - 'http://www.opengis.net/spec/ogcapi-coverages-1/1.0/conf/coverage-datetime' # noqa - ], - 'tile': [ - 'http://www.opengis.net/spec/ogcapi-tiles-1/1.0/conf/core' - ], - 'record': [ - 'http://www.opengis.net/spec/ogcapi-records-1/1.0/conf/core', - 'http://www.opengis.net/spec/ogcapi-records-1/1.0/conf/sorting', - 'http://www.opengis.net/spec/ogcapi-records-1/1.0/conf/opensearch', - 'http://www.opengis.net/spec/ogcapi-records-1/1.0/conf/json', - 'http://www.opengis.net/spec/ogcapi-records-1/1.0/conf/html' - ], - 'process': [ - 'http://www.opengis.net/spec/ogcapi-processes-1/1.0/conf/ogc-process-description', # noqa - 'http://www.opengis.net/spec/ogcapi-processes-1/1.0/conf/core', - 'http://www.opengis.net/spec/ogcapi-processes-1/1.0/conf/json', - 'http://www.opengis.net/spec/ogcapi-processes-1/1.0/conf/oas30' - ], - 'edr': [ - 'http://www.opengis.net/spec/ogcapi-edr-1/1.0/conf/core' - ], - 'movingfeatures': [ - "http://www.opengis.net/spec/ogcapi-movingfeatures-1/1.0/conf/common", - "http://www.opengis.net/spec/ogcapi-movingfeatures-1/1.0/conf/mf-collection", # noqa - "http://www.opengis.net/spec/ogcapi-movingfeatures-1/1.0/conf/movingfeatures" # noqa - ] -} - OGC_RELTYPES_BASE = 'http://www.opengis.net/def/rel/ogc/1.0' DEFAULT_CRS_LIST = [ @@ -181,7 +134,7 @@ def all_apis() -> dict: """ from . 
import (coverages, environmental_data_retrieval, itemtypes, maps, - processes, tiles, stac) + processes, tiles, stac, movingfeatures) return { 'coverage': coverages, @@ -191,6 +144,7 @@ def all_apis() -> dict: 'process': processes, 'tile': tiles, 'stac': stac, + 'movingfeature': movingfeatures, } @@ -836,6 +790,18 @@ def landing_page(self, fcm['stac'] = False fcm['collection'] = False + if filter_dict_by_key_value(self.config['resources'], + 'type', 'process'): + fcm['processes'] = True + + if filter_dict_by_key_value(self.config['resources'], + 'type', 'stac-collection'): + fcm['stac'] = True + + if filter_dict_by_key_value(self.config['resources'], + 'type', 'collection'): + fcm['collection'] = True + content = render_j2_template(self.tpl_config, 'landing_page.html', fcm, request.locale) return headers, HTTPStatus.OK, content @@ -897,12 +863,31 @@ def conformance(self, :returns: tuple of headers, status code, content """ + apis_dict = all_apis() + if not request.is_valid(): return self.get_format_exception(request) conformance_list = CONFORMANCE_CLASSES - conformance_list.extend( - CONFORMANCE['movingfeatures']) + + for key, value in self.config['resources'].items(): + if value['type'] == 'process': + conformance_list.extend( + apis_dict['process'].CONFORMANCE_CLASSES) + else: + for provider in value['providers']: + if provider['type'] in apis_dict: + conformance_list.extend( + apis_dict[provider['type']].CONFORMANCE_CLASSES) + if provider['type'] == 'feature': + conformance_list.extend( + apis_dict['itemtypes'].CONFORMANCE_CLASSES_FEATURES) # noqa + if provider['type'] == 'record': + conformance_list.extend( + apis_dict['itemtypes'].CONFORMANCE_CLASSES_RECORDS) + if provider['type'] == 'movingfeatures': + conformance_list.extend( + apis_dict['movingfeatures'].CONFORMANCE_CLASSES_RECORDS) # noqa conformance = { 'conformsTo': sorted(list(set(conformance_list))) @@ -920,7 +905,8 @@ def conformance(self, @pre_process @jsonldify def describe_collections( - self, 
request: Union[APIRequest, Any]) -> Tuple[dict, int, str]: + self, request: Union[APIRequest, Any], + dataset=None) -> Tuple[dict, int, str]: """ Queries collection @@ -932,168 +918,514 @@ def describe_collections( return self.get_format_exception(request) headers = request.get_response_headers() - pmdb_provider = PostgresMobilityDB() fcm = { 'collections': [], 'links': [] } - try: - pmdb_provider.connect() - result = pmdb_provider.get_collections() - except (Exception, psycopg2.Error) as error: - msg = str(error) + collections = filter_dict_by_key_value(self.config['resources'], + 'type', 'collection') + + if all([dataset is not None, dataset not in collections.keys()]): + msg = 'Collection not found' return self.get_exception( - HTTPStatus.BAD_REQUEST, - headers, - request.format, - 'ConnectingError', - msg) - - pymeos_initialize() - collections = [] - for row in result: - collection_id = row[0] - collection = row[1] - collection['itemType'] = 'movingfeature' - collection['id'] = collection_id - - crs = None - trs = None - if 'crs' in collection: - crs = collection.pop('crs', None) - if 'trs' in collection: - trs = collection.pop('trs', None) - - extend_stbox = STBox(row[3]) if row[3] is not None else None - lifespan = TsTzSpan(row[2]) if row[2] is not None else None - - bbox = [] - if extend_stbox is not None: - bbox.append(extend_stbox.xmin()) - bbox.append(extend_stbox.ymin()) - if extend_stbox.zmin() is not None: - bbox.append(extend_stbox.zmin()) - bbox.append(extend_stbox.xmax()) - bbox.append(extend_stbox.ymax()) - if extend_stbox.zmax() is not None: - bbox.append(extend_stbox.zmax()) + HTTPStatus.NOT_FOUND, headers, request.format, 'NotFound', msg) - if crs is None: - if extend_stbox.srid() == 4326: - if extend_stbox.zmax() is not None: - crs = 'http://www.opengis.net/def/crs/OGC/0/CRS84h' - else: - crs = 'http://www.opengis.net/def/\ - crs/OGC/1.3/CRS84' - if crs is None: - crs = 'http://www.opengis.net/def/crs/OGC/1.3/CRS84' - if trs is None: - trs = 
'http://www.opengis.net/def/uom/ISO-8601/0/Gregorian' - - time = [] - if lifespan is not None: - time.append(lifespan.lower().strftime("%Y-%m-%dT%H:%M:%SZ")) - time.append(lifespan.upper().strftime("%Y-%m-%dT%H:%M:%SZ")) - else: - if extend_stbox is not None: - if extend_stbox.tmin() is not None: - time.append(extend_stbox.tmin().strftime( - "%Y-%m-%dT%H:%M:%SZ")) - time.append(extend_stbox.tmax().strftime( - "%Y-%m-%dT%H:%M:%SZ")) + if dataset is not None: + collections_dict = { + k: v for k, v in collections.items() if k == dataset + } + else: + collections_dict = collections + + LOGGER.debug('Creating collections') + for k, v in collections_dict.items(): + if v.get('visibility', 'default') == 'hidden': + LOGGER.debug(f'Skipping hidden layer: {k}') + continue + collection_data = get_provider_default(v['providers']) + collection_data_type = collection_data['type'] + + collection_data_format = None + + if 'format' in collection_data: + collection_data_format = collection_data['format'] + + is_vector_tile = (collection_data_type == 'tile' and + collection_data_format['name'] not + in [F_PNG, F_JPEG]) + + collection = { + 'id': k, + 'title': l10n.translate(v['title'], request.locale), + 'description': l10n.translate(v['description'], request.locale), # noqa + 'keywords': l10n.translate(v['keywords'], request.locale), + 'links': [] + } + bbox = v['extents']['spatial']['bbox'] + # The output should be an array of bbox, so if the user only + # provided a single bbox, wrap it in a array. 
+ if not isinstance(bbox[0], list): + bbox = [bbox] collection['extent'] = { 'spatial': { - 'bbox': bbox, - 'crs': crs - }, - 'temporal': { - 'interval': time, - 'trs': trs + 'bbox': bbox } } - - collection['links'] = [] + if 'crs' in v['extents']['spatial']: + collection['extent']['spatial']['crs'] = \ + v['extents']['spatial']['crs'] + + t_ext = v.get('extents', {}).get('temporal', {}) + if t_ext: + begins = dategetter('begin', t_ext) + ends = dategetter('end', t_ext) + collection['extent']['temporal'] = { + 'interval': [[begins, ends]] + } + if 'trs' in t_ext: + collection['extent']['temporal']['trs'] = t_ext['trs'] + + LOGGER.debug('Processing configured collection links') + for link in l10n.translate(v.get('links', []), request.locale): + lnk = { + 'type': link['type'], + 'rel': link['rel'], + 'title': l10n.translate(link['title'], request.locale), + 'href': l10n.translate(link['href'], request.locale), + } + if 'hreflang' in link: + lnk['hreflang'] = l10n.translate( + link['hreflang'], request.locale) + content_length = link.get('length', 0) + + if lnk['rel'] == 'enclosure' and content_length == 0: + # Issue HEAD request for enclosure links without length + lnk_headers = self.prefetcher.get_headers(lnk['href']) + content_length = int(lnk_headers.get('content-length', 0)) + content_type = lnk_headers.get('content-type', lnk['type']) + if content_length == 0: + # Skip this (broken) link + LOGGER.debug(f"Enclosure {lnk['href']} is invalid") + continue + if content_type != lnk['type']: + # Update content type if different from specified + lnk['type'] = content_type + LOGGER.debug( + f"Fixed media type for enclosure {lnk['href']}") + + if content_length > 0: + lnk['length'] = content_length + + collection['links'].append(lnk) # TODO: provide translations LOGGER.debug('Adding JSON and HTML link relations') - + collection['links'].append({ + 'type': FORMAT_TYPES[F_JSON], + 'rel': 'root', + 'title': l10n.translate('The landing page of this server as JSON', 
request.locale), # noqa + 'href': f"{self.base_url}?f={F_JSON}" + }) + collection['links'].append({ + 'type': FORMAT_TYPES[F_HTML], + 'rel': 'root', + 'title': l10n.translate('The landing page of this server as HTML', request.locale), # noqa + 'href': f"{self.base_url}?f={F_HTML}" + }) collection['links'].append({ 'type': FORMAT_TYPES[F_JSON], 'rel': request.get_linkrel(F_JSON), 'title': l10n.translate('This document as JSON', request.locale), # noqa - 'href': f'{self.get_collections_url()}/{collection_id}?f={F_JSON}' # noqa + 'href': f'{self.get_collections_url()}/{k}?f={F_JSON}' }) collection['links'].append({ 'type': FORMAT_TYPES[F_JSONLD], 'rel': request.get_linkrel(F_JSONLD), 'title': l10n.translate('This document as RDF (JSON-LD)', request.locale), # noqa - 'href': f'{self.get_collections_url()}/{collection_id}?f={F_JSONLD}' # noqa + 'href': f'{self.get_collections_url()}/{k}?f={F_JSONLD}' }) collection['links'].append({ 'type': FORMAT_TYPES[F_HTML], 'rel': request.get_linkrel(F_HTML), 'title': l10n.translate('This document as HTML', request.locale), # noqa - 'href': f'{self.get_collections_url()}/{collection_id}?f={F_HTML}' # noqa + 'href': f'{self.get_collections_url()}/{k}?f={F_HTML}' }) - collection['links'].append({ - 'type': 'application/geo+json', - 'rel': 'items', - 'title': l10n.translate('Items as GeoJSON', request.locale), # noqa - 'href': f'{self.get_collections_url()}/{collection_id}/items?f={F_JSON}' # noqa + if collection_data_type in ['feature', 'coverage', 'record']: + collection['links'].append({ + 'type': FORMAT_TYPES[F_JSON], + 'rel': f'{OGC_RELTYPES_BASE}/schema', + 'title': l10n.translate('Schema of collection in JSON', request.locale), # noqa + 'href': f'{self.get_collections_url()}/{k}/schema?f={F_JSON}' # noqa + }) + collection['links'].append({ + 'type': FORMAT_TYPES[F_HTML], + 'rel': f'{OGC_RELTYPES_BASE}/schema', + 'title': l10n.translate('Schema of collection in HTML', request.locale), # noqa + 'href': 
f'{self.get_collections_url()}/{k}/schema?f={F_HTML}' # noqa + }) + + if is_vector_tile or collection_data_type in ['feature', 'record']: + # TODO: translate + collection['itemType'] = collection_data_type + LOGGER.debug('Adding feature/record based links') + collection['links'].append({ + 'type': 'application/schema+json', + 'rel': 'http://www.opengis.net/def/rel/ogc/1.0/queryables', + 'title': l10n.translate('Queryables for this collection as JSON', request.locale), # noqa + 'href': f'{self.get_collections_url()}/{k}/queryables?f={F_JSON}' # noqa + }) + collection['links'].append({ + 'type': FORMAT_TYPES[F_HTML], + 'rel': 'http://www.opengis.net/def/rel/ogc/1.0/queryables', + 'title': l10n.translate('Queryables for this collection as HTML', request.locale), # noqa + 'href': f'{self.get_collections_url()}/{k}/queryables?f={F_HTML}' # noqa + }) + collection['links'].append({ + 'type': 'application/geo+json', + 'rel': 'items', + 'title': l10n.translate('Items as GeoJSON', request.locale), # noqa + 'href': f'{self.get_collections_url()}/{k}/items?f={F_JSON}' # noqa + }) + collection['links'].append({ + 'type': FORMAT_TYPES[F_JSONLD], + 'rel': 'items', + 'title': l10n.translate('Items as RDF (GeoJSON-LD)', request.locale), # noqa + 'href': f'{self.get_collections_url()}/{k}/items?f={F_JSONLD}' # noqa + }) + collection['links'].append({ + 'type': FORMAT_TYPES[F_HTML], + 'rel': 'items', + 'title': l10n.translate('Items as HTML', request.locale), # noqa + 'href': f'{self.get_collections_url()}/{k}/items?f={F_HTML}' # noqa + }) + + # OAPIF Part 2 - list supported CRSs and StorageCRS + if collection_data_type == 'feature': + collection['crs'] = get_supported_crs_list(collection_data, DEFAULT_CRS_LIST) # noqa + collection['storageCRS'] = collection_data.get('storage_crs', DEFAULT_STORAGE_CRS) # noqa + if 'storage_crs_coordinate_epoch' in collection_data: + collection['storageCrsCoordinateEpoch'] = collection_data.get('storage_crs_coordinate_epoch') # noqa + + elif 
collection_data_type == 'coverage': + # TODO: translate + LOGGER.debug('Adding coverage based links') + collection['links'].append({ + 'type': 'application/prs.coverage+json', + 'rel': f'{OGC_RELTYPES_BASE}/coverage', + 'title': l10n.translate('Coverage data', request.locale), + 'href': f'{self.get_collections_url()}/{k}/coverage?f={F_JSON}' # noqa + }) + if collection_data_format is not None: + title_ = l10n.translate('Coverage data as', request.locale) # noqa + title_ = f"{title_} {collection_data_format['name']}" + collection['links'].append({ + 'type': collection_data_format['mimetype'], + 'rel': f'{OGC_RELTYPES_BASE}/coverage', + 'title': title_, + 'href': f"{self.get_collections_url()}/{k}/coverage?f={collection_data_format['name']}" # noqa + }) + if dataset is not None: + LOGGER.debug('Creating extended coverage metadata') + try: + provider_def = get_provider_by_type( + self.config['resources'][k]['providers'], + 'coverage') + p = load_plugin('provider', provider_def) + except ProviderConnectionError: + msg = 'connection error (check logs)' + return self.get_exception( + HTTPStatus.INTERNAL_SERVER_ERROR, + headers, request.format, + 'NoApplicableCode', msg) + except ProviderTypeError: + pass + else: + collection['extent']['spatial']['grid'] = [{ + 'cellsCount': p._coverage_properties['width'], + 'resolution': p._coverage_properties['resx'] + }, { + 'cellsCount': p._coverage_properties['height'], + 'resolution': p._coverage_properties['resy'] + }] + + try: + tile = get_provider_by_type(v['providers'], 'tile') + p = load_plugin('provider', tile) + except ProviderConnectionError: + msg = 'connection error (check logs)' + return self.get_exception( + HTTPStatus.INTERNAL_SERVER_ERROR, + headers, request.format, + 'NoApplicableCode', msg) + except ProviderTypeError: + tile = None + + if tile: + # TODO: translate + + LOGGER.debug('Adding tile links') + collection['links'].append({ + 'type': FORMAT_TYPES[F_JSON], + 'rel': 
f'http://www.opengis.net/def/rel/ogc/1.0/tilesets-{p.tile_type}', # noqa + 'title': l10n.translate('Tiles as JSON', request.locale), + 'href': f'{self.get_collections_url()}/{k}/tiles?f={F_JSON}' # noqa + }) + collection['links'].append({ + 'type': FORMAT_TYPES[F_HTML], + 'rel': f'http://www.opengis.net/def/rel/ogc/1.0/tilesets-{p.tile_type}', # noqa + 'title': l10n.translate('Tiles as HTML', request.locale), + 'href': f'{self.get_collections_url()}/{k}/tiles?f={F_HTML}' # noqa + }) + + try: + map_ = get_provider_by_type(v['providers'], 'map') + except ProviderTypeError: + map_ = None + + if map_: + LOGGER.debug('Adding map links') + + map_mimetype = map_['format']['mimetype'] + map_format = map_['format']['name'] + + title_ = l10n.translate('Map as', request.locale) + title_ = f"{title_} {map_format}" + + collection['links'].append({ + 'type': map_mimetype, + 'rel': 'http://www.opengis.net/def/rel/ogc/1.0/map', + 'title': title_, + 'href': f"{self.get_collections_url()}/{k}/map?f={map_format}" # noqa + }) + + try: + edr = get_provider_by_type(v['providers'], 'edr') + p = load_plugin('provider', edr) + except ProviderConnectionError: + msg = 'connection error (check logs)' + return self.get_exception( + HTTPStatus.INTERNAL_SERVER_ERROR, headers, + request.format, 'NoApplicableCode', msg) + except ProviderTypeError: + edr = None + + if edr: + # TODO: translate + LOGGER.debug('Adding EDR links') + collection['data_queries'] = {} + parameters = p.get_fields() + if parameters: + collection['parameter_names'] = {} + for key, value in parameters.items(): + collection['parameter_names'][key] = { + 'id': key, + 'type': 'Parameter', + 'name': value['title'], + 'unit': { + 'label': { + 'en': value['title'] + }, + 'symbol': { + 'value': value['x-ogc-unit'], + 'type': 'http://www.opengis.net/def/uom/UCUM/' # noqa + } + } + } + + for qt in p.get_query_types(): + data_query = { + 'link': { + 'href': f'{self.get_collections_url()}/{k}/{qt}', + 'rel': 'data' + } + } + 
collection['data_queries'][qt] = data_query + + title1 = l10n.translate('query for this collection as JSON', request.locale) # noqa + title1 = f'{qt} {title1}' + title2 = l10n.translate('query for this collection as HTML', request.locale) # noqa + title2 = f'{qt} {title2}' + + collection['links'].append({ + 'type': 'application/json', + 'rel': 'data', + 'title': title1, + 'href': f'{self.get_collections_url()}/{k}/{qt}?f={F_JSON}' # noqa + }) + collection['links'].append({ + 'type': FORMAT_TYPES[F_HTML], + 'rel': 'data', + 'title': title2, + 'href': f'{self.get_collections_url()}/{k}/{qt}?f={F_HTML}' # noqa + }) + + if dataset is not None and k == dataset: + fcm = collection + break + + fcm['collections'].append(collection) + + if dataset is None: + # get moving feature collections + pmdb_provider = PostgresMobilityDB() + + try: + pmdb_provider.connect() + result = pmdb_provider.get_collections() + except (Exception, psycopg2.Error) as error: + msg = str(error) + return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, + request.format, + 'ConnectingError', + msg) + + pymeos_initialize() + for row in result: + collection_id = row[0] + collection = row[1] + collection['itemType'] = 'movingfeature' + collection['id'] = collection_id + + crs = None + trs = None + if 'crs' in collection: + crs = collection.pop('crs', None) + if 'trs' in collection: + trs = collection.pop('trs', None) + + extend_stbox = STBox(row[3]) if row[3] is not None else None + lifespan = TsTzSpan(row[2]) if row[2] is not None else None + + bbox = [] + if extend_stbox is not None: + bbox.append(extend_stbox.xmin()) + bbox.append(extend_stbox.ymin()) + if extend_stbox.zmin() is not None: + bbox.append(extend_stbox.zmin()) + bbox.append(extend_stbox.xmax()) + bbox.append(extend_stbox.ymax()) + if extend_stbox.zmax() is not None: + bbox.append(extend_stbox.zmax()) + + if crs is None: + if extend_stbox.srid() == 4326: + if extend_stbox.zmax() is not None: + crs = 
'http://www.opengis.net/def/crs/OGC/0/CRS84h' # noqa + else: + crs = 'http://www.opengis.net/def/\ + crs/OGC/1.3/CRS84' + if crs is None: + crs = 'http://www.opengis.net/def/crs/OGC/1.3/CRS84' + if trs is None: + trs = 'http://www.opengis.net/def/uom/ISO-8601/0/Gregorian' + + time = [] + if lifespan is not None: + time.append( + lifespan.lower().strftime("%Y-%m-%dT%H:%M:%SZ")) + time.append( + lifespan.upper().strftime("%Y-%m-%dT%H:%M:%SZ")) + else: + if extend_stbox is not None: + if extend_stbox.tmin() is not None: + time.append(extend_stbox.tmin().strftime( + "%Y-%m-%dT%H:%M:%SZ")) + time.append(extend_stbox.tmax().strftime( + "%Y-%m-%dT%H:%M:%SZ")) + + collection['extent'] = { + 'spatial': { + 'bbox': bbox, + 'crs': crs + }, + 'temporal': { + 'interval': time, + 'trs': trs + } + } + + collection['links'] = [] + + # TODO: provide translations + LOGGER.debug('Adding JSON and HTML link relations') + + collection['links'].append({ + 'type': FORMAT_TYPES[F_JSON], + 'rel': 'root', + 'title': l10n.translate('The landing page of this server as JSON', request.locale), # noqa + 'href': f"{self.base_url}?f={F_JSON}" + }) + collection['links'].append({ + 'type': FORMAT_TYPES[F_HTML], + 'rel': 'root', + 'title': l10n.translate('The landing page of this server as HTML', request.locale), # noqa + 'href': f"{self.base_url}?f={F_HTML}" + }) + collection['links'].append({ + 'type': FORMAT_TYPES[F_JSON], + 'rel': request.get_linkrel(F_JSON), + 'title': l10n.translate('This document as JSON', request.locale), # noqa + 'href': f'{self.get_collections_url()}/{collection_id}?f={F_JSON}' # noqa + }) + collection['links'].append({ + 'type': FORMAT_TYPES[F_JSONLD], + 'rel': request.get_linkrel(F_JSONLD), + 'title': l10n.translate('This document as RDF (JSON-LD)', request.locale), # noqa + 'href': f'{self.get_collections_url()}/{collection_id}?f={F_JSONLD}' # noqa + }) + collection['links'].append({ + 'type': FORMAT_TYPES[F_HTML], + 'rel': request.get_linkrel(F_HTML), + 'title': 
l10n.translate('This document as HTML', request.locale), # noqa + 'href': f'{self.get_collections_url()}/{collection_id}?f={F_HTML}' # noqa + }) + + collection['links'].append({ + 'type': 'application/geo+json', + 'rel': 'items', + 'title': l10n.translate('Items as GeoJSON', request.locale), # noqa + 'href': f'{self.get_collections_url()}/{collection_id}/items?f={F_JSON}' # noqa + }) + collection['links'].append({ + 'type': FORMAT_TYPES[F_JSONLD], + 'rel': 'items', + 'title': l10n.translate('Items as RDF (GeoJSON-LD)', request.locale), # noqa + 'href': f'{self.get_collections_url()}/{collection_id}/items?f={F_JSONLD}' # noqa + }) + collection['links'].append({ + 'type': FORMAT_TYPES[F_HTML], + 'rel': 'items', + 'title': l10n.translate('Items as HTML', request.locale), # noqa + 'href': f'{self.get_collections_url()}/{collection_id}/items?f={F_HTML}' # noqa + }) + + fcm['collections'].append(collection) + + if dataset is None: + # TODO: translate + fcm['links'].append({ + 'type': FORMAT_TYPES[F_JSON], + 'rel': request.get_linkrel(F_JSON), + 'title': l10n.translate('This document as JSON', request.locale), # noqa + 'href': f'{self.get_collections_url()}?f={F_JSON}' }) - collection['links'].append({ + fcm['links'].append({ 'type': FORMAT_TYPES[F_JSONLD], - 'rel': 'items', - 'title': l10n.translate('Items as RDF (GeoJSON-LD)', request.locale), # noqa - 'href': f'{self.get_collections_url()}/{collection_id}/items?f={F_JSONLD}' # noqa + 'rel': request.get_linkrel(F_JSONLD), + 'title': l10n.translate('This document as RDF (JSON-LD)', request.locale), # noqa + 'href': f'{self.get_collections_url()}?f={F_JSONLD}' }) - collection['links'].append({ + fcm['links'].append({ 'type': FORMAT_TYPES[F_HTML], - 'rel': 'items', - 'title': l10n.translate('Items as HTML', request.locale), # noqa - 'href': f'{self.get_collections_url()}/{collection_id}/items?f={F_HTML}' # noqa + 'rel': request.get_linkrel(F_HTML), + 'title': l10n.translate('This document as HTML', request.locale), # noqa 
+ 'href': f'{self.get_collections_url()}?f={F_HTML}' }) - collections.append(collection) - fcm['collections'] = collections - # fcm['links'].append({ - # 'href': '{}'.format( - # self.get_collections_url()), - # 'rel': request.get_linkrel(F_JSON), - # 'type': FORMAT_TYPES[F_JSON] - # }) - fcm['links'].append({ - 'type': FORMAT_TYPES[F_JSON], - 'rel': 'root', - 'title': l10n.translate('The landing page of this server as JSON', request.locale), # noqa - 'href': f"{self.base_url}?f={F_JSON}" - }) - fcm['links'].append({ - 'type': FORMAT_TYPES[F_HTML], - 'rel': 'root', - 'title': l10n.translate('The landing page of this server as HTML', request.locale), # noqa - 'href': f"{self.base_url}?f={F_HTML}" - }) - fcm['links'].append({ - 'type': FORMAT_TYPES[F_JSON], - 'rel': request.get_linkrel(F_JSON), - 'title': l10n.translate('This document as JSON', request.locale), # noqa - 'href': f'{self.get_collections_url()}?f={F_JSON}' - }) - fcm['links'].append({ - 'type': FORMAT_TYPES[F_JSONLD], - 'rel': request.get_linkrel(F_JSONLD), - 'title': l10n.translate('This document as RDF (JSON-LD)', request.locale), # noqa - 'href': f'{self.get_collections_url()}?f={F_JSONLD}' - }) - fcm['links'].append({ - 'type': FORMAT_TYPES[F_HTML], - 'rel': request.get_linkrel(F_HTML), - 'title': l10n.translate('This document as HTML', request.locale), # noqa - 'href': f'{self.get_collections_url()}?f={F_HTML}' - }) if request.format == F_HTML: # render fcm['collections_path'] = self.get_collections_url() @@ -1110,7 +1442,7 @@ def describe_collections( if request.format == F_JSONLD: jsonld = self.fcmld.copy() - if len(result) > 0: + if dataset is not None: jsonld['dataset'] = jsonldify_collection(self, fcm, request.locale) else: diff --git a/pygeoapi/flask_app.py b/pygeoapi/flask_app.py index 252334f53..8ab7002cb 100644 --- a/pygeoapi/flask_app.py +++ b/pygeoapi/flask_app.py @@ -38,7 +38,6 @@ send_from_directory, Response, Request) from pygeoapi.api import API, APIRequest, apply_gzip -from 
pygeoapi.api.movingfeatures import MOVING_FEATURES import pygeoapi.api.coverages as coverages_api import pygeoapi.api.environmental_data_retrieval as edr_api import pygeoapi.api.itemtypes as itemtypes_api @@ -46,9 +45,10 @@ import pygeoapi.api.processes as processes_api import pygeoapi.api.stac as stac_api import pygeoapi.api.tiles as tiles_api +import pygeoapi.api.movingfeatures as movingfeatures from pygeoapi.openapi import load_openapi_document from pygeoapi.config import get_config -from pygeoapi.util import get_mimetype, get_api_rules +from pygeoapi.util import get_mimetype, get_api_rules, filter_dict_by_key_value CONFIG = get_config() @@ -86,7 +86,6 @@ 'pretty_print', True) api_ = API(CONFIG, OPENAPI) -movingFeatures = MOVING_FEATURES(CONFIG, OPENAPI) OGC_SCHEMAS_LOCATION = CONFIG['server'].get('ogc_schemas_location') @@ -250,20 +249,29 @@ def collections(collection_id=None): if request.method == 'GET': # list items return get_response(api_.describe_collections(request)) elif request.method == 'POST': # filter or manage items - return get_response(movingFeatures.manage_collection(request, - 'create')) - - elif request.method == 'DELETE': - return get_response( - movingFeatures.manage_collection(request, 'delete', - collection_id)) - elif request.method == 'PUT': - return get_response( - movingFeatures.manage_collection(request, 'update', - collection_id)) + return execute_from_flask( + movingfeatures.manage_collection, request, 'create') else: - return get_response( - movingFeatures.get_collection(request, collection_id)) + collections = filter_dict_by_key_value(api_.config['resources'], + 'type', 'collection') + # collection in config + if collection_id in collections: + return get_response( + api_.describe_collections( + request, collection_id)) + # moving feature collection + else: + if request.method == 'DELETE': + return execute_from_flask( + movingfeatures.manage_collection, request, 'delete', + collection_id) + elif request.method == 'PUT': + return 
execute_from_flask( + movingfeatures.manage_collection, request, 'update', + collection_id) + else: + return execute_from_flask( + movingfeatures.get_collection, request, collection_id) @BLUEPRINT.route('/collections/<path:collection_id>/schema') @@ -306,24 +314,80 @@ def collection_items(collection_id, item_id=None): :returns: HTTP response """ - if item_id is None: - if request.method == 'GET': # list items - return get_response( - movingFeatures.get_collection_items(request, collection_id)) - elif request.method == 'POST': # filter or manage items - return get_response( - movingFeatures.manage_collection_item(request, - 'create', collection_id)) - elif request.method == 'DELETE': - return get_response( - movingFeatures.manage_collection_item(request, - 'delete', collection_id, - item_id)) + collections = filter_dict_by_key_value(api_.config['resources'], + 'type', 'collection') + # collection in config + if collection_id in collections: + if item_id is None: + if request.method == 'GET': # list items + return execute_from_flask(itemtypes_api.get_collection_items, + request, collection_id, + skip_valid_check=True) + elif request.method == 'POST': # filter or manage items + if request.content_type is not None: + if request.content_type == 'application/geo+json': + return execute_from_flask( + itemtypes_api.manage_collection_item, + request, 'create', collection_id, + skip_valid_check=True) + else: + return execute_from_flask( + itemtypes_api.post_collection_items, request, + collection_id, skip_valid_check=True) + elif request.method == 'OPTIONS': + return execute_from_flask( + itemtypes_api.manage_collection_item, request, + 'options', collection_id, skip_valid_check=True) + + elif request.method == 'DELETE': + return execute_from_flask( + itemtypes_api.manage_collection_item, + request, + 'delete', + collection_id, + item_id, + skip_valid_check=True) + elif request.method == 'PUT': + return execute_from_flask( + itemtypes_api.manage_collection_item, + request, + 
'update', + collection_id, + item_id, + skip_valid_check=True) + elif request.method == 'OPTIONS': + return execute_from_flask( + itemtypes_api.manage_collection_item, + request, + 'options', + collection_id, + item_id, + skip_valid_check=True) + else: + return execute_from_flask(itemtypes_api.get_collection_item, + request, collection_id, item_id) + # moving feature collections else: - return get_response( - movingFeatures.get_collection_item(request, - collection_id, item_id)) + if item_id is None: + if request.method == 'GET': # list items + return execute_from_flask( + movingfeatures.get_collection_items, request, + collection_id) + elif request.method == 'POST': # filter or manage items + return execute_from_flask( + movingfeatures.manage_collection_item, request, + 'create', collection_id) + + elif request.method == 'DELETE': + return execute_from_flask( + movingfeatures.manage_collection_item, request, + 'delete', collection_id, + item_id) + else: + return execute_from_flask( + movingfeatures.get_collection_item, request, + collection_id, item_id) @BLUEPRINT.route('/collections/<path:collection_id>/coverage') @@ -561,7 +625,7 @@ def stac_catalog_path(path): '/collections/<path:collection_id>/items/<path:item_id>/tgsequence', methods=['GET', 'POST']) @BLUEPRINT.route( - '/collections/<path:collection_id>/items/<path:item_id>/tgsequence/<path:tGeometry_id>', # noqa + '/collections/<path:collection_id>/items/<path:item_id>/tgsequence/<path:tGeometry_id>', # noqa methods=['DELETE']) def collection_items_tgeometries(collection_id, item_id, tGeometry_id=None): """ @@ -575,28 +639,28 @@ def collection_items_tgeometries(collection_id, item_id, tGeometry_id=None): if tGeometry_id is None: if request.method == 'GET': # list items - return get_response( - movingFeatures.get_collection_items_tGeometry(request, - collection_id, - item_id)) + return execute_from_flask( + movingfeatures.get_collection_items_tGeometry, request, + collection_id, + item_id) elif 
request.method == 'POST': # filter or manage items - return get_response( - movingFeatures.manage_collection_item_tGeometry(request, - 'create', - collection_id, - item_id)) + return execute_from_flask( + movingfeatures.manage_collection_item_tGeometry, request, + 'create', + collection_id, + item_id) elif request.method == 'DELETE': - return get_response( - movingFeatures.manage_collection_item_tGeometry(request, - 'delete', - collection_id, - item_id, - tGeometry_id)) + return execute_from_flask( + movingfeatures.manage_collection_item_tGeometry, request, + 'delete', + collection_id, + item_id, + tGeometry_id) @BLUEPRINT.route( - '/collections/<path:collection_id>/items/<path:item_id>/tgsequence/<path:tGeometry_id>/velocity', # noqa + '/collections/<path:collection_id>/items/<path:item_id>/tgsequence/<path:tGeometry_id>/velocity', # noqa methods=['GET']) def collection_items_tgeometries_velocity( collection_id, item_id, tGeometry_id): @@ -610,16 +674,16 @@ def collection_items_tgeometries_velocity( """ if request.method == 'GET': # list items - return get_response( - movingFeatures - .get_collection_items_tGeometry_velocity(request, - collection_id, - item_id, - tGeometry_id)) + return execute_from_flask( + movingfeatures + .get_collection_items_tGeometry_velocity, request, + collection_id, + item_id, + tGeometry_id) @BLUEPRINT.route( - '/collections/<path:collection_id>/items/<path:item_id>/tgsequence/<path:tGeometry_id>/distance', # noqa + '/collections/<path:collection_id>/items/<path:item_id>/tgsequence/<path:tGeometry_id>/distance', # noqa methods=['GET']) def collection_items_tgeometries_distance( collection_id, item_id, tGeometry_id): @@ -633,16 +697,16 @@ def collection_items_tgeometries_distance( """ if request.method == 'GET': # list items - return get_response( - movingFeatures - .get_collection_items_tGeometry_distance(request, - collection_id, - item_id, - tGeometry_id)) + return execute_from_flask( + movingfeatures + 
.get_collection_items_tGeometry_distance, request, + collection_id, + item_id, + tGeometry_id) @BLUEPRINT.route( - '/collections/<path:collection_id>/items/<path:item_id>/tgsequence/<path:tGeometry_id>/acceleration', # noqa + '/collections/<path:collection_id>/items/<path:item_id>/tgsequence/<path:tGeometry_id>/acceleration', # noqa methods=['GET']) def collection_items_tgeometries_acceleration(collection_id, item_id, tGeometry_id): @@ -656,12 +720,12 @@ def collection_items_tgeometries_acceleration(collection_id, item_id, """ if request.method == 'GET': # list items - return get_response( - movingFeatures - .get_collection_items_tGeometry_acceleration(request, - collection_id, - item_id, - tGeometry_id)) + return execute_from_flask( + movingfeatures + .get_collection_items_tGeometry_acceleration, request, + collection_id, + item_id, + tGeometry_id) @BLUEPRINT.route( @@ -678,20 +742,20 @@ def collection_items_tproperties(collection_id, item_id): """ if request.method == 'GET': # list items - return get_response( - movingFeatures.get_collection_items_tProperty(request, - collection_id, - item_id)) + return execute_from_flask( + movingfeatures.get_collection_items_tProperty, request, + collection_id, + item_id) elif request.method == 'POST': # filter or manage items - return get_response( - movingFeatures.manage_collection_item_tProperty(request, - 'create', - collection_id, - item_id)) + return execute_from_flask( + movingfeatures.manage_collection_item_tProperty, request, + 'create', + collection_id, + item_id) @BLUEPRINT.route( - '/collections/<path:collection_id>/items/<path:item_id>/tproperties/<path:tProperty_id>', # noqa + '/collections/<path:collection_id>/items/<path:item_id>/tproperties/<path:tProperty_id>', # noqa methods=['GET', 'POST', 'DELETE']) def collection_items_tproperties_values(collection_id, item_id, tProperty_id): """ @@ -704,27 +768,27 @@ def collection_items_tproperties_values(collection_id, item_id, tProperty_id): """ if request.method == 
'GET': # list items - return get_response( - movingFeatures.get_collection_items_tProperty_value(request, - collection_id, - item_id, - tProperty_id)) + return execute_from_flask( + movingfeatures.get_collection_items_tProperty_value, request, + collection_id, + item_id, + tProperty_id) elif request.method == 'POST': # filter or manage items - return get_response( - movingFeatures - .manage_collection_item_tProperty_value(request, - 'create', - collection_id, - item_id, - tProperty_id)) + return execute_from_flask( + movingfeatures + .manage_collection_item_tProperty_value, request, + 'create', + collection_id, + item_id, + tProperty_id) elif request.method == 'DELETE': # filter or manage items - return get_response( - movingFeatures - .manage_collection_item_tProperty(request, - 'delete', - collection_id, - item_id, - tProperty_id)) + return execute_from_flask( + movingfeatures + .manage_collection_item_tProperty, request, + 'delete', + collection_id, + item_id, + tProperty_id) @ADMIN_BLUEPRINT.route('/admin/config', methods=['GET', 'PUT', 'PATCH']) diff --git a/pygeoapi/openapi.py b/pygeoapi/openapi.py index 3cf5c0b8f..ed5d20674 100644 --- a/pygeoapi/openapi.py +++ b/pygeoapi/openapi.py @@ -53,13 +53,14 @@ OPENAPI_YAML = { 'oapif-1': 'https://schemas.opengis.net/ogcapi/features/part1/1.0/openapi/ogcapi-features-1.yaml', # noqa - 'oapif-2': 'https://schemas.opengis.net/ogcapi/features/part2/1.0/openapi/ogcapi-features-2.yaml', # noqa + 'oapif-2': 'https://schemas.opengis.net/ogcapi/features/part2/1.0/openapi/ogcapi-features-2.yaml', # noqa 'oapip': 'https://schemas.opengis.net/ogcapi/processes/part1/1.0/openapi', 'oacov': 'https://raw.githubusercontent.com/tomkralidis/ogcapi-coverages-1/fix-cis/yaml-unresolved', # noqa 'oapir': 'https://raw.githubusercontent.com/opengeospatial/ogcapi-records/master/core/openapi', # noqa - 'oaedr': 'https://schemas.opengis.net/ogcapi/edr/1.0/openapi', # noqa + 'oaedr': 'https://schemas.opengis.net/ogcapi/edr/1.0/openapi', # noqa 
'oapit': 'https://schemas.opengis.net/ogcapi/tiles/part1/1.0/openapi/ogcapi-tiles-1.yaml', # noqa - 'pygeoapi': 'https://raw.githubusercontent.com/geopython/pygeoapi/master/pygeoapi/schemas/config/pygeoapi-config-0.x.yml' # noqa + 'pygeoapi': 'https://raw.githubusercontent.com/geopython/pygeoapi/master/pygeoapi/schemas/config/pygeoapi-config-0.x.yml', # noqa + 'movingfeature': 'https://schemas.opengis.net/ogcapi/movingfeatures/part1/1.0/openapi/ogcapi-movingfeatures-1.bundled.yaml' # noqa } THISDIR = os.path.dirname(os.path.realpath(__file__)) @@ -154,7 +155,7 @@ def get_oas_30(cfg: dict, fail_on_invalid_collection: bool = True) -> dict: osl = get_ogc_schemas_location(cfg['server']) OPENAPI_YAML['oapif-1'] = os.path.join(osl, 'ogcapi/features/part1/1.0/openapi/ogcapi-features-1.yaml') # noqa - OPENAPI_YAML['oapif-2'] = os.path.join(osl, 'ogcapi/features/part2/1.0/openapi/ogcapi-features-2.yaml') # noqa + OPENAPI_YAML['oapif-2'] = os.path.join(osl, 'ogcapi/features/part2/1.0/openapi/ogcapi-features-2.yaml') # noqa LOGGER.debug('setting up server info') oas = { @@ -267,6 +268,46 @@ def get_oas_30(cfg: dict, fail_on_invalid_collection: bool = True) -> dict: '400': {'$ref': f"{OPENAPI_YAML['oapif-1']}#/components/responses/InvalidParameter"}, # noqa '500': {'$ref': f"{OPENAPI_YAML['oapif-1']}#/components/responses/ServerError"} # noqa } + }, + "post": { + "operationId": "registerMetadata", + "summary": "Register metadata about a collection of moving features", # noqa + "description": "A user SHOULD register metadata about a collection of moving features into the system.\n", # noqa + "tags": [ + "MovingFeatureCollection" + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/schemas/collection-body" # noqa + }, + "example": { + "title": "moving_feature_collection_sample", # noqa + "updateFrequency": 1000, + "description": "example", + "itemType": "movingfeature" + } + } + } + }, + "responses": 
{ + "201": { + "description": "Successful create a collection to manage moving features.", # noqa + "headers": { + "Location": { + "description": "A URI of the newly added resource", # noqa + "schema": { + "type": "string", + "example": "https://data.example.org/collections/mfc-1" # noqa + } + } + } + }, + "500": { + "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError" # noqa + } + } } } @@ -311,30 +352,30 @@ def get_oas_30(cfg: dict, fail_on_invalid_collection: bool = True) -> dict: ], 'properties': { 'queryable': { - 'description': 'the token that may be used in a CQL predicate', # noqa + 'description': 'the token that may be used in a CQL predicate', # noqa 'type': 'string' }, 'title': { - 'description': 'a human readable title for the queryable', # noqa + 'description': 'a human readable title for the queryable', # noqa 'type': 'string' }, 'description': { - 'description': 'a human-readable narrative describing the queryable', # noqa + 'description': 'a human-readable narrative describing the queryable', # noqa 'type': 'string' }, 'language': { - 'description': 'the language used for the title and description', # noqa + 'description': 'the language used for the title and description', # noqa 'type': 'string', 'default': [ 'en' ] }, 'type': { - 'description': 'the data type of the queryable', # noqa + 'description': 'the data type of the queryable', # noqa 'type': 'string' }, 'type-ref': { - 'description': 'a reference to the formal definition of the type', # noqa + 'description': 'a reference to the formal definition of the type', # noqa 'type': 'string', 'format': 'url' } @@ -401,7 +442,7 @@ def get_oas_30(cfg: dict, fail_on_invalid_collection: bool = True) -> dict: oas['components']['responses'].update({ 'Tiles': { - 'description': 'Retrieves the tiles description for this collection', # noqa + 'description': 'Retrieves the tiles description for this collection', # noqa 'content': { 'application/json': { 'schema': { @@ -436,7 +477,7 @@ 
def get_oas_30(cfg: dict, fail_on_invalid_collection: bool = True) -> dict: 'tileMatrixSetLinks': { 'type': 'array', 'items': { - '$ref': '#/components/schemas/tilematrixsetlink' # noqa + '$ref': '#/components/schemas/tilematrixsetlink' # noqa } }, 'links': { From bb6868ac171b8b1f37ce6646240cd9f4121c62dc Mon Sep 17 00:00:00 2001 From: ts-shimizu <ts-shimizu@apptec.co.jp> Date: Tue, 26 Nov 2024 13:07:18 +0900 Subject: [PATCH 07/14] About of the changing of table configuration (#3) create new table for new api request * DDL changed because new table was added * Added new API requests and associated tests * Adjusted file placement position --- pygeoapi/api/movingfeatures.py | 23 +- pygeoapi/flask_app.py | 82 +- .../provider/mf-api-sql/initdb-mobilitydb.sh | 55 - pygeoapi/provider/mf-api-sql/mf-api.sql | 45 - pygeoapi/provider/postgresql_mobilitydb.py | 211 +- tests/api/test_movingfeature.py | 31 +- tests/data/mf-api.sql | 51 + tests/pygeoapi-test-config-mfapi.yml | 94 + tests/pygeoapi-test-openapi-mfapi.yml | 2416 +++++++++++++++++ tests/test_postgresql_mobilitydb.py | 29 +- 10 files changed, 2792 insertions(+), 245 deletions(-) delete mode 100644 pygeoapi/provider/mf-api-sql/initdb-mobilitydb.sh delete mode 100644 pygeoapi/provider/mf-api-sql/mf-api.sql create mode 100644 tests/data/mf-api.sql create mode 100644 tests/pygeoapi-test-config-mfapi.yml create mode 100644 tests/pygeoapi-test-openapi-mfapi.yml diff --git a/pygeoapi/api/movingfeatures.py b/pygeoapi/api/movingfeatures.py index 7aeb1eb48..32294f844 100644 --- a/pygeoapi/api/movingfeatures.py +++ b/pygeoapi/api/movingfeatures.py @@ -1329,7 +1329,6 @@ def get_collection_items_tProperty(api: API, request: APIRequest, datetime=datetime_, limit=limit, offset=offset, sub_temporal_value=sub_temporal_value) - temporal_properties = [] if sub_temporal_value is False or sub_temporal_value == "false": for row in result: @@ -1703,7 +1702,7 @@ def get_collection_items_tProperty_value(api: API, request: APIRequest, def 
manage_collection_item_tProperty_value( api: API, request: APIRequest, action, dataset, identifier, - tProperty=None) -> Tuple[dict, int, str]: + tProperty=None, tvalue=None) -> Tuple[dict, int, str]: """ Adds Temporal Property Value item to a Temporal Property @@ -1729,7 +1728,6 @@ def manage_collection_item_tProperty_value( return api.get_exception( HTTPStatus.BAD_REQUEST, headers, request.format, 'ConnectingError', msg) - if [dataset, identifier, tProperty] not in tproperty_list: msg = 'Temporal Property not found' LOGGER.error(msg) @@ -1740,6 +1738,7 @@ def manage_collection_item_tProperty_value( collection_id = dataset mfeature_id = identifier tProperty_name = tProperty + tvalue_id = tvalue if action == 'create': if not request.data: msg = 'No data found' @@ -1797,6 +1796,24 @@ def manage_collection_item_tProperty_value( return headers, HTTPStatus.CREATED, '' + if action == 'delete': + LOGGER.debug('Deleting item') + + try: + pmdb_provider.connect() + pmdb_provider.delete_temporalvalue( + "AND tvalue_id ='{0}'".format(tvalue_id)) + + except (Exception, psycopg2.Error) as error: + msg = str(error) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'ConnectingError', msg) + finally: + pmdb_provider.disconnect() + + return headers, HTTPStatus.NO_CONTENT, '' + def validate_bbox(value=None) -> list: """ diff --git a/pygeoapi/flask_app.py b/pygeoapi/flask_app.py index 8ab7002cb..6cb9e0f2b 100644 --- a/pygeoapi/flask_app.py +++ b/pygeoapi/flask_app.py @@ -367,7 +367,7 @@ def collection_items(collection_id, item_id=None): else: return execute_from_flask(itemtypes_api.get_collection_item, request, collection_id, item_id) - # moving feature collections + else: if item_id is None: if request.method == 'GET': # list items @@ -731,7 +731,10 @@ def collection_items_tgeometries_acceleration(collection_id, item_id, @BLUEPRINT.route( '/collections/<path:collection_id>/items/<path:item_id>/tproperties', methods=['GET', 'POST']) -def 
collection_items_tproperties(collection_id, item_id): +@BLUEPRINT.route( + '/collections/<path:collection_id>/items/<path:item_id>/tproperties/<path:tProperty_id>', # noqa + methods=['GET', 'POST', 'DELETE']) +def collection_items_tproperties(collection_id, item_id, tProperty_id=None): """ OGC API collections items endpoint @@ -741,23 +744,48 @@ def collection_items_tproperties(collection_id, item_id): :returns: HTTP response """ - if request.method == 'GET': # list items - return execute_from_flask( - movingfeatures.get_collection_items_tProperty, request, - collection_id, - item_id) - elif request.method == 'POST': # filter or manage items - return execute_from_flask( - movingfeatures.manage_collection_item_tProperty, request, - 'create', - collection_id, - item_id) + if tProperty_id is None: + if request.method == 'GET': # list items + return execute_from_flask( + movingfeatures.get_collection_items_tProperty, request, + collection_id, + item_id) + elif request.method == 'POST': # filter or manage items + return execute_from_flask( + movingfeatures.manage_collection_item_tProperty, request, + 'create', + collection_id, + item_id) + else: + if request.method == 'GET': # list items + return execute_from_flask( + movingfeatures.get_collection_items_tProperty_value, request, + collection_id, + item_id, + tProperty_id) + elif request.method == 'POST': # filter or manage items + return execute_from_flask( + movingfeatures + .manage_collection_item_tProperty_value, request, + 'create', + collection_id, + item_id, + tProperty_id) + elif request.method == 'DELETE': # filter or manage items + return execute_from_flask( + movingfeatures + .manage_collection_item_tProperty, request, + 'delete', + collection_id, + item_id, + tProperty_id) @BLUEPRINT.route( - '/collections/<path:collection_id>/items/<path:item_id>/tproperties/<path:tProperty_id>', # noqa - methods=['GET', 'POST', 'DELETE']) -def collection_items_tproperties_values(collection_id, item_id, tProperty_id): + 
'/collections/<path:collection_id>/items/<path:item_id>/tproperties/<path:tProperty_id>/<path:tValue_id>', # noqa + methods=['DELETE']) +def collection_items_tproperties_values(collection_id, item_id, + tProperty_id, tValue_id=None): """ OGC API collections items endpoint @@ -767,28 +795,14 @@ def collection_items_tproperties_values(collection_id, item_id, tProperty_id): :returns: HTTP response """ - if request.method == 'GET': # list items - return execute_from_flask( - movingfeatures.get_collection_items_tProperty_value, request, - collection_id, - item_id, - tProperty_id) - elif request.method == 'POST': # filter or manage items + if request.method == 'DELETE': # filter or manage items return execute_from_flask( - movingfeatures - .manage_collection_item_tProperty_value, request, - 'create', - collection_id, - item_id, - tProperty_id) - elif request.method == 'DELETE': # filter or manage items - return execute_from_flask( - movingfeatures - .manage_collection_item_tProperty, request, + movingfeatures.manage_collection_item_tProperty_value, request, 'delete', collection_id, item_id, - tProperty_id) + tProperty_id, + tValue_id) @ADMIN_BLUEPRINT.route('/admin/config', methods=['GET', 'PUT', 'PATCH']) diff --git a/pygeoapi/provider/mf-api-sql/initdb-mobilitydb.sh b/pygeoapi/provider/mf-api-sql/initdb-mobilitydb.sh deleted file mode 100644 index 63d44c468..000000000 --- a/pygeoapi/provider/mf-api-sql/initdb-mobilitydb.sh +++ /dev/null @@ -1,55 +0,0 @@ -#!/bin/bash - -echo "shared_preload_libraries = 'postgis-3.so'" >> $PGDATA/postgresql.conf - -set -e - -# Create the 'mobilitydb' extension in the mobilitydb database -echo "Loading MobilityDB extension into mobilitydb" -psql --user="$POSTGRES_USER" --dbname="mobilitydb" <<- 'EOSQL' - CREATE EXTENSION IF NOT EXISTS PostGIS; - CREATE EXTENSION IF NOT EXISTS mobilitydb CASCADE; - CREATE EXTENSION IF NOT EXISTS "uuid-ossp"; - - -- Table collection - CREATE TABLE public.collection ( - collection_id uuid NOT NULL DEFAULT 
uuid_generate_v4(), - collection_property jsonb NULL, - PRIMARY KEY (collection_id) - ); - - -- Table MovingFeature - CREATE TABLE public.mfeature ( - collection_id uuid NOT NULL, - mfeature_id uuid NOT NULL DEFAULT uuid_generate_v4(), - mf_geometry geometry NULL, - mf_property jsonb NULL, - lifespan tstzspan NULL, - PRIMARY KEY (collection_id, mfeature_id), - FOREIGN KEY (collection_id) REFERENCES collection(collection_id) - ); - - -- Table TemporalGeometry - CREATE TABLE public.tgeometry ( - collection_id uuid NOT NULL, - mfeature_id uuid NOT NULL, - tgeometry_id uuid NOT NULL DEFAULT uuid_generate_v4(), - tgeometry_property tgeompoint NULL, - tgeog_property tgeompoint NULL, - PRIMARY KEY (collection_id, mfeature_id, tgeometry_id), - FOREIGN KEY (collection_id, mfeature_id) REFERENCES mfeature(collection_id, mfeature_id) - ); - - -- Table TemporalProperty - CREATE TABLE public.tproperties ( - collection_id uuid NOT NULL, - mfeature_id uuid NOT NULL, - tproperties_name text NOT NULL, - datetime_group int4 NOT NULL, - tproperty jsonb NULL, - pvalue_float tfloat NULL, - pvalue_text ttext NULL, - PRIMARY KEY (collection_id, mfeature_id, tproperties_name, datetime_group), - FOREIGN KEY (collection_id, mfeature_id) REFERENCES mfeature(collection_id, mfeature_id) - ); -EOSQL diff --git a/pygeoapi/provider/mf-api-sql/mf-api.sql b/pygeoapi/provider/mf-api-sql/mf-api.sql deleted file mode 100644 index 70cb140e3..000000000 --- a/pygeoapi/provider/mf-api-sql/mf-api.sql +++ /dev/null @@ -1,45 +0,0 @@ -CREATE EXTENSION IF NOT EXISTS PostGIS; -CREATE EXTENSION IF NOT EXISTS mobilitydb CASCADE; -CREATE EXTENSION IF NOT EXISTS "uuid-ossp"; - - -- Table collection -CREATE TABLE public.collection ( - collection_id uuid NOT NULL DEFAULT uuid_generate_v4(), - collection_property jsonb NULL, - PRIMARY KEY (collection_id) -); - - -- Table MovingFeature -CREATE TABLE public.mfeature ( - collection_id uuid NOT NULL, - mfeature_id uuid NOT NULL DEFAULT uuid_generate_v4(), - mf_geometry 
geometry NULL, - mf_property jsonb NULL, - lifespan tstzspan NULL, - PRIMARY KEY (collection_id, mfeature_id), - FOREIGN KEY (collection_id) REFERENCES collection(collection_id) -); - - -- Table TemporalGeometry -CREATE TABLE public.tgeometry ( - collection_id uuid NOT NULL, - mfeature_id uuid NOT NULL, - tgeometry_id uuid NOT NULL DEFAULT uuid_generate_v4(), - tgeometry_property tgeompoint NULL, - tgeog_property tgeompoint NULL, - PRIMARY KEY (collection_id, mfeature_id, tgeometry_id), - FOREIGN KEY (collection_id, mfeature_id) REFERENCES mfeature(collection_id, mfeature_id) -); - - -- Table TemporalProperty -CREATE TABLE public.tproperties ( - collection_id uuid NOT NULL, - mfeature_id uuid NOT NULL, - tproperties_name text NOT NULL, - datetime_group int4 NOT NULL, - tproperty jsonb NULL, - pvalue_float tfloat NULL, - pvalue_text ttext NULL, - PRIMARY KEY (collection_id, mfeature_id, tproperties_name, datetime_group), - FOREIGN KEY (collection_id, mfeature_id) REFERENCES mfeature(collection_id, mfeature_id) -); diff --git a/pygeoapi/provider/postgresql_mobilitydb.py b/pygeoapi/provider/postgresql_mobilitydb.py index 91a6d77db..c808613d4 100644 --- a/pygeoapi/provider/postgresql_mobilitydb.py +++ b/pygeoapi/provider/postgresql_mobilitydb.py @@ -12,21 +12,20 @@ class PostgresMobilityDB: - host = '127.0.0.1' - port = 5432 - db = 'mobilitydb' - user = 'docker' - password = 'docker' - connection = None - - # Local WSL environment test - # host = '172.20.241.18' + # host = '127.0.0.1' # port = 5432 - # db = 'mobility' - # user = 'postgres' - # password = 'postgres' + # db = 'mobilitydb' + # user = 'docker' + # password = 'docker' # connection = None + host = '172.20.241.18' + port = 5432 + db = 'mobility' + user = 'postgres' + password = 'postgres' + connection = None + def __init__(self, datasource=None): """ PostgresMobilityDB Class constructor @@ -218,17 +217,17 @@ def get_features( mfeature.mf_geometry, mfeature.mf_property, mfeature.lifespan) mfeature left outer 
join (select mfeature.collection_id, mfeature.mfeature_id, - extent(tproperties.pvalue_float) + extent(tvalue.pvalue_float) as extentTPropertiesValueFloat, - extent(tproperties.pvalue_text) as extentTPropertiesValueText - from mfeature left outer join tproperties - on mfeature.collection_id = tproperties.collection_id - and mfeature.mfeature_id = tproperties.mfeature_id + extent(tvalue.pvalue_text) as extentTPropertiesValueText + from mfeature left outer join tvalue + on mfeature.collection_id = tvalue.collection_id + and mfeature.mfeature_id = tvalue.mfeature_id where mfeature.collection_id ='{0}' group by mfeature.collection_id, mfeature.mfeature_id) - tproperties ON - mfeature.collection_id = tproperties.collection_id - and mfeature.mfeature_id = tproperties.mfeature_id + tvalue ON + mfeature.collection_id = tvalue.collection_id + and mfeature.mfeature_id = tvalue.mfeature_id where 1=1 {1} {2}""" .format( collection_id, bbox_restriction, datetime_restriction)) @@ -428,7 +427,11 @@ def get_temporalproperties( tproperties.mfeature_id, tproperties.tproperties_name) tproperties.collection_id, tproperties.mfeature_id, tproperties.tproperties_name, tproperties.tproperty - from tproperties WHERE tproperties.collection_id ='{0}' + from tproperties left outer join tvalue + on tproperties.collection_id = tvalue.collection_id + and tproperties.mfeature_id = tvalue.mfeature_id + and tproperties.tproperties_name = tvalue.tproperties_name + WHERE tproperties.collection_id ='{0}' AND tproperties.mfeature_id='{1}' {2}""". 
format( collection_id, mfeature_id, datetime_restriction)) @@ -443,10 +446,10 @@ def get_temporalproperties( if sub_temporal_value or sub_temporal_value == "true": subTemporalValue_float_field = ( - """atTime(tproperties.pvalue_float, + """atTime(tvalue.pvalue_float, tstzspan('[{0}]'))""" .format(datetime)) subTemporalValue_text_field = ( - """atTime(tproperties.pvalue_text, + """atTime(tvalue.pvalue_text, tstzspan('[{0}]'))""" .format(datetime)) select_temporalvalue_query = ( @@ -457,18 +460,22 @@ def get_temporalproperties( tproperties.mfeature_id, tproperties.tproperties_name) tproperties.collection_id, tproperties.mfeature_id, tproperties.tproperties_name, tproperties.tproperty - from tproperties where tproperties.collection_id ='{0}' - AND tproperties.mfeature_id='{1}' {2} {3}) tproperties - left outer join (select tproperties.collection_id, - tproperties.mfeature_id, tproperties.tproperties_name, - tproperties.datetime_group, {4} as pvalue_float, - {5} as pvalue_text from tproperties + from tproperties left outer join tvalue + on tproperties.collection_id = tvalue.collection_id + and tproperties.mfeature_id = tvalue.mfeature_id + and tproperties.tproperties_name = tvalue.tproperties_name where tproperties.collection_id ='{0}' - AND tproperties.mfeature_id='{1}' and ({4} is not null - or {5} is not null)) tpropertiesvalue - on tproperties.collection_id = tpropertiesvalue.collection_id - and tproperties.mfeature_id = tpropertiesvalue.mfeature_id - and tproperties.tproperties_name = tpropertiesvalue.tproperties_name + AND tproperties.mfeature_id='{1}' {2} {3}) tproperties + left outer join (select tvalue.collection_id, + tvalue.mfeature_id, tvalue.tproperties_name, + tvalue.datetime_group, {4} as pvalue_float, + {5} as pvalue_text from tvalue + where tvalue.collection_id ='{0}' + AND tvalue.mfeature_id='{1}' and ({4} is not null + or {5} is not null)) tvalue + on tproperties.collection_id = tvalue.collection_id + and tproperties.mfeature_id = tvalue.mfeature_id 
+ and tproperties.tproperties_name = tvalue.tproperties_name where 1=1 order by datetime_group""". format( collection_id, mfeature_id, @@ -507,44 +514,45 @@ def get_temporalproperties_value( datetime_restriction = "" if datetime != '' and datetime is not None: datetime_restriction = ( - """ and (atTime(tproperties.pvalue_float, + """ and (atTime(tvalue.pvalue_float, tstzspan('[{0}]')) is not null - or atTime(tproperties.pvalue_text, + or atTime(tvalue.pvalue_text, tstzspan('[{0}]')) is not null) """ .format(datetime)) float_field = 'pvalue_float' text_field = 'pvalue_text' if leaf != '' and leaf is not None: - float_field = "atTime(tproperties.pvalue_float, \ + float_field = "atTime(tvalue.pvalue_float, \ tstzset('{" + leaf + "}'))" - text_field = "atTime(tproperties.pvalue_text, \ + text_field = "atTime(tvalue.pvalue_text, \ tstzset('{" + leaf + "}'))" elif sub_temporal_value or sub_temporal_value == "true": - float_field = "atTime(tproperties.pvalue_float, \ + float_field = "atTime(tvalue.pvalue_float, \ tstzspan('[" + datetime + "]'))" - text_field = "atTime(tproperties.pvalue_text, \ + text_field = "atTime(tvalue.pvalue_text, \ tstzspan('[" + datetime + "]'))" select_query = ( """select tproperties.collection_id, tproperties.mfeature_id, tproperties.tproperties_name, tproperties.tproperty, datetime_group, pvalue_float, pvalue_text - from (select distinct on (tproperties.collection_id, - tproperties.mfeature_id, tproperties.tproperties_name) - tproperties.collection_id, tproperties.mfeature_id, + from (select tproperties.collection_id, tproperties.mfeature_id, tproperties.tproperties_name, tproperties.tproperty from tproperties where tproperties.collection_id ='{0}' AND tproperties.mfeature_id='{1}' AND tproperties.tproperties_name='{2}') tproperties left outer join (select tproperties.collection_id, tproperties.mfeature_id, tproperties.tproperties_name, - tproperties.datetime_group, {3} as pvalue_float, - {4} as pvalue_text from tproperties + 
tvalue.datetime_group, {3} as pvalue_float, {4} as pvalue_text + from tproperties left outer join tvalue + on tproperties.collection_id = tvalue.collection_id + and tproperties.mfeature_id = tvalue.mfeature_id + and tproperties.tproperties_name = tvalue.tproperties_name where tproperties.collection_id ='{0}' AND tproperties.mfeature_id='{1}' - AND tproperties.tproperties_name='{2}' {5}) tpropertiesvalue - on tproperties.collection_id = tpropertiesvalue.collection_id - and tproperties.mfeature_id = tpropertiesvalue.mfeature_id - and tproperties.tproperties_name = tpropertiesvalue.tproperties_name + AND tproperties.tproperties_name='{2}' {5}) tvalue + on tproperties.collection_id = tvalue.collection_id + and tproperties.mfeature_id = tvalue.mfeature_id + and tproperties.tproperties_name = tvalue.tproperties_name where 1=1 order by datetime_group""" .format(collection_id, mfeature_id, tProperty_name, float_field, text_field, datetime_restriction)) @@ -673,11 +681,11 @@ def post_temporalproperties( datetimes = [] if 'datetimes' in g_temporal_property: datetimes = g_temporal_property.pop("datetimes", None) - datetime_group = self.get_temporalproperties_group( - collection_id, mfeature_id, datetimes) + tproperties_name_list = [] for tproperties_name in g_temporal_property: with self.connection.cursor() as cur: + temporal_value_data = {} if 'values' in g_temporal_property[tproperties_name] \ and 'interpolation' in g_temporal_property[ tproperties_name]: @@ -686,44 +694,30 @@ def post_temporalproperties( interpolation = g_temporal_property[tproperties_name].pop( "interpolation", None) - temporal_value = self.create_temporalproperty_value( - datetimes, values, interpolation) - - dataType = temporal_value["type"] - pvalue_column = "" - value = None - - pymeos_initialize() - if dataType == 'MovingFloat': - pvalue_column = "pValue_float" - value = Temporal._factory( - tfloat_from_mfjson(json.dumps(temporal_value))) - else: - pvalue_column = "pValue_text" - value = 
Temporal._factory( - ttext_from_mfjson(json.dumps(temporal_value))) - - insert_query = ( - """INSERT INTO tproperties(collection_id, mfeature_id, - tproperties_name, datetime_group, tproperty, {0}) - VALUES ('{1}', '{2}', '{3}', {4}, '{5}', '{6}')""" - .format( - pvalue_column, collection_id, mfeature_id, - tproperties_name, datetime_group, json.dumps( - temporal_property[tproperties_name]), - str(value))) - cur.execute(insert_query) - else: - insert_query = ("""INSERT INTO tproperties(collection_id, - mfeature_id, tproperties_name, datetime_group, tproperty) - VALUES ('{0}', '{1}', '{2}', {3}, '{4}')""".format( + temporal_value_data['datetimes'] = datetimes + temporal_value_data['values'] = values + temporal_value_data['interpolation'] = interpolation + + insert_query = ( + """INSERT INTO tproperties(collection_id, mfeature_id, + tproperties_name, tproperty) + VALUES ('{0}', '{1}', '{2}', '{3}') + ON CONFLICT (collection_id, mfeature_id, + tproperties_name) + DO UPDATE SET tproperty = EXCLUDED.tproperty""" + .format(collection_id, mfeature_id, + tproperties_name, json.dumps( + g_temporal_property[tproperties_name]))) + cur.execute(insert_query) + + if temporal_value_data: + self.post_temporalvalue( collection_id, mfeature_id, tproperties_name, - datetime_group, json.dumps( - temporal_property[tproperties_name]))) - cur.execute(insert_query) + temporal_value_data) tproperties_name_list.append(tproperties_name) + # TODO replace g_temporal_property return tproperties_name_list def post_temporalvalue( @@ -742,7 +736,6 @@ def post_temporalvalue( :returns: Temporal Primitive Value """ - with self.connection.cursor() as cur: datetimes = temporal_value_data['datetimes'] @@ -751,7 +744,7 @@ def post_temporalvalue( temporal_value = self.create_temporalproperty_value( datetimes, values, interpolation) - datetime_group = self.get_temporalproperties_group( + datetime_group = self.get_temporalvalue_group( collection_id, mfeature_id, datetimes) dataType = temporal_value["type"] 
pvalue_column = "" @@ -768,16 +761,18 @@ def post_temporalvalue( ttext_from_mfjson(json.dumps(temporal_value))) insert_querry = ( - """INSERT INTO tproperties(collection_id, mfeature_id, + """INSERT INTO tvalue(collection_id, mfeature_id, tproperties_name, datetime_group, {0}) - VALUES ('{1}', '{2}', '{3}', {4}, '{5}')""" + VALUES ('{1}', '{2}', '{3}', {4}, '{5}') + RETURNING tvalue_id""" .format( pvalue_column, collection_id, mfeature_id, tproperties_name, datetime_group, str(value))) + cur.execute(insert_querry) - pValue_id = '' + tvalue_id = cur.fetchone()[0] - return pValue_id + return tvalue_id def put_collection(self, collection_id, collection_property): """ @@ -806,9 +801,11 @@ def delete_collection(self, restriction): with self.connection.cursor() as cur: cur.execute( - "DELETE FROM tgeometry WHERE 1=1 {0}".format(restriction)) + "DELETE FROM tvalue WHERE 1=1 {0}".format(restriction)) cur.execute( "DELETE FROM tproperties WHERE 1=1 {0}".format(restriction)) + cur.execute( + "DELETE FROM tgeometry WHERE 1=1 {0}".format(restriction)) cur.execute( "DELETE FROM mfeature WHERE 1=1 {0}".format(restriction)) cur.execute( @@ -821,6 +818,8 @@ def delete_movingfeature(self, restriction): :param restriction: moving feature id """ with self.connection.cursor() as cur: + cur.execute( + "DELETE FROM tvalue WHERE 1=1 {0}".format(restriction)) cur.execute( "DELETE FROM tproperties WHERE 1=1 {0}".format(restriction)) cur.execute( @@ -846,9 +845,22 @@ def delete_temporalproperties(self, restriction): """ with self.connection.cursor() as cur: + cur.execute( + "DELETE FROM tvalue WHERE 1=1 {0}".format(restriction)) cur.execute( "DELETE FROM tproperties WHERE 1=1 {0}".format(restriction)) + def delete_temporalvalue(self, restriction): + """ + Delete the temporal value record with the given restriction. 
+ + :param restriction: temporal value id + """ + + with self.connection.cursor() as cur: + cur.execute( + "DELETE FROM tvalue WHERE 1=1 {0}".format(restriction)) + def convert_temporalgeometry_to_new_version(self, temporal_geometry): """ Convert temporal geometory to new version @@ -1026,7 +1038,7 @@ def check_temporalproperty_can_post( select_query = ( """select collection_id, mfeature_id, tproperties_name, count(datetime_group) as intersect_count - from tproperties where collection_id ='{0}' + from tvalue where collection_id ='{0}' and mfeature_id='{1}' and tproperties_name in ({2}) and ((pvalue_float::tstzspan && tstzset('{3}')::tstzspan) or (pvalue_text::tstzspan && tstzset('{3}')::tstzspan)) @@ -1042,7 +1054,7 @@ def check_temporalproperty_can_post( return False return True - def get_temporalproperties_group( + def get_temporalvalue_group( self, collection_id, mfeature_id, datetimes): """ Get temporal properties group @@ -1065,24 +1077,25 @@ def get_temporalproperties_group( select_query = ( """select temp1.collection_id, temp1.mfeature_id, COALESCE(temp2.datetime_group, temp3.max_datetime_group) - from (select collection_id, mfeature_id from tproperties + from (select collection_id, mfeature_id from tvalue where collection_id ='{0}' and mfeature_id='{1}') temp1 left outer join (select collection_id, mfeature_id, - datetime_group from tproperties + datetime_group from tvalue where collection_id ='{0}' and mfeature_id='{1}' - and (timestamps(getTime(pvalue_float)) = tstzset('{2}') - or timestamps(getTime(pvalue_text)) = tstzset('{2}'))) temp2 + and (set(timestamps(pvalue_float)) = tstzset('{2}') + or set(timestamps(pvalue_text)) = tstzset('{2}'))) temp2 on temp1.collection_id = temp2.collection_id and temp1.mfeature_id = temp2.mfeature_id left outer join (select collection_id, mfeature_id, COALESCE(max(datetime_group), 0) + 1 as max_datetime_group - from tproperties where collection_id ='{0}' + from tvalue where collection_id ='{0}' and mfeature_id='{1}' group 
by collection_id, mfeature_id ) temp3 on temp1.collection_id = temp3.collection_id and temp1.mfeature_id = temp3.mfeature_id """ .format(collection_id, mfeature_id, "{" + ", ".join(datetimes) + "}")) + print(select_query) cur.execute(select_query) result = cur.fetchall() if len(result) > 0: diff --git a/tests/api/test_movingfeature.py b/tests/api/test_movingfeature.py index 2883a0b07..1b4e0a0fc 100644 --- a/tests/api/test_movingfeature.py +++ b/tests/api/test_movingfeature.py @@ -27,9 +27,9 @@ @pytest.fixture() def api_(): - with open(get_test_file_path('example-config.yml')) as fh: + with open(get_test_file_path('../pygeoapi-test-config-mfapi.yml')) as fh: config = yaml_load(fh) - with open(get_test_file_path('example-openapi.yml')) as fh: + with open(get_test_file_path('../pygeoapi-test-openapi-mfapi.yml')) as fh: openapi = yaml_load(fh) return API(config, openapi) @@ -681,6 +681,11 @@ def test_manage_collection_item_tProperty_value_create( assert rsp_headers['Content-Type'] == 'application/json' assert 'Location' in rsp_headers + location = rsp_headers['Location'] + tvalue_id = location.split('/')[-1] + assert tvalue_id is not None + context['tvalue_id'] = tvalue_id + def test_manage_collection_update( api_, @@ -1340,6 +1345,28 @@ def test_get_collection_items_tProperty_value(api_, context): assert valueSequence['interpolation'] == 'Discrete' +def test_manage_collection_item_tProperty_value_delete( + api_, context): + + # feature not found + req = mock_api_request() + rsp_headers, code, response = manage_collection_item_tProperty_value( + api_, req, 'delete', '00000000-0000-0000-0000-000000000000', + '00000000-0000-0000-0000-000000000000', '', + '00000000-0000-0000-0000-000000000000') + assert code == HTTPStatus.NOT_FOUND + + # successful delete + req = mock_api_request() + rsp_headers, code, response = manage_collection_item_tProperty_value( + api_, req, 'delete', context['collection_id'], context['mfeature_id'], + context['tProperty_name'], 
context['tvalue_id']) + + assert code == HTTPStatus.NO_CONTENT + assert response == '' + assert rsp_headers['Content-Type'] == 'application/json' + + def test_manage_collection_item_tProperty_delete( api_, context): diff --git a/tests/data/mf-api.sql b/tests/data/mf-api.sql new file mode 100644 index 000000000..8ad44bac9 --- /dev/null +++ b/tests/data/mf-api.sql @@ -0,0 +1,51 @@ +CREATE EXTENSION IF NOT EXISTS PostGIS; +CREATE EXTENSION IF NOT EXISTS mobilitydb CASCADE; +CREATE EXTENSION IF NOT EXISTS "uuid-ossp"; +-- Table collection +CREATE TABLE public.collection ( +collection_id uuid NOT NULL DEFAULT uuid_generate_v4(), +collection_property jsonb NULL, +PRIMARY KEY (collection_id) +); +-- Table MovingFeature +CREATE TABLE public.mfeature ( +collection_id uuid NOT NULL, +mfeature_id uuid NOT NULL DEFAULT uuid_generate_v4(), +mf_geometry geometry NULL, +mf_property jsonb NULL, +lifespan tstzspan NULL, +PRIMARY KEY (collection_id, mfeature_id), +FOREIGN KEY (collection_id) REFERENCES collection(collection_id) +); +-- Table TemporalGeometry +CREATE TABLE public.tgeometry ( +collection_id uuid NOT NULL, +mfeature_id uuid NOT NULL, +tgeometry_id uuid NOT NULL DEFAULT uuid_generate_v4(), +tgeometry_property tgeompoint NULL, +tgeog_property tgeompoint NULL, +PRIMARY KEY (collection_id, mfeature_id, tgeometry_id), +FOREIGN KEY (collection_id, mfeature_id) REFERENCES mfeature(collection_id, mfeature_id) +); +-- Table TemporalProperty +CREATE TABLE public.tproperties ( +collection_id uuid NOT NULL, +mfeature_id uuid NOT NULL, +tproperties_name text NOT NULL, +tproperty jsonb NULL, +PRIMARY KEY (collection_id, mfeature_id, tproperties_name), +FOREIGN KEY (collection_id, mfeature_id) REFERENCES mfeature(collection_id, mfeature_id) +); + +-- Table TemporalPropertyValue +CREATE TABLE public.tvalue ( +collection_id uuid NOT NULL, +mfeature_id uuid NOT NULL, +tproperties_name text NOT NULL, +tvalue_id uuid NOT NULL DEFAULT uuid_generate_v4(), +datetime_group int4 NOT NULL, 
+pvalue_float tfloat NULL, +pvalue_text ttext NULL, +PRIMARY KEY (collection_id, mfeature_id, tproperties_name, tvalue_id), +FOREIGN KEY (collection_id, mfeature_id, tproperties_name) REFERENCES tproperties(collection_id, mfeature_id, tproperties_name) +); diff --git a/tests/pygeoapi-test-config-mfapi.yml b/tests/pygeoapi-test-config-mfapi.yml new file mode 100644 index 000000000..307c4ee6a --- /dev/null +++ b/tests/pygeoapi-test-config-mfapi.yml @@ -0,0 +1,94 @@ +# +# Authors: Tom Kralidis <tomkralidis@gmail.com> +# +# Copyright (c) 2020 Tom Kralidis +# +# Permission is hereby granted, free of charge, to any person +# obtaining a copy of this software and associated documentation +# files (the "Software"), to deal in the Software without +# restriction, including without limitation the rights to use, +# copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the +# Software is furnished to do so, subject to the following +# conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +# OTHER DEALINGS IN THE SOFTWARE. 
+# +# ================================================================= + +server: + bind: + host: 0.0.0.0 + port: 8085 + url: http://localhost:8085 + mimetype: application/json; charset=UTF-8 + encoding: utf-8 + gzip: false + languages: + # First language is the default language + - en-US + - fr-CA + cors: true + pretty_print: true + limit: 10 + # templates: + # path: /path/to/Jinja2/templates + # static: /path/to/static/folder # css/js/img + map: + url: https://tile.openstreetmap.org/{z}/{x}/{y}.png + attribution: '© <a href="https://openstreetmap.org/copyright">OpenStreetMap contributors</a>' +# manager: +# name: TinyDB +# connection: /tmp/pygeoapi-process-manager.db +# output_dir: /tmp/ + ogc_schemas_location: /etc/pygeoapi/pygeoapi/pygeoapi/schema + +logging: + level: ERROR + logfile: /etc/pygeoapi/pygeoapi/pygeoapi-mf-api/log/pygeoapi.log + +metadata: + identification: + title: + en: Movingfeatures data server + description: + en: Access to data about moving features + keywords: + en: + - geospatial + - data + - api + keywords_type: theme + terms_of_service: https://creativecommons.org/licenses/by/4.0/ + url: https://example.org + license: + name: CC-BY 4.0 license + url: https://creativecommons.org/licenses/by/4.0/ + provider: + name: AIST, AIRC + url: https://www.airc.aist.go.jp/en/ + contact: + name: KIM, Taehoon + position: Researcher + address: 2-4-7, Aomi, Koto-ku, Tokyo + city: Tokyo + stateorprovince: Tokyo + postalcode: 135-0064 + country: Japan + phone: +xx-xxx-xxx-xxxx + fax: +xx-xxx-xxx-xxxx + email: kim.taehoon@aist.go.jp + url: https://github.com/aistairc/mf-api + hours: Mo-Fr 08:00-17:00 + instructions: During hours of service. Off on weekends. 
+ role: pointOfContact \ No newline at end of file diff --git a/tests/pygeoapi-test-openapi-mfapi.yml b/tests/pygeoapi-test-openapi-mfapi.yml new file mode 100644 index 000000000..a6e7db671 --- /dev/null +++ b/tests/pygeoapi-test-openapi-mfapi.yml @@ -0,0 +1,2416 @@ +openapi: 3.0.3 +info: + title: 'Building Blocks specified in OGC API - Moving Features - Part 1: Core' + version: 0.1.0 + description: This is the OpenAPI definition of Moving Features API specification that conforms to the OGC Moving Features Encoding Extension - JSON. + contact: + name: OGC Moving Features SWG + email: moving-features.swg-bounces@lists.ogc.org + license: + name: OGC License + url: https://www.ogc.org/ogc/Document + x-logo: + url: https://www.ogc.org/pub/www/files/OGC_Logo_2D_Blue_x_0_0.png + backgroundColor: '#FFFFFF' + altText: OGC logo + href: https://www.ogc.org/contacts +tags: + - name: Capabilities + description: Essential characteristics of the information available from the API. + - name: MovingFeatureCollection + description: Collections of moving features to be logically managed by a user. + - name: MovingFeatures + description: Moving feature data, including the temporal geometry, temporal properties, etc. + - name: TemporalGeometry + description: The spatial change over time (temporal geometry), representing the movement of the rigid or nonrigid body of a feature. + - name: TemporalGeometryQuery + description: Queryable resources for the temporal primitive geometry. + - name: TemporalProperty + description: The thematic change over time (temporal property), representing the variation of the value of any descriptive characteristic of a feature. +paths: + /: + get: + operationId: getLandingPage + summary: Landing page + description: The landing page provides links to the API definition, the conformance statements and to the feature collections in this dataset. 
+ tags: + - Capabilities + responses: + '200': + $ref: '#/components/responses/LandingPage' + '500': + $ref: '#/components/responses/ServerError' + /conformance: + get: + operationId: getConformance + summary: Information about specifications that this API conforms to + description: A list of all conformance classes specified in a standard that the server conforms to. + tags: + - Capabilities + responses: + '200': + $ref: '#/components/responses/Conformance' + '500': + $ref: '#/components/responses/ServerError' + /api: + get: + operationId: getAPIList + summary: API definition + description: A list of all API definition + tags: + - Capabilities + responses: + '500': + $ref: '#/components/responses/ServerError' + /collections: + get: + operationId: searchCatalog + summary: Retrieve catalogs of moving features collection + description: | + A user can retrieve catalogs to access collections by simple filtering and a limit. + tags: + - Capabilities + responses: + '200': + $ref: '#/components/responses/Collections' + '500': + $ref: '#/components/responses/ServerError' + post: + operationId: registerMetadata + summary: Register metadata about a collection of moving features + description: | + A user SHOULD register metadata about a collection of moving features into the system. + tags: + - MovingFeatureCollection + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/collection-2' + example: + title: moving_feature_collection_sample + updateFrequency: 1000 + description: example + responses: + '201': + description: Successful create a collection to manage moving features. 
+ headers: + Location: + description: A URI of the newly added resource + schema: + type: string + example: https://data.example.org/collections/mfc1 + content: + application/json: + schema: + type: object + required: + - id + properties: + id: + type: string + description: Newly added resource ID + example: + id: mfc-1 + '500': + $ref: '#/components/responses/ServerError' + /collections/{collectionId}: + get: + operationId: accessMetadata + summary: Access metadata about the collection + description: | + A user can access metadata with id `collectionId`. + tags: + - MovingFeatureCollection + parameters: + - $ref: '#/components/parameters/collectionId' + responses: + '200': + $ref: '#/components/responses/Collection' + '404': + description: A collection with the specified id was not found. + '500': + $ref: '#/components/responses/ServerError' + delete: + operationId: deleteCollection + summary: Delete the collection + description: | + The collection catalog with id `collectionId` and including metadata and moving features SHOULD be deleted. + tags: + - MovingFeatureCollection + parameters: + - $ref: '#/components/parameters/collectionId' + responses: + '204': + description: Successfully deleted. + '404': + description: A collection with the specified name was not found. + '500': + $ref: '#/components/responses/ServerError' + put: + operationId: replaceMetadata + summary: Replace metadata about the collection + description: | + A user SHOULD replace metadata with id `collectionId`. + + The request body schema is the same the POST's one. + + However, `updateFrequency` property is NOT updated. + tags: + - MovingFeatureCollection + parameters: + - $ref: '#/components/parameters/collectionId' + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/collection-2' + example: + title: moving_feature_collection_sample + updateFrequency: 1000 + description: example + responses: + '204': + description: Successfully replaced. 
+ '404': + description: A collection with the specified name was not found. + '500': + $ref: '#/components/responses/ServerError' + /collections/{collectionId}/items: + get: + operationId: retrieveMovingFeatures + summary: Retrieve moving feature collection + description: | + A user can retrieve moving feature collection to access the static information of the moving feature by simple filtering and a limit. + + Specifically, if the `subTrajectory` parameter is "true", it will return the temporal geometry within the time interval specified by `datetime` parameter. + tags: + - MovingFeatures + parameters: + - $ref: '#/components/parameters/collectionId' + - $ref: '#/components/parameters/bbox' + - $ref: '#/components/parameters/datetime' + - $ref: '#/components/parameters/limit' + - $ref: '#/components/parameters/subtrajectory-description' + responses: + '200': + $ref: '#/components/responses/MovingFeatures' + '404': + description: A collection with the specified id was not found. + '500': + $ref: '#/components/responses/ServerError' + post: + operationId: insertMovingFeatures + summary: Insert moving features + description: | + A user SHOULD insert a set of moving features or a moving feature into a collection with id `collectionId`. + + The request body schema SHALL follows the [MovingFeature object](https://docs.opengeospatial.org/is/19-045r3/19-045r3.html#mfeature) or + [MovingFeatureCollection object](https://docs.opengeospatial.org/is/19-045r3/19-045r3.html#mfeaturecollection) in the OGC MF-JSON. 
+ tags: + - MovingFeatures + parameters: + - $ref: '#/components/parameters/collectionId' + requestBody: + content: + application/json: + schema: + oneOf: + - $ref: '#/components/schemas/movingFeature-2' + - $ref: '#/components/schemas/movingFeatureCollection' + example: + type: Feature + crs: + type: Name + properties: + name: urn:ogc:def:crs:OGC:1.3:CRS84 + trs: + type: Link + properties: + type: OGCDEF + href: http://www.opengis.net/def/uom/ISO-8601/0/Gregorian + temporalGeometry: + type: MovingPoint + datetimes: + - '2011-07-14T22:01:01Z' + - '2011-07-14T22:01:02Z' + - '2011-07-14T22:01:03Z' + - '2011-07-14T22:01:04Z' + - '2011-07-14T22:01:05Z' + coordinates: + - - 139.757083 + - 35.627701 + - 0.5 + - - 139.757399 + - 35.627701 + - 2 + - - 139.757555 + - 35.627688 + - 4 + - - 139.757651 + - 35.627596 + - 4 + - - 139.757716 + - 35.627483 + - 4 + interpolation: Linear + base: + type: glTF + href: http://www.opengis.net/spec/movingfeatures/json/1.0/prism/example/car3dmodel.gltf + orientations: + - scales: + - 1 + - 1 + - 1 + angles: + - 0 + - 0 + - 0 + - scales: + - 1 + - 1 + - 1 + angles: + - 0 + - 355 + - 0 + - scales: + - 1 + - 1 + - 1 + angles: + - 0 + - 0 + - 330 + - scales: + - 1 + - 1 + - 1 + angles: + - 0 + - 0 + - 300 + - scales: + - 1 + - 1 + - 1 + angles: + - 0 + - 0 + - 270 + temporalProperties: + - datetimes: + - '2011-07-14T22:01:01.450Z' + - '2011-07-14T23:01:01.450Z' + - '2011-07-15T00:01:01.450Z' + length: + type: Measure + form: http://www.qudt.org/qudt/owl/1.0.0/quantity/Length + values: + - 1 + - 2.4 + - 1 + interpolation: Linear + discharge: + type: Measure + form: MQS + values: + - 3 + - 4 + - 5 + interpolation: Step + - datetimes: + - 1465621816590 + - 1465711526300 + camera: + type: Image + values: + - http://www.opengis.net/spec/movingfeatures/json/1.0/prism/example/image1 + - iVBORw0KGgoAAAANSUhEU...... 
+ interpolation: Discrete + labels: + type: Text + values: + - car + - human + interpolation: Discrete + geometry: + type: LineString + coordinates: + - - 139.757083 + - 35.627701 + - 0.5 + - - 139.757399 + - 35.627701 + - 2 + - - 139.757555 + - 35.627688 + - 4 + - - 139.757651 + - 35.627596 + - 4 + - - 139.757716 + - 35.627483 + - 4 + properties: + name: car1 + state: test1 + video: http://www.opengis.net/spec/movingfeatures/json/1.0/prism/example/video.mpeg + bbox: + - 139.757083 + - 35.627483 + - 0 + - 139.757716 + - 35.627701 + - 4.5 + time: + - '2011-07-14T22:01:01Z' + - '2011-07-15T01:11:22Z' + id: mf-1 + responses: + '201': + description: | + Successful create a set of moving features or a moving feature into a specific collection. + headers: + Location: + description: A list of URI of the newly added resources + schema: + type: array + items: + type: string + example: + - https://data.example.org/collections/mfc-1/items/mf-1 + - https://data.example.org/collections/mfc-1/items/109301273 + '400': + description: A query parameter was not validly used. + '404': + description: A collection with the specified id was not found. + '500': + $ref: '#/components/responses/ServerError' + /collections/{collectionId}/items/{mFeatureId}: + get: + operationId: accessMovingFeature + summary: Access the static data of the moving feature + description: | + A user can access a static data of a moving feature with id `mFeatureId`. + + The static data of a moving feature is not included temporal geometries and temporal properties. + tags: + - MovingFeatures + parameters: + - $ref: '#/components/parameters/collectionId' + - $ref: '#/components/parameters/mFeatureId' + responses: + '200': + $ref: '#/components/responses/MovingFeature' + '404': + description: | + - A collection with the specified id was not found. + - Or a moving feature with the specified id was not found. 
+ '500': + $ref: '#/components/responses/ServerError' + delete: + operationId: deleteMovingFeature + summary: Delete a single moving feature + description: | + The moving feature with id `mFeatureId` and including temporal geometries and properties SHOULD be deleted. + tags: + - MovingFeatures + parameters: + - $ref: '#/components/parameters/collectionId' + - $ref: '#/components/parameters/mFeatureId' + responses: + '204': + description: Successfully deleted. + '404': + description: | + - A collection with the specified id was not found. + - Or a moving feature with the specified id was not found. + '500': + $ref: '#/components/responses/ServerError' + /collections/{collectionId}/items/{mFeatureId}/tgsequence: + get: + operationId: retrieveTemporalGeometrySequence + summary: Retrieve the movement data of the single moving feature + description: | + A user can retrieve only the movement data of a moving feature with id `mFeatureId` by simple filtering and a limit. + tags: + - TemporalGeometry + parameters: + - $ref: '#/components/parameters/collectionId' + - $ref: '#/components/parameters/mFeatureId' + - $ref: '#/components/parameters/bbox' + - $ref: '#/components/parameters/datetime' + - $ref: '#/components/parameters/limit' + - $ref: '#/components/parameters/leaf-description' + - $ref: '#/components/parameters/subtrajectory-description' + responses: + '200': + $ref: '#/components/responses/TemporalGeometrySequence' + '500': + $ref: '#/components/responses/ServerError' + post: + operationId: insertTemporalPrimitiveGeometry + summary: Add movement data into the moving feature + description: | + A user SHOULD add more movement data into a moving feature with id `mFeatureId`. + + The request body schema SHALL follows the [TemporalPrimitiveGeometry object](https://docs.ogc.org/is/19-045r3/19-045r3.html#tprimitive) in the OGC MF-JSON. 
+ tags: + - TemporalGeometry + parameters: + - $ref: '#/components/parameters/collectionId' + - $ref: '#/components/parameters/mFeatureId' + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/temporalPrimitiveGeometry' + example: + type: MovingPoint + datetimes: + - '2011-07-14T22:01:06Z' + - '2011-07-14T22:01:07Z' + - '2011-07-14T22:01:08Z' + - '2011-07-14T22:01:09Z' + - '2011-07-14T22:01:10Z' + coordinates: + - - 139.757083 + - 35.627701 + - 0.5 + - - 139.757399 + - 35.627701 + - 2 + - - 139.757555 + - 35.627688 + - 4 + - - 139.757651 + - 35.627596 + - 4 + - - 139.757716 + - 35.627483 + - 4 + interpolation: Linear + base: + type: glTF + href: https://www.opengis.net/spec/movingfeatures/json/1.0/prism/example/car3dmodel.gltf + orientations: + - scales: + - 1 + - 1 + - 1 + angles: + - 0 + - 0 + - 0 + - scales: + - 1 + - 1 + - 1 + angles: + - 0 + - 355 + - 0 + - scales: + - 1 + - 1 + - 1 + angles: + - 0 + - 0 + - 330 + - scales: + - 1 + - 1 + - 1 + angles: + - 0 + - 0 + - 300 + - scales: + - 1 + - 1 + - 1 + angles: + - 0 + - 0 + - 270 + responses: + '201': + description: | + Successful add more movement data into a specified moving feature. + headers: + Location: + description: A URI of the newly added resource + schema: + type: string + example: https://data.example.org/collections/mfc-1/items/mf-1/tgsequence/tg-2 + '400': + description: A query parameter was not validly used. + '404': + description: | + - A collection with the specified id was not found. + - Or a moving feature with the specified id was not found. + '500': + $ref: '#/components/responses/ServerError' + /collections/{collectionId}/items/{mFeatureId}/tgsequence/{tGeometryId}: + delete: + operationId: deleteTemporalPrimitiveGeometry + summary: Delete a singe temporal primitive geometry + description: | + The temporal primitive geometry with id `tGeometryId` SHOULD be deleted. 
+ tags: + - TemporalGeometry + parameters: + - $ref: '#/components/parameters/collectionId' + - $ref: '#/components/parameters/mFeatureId' + - $ref: '#/components/parameters/tGeometryId' + responses: + '204': + description: Successfully deleted. + '404': + description: | + - A collection with the specified id was not found. + - Or a moving feature with the specified id was not found. + - Or a temporal primitive geometry with the specified id was not found. + '500': + $ref: '#/components/responses/ServerError' + /collections/{collectionId}/items/{mFeatureId}/tgsequence/{tGeometryId}/distance: + get: + operationId: getDistanceOfTemporalPrimitiveGeometry + summary: Get a time-to-distance curve of a temporal primitive geometry + description: | + A user can get time-to-distance curve of a temporal primitive geometry with id `tGeometryId`. + + When `date-time` query parameter is provided, this operation will return a single distance value according to the specified date and time in the `data-time` parameter. + tags: + - TemporalGeometryQuery + parameters: + - $ref: '#/components/parameters/collectionId' + - $ref: '#/components/parameters/mFeatureId' + - $ref: '#/components/parameters/tGeometryId' + - $ref: '#/components/parameters/date-time' + responses: + '200': + $ref: '#/components/responses/DistanceQuery' + '400': + description: A query parameter was not validly used. + '500': + $ref: '#/components/responses/ServerError' + /collections/{collectionId}/items/{mFeatureId}/tgsequence/{tGeometryId}/velocity: + get: + operationId: getVelocityOfTemporalPrimitiveGeometry + summary: Get a time-to-velocity curve of a temporal primitive geometry + description: | + A user can get time-to-velocity curve of a temporal primitive geometry with id `tGeometryId`. + + When `date-time` query parameter is provided, this operation will return a single velocity value according to the specified date and time in the `data-time` parameter. 
+ tags: + - TemporalGeometryQuery + parameters: + - $ref: '#/components/parameters/collectionId' + - $ref: '#/components/parameters/mFeatureId' + - $ref: '#/components/parameters/tGeometryId' + - $ref: '#/components/parameters/date-time' + responses: + '200': + $ref: '#/components/responses/VelocityQuery' + '400': + description: A query parameter was not validly used. + '500': + $ref: '#/components/responses/ServerError' + /collections/{collectionId}/items/{mFeatureId}/tgsequence/{tGeometryId}/acceleration: + get: + operationId: getAccelerationOfTemporalPrimitiveGeometry + summary: Get a time-to-acceleration curve of a temporal primitive geometry + description: | + A user can get time-to-acceleration curve of a temporal primitive geometry with id `tGeometryId`. + + When `date-time` query parameter is provided, this operation will return a single acceleration value according to the specified date and time in the `data-time` parameter. + tags: + - TemporalGeometryQuery + parameters: + - $ref: '#/components/parameters/collectionId' + - $ref: '#/components/parameters/mFeatureId' + - $ref: '#/components/parameters/tGeometryId' + - $ref: '#/components/parameters/date-time' + responses: + '200': + $ref: '#/components/responses/AccelerationQuery' + '400': + description: A query parameter was not validly used. + '500': + $ref: '#/components/responses/ServerError' + /collections/{collectionId}/items/{mFeatureId}/tproperties: + get: + operationId: retrieveTemporalProperties + summary: Retrieve a set of the temporal property data + description: | + A user can retrieve the static information of the temporal property data that included a single moving feature with id `mFeatureId`. + + The static data of a temporal property is not included temporal values (property `valueSequence`). + + Also a user can retrieve the sub sequence of the temporal information of the temporal property data for the specified time interval with `subTemporalValue` query parameter. 
+ In this case, `temporalProperties` property schema SHALL follows the [TemporalProperties object](https://docs.ogc.org/is/19-045r3/19-045r3.html#tproperties) in the OGC MF-JSON. + tags: + - TemporalProperty + parameters: + - $ref: '#/components/parameters/collectionId' + - $ref: '#/components/parameters/mFeatureId' + - $ref: '#/components/parameters/datetime' + - $ref: '#/components/parameters/limit' + - $ref: '#/components/parameters/subtemporalvalue-description' + responses: + '200': + $ref: '#/components/responses/TemporalProperties' + '400': + description: A query parameter was not validly used. + '500': + $ref: '#/components/responses/ServerError' + post: + operationId: insertTemporalProperty + summary: Add temporal property data + description: | + A user SHOULD add new temporal property data into a moving feature with id `mFeatureId`. + + The request body schema SHALL follows the [TemporalProperties object](https://docs.opengeospatial.org/is/19-045r3/19-045r3.html#tproperties) in the OGC MF-JSON. + tags: + - TemporalProperty + parameters: + - $ref: '#/components/parameters/collectionId' + - $ref: '#/components/parameters/mFeatureId' + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/temporalProperties' + example: + - datetimes: + - '2011-07-14T22:01:01.450Z' + - '2011-07-14T23:01:01.450Z' + - '2011-07-15T00:01:01.450Z' + length: + type: Measure + form: http://www.qudt.org/qudt/owl/1.0.0/quantity/Length + values: + - 1 + - 2.4 + - 1 + interpolation: Linear + discharge: + type: Measure + form: MQS + values: + - 3 + - 4 + - 5 + interpolation: Step + - datetimes: + - '2011-07-14T22:01:01.450Z' + - '2011-07-14T23:01:01.450Z' + camera: + type: Image + values: + - http://www.opengis.net/spec/movingfeatures/json/1.0/prism/example/image1 + - iVBORw0KGgoAAAANSUhEU...... 
+ interpolation: Discrete + labels: + type: Text + values: + - car + - human + interpolation: Discrete + responses: + '201': + description: | + Successful add more temporal property into a specified moving feature. + headers: + Locations: + description: A list of URI of the newly added resources + schema: + type: array + items: + type: string + example: + - https://data.example.org/collections/mfc-1/items/mf-1/tproperties/length + - https://data.example.org/collections/mfc-1/items/mf-1/tproperties/discharge + - https://data.example.org/collections/mfc-1/items/mf-1/tproperties/camera + - https://data.example.org/collections/mfc-1/items/mf-1/tproperties/labels + '404': + description: | + - A collection with the specified id was not found. + - Or a moving feature with the specified id was not found. + '500': + $ref: '#/components/responses/ServerError' + /collections/{collectionId}/items/{mFeatureId}/tproperties/{tPropertyName}: + get: + operationId: retrieveTemporalProperty + summary: Retrieve a temporal property + description: | + A user can retrieve only the temporal values with a specified name `tPropertyName` of temporal property. + tags: + - TemporalProperty + parameters: + - $ref: '#/components/parameters/collectionId' + - $ref: '#/components/parameters/mFeatureId' + - $ref: '#/components/parameters/tPropertyName' + - $ref: '#/components/parameters/datetime' + - $ref: '#/components/parameters/leaf-description' + - $ref: '#/components/parameters/subtemporalvalue-description' + responses: + '200': + $ref: '#/components/responses/TemporalProperty' + '400': + description: A query parameter was not validly used. + '500': + $ref: '#/components/responses/ServerError' + post: + operationId: insertTemporalPrimitiveValue + summary: Add temporal primitive value data + description: | + A user SHOULD add more temporal primitive value data into a temporal property with id `tPropertyName`. 
+ tags: + - TemporalProperty + parameters: + - $ref: '#/components/parameters/collectionId' + - $ref: '#/components/parameters/mFeatureId' + - $ref: '#/components/parameters/tPropertyName' + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/temporalPrimitiveValue' + example: + datetimes: + - '2011-07-15T08:00:00Z' + - '2011-07-15T08:00:01Z' + - '2011-07-15T08:00:02Z' + values: + - 0 + - 20 + - 50 + interpolation: Linear + responses: + '201': + description: | + Successful add more temporal primitive value data into a specified temporal property. + '404': + description: | + - A collection with the specified id was not found. + - Or a moving feature with the specified id was not found. + - Or a temporal property with the specified id was not found. + '500': + $ref: '#/components/responses/ServerError' + delete: + operationId: deleteTemporalProperty + summary: Delete a specified temporal property + description: | + The temporal property with id `tPropertyName` SHOULD be deleted. + tags: + - TemporalProperty + parameters: + - $ref: '#/components/parameters/collectionId' + - $ref: '#/components/parameters/mFeatureId' + - $ref: '#/components/parameters/tPropertyName' + responses: + '204': + description: Successfully deleted. + '404': + description: | + - A collection with the specified id was not found. + - Or a moving feature with the specified id was not found. + - Or a temporal property with the specified id was not found. 
+ '500': + $ref: '#/components/responses/ServerError' +components: + schemas: + link: + type: object + required: + - href + - rel + properties: + href: + type: string + example: http://data.example.com/buildings/123 + rel: + type: string + example: alternate + type: + type: string + example: application/geo+json + hreflang: + type: string + example: en + title: + type: string + example: Trierer Strasse 70, 53115 Bonn + length: + type: integer + landingPage: + type: object + required: + - links + properties: + title: + type: string + example: Moving features data server + description: + type: string + example: Access to data about moving features + links: + type: array + items: + $ref: '#/components/schemas/link' + exception: + type: object + required: + - code + properties: + code: + type: string + description: + type: string + confClasses: + type: object + required: + - conformsTo + properties: + conformsTo: + type: array + items: + type: string + example: + - http://www.opengis.net/spec/ogcapi-movingfeatures-1/1.0/conf/common + - http://www.opengis.net/spec/ogcapi-movingfeatures-1/1.0/conf/mf-collection + - http://www.opengis.net/spec/ogcapi-movingfeatures-1/1.0/conf/movingfeatures + extent: + description: |- + The extent of the features in the collection. In the Core only spatial and temporal + extents are specified. Extensions may add additional members to represent other + extents, for example, thermal or pressure ranges. + + An array of extents is provided for each extent type (spatial, temporal). The first item + in the array describes the overall extent of the data. All subsequent items describe more + precise extents, e.g., to identify clusters of data. Clients only interested in the + overall extent will only need to access the first extent in the array. + type: object + properties: + spatial: + description: The spatial extent of the features in the collection. 
+ type: object + properties: + bbox: + description: |- + One or more bounding boxes that describe the spatial extent of the dataset. + In the Core only a single bounding box is supported. + + Extensions may support additional areas. + The first bounding box describes the overall spatial + extent of the data. All subsequent bounding boxes describe + more precise bounding boxes, e.g., to identify clusters of data. + Clients only interested in the overall spatial extent will + only need to access the first bounding box in the array. + type: array + minItems: 1 + items: + description: |- + Each bounding box is provided as four or six numbers, depending on + whether the coordinate reference system includes a vertical axis + (height or depth): + + * Lower left corner, coordinate axis 1 + * Lower left corner, coordinate axis 2 + * Minimum value, coordinate axis 3 (optional) + * Upper right corner, coordinate axis 1 + * Upper right corner, coordinate axis 2 + * Maximum value, coordinate axis 3 (optional) + + If the value consists of four numbers, the coordinate reference system is + WGS 84 longitude/latitude (http://www.opengis.net/def/crs/OGC/1.3/CRS84) + unless a different coordinate reference system is specified in `crs`. + + If the value consists of six numbers, the coordinate reference system is WGS 84 + longitude/latitude/ellipsoidal height (http://www.opengis.net/def/crs/OGC/0/CRS84h) + unless a different coordinate reference system is specified in `crs`. + + For WGS 84 longitude/latitude the values are in most cases the sequence of + minimum longitude, minimum latitude, maximum longitude and maximum latitude. + However, in cases where the box spans the antimeridian the first value + (west-most box edge) is larger than the third value (east-most box edge). + + If the vertical axis is included, the third and the sixth number are + the bottom and the top of the 3-dimensional bounding box. 
+ + If a feature has multiple spatial geometry properties, it is the decision of the + server whether only a single spatial geometry property is used to determine + the extent or all relevant geometries. + type: array + oneOf: + - minItems: 4 + maxItems: 4 + - minItems: 6 + maxItems: 6 + items: + type: number + example: + - -180 + - -90 + - 180 + - 90 + crs: + description: |- + Coordinate reference system of the coordinates in the spatial extent + (property `bbox`). The default reference system is WGS 84 longitude/latitude. + In the Core the only other supported coordinate reference system is + WGS 84 longitude/latitude/ellipsoidal height for coordinates with height. + Extensions may support additional coordinate reference systems and add + additional enum values. + type: string + enum: + - http://www.opengis.net/def/crs/OGC/1.3/CRS84 + - http://www.opengis.net/def/crs/OGC/0/CRS84h + default: http://www.opengis.net/def/crs/OGC/1.3/CRS84 + temporal: + description: The temporal extent of the features in the collection. + type: object + properties: + interval: + description: |- + One or more time intervals that describe the temporal extent of the dataset. + In the Core only a single time interval is supported. + + Extensions may support multiple intervals. + The first time interval describes the overall + temporal extent of the data. All subsequent time intervals describe + more precise time intervals, e.g., to identify clusters of data. + Clients only interested in the overall temporal extent will only need + to access the first time interval in the array (a pair of lower and upper + bound instants). + type: array + minItems: 1 + items: + description: |- + Begin and end times of the time interval. The timestamps are in the + temporal coordinate reference system specified in `trs`. By default + this is the Gregorian calendar. + + The value `null` at start or end is supported and indicates a half-bounded interval. 
+ type: array + minItems: 2 + maxItems: 2 + items: + type: string + format: date-time + nullable: true + example: + - '2011-11-11T12:22:11Z' + - null + trs: + description: |- + Coordinate reference system of the coordinates in the temporal extent + (property `interval`). The default reference system is the Gregorian calendar. + In the Core this is the only supported temporal coordinate reference system. + Extensions may support additional temporal coordinate reference systems and add + additional enum values. + type: string + enum: + - http://www.opengis.net/def/uom/ISO-8601/0/Gregorian + default: http://www.opengis.net/def/uom/ISO-8601/0/Gregorian + collection: + type: object + required: + - id + - links + - itemType + properties: + id: + description: identifier of the collection used, for example, in URIs + type: string + example: address + title: + description: human readable title of the collection + type: string + example: address + description: + description: a description of the features in the collection + type: string + example: An address. + links: + type: array + items: + $ref: '#/components/schemas/link' + example: + - href: https://data.example.com/buildings + rel: item + - href: https://example.com/concepts/buildings.html + rel: describedby + type: text/html + extent: + $ref: '#/components/schemas/extent' + itemType: + description: indicator about the type of the items in the collection + type: string + default: movingfeature + crs: + description: the list of coordinate reference systems supported by the service + type: array + items: + type: string + default: + - https://www.opengis.net/def/crs/OGC/1.3/CRS84 + example: + - https://www.opengis.net/def/crs/OGC/1.3/CRS84 + - https://www.opengis.net/def/crs/EPSG/0/4326 + updateFrequency: + description: a time interval of sampling location. The unit is millisecond. 
+ type: number + collections: + type: object + required: + - collections + - links + properties: + collections: + type: array + items: + $ref: '#/components/schemas/collection' + links: + type: array + items: + $ref: '#/components/schemas/link' + collection-2: + type: object + required: + - itemType + properties: + title: + description: human readable title of the collection + type: string + updateFrequency: + description: a time interval of sampling location. The unit is millisecond. + type: number + description: + description: any description + type: string + itemType: + description: indicator about the type of the items in the moving features collection (the default value is 'movingfeature'). + type: string + default: movingfeature + motionCurve: + description: MF-JSON Prism encoding MotionCurve Object + title: MF-JSON MotionCurve + oneOf: + - type: string + enum: + - Discrete + - Step + - Linear + - Quadratic + - Cubic + default: Linear + - type: string + format: uri + namedCRS: + description: MF-JSON Prism encoding NamedCRS Object + title: MF-JSON NamedCRS + type: object + nullable: true + required: + - type + - properties + properties: + type: + type: string + enum: + - Name + properties: + type: object + additionalProperties: false + required: + - name + properties: + name: + type: string + default: urn:ogc:def:crs:OGC:1.3:CRS84 + linkedCRS: + description: MF-JSON Prism encoding LinkedCRS Object + title: MF-JSON LinkedCRS + type: object + nullable: true + required: + - type + - properties + properties: + type: + type: string + enum: + - Link + properties: + type: object + required: + - href + - type + properties: + href: + type: string + format: uri + type: + type: string + crs: + description: MF-JSON Prism encoding CoordinateReferenceSystem Object + title: MF-JSON CRS + oneOf: + - $ref: '#/components/schemas/namedCRS' + - $ref: '#/components/schemas/linkedCRS' + trs: + description: The "trs" member in MovingFeature object + title: MF-JSON TRS + oneOf: + - 
$ref: '#/components/schemas/linkedCRS' + - type: object + nullable: true + required: + - type + - properties + properties: + type: + type: string + enum: + - Name + properties: + type: object + additionalProperties: false + required: + - name + properties: + name: + type: string + default: urn:ogc:data:time:iso8601 + temporalPrimitiveGeometry: + description: MF-JSON Prism encoding TemporalPrimitiveGeometry Object + title: MF-JSON TemporalPrimitiveGeometry + type: object + required: + - type + - coordinates + - datetimes + properties: + type: + type: string + enum: + - MovingPoint + - MovingLineString + - MovingPolygon + - MovingPointCloud + coordinates: + type: array + minItems: 2 + items: + oneOf: + - title: pointGeoJSON coordinates + type: array + minItems: 2 + items: + type: number + - title: linestringGeoJSON coordinates + type: array + minItems: 2 + items: + type: array + minItems: 2 + items: + type: number + - title: polygonGeoJSON coordinates + type: array + items: + type: array + minItems: 4 + items: + type: array + minItems: 2 + items: + type: number + - title: multipointGeoJSON coordinates + type: array + items: + type: array + minItems: 2 + items: + type: number + datetimes: + type: array + uniqueItems: true + minItems: 2 + items: + type: string + interpolation: + $ref: '#/components/schemas/motionCurve' + base: + type: object + nullable: true + required: + - href + - type + properties: + href: + type: string + format: uri + type: + type: string + orientations: + type: array + nullable: true + items: + type: object + required: + - scales + - angles + properties: + scales: + type: array + oneOf: + - minItems: 2 + maxItems: 2 + - minItems: 3 + maxItems: 3 + items: + type: number + angles: + type: array + oneOf: + - minItems: 2 + maxItems: 2 + - minItems: 3 + maxItems: 3 + items: + type: number + crs: + $ref: '#/components/schemas/crs' + trs: + $ref: '#/components/schemas/trs' + temporalComplexGeometry: + description: MF-JSON Prism encoding 
TemporalComplexGeometry Object + title: MF-JSON TemporalComplexGeometry + type: object + required: + - type + - prisms + properties: + type: + type: string + default: MovingGeometryCollection + prisms: + type: array + items: + $ref: '#/components/schemas/temporalPrimitiveGeometry' + crs: + $ref: '#/components/schemas/crs' + trs: + $ref: '#/components/schemas/trs' + temporalGeometry: + description: MF-JSON Prism encoding TemporalGeometry Object + title: MF-JSON TemporalGeometry + oneOf: + - $ref: '#/components/schemas/temporalPrimitiveGeometry' + - $ref: '#/components/schemas/temporalComplexGeometry' + parametricValues: + description: MF-JSON Prism encoding ParametricValues Object + title: MF-JSON ParametricValues + type: object + required: + - datetimes + properties: + datetimes: + type: array + uniqueItems: true + minItems: 2 + items: + type: string + format: date-time + patternProperties: + ^S_: + oneOf: + - type: object + required: + - type + - values + properties: + type: + type: string + enum: + - Measure + values: + type: array + nullable: true + items: + type: number + interpolation: + oneOf: + - type: string + enum: + - Discrete + - Step + - Linear + - Regression + default: Linear + - type: string + format: uri + description: + type: string + form: + oneOf: + - type: string + minLength: 3 + maxLength: 3 + - type: string + format: uri + - type: object + required: + - type + - values + properties: + type: + type: string + enum: + - Text + values: + type: array + nullable: true + items: + oneOf: + - type: string + - type: boolean + interpolation: + oneOf: + - type: string + enum: + - Discrete + - Step + - Linear + - Regression + default: Linear + - type: string + format: uri + description: + type: string + - type: object + required: + - type + - values + properties: + type: + type: string + enum: + - Image + values: + type: array + nullable: true + items: + type: string + interpolation: + oneOf: + - type: string + enum: + - Discrete + - Step + - Linear + - 
Regression + default: Linear + - type: string + format: uri + description: + type: string + temporalProperties: + description: MF-JSON Prism encoding TemporalProperties Object + title: MF-JSON TemporalProperties + type: array + nullable: true + items: + $ref: '#/components/schemas/parametricValues' + bbox: + description: MF-JSON Prism encoding BoundingBox Object + title: MF-JSON BoundingBox + type: array + minItems: 4 + nullable: true + items: + type: number + lifeSpan: + description: MF-JSON Prism encoding LifeSpan Object + title: MF-JSON LifeSpan + type: array + minItems: 2 + maxItems: 2 + uniqueItems: true + items: + type: string + nullable: true + pointGeoJSON: + type: object + required: + - type + - coordinates + properties: + type: + type: string + enum: + - Point + coordinates: + type: array + minItems: 2 + items: + type: number + multipointGeoJSON: + type: object + required: + - type + - coordinates + properties: + type: + type: string + enum: + - MultiPoint + coordinates: + type: array + items: + type: array + minItems: 2 + items: + type: number + linestringGeoJSON: + type: object + required: + - type + - coordinates + properties: + type: + type: string + enum: + - LineString + coordinates: + type: array + minItems: 2 + items: + type: array + minItems: 2 + items: + type: number + multilinestringGeoJSON: + type: object + required: + - type + - coordinates + properties: + type: + type: string + enum: + - MultiLineString + coordinates: + type: array + items: + type: array + minItems: 2 + items: + type: array + minItems: 2 + items: + type: number + polygonGeoJSON: + type: object + required: + - type + - coordinates + properties: + type: + type: string + enum: + - Polygon + coordinates: + type: array + items: + type: array + minItems: 4 + items: + type: array + minItems: 2 + items: + type: number + multipolygonGeoJSON: + type: object + required: + - type + - coordinates + properties: + type: + type: string + enum: + - MultiPolygon + coordinates: + type: array + 
items: + type: array + items: + type: array + minItems: 4 + items: + type: array + minItems: 2 + items: + type: number + geometryGeoJSON: + oneOf: + - $ref: '#/components/schemas/pointGeoJSON' + - $ref: '#/components/schemas/multipointGeoJSON' + - $ref: '#/components/schemas/linestringGeoJSON' + - $ref: '#/components/schemas/multilinestringGeoJSON' + - $ref: '#/components/schemas/polygonGeoJSON' + - $ref: '#/components/schemas/multipolygonGeoJSON' + - $ref: '#/components/schemas/geometrycollectionGeoJSON' + geometrycollectionGeoJSON: + type: object + required: + - type + - geometries + properties: + type: + type: string + enum: + - GeometryCollection + geometries: + type: array + items: + $ref: '#/components/schemas/geometryGeoJSON' + movingFeature: + type: object + required: + - id + - type + properties: + type: + type: string + enum: + - Feature + temporalGeometry: + $ref: '#/components/schemas/temporalGeometry' + temporalProperties: + $ref: '#/components/schemas/temporalProperties' + crs: + $ref: '#/components/schemas/crs' + trs: + $ref: '#/components/schemas/trs' + bbox: + $ref: '#/components/schemas/bbox' + time: + $ref: '#/components/schemas/lifeSpan' + geometry: + $ref: '#/components/schemas/geometryGeoJSON' + properties: + type: object + nullable: true + id: + description: An identifier for the feature + oneOf: + - type: string + - type: integer + links: + type: array + items: + $ref: '#/components/schemas/link' + movingFeatures: + type: object + required: + - type + - features + properties: + type: + type: string + enum: + - FeatureCollection + features: + type: array + nullable: true + items: + $ref: '#/components/schemas/movingFeature' + crs: + $ref: '#/components/schemas/crs' + trs: + $ref: '#/components/schemas/trs' + bbox: + $ref: '#/components/schemas/bbox' + time: + $ref: '#/components/schemas/lifeSpan' + links: + type: array + items: + $ref: '#/components/schemas/link' + timeStamp: + type: string + format: date-time + numberMatched: + type: integer 
+ minimum: 0 + numberReturned: + type: integer + minimum: 0 + movingFeature-2: + description: MF-JSON Prism encoding MovingFeature Object + title: MF-JSON MovingFeature + type: object + required: + - type + - temporalGeometry + properties: + type: + type: string + enum: + - Feature + temporalGeometry: + $ref: '#/components/schemas/temporalGeometry' + temporalProperties: + $ref: '#/components/schemas/temporalProperties' + crs: + $ref: '#/components/schemas/crs' + trs: + $ref: '#/components/schemas/trs' + bbox: + $ref: '#/components/schemas/bbox' + time: + $ref: '#/components/schemas/lifeSpan' + geometry: + $ref: '#/components/schemas/geometryGeoJSON' + properties: + type: object + nullable: true + id: + description: An identifier for the feature + oneOf: + - type: string + - type: integer + movingFeatureCollection: + description: MF-JSON Prism encoding MovingFeatureCollection Object + title: MF-JSON MovingFeatureCollection + type: object + required: + - type + - features + properties: + type: + type: string + enum: + - FeatureCollection + features: + type: array + minItems: 1 + items: + $ref: '#/components/schemas/movingFeature-2' + crs: + $ref: '#/components/schemas/crs' + trs: + $ref: '#/components/schemas/trs' + bbox: + $ref: '#/components/schemas/bbox' + time: + $ref: '#/components/schemas/lifeSpan' + label: + type: string + nullable: true + temporalGeometrySequence: + type: object + required: + - type + - geometrySequence + properties: + type: + type: string + enum: + - TemporalGeometrySequence + geometrySequence: + type: array + items: + $ref: '#/components/schemas/temporalPrimitiveGeometry' + links: + type: array + items: + $ref: '#/components/schemas/link' + timeStamp: + type: string + format: date-time + numberMatched: + type: integer + minimum: 0 + numberReturned: + type: integer + minimum: 0 + temporalPrimitiveValue: + type: object + required: + - datetimes + - values + - interpolation + properties: + datetimes: + type: array + uniqueItems: true + 
minItems: 2 + items: + type: string + format: date-time + values: + oneOf: + - type: number + - type: string + - type: boolean + interpolation: + type: string + enum: + - Discrete + - Step + - Linear + - Regression + temporalProperty: + type: object + required: + - name + - type + properties: + name: + type: string + type: + type: string + enum: + - TBoolean + - TText + - TInteger + - TReal + - TImage + form: + oneOf: + - type: string + format: uri + - type: string + minLength: 3 + maxLength: 3 + valueSequence: + type: array + uniqueItems: true + items: + $ref: '#/components/schemas/temporalPrimitiveValue' + description: + type: string + links: + type: array + items: + $ref: '#/components/schemas/link' + temporalProperties-2: + type: object + required: + - temporalProperties + properties: + temporalProperties: + oneOf: + - $ref: '#/components/schemas/temporalProperties' + - type: array + items: + $ref: '#/components/schemas/temporalProperty' + links: + type: array + items: + $ref: '#/components/schemas/link' + timeStamp: + type: string + format: date-time + numberMatched: + type: integer + minimum: 0 + numberReturned: + type: integer + minimum: 0 + responses: + LandingPage: + description: The links to the API capabilities. + content: + application/json: + schema: + $ref: '#/components/schemas/landingPage' + ServerError: + description: A server error occurred. + content: + application/json: + schema: + $ref: '#/components/schemas/exception' + example: + code: '500' + description: Server Internal Error + Conformance: + description: The URIs of all requirements classes supported by the server. + content: + application/json: + schema: + $ref: '#/components/schemas/confClasses' + Collections: + description: A list of catalogs about collections of moving features. 
+ content: + application/json: + schema: + $ref: '#/components/schemas/collections' + example: + collections: + - id: mfc-1 + title: MovingFeatureCollection_1 + description: a collection of moving features to manage data in a distinct (physical or logical) space + itemType: movingfeature + updateFrequency: 1000 + extent: + spatial: + bbox: + - -180 + - -90 + - 190 + - 90 + crs: http://www.opengis.net/def/crs/OGC/1.3/CRS84 + temporal: + interval: + - '2011-11-11T12:22:11Z' + - '2012-11-24T12:32:43Z' + trs: http://www.opengis.net/def/uom/ISO-8601/0/Gregorian + links: + - href: https://data.example.org/collections/mfc-1 + rel: self + type: application/json + links: + - href: https://data.example.org/collections + rel: self + type: application/json + Collection: + description: The metadata being returned. + content: + application/json: + schema: + $ref: '#/components/schemas/collection' + example: + id: mfc-1 + title: moving_feature_collection_sample + itemType: movingfeature + updateFrequency: 1000 + extent: + spatial: + bbox: + - -180 + - -90 + - 190 + - 90 + crs: + - http://www.opengis.net/def/crs/OGC/1.3/CRS84 + temporal: + interval: + - '2011-11-11T12:22:11Z' + - '2012-11-24T12:32:43Z' + trs: + - http://www.opengis.net/def/uom/ISO-8601/0/Gregorian + links: + - href: https://data.example.org/collections/mfc-1 + rel: self + type: application/json + MovingFeatures: + description: A list of static data of moving feature. 
+ content: + application/json: + schema: + $ref: '#/components/schemas/movingFeatures' + example: + type: FeatureCollection + features: + - id: mf-1 + type: Feature + geometry: + type: LineString + coordinates: + - - 139.757083 + - 35.627701 + - 0.5 + - - 139.757399 + - 35.627701 + - 2 + - - 139.757555 + - 35.627688 + - 4 + - - 139.757651 + - 35.627596 + - 4 + - - 139.757716 + - 35.627483 + - 4 + properties: + label: car + state: test1 + video: http://www.opengis.net/spec/movingfeatures/json/1.0/prism/example/video.mpeg + bbox: + - 139.757083 + - 35.627483 + - 0 + - 139.757716 + - 35.627701 + - 4.5 + time: + - '2011-07-14T22:01:01Z' + - '2011-07-15T01:11:22Z' + crs: + type: Name + properties: urn:ogc:def:crs:OGC:1.3:CRS84 + trs: + type: Name + properties: urn:ogc:data:time:iso8601 + crs: + type: Name + properties: urn:ogc:def:crs:OGC:1.3:CRS84 + trs: + type: Name + properties: urn:ogc:data:time:iso8601 + links: + - href: https://data.example.org/collections/mfc-1/items + rel: self + type: application/geo+json + - href: https://data.example.org/collections/mfc-1/items&offset=1&limit=1 + rel: next + type: application/geo+json + timeStamp: '2020-01-01T12:00:00Z' + numberMatched: 100 + numberReturned: 1 + MovingFeature: + description: A moving feature static data. 
+ content: + application/geo+json: + schema: + $ref: '#/components/schemas/movingFeature' + example: + id: mf-1 + type: Feature + geometry: + type: LineString + coordinates: + - - 139.757083 + - 35.627701 + - 0.5 + - - 139.757399 + - 35.627701 + - 2 + - - 139.757555 + - 35.627688 + - 4 + - - 139.757651 + - 35.627596 + - 4 + - - 139.757716 + - 35.627483 + - 4 + properties: + name: car1 + state: test1 + video: http://www.opengis.net/spec/movingfeatures/json/1.0/prism/example/video.mpeg + bbox: + - 139.757083 + - 35.627483 + - 0 + - 139.757716 + - 35.627701 + - 4.5 + time: + - '2011-07-14T22:01:01Z' + - '2011-07-15T01:11:22Z' + crs: + type: Name + properties: urn:ogc:def:crs:OGC:1.3:CRS84 + trs: + type: Name + properties: urn:ogc:data:time:iso8601 + TemporalGeometrySequence: + description: A TemporalGeometrySequence data. + content: + application/json: + schema: + $ref: '#/components/schemas/temporalGeometrySequence' + example: + type: TemporalGeometrySequence + geometrySequence: + - id: tg-1 + type: MovingPoint + datetimes: + - '2011-07-14T22:01:02Z' + - '2011-07-14T22:01:03Z' + - '2011-07-14T22:01:04Z' + coordinates: + - - 139.757399 + - 35.627701 + - 2 + - - 139.757555 + - 35.627688 + - 4 + - - 139.757651 + - 35.627596 + - 4 + interpolation: Linear + base: + type: glTF + href: https://www.opengis.net/spec/movingfeatures/json/1.0/prism/example/car3dmodel.gltf + orientations: + - scales: + - 1 + - 1 + - 1 + angles: + - 0 + - 355 + - 0 + - scales: + - 1 + - 1 + - 1 + angles: + - 0 + - 0 + - 330 + - scales: + - 1 + - 1 + - 1 + angles: + - 0 + - 0 + - 300 + crs: + type: Name + properties: urn:ogc:def:crs:OGC:1.3:CRS84 + trs: + type: Name + properties: urn:ogc:data:time:iso8601 + links: + - href: https://data.example.org/collections/mfc-1/items/mf-1/tgsequence + rel: self + type: application/json + - href: https://data.example.org/collections/mfc-1/items/mf-1/tgsequence&offset=10&limit=1 + rel: next + type: application/json + timeStamp: '2021-09-01T12:00:00Z' + 
numberMatched: 100 + numberReturned: 1 + DistanceQuery: + description: A temporal property data that represents a time-to-distance curve of specified temporal primitive geometry. + content: + application/json: + schema: + $ref: '#/components/schemas/temporalProperty' + example: + name: distance + type: TReal + form: MTR + valueSequence: + - datetimes: + - '2011-07-15T08:00:00Z' + - '2011-07-15T08:00:01Z' + - '2011-07-15T08:00:02Z' + values: + - 0 + - 10 + - 20 + interpolation: Linear + VelocityQuery: + description: A temporal property data that represents a time-to-velocity curve of specified temporal primitive geometry. + content: + application/json: + schema: + $ref: '#/components/schemas/temporalProperty' + example: + name: velocity + type: TReal + form: KMH + valueSequence: + - datetimes: + - '2011-07-15T08:00:00Z' + - '2011-07-15T08:00:01Z' + - '2011-07-15T08:00:02Z' + values: + - 0 + - 10 + - 20 + interpolation: Linear + AccelerationQuery: + description: A temporal property data that represents a time-to-acceleration curve of specified temporal primitive geometry. + content: + application/json: + schema: + $ref: '#/components/schemas/temporalProperty' + example: + name: acceleration + type: TReal + form: KMH + valueSequence: + - datetimes: + - '2011-07-15T08:00:00Z' + - '2011-07-15T08:00:01Z' + - '2011-07-15T08:00:02Z' + values: + - 0 + - 10 + - 20 + interpolation: Linear + TemporalProperties: + description: A list of static (or temporal) data of TemporalProperty. 
+ content: + application/json: + schema: + $ref: '#/components/schemas/temporalProperties-2' + example: + temporalProperties: + - datetimes: + - '2011-07-14T22:01:06.000Z' + - '2011-07-14T22:01:07.000Z' + - '2011-07-14T22:01:08.000Z' + length: + type: Measure + form: http://www.qudt.org/qudt/owl/1.0.0/quantity/Length + values: + - 1 + - 2.4 + - 1 + interpolation: Linear + speed: + type: Measure + form: KMH + values: + - 65 + - 70 + - 80 + interpolation: Linear + links: + - href: https://data.example.org/collections/mfc-1/items/mf-1/tproperties + rel: self + type: application/json + - href: https://data.example.org/collections/mfc-1/items/mf-1/tproperties&offset=2&limit=2 + rel: next + type: application/json + timeStamp: '2021-09-01T12:00:00Z' + numberMatched: 10 + numberReturned: 2 + TemporalProperty: + description: A (subsequence of) the temporal property data. + content: + application/json: + schema: + $ref: '#/components/schemas/temporalProperty' + example: + name: speed + type: TReal + form: KMH + valueSequence: + - datetimes: + - '2011-07-15T08:00:00Z' + - '2011-07-15T08:00:01Z' + - '2011-07-15T08:00:02Z' + values: + - 0 + - 20 + - 50 + interpolation: Linear + parameters: + collectionId: + name: collectionId + in: path + description: local identifier of a collection + required: true + schema: + type: string + bbox: + name: bbox + in: query + description: |- + Only features that have a geometry that intersects the bounding box are selected. 
+ The bounding box is provided as four or six numbers, depending on whether the + coordinate reference system includes a vertical axis (height or depth): + + * Lower left corner, coordinate axis 1 + * Lower left corner, coordinate axis 2 + * Minimum value, coordinate axis 3 (optional) + * Upper right corner, coordinate axis 1 + * Upper right corner, coordinate axis 2 + * Maximum value, coordinate axis 3 (optional) + + If the value consists of four numbers, the coordinate reference system is + WGS 84 longitude/latitude (http://www.opengis.net/def/crs/OGC/1.3/CRS84) + unless a different coordinate reference system is specified in the parameter `bbox-crs`. + + If the value consists of six numbers, the coordinate reference system is WGS 84 + longitude/latitude/ellipsoidal height (http://www.opengis.net/def/crs/OGC/0/CRS84h) + unless a different coordinate reference system is specified in the parameter `bbox-crs`. + + The query parameter `bbox-crs` is specified in OGC API - Features - Part 2: Coordinate + Reference Systems by Reference. + + For WGS 84 longitude/latitude the values are in most cases the sequence of + minimum longitude, minimum latitude, maximum longitude and maximum latitude. + However, in cases where the box spans the antimeridian the first value + (west-most box edge) is larger than the third value (east-most box edge). + + If the vertical axis is included, the third and the sixth number are the + bottom and the top of the 3-dimensional bounding box. + + If a feature has multiple spatial geometry properties, it is the decision of the + server whether only a single spatial geometry property is used to determine + the extent or all relevant geometries. + required: false + schema: + type: array + oneOf: + - minItems: 4 + maxItems: 4 + - minItems: 6 + maxItems: 6 + items: + type: number + style: form + explode: false + datetime: + name: datetime + in: query + description: |- + Either a date-time or an interval. Date and time expressions adhere to RFC 3339. 
+ Intervals may be bounded or half-bounded (double-dots at start or end). + + Examples: + + * A date-time: "2018-02-12T23:20:50Z" + * A bounded interval: "2018-02-12T00:00:00Z/2018-03-18T12:31:12Z" + * Half-bounded intervals: "2018-02-12T00:00:00Z/.." or "../2018-03-18T12:31:12Z" + + Only features that have temporal information that intersects the value of `datetime` are selected. + + If a feature has multiple temporal properties, it is the decision of the server whether only a single temporal property is used to determine the extent or all relevant temporal properties. + required: true + schema: + type: string + style: form + explode: false + limit: + name: limit + in: query + description: |- + The optional limit parameter limits the number of items that are presented in the response document. + + Only items are counted that are on the first level of the collection in the response document. + Nested objects contained within the explicitly requested items shall not be counted. + + Minimum = 1. Maximum = 10000. Default = 10. + required: false + schema: + type: integer + minimum: 1 + maximum: 10000 + default: 10 + style: form + explode: false + subtrajectory-description: + name: subTrajectory + in: query + required: false + description: |- + The `subTrajectory` parameter is a boolean value used with the `datetime` parameter. + If the `subTrajectory` is "true", + + * the `datetime` must be a bounded interval, not half-bounded intervals or a date-time. + * the `datetime` represents a specified time interval (new start time and new end time) + * only features with a temporal geometry intersecting the given time interval will return. + + The `subTrajectory` query implements *subTrajectory* operation, which is defined in the [OGC Moving Feature Access](https://docs.ogc.org/is/16-120r3/16-120r3.html). 
+ This operation returns only a subsequence of temporal geometry within a time interval contained in the `datetime` parameter, using interpolated trajectory according to the `interpolation` property. + + If the `subTrajectory` parameter is provided with a `bbox` parameter, it will only apply to resources that intersect with a `bbox` parameter. + + The `subTrajectory` parameter must not be used with the `leaf` parameter. + Only one of these parameters can be used in the HTTP GET operation. + schema: + type: boolean + style: form + explode: false + mFeatureId: + name: mFeatureId + in: path + description: local identifier of a moving feature + required: true + schema: + type: string + leaf-description: + name: leaf + in: query + required: false + description: |- + The `leaf` is provided as a sequence of monotonic increasing instants with date-time strings. + Only features that have a temporal geometry and property that intersects the given date-time are selected. + + The `leaf` operation implements *_pointAtTime_* operation which defined in the OGC Moving Feature Access. + This operation returns only temporal geometry coordinates (or temporal property values) + at each date-time included in the `leaf` parameter, using interpolated trajectory according to the `interpolation` property. + + If the `leaf` parameter is provided with a `bbox` or (and) a `datetime` parameter, + it will only apply to resources that intersect with a `bbox` or (and) a `datetime` parameter. + + The `leaf` parameter shall not be used with the `subTrajectory` and `subTemporalValue` parameter. + Only one of those parameters can be used in the HTTP GET operation. 
+ schema: + type: array + uniqueItems: true + minItems: 1 + items: + type: string + format: date-time + style: form + explode: false + tGeometryId: + name: tGeometryId + in: path + description: local identifier of a temporal primitive geometry + required: true + schema: + type: string + date-time: + name: date-time + in: query + description: |- + A date-time. Date and time expressions adhere to RFC 3339. + + Examples: + + * A date-time: "2018-02-12T23:20:50Z" + + The date-time parameter defines the specified date and time to return the temporal value from the time-to-distance (or time-to-velocity or time-to-acceleration) curve. + required: false + schema: + type: string + format: date-time + style: form + explode: false + subtemporalvalue-description: + name: subTemporalValue + in: query + required: false + description: |- + The `subTemporalValue` parameter is a boolean value used with the `datetime` parameter. + If the `subTemporalValue` is "true", + + * the `datetime` must be a bounded interval, not half-bounded intervals or a date-time. + * the `datetime` represents a specified time interval (new start time and new end time) + * only features with a temporal property intersecting the given time interval will return. + * it returns only the subsequence of temporal property value within a time interval contained in the `subTemporalValue` parameter, using an interpolated time-to-value curve of temporal property according to the `interpolation` property. + + The `subTemporalValue` parameter must not be used with the `leaf` parameter. + Only one of these parameters can be used in the HTTP GET operation. 
+ schema: + type: boolean + style: form + explode: false + tPropertyName: + name: tPropertyName + in: path + description: local identifier of a temporal property + required: true + schema: + type: string diff --git a/tests/test_postgresql_mobilitydb.py b/tests/test_postgresql_mobilitydb.py index 92126c05d..f3f82d39a 100644 --- a/tests/test_postgresql_mobilitydb.py +++ b/tests/test_postgresql_mobilitydb.py @@ -491,12 +491,13 @@ def test_query_post_temporalvalue(context, temporalvalue_data): pmdb_provider = PostgresMobilityDB() pmdb_provider.connect() - pmdb_provider.post_temporalvalue(context.get('collection_id'), - context.get('mfeature_id'), - context.get('tProperty_name'), - temporalvalue_data) + tvalue_id = pmdb_provider.post_temporalvalue(context.get('collection_id'), + context.get('mfeature_id'), + context.get('tProperty_name'), + temporalvalue_data) - assert True + assert tvalue_id is not None + context['tvalue_id'] = tvalue_id def test_query_put_collection(context, update_collection_property): @@ -795,9 +796,23 @@ def test_query_get_acceleration(context, assert value_sequence is not None +def test_query_delete_temporalvalue(context): + restriction = "AND tvalue_id ='{0}'".format( + context.get('tvalue_id')) + + pmdb_provider = PostgresMobilityDB() + pmdb_provider.connect() + pmdb_provider.delete_temporalvalue(restriction) + + assert True + + def test_query_delete_temporalproperties(context): - restriction = "AND tproperties_name ='{0}'".format( - context.get('tProperty_name')) + restriction = """AND collection_id ='{0}' AND mfeature_id ='{1}' + AND tproperties_name ='{2}'""".format( + context.get('collection_id'), + context.get('mfeature_id'), + context.get('tProperties_name')) pmdb_provider = PostgresMobilityDB() pmdb_provider.connect() From a7bb08d85e830aece83cb78c0a0125d82af57605 Mon Sep 17 00:00:00 2001 From: ts-shimizu <ts-shimizu@apptec.co.jp> Date: Tue, 26 Nov 2024 18:52:15 +0900 Subject: [PATCH 08/14] About Data publishing (MF-API) (#2) Creating of 
MF-API documentation * Creating RST files * Creating test files used in the documentation * Adjustment of DB settings --- docs/source/data-publishing/index.rst | 1 + docs/source/data-publishing/ogcapi-mfapi.rst | 125 ++++++++++ docs/source/tour.rst | 64 +++++ docs/source/transactions.rst | 5 + pygeoapi/provider/postgresql_mobilitydb.py | 29 ++- tests/data/mfapi_moving_feature.json | 219 ++++++++++++++++++ tests/data/mfapi_temporal_geometry.json | 104 +++++++++ tests/data/mfapi_temporal_properties.json | 51 ++++ .../mfapi_temporal_property_value_data.json | 13 ++ 9 files changed, 601 insertions(+), 10 deletions(-) create mode 100644 docs/source/data-publishing/ogcapi-mfapi.rst create mode 100644 tests/data/mfapi_moving_feature.json create mode 100644 tests/data/mfapi_temporal_geometry.json create mode 100644 tests/data/mfapi_temporal_properties.json create mode 100644 tests/data/mfapi_temporal_property_value_data.json diff --git a/docs/source/data-publishing/index.rst b/docs/source/data-publishing/index.rst index 78680c7f2..c92c7258f 100644 --- a/docs/source/data-publishing/index.rst +++ b/docs/source/data-publishing/index.rst @@ -26,6 +26,7 @@ return back data to the pygeoapi API framework in a plug and play fashion. ogcapi-records ogcapi-edr stac + ogcapi-mfapi .. seealso:: diff --git a/docs/source/data-publishing/ogcapi-mfapi.rst b/docs/source/data-publishing/ogcapi-mfapi.rst new file mode 100644 index 000000000..f459d4fa3 --- /dev/null +++ b/docs/source/data-publishing/ogcapi-mfapi.rst @@ -0,0 +1,125 @@ +.. _ogcapi-mfapi: + +Publishing data to OGC API - MF-API +===================================== + +`OGC API - MF-API`_ provides a uniform way to access, communicate, and +manage data about moving features across different applications, data providers, +and data consumers. + +To add moving features data to pygeoapi for standard interfaces, +which is defined in the OGC API - MovingFeatures - Part 1: Core, 
+you can use the dataset example in `Building Blocks specified in OGC API - Moving Features - Part 1 Core (1.0.0)`_ +as a baseline and modify accordingly. + +Configuration +------------- + +In order to register data for Moving features, the DB must be created and the related tables must be initially set up. + + +PostgreSQL +^^^^^^^^^^ +.. note:: + Requires Python packages pymeos + +Must have PostGIS installed and uuid-ossp + +.. code-block:: yaml + + server: + manager: + name: PostgreSQL + connection: + host: localhost + port: 5432 + database: mobilitydb + user: postgres + password: ${POSTGRESQL_PASSWORD:-postgres} + +.. note:: + To run the process, create a table with `DDL <https://github.com/ogi-ts-shimizu/pygeoapi-ogi-mf-api/blob/mf-api-updates/tests/data/mf-api.sql>`_ + + +.. code-block:: sh + + psql -U postgres -h 127.0.0.1 -p 5432 mobilitydb < tests/data/mf-api.sql + + +Processing examples +------------------- + +.. note:: + `Here <https://github.com/ogi-ts-shimizu/pygeoapi-ogi-mf-api/tree/mf-api-updates/tests/data>`_ is the sample data specified by the -d option of the curl command. + +.. code-block:: sh + + # Register metadata about a collection of moving features. + curl -X POST http://localhost:5000/collections \ + -H "Content-Type: application/json" \ + -d "{\"title\": \"moving_feature_collection_sample\", + \"updateFrequency\": 1000, + \"description\": \"example\", + \"itemType\": \"movingfeature\" + }" + + # Retrieve catalogs of a moving features collection. + curl http://localhost:5000/collections + + + # Insert a set of moving features or a moving feature into a collection with id {collectionId}. + curl -X POST http://localhost:5000/collections/{collectionId}/items \ + -H "Content-Type: application/json" \ + -d @mfapi_moving_feature.json + + # Access a static data of a moving feature with id {mFeatureId}. + curl http://localhost:5000/collections/{collectionId}/items/{mFeatureId} + + # Add more movement data into a moving feature with id {mFeatureId}. 
+ curl -X POST http://localhost:5000/collections/{collectionId}/items/{mFeatureId}/tgsequence \ + -H "Content-Type: application/json" \ + -d @mfapi_temporal_geometry.json + + # Retrieve the movement data of the single moving feature + curl http://localhost:5000/collections/{collectionId}/items/{mFeatureId}/tgsequence + + # Get a time-to-(distance,velocity,acceleration) curve of a temporal primitive geometry + curl http://localhost:5000/collections/{collectionId}/items/{mFeatureId}/tgsequence/{tGeometryId}/distance + curl http://localhost:5000/collections/{collectionId}/items/{mFeatureId}/tgsequence/{tGeometryId}/velocity + curl http://localhost:5000/collections/{collectionId}/items/{mFeatureId}/tgsequence/{tGeometryId}/acceleration + + # Add new temporal property data into a moving feature with id {mFeatureId}. + curl -X POST http://localhost:5000/collections/{collectionId}/items/{mFeatureId}/tproperties \ + -H "Content-Type: application/json" \ + -d @mfapi_temporal_properties.json + + # Retrieve a set of the temporal property data + curl http://localhost:5000/collections/{collectionId}/items/{mFeatureId}/tproperties + + # Add temporal primitive value data. 
+ curl -X POST http://localhost:5000/collections/{collectionId}/items/{mFeatureId}/tproperties/{tPropertyName} \ + -H "Content-Type: application/json" \ + -d @mfapi_temporal_property_value_data.json + + # Retrieve a set of the temporal property data + curl http://localhost:5000/collections/{collectionId}/items/{mFeatureId}/tproperties/{tPropertyName} + + # Delete a single temporal primitive value + curl -X DELETE http://localhost:5000/collections/{collectionId}/items/{mFeatureId}/tproperties/{tPropertyName}/{tValueId} + + # Delete a specified temporal property + curl -X DELETE http://localhost:5000/collections/{collectionId}/items/{mFeatureId}/tproperties/{tPropertyName} + + # Delete a single temporal primitive geometry + curl -X DELETE http://localhost:5000/collections/{collectionId}/items/{mFeatureId}/tgsequence/{tGeometryId} + + # Delete a single moving feature + curl -X DELETE http://localhost:5000/collections/{collectionId}/items/{mFeatureId} + + # Delete the collection + curl -X DELETE http://localhost:5000/collections/{collectionId} + + +.. _`OGC API - MF-API`: https://github.com/aistairc/pygeoapi-mf-api +.. _`Building Blocks specified in OGC API - Moving Features - Part 1 Core (1.0.0)`: https://developer.ogc.org/api/movingfeatures/index.html#tag/MovingFeatureCollection/operation/registerMetadata +.. _`see website`: https://mobilitydb.com/ \ No newline at end of file diff --git a/docs/source/tour.rst b/docs/source/tour.rst index 4fd4e51da..7fb1a30d6 100644 --- a/docs/source/tour.rst +++ b/docs/source/tour.rst @@ -253,3 +253,67 @@ discover what is supported by the server. .. _`Toronto, Ontario, Canada`: https://en.wikipedia.org/wiki/Toronto .. _`Swagger`: https://en.wikipedia.org/wiki/Swagger_(software) .. _`curl`: https://curl.se + + +MF-API Tour +----------- +The OGC API - Moving Features Standard is an extension of the OGC API - Common and the OGC API - Features Standards. 
+MovingFeatures – Part 1: Core is described `here <https://github.com/opengeospatial/ogcapi-movingfeatures/tree/master?tab=readme-ov-file>`_. + + +MovingFeatures Collection Catalog +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +http://localhost:5000/collections + +Retrieve catalogs of a moving features collection. + +MovingFeatures +^^^^^^^^^^^^^^ +http://localhost:5000/collections/{collectionId}/items + +Retrieve the moving feature collection to access the static information of the moving feature by simple filtering and a limit. + +MovingFeature +""""""""""""" +http://localhost:5000/collections/{collectionId}/items/{mf_id} + + +Access the static data of the moving feature with id {mFeatureId}. +The static data of a moving feature does not include temporal geometries and temporal properties. + +TemporalGeometrySequence +"""""""""""""""""""""""" +http://localhost:5000/collections/{collectionId}/items/{mf_id}/tgsequence + +Retrieve the movement data of the single moving feature with id {mFeatureId}. + +TemporalGeometryQuery +""""""""""""""""""""" +http://localhost:5000/collections/{collectionId}/items/{mf_id}/tgsequence/{tGeometryId} + +Get a time-to-distance curve of a temporal primitive geometry with id {tGeometryId}. + +TemporalProperties +"""""""""""""""""" +http://localhost:5000/collections/{collectionId}/items/{mf_id}/tproperties + +Retrieve the static information of the temporal property data that is included in a single moving feature with id {mFeatureId}. +The static data of a temporal property does not include temporal values (property values). + +.. seealso:: + :ref:`ogcapi-mfapi` for more OGC API - MF-API request examples. + +Transactions +^^^^^^^^^^^^ +Register metadata about a collection of moving features. (using `curl`_): + +.. 
code-block:: sh + + curl -X POST http://localhost:5000/collections \ + -H "Content-Type: application/json" \ + -d "{\"title\": \"moving_feature_collection_sample\", + \"updateFrequency\": 1000, + \"description\": \"example\", + \"itemType\": \"movingfeature\" + }" + diff --git a/docs/source/transactions.rst b/docs/source/transactions.rst index 4c6327174..e1096df41 100644 --- a/docs/source/transactions.rst +++ b/docs/source/transactions.rst @@ -6,9 +6,13 @@ Transactions pygeoapi supports the `OGC API - Features - Part 4: Create, Replace, Update and Delete`_ draft specification, allowing for transactional capabilities against feature and record data. +Furthermore, pygeoapi supports the `OGC API - Moving Features - Part 1: Core`_ international standard, allowing +for transactional capabilities against moving features. + To enable transactions in pygeoapi, a given resource provider needs to be editable (via the configuration resource provider ``editable: true`` property). Note that the feature or record provider MUST support create/update/delete. See the :ref:`ogcapi-features` and :ref:`ogcapi-records` documentation for transaction support status of pygeoapi backends. +For MF-API transactions, please refer :ref:`ogcapi-mfapi` Access control ^^^^^^^^^^^^^^ @@ -17,3 +21,4 @@ It should be made clear that authentication and authorization is beyond the resp if a pygeoapi user enables transactions, they must provide access control explicitly via another service. .. _`OGC API - Features - Part 4: Create, Replace, Update and Delete`: https://docs.ogc.org/DRAFTS/20-002.html +.. 
_`OGC API - Moving Features - Part 1: Core`: https://docs.ogc.org/is/22-003r3/22-003r3.html diff --git a/pygeoapi/provider/postgresql_mobilitydb.py b/pygeoapi/provider/postgresql_mobilitydb.py index c808613d4..4121e4b02 100644 --- a/pygeoapi/provider/postgresql_mobilitydb.py +++ b/pygeoapi/provider/postgresql_mobilitydb.py @@ -11,21 +11,30 @@ # from mobilitydb.psycopg import register +# CREATE DATABASE mobilitydb +# WITH TEMPLATE = template0 +# ENCODING = 'UTF8'; +# ALTER DATABASE mobilitydb OWNER TO postgres; + +# create table: +# psql -U postgres -h 127.0.0.1 -p 5432 mobilitydb < +# tests/data/mf-api.sql + class PostgresMobilityDB: - # host = '127.0.0.1' + host = '127.0.0.1' + port = 5432 + db = 'mobilitydb' + user = 'docker' + password = 'docker' + connection = None + + # host = '172.20.241.18' # port = 5432 # db = 'mobilitydb' - # user = 'docker' - # password = 'docker' + # user = 'postgres' + # password = 'postgres' # connection = None - host = '172.20.241.18' - port = 5432 - db = 'mobility' - user = 'postgres' - password = 'postgres' - connection = None - def __init__(self, datasource=None): """ PostgresMobilityDB Class constructor diff --git a/tests/data/mfapi_moving_feature.json b/tests/data/mfapi_moving_feature.json new file mode 100644 index 000000000..7ed99fa7a --- /dev/null +++ b/tests/data/mfapi_moving_feature.json @@ -0,0 +1,219 @@ +{ + "type": "Feature", + "crs": { + "type": "Name", + "properties": { + "name": "urn:ogc:def:crs:OGC:1.3:CRS84" + } + }, + "trs": { + "type": "Link", + "properties": { + "type": "OGCDEF", + "href": "http://www.opengis.net/def/uom/ISO-8601/0/Gregorian" + } + }, + "temporalGeometry": { + "type": "MovingPoint", + "datetimes": [ + "2011-07-14T22:01:01Z", + "2011-07-14T22:01:02Z", + "2011-07-14T22:01:03Z", + "2011-07-14T22:01:04Z", + "2011-07-14T22:01:05Z" + ], + "coordinates": [ + [ + 139.757083, + 35.627701, + 0.5 + ], + [ + 139.757399, + 35.627701, + 2 + ], + [ + 139.757555, + 35.627688, + 4 + ], + [ + 139.757651, + 
35.627596, + 4 + ], + [ + 139.757716, + 35.627483, + 4 + ] + ], + "interpolation": "Linear", + "base": { + "type": "glTF", + "href": "http://www.opengis.net/spec/movingfeatures/json/1.0/prism/example/car3dmodel.gltf" + }, + "orientations": [ + { + "scales": [ + 1, + 1, + 1 + ], + "angles": [ + 0, + 0, + 0 + ] + }, + { + "scales": [ + 1, + 1, + 1 + ], + "angles": [ + 0, + 355, + 0 + ] + }, + { + "scales": [ + 1, + 1, + 1 + ], + "angles": [ + 0, + 0, + 330 + ] + }, + { + "scales": [ + 1, + 1, + 1 + ], + "angles": [ + 0, + 0, + 300 + ] + }, + { + "scales": [ + 1, + 1, + 1 + ], + "angles": [ + 0, + 0, + 270 + ] + } + ] + }, + "temporalProperties": [ + { + "datetimes": [ + "2011-07-14T22:01:01.450Z", + "2011-07-14T23:01:01.450Z", + "2011-07-15T00:01:01.450Z" + ], + "length": { + "type": "Measure", + "form": "http://www.qudt.org/qudt/owl/1.0.0/quantity/Length", + "values": [ + 1, + 2.4, + 1 + ], + "interpolation": "Linear" + }, + "discharge": { + "type": "Measure", + "form": "MQS", + "values": [ + 3, + 4, + 5 + ], + "interpolation": "Step" + } + }, + { + "datetimes": [ + 1465621816590, + 1465711526300 + ], + "camera": { + "type": "Image", + "values": [ + "http://www.opengis.net/spec/movingfeatures/json/1.0/prism/example/image1", + "iVBORw0KGgoAAAANSUhEU......" 
+ ], + "interpolation": "Discrete" + }, + "labels": { + "type": "Text", + "values": [ + "car", + "human" + ], + "interpolation": "Discrete" + } + } + ], + "geometry": { + "type": "LineString", + "coordinates": [ + [ + 139.757083, + 35.627701, + 0.5 + ], + [ + 139.757399, + 35.627701, + 2 + ], + [ + 139.757555, + 35.627688, + 4 + ], + [ + 139.757651, + 35.627596, + 4 + ], + [ + 139.757716, + 35.627483, + 4 + ] + ] + }, + "properties": { + "name": "car1", + "state": "test1", + "video": "http://www.opengis.net/spec/movingfeatures/json/1.0/prism/example/video.mpeg" + }, + "bbox": [ + 139.757083, + 35.627483, + 0, + 139.757716, + 35.627701, + 4.5 + ], + "time": [ + "2011-07-14T22:01:01Z", + "2011-07-15T01:11:22Z" + ], + "id": "mf-1" +} \ No newline at end of file diff --git a/tests/data/mfapi_temporal_geometry.json b/tests/data/mfapi_temporal_geometry.json new file mode 100644 index 000000000..386504de6 --- /dev/null +++ b/tests/data/mfapi_temporal_geometry.json @@ -0,0 +1,104 @@ +{ + "type": "MovingPoint", + "datetimes": [ + "2011-07-14T22:01:06Z", + "2011-07-14T22:01:07Z", + "2011-07-14T22:01:08Z", + "2011-07-14T22:01:09Z", + "2011-07-14T22:01:10Z" + ], + "coordinates": [ + [ + 139.757083, + 35.627701, + 0.5 + ], + [ + 139.757399, + 35.627701, + 2 + ], + [ + 139.757555, + 35.627688, + 4 + ], + [ + 139.757651, + 35.627596, + 4 + ], + [ + 139.757716, + 35.627483, + 4 + ] + ], + "interpolation": "Linear", + "base": { + "type": "glTF", + "href": "https://www.opengis.net/spec/movingfeatures/json/1.0/prism/example/car3dmodel.gltf" + }, + "orientations": [ + { + "scales": [ + 1, + 1, + 1 + ], + "angles": [ + 0, + 0, + 0 + ] + }, + { + "scales": [ + 1, + 1, + 1 + ], + "angles": [ + 0, + 355, + 0 + ] + }, + { + "scales": [ + 1, + 1, + 1 + ], + "angles": [ + 0, + 0, + 330 + ] + }, + { + "scales": [ + 1, + 1, + 1 + ], + "angles": [ + 0, + 0, + 300 + ] + }, + { + "scales": [ + 1, + 1, + 1 + ], + "angles": [ + 0, + 0, + 270 + ] + } + ] +} \ No newline at end of file diff --git 
a/tests/data/mfapi_temporal_properties.json b/tests/data/mfapi_temporal_properties.json new file mode 100644 index 000000000..85f1e2a01 --- /dev/null +++ b/tests/data/mfapi_temporal_properties.json @@ -0,0 +1,51 @@ +[ + { + "datetimes": [ + "2011-07-16T22:01:01.450Z", + "2011-07-16T23:01:01.450Z", + "2011-07-17T00:01:01.450Z" + ], + "length": { + "type": "Measure", + "form": "http://www.qudt.org/qudt/owl/1.0.0/quantity/Length", + "values": [ + 1, + 2.4, + 1 + ], + "interpolation": "Linear" + }, + "discharge": { + "type": "Measure", + "form": "MQS", + "values": [ + 3, + 4, + 5 + ], + "interpolation": "Step" + } + }, + { + "datetimes": [ + "2011-07-16T22:01:01.450Z", + "2011-07-16T23:01:01.450Z" + ], + "camera": { + "type": "Image", + "values": [ + "http://www.opengis.net/spec/movingfeatures/json/1.0/prism/example/image1", + "iVBORw0KGgoAAAANSUhEU......" + ], + "interpolation": "Discrete" + }, + "labels": { + "type": "Text", + "values": [ + "car", + "human" + ], + "interpolation": "Discrete" + } + } +] \ No newline at end of file diff --git a/tests/data/mfapi_temporal_property_value_data.json b/tests/data/mfapi_temporal_property_value_data.json new file mode 100644 index 000000000..ead9d40c6 --- /dev/null +++ b/tests/data/mfapi_temporal_property_value_data.json @@ -0,0 +1,13 @@ +{ + "datetimes": [ + "2011-07-18T08:00:00Z", + "2011-07-18T08:00:01Z", + "2011-07-18T08:00:02Z" + ], + "values": [ + 0, + 20, + 50 + ], + "interpolation": "Linear" +} \ No newline at end of file From e56181e3d7b4fa051603b548dce26c07dc076a97 Mon Sep 17 00:00:00 2001 From: ts-shimizu <ts-shimizu@apptec.co.jp> Date: Tue, 3 Dec 2024 10:49:45 +0900 Subject: [PATCH 09/14] MobilityDB installation (#5) Add used Python packages to the requirements-privider.txt * Add pymeos package --- requirements-provider.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/requirements-provider.txt b/requirements-provider.txt index 74bc473fc..891859ea0 100644 --- a/requirements-provider.txt +++ 
b/requirements-provider.txt @@ -22,3 +22,4 @@ sodapy xarray zarr s3fs<=2023.6.0 +pymeos \ No newline at end of file From 81a1ffcfa9204d09890faadff70b34682e64f644 Mon Sep 17 00:00:00 2001 From: ts-shimizu <ts-shimizu@apptec.co.jp> Date: Fri, 6 Dec 2024 17:39:47 +0900 Subject: [PATCH 10/14] MobilityDB installation (#5) Building a Docker environment * Created a new Dockerfile for MobilityDB. * Modified the existing Dockerfile. * Created a new docker*compose file. * Adjusted the DB hostname. * Created a shell script to create tables. --- Dockerfile | 10 ++-- DockerfileMobiitydb | 54 +++++++++++++++++++ docker-compose.yml | 21 ++++++++ docker/initdb-mobilitydb.sh | 62 ++++++++++++++++++++++ pygeoapi/provider/postgresql_mobilitydb.py | 2 +- requirements-provider.txt | 5 +- 6 files changed, 148 insertions(+), 6 deletions(-) create mode 100644 DockerfileMobiitydb create mode 100644 docker-compose.yml create mode 100644 docker/initdb-mobilitydb.sh diff --git a/Dockerfile b/Dockerfile index 04f1a20db..043aa30b0 100644 --- a/Dockerfile +++ b/Dockerfile @@ -125,11 +125,16 @@ RUN \ && apt autoremove -y \ && rm -rf /var/lib/apt/lists/* -ADD requirements-docker.txt requirements-admin.txt /pygeoapi/ +ADD requirements-docker.txt requirements-admin.txt requirements-provider.txt /pygeoapi/ + # Install remaining pygeoapi deps RUN python3 -m pip install --no-cache-dir -r requirements-docker.txt \ - && python3 -m pip install --no-cache-dir -r requirements-admin.txt + && python3 -m pip install --no-cache-dir -r requirements-admin.txt \ + && python3 -m pip install --no-cache-dir -r requirements-provider.txt +# If execute pytest +ADD requirements-dev.txt /pygeoapi/ +RUN python3 -m pip install --no-cache-dir -r requirements-dev.txt ADD . 
/pygeoapi @@ -142,4 +147,3 @@ RUN \ && cp /pygeoapi/docker/entrypoint.sh /entrypoint.sh ENTRYPOINT ["/entrypoint.sh"] - diff --git a/DockerfileMobiitydb b/DockerfileMobiitydb new file mode 100644 index 000000000..2f73c5b08 --- /dev/null +++ b/DockerfileMobiitydb @@ -0,0 +1,54 @@ +FROM postgis/postgis:14-3.2 + +# Configuration Parameters +LABEL maintainer="MobilityDB Project - https://github.com/MobilityDB/MobilityDB" +ENV MOBILITYDB_VERSION 1.2.0 +ENV POSTGRES_DB=mobilitydb +ENV POSTGRES_USER=docker +ENV POSTGRES_PASSWORD=docker + +# Fix the Release file expired problem +RUN echo "Acquire::Check-Valid-Until \"false\";\nAcquire::Check-Date \"false\";" | cat > /etc/apt/apt.conf.d/10no--check-valid-until + + +# Install Prerequisites +RUN apt-get update \ + && apt-get install -y \ + build-essential \ + cmake \ + git \ + libproj-dev \ + g++ \ + wget \ + autoconf \ + autotools-dev \ + libgeos-dev \ + libpq-dev \ + libproj-dev \ + libjson-c-dev \ + protobuf-c-compiler \ + xsltproc \ + libgsl-dev \ + libgslcblas0 \ + postgresql-server-dev-${PG_MAJOR} \ + && rm -rf /var/lib/apt/lists/* + +# Install MobilityDB +RUN wget -O MobilityDB.tar.gz "https://github.com/MobilityDB/MobilityDB/archive/v${MOBILITYDB_VERSION}.tar.gz" \ + && mkdir -p /usr/local/src/MobilityDB \ + && tar \ + --extract \ + --file MobilityDB.tar.gz \ + --directory /usr/local/src/MobilityDB \ + --strip-components 1 \ + && rm MobilityDB.tar.gz +RUN mkdir /usr/local/src/MobilityDB/build +RUN cd /usr/local/src/MobilityDB/build && \ + cmake .. 
&& \ + make -j$(nproc) && \ + make install + +RUN rm /docker-entrypoint-initdb.d/10_postgis.sh +# Create mf-api table +COPY /docker/initdb-mobilitydb.sh /docker-entrypoint-initdb.d/ +RUN chmod +x /docker-entrypoint-initdb.d/initdb-mobilitydb.sh \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 000000000..bcac1f82d --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,21 @@ +version: '3.0' +services: + mf-api: + build: + context: . + dockerfile: Dockerfile + ports: + - 5000:80 + + mobilitydb: + container_name: mobilitydb + ports: + - 25432:5432 + environment: + - POSTGRES_DB=mobilitydb + - POSTGRES_USER=docker + - POSTGRES_PASSWORD=docker + build: + context: . + dockerfile: DockerfileMobiitydb + restart: on-failure \ No newline at end of file diff --git a/docker/initdb-mobilitydb.sh b/docker/initdb-mobilitydb.sh new file mode 100644 index 000000000..fd829e1b1 --- /dev/null +++ b/docker/initdb-mobilitydb.sh @@ -0,0 +1,62 @@ +#!/bin/bash + +echo "shared_preload_libraries = 'postgis-3.so'" >> $PGDATA/postgresql.conf + +set -e + +# Create the 'mobilitydb' extension in the mobilitydb database +echo "Loading MobilityDB extension into mobilitydb" +psql --user="$POSTGRES_USER" --dbname="mobilitydb" <<- 'EOSQL' + CREATE EXTENSION IF NOT EXISTS PostGIS; + CREATE EXTENSION IF NOT EXISTS mobilitydb CASCADE; + CREATE EXTENSION IF NOT EXISTS "uuid-ossp"; + + -- Table collection + CREATE TABLE public.collection ( + collection_id uuid NOT NULL DEFAULT uuid_generate_v4(), + collection_property jsonb NULL, + PRIMARY KEY (collection_id) + ); + -- Table MovingFeature + CREATE TABLE public.mfeature ( + collection_id uuid NOT NULL, + mfeature_id uuid NOT NULL DEFAULT uuid_generate_v4(), + mf_geometry geometry NULL, + mf_property jsonb NULL, + lifespan tstzspan NULL, + PRIMARY KEY (collection_id, mfeature_id), + FOREIGN KEY (collection_id) REFERENCES collection(collection_id) + ); + -- Table TemporalGeometry + CREATE TABLE 
public.tgeometry ( + collection_id uuid NOT NULL, + mfeature_id uuid NOT NULL, + tgeometry_id uuid NOT NULL DEFAULT uuid_generate_v4(), + tgeometry_property tgeompoint NULL, + tgeog_property tgeompoint NULL, + PRIMARY KEY (collection_id, mfeature_id, tgeometry_id), + FOREIGN KEY (collection_id, mfeature_id) REFERENCES mfeature(collection_id, mfeature_id) + ); + -- Table TemporalProperty + CREATE TABLE public.tproperties ( + collection_id uuid NOT NULL, + mfeature_id uuid NOT NULL, + tproperties_name text NOT NULL, + tproperty jsonb NULL, + PRIMARY KEY (collection_id, mfeature_id, tproperties_name), + FOREIGN KEY (collection_id, mfeature_id) REFERENCES mfeature(collection_id, mfeature_id) + ); + + -- Table TemporalPropertyValue + CREATE TABLE public.tvalue ( + collection_id uuid NOT NULL, + mfeature_id uuid NOT NULL, + tproperties_name text NOT NULL, + tvalue_id uuid NOT NULL DEFAULT uuid_generate_v4(), + datetime_group int4 NOT NULL, + pvalue_float tfloat NULL, + pvalue_text ttext NULL, + PRIMARY KEY (collection_id, mfeature_id, tproperties_name, tvalue_id), + FOREIGN KEY (collection_id, mfeature_id, tproperties_name) REFERENCES tproperties(collection_id, mfeature_id, tproperties_name) + ); +EOSQL diff --git a/pygeoapi/provider/postgresql_mobilitydb.py b/pygeoapi/provider/postgresql_mobilitydb.py index 4121e4b02..54b460b43 100644 --- a/pygeoapi/provider/postgresql_mobilitydb.py +++ b/pygeoapi/provider/postgresql_mobilitydb.py @@ -21,7 +21,7 @@ # tests/data/mf-api.sql class PostgresMobilityDB: - host = '127.0.0.1' + host = 'mobilitydb' port = 5432 db = 'mobilitydb' user = 'docker' diff --git a/requirements-provider.txt b/requirements-provider.txt index 891859ea0..b94a7fd24 100644 --- a/requirements-provider.txt +++ b/requirements-provider.txt @@ -8,7 +8,7 @@ GDAL<=3.8.4 geoalchemy2 geopandas netCDF4 -numpy==2.0.1 +# numpy==2.0.1 oracledb pandas psycopg2 @@ -22,4 +22,5 @@ sodapy xarray zarr s3fs<=2023.6.0 -pymeos \ No newline at end of file +pymeos +numpy<2.0 \ No 
newline at end of file From b3471851476395418c399f577ea82c50e08e8b75 Mon Sep 17 00:00:00 2001 From: ts-shimizu <ts-shimizu@apptec.co.jp> Date: Thu, 12 Dec 2024 15:20:52 +0900 Subject: [PATCH 11/14] MobilityDB installation (#5) Docker image Name * Add docker image name setting --- docker-compose.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docker-compose.yml b/docker-compose.yml index bcac1f82d..e0c8dd280 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -4,6 +4,7 @@ services: build: context: . dockerfile: Dockerfile + image: pygeoapi-mf-api ports: - 5000:80 @@ -18,4 +19,5 @@ services: build: context: . dockerfile: DockerfileMobiitydb + image: pygeoapi-mf-api-mobilitydb restart: on-failure \ No newline at end of file From 42c4a771ad1c927e60059b7dec194fc69da22934 Mon Sep 17 00:00:00 2001 From: ts-shimizu <ts-shimizu@apptec.co.jp> Date: Thu, 12 Dec 2024 16:11:01 +0900 Subject: [PATCH 12/14] MobilityDB installation (#5) Update base image for DockerfileMobilityDB * updating the base image 14-3.2 to 17-3.5. 
--- DockerfileMobiitydb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/DockerfileMobiitydb b/DockerfileMobiitydb index 2f73c5b08..42040e39f 100644 --- a/DockerfileMobiitydb +++ b/DockerfileMobiitydb @@ -1,4 +1,4 @@ -FROM postgis/postgis:14-3.2 +FROM postgis/postgis:17-3.5 # Configuration Parameters LABEL maintainer="MobilityDB Project - https://github.com/MobilityDB/MobilityDB" From 946a81bb987a648f1a9b8a922c8838daa798f521 Mon Sep 17 00:00:00 2001 From: TaehoonK <kim.taehoon@aist.go.jp> Date: Sat, 14 Dec 2024 18:08:47 +0700 Subject: [PATCH 13/14] Revise temporal geometry query functions --- pygeoapi/api/movingfeatures.py | 1258 ++++++-------------- pygeoapi/openapi.py | 19 +- pygeoapi/provider/postgresql_mobilitydb.py | 228 ++-- 3 files changed, 477 insertions(+), 1028 deletions(-) diff --git a/pygeoapi/api/movingfeatures.py b/pygeoapi/api/movingfeatures.py index 32294f844..08c2d0933 100644 --- a/pygeoapi/api/movingfeatures.py +++ b/pygeoapi/api/movingfeatures.py @@ -63,8 +63,8 @@ CONFORMANCE_CLASSES_MOVINGFEATURES = [ "http://www.opengis.net/spec/ogcapi-movingfeatures-1/1.0/conf/common", - "http://www.opengis.net/spec/ogcapi-movingfeatures-1/1.0/conf/mf-collection", # noqa - "http://www.opengis.net/spec/ogcapi-movingfeatures-1/1.0/conf/movingfeatures" # noqa + "http://www.opengis.net/spec/ogcapi-movingfeatures-1/1.0/conf/mf-collection", + "http://www.opengis.net/spec/ogcapi-movingfeatures-1/1.0/conf/movingfeatures" ] @@ -1120,17 +1120,26 @@ def manage_collection_item_tGeometry( return headers, HTTPStatus.NO_CONTENT, '' -def get_collection_items_tGeometry_velocity(api: API, request: APIRequest, - dataset, identifier, - tGeometry) \ - -> Tuple[dict, int, str]: +def get_collection_items_tGeometry_velocity( + api: API, request: APIRequest, + dataset, identifier, tGeometry) -> Tuple[dict, int, str]: + """ + Get a time-to-velocity curve of a temporal primitive geometry + + :param request: A request object + :param dataset: dataset name + :param 
identifier: moving feature's id + :param tGeometry: Temporal Geometry's id + + :returns: tuple of headers, status code, content + """ headers = request.get_response_headers(SYSTEM_LOCALE) - datetime_ = request.params.get('date-time') collection_id = dataset mfeature_id = identifier tgeometry_id = tGeometry pmdb_provider = PostgresMobilityDB() + datetime_ = request.params.get('datetime') try: datetime_ = validate_datetime(datetime_, return_type=False) except ValueError as err: @@ -1138,10 +1147,25 @@ def get_collection_items_tGeometry_velocity(api: API, request: APIRequest, return api.get_exception( HTTPStatus.BAD_REQUEST, headers, request.format, 'InvalidParameterValue', msg) + + leaf_ = request.params.get('leaf') + try: + leaf_ = validate_leaf(leaf_) + except ValueError as err: + msg = str(err) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + sub_temporal_value = request.params.get('subTemporalValue') + if sub_temporal_value is None: + sub_temporal_value = False + try: pmdb_provider.connect() content = pmdb_provider.get_velocity( - collection_id, mfeature_id, tgeometry_id, datetime_) + collection_id, mfeature_id, tgeometry_id, + datetime_, leaf_, sub_temporal_value) except (Exception, psycopg2.Error) as error: msg = str(error) return api.get_exception( @@ -1153,17 +1177,26 @@ def get_collection_items_tGeometry_velocity(api: API, request: APIRequest, return headers, HTTPStatus.OK, content -def get_collection_items_tGeometry_distance(api: API, request: APIRequest, - dataset, identifier, - tGeometry) \ - -> Tuple[dict, int, str]: +def get_collection_items_tGeometry_distance( + api: API, request: APIRequest, + dataset, identifier, tGeometry) -> Tuple[dict, int, str]: + """ + Get a time-to-distance curve of a temporal primitive geometry + + :param request: A request object + :param dataset: dataset name + :param identifier: moving feature's id + :param tGeometry: Temporal Geometry's id + + :returns: 
tuple of headers, status code, content + """ headers = request.get_response_headers(SYSTEM_LOCALE) - datetime_ = request.params.get('date-time') collection_id = str(dataset) mfeature_id = str(identifier) tgeometry_id = str(tGeometry) pmdb_provider = PostgresMobilityDB() + datetime_ = request.params.get('datetime') try: datetime_ = validate_datetime(datetime_, return_type=False) except ValueError as err: @@ -1171,10 +1204,25 @@ def get_collection_items_tGeometry_distance(api: API, request: APIRequest, return api.get_exception( HTTPStatus.BAD_REQUEST, headers, request.format, 'InvalidParameterValue', msg) + + leaf_ = request.params.get('leaf') + try: + leaf_ = validate_leaf(leaf_) + except ValueError as err: + msg = str(err) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + sub_temporal_value = request.params.get('subTemporalValue') + if sub_temporal_value is None: + sub_temporal_value = False + try: pmdb_provider.connect() content = pmdb_provider.get_distance( - collection_id, mfeature_id, tgeometry_id, datetime_) + collection_id, mfeature_id, tgeometry_id, + datetime_, leaf_, sub_temporal_value) except (Exception, psycopg2.Error) as error: msg = str(error) return api.get_exception( @@ -1186,18 +1234,25 @@ def get_collection_items_tGeometry_distance(api: API, request: APIRequest, return headers, HTTPStatus.OK, content -def get_collection_items_tGeometry_acceleration(api: API, request: APIRequest, - dataset, identifier, - tGeometry) \ - -> Tuple[dict, - int, str]: +def get_collection_items_tGeometry_acceleration( + api: API, request: APIRequest, + dataset, identifier, tGeometry) -> Tuple[dict,int, str]: + """ + Get a time-to-acceleration curve of a temporal primitive geometry + :param request: A request object + :param dataset: dataset name + :param identifier: moving feature's id + :param tGeometry: Temporal Geometry's id + + :returns: tuple of headers, status code, content + """ headers = 
request.get_response_headers(SYSTEM_LOCALE) - datetime_ = request.params.get('date-time') collection_id = dataset mfeature_id = identifier tgeometry_id = tGeometry pmdb_provider = PostgresMobilityDB() + datetime_ = request.params.get('datetime') try: datetime_ = validate_datetime(datetime_, return_type=False) except ValueError as err: @@ -1205,10 +1260,25 @@ def get_collection_items_tGeometry_acceleration(api: API, request: APIRequest, return api.get_exception( HTTPStatus.BAD_REQUEST, headers, request.format, 'InvalidParameterValue', msg) + + leaf_ = request.params.get('leaf') + try: + leaf_ = validate_leaf(leaf_) + except ValueError as err: + msg = str(err) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + sub_temporal_value = request.params.get('subTemporalValue') + if sub_temporal_value is None: + sub_temporal_value = False + try: pmdb_provider.connect() content = pmdb_provider.get_acceleration( - collection_id, mfeature_id, tgeometry_id, datetime_) + collection_id, mfeature_id, tgeometry_id, + datetime_, leaf_, sub_temporal_value) except (Exception, psycopg2.Error) as error: msg = str(error) return api.get_exception( @@ -2059,24 +2129,31 @@ def check_required_field_feature(feature): if 'type' in feature: if feature['type'] == 'FeatureCollection': return True + if 'type' not in feature or 'temporalGeometry' not in feature: return False + if check_required_field_temporal_geometries( feature['temporalGeometry']) is False: return False + if 'temporalProperties' in feature: if check_required_field_temporal_property( feature['temporalProperties']) is False: return False + if 'geometry' in feature: if check_required_field_geometries(feature['geometry']) is False: return False + if 'crs' in feature: if check_required_field_crs(feature['crs']) is False: return False + if 'trs' in feature: if check_required_field_trs(feature['trs']) is False: return False + return True @@ -2088,15 +2165,15 @@ def 
check_required_field_geometries(geometry): def check_required_field_geometry_array(geometry): - if ('type' not in geometry - or 'geometries' not in geometry): + if 'type' not in geometry or 'geometries' not in geometry: return False + geometries = geometry['geometries'] - geometries = [geometries] if not isinstance( - geometries, list) else geometries + geometries = [geometries] if not isinstance(geometries, list) else geometries for l_geometry in geometries: if check_required_field_geometry_single(l_geometry) is False: return False + return True @@ -2108,10 +2185,8 @@ def check_required_field_geometry_single(geometry): def check_required_field_temporal_geometries(temporal_geometries): - if (check_required_field_temporal_geometry_array( - temporal_geometries) is False - and check_required_field_temporal_geometry_single - (temporal_geometries) is False): + if (check_required_field_temporal_geometry_array(temporal_geometries) is False + and check_required_field_temporal_geometry_single(temporal_geometries) is False): return False return True @@ -2123,15 +2198,17 @@ def check_required_field_temporal_geometry_array(temporal_geometries): prisms = temporal_geometries['prisms'] prisms = [prisms] if not isinstance(prisms, list) else prisms for temporal_geometry in prisms: - if check_required_field_temporal_geometry_single( - temporal_geometry) is False: + if check_required_field_temporal_geometry_single(temporal_geometry) is False: return False + if 'crs' in temporal_geometries: if check_required_field_crs(temporal_geometry['crs']) is False: return False + if 'trs' in temporal_geometries: if check_required_field_trs(temporal_geometry['trs']) is False: return False + return True @@ -2140,43 +2217,31 @@ def check_required_field_temporal_geometry_single(temporal_geometry): or 'datetimes' not in temporal_geometry or 'coordinates' not in temporal_geometry): return False + if 'crs' in temporal_geometry: if check_required_field_crs(temporal_geometry['crs']) is False: return False 
+ if 'trs' in temporal_geometry: if check_required_field_trs(temporal_geometry['trs']) is False: return False - return True -# TODO Do you still have the 'temporalProperties' key? -# def checkRequiredFieldTemporalProperties(temporalProperties): -# if 'temporalProperties' not in temporalProperties: -# return False -# if check_required_field_temporal_property\ -# (temporalProperties['temporalProperties']) is False: -# return False -# return True + return True def check_required_field_temporal_property(temporal_properties): - temporal_properties = [temporal_properties] if not isinstance( - temporal_properties, list) else temporal_properties + temporal_properties = [temporal_properties] \ + if not isinstance(temporal_properties, list) \ + else temporal_properties + for temporal_property in temporal_properties: if ('datetimes' not in temporal_property): return False + for tproperties_name in temporal_property: - if tproperties_name != 'datetimes' and ( - 'values' - not - in - temporal_property - [tproperties_name] - or - 'interpolation' - not - in - temporal_property - [tproperties_name]): + if (tproperties_name != 'datetimes' + and ('values' not in temporal_property[tproperties_name] + or 'interpolation' not in temporal_property[tproperties_name])): return False return True @@ -2203,7 +2268,7 @@ def check_required_field_trs(trs): return True # fmt: off -def get_oas_30(cfg: dict, locale: str) -> tuple[list[dict[str, str]], dict[str, dict]]: # noqa +def get_oas_30(cfg: dict, locale: str) -> tuple[list[dict[str, str]], dict[str, dict]]: """ Get OpenAPI fragments @@ -2220,88 +2285,44 @@ def get_oas_30(cfg: dict, locale: str) -> tuple[list[dict[str, str]], dict[str, "get": { "operationId": "accessMetadata", "summary": "Access metadata about the collection", - "description": "A user can access metadata with id `collectionId`.\n", # noqa - "tags": [ - "MovingFeatureCollection" - ], - "parameters": [ - { - "$ref": 
f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId" # noqa - } - ], + "description": "A user can access metadata with id `collectionId`.\n", + "tags": ["MovingFeatureCollection"], + "parameters": [{"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId"}], "responses": { - "200": { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/Collection" # noqa - }, - "404": { - "description": "A collection with the specified id was not found." # noqa - }, - "500": { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError" # noqa - } + "200": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/Collection"}, + "404": {"description": "A collection with the specified id was not found."}, + "500": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError"} } }, "delete": { "operationId": "deleteCollection", "summary": "Delete the collection", - "description": "The collection catalog with id `collectionId` and including metadata and moving features SHOULD be deleted.\n", # noqa - "tags": [ - "MovingFeatureCollection" - ], - "parameters": [ - { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId" # noqa - } - ], + "description": "The collection catalog with id `collectionId` and including metadata and moving features SHOULD be deleted.\n", + "tags": ["MovingFeatureCollection"], + "parameters": [{"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId"}], "responses": { - "204": { - "description": "Successfully deleted." - }, - "404": { - "description": "A collection with the specified name was not found." 
# noqa - }, - "500": { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError" # noqa - } + "204": {"description": "Successfully deleted."}, + "404": {"description": "A collection with the specified name was not found."}, + "500": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError"} } }, "put": { "operationId": "replaceMetadata", "summary": "Replace metadata about the collection", - "description": "A user SHOULD replace metadata with id `collectionId`.\n\nThe request body schema is the same the POST's one. \n\nHowever, `updateFrequency` property is NOT updated.\n", # noqa - "tags": [ - "MovingFeatureCollection" - ], - "parameters": [ - { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId" # noqa - } - ], + "description": "A user SHOULD replace metadata with id `collectionId`.\n\nThe request body schema is the same the POST's one. \n\nHowever, `updateFrequency` property is NOT updated.\n", + "tags": ["MovingFeatureCollection"], + "parameters": [{"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId"}], "requestBody": { "content": { "application/json": { - "schema": { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/schemas/collection-body" # noqa - }, - "example": { - "title": "moving_feature_collection_sample", - "updateFrequency": 1000, - "description": "example", - "itemType": "movingfeature" - } + "schema": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/schemas/collection-body"} } } }, "responses": { - "204": { - "description": "Successfully replaced." - }, - "404": { - "description": "A collection with the specified name was not found." 
# noqa - }, - "500": { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError" # noqa - } + "204": {"description": "Successfully replaced."}, + "404": {"description": "A collection with the specified name was not found."}, + "500": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError"} } } } @@ -2311,281 +2332,94 @@ def get_oas_30(cfg: dict, locale: str) -> tuple[list[dict[str, str]], dict[str, "get": { "operationId": "retrieveMovingFeatures", "summary": "Retrieve moving feature collection", - "description": "A user can retrieve moving feature collection to access the static information of the moving feature by simple filtering and a limit.\n\nSpecifically, if the `subTrajectory` parameter is \"true\", it will return the temporal geometry within the time interval specified by `datetime` parameter.\n", # noqa - "tags": [ - "MovingFeatures" - ], + "description": "A user can retrieve moving feature collection to access the static information of the moving feature by simple filtering and a limit.\n\nSpecifically, if the `subTrajectory` parameter is \"true\", it will return the temporal geometry within the time interval specified by `datetime` parameter.\n", + "tags": ["MovingFeatures"], "parameters": [ - { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId" # noqa - }, - { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/bbox" # noqa - }, - { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/datetime" # noqa - }, - { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/limit" # noqa - }, - { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/subtrajectory" # noqa - } + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/bbox"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/datetime"}, + {"$ref": 
f"{OPENAPI_YAML['movingfeature']}#/components/parameters/limit"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/subtrajectory"} ], "responses": { - "200": { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/MovingFeatures" # noqa - }, - "404": { - "description": "A collection with the specified id was not found." # noqa - }, - "500": { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError" # noqa - } + "200": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/MovingFeatures"}, + "404": {"description": "A collection with the specified id was not found."}, + "500": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError"} } }, "post": { "operationId": "insertMovingFeatures", "summary": "Insert moving features", - "description": "A user SHOULD insert a set of moving features or a moving feature into a collection with id `collectionId`.\n\nThe request body schema SHALL follows the [MovingFeature object](https://docs.opengeospatial.org/is/19-045r3/19-045r3.html#mfeature) or \n[MovingFeatureCollection object](https://docs.opengeospatial.org/is/19-045r3/19-045r3.html#mfeaturecollection) in the OGC MF-JSON.\n", # noqa - "tags": [ - "MovingFeatures" - ], - "parameters": [ - { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId" # noqa - } - ], + "description": "A user SHOULD insert a set of moving features or a moving feature into a collection with id `collectionId`.\n\nThe request body schema SHALL follows the [MovingFeature object](https://docs.opengeospatial.org/is/19-045r3/19-045r3.html#mfeature) or \n[MovingFeatureCollection object](https://docs.opengeospatial.org/is/19-045r3/19-045r3.html#mfeaturecollection) in the OGC MF-JSON.\n", + "tags": ["MovingFeatures"], + "parameters": [{"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId"}], "requestBody": { "content": { "application/json": { "schema": { "oneOf": [ - { - 
"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/schemas/movingFeature-mfjson" # noqa - }, - { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/schemas/movingFeatureCollection" # noqa - } + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/schemas/movingFeature-mfjson"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/schemas/movingFeatureCollection"} ] }, "example": { "type": "Feature", "crs": { "type": "Name", - "properties": { - "name": "urn:ogc:def:crs:OGC:1.3:CRS84" # noqa - } + "properties": {"name": "urn:ogc:def:crs:OGC:1.3:CRS84"} }, "trs": { "type": "Link", "properties": { "type": "OGCDEF", - "href": "http://www.opengis.net/def/uom/ISO-8601/0/Gregorian" # noqa + "href": "http://www.opengis.net/def/uom/ISO-8601/0/Gregorian", } }, "temporalGeometry": { "type": "MovingPoint", - "datetimes": [ - "2011-07-14T22:01:01Z", - "2011-07-14T22:01:02Z", - "2011-07-14T22:01:03Z", - "2011-07-14T22:01:04Z", - "2011-07-14T22:01:05Z" - ], - "coordinates": [ - [ - 139.757083, - 35.627701, - 0.5 - ], - [ - 139.757399, - 35.627701, - 2 - ], - [ - 139.757555, - 35.627688, - 4 - ], - [ - 139.757651, - 35.627596, - 4 - ], - [ - 139.757716, - 35.627483, - 4 - ] - ], + "datetimes": ["2011-07-14T22:01:01Z", "2011-07-14T22:01:02Z", "2011-07-14T22:01:03Z", "2011-07-14T22:01:04Z", "2011-07-14T22:01:05Z"], + "coordinates": [[139.757083, 35.627701, 0.5], [139.757399, 35.627701, 2.0], [139.757555, 35.627688, 4.0], [139.757651, 35.627596, 4.0], [139.757716, 35.627483, 4.0]], "interpolation": "Linear", "base": { "type": "glTF", - "href": "http://www.opengis.net/spec/movingfeatures/json/1.0/prism/example/car3dmodel.gltf" # noqa + "href": "http://www.opengis.net/spec/movingfeatures/json/1.0/prism/example/car3dmodel.gltf" }, "orientations": [ - { - "scales": [ - 1, - 1, - 1 - ], - "angles": [ - 0, - 0, - 0 - ] - }, - { - "scales": [ - 1, - 1, - 1 - ], - "angles": [ - 0, - 355, - 0 - ] - }, - { - "scales": [ - 1, - 1, - 1 - ], - "angles": [ - 0, - 0, - 330 - ] - }, - { 
- "scales": [ - 1, - 1, - 1 - ], - "angles": [ - 0, - 0, - 300 - ] - }, - { - "scales": [ - 1, - 1, - 1 - ], - "angles": [ - 0, - 0, - 270 - ] - } + {"scales": [1, 1, 1], "angles": [0, 0, 0]}, + {"scales": [1, 1, 1], "angles": [0, 355, 0]}, + {"scales": [1, 1, 1], "angles": [0, 0, 330]}, + {"scales": [1, 1, 1], "angles": [0, 0, 300]}, + {"scales": [1, 1, 1], "angles": [0, 0, 270]} ] }, "temporalProperties": [ { - "datetimes": [ - "2011-07-14T22:01:01.450Z", - "2011-07-14T23:01:01.450Z", - "2011-07-15T00:01:01.450Z" - ], + "datetimes": ["2011-07-14T22:01:01.450Z", "2011-07-14T23:01:01.450Z", "2011-07-15T00:01:01.450Z"], "length": { "type": "Measure", - "form": "http://qudt.org/vocab/quantitykind/Length", # noqa - "values": [ - 1, - 2.4, - 1 - ], - "interpolation": "Linear" + "form": "http://qudt.org/vocab/quantitykind/Length", + "values": [1.0, 2.4, 1.0], + "interpolation": "Linear", }, "discharge": { "type": "Measure", "form": "MQS", - "values": [ - 3, - 4, - 5 - ], + "values": [3.0, 4.0, 5.0], "interpolation": "Step" } - }, - { - "datetimes": [ - 1465621816590, - 1465711526300 - ], - "camera": { - "type": "Image", - "values": [ - "http://www.opengis.net/spec/movingfeatures/json/1.0/prism/example/image1", # noqa - "iVBORw0KGgoAAAANSUhEU......" 
- ], - "interpolation": "Discrete" - }, - "labels": { - "type": "Text", - "values": [ - "car", - "human" - ], - "interpolation": "Discrete" - } } ], "geometry": { "type": "LineString", - "coordinates": [ - [ - 139.757083, - 35.627701, - 0.5 - ], - [ - 139.757399, - 35.627701, - 2 - ], - [ - 139.757555, - 35.627688, - 4 - ], - [ - 139.757651, - 35.627596, - 4 - ], - [ - 139.757716, - 35.627483, - 4 - ] - ] + "coordinates": [[139.757083, 35.627701, 0.5], [139.757399, 35.627701, 2.0], [139.757555, 35.627688, 4.0], [139.757651, 35.627596, 4.0], [139.757716, 35.627483, 4.0]] }, "properties": { "name": "car1", "state": "test1", - "video": "http://www.opengis.net/spec/movingfeatures/json/1.0/prism/example/video.mpeg" # noqa + "video": "http://www.opengis.net/spec/movingfeatures/json/1.0/prism/example/video.mpeg", }, - "bbox": [ - 139.757083, - 35.627483, - 0, - 139.757716, - 35.627701, - 4.5 - ], - "time": [ - "2011-07-14T22:01:01Z", - "2011-07-15T01:11:22Z" - ], + "bbox": [139.757083, 35.627483, 0.0, 139.757716, 35.627701, 4.5], + "time": ["2011-07-14T22:01:01Z", "2011-07-15T01:11:22Z"], "id": "mf-1" } } @@ -2593,257 +2427,103 @@ def get_oas_30(cfg: dict, locale: str) -> tuple[list[dict[str, str]], dict[str, }, "responses": { "201": { - "description": "Successful create a set of moving features or a moving feature into a specific collection.\n", # noqa + "description": "Successful create a set of moving features or a moving feature into a specific collection.\n", "headers": { "Locations": { - "description": "A list of URI of the newly added resources", # noqa - "schema": { - "type": "array", - "items": { - "type": "string" - }, - "example": [ - "https://data.example.org/collections/mfc-1/items/mf-1", # noqa - "https://data.example.org/collections/mfc-1/items/109301273" # noqa - ] - } + "description": "A list of URI of the newly added resources", + "schema": {"type": "array","items": {"type": "string"}} } } }, - "400": { - "description": "A query parameter was not validly 
used." - }, - "404": { - "description": "A collection with the specified id was not found." # noqa - }, - "500": { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError" # noqa - } + "400": {"description": "A query parameter was not validly used."}, + "404": {"description": "A collection with the specified id was not found."}, + "500": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError"} } } } - collections_collectionId_items_mFeatureId_path = '/collections/{collectionId}/items/{mFeatureId}' # noqa + collections_collectionId_items_mFeatureId_path = '/collections/{collectionId}/items/{mFeatureId}' paths[collections_collectionId_items_mFeatureId_path] = { "get": { "operationId": "accessMovingFeature", "summary": "Access the static data of the moving feature", - "description": "A user can access a static data of a moving feature with id `mFeatureId`.\n\nThe static data of a moving feature is not included temporal geometries and temporal properties.\n", # noqa - "tags": [ - "MovingFeatures" - ], + "description": "A user can access a static data of a moving feature with id `mFeatureId`.\n\nThe static data of a moving feature is not included temporal geometries and temporal properties.\n", + "tags": ["MovingFeatures"], "parameters": [ - { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId" # noqa - }, - { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/mFeatureId" # noqa - } + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/mFeatureId"} ], "responses": { - "200": { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/MovingFeature" # noqa - }, - "404": { - "description": "- A collection with the specified id was not found.\n- Or a moving feature with the specified id was not found.\n" # noqa - }, - "500": { - "$ref": 
f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError" # noqa - } + "200": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/MovingFeature"}, + "404": {"description": "- A collection with the specified id was not found.\n- Or a moving feature with the specified id was not found.\n"}, + "500": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError"} } }, "delete": { "operationId": "deleteMovingFeature", "summary": "Delete a single moving feature", - "description": "The moving feature with id `mFeatureId` and including temporal geometries and properties SHOULD be deleted.\n", # noqa - "tags": [ - "MovingFeatures" - ], + "description": "The moving feature with id `mFeatureId` and including temporal geometries and properties SHOULD be deleted.\n", + "tags": ["MovingFeatures"], "parameters": [ - { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId" # noqa - }, - { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/mFeatureId" # noqa - } + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/mFeatureId"} ], "responses": { - "204": { - "description": "Successfully deleted." 
- }, - "404": { - "description": "- A collection with the specified id was not found.\n- Or a moving feature with the specified id was not found.\n" # noqa - }, - "500": { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError" # noqa - } + "204": {"description": "Successfully deleted."}, + "404": {"description": "- A collection with the specified id was not found.\n- Or a moving feature with the specified id was not found.\n"}, + "500": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError"} } } } - collections_collectionId_items_mFeatureId_tgsequence_path = '/collections/{collectionId}/items/{mFeatureId}/tgsequence' # noqa + collections_collectionId_items_mFeatureId_tgsequence_path = '/collections/{collectionId}/items/{mFeatureId}/tgsequence' paths[collections_collectionId_items_mFeatureId_tgsequence_path] = { "get": { "operationId": "retrieveTemporalGeometrySequence", - "summary": "Retrieve the movement data of the single moving feature", # noqa - "description": "A user can retrieve only the movement data of a moving feature with id `mFeatureId` by simple filtering and a limit.\n", # noqa - "tags": [ - "TemporalGeometry" - ], + "summary": "Retrieve the movement data of the single moving feature", + "description": "A user can retrieve only the movement data of a moving feature with id `mFeatureId` by simple filtering and a limit.\n", + "tags": ["TemporalGeometry"], "parameters": [ - { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId" # noqa - }, - { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/mFeatureId" # noqa - }, - { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/bbox" # noqa - }, - { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/datetime" # noqa - }, - { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/limit" # noqa - }, - { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/leaf" # noqa 
- }, - { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/subtrajectory" # noqa - } + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/mFeatureId"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/bbox"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/datetime"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/limit"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/leaf"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/subtrajectory"} ], "responses": { - "200": { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/TemporalGeometrySequence" # noqa - }, - "500": { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError" # noqa - } + "200": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/TemporalGeometrySequence"}, + "500": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError"} } }, "post": { "operationId": "insertTemporalPrimitiveGeometry", "summary": "Add movement data into the moving feature", - "description": "A user SHOULD add more movement data into a moving feature with id `mFeatureId`.\n\nThe request body schema SHALL follows the [TemporalPrimitiveGeometry object](https://docs.ogc.org/is/19-045r3/19-045r3.html#tprimitive) in the OGC MF-JSON.\n", # noqa - "tags": [ - "TemporalGeometry" - ], + "description": "A user SHOULD add more movement data into a moving feature with id `mFeatureId`.\n\nThe request body schema SHALL follows the [TemporalPrimitiveGeometry object](https://docs.ogc.org/is/19-045r3/19-045r3.html#tprimitive) in the OGC MF-JSON.\n", + "tags": ["TemporalGeometry"], "parameters": [ - { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId" # noqa - }, - { - "$ref": 
f"{OPENAPI_YAML['movingfeature']}#/components/parameters/mFeatureId" # noqa - } + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/mFeatureId"} ], "requestBody": { "content": { "application/json": { - "schema": { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/schemas/temporalPrimitiveGeometry" # noqa - }, + "schema": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/schemas/temporalPrimitiveGeometry"}, "example": { "type": "MovingPoint", - "datetimes": [ - "2011-07-14T22:01:06Z", - "2011-07-14T22:01:07Z", - "2011-07-14T22:01:08Z", - "2011-07-14T22:01:09Z", - "2011-07-14T22:01:10Z" - ], - "coordinates": [ - [ - 139.757083, - 35.627701, - 0.5 - ], - [ - 139.757399, - 35.627701, - 2 - ], - [ - 139.757555, - 35.627688, - 4 - ], - [ - 139.757651, - 35.627596, - 4 - ], - [ - 139.757716, - 35.627483, - 4 - ] - ], + "datetimes": ["2011-07-14T22:01:06Z","2011-07-14T22:01:07Z","2011-07-14T22:01:08Z","2011-07-14T22:01:09Z","2011-07-14T22:01:10Z"], + "coordinates": [ [139.757083, 35.627701, 0.5], [139.757399, 35.627701, 2.0], [139.757555, 35.627688, 4.0], [139.757651, 35.627596, 4.0], [139.757716, 35.627483, 4.0] ], "interpolation": "Linear", "base": { "type": "glTF", - "href": "https://www.opengis.net/spec/movingfeatures/json/1.0/prism/example/car3dmodel.gltf" # noqa + "href": "https://www.opengis.net/spec/movingfeatures/json/1.0/prism/example/car3dmodel.gltf" }, "orientations": [ - { - "scales": [ - 1, - 1, - 1 - ], - "angles": [ - 0, - 0, - 0 - ] - }, - { - "scales": [ - 1, - 1, - 1 - ], - "angles": [ - 0, - 355, - 0 - ] - }, - { - "scales": [ - 1, - 1, - 1 - ], - "angles": [ - 0, - 0, - 330 - ] - }, - { - "scales": [ - 1, - 1, - 1 - ], - "angles": [ - 0, - 0, - 300 - ] - }, - { - "scales": [ - 1, - 1, - 1 - ], - "angles": [ - 0, - 0, - 270 - ] - } + {"scales":[1,1,1], "angles":[0,0,0]}, + {"scales":[1,1,1], "angles":[0,355,0]}, + {"scales":[1,1,1], 
"angles":[0,0,330]}, + {"scales":[1,1,1], "angles":[0,0,300]}, + {"scales":[1,1,1], "angles":[0,0,270]} ] } } @@ -2851,413 +2531,206 @@ def get_oas_30(cfg: dict, locale: str) -> tuple[list[dict[str, str]], dict[str, }, "responses": { "201": { - "description": "Successful add more movement data into a specified moving feature.\n", # noqa + "description": "Successful add more movement data into a specified moving feature.\n", "headers": { "Location": { - "description": "A URI of the newly added resource", # noqa - "schema": { - "type": "string", - "example": "https://data.example.org/collections/mfc-1/items/mf-1/tgsequence/tg-2" # noqa - } + "description": "A URI of the newly added resource", + "schema": {"type": "string"} } } }, - "400": { - "description": "A query parameter was not validly used." # noqa - }, - "404": { - "description": "- A collection with the specified id was not found.\n- Or a moving feature with the specified id was not found.\n" # noqa - }, - "500": { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError" # noqa - } + "400": {"description": "A query parameter was not validly used."}, + "404": {"description": "- A collection with the specified id was not found.\n- Or a moving feature with the specified id was not found.\n"}, + "500": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError"} } } } - collections_collectionId_items_mFeatureId_tgsequence_tGeometryId_path = '/collections/{collectionId}/items/{mFeatureId}/tgsequence/{tGeometryId}' # noqa - paths[collections_collectionId_items_mFeatureId_tgsequence_tGeometryId_path] = { # noqa + collections_collectionId_items_mFeatureId_tgsequence_tGeometryId_path = '/collections/{collectionId}/items/{mFeatureId}/tgsequence/{tGeometryId}' + paths[collections_collectionId_items_mFeatureId_tgsequence_tGeometryId_path] = { "delete": { "operationId": "deleteTemporalPrimitiveGeometry", "summary": "Delete a singe temporal primitive geometry", - "description": "The 
temporal primitive geometry with id `tGeometryId` SHOULD be deleted.\n", # noqa - "tags": [ - "TemporalGeometry" - ], + "description": "The temporal primitive geometry with id `tGeometryId` SHOULD be deleted.\n", + "tags": ["TemporalGeometry"], "parameters": [ - { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId" # noqa - }, - { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/mFeatureId" # noqa - }, - { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/tGeometryId" # noqa - } + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/mFeatureId"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/tGeometryId"} ], "responses": { - "204": { - "description": "Successfully deleted." - }, - "404": { - "description": "- A collection with the specified id was not found.\n- Or a moving feature with the specified id was not found.\n- Or a temporal primitive geometry with the specified id was not found.\n" # noqa - }, - "500": { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError" # noqa - } + "204": {"description": "Successfully deleted."}, + "404": {"description": "- A collection with the specified id was not found.\n- Or a moving feature with the specified id was not found.\n- Or a temporal primitive geometry with the specified id was not found.\n"}, + "500": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError"} } } } - collections_collectionId_items_mFeatureId_tgsequence_tGeometryId_distance_path = '/collections/{collectionId}/items/{mFeatureId}/tgsequence/{tGeometryId}/distance' # noqa - paths[collections_collectionId_items_mFeatureId_tgsequence_tGeometryId_distance_path] = { # noqa + collections_collectionId_items_mFeatureId_tgsequence_tGeometryId_distance_path = '/collections/{collectionId}/items/{mFeatureId}/tgsequence/{tGeometryId}/distance' 
+ paths[collections_collectionId_items_mFeatureId_tgsequence_tGeometryId_distance_path] = { "get": { "operationId": "getDistanceOfTemporalPrimitiveGeometry", - "summary": "Get a time-to-distance curve of a temporal primitive geometry", # noqa - "description": "A user can get time-to-distance curve of a temporal primitive geometry with id `tGeometryId`.\n", # noqa - "tags": [ - "TemporalGeometryQuery" - ], + "summary": "Get a time-to-distance curve of a temporal primitive geometry", + "description": "A user can get time-to-distance curve of a temporal primitive geometry with id `tGeometryId`.\n", + "tags": ["TemporalGeometryQuery"], "parameters": [ - { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId" # noqa - }, - { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/mFeatureId" # noqa - }, - { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/tGeometryId" # noqa - }, - { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/datetime" # noqa - }, - { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/leaf" # noqa - }, - { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/subtemporalvalue" # noqa - } + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/mFeatureId"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/tGeometryId"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/datetime"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/leaf"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/subtemporalvalue"} ], "responses": { - "200": { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/DistanceQuery" # noqa - }, - "400": { - "description": "A query parameter was not validly used." 
# noqa - }, - "500": { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError" # noqa - } + "200": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/DistanceQuery"}, + "400": {"description": "A query parameter was not validly used."}, + "500": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError"} } } } - collections_collectionId_items_mFeatureId_tgsequence_tGeometryId_velocity_path = '/collections/{collectionId}/items/{mFeatureId}/tgsequence/{tGeometryId}/velocity' # noqa - paths[collections_collectionId_items_mFeatureId_tgsequence_tGeometryId_velocity_path] = { # noqa + collections_collectionId_items_mFeatureId_tgsequence_tGeometryId_velocity_path = '/collections/{collectionId}/items/{mFeatureId}/tgsequence/{tGeometryId}/velocity' + paths[collections_collectionId_items_mFeatureId_tgsequence_tGeometryId_velocity_path] = { "get": { "operationId": "getVelocityOfTemporalPrimitiveGeometry", - "summary": "Get a time-to-velocity curve of a temporal primitive geometry", # noqa - "description": "A user can get time-to-velocity curve of a temporal primitive geometry with id `tGeometryId`.\n", # noqa - "tags": [ - "TemporalGeometryQuery" - ], + "summary": "Get a time-to-velocity curve of a temporal primitive geometry", + "description": "A user can get time-to-velocity curve of a temporal primitive geometry with id `tGeometryId`.\n", + "tags": ["TemporalGeometryQuery"], "parameters": [ - { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId" # noqa - }, - { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/mFeatureId" # noqa - }, - { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/tGeometryId" # noqa - }, - { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/datetime" # noqa - }, - { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/leaf" # noqa - }, - { - "$ref": 
f"{OPENAPI_YAML['movingfeature']}#/components/parameters/subtemporalvalue" # noqa - } + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/mFeatureId"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/tGeometryId"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/datetime"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/leaf"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/subtemporalvalue"} ], "responses": { - "200": { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/VelocityQuery" # noqa - }, - "400": { - "description": "A query parameter was not validly used." - }, - "500": { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError" # noqa - } + "200": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/VelocityQuery"}, + "400": {"description": "A query parameter was not validly used."}, + "500": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError"} } } } - collections_collectionId_items_mFeatureId_tgsequence_tGeometryId_acceleration_path = '/collections/{collectionId}/items/{mFeatureId}/tgsequence/{tGeometryId}/acceleration' # noqa - paths[collections_collectionId_items_mFeatureId_tgsequence_tGeometryId_acceleration_path] = { # noqa + collections_collectionId_items_mFeatureId_tgsequence_tGeometryId_acceleration_path = '/collections/{collectionId}/items/{mFeatureId}/tgsequence/{tGeometryId}/acceleration' + paths[collections_collectionId_items_mFeatureId_tgsequence_tGeometryId_acceleration_path] = { "get": { "operationId": "getAccelerationOfTemporalPrimitiveGeometry", - "summary": "Get a time-to-acceleration curve of a temporal primitive geometry", # noqa - "description": "A user can get time-to-acceleration curve of a temporal primitive geometry with id `tGeometryId`.\n", # noqa - "tags": [ - 
"TemporalGeometryQuery" - ], + "summary": "Get a time-to-acceleration curve of a temporal primitive geometry", + "description": "A user can get time-to-acceleration curve of a temporal primitive geometry with id `tGeometryId`.\n", + "tags": ["TemporalGeometryQuery"], "parameters": [ - { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId" # noqa - }, - { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/mFeatureId" # noqa - }, - { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/tGeometryId" # noqa - }, - { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/datetime" # noqa - }, - { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/leaf" # noqa - }, - { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/subtemporalvalue" # noqa - } + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/mFeatureId"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/tGeometryId"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/datetime"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/leaf"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/subtemporalvalue"} ], "responses": { - "200": { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/AccelerationQuery" # noqa - }, - "400": { - "description": "A query parameter was not validly used." 
# noqa - }, - "500": { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError" # noqa - } + "200": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/AccelerationQuery" }, + "400": {"description": "A query parameter was not validly used."}, + "500": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError"} } } } - collections_collectionId_items_mFeatureId_tproperties_path = '/collections/{collectionId}/items/{mFeatureId}/tproperties' # noqa - paths[collections_collectionId_items_mFeatureId_tproperties_path] = { # noqa + collections_collectionId_items_mFeatureId_tproperties_path = '/collections/{collectionId}/items/{mFeatureId}/tproperties' + paths[collections_collectionId_items_mFeatureId_tproperties_path] = { "get": { "operationId": "retrieveTemporalProperties", "summary": "Retrieve a set of the temporal property data", - "description": "A user can retrieve the static information of the temporal property data that included a single moving feature with id `mFeatureId`.\n\nThe static data of a temporal property is not included temporal values (property `valueSequence`).\n\nAlso a user can retrieve the sub sequence of the temporal information of the temporal property data for the specified time interval with `subTemporalValue` query parameter. \nIn this case, `temporalProperties` property schema SHALL follows the [TemporalProperties object](https://docs.ogc.org/is/19-045r3/19-045r3.html#tproperties) in the OGC MF-JSON.\n", # noqa - "tags": [ - "TemporalProperty" - ], + "description": "A user can retrieve the static information of the temporal property data that included a single moving feature with id `mFeatureId`.\n\nThe static data of a temporal property is not included temporal values (property `valueSequence`).\n\nAlso a user can retrieve the sub sequence of the temporal information of the temporal property data for the specified time interval with `subTemporalValue` query parameter. 
\nIn this case, `temporalProperties` property schema SHALL follows the [TemporalProperties object](https://docs.ogc.org/is/19-045r3/19-045r3.html#tproperties) in the OGC MF-JSON.\n", + "tags": ["TemporalProperty"], "parameters": [ - { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId" # noqa - }, - { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/mFeatureId" # noqa - }, - { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/datetime" # noqa - }, - { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/limit" # noqa - }, - { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/subtemporalvalue" # noqa - } + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/mFeatureId"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/datetime"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/limit"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/subtemporalvalue"} ], "responses": { - "200": { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/TemporalProperties" # noqa - }, - "400": { - "description": "A query parameter was not validly used." 
# noqa - }, - "500": { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError" # noqa - } + "200": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/TemporalProperties"}, + "400": {"description": "A query parameter was not validly used."}, + "500": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError"} } }, "post": { "operationId": "insertTemporalProperty", "summary": "Add temporal property data", - "description": "A user SHOULD add new temporal property data into a moving feature with id `mFeatureId`.\n\nThe request body schema SHALL follows the [TemporalProperties object](https://docs.opengeospatial.org/is/19-045r3/19-045r3.html#tproperties) in the OGC MF-JSON.\n", # noqa - "tags": [ - "TemporalProperty" - ], + "description": "A user SHOULD add new temporal property data into a moving feature with id `mFeatureId`.\n\nThe request body schema SHALL follows the [TemporalProperties object](https://docs.opengeospatial.org/is/19-045r3/19-045r3.html#tproperties) in the OGC MF-JSON.\n", + "tags": ["TemporalProperty"], "parameters": [ - { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId" # noqa - }, - { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/mFeatureId" # noqa - } + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/mFeatureId"} ], "requestBody": { "content": { "application/json": { - "schema": { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/schemas/temporalProperties-mfjson" # noqa - }, + "schema": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/schemas/temporalProperties-mfjson"}, "example": [ - { - "datetimes": [ - "2011-07-14T22:01:01.450Z", - "2011-07-14T23:01:01.450Z", - "2011-07-15T00:01:01.450Z" - ], - "length": { - "type": "Measure", - "form": "http://qudt.org/vocab/quantitykind/Length", # noqa - "values": [ - 1, - 2.4, - 1 - ], - 
"interpolation": "Linear" - }, - "discharge": { - "type": "Measure", - "form": "MQS", - "values": [ - 3, - 4, - 5 - ], - "interpolation": "Step" - } - }, - { - "datetimes": [ - "2011-07-14T22:01:01.450Z", - "2011-07-14T23:01:01.450Z" - ], - "camera": { - "type": "Image", - "values": [ - "http://www.opengis.net/spec/movingfeatures/json/1.0/prism/example/image1", # noqa - "iVBORw0KGgoAAAANSUhEU......" - ], - "interpolation": "Discrete" - }, - "labels": { - "type": "Text", - "values": [ - "car", - "human" - ], - "interpolation": "Discrete" - } - } + {"datetimes": ["2011-07-14T22:01:01.450Z","2011-07-14T23:01:01.450Z","2011-07-15T00:01:01.450Z"], + "length": {"type": "Measure","form": "http://qudt.org/vocab/quantitykind/Length","values": [1,2.4,1],"interpolation": "Linear"}, + "discharge": {"type": "Measure","form": "MQS","values": [3,4,5],"interpolation": "Step"}} ] } } }, "responses": { "201": { - "description": "Successful add more temporal property into a specified moving feature.\n", # noqa + "description": "Successful add more temporal property into a specified moving feature.\n", "headers": { "Locations": { - "description": "A list of URI of the newly added resources", # noqa - "schema": { - "type": "array", - "items": { - "type": "string" - }, - "example": [ - "https://data.example.org/collections/mfc-1/items/mf-1/tproperties/length", # noqa - "https://data.example.org/collections/mfc-1/items/mf-1/tproperties/discharge", # noqa - "https://data.example.org/collections/mfc-1/items/mf-1/tproperties/camera", # noqa - "https://data.example.org/collections/mfc-1/items/mf-1/tproperties/labels" # noqa - ] + "description": "A list of URI of the newly added resources", + "schema": {"type": "array","items": {"type": "string"} } } } }, - "404": { - "description": "- A collection with the specified id was not found.\n- Or a moving feature with the specified id was not found.\n" # noqa - }, - "500": { - "$ref": 
f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError" # noqa - } + "404": {"description": "- A collection with the specified id was not found.\n- Or a moving feature with the specified id was not found.\n"}, + "500": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError"} } } } - collections_collectionId_items_mFeatureId_tproperties_tPropertyName_path = '/collections/{collectionId}/items/{mFeatureId}/tproperties/{tPropertyName}' # noqa - paths[collections_collectionId_items_mFeatureId_tproperties_tPropertyName_path] = { # noqa + collections_collectionId_items_mFeatureId_tproperties_tPropertyName_path = '/collections/{collectionId}/items/{mFeatureId}/tproperties/{tPropertyName}' + paths[collections_collectionId_items_mFeatureId_tproperties_tPropertyName_path] = { "get": { "operationId": "retrieveTemporalProperty", "summary": "Retrieve a temporal property", - "description": "A user can retrieve only the temporal values with a specified name `tPropertyName` of temporal property.\n", # noqa - "tags": [ - "TemporalProperty" - ], + "description": "A user can retrieve only the temporal values with a specified name `tPropertyName` of temporal property.\n", + "tags": ["TemporalProperty"], "parameters": [ - { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId" # noqa - }, - { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/mFeatureId" # noqa - }, - { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/tPropertyName" # noqa - }, - { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/datetime" # noqa - }, - { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/leaf" # noqa - }, - { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/subtemporalvalue" # noqa - } + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/mFeatureId"}, + 
{"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/tPropertyName"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/datetime"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/leaf"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/subtemporalvalue"} ], "responses": { - "200": { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/TemporalProperty" # noqa - }, - "400": { - "description": "A query parameter was not validly used." # noqa - }, - "500": { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError" # noqa - } + "200": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/TemporalProperty"}, + "400": {"description": "A query parameter was not validly used."}, + "500": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError"} } }, "post": { "operationId": "insertTemporalPrimitiveValue", "summary": "Add temporal primitive value data", - "description": "A user SHOULD add more temporal primitive value data into a temporal property with id `tPropertyName`.\n", # noqa - "tags": [ - "TemporalProperty" - ], + "description": "A user SHOULD add more temporal primitive value data into a temporal property with id `tPropertyName`.\n", + "tags": ["TemporalProperty"], "parameters": [ - { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId" # noqa - }, - { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/mFeatureId" # noqa - }, - { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/tPropertyName" # noqa - } + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/mFeatureId"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/tPropertyName"} ], "requestBody": { "content": { "application/json": { - "schema": { - "$ref": 
f"{OPENAPI_YAML['movingfeature']}#/components/schemas/temporalPrimitiveValue" # noqa - }, + "schema": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/schemas/temporalPrimitiveValue"}, "example": { - "datetimes": [ - "2011-07-15T08:00:00Z", - "2011-07-15T08:00:01Z", - "2011-07-15T08:00:02Z" - ], - "values": [ - 0, - 20, - 50 - ], + "datetimes": ["2011-07-15T08:00:00Z","2011-07-15T08:00:01Z","2011-07-15T08:00:02Z"], + "values": [0,20,50], "interpolation": "Linear" } } @@ -3265,90 +2738,53 @@ def get_oas_30(cfg: dict, locale: str) -> tuple[list[dict[str, str]], dict[str, }, "responses": { "201": { - "description": "Successful add more temporal primitive value data into a specified temporal property.\n", # noqa + "description": "Successful add more temporal primitive value data into a specified temporal property.\n", "headers": { "Location": { "description": "A URI of the newly added resource", - "schema": { - "type": "string", - "example": "https://data.example.org/collections/mfc-1/items/mf-1/tproperties/tvalue/tpv-1" # noqa - } + "schema": {"type": "string"} } } }, - "404": { - "description": "- A collection with the specified id was not found.\n- Or a moving feature with the specified id was not found.\n- Or a temporal property with the specified id was not found.\n" # noqa - }, - "500": { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError" # noqa - } + "404": {"description": "- A collection with the specified id was not found.\n- Or a moving feature with the specified id was not found.\n- Or a temporal property with the specified id was not found.\n"}, + "500": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError"} } }, "delete": { "operationId": "deleteTemporalProperty", "summary": "Delete a specified temporal property", - "description": "The temporal property with id `tPropertyName` SHOULD be deleted.\n", # noqa - "tags": [ - "TemporalProperty" - ], + "description": "The temporal property with id 
`tPropertyName` SHOULD be deleted.\n", + "tags": ["TemporalProperty"], "parameters": [ - { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId" # noqa - }, - { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/mFeatureId" # noqa - }, - { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/tPropertyName" # noqa - } + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/mFeatureId"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/tPropertyName"} ], "responses": { - "204": { - "description": "Successfully deleted." - }, - "404": { - "description": "- A collection with the specified id was not found.\n- Or a moving feature with the specified id was not found.\n- Or a temporal property with the specified id was not found.\n" # noqa - }, - "500": { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError" # noqa - } + "204": {"description": "Successfully deleted."}, + "404": {"description": "- A collection with the specified id was not found.\n- Or a moving feature with the specified id was not found.\n- Or a temporal property with the specified id was not found.\n"}, + "500": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError"} } } } - collections_collectionId_items_mFeatureId_tproperties_tPropertyName_tValueId_path = '/collections/{collectionId}/items/{mFeatureId}/tproperties/{tPropertyName}/{tValueId}' # noqa - paths[collections_collectionId_items_mFeatureId_tproperties_tPropertyName_tValueId_path] = { # noqa + collections_collectionId_items_mFeatureId_tproperties_tPropertyName_tValueId_path = '/collections/{collectionId}/items/{mFeatureId}/tproperties/{tPropertyName}/{tValueId}' + paths[collections_collectionId_items_mFeatureId_tproperties_tPropertyName_tValueId_path] = { "delete": { "operationId": "deleteTemporalPrimitiveValue", "summary": 
"Delete a singe temporal primitive value", - "description": "The temporal primitive value with id `tValueId` SHOULD be deleted.\n", # noqa - "tags": [ - "TemporalProperty" - ], + "description": "The temporal primitive value with id `tValueId` SHOULD be deleted.\n", + "tags": ["TemporalProperty"], "parameters": [ - { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId" # noqa - }, - { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/mFeatureId" # noqa - }, - { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/tPropertyName" # noqa - }, - { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/tValueId" # noqa - } + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/mFeatureId"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/tPropertyName"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/tValueId"} ], "responses": { - "204": { - "description": "Successfully deleted." 
- }, - "404": { - "description": "- A collection with the specified id was not found.\n- Or a moving feature with the specified id was not found.\n- Or a temporal property with the specified id was not found.\n- Or a temporal primitive primitive with the specified id was not found.\n" # noqa - }, - "500": { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError" # noqa - } + "204": {"description": "Successfully deleted."}, + "404": {"description": "- A collection with the specified id was not found.\n- Or a moving feature with the specified id was not found.\n- Or a temporal property with the specified id was not found.\n- Or a temporal primitive primitive with the specified id was not found.\n"}, + "500": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError"} } } } diff --git a/pygeoapi/openapi.py b/pygeoapi/openapi.py index ed5d20674..8078490d9 100644 --- a/pygeoapi/openapi.py +++ b/pygeoapi/openapi.py @@ -273,17 +273,13 @@ def get_oas_30(cfg: dict, fail_on_invalid_collection: bool = True) -> dict: "operationId": "registerMetadata", "summary": "Register metadata about a collection of moving features", # noqa "description": "A user SHOULD register metadata about a collection of moving features into the system.\n", # noqa - "tags": [ - "MovingFeatureCollection" - ], + "tags": ["MovingFeatureCollection"], "requestBody": { "content": { "application/json": { - "schema": { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/schemas/collection-body" # noqa - }, + "schema": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/schemas/collection-body"}, "example": { - "title": "moving_feature_collection_sample", # noqa + "title": "moving_feature_collection_sample", "updateFrequency": 1000, "description": "example", "itemType": "movingfeature" @@ -297,16 +293,11 @@ def get_oas_30(cfg: dict, fail_on_invalid_collection: bool = True) -> dict: "headers": { "Location": { "description": "A URI of the newly added resource", # 
noqa - "schema": { - "type": "string", - "example": "https://data.example.org/collections/mfc-1" # noqa - } + "schema": {"type": "string"} } } }, - "500": { - "$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError" # noqa - } + "500": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError"} } } } diff --git a/pygeoapi/provider/postgresql_mobilitydb.py b/pygeoapi/provider/postgresql_mobilitydb.py index 54b460b43..07afe047d 100644 --- a/pygeoapi/provider/postgresql_mobilitydb.py +++ b/pygeoapi/provider/postgresql_mobilitydb.py @@ -8,17 +8,6 @@ from pygeoapi.util import format_datetime from pymeos_cffi import (tfloat_from_mfjson, ttext_from_mfjson, tgeompoint_from_mfjson) -# from mobilitydb.psycopg import register - - -# CREATE DATABASE mobilitydb -# WITH TEMPLATE = template0 -# ENCODING = 'UTF8'; -# ALTER DATABASE mobilitydb OWNER TO postgres; - -# create table: -# psql -U postgres -h 127.0.0.1 -p 5432 mobilitydb < -# tests/data/mf-api.sql class PostgresMobilityDB: host = 'mobilitydb' @@ -28,13 +17,6 @@ class PostgresMobilityDB: password = 'docker' connection = None - # host = '172.20.241.18' - # port = 5432 - # db = 'mobilitydb' - # user = 'postgres' - # password = 'postgres' - # connection = None - def __init__(self, datasource=None): """ PostgresMobilityDB Class constructor @@ -299,7 +281,7 @@ def get_feature(self, collection_id, mfeature_id): :returns: JSON MovingFeature """ with self.connection.cursor() as cur: - cur = self.connection.cursor() + # cur = self.connection.cursor() select_query = ( """select mfeature.collection_id, mfeature.mfeature_id, st_asgeojson(mfeature.mf_geometry) as mf_geometry, @@ -359,8 +341,7 @@ def get_temporalgeometries( datetime_restriction = "" if datetime != '' and datetime is not None: datetime_restriction = (""" and atTime(tgeometry_property, - tstzspan('[{0}]')) is not null """ - .format(datetime)) + tstzspan('[{0}]')) is not null """.format(datetime)) if leaf != '' and leaf is not None: 
tgeometry_property = ("""atTime(tgeometry_property, @@ -512,7 +493,6 @@ def get_temporalproperties_value( :param leaf: only features that have a temporal geometry and property that intersects the given date-time are selected [optional] - :param sub_temporal_value: only features with a temporal property intersecting the given time interval will return (default False) [optional] @@ -574,7 +554,7 @@ def post_collection(self, collection_property): Register metadata about a collection of moving features :param collection_property: metadata about a collection - title - human readable title of the collection + title - human-readable title of the collection updateFrequency - a time interval of sampling location description - any description itemType - indicator about the type of the items in the @@ -682,7 +662,7 @@ def post_temporalproperties( :param collection_id: local identifier of a collection :param mfeature_id: local identifier of a moving feature - :param temporalProperty: TemporalProperties object in the OGC MF-JSON + :param temporal_property: TemporalProperties object in the OGC MF-JSON :returns: TemporalProperty Name """ @@ -789,7 +769,7 @@ def put_collection(self, collection_id, collection_property): :param collection_id: local identifier of a collection :param collection_property: metadata about a collection - title - human readable title of the collection + title - human-readable title of the collection updateFrequency - a time interval of sampling location description - any description itemType - indicator about the type of the items in the @@ -1104,59 +1084,85 @@ def get_temporalvalue_group( and temp1.mfeature_id = temp3.mfeature_id """ .format(collection_id, mfeature_id, "{" + ", ".join(datetimes) + "}")) - print(select_query) + # print(select_query) cur.execute(select_query) result = cur.fetchall() if len(result) > 0: return result[0][2] return 1 - def get_velocity( - self, collection_id, mfeature_id, tgeometry_id, datetime=None): + def get_velocity(self, 
collection_id, mfeature_id, tgeometry_id, + datetime='', leaf='', sub_temporal_value=False): """ Get temporal property of velocity :param collection_id: local identifier of a collection :param mfeature_id: local identifier of a moving feature :param tgeometry_id: local identifier of a geometry - :param datetime: array of strings <date-time> (default None) + :param datetime: either a date-time or an interval(datestamp or extent) + :param leaf: array of strings <date-time> (default None) + only features that have a temporal geometry and property + that intersects the given date-time are selected [optional] + :param sub_temporal_value: boolean, only features with a temporal property + intersecting the given time interval + will return (default False) [optional] :returns: TemporalProperty of velocity """ - form = "MTS" name = "velocity" - with self.connection.cursor() as cur: - if datetime is None: - select_query = f"""SELECT speed(tgeog_property) AS speed - FROM tgeometry - WHERE collection_id = '{collection_id}' - and mfeature_id = '{mfeature_id}' - and tgeometry_id = '{tgeometry_id}'""" - else: + if (leaf == '' or leaf is None) and \ + (not sub_temporal_value or sub_temporal_value == "false"): + # no optional query parameters are used -> time-to-velocity curve returns + select_query = \ + f"""SELECT speed(tgeog_property) AS speed + FROM tgeometry + WHERE collection_id = '{collection_id}' + and mfeature_id = '{mfeature_id}' + and tgeometry_id = '{tgeometry_id}'""" + elif (leaf != '' or leaf is not None) and \ + (not sub_temporal_value or sub_temporal_value == "false"): + # only leaf query parameter is used + leaf_condition = "tstzset('{"+leaf+"}')" select_query = \ - f"""SELECT valueAtTimestamp(speed(tgeog_property), - '{datetime}') AS speed, interp(speed(tgeog_property)) - AS interp - FROM tgeometry - WHERE collection_id = '{collection_id}' - and mfeature_id = '{mfeature_id}' - and tgeometry_id = '{tgeometry_id}'""" + f"""SELECT 
atTime(speed(tgeog_property),{leaf_condition}) AS speed + FROM tgeometry + WHERE collection_id = '{collection_id}' + and mfeature_id = '{mfeature_id}' + and tgeometry_id = '{tgeometry_id}'""" + elif (leaf == '' or leaf is None) and \ + (sub_temporal_value or sub_temporal_value == "true"): + # only sub_temporal_value query parameter is used + select_query = \ + f"""SELECT atTime(speed(tgeog_property), tstzspan('[{datetime}]')) AS speed + FROM tgeometry + WHERE collection_id = '{collection_id}' + and mfeature_id = '{mfeature_id}' + and tgeometry_id = '{tgeometry_id}'""" + else: + print("Not valid query parameters") + cur.execute(select_query) result = cur.fetchall() - return self.to_tproperties(result, name, form, datetime) + return self.to_tproperties(result, name, form, leaf) - def get_distance( - self, collection_id, mfeature_id, tgeometry_id, datetime=None): + def get_distance(self, collection_id, mfeature_id, tgeometry_id, + datetime='', leaf='', sub_temporal_value=False): """ Get temporal property of distance :param collection_id: local identifier of a collection :param mfeature_id: local identifier of a moving feature :param tgeometry_id: local identifier of a geometry - :param datetime: array of strings <date-time> (default None) + :param datetime: either a date-time or an interval(datestamp or extent) + :param leaf: array of strings <date-time> (default None) + only features that have a temporal geometry and property + that intersects the given date-time are selected [optional] + :param sub_temporal_value: boolean, only features with a temporal property + intersecting the given time interval + will return (default False) [optional] :returns: TemporalProperty of distance """ @@ -1164,35 +1170,57 @@ def get_distance( form = "MTR" name = "distance" with self.connection.cursor() as cur: - if datetime is None: - select_query = f"""SELECT cumulativeLength(tgeog_property) - AS distance FROM tgeometry - WHERE collection_id = '{collection_id}' - and mfeature_id = 
'{mfeature_id}' - and tgeometry_id = '{tgeometry_id}'""" + if (leaf == '' or leaf is None) and \ + (not sub_temporal_value or sub_temporal_value == "false"): + # no optional query parameters are used -> time-to-velocity curve returns + select_query = \ + f"""SELECT cumulativeLength(tgeog_property) AS distance + FROM tgeometry + WHERE collection_id = '{collection_id}' + and mfeature_id = '{mfeature_id}' + and tgeometry_id = '{tgeometry_id}'""" + elif (leaf != '' or leaf is not None) and \ + (not sub_temporal_value or sub_temporal_value == "false"): + # only leaf query parameter is used + leaf_condition = "tstzset('{"+leaf+"}')" + select_query = \ + f"""SELECT atTime(cumulativeLength(tgeog_property),{leaf_condition}) AS distance + FROM tgeometry + WHERE collection_id = '{collection_id}' + and mfeature_id = '{mfeature_id}' + and tgeometry_id = '{tgeometry_id}'""" + elif (leaf == '' or leaf is None) and \ + (sub_temporal_value or sub_temporal_value == "true"): + # only sub_temporal_value query parameter is used + select_query = \ + f"""SELECT atTime(cumulativeLength(tgeog_property), tstzspan('[{datetime}]')) AS distance + FROM tgeometry + WHERE collection_id = '{collection_id}' + and mfeature_id = '{mfeature_id}' + and tgeometry_id = '{tgeometry_id}'""" else: - select_query = f"""SELECT - valueAtTimestamp(cumulativeLength(tgeog_property), - '{datetime}') AS distance, - interp(cumulativeLength(tgeog_property)) AS interp - FROM tgeometry - WHERE collection_id = '{collection_id}' - and mfeature_id = '{mfeature_id}' - and tgeometry_id = '{tgeometry_id}'""" + print("Not valid query parameters") + cur.execute(select_query) result = cur.fetchall() - return self.to_tproperties(result, name, form, datetime) + return self.to_tproperties(result, name, form, leaf) - def get_acceleration( - self, collection_id, mfeature_id, tgeometry_id, datetime=None): + def get_acceleration(self, collection_id, mfeature_id, tgeometry_id, + datetime='', leaf='', sub_temporal_value=False): """ Get 
temporal property of acceleration :param collection_id: local identifier of a collection :param mfeature_id: local identifier of a moving feature :param tgeometry_id: local identifier of a geometry - :param datetime: array of strings <date-time> (default None) + :param datetime: either a date-time or an interval(datestamp or extent) + :param leaf: array of strings <date-time> (default None) + only features that have a temporal geometry and property + that intersects the given date-time are selected [optional] + :param sub_temporal_value: boolean, only features with a temporal property + intersecting the given time interval + will return (default False) [optional] :returns: TemporalProperty of acceleration """ @@ -1204,10 +1232,12 @@ def get_acceleration( "valueSequence": [] } with self.connection.cursor() as cur: - select_query = f"""SELECT speed(tgeog_property) AS speed - FROM tgeometry WHERE collection_id = '{collection_id}' - and mfeature_id = '{mfeature_id}' - and tgeometry_id = '{tgeometry_id}'""" + select_query = \ + f"""SELECT speed(tgeog_property) AS speed + FROM tgeometry + WHERE collection_id = '{collection_id}' + and mfeature_id = '{mfeature_id}' + and tgeometry_id = '{tgeometry_id}'""" cur.execute(select_query) result = cur.fetchall() @@ -1217,17 +1247,14 @@ def get_acceleration( interpolation = each_row_converted.interpolation().to_string() each_time = [ - each_val.time().start_timestamp().strftime( - '%Y-%m-%dT%H:%M:%S.%fZ') + each_val.time().start_timestamp().strftime('%Y-%m-%dT%H:%M:%S.%fZ') for each_val in each_row_converted.instants()] if interpolation == "Step": each_values = [0 for each_val in each_row_converted.instants()] else: - each_values = [each_val.value() - for each_val in each_row_converted.instants()] + each_values = [each_val.value() for each_val in each_row_converted.instants()] - value_sequence = self.calculate_acceleration( - each_values, each_time, datetime) + value_sequence = self.calculate_acceleration(each_values, each_time, 
datetime) if value_sequence.get("values"): if datetime is not None: value_sequence["interpolation"] = "Discrete" @@ -1238,14 +1265,14 @@ def get_acceleration( tProperty["valueSequence"].append(value_sequence) return tProperty - def to_tproperties(self, results, name, form, datetime): + def to_tproperties(self, results, name, form, leaf): """ - Convert Temoral properties object + Convert Temporal properties object :param results: temporal property object of query :param name: temporal property name :param form: a unit of measurement - :param datetime: array of strings <date-time> + :param leaf: array of strings <date-time> :returns: TemporalProperty object """ @@ -1255,35 +1282,30 @@ def to_tproperties(self, results, name, form, datetime): "form": form, "valueSequence": [] } + pymeos_initialize() for each_row in results: - if datetime is None: - each_row_converted = None - if name == "velocity": - each_row_converted = TFloatSeqSet(each_row[0]) - else: - each_row_converted = TFloatSeq(each_row[0]) - each_values = [each_val.value() - for each_val in each_row_converted.instants()] - each_time = [ - each_val.time().start_timestamp().strftime( - '%Y-%m-%dT%H:%M:%S.%fZ') - for each_val in each_row_converted.instants()] - interpolation = each_row_converted.interpolation().to_string() - value_sequence = { - "datetimes": each_time, - "values": each_values, - "interpolation": interpolation - } + each_row_converted = None + if name == "velocity": + each_row_converted = TFloatSeqSet(each_row[0]) else: - value_sequence = { - "datetimes": [format_datetime(datetime)], - "values": [each_row[0]], - "interpolation": "Discrete" - } + each_row_converted = TFloatSeq(each_row[0]) + + each_values = [each_val.value() for each_val in each_row_converted.instants()] + each_time = [ + each_val.time().start_timestamp().strftime('%Y-%m-%dT%H:%M:%S.%fZ') + for each_val in each_row_converted.instants()] + interpolation = each_row_converted.interpolation().to_string() + + value_sequence = { + 
"datetimes": each_time, + "values": each_values, + "interpolation": interpolation + } tProperty["valueSequence"].append(value_sequence) return tProperty + def calculate_acceleration(self, velocities, times, chk_dtime): """ Calculate acceleration From e19e6b3874a177fc4f0870b766ada585e9619639 Mon Sep 17 00:00:00 2001 From: TaehoonK <kim.taehoon@aist.go.jp> Date: Sat, 14 Dec 2024 18:10:15 +0700 Subject: [PATCH 14/14] docker-compose.yml configuration update --- docker-compose.yml | 5 +- mf-api.config.yml | 513 +++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 517 insertions(+), 1 deletion(-) create mode 100644 mf-api.config.yml diff --git a/docker-compose.yml b/docker-compose.yml index e0c8dd280..717e431e5 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,12 +1,15 @@ version: '3.0' services: mf-api: + container_name: pygeoapi-mf-api build: context: . dockerfile: Dockerfile image: pygeoapi-mf-api + volumes: + - ./mf-api.config.yml:/pygeoapi/local.config.yml ports: - - 5000:80 + - 5050:80 mobilitydb: container_name: mobilitydb diff --git a/mf-api.config.yml b/mf-api.config.yml new file mode 100644 index 000000000..198704f72 --- /dev/null +++ b/mf-api.config.yml @@ -0,0 +1,513 @@ +# ================================================================= +# +# Authors: Just van den Broecke <justb4@gmail.com> +# Tom Kralidis <tomkralidis@gmail.com> +# Francesco Bartoli <xbartolone@gmail.com> +# +# Copyright (c) 2019 Just van den Broecke +# Copyright (c) 2020 Tom Kralidis +# Copyright (c) 2020 Francesco Bartoli +# +# Permission is hereby granted, free of charge, to any person +# obtaining a copy of this software and associated documentation +# files (the "Software"), to deal in the Software without +# restriction, including without limitation the rights to use, +# copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the +# Software is furnished to do so, subject to the following +# 
conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +# OTHER DEALINGS IN THE SOFTWARE. +# +# ================================================================= + + +# Default config for base Docker Image, override via DockerVolume +# mapping with your own config. +server: + bind: + host: 0.0.0.0 + port: 80 + url: http://localhost:5050 + mimetype: application/json; charset=UTF-8 + encoding: utf-8 + gzip: false + language: en-US + cors: true + pretty_print: true + limit: 10 + # templates: /path/to/templates + map: + url: https://tile.openstreetmap.org/{z}/{x}/{y}.png + attribution: '© <a href="https://openstreetmap.org/copyright">OpenStreetMap contributors</a>' + ogc_schemas_location: /schemas.opengis.net + +logging: + level: ERROR + #logfile: /tmp/pygeoapi.log + +metadata: + identification: + title: pygeoapi Demo instance - running latest GitHub version + description: pygeoapi provides an API to geospatial data + keywords: + - geospatial + - data + - api + keywords_type: theme + terms_of_service: https://creativecommons.org/licenses/by/4.0/ + url: https://github.com/geopython/pygeoapi + license: + name: CC-BY 4.0 license + url: https://creativecommons.org/licenses/by/4.0/ + provider: + name: pygeoapi Development Team + url: https://pygeoapi.io + contact: + name: Kralidis, Tom + position: Lead Dev + address: Mailing Address + city: City + stateorprovince: Administrative Area + postalcode: Zip or Postal Code + country: Canada + 
phone: +xx-xxx-xxx-xxxx + fax: +xx-xxx-xxx-xxxx + email: you@example.org + url: Contact URL + hours: Hours of Service + instructions: During hours of service. Off on weekends. + role: pointOfContact + +resources: +# obs: +# type: collection +# title: Observations +# description: My cool observations +# keywords: +# - observations +# - monitoring +# linked-data: +# context: +# - datetime: https://schema.org/DateTime +# - vocab: https://example.com/vocab# +# stn_id: "vocab:stn_id" +# value: "vocab:value" +# links: +# - type: text/csv +# rel: canonical +# title: data +# href: https://github.com/mapserver/mapserver/blob/branch-7-0/msautotest/wxs/data/obs.csv +# hreflang: en-US +# - type: text/csv +# rel: alternate +# title: data +# href: https://raw.githubusercontent.com/mapserver/mapserver/branch-7-0/msautotest/wxs/data/obs.csv +# hreflang: en-US +# extents: +# spatial: +# bbox: [-180,-90,180,90] +# crs: http://www.opengis.net/def/crs/OGC/1.3/CRS84 +# temporal: +# begin: 2000-10-30T18:24:39Z +# end: 2007-10-30T08:57:29Z +# providers: +# - type: feature +# name: CSV +# data: tests/data/obs.csv +# id_field: id +# geometry: +# x_field: long +# y_field: lat +# +# lakes: +# type: collection +# title: Large Lakes +# description: lakes of the world, public domain +# keywords: +# - lakes +# links: +# - type: text/html +# rel: canonical +# title: information +# href: http://www.naturalearthdata.com/ +# hreflang: en-US +# extents: +# spatial: +# bbox: [-180,-90,180,90] +# crs: http://www.opengis.net/def/crs/OGC/1.3/CRS84 +# temporal: +# begin: 2011-11-11 +# end: null # or empty (either means open ended) +# providers: +# - type: feature +# name: GeoJSON +# data: tests/data/ne_110m_lakes.geojson +# id_field: id +# +# countries: +# type: collection +# title: Countries in the world (SpatialLite Provider) +# description: Countries of the world (SpatialLite) +# keywords: +# - countries +# - natural eart +# links: +# - type: text/html +# rel: canonical +# title: information +# href: 
http://www.naturalearthdata.com/ +# hreflang: en-US +# extents: +# spatial: +# bbox: [-180, -90, 180, 90] +# crs: http://www.opengis.net/def/crs/OGC/1.3/CRS84 +# temporal: +# begin: +# end: null # or empty +# providers: +# - type: feature +# name: SQLiteGPKG +# data: tests/data/ne_110m_admin_0_countries.sqlite +# id_field: ogc_fid +# table: ne_110m_admin_0_countries +# +# dutch_georef_stations: +# type: collection +# title: Dutch Georef Stations via OGR WFS +# description: Locations of RD/GNSS-reference stations from Dutch Kadaster PDOK a.k.a RDInfo. Uses MapServer WFS v2 backend via OGRProvider. +# keywords: +# - Netherlands +# - GNSS +# - Surveying +# - Holland +# - RD +# links: +# - type: text/html +# rel: canonical +# title: information +# href: http://www.nationaalgeoregister.nl/geonetwork/srv/dut/catalog.search#/metadata/3ebe56dc-5f09-4fb3-b224-55c2db4ca2fd?tab=general +# hreflang: nl-NL +# extents: +# spatial: +# bbox: [3.37,50.75,7.21,53.47] +# crs: http://www.opengis.net/def/crs/OGC/1.3/CRS84 +# temporal: +# begin: +# end: null # or empty +# providers: +# - type: feature +# name: OGR +# data: +# source_type: WFS +# source: WFS:https://service.pdok.nl/kadaster/rdinfo/wfs/v1_0? +## source_srs: EPSG:28992 +## target_srs: EPSG:4326 +# source_capabilities: +# paging: True +# +# source_options: +# # OGR_WFS_VERSION: 1.1.0 +# OGR_WFS_LOAD_MULTIPLE_LAYER_DEFN: NO +# +# gdal_ogr_options: +# EMPTY_AS_NULL: NO +# GDAL_CACHEMAX: 64 +# # GDAL_HTTP_PROXY: (optional proxy) +# # GDAL_PROXY_AUTH: (optional auth for remote WFS) +# CPL_DEBUG: NO +# +# crs: +# - http://www.opengis.net/def/crs/EPSG/0/4258 +# - http://www.opengis.net/def/crs/EPSG/0/28992 +# storage_crs: http://www.opengis.net/def/crs/EPSG/0/28992 +# id_field: gml_id +# layer: rdinfo:stations +# +# utah_city_locations: +# type: collection +# title: Cities in Utah via OGR WFS +# description: Data from the state of Utah. Standard demo dataset from the deegree WFS server that is used as backend WFS. 
+# keywords: +# - USA +# - deegree +# - Utah +# - Demo data +# links: +# - type: text/html +# rel: canonical +# title: information +# href: http://download.deegree.org/documentation/3.3.20/html/lightly.html#example-workspace-2-utah-webmapping-services +# hreflang: en-US +# extents: +# spatial: +# bbox: [-112.108489, 39.854053, -111.028628, 40.460098] +# crs: http://www.opengis.net/def/crs/OGC/1.3/CRS84 +# temporal: +# begin: +# end: null # or empty +# providers: +# - type: feature +# name: OGR +# data: +# source_type: WFS +# source: WFS:http://demo.deegree.org/utah-workspace/services/wfs?TYPENAME=app:SGID93_LOCATION_UDOTMap_CityLocations +## source_srs: EPSG:26912 +## target_srs: EPSG:4326 +# source_capabilities: +# paging: True +# +# source_options: +# # OGR_WFS_VERSION: 2.0.0 +# OGR_WFS_LOAD_MULTIPLE_LAYER_DEFN: NO +# +# gdal_ogr_options: +# EMPTY_AS_NULL: NO +# GDAL_CACHEMAX: 64 +# # GDAL_HTTP_PROXY: (optional proxy) +# # GDAL_PROXY_AUTH: (optional auth for remote WFS) +# CPL_DEBUG: NO +# +# crs: +# - http://www.opengis.net/def/crs/EPSG/0/4258 +# - http://www.opengis.net/def/crs/EPSG/0/26912 +# storage_crs: http://www.opengis.net/def/crs/EPSG/0/26912 +# id_field: NAME +# layer: app:SGID93_LOCATION_UDOTMap_CityLocations +# +# unesco_pois_italy: +# type: collection +# title: Unesco POIs in Italy via OGR WFS +# description: Unesco Points of Interest in Italy. Using GeoSolutions GeoServer WFS demo-server as backend WFS. 
+# keywords: +# - Italy +# - Unesco +# - Demo +# links: +# - type: text/html +# rel: canonical +# title: information +# href: https://mapstore2.geo-solutions.it/mapstore/#/dashboard/5593 +# hreflang: en-US +# extents: +# spatial: +# bbox: [5.0,36.0,20.0,46.0] +# crs: http://www.opengis.net/def/crs/OGC/1.3/CRS84 +# temporal: +# begin: +# end: null # or empty +# providers: +# - type: feature +# name: OGR +# data: +# source_type: WFS +# source: WFS:https://gs-stable.geosolutionsgroup.com/geoserver/wfs +## source_srs: EPSG:32632 +## target_srs: EPSG:4326 +# source_capabilities: +# paging: True +# +# source_options: +# # OGR_WFS_VERSION: 1.1.0 +# OGR_WFS_LOAD_MULTIPLE_LAYER_DEFN: NO +# +# gdal_ogr_options: +# EMPTY_AS_NULL: NO +# GDAL_CACHEMAX: 64 +# # GDAL_HTTP_PROXY: (optional proxy) +# # GDAL_PROXY_AUTH: (optional auth for remote WFS) +# CPL_DEBUG: NO +# +# crs: +# - http://www.opengis.net/def/crs/EPSG/0/4258 +# - http://www.opengis.net/def/crs/EPSG/0/32632 +# storage_crs: http://www.opengis.net/def/crs/EPSG/0/32632 +# id_field: gml_id +# layer: unesco:Unesco_point +# +# ogr_gpkg_poi: +# type: collection +# title: Portuguese Points of Interest via OGR GPKG +# description: Portuguese Points of Interest obtained from OpenStreetMap. Dataset includes Madeira and Azores islands. Uses GeoPackage backend via OGR provider. 
+# keywords: +# - Portugal +# - POI +# - Point of Interest +# - Madeira +# - Azores +# - OSM +# - Open Street Map +# - NaturaGIS +# links: +# - type: text/html +# rel: canonical +# title: information +# href: https://wiki.openstreetmap.org/wiki/Points_of_interest/ +# hreflang: en-US +# extents: +# spatial: +# bbox: [-31.2687, 32.5898, -6.18992, 42.152] +# crs: http://www.opengis.net/def/crs/OGC/1.3/CRS84 +# temporal: +# begin: +# end: null # or empty +# providers: +# - type: feature +# name: OGR +# data: +# source_type: GPKG +# source: tests/data/poi_portugal.gpkg +## source_srs: EPSG:4326 +## target_srs: EPSG:4326 +# source_capabilities: +# paging: True +# +# gdal_ogr_options: +# EMPTY_AS_NULL: NO +# GDAL_CACHEMAX: 64 +# # GDAL_HTTP_PROXY: (optional proxy) +# # GDAL_PROXY_AUTH: (optional auth for remote WFS) +# CPL_DEBUG: NO +# +# id_field: osm_id +# layer: poi_portugal +# +# ogr_geojson_lakes: +# type: collection +# title: Large Lakes OGR GeoJSON Driver +# description: lakes of the world, public domain +# keywords: +# - lakes +# links: +# - type: text/html +# rel: canonical +# title: information +# href: http://www.naturalearthdata.com/ +# hreflang: en-US +# extents: +# spatial: +# bbox: [-180, -90, 180, 90] +# crs: http://www.opengis.net/def/crs/OGC/1.3/CRS84 +# temporal: +# begin: 2011-11-11 +# end: null # or empty +# providers: +# - type: feature +# name: OGR +# data: +# source_type: GeoJSON +# source: tests/data/ne_110m_lakes.geojson +## source_srs: EPSG:4326 +## target_srs: EPSG:4326 +# source_capabilities: +# paging: True +# +# gdal_ogr_options: +# EMPTY_AS_NULL: NO +# GDAL_CACHEMAX: 64 +# # GDAL_HTTP_PROXY: (optional proxy) +# # GDAL_PROXY_AUTH: (optional auth for remote WFS) +# CPL_DEBUG: NO +# +# id_field: id +# layer: ne_110m_lakes +# +# ogr_addresses_sqlite: +# type: collection +# title: Dutch addresses (subset Otterlo). OGR SQLite Driver +# description: Dutch addresses subset. 
+# keywords: +# - Netherlands +# - addresses +# - INSPIRE +# links: +# - type: text/html +# rel: canonical +# title: information +# href: http://www.nationaalgeoregister.nl/geonetwork/srv/dut/catalog.search#/metadata/4074b3c3-ca85-45ad-bc0d-b5fca8540z0b +# hreflang: nl-NL +# extents: +# spatial: +# bbox: [3.37,50.75,7.21,53.47] +# crs: http://www.opengis.net/def/crs/OGC/1.3/CRS84 +# temporal: +# begin: +# end: null # or empty +# providers: +# - type: feature +# name: OGR +# data: +# source_type: SQLite +# # source: tests/data/ne_110m_admin_0_countries.sqlite +# source: tests/data/dutch_addresses_4326.sqlite +## source_srs: EPSG:4326 +## target_srs: EPSG:4326 +# source_capabilities: +# paging: True +# +# gdal_ogr_options: +# EMPTY_AS_NULL: NO +# GDAL_CACHEMAX: 64 +# # GDAL_HTTP_PROXY: (optional proxy) +# # GDAL_PROXY_AUTH: (optional auth for remote WFS) +# CPL_DEBUG: NO +# +# id_field: id +# layer: ogrgeojson +# +# ogr_addresses_gpkg: +# type: collection +# title: Dutch addresses (subset Otterlo). OGR GeoPackage Driver +# description: Dutch addresses subset. +# keywords: +# - Netherlands +# - addresses +# - INSPIRE +# links: +# - type: text/html +# rel: canonical +# title: information +# href: http://www.nationaalgeoregister.nl/geonetwork/srv/dut/catalog.search#/metadata/4074b3c3-ca85-45ad-bc0d-b5fca8540z0b +# hreflang: nl-NL +# extents: +# spatial: +# bbox: [3.37,50.75,7.21,53.47] +# crs: http://www.opengis.net/def/crs/OGC/1.3/CRS84 +# temporal: +# begin: +# end: null # or empty +# providers: +# - type: feature +# name: OGR +# data: +# source_type: GPKG +# source: tests/data/dutch_addresses_4326.gpkg +## source_srs: EPSG:4326 +## target_srs: EPSG:4326 +# source_capabilities: +# paging: True +# +# gdal_ogr_options: +# EMPTY_AS_NULL: NO +# GDAL_CACHEMAX: 64 +# # GDAL_HTTP_PROXY: (optional proxy) +# # GDAL_PROXY_AUTH: (optional auth for remote WFS) +# CPL_DEBUG: NO +# +# id_field: id +# layer: OGRGeoJSON +# + hello-world: + type: process + processor: + name: HelloWorld