diff --git a/Dockerfile b/Dockerfile index 04f1a20db..043aa30b0 100644 --- a/Dockerfile +++ b/Dockerfile @@ -125,11 +125,16 @@ RUN \ && apt autoremove -y \ && rm -rf /var/lib/apt/lists/* -ADD requirements-docker.txt requirements-admin.txt /pygeoapi/ +ADD requirements-docker.txt requirements-admin.txt requirements-provider.txt /pygeoapi/ + # Install remaining pygeoapi deps RUN python3 -m pip install --no-cache-dir -r requirements-docker.txt \ - && python3 -m pip install --no-cache-dir -r requirements-admin.txt + && python3 -m pip install --no-cache-dir -r requirements-admin.txt \ + && python3 -m pip install --no-cache-dir -r requirements-provider.txt +# If execute pytest +ADD requirements-dev.txt /pygeoapi/ +RUN python3 -m pip install --no-cache-dir -r requirements-dev.txt ADD . /pygeoapi @@ -142,4 +147,3 @@ RUN \ && cp /pygeoapi/docker/entrypoint.sh /entrypoint.sh ENTRYPOINT ["/entrypoint.sh"] - diff --git a/DockerfileMobiitydb b/DockerfileMobiitydb new file mode 100644 index 000000000..42040e39f --- /dev/null +++ b/DockerfileMobiitydb @@ -0,0 +1,54 @@ +FROM postgis/postgis:17-3.5 + +# Configuration Parameters +LABEL maintainer="MobilityDB Project - https://github.com/MobilityDB/MobilityDB" +ENV MOBILITYDB_VERSION 1.2.0 +ENV POSTGRES_DB=mobilitydb +ENV POSTGRES_USER=docker +ENV POSTGRES_PASSWORD=docker + +# Fix the Release file expired problem +RUN echo "Acquire::Check-Valid-Until \"false\";\nAcquire::Check-Date \"false\";" | cat > /etc/apt/apt.conf.d/10no--check-valid-until + + +# Install Prerequisites +RUN apt-get update \ + && apt-get install -y \ + build-essential \ + cmake \ + git \ + libproj-dev \ + g++ \ + wget \ + autoconf \ + autotools-dev \ + libgeos-dev \ + libpq-dev \ + libproj-dev \ + libjson-c-dev \ + protobuf-c-compiler \ + xsltproc \ + libgsl-dev \ + libgslcblas0 \ + postgresql-server-dev-${PG_MAJOR} \ + && rm -rf /var/lib/apt/lists/* + +# Install MobilityDB +RUN wget -O MobilityDB.tar.gz 
"https://github.com/MobilityDB/MobilityDB/archive/v${MOBILITYDB_VERSION}.tar.gz" \ + && mkdir -p /usr/local/src/MobilityDB \ + && tar \ + --extract \ + --file MobilityDB.tar.gz \ + --directory /usr/local/src/MobilityDB \ + --strip-components 1 \ + && rm MobilityDB.tar.gz +RUN mkdir /usr/local/src/MobilityDB/build +RUN cd /usr/local/src/MobilityDB/build && \ + cmake .. && \ + make -j$(nproc) && \ + make install + +RUN rm /docker-entrypoint-initdb.d/10_postgis.sh +# Create mf-api table +COPY /docker/initdb-mobilitydb.sh /docker-entrypoint-initdb.d/ +RUN chmod +x /docker-entrypoint-initdb.d/initdb-mobilitydb.sh \ No newline at end of file diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 000000000..717e431e5 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,26 @@ +version: '3.0' +services: + mf-api: + container_name: pygeoapi-mf-api + build: + context: . + dockerfile: Dockerfile + image: pygeoapi-mf-api + volumes: + - ./mf-api.config.yml:/pygeoapi/local.config.yml + ports: + - 5050:80 + + mobilitydb: + container_name: mobilitydb + ports: + - 25432:5432 + environment: + - POSTGRES_DB=mobilitydb + - POSTGRES_USER=docker + - POSTGRES_PASSWORD=docker + build: + context: . 
+ dockerfile: DockerfileMobiitydb + image: pygeoapi-mf-api-mobilitydb + restart: on-failure \ No newline at end of file diff --git a/docker/initdb-mobilitydb.sh b/docker/initdb-mobilitydb.sh new file mode 100644 index 000000000..fd829e1b1 --- /dev/null +++ b/docker/initdb-mobilitydb.sh @@ -0,0 +1,62 @@ +#!/bin/bash + +echo "shared_preload_libraries = 'postgis-3.so'" >> $PGDATA/postgresql.conf + +set -e + +# Create the 'mobilitydb' extension in the mobilitydb database +echo "Loading MobilityDB extension into mobilitydb" +psql --user="$POSTGRES_USER" --dbname="mobilitydb" <<- 'EOSQL' + CREATE EXTENSION IF NOT EXISTS PostGIS; + CREATE EXTENSION IF NOT EXISTS mobilitydb CASCADE; + CREATE EXTENSION IF NOT EXISTS "uuid-ossp"; + + -- Table collection + CREATE TABLE public.collection ( + collection_id uuid NOT NULL DEFAULT uuid_generate_v4(), + collection_property jsonb NULL, + PRIMARY KEY (collection_id) + ); + -- Table MovingFeature + CREATE TABLE public.mfeature ( + collection_id uuid NOT NULL, + mfeature_id uuid NOT NULL DEFAULT uuid_generate_v4(), + mf_geometry geometry NULL, + mf_property jsonb NULL, + lifespan tstzspan NULL, + PRIMARY KEY (collection_id, mfeature_id), + FOREIGN KEY (collection_id) REFERENCES collection(collection_id) + ); + -- Table TemporalGeometry + CREATE TABLE public.tgeometry ( + collection_id uuid NOT NULL, + mfeature_id uuid NOT NULL, + tgeometry_id uuid NOT NULL DEFAULT uuid_generate_v4(), + tgeometry_property tgeompoint NULL, + tgeog_property tgeompoint NULL, + PRIMARY KEY (collection_id, mfeature_id, tgeometry_id), + FOREIGN KEY (collection_id, mfeature_id) REFERENCES mfeature(collection_id, mfeature_id) + ); + -- Table TemporalProperty + CREATE TABLE public.tproperties ( + collection_id uuid NOT NULL, + mfeature_id uuid NOT NULL, + tproperties_name text NOT NULL, + tproperty jsonb NULL, + PRIMARY KEY (collection_id, mfeature_id, tproperties_name), + FOREIGN KEY (collection_id, mfeature_id) REFERENCES mfeature(collection_id, mfeature_id) + 
); + + -- Table TemporalPropertyValue + CREATE TABLE public.tvalue ( + collection_id uuid NOT NULL, + mfeature_id uuid NOT NULL, + tproperties_name text NOT NULL, + tvalue_id uuid NOT NULL DEFAULT uuid_generate_v4(), + datetime_group int4 NOT NULL, + pvalue_float tfloat NULL, + pvalue_text ttext NULL, + PRIMARY KEY (collection_id, mfeature_id, tproperties_name, tvalue_id), + FOREIGN KEY (collection_id, mfeature_id, tproperties_name) REFERENCES tproperties(collection_id, mfeature_id, tproperties_name) + ); +EOSQL diff --git a/docs/source/data-publishing/index.rst b/docs/source/data-publishing/index.rst index 78680c7f2..c92c7258f 100644 --- a/docs/source/data-publishing/index.rst +++ b/docs/source/data-publishing/index.rst @@ -26,6 +26,7 @@ return back data to the pygeoapi API framework in a plug and play fashion. ogcapi-records ogcapi-edr stac + ogcapi-mfapi .. seealso:: diff --git a/docs/source/data-publishing/ogcapi-mfapi.rst b/docs/source/data-publishing/ogcapi-mfapi.rst new file mode 100644 index 000000000..f459d4fa3 --- /dev/null +++ b/docs/source/data-publishing/ogcapi-mfapi.rst @@ -0,0 +1,125 @@ +.. _ogcapi-mfapi: + +Publishing data to OGC API - MF-API +===================================== + +`OGC API - MF-API`_ provides provides a uniform way to access, communicate, and +anage data about moving features across different applications, data providers, +and data consumers. + +To add moving features data to pygeoapi for standard interfaces, +which is defined in the OGC API - MovingFeatures - Part 1:Core. +you can use the dataset example in `Building Blocks specified in OGC API - Moving Features - Part 1 Core (1.0.0)`_ +as a baseline and modify accordingly. + +Configuration +------------- + +In order to register data for Moving features, the DB must be created and the related tables must be initially set up. + + +PostgreSQL +^^^^^^^^^^ +.. note:: + Requires Python packages pymeos + +Must have PostGIS installed and uuid-ossp + +.. 
code-block:: yaml + + server: + manager: + name: PostgreSQL + connection: + host: localhost + port: 5432 + database: mobilitydb + user: postgres + password: ${POSTGRESQL_PASSWORD:-postgres} + +.. note:: + To run the process, create a table with `DDL `_ + + +.. code-block:: sh + + psql -U postgres -h 127.0.0.1 -p 5432 mobilitydb < tests/data/mf-api.sql + + +Processing examples +------------------- + +.. note:: + `Here `_ is the sample data specified by the -d option of the curl command. + +.. code-block:: sh + + # Register metadata about a collection of moving features. + curl -X POST http://localhost:5000/collections \ + -H "Content-Type: application/json" \ + -d "{\"title\": \"moving_feature_collection_sample\", + \"updateFrequency\": 1000, + \"description\": \"example\", + \"itemType\": \"movingfeature\" + }" + + # Retrieve catalogs of a moving features collection. + curl http://localhost:5000/collections + + + # Insert a set of moving features or a moving feature into a collection with id {collectionId}. + curl -X POST http://localhost:5000/collections/{collectionId}/items \ + -H "Content-Type: application/json" \ + -d @mfapi_moving_feature.json + + # Access a static data of a moving feature with id {mFeatureId}. + curl http://localhost:5000/collections/{collectionId}/items/{mFeatureId} + + # Add more movement data into a moving feature with id {mFeatureId}. 
+ curl -X POST http://localhost:5000/collections/{collectionId}/items/{mFeatureId}/tgsequence \ + -H "Content-Type: application/json" \ + -d @mfapi_temporal_geometry.json + + # Retrieve the movement data of the single moving feature + curl http://localhost:5000/collections/{collectionId}/items/{mFeatureId}/tgsequence + + # Get a time-to-(distance,velocity,acceleration) curve of a temporal primitive geometry + curl http://localhost:5000/collections/{collectionId}/items/{mFeatureId}/tgsequence/{tGeometryId}/distance + curl http://localhost:5000/collections/{collectionId}/items/{mFeatureId}/tgsequence/{tGeometryId}/velocity + curl http://localhost:5000/collections/{collectionId}/items/{mFeatureId}/tgsequence/{tGeometryId}/acceleration + + # Add new temporal property data into a moving feature with id {mFeatureId}. + curl -X POST http://localhost:5000/collections/{collectionId}/items/{mFeatureId}/tproperties \ + -H "Content-Type: application/json" \ + -d @mfapi_temporal_properties.json + + # Retrieve a set of the temporal property data + curl http://localhost:5000/collections/{collectionId}/items/{mFeatureId}/tproperties + + # Add temporal primitive value data. 
+ curl -X POST http://localhost:5000/collections/{collectionId}/items/{mFeatureId}/tproperties/{tPropertyName} \ + -H "Content-Type: application/json" \ + -d @mfapi_temporal_property_value_data.json + + # Retrieve a set of the temporal property data + curl http://localhost:5000/collections/{collectionId}/items/{mFeatureId}/tproperties/{tPropertyName} + + # Delete a singe temporal primitive value + curl -X DELETE http://localhost:5000/collections/{collectionId}/items/{mFeatureId}/tproperties/{tPropertyName}/{tValueId} + + # Delete a specified temporal property + curl -X DELETE http://localhost:5000/collections/{collectionId}/items/{mFeatureId}/tproperties/{tPropertyName} + + # Delete a singe temporal primitive geometry + curl -X DELETE http://localhost:5000/collections/{collectionId}/items/{mFeatureId}/tgsequence/{tGeometryId} + + # Delete a single moving feature + curl -X DELETE http://localhost:5000/collections/{collectionId}/items/{mFeatureId} + + # Delete the collection + curl -X DELETE http://localhost:5000/collections/{collectionId} + + +.. _`OGC API - MF-API`: https://github.com/aistairc/pygeoapi-mf-api +.. _`Building Blocks specified in OGC API - Moving Features - Part 1 Core (1.0.0)`: https://developer.ogc.org/api/movingfeatures/index.html#tag/MovingFeatureCollection/operation/registerMetadata +.. _`see website`: https://mobilitydb.com/ \ No newline at end of file diff --git a/docs/source/tour.rst b/docs/source/tour.rst index 4fd4e51da..7fb1a30d6 100644 --- a/docs/source/tour.rst +++ b/docs/source/tour.rst @@ -253,3 +253,67 @@ discover what is supported by the server. .. _`Toronto, Ontario, Canada`: https://en.wikipedia.org/wiki/Toronto .. _`Swagger`: https://en.wikipedia.org/wiki/Swagger_(software) .. _`curl`: https://curl.se + + +MF-API Tour +----------- +The OGC API - Moving Features Standard is an extension of the OGC API - Common and the OGC API - Features Standards. +MovingFeatures – Part 1: Core is described in the `here `_. 
+ + +MovingFeatures Collection Catalog +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +http://localhost:5000/collections + +Retrieve catalogs of a moving features collection. + +MovingFeatures +^^^^^^^^^^^^^^ +http://localhost:5000/{collectionId}/items + +Retrieve the moving feature collection to access the static information of the moving feature by simple filtering and a limit. + +MovingFeature +""""""""""""" +http://localhost:5000/{collectionId}/items/{mf_id} + + +Access the static data of the moving feature with id {mFeatureId}. +The static data of a moving feature is not included temporal geometries and temporal properties. + +TemporalGeometrySequence +"""""""""""""""""""""""" +http://localhost:5000/{collectionId}/items/{mf_id}/tgsequence + +Retrieve the movement data of the single moving feature with id {mFeatureId}. + +TemporalGeometryQuery +""""""""""""""""""""" +http://localhost:5000/{collectionId}/items/{mf_id}/tgsequence/{tGeometryId} + +Get a time-to-distance curve of a temporal primitive geometry with id {tGeometryId}. + +TemporalProperties +"""""""""""""""""" +http://localhost:5000/{collectionId}/items/{mf_id}/tproperties + +Retrieve the static information of the temporal property data that included a single moving feature with id {mFeatureId}. +The static data of a temporal property is not included temporal values (property values). + +.. seealso:: + :ref:`ogcapi-mfapi` for more OGC API - MF-API request examples. + +Transactions +^^^^^^^^^^^^ +Register metadata about a collection of moving features. (using `curl`_): + +.. 
code-block:: sh + + curl -X POST http://localhost:5000/collections \ + -H "Content-Type: application/json" \ + -d "{\"title\": \"moving_feature_collection_sample\", + \"updateFrequency\": 1000, + \"description\": \"example\", + \"itemType\": \"movingfeature\" + }" + diff --git a/docs/source/transactions.rst b/docs/source/transactions.rst index 4c6327174..e1096df41 100644 --- a/docs/source/transactions.rst +++ b/docs/source/transactions.rst @@ -6,9 +6,13 @@ Transactions pygeoapi supports the `OGC API - Features - Part 4: Create, Replace, Update and Delete`_ draft specification, allowing for transactional capabilities against feature and record data. +Furthermore, pygeoapi supports the `OGC API - Moving Features - Part 1: Core`_ international standard, allowing +for transactional capabilities against moving features. + To enable transactions in pygeoapi, a given resource provider needs to be editable (via the configuration resource provider ``editable: true`` property). Note that the feature or record provider MUST support create/update/delete. See the :ref:`ogcapi-features` and :ref:`ogcapi-records` documentation for transaction support status of pygeoapi backends. +For MF-API transactions, please refer :ref:`ogcapi-mfapi` Access control ^^^^^^^^^^^^^^ @@ -17,3 +21,4 @@ It should be made clear that authentication and authorization is beyond the resp if a pygeoapi user enables transactions, they must provide access control explicitly via another service. .. _`OGC API - Features - Part 4: Create, Replace, Update and Delete`: https://docs.ogc.org/DRAFTS/20-002.html +.. 
_`OGC API - Moving Features - Part 1: Core`: https://docs.ogc.org/is/22-003r3/22-003r3.html diff --git a/mf-api.config.yml b/mf-api.config.yml new file mode 100644 index 000000000..198704f72 --- /dev/null +++ b/mf-api.config.yml @@ -0,0 +1,513 @@ +# ================================================================= +# +# Authors: Just van den Broecke +# Tom Kralidis +# Francesco Bartoli +# +# Copyright (c) 2019 Just van den Broecke +# Copyright (c) 2020 Tom Kralidis +# Copyright (c) 2020 Francesco Bartoli +# +# Permission is hereby granted, free of charge, to any person +# obtaining a copy of this software and associated documentation +# files (the "Software"), to deal in the Software without +# restriction, including without limitation the rights to use, +# copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the +# Software is furnished to do so, subject to the following +# conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +# OTHER DEALINGS IN THE SOFTWARE. +# +# ================================================================= + + +# Default config for base Docker Image, override via DockerVolume +# mapping with your own config. 
+server: + bind: + host: 0.0.0.0 + port: 80 + url: http://localhost:5050 + mimetype: application/json; charset=UTF-8 + encoding: utf-8 + gzip: false + language: en-US + cors: true + pretty_print: true + limit: 10 + # templates: /path/to/templates + map: + url: https://tile.openstreetmap.org/{z}/{x}/{y}.png + attribution: '© OpenStreetMap contributors' + ogc_schemas_location: /schemas.opengis.net + +logging: + level: ERROR + #logfile: /tmp/pygeoapi.log + +metadata: + identification: + title: pygeoapi Demo instance - running latest GitHub version + description: pygeoapi provides an API to geospatial data + keywords: + - geospatial + - data + - api + keywords_type: theme + terms_of_service: https://creativecommons.org/licenses/by/4.0/ + url: https://github.com/geopython/pygeoapi + license: + name: CC-BY 4.0 license + url: https://creativecommons.org/licenses/by/4.0/ + provider: + name: pygeoapi Development Team + url: https://pygeoapi.io + contact: + name: Kralidis, Tom + position: Lead Dev + address: Mailing Address + city: City + stateorprovince: Administrative Area + postalcode: Zip or Postal Code + country: Canada + phone: +xx-xxx-xxx-xxxx + fax: +xx-xxx-xxx-xxxx + email: you@example.org + url: Contact URL + hours: Hours of Service + instructions: During hours of service. Off on weekends. 
+ role: pointOfContact + +resources: +# obs: +# type: collection +# title: Observations +# description: My cool observations +# keywords: +# - observations +# - monitoring +# linked-data: +# context: +# - datetime: https://schema.org/DateTime +# - vocab: https://example.com/vocab# +# stn_id: "vocab:stn_id" +# value: "vocab:value" +# links: +# - type: text/csv +# rel: canonical +# title: data +# href: https://github.com/mapserver/mapserver/blob/branch-7-0/msautotest/wxs/data/obs.csv +# hreflang: en-US +# - type: text/csv +# rel: alternate +# title: data +# href: https://raw.githubusercontent.com/mapserver/mapserver/branch-7-0/msautotest/wxs/data/obs.csv +# hreflang: en-US +# extents: +# spatial: +# bbox: [-180,-90,180,90] +# crs: http://www.opengis.net/def/crs/OGC/1.3/CRS84 +# temporal: +# begin: 2000-10-30T18:24:39Z +# end: 2007-10-30T08:57:29Z +# providers: +# - type: feature +# name: CSV +# data: tests/data/obs.csv +# id_field: id +# geometry: +# x_field: long +# y_field: lat +# +# lakes: +# type: collection +# title: Large Lakes +# description: lakes of the world, public domain +# keywords: +# - lakes +# links: +# - type: text/html +# rel: canonical +# title: information +# href: http://www.naturalearthdata.com/ +# hreflang: en-US +# extents: +# spatial: +# bbox: [-180,-90,180,90] +# crs: http://www.opengis.net/def/crs/OGC/1.3/CRS84 +# temporal: +# begin: 2011-11-11 +# end: null # or empty (either means open ended) +# providers: +# - type: feature +# name: GeoJSON +# data: tests/data/ne_110m_lakes.geojson +# id_field: id +# +# countries: +# type: collection +# title: Countries in the world (SpatialLite Provider) +# description: Countries of the world (SpatialLite) +# keywords: +# - countries +# - natural eart +# links: +# - type: text/html +# rel: canonical +# title: information +# href: http://www.naturalearthdata.com/ +# hreflang: en-US +# extents: +# spatial: +# bbox: [-180, -90, 180, 90] +# crs: http://www.opengis.net/def/crs/OGC/1.3/CRS84 +# temporal: +# 
begin: +# end: null # or empty +# providers: +# - type: feature +# name: SQLiteGPKG +# data: tests/data/ne_110m_admin_0_countries.sqlite +# id_field: ogc_fid +# table: ne_110m_admin_0_countries +# +# dutch_georef_stations: +# type: collection +# title: Dutch Georef Stations via OGR WFS +# description: Locations of RD/GNSS-reference stations from Dutch Kadaster PDOK a.k.a RDInfo. Uses MapServer WFS v2 backend via OGRProvider. +# keywords: +# - Netherlands +# - GNSS +# - Surveying +# - Holland +# - RD +# links: +# - type: text/html +# rel: canonical +# title: information +# href: http://www.nationaalgeoregister.nl/geonetwork/srv/dut/catalog.search#/metadata/3ebe56dc-5f09-4fb3-b224-55c2db4ca2fd?tab=general +# hreflang: nl-NL +# extents: +# spatial: +# bbox: [3.37,50.75,7.21,53.47] +# crs: http://www.opengis.net/def/crs/OGC/1.3/CRS84 +# temporal: +# begin: +# end: null # or empty +# providers: +# - type: feature +# name: OGR +# data: +# source_type: WFS +# source: WFS:https://service.pdok.nl/kadaster/rdinfo/wfs/v1_0? +## source_srs: EPSG:28992 +## target_srs: EPSG:4326 +# source_capabilities: +# paging: True +# +# source_options: +# # OGR_WFS_VERSION: 1.1.0 +# OGR_WFS_LOAD_MULTIPLE_LAYER_DEFN: NO +# +# gdal_ogr_options: +# EMPTY_AS_NULL: NO +# GDAL_CACHEMAX: 64 +# # GDAL_HTTP_PROXY: (optional proxy) +# # GDAL_PROXY_AUTH: (optional auth for remote WFS) +# CPL_DEBUG: NO +# +# crs: +# - http://www.opengis.net/def/crs/EPSG/0/4258 +# - http://www.opengis.net/def/crs/EPSG/0/28992 +# storage_crs: http://www.opengis.net/def/crs/EPSG/0/28992 +# id_field: gml_id +# layer: rdinfo:stations +# +# utah_city_locations: +# type: collection +# title: Cities in Utah via OGR WFS +# description: Data from the state of Utah. Standard demo dataset from the deegree WFS server that is used as backend WFS. 
+# keywords: +# - USA +# - deegree +# - Utah +# - Demo data +# links: +# - type: text/html +# rel: canonical +# title: information +# href: http://download.deegree.org/documentation/3.3.20/html/lightly.html#example-workspace-2-utah-webmapping-services +# hreflang: en-US +# extents: +# spatial: +# bbox: [-112.108489, 39.854053, -111.028628, 40.460098] +# crs: http://www.opengis.net/def/crs/OGC/1.3/CRS84 +# temporal: +# begin: +# end: null # or empty +# providers: +# - type: feature +# name: OGR +# data: +# source_type: WFS +# source: WFS:http://demo.deegree.org/utah-workspace/services/wfs?TYPENAME=app:SGID93_LOCATION_UDOTMap_CityLocations +## source_srs: EPSG:26912 +## target_srs: EPSG:4326 +# source_capabilities: +# paging: True +# +# source_options: +# # OGR_WFS_VERSION: 2.0.0 +# OGR_WFS_LOAD_MULTIPLE_LAYER_DEFN: NO +# +# gdal_ogr_options: +# EMPTY_AS_NULL: NO +# GDAL_CACHEMAX: 64 +# # GDAL_HTTP_PROXY: (optional proxy) +# # GDAL_PROXY_AUTH: (optional auth for remote WFS) +# CPL_DEBUG: NO +# +# crs: +# - http://www.opengis.net/def/crs/EPSG/0/4258 +# - http://www.opengis.net/def/crs/EPSG/0/26912 +# storage_crs: http://www.opengis.net/def/crs/EPSG/0/26912 +# id_field: NAME +# layer: app:SGID93_LOCATION_UDOTMap_CityLocations +# +# unesco_pois_italy: +# type: collection +# title: Unesco POIs in Italy via OGR WFS +# description: Unesco Points of Interest in Italy. Using GeoSolutions GeoServer WFS demo-server as backend WFS. 
+# keywords: +# - Italy +# - Unesco +# - Demo +# links: +# - type: text/html +# rel: canonical +# title: information +# href: https://mapstore2.geo-solutions.it/mapstore/#/dashboard/5593 +# hreflang: en-US +# extents: +# spatial: +# bbox: [5.0,36.0,20.0,46.0] +# crs: http://www.opengis.net/def/crs/OGC/1.3/CRS84 +# temporal: +# begin: +# end: null # or empty +# providers: +# - type: feature +# name: OGR +# data: +# source_type: WFS +# source: WFS:https://gs-stable.geosolutionsgroup.com/geoserver/wfs +## source_srs: EPSG:32632 +## target_srs: EPSG:4326 +# source_capabilities: +# paging: True +# +# source_options: +# # OGR_WFS_VERSION: 1.1.0 +# OGR_WFS_LOAD_MULTIPLE_LAYER_DEFN: NO +# +# gdal_ogr_options: +# EMPTY_AS_NULL: NO +# GDAL_CACHEMAX: 64 +# # GDAL_HTTP_PROXY: (optional proxy) +# # GDAL_PROXY_AUTH: (optional auth for remote WFS) +# CPL_DEBUG: NO +# +# crs: +# - http://www.opengis.net/def/crs/EPSG/0/4258 +# - http://www.opengis.net/def/crs/EPSG/0/32632 +# storage_crs: http://www.opengis.net/def/crs/EPSG/0/32632 +# id_field: gml_id +# layer: unesco:Unesco_point +# +# ogr_gpkg_poi: +# type: collection +# title: Portuguese Points of Interest via OGR GPKG +# description: Portuguese Points of Interest obtained from OpenStreetMap. Dataset includes Madeira and Azores islands. Uses GeoPackage backend via OGR provider. 
+# keywords: +# - Portugal +# - POI +# - Point of Interest +# - Madeira +# - Azores +# - OSM +# - Open Street Map +# - NaturaGIS +# links: +# - type: text/html +# rel: canonical +# title: information +# href: https://wiki.openstreetmap.org/wiki/Points_of_interest/ +# hreflang: en-US +# extents: +# spatial: +# bbox: [-31.2687, 32.5898, -6.18992, 42.152] +# crs: http://www.opengis.net/def/crs/OGC/1.3/CRS84 +# temporal: +# begin: +# end: null # or empty +# providers: +# - type: feature +# name: OGR +# data: +# source_type: GPKG +# source: tests/data/poi_portugal.gpkg +## source_srs: EPSG:4326 +## target_srs: EPSG:4326 +# source_capabilities: +# paging: True +# +# gdal_ogr_options: +# EMPTY_AS_NULL: NO +# GDAL_CACHEMAX: 64 +# # GDAL_HTTP_PROXY: (optional proxy) +# # GDAL_PROXY_AUTH: (optional auth for remote WFS) +# CPL_DEBUG: NO +# +# id_field: osm_id +# layer: poi_portugal +# +# ogr_geojson_lakes: +# type: collection +# title: Large Lakes OGR GeoJSON Driver +# description: lakes of the world, public domain +# keywords: +# - lakes +# links: +# - type: text/html +# rel: canonical +# title: information +# href: http://www.naturalearthdata.com/ +# hreflang: en-US +# extents: +# spatial: +# bbox: [-180, -90, 180, 90] +# crs: http://www.opengis.net/def/crs/OGC/1.3/CRS84 +# temporal: +# begin: 2011-11-11 +# end: null # or empty +# providers: +# - type: feature +# name: OGR +# data: +# source_type: GeoJSON +# source: tests/data/ne_110m_lakes.geojson +## source_srs: EPSG:4326 +## target_srs: EPSG:4326 +# source_capabilities: +# paging: True +# +# gdal_ogr_options: +# EMPTY_AS_NULL: NO +# GDAL_CACHEMAX: 64 +# # GDAL_HTTP_PROXY: (optional proxy) +# # GDAL_PROXY_AUTH: (optional auth for remote WFS) +# CPL_DEBUG: NO +# +# id_field: id +# layer: ne_110m_lakes +# +# ogr_addresses_sqlite: +# type: collection +# title: Dutch addresses (subset Otterlo). OGR SQLite Driver +# description: Dutch addresses subset. 
+# keywords: +# - Netherlands +# - addresses +# - INSPIRE +# links: +# - type: text/html +# rel: canonical +# title: information +# href: http://www.nationaalgeoregister.nl/geonetwork/srv/dut/catalog.search#/metadata/4074b3c3-ca85-45ad-bc0d-b5fca8540z0b +# hreflang: nl-NL +# extents: +# spatial: +# bbox: [3.37,50.75,7.21,53.47] +# crs: http://www.opengis.net/def/crs/OGC/1.3/CRS84 +# temporal: +# begin: +# end: null # or empty +# providers: +# - type: feature +# name: OGR +# data: +# source_type: SQLite +# # source: tests/data/ne_110m_admin_0_countries.sqlite +# source: tests/data/dutch_addresses_4326.sqlite +## source_srs: EPSG:4326 +## target_srs: EPSG:4326 +# source_capabilities: +# paging: True +# +# gdal_ogr_options: +# EMPTY_AS_NULL: NO +# GDAL_CACHEMAX: 64 +# # GDAL_HTTP_PROXY: (optional proxy) +# # GDAL_PROXY_AUTH: (optional auth for remote WFS) +# CPL_DEBUG: NO +# +# id_field: id +# layer: ogrgeojson +# +# ogr_addresses_gpkg: +# type: collection +# title: Dutch addresses (subset Otterlo). OGR GeoPackage Driver +# description: Dutch addresses subset. 
+# keywords: +# - Netherlands +# - addresses +# - INSPIRE +# links: +# - type: text/html +# rel: canonical +# title: information +# href: http://www.nationaalgeoregister.nl/geonetwork/srv/dut/catalog.search#/metadata/4074b3c3-ca85-45ad-bc0d-b5fca8540z0b +# hreflang: nl-NL +# extents: +# spatial: +# bbox: [3.37,50.75,7.21,53.47] +# crs: http://www.opengis.net/def/crs/OGC/1.3/CRS84 +# temporal: +# begin: +# end: null # or empty +# providers: +# - type: feature +# name: OGR +# data: +# source_type: GPKG +# source: tests/data/dutch_addresses_4326.gpkg +## source_srs: EPSG:4326 +## target_srs: EPSG:4326 +# source_capabilities: +# paging: True +# +# gdal_ogr_options: +# EMPTY_AS_NULL: NO +# GDAL_CACHEMAX: 64 +# # GDAL_HTTP_PROXY: (optional proxy) +# # GDAL_PROXY_AUTH: (optional auth for remote WFS) +# CPL_DEBUG: NO +# +# id_field: id +# layer: OGRGeoJSON +# + hello-world: + type: process + processor: + name: HelloWorld diff --git a/pygeoapi/api/__init__.py b/pygeoapi/api/__init__.py index b47541f23..138f87e39 100644 --- a/pygeoapi/api/__init__.py +++ b/pygeoapi/api/__init__.py @@ -40,6 +40,14 @@ Returns content from plugins and sets responses. 
""" +from pygeoapi.util import (CrsTransformSpec, TEMPLATES, UrlPrefetcher, + get_api_rules, get_base_url, get_provider_by_type, + get_typed_value, get_crs_from_uri, dategetter, + get_supported_crs_list, render_j2_template, to_json, + get_provider_default, filter_dict_by_key_value) +from pymeos import STBox, TsTzSpan, pymeos_initialize +import psycopg2 +from pygeoapi.provider.postgresql_mobilitydb import PostgresMobilityDB import asyncio from collections import OrderedDict from copy import deepcopy @@ -63,13 +71,6 @@ from pygeoapi.provider.base import ( ProviderConnectionError, ProviderGenericError, ProviderTypeError) -from pygeoapi.util import ( - CrsTransformSpec, TEMPLATES, UrlPrefetcher, dategetter, - filter_dict_by_key_value, get_api_rules, get_base_url, - get_provider_by_type, get_provider_default, get_typed_value, - get_crs_from_uri, get_supported_crs_list, render_j2_template, to_json -) - LOGGER = logging.getLogger(__name__) #: Return headers for requests (e.g:X-Powered-By) @@ -133,7 +134,7 @@ def all_apis() -> dict: """ from . import (coverages, environmental_data_retrieval, itemtypes, maps, - processes, tiles, stac) + processes, tiles, stac, movingfeatures) return { 'coverage': coverages, @@ -142,7 +143,8 @@ def all_apis() -> dict: 'map': maps, 'process': processes, 'tile': tiles, - 'stac': stac + 'stac': stac, + 'movingfeature': movingfeatures, } @@ -292,6 +294,7 @@ def example_method(self, request: Union[APIRequest, Any], custom_arg): :param request: The web platform specific Request instance. :param supported_locales: List or set of supported Locale instances. """ + def __init__(self, request, supported_locales): # Set default request data self._data = b'' @@ -454,7 +457,7 @@ def _get_format(self, headers) -> Union[str, None]: # Format not specified: get from Accept headers (MIME types) # e.g. 
format_ = 'text/html' - h = headers.get('accept', headers.get('Accept', '')).strip() # noqa + h = headers.get('accept', headers.get('Accept', '')).strip() # noqa (fmts, mimes) = zip(*FORMAT_TYPES.items()) # basic support for complex types (i.e. with "q=0.x") for type_ in (t.split(';')[0].strip() for t in h.split(',') if t): @@ -812,7 +815,7 @@ def landing_page(self, @gzip @pre_process def openapi_(self, request: Union[APIRequest, Any]) -> Tuple[ - dict, int, str]: + dict, int, str]: """ Provide OpenAPI document @@ -882,6 +885,9 @@ def conformance(self, if provider['type'] == 'record': conformance_list.extend( apis_dict['itemtypes'].CONFORMANCE_CLASSES_RECORDS) + if provider['type'] == 'movingfeatures': + conformance_list.extend( + apis_dict['movingfeatures'].CONFORMANCE_CLASSES_RECORDS) # noqa conformance = { 'conformsTo': sorted(list(set(conformance_list))) @@ -898,20 +904,19 @@ def conformance(self, @gzip @pre_process @jsonldify - def describe_collections(self, request: Union[APIRequest, Any], - dataset=None) -> Tuple[dict, int, str]: + def describe_collections( + self, request: Union[APIRequest, Any], + dataset=None) -> Tuple[dict, int, str]: """ - Provide collection metadata + Queries collection :param request: A request object - :param dataset: name of collection :returns: tuple of headers, status code, content """ - if not request.is_valid(): return self.get_format_exception(request) - headers = request.get_response_headers(**self.api_headers) + headers = request.get_response_headers() fcm = { 'collections': [], @@ -1099,10 +1104,10 @@ def describe_collections(self, request: Union[APIRequest, Any], # OAPIF Part 2 - list supported CRSs and StorageCRS if collection_data_type == 'feature': - collection['crs'] = get_supported_crs_list(collection_data, DEFAULT_CRS_LIST) # noqa - collection['storageCRS'] = collection_data.get('storage_crs', DEFAULT_STORAGE_CRS) # noqa + collection['crs'] = get_supported_crs_list(collection_data, DEFAULT_CRS_LIST) # noqa + 
collection['storageCRS'] = collection_data.get('storage_crs', DEFAULT_STORAGE_CRS) # noqa if 'storage_crs_coordinate_epoch' in collection_data: - collection['storageCrsCoordinateEpoch'] = collection_data.get('storage_crs_coordinate_epoch') # noqa + collection['storageCrsCoordinateEpoch'] = collection_data.get('storage_crs_coordinate_epoch') # noqa elif collection_data_type == 'coverage': # TODO: translate @@ -1141,7 +1146,7 @@ def describe_collections(self, request: Union[APIRequest, Any], collection['extent']['spatial']['grid'] = [{ 'cellsCount': p._coverage_properties['width'], 'resolution': p._coverage_properties['resx'] - }, { + }, { 'cellsCount': p._coverage_properties['height'], 'resolution': p._coverage_properties['resy'] }] @@ -1235,7 +1240,7 @@ def describe_collections(self, request: Union[APIRequest, Any], 'link': { 'href': f'{self.get_collections_url()}/{k}/{qt}', 'rel': 'data' - } + } } collection['data_queries'][qt] = data_query @@ -1263,6 +1268,144 @@ def describe_collections(self, request: Union[APIRequest, Any], fcm['collections'].append(collection) + if dataset is None: + # get moving feature collections + pmdb_provider = PostgresMobilityDB() + + try: + pmdb_provider.connect() + result = pmdb_provider.get_collections() + except (Exception, psycopg2.Error) as error: + msg = str(error) + return self.get_exception( + HTTPStatus.BAD_REQUEST, + headers, + request.format, + 'ConnectingError', + msg) + + pymeos_initialize() + for row in result: + collection_id = row[0] + collection = row[1] + collection['itemType'] = 'movingfeature' + collection['id'] = collection_id + + crs = None + trs = None + if 'crs' in collection: + crs = collection.pop('crs', None) + if 'trs' in collection: + trs = collection.pop('trs', None) + + extend_stbox = STBox(row[3]) if row[3] is not None else None + lifespan = TsTzSpan(row[2]) if row[2] is not None else None + + bbox = [] + if extend_stbox is not None: + bbox.append(extend_stbox.xmin()) + bbox.append(extend_stbox.ymin()) + 
if extend_stbox.zmin() is not None: + bbox.append(extend_stbox.zmin()) + bbox.append(extend_stbox.xmax()) + bbox.append(extend_stbox.ymax()) + if extend_stbox.zmax() is not None: + bbox.append(extend_stbox.zmax()) + + if crs is None: + if extend_stbox.srid() == 4326: + if extend_stbox.zmax() is not None: + crs = 'http://www.opengis.net/def/crs/OGC/0/CRS84h' # noqa + else: + crs = 'http://www.opengis.net/def/\ + crs/OGC/1.3/CRS84' + if crs is None: + crs = 'http://www.opengis.net/def/crs/OGC/1.3/CRS84' + if trs is None: + trs = 'http://www.opengis.net/def/uom/ISO-8601/0/Gregorian' + + time = [] + if lifespan is not None: + time.append( + lifespan.lower().strftime("%Y-%m-%dT%H:%M:%SZ")) + time.append( + lifespan.upper().strftime("%Y-%m-%dT%H:%M:%SZ")) + else: + if extend_stbox is not None: + if extend_stbox.tmin() is not None: + time.append(extend_stbox.tmin().strftime( + "%Y-%m-%dT%H:%M:%SZ")) + time.append(extend_stbox.tmax().strftime( + "%Y-%m-%dT%H:%M:%SZ")) + + collection['extent'] = { + 'spatial': { + 'bbox': bbox, + 'crs': crs + }, + 'temporal': { + 'interval': time, + 'trs': trs + } + } + + collection['links'] = [] + + # TODO: provide translations + LOGGER.debug('Adding JSON and HTML link relations') + + collection['links'].append({ + 'type': FORMAT_TYPES[F_JSON], + 'rel': 'root', + 'title': l10n.translate('The landing page of this server as JSON', request.locale), # noqa + 'href': f"{self.base_url}?f={F_JSON}" + }) + collection['links'].append({ + 'type': FORMAT_TYPES[F_HTML], + 'rel': 'root', + 'title': l10n.translate('The landing page of this server as HTML', request.locale), # noqa + 'href': f"{self.base_url}?f={F_HTML}" + }) + collection['links'].append({ + 'type': FORMAT_TYPES[F_JSON], + 'rel': request.get_linkrel(F_JSON), + 'title': l10n.translate('This document as JSON', request.locale), # noqa + 'href': f'{self.get_collections_url()}/{collection_id}?f={F_JSON}' # noqa + }) + collection['links'].append({ + 'type': FORMAT_TYPES[F_JSONLD], + 'rel': 
request.get_linkrel(F_JSONLD), + 'title': l10n.translate('This document as RDF (JSON-LD)', request.locale), # noqa + 'href': f'{self.get_collections_url()}/{collection_id}?f={F_JSONLD}' # noqa + }) + collection['links'].append({ + 'type': FORMAT_TYPES[F_HTML], + 'rel': request.get_linkrel(F_HTML), + 'title': l10n.translate('This document as HTML', request.locale), # noqa + 'href': f'{self.get_collections_url()}/{collection_id}?f={F_HTML}' # noqa + }) + + collection['links'].append({ + 'type': 'application/geo+json', + 'rel': 'items', + 'title': l10n.translate('Items as GeoJSON', request.locale), # noqa + 'href': f'{self.get_collections_url()}/{collection_id}/items?f={F_JSON}' # noqa + }) + collection['links'].append({ + 'type': FORMAT_TYPES[F_JSONLD], + 'rel': 'items', + 'title': l10n.translate('Items as RDF (GeoJSON-LD)', request.locale), # noqa + 'href': f'{self.get_collections_url()}/{collection_id}/items?f={F_JSONLD}' # noqa + }) + collection['links'].append({ + 'type': FORMAT_TYPES[F_HTML], + 'rel': 'items', + 'title': l10n.translate('Items as HTML', request.locale), # noqa + 'href': f'{self.get_collections_url()}/{collection_id}/items?f={F_HTML}' # noqa + }) + + fcm['collections'].append(collection) + if dataset is None: # TODO: translate fcm['links'].append({ @@ -1286,7 +1429,7 @@ def describe_collections(self, request: Union[APIRequest, Any], if request.format == F_HTML: # render fcm['collections_path'] = self.get_collections_url() - if dataset is not None: + if len(result) > 0: content = render_j2_template(self.tpl_config, 'collections/collection.html', fcm, request.locale) diff --git a/pygeoapi/api/movingfeatures.py b/pygeoapi/api/movingfeatures.py new file mode 100644 index 000000000..08c2d0933 --- /dev/null +++ b/pygeoapi/api/movingfeatures.py @@ -0,0 +1,2793 @@ +# ================================================================= +# +# Authors: Tom Kralidis +# Francesco Bartoli +# Sander Schaminee +# John A Stevenson +# Colin Blackburn +# +# Copyright 
(c) 2022 Tom Kralidis +# Copyright (c) 2020 Francesco Bartoli +# Copyright (c) 2022 John A Stevenson and Colin Blackburn +# +# Permission is hereby granted, free of charge, to any person +# obtaining a copy of this software and associated documentation +# files (the "Software"), to deal in the Software without +# restriction, including without limitation the rights to use, +# copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the +# Software is furnished to do so, subject to the following +# conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +# OTHER DEALINGS IN THE SOFTWARE. +# +# ================================================================= +""" Root level code of pygeoapi, parsing content provided by web framework. +Returns content from plugins and sets responses. +""" + +from datetime import datetime +from functools import partial +import json +import logging +import re +from typing import Tuple +import urllib.parse + +from dateutil.parser import parse as dateparse +import pytz +from http import HTTPStatus + +from pygeoapi.plugin import PLUGINS + +from pymeos import (STBox, TsTzSpan, TTextSeq, TFloatSeq, + TGeomPointSeq, Temporal, pymeos_initialize) +import psycopg2 +from pygeoapi.provider.postgresql_mobilitydb import PostgresMobilityDB +from . 
import (API, APIRequest, SYSTEM_LOCALE, + FORMAT_TYPES, F_JSON) +from pygeoapi.util import (to_json) + +LOGGER = logging.getLogger(__name__) + + +CONFORMANCE_CLASSES_MOVINGFEATURES = [ + "http://www.opengis.net/spec/ogcapi-movingfeatures-1/1.0/conf/common", + "http://www.opengis.net/spec/ogcapi-movingfeatures-1/1.0/conf/mf-collection", + "http://www.opengis.net/spec/ogcapi-movingfeatures-1/1.0/conf/movingfeatures" +] + + +def manage_collection(api: API, request: APIRequest, + action, dataset=None) -> Tuple[dict, int, str]: + """ + Adds a collection + + :param request: A request object + :param dataset: dataset name + + :returns: tuple of headers, status code, content + """ + + headers = request.get_response_headers(SYSTEM_LOCALE) + pmdb_provider = PostgresMobilityDB() + collection_id = str(dataset) + if action in ['create', 'update']: + data = request.data + if not data: + # TODO not all processes require input, e.g. time-dependent or + # random value generators + msg = 'missing request data' + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'MissingParameterValue', msg) + + try: + # Parse bytes data, if applicable + data = data.decode() + LOGGER.debug(data) + except (UnicodeDecodeError, AttributeError): + pass + + try: + data = json.loads(data) + except (json.decoder.JSONDecodeError, TypeError) as err: + # Input does not appear to be valid JSON + LOGGER.error(err) + msg = 'invalid request data' + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + if action == 'create': + try: + pmdb_provider.connect() + collection_id = pmdb_provider.post_collection(data) + except (Exception, psycopg2.Error) as error: + msg = str(error) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'ConnectingError', msg) + finally: + pmdb_provider.disconnect() + + url = '{}/{}'.format(api.get_collections_url(), collection_id) + + headers['Location'] = url + return 
headers, HTTPStatus.CREATED, '' + + if action == 'update': + LOGGER.debug('Updating item') + try: + pmdb_provider.connect() + pmdb_provider.put_collection(collection_id, data) + except (Exception, psycopg2.Error) as error: + msg = str(error) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'ConnectingError', msg) + finally: + pmdb_provider.disconnect() + + return headers, HTTPStatus.NO_CONTENT, '' + + if action == 'delete': + LOGGER.debug('Deleting item') + try: + pmdb_provider.connect() + pmdb_provider.delete_collection( + "AND collection_id ='{0}'".format(collection_id)) + + except (Exception, psycopg2.Error) as error: + msg = str(error) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'ConnectingError', msg) + finally: + pmdb_provider.disconnect() + + return headers, HTTPStatus.NO_CONTENT, '' + + +def get_collection(api: API, request: APIRequest, + dataset=None) -> Tuple[dict, int, str]: + """ + Queries collection + + :param request: A request object + :param dataset: dataset name + + :returns: tuple of headers, status code, content + """ + pmdb_provider = PostgresMobilityDB() + collection_id = str(dataset) + if not request.is_valid(): + return api.get_format_exception(request) + headers = request.get_response_headers() + + try: + pmdb_provider.connect() + result = pmdb_provider.get_collection(collection_id) + if len(result) > 0: + row = result[0] + else: + msg = 'Collection not found' + LOGGER.error(msg) + return api.get_exception( + HTTPStatus.NOT_FOUND, + headers, request.format, 'NotFound', msg) + except (Exception, psycopg2.Error) as error: + msg = str(error) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'ConnectingError', msg) + + collection = {} + if row is not None: + pymeos_initialize() + collection_id = row[0] + collection = row[1] + collection['itemType'] = 'movingfeature' + collection['id'] = collection_id + + crs = None + trs = None + if 'crs' in 
collection: + crs = collection.pop('crs', None) + if 'trs' in collection: + trs = collection.pop('trs', None) + + extend_stbox = STBox(row[3]) if row[3] is not None else None + lifespan = TsTzSpan(row[2]) if row[2] is not None else None + + bbox = [] + if extend_stbox is not None: + bbox.append(extend_stbox.xmin()) + bbox.append(extend_stbox.ymin()) + if extend_stbox.zmin() is not None: + bbox.append(extend_stbox.zmin()) + bbox.append(extend_stbox.xmax()) + bbox.append(extend_stbox.ymax()) + if extend_stbox.zmax() is not None: + bbox.append(extend_stbox.zmax()) + + if crs is None: + if extend_stbox.srid() == 4326: + if extend_stbox.zmax() is not None: + crs = 'http://www.opengis.net/def/crs/OGC/0/CRS84h' + else: + crs = 'http://www.opengis.net/def/crs/\ + OGC/1.3/CRS84' + + if crs is None: + crs = 'http://www.opengis.net/def/crs/OGC/1.3/CRS84' + if trs is None: + trs = 'http://www.opengis.net/def/uom/ISO-8601/0/Gregorian' + + time = [] + if lifespan is not None: + time.append(lifespan.lower().strftime("%Y-%m-%dT%H:%M:%SZ")) + time.append(lifespan.upper().strftime("%Y-%m-%dT%H:%M:%SZ")) + else: + if extend_stbox is not None: + if extend_stbox.tmin() is not None: + time.append(extend_stbox.tmin().strftime( + "%Y-%m-%dT%H:%M:%SZ")) + time.append(extend_stbox.tmax().strftime( + "%Y-%m-%dT%H:%M:%SZ")) + + collection['extent'] = { + 'spatial': { + 'bbox': bbox, + 'crs': crs + }, + 'temporal': { + 'interval': time, + 'trs': trs + } + } + + collection['links'] = [] + collection['links'].append({ + 'href': '{}/{}'.format( + api.get_collections_url(), collection_id), + 'rel': request.get_linkrel(F_JSON), + 'type': FORMAT_TYPES[F_JSON] + }) + + return headers, HTTPStatus.OK, to_json(collection, api.pretty_print) + + +def get_collection_items( + api: API, request: APIRequest, + dataset) -> Tuple[dict, int, str]: + """ + Queries collection + + :param request: A request object + :param dataset: dataset name + + :returns: tuple of headers, status code, content + """ + + # Set 
Content-Language to system locale until provider locale + # has been determined + if not request.is_valid(): + return api.get_format_exception(request) + headers = request.get_response_headers(SYSTEM_LOCALE) + + excuted, collections = get_list_of_collections_id() + collection_id = dataset + if excuted is False: + msg = str(collections) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'ConnectingError', msg) + + if collection_id not in collections: + msg = 'Collection not found' + LOGGER.error(msg) + return api.get_exception( + HTTPStatus.NOT_FOUND, + headers, request.format, 'NotFound', msg) + LOGGER.debug('Processing query parameters') + + LOGGER.debug('Processing offset parameter') + try: + offset = int(request.params.get('offset')) + if offset < 0: + msg = 'offset value should be positive or zero' + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + except TypeError as err: + LOGGER.warning(err) + offset = 0 + except ValueError: + msg = 'offset value should be an integer' + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + LOGGER.debug('Processing limit parameter') + try: + limit = int(request.params.get('limit')) + # TODO: We should do more validation, against the min and max + # allowed by the server configuration + if limit <= 0: + msg = 'limit value should be strictly positive' + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + if limit > 10000: + msg = 'limit value should be less than or equal to 10000' + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + except TypeError as err: + LOGGER.warning(err) + limit = int(api.config['server']['limit']) + except ValueError: + msg = 'limit value should be an integer' + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, 
request.format, 'InvalidParameterValue', msg) + + LOGGER.debug('Processing bbox parameter') + + bbox = request.params.get('bbox') + + if bbox is None: + bbox = [] + else: + try: + bbox = validate_bbox(bbox) + except ValueError as err: + msg = str(err) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + LOGGER.debug('Processing datetime parameter') + datetime_ = request.params.get('datetime') + try: + datetime_ = validate_datetime(datetime_) + except ValueError as err: + msg = str(err) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + sub_trajectory = request.params.get('subTrajectory') + if sub_trajectory is None: + sub_trajectory = False + + LOGGER.debug('Querying provider') + LOGGER.debug('offset: {}'.format(offset)) + LOGGER.debug('limit: {}'.format(limit)) + LOGGER.debug('bbox: {}'.format(bbox)) + LOGGER.debug('datetime: {}'.format(datetime_)) + + pmdb_provider = PostgresMobilityDB() + content = { + "type": "FeatureCollection", + "features": [], + "crs": {}, + "trs": {}, + "links": [] + } + + try: + pmdb_provider.connect() + result, number_matched, number_returned = \ + pmdb_provider.get_features(collection_id=collection_id, + bbox=bbox, datetime=datetime_, + limit=limit, offset=offset, + sub_trajectory=sub_trajectory) + except (Exception, psycopg2.Error) as error: + msg = str(error) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'ConnectingError', msg) + + mfeatures = [] + crs = None + trs = None + + split_mfeature = {} + for i in range(len(result)): + mfeature_id = str(result[i][1]) + if mfeature_id not in split_mfeature: + split_mfeature[mfeature_id] = [] + split_mfeature[mfeature_id].append(i) + + pymeos_initialize() + for key, mfeature_row_index in split_mfeature.items(): + row = result[mfeature_row_index[0]] + + mfeature_id = row[1] + mfeature = row[3] + mfeature['id'] = mfeature_id + 
mfeature['type'] = 'Feature' + + if 'crs' in mfeature and crs is None: + crs = mfeature['crs'] + if 'trs' in mfeature and trs is None: + trs = mfeature['trs'] + + if row[2] is not None: + mfeature['geometry'] = json.loads(row[2]) + else: + mfeature['geometry'] = None + + if 'properties' not in mfeature: + mfeature['properties'] = None + + if sub_trajectory or sub_trajectory == "true": + prisms = [] + for row_index in mfeature_row_index: + row_tgeometory = result[int(row_index)] + if row_tgeometory[7] is not None: + mfeature_check = row_tgeometory[1] + if mfeature_check == mfeature_id: + temporal_geometry = json.loads( + Temporal.as_mfjson( + TGeomPointSeq( + str(row_tgeometory[7]).replace( + "'", "")), + False)) + if 'crs' in temporal_geometry and crs is None: + crs = temporal_geometry['crs'] + if 'trs' in temporal_geometry and trs is None: + trs = temporal_geometry['trs'] + temporal_geometry = \ + pmdb_provider.\ + convert_temporalgeometry_to_old_version( + temporal_geometry) + temporal_geometry['id'] = row_tgeometory[6] + prisms.append(temporal_geometry) + mfeature['temporalGeometry'] = prisms + + extend_stbox = STBox(row[5]) if row[5] is not None else None + lifespan = TsTzSpan(row[4]) if row[4] is not None else None + + bbox = [] + if extend_stbox is not None: + bbox.append(extend_stbox.xmin()) + bbox.append(extend_stbox.ymin()) + if extend_stbox.zmin() is not None: + bbox.append(extend_stbox.zmin()) + bbox.append(extend_stbox.xmax()) + bbox.append(extend_stbox.ymax()) + if extend_stbox.zmax() is not None: + bbox.append(extend_stbox.zmax()) + mfeature['bbox'] = bbox + + time = [] + if lifespan is not None: + time.append(lifespan.lower().strftime("%Y-%m-%dT%H:%M:%SZ")) + time.append(lifespan.upper().strftime("%Y-%m-%dT%H:%M:%SZ")) + else: + if extend_stbox is not None: + if extend_stbox.tmin() is not None: + time.append(extend_stbox.tmin().strftime( + "%Y-%m-%dT%H:%M:%SZ")) + time.append(extend_stbox.tmax().strftime( + "%Y-%m-%dT%H:%M:%SZ")) + mfeature['time'] = 
time + + if 'crs' not in mfeature: + mfeature['crs'] = { + "type": "Name", + "properties": "urn:ogc:def:crs:OGC:1.3:CRS84" + } + if 'trs' not in mfeature: + mfeature['trs'] = { + "type": "Name", + "properties": "urn:ogc:data:time:iso8601" + } + mfeatures.append(mfeature) + + content['features'] = mfeatures + if crs is not None: + content['crs'] = crs + else: + content['crs'] = { + "type": "Name", + "properties": "urn:ogc:def:crs:OGC:1.3:CRS84" + } + + if trs is not None: + content['trs'] = trs + else: + content['trs'] = { + "type": "Name", + "properties": "urn:ogc:data:time:iso8601" + } + + # TODO: translate titles + uri = '{}/{}/items'.format(api.get_collections_url(), collection_id) + + serialized_query_params = '' + for k, v in request.params.items(): + if k not in ('f', 'offset'): + serialized_query_params += '&' + serialized_query_params += urllib.parse.quote(k, safe='') + serialized_query_params += '=' + serialized_query_params += urllib.parse.quote(str(v), safe=',') + + content['links'] = [ + {'href': '{}?offset={}{}'.format( + uri, offset, serialized_query_params), + 'rel': request.get_linkrel(F_JSON), + 'type': FORMAT_TYPES[F_JSON]}] + + if len(content['features']) == limit: + next_ = offset + limit + content['links'].append( + {'href': '{}?offset={}{}'.format( + uri, next_, serialized_query_params), + 'type': 'application/geo+json', 'rel': 'next'}) + + content['timeStamp'] = datetime.utcnow().strftime( + '%Y-%m-%dT%H:%M:%S.%fZ') + + content['numberMatched'] = number_matched + content['numberReturned'] = number_returned + return headers, HTTPStatus.OK, to_json(content, api.pretty_print) + + +def manage_collection_item( + api: API, request: APIRequest, + action, dataset, identifier=None) -> Tuple[dict, int, str]: + """ + Adds an item to a collection + + :param request: A request object + :param dataset: dataset name + + :returns: tuple of headers, status code, content + """ + + if not request.is_valid(PLUGINS['formatter'].keys()): + return 
api.get_format_exception(request) + + # Set Content-Language to system locale until provider locale + # has been determined + headers = request.get_response_headers(SYSTEM_LOCALE) + + pmdb_provider = PostgresMobilityDB() + excuted, collections = get_list_of_collections_id() + + if excuted is False: + msg = str(collections) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'ConnectingError', msg) + + if dataset not in collections: + msg = 'Collection not found' + LOGGER.error(msg) + return api.get_exception( + HTTPStatus.NOT_FOUND, + headers, request.format, 'NotFound', msg) + + collection_id = dataset + mfeature_id = identifier + if action == 'create': + if not request.data: + msg = 'No data found' + LOGGER.error(msg) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + data = request.data + try: + # Parse bytes data, if applicable + data = data.decode() + LOGGER.debug(data) + except (UnicodeDecodeError, AttributeError): + pass + + try: + data = json.loads(data) + except (json.decoder.JSONDecodeError, TypeError) as err: + # Input does not appear to be valid JSON + LOGGER.error(err) + msg = 'invalid request data' + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + if check_required_field_feature(data) is False: + # TODO not all processes require input + msg = 'The required tag (e.g., type,temporalgeometry) \ + is missing from the request data.' + return api.get_exception( + HTTPStatus.NOT_IMPLEMENTED, + headers, request.format, 'MissingParameterValue', msg) + + LOGGER.debug('Creating item') + try: + pmdb_provider.connect() + if data['type'] == 'FeatureCollection': + for feature in data['features']: + if check_required_field_feature(feature) is False: + # TODO not all processes require input + msg = 'The required tag \ + (e.g., type,temporalgeometry) \ + is missing from the request data.' 
+ return api.get_exception( + HTTPStatus.NOT_IMPLEMENTED, + headers, request.format, + 'MissingParameterValue', msg) + mfeature_id = pmdb_provider.post_movingfeature( + collection_id, feature) + else: + mfeature_id = pmdb_provider.post_movingfeature( + collection_id, data) + except (Exception, psycopg2.Error) as error: + msg = str(error) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'ConnectingError', msg) + finally: + pmdb_provider.disconnect() + + headers['Location'] = '{}/{}/items/{}'.format( + api.get_collections_url(), dataset, mfeature_id) + + return headers, HTTPStatus.CREATED, '' + + if action == 'delete': + LOGGER.debug('Deleting item') + + try: + pmdb_provider.connect() + pmdb_provider.delete_movingfeature( + "AND mfeature_id ='{0}'".format(mfeature_id)) + + except (Exception, psycopg2.Error) as error: + msg = str(error) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'ConnectingError', msg) + finally: + pmdb_provider.disconnect() + + return headers, HTTPStatus.NO_CONTENT, '' + + +def get_collection_item(api: API, request: APIRequest, + dataset, identifier) -> Tuple[dict, int, str]: + """ + Get a single collection item + + :param request: A request object + :param dataset: dataset name + :param identifier: item identifier + + :returns: tuple of headers, status code, content + """ + + pmdb_provider = PostgresMobilityDB() + collection_id = str(dataset) + mfeature_id = str(identifier) + if not request.is_valid(): + return api.get_format_exception(request) + headers = request.get_response_headers() + + try: + pmdb_provider.connect() + result = pmdb_provider.get_feature(collection_id, mfeature_id) + if len(result) > 0: + row = result[0] + else: + msg = 'Feature not found' + LOGGER.error(msg) + return api.get_exception( + HTTPStatus.NOT_FOUND, + headers, request.format, 'NotFound', msg) + except (Exception, psycopg2.Error) as error: + msg = str(error) + return api.get_exception( + 
HTTPStatus.BAD_REQUEST, + headers, request.format, 'ConnectingError', msg) + + mfeature = {} + if row is not None: + mfeature_id = row[1] + mfeature = row[3] + mfeature['id'] = mfeature_id + mfeature['type'] = 'Feature' + + if row[2] is not None: + mfeature['geometry'] = json.loads(row[2]) + + extend_stbox = STBox(row[5]) if row[5] is not None else None + lifespan = TsTzSpan(row[4]) if row[4] is not None else None + + bbox = [] + if extend_stbox is not None: + bbox.append(extend_stbox.xmin()) + bbox.append(extend_stbox.ymin()) + if extend_stbox.zmin() is not None: + bbox.append(extend_stbox.zmin()) + bbox.append(extend_stbox.xmax()) + bbox.append(extend_stbox.ymax()) + if extend_stbox.zmax() is not None: + bbox.append(extend_stbox.zmax()) + mfeature['bbox'] = bbox + + time = [] + if lifespan is not None: + time.append(lifespan.lower().strftime("%Y-%m-%dT%H:%M:%SZ")) + time.append(lifespan.upper().strftime("%Y-%m-%dT%H:%M:%SZ")) + else: + if extend_stbox is not None: + if extend_stbox.tmin() is not None: + time.append(extend_stbox.tmin().strftime( + "%Y-%m-%dT%H:%M:%SZ")) + time.append(extend_stbox.tmax().strftime( + "%Y-%m-%dT%H:%M:%SZ")) + mfeature['time'] = time + + if 'crs' not in mfeature: + mfeature['crs'] = { + "type": "Name", + "properties": "urn:ogc:def:crs:OGC:1.3:CRS84" + } + if 'trs' not in mfeature: + mfeature['trs'] = { + "type": "Name", + "properties": "urn:ogc:data:time:iso8601" + } + mfeature['links'] = [] + mfeature['links'].append({ + 'href': '{}/{}/items/{}'.format( + api.get_collections_url(), collection_id, mfeature_id), + 'rel': request.get_linkrel(F_JSON), + 'type': FORMAT_TYPES[F_JSON] + }) + return headers, HTTPStatus.OK, to_json(mfeature, api.pretty_print) + + +def get_collection_items_tGeometry(api: API, request: APIRequest, + dataset, identifier) \ + -> Tuple[dict, int, str]: + """ + Get temporal Geometry of collection item + + :param request: A request object + :param dataset: dataset name + :param identifier: item identifier + + 
:returns: tuple of headers, status code, content + """ + + if not request.is_valid(): + return api.get_format_exception(request) + headers = request.get_response_headers(SYSTEM_LOCALE) + + excuted, feature_list = get_list_of_features_id() + if excuted is False: + msg = str(feature_list) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'ConnectingError', msg) + + if [dataset, identifier] not in feature_list: + msg = 'Feature not found' + LOGGER.error(msg) + return api.get_exception( + HTTPStatus.NOT_FOUND, + headers, request.format, 'NotFound', msg) + + collection_id = dataset + mfeature_id = identifier + LOGGER.debug('Processing query parameters') + + LOGGER.debug('Processing offset parameter') + try: + offset = int(request.params.get('offset')) + if offset < 0: + msg = 'offset value should be positive or zero' + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + except TypeError as err: + LOGGER.warning(err) + offset = 0 + except ValueError: + msg = 'offset value should be an integer' + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + LOGGER.debug('Processing limit parameter') + try: + limit = int(request.params.get('limit')) + # TODO: We should do more validation, against the min and max + # allowed by the server configuration + if limit <= 0: + msg = 'limit value should be strictly positive' + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + if limit > 10000: + msg = 'limit value should be less than or equal to 10000' + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + except TypeError as err: + LOGGER.warning(err) + limit = int(api.config['server']['limit']) + except ValueError: + msg = 'limit value should be an integer' + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, 
request.format, 'InvalidParameterValue', msg) + + LOGGER.debug('Processing bbox parameter') + + bbox = request.params.get('bbox') + + if bbox is None: + bbox = [] + else: + try: + bbox = validate_bbox(bbox) + except ValueError as err: + msg = str(err) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + leaf_ = request.params.get('leaf') + LOGGER.debug('Processing leaf parameter') + try: + leaf_ = validate_leaf(leaf_) + except ValueError as err: + msg = str(err) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + sub_trajectory = request.params.get('subTrajectory') + if sub_trajectory is None: + sub_trajectory = False + + if (leaf_ != '' and leaf_ is not None) \ + and (sub_trajectory or sub_trajectory == 'true'): + msg = 'Cannot use both parameter `subTrajectory` \ + and `leaf` at the same time' + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + LOGGER.debug('Processing datetime parameter') + datetime_ = request.params.get('datetime') + try: + datetime_ = validate_datetime(datetime_) + except ValueError as err: + msg = str(err) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + LOGGER.debug('Querying provider') + LOGGER.debug('offset: {}'.format(offset)) + LOGGER.debug('limit: {}'.format(limit)) + LOGGER.debug('bbox: {}'.format(bbox)) + LOGGER.debug('leaf: {}'.format(leaf_)) + LOGGER.debug('datetime: {}'.format(datetime_)) + + pmdb_provider = PostgresMobilityDB() + content = { + "type": "TemporalGeometrySequence", + "geometrySequence": [], + "crs": {}, + "trs": {}, + "links": [], + } + + crs = None + trs = None + try: + pmdb_provider.connect() + result, number_matched, number_returned = pmdb_provider.\ + get_temporalgeometries(collection_id=collection_id, + mfeature_id=mfeature_id, + bbox=bbox, + leaf=leaf_, + 
datetime=datetime_, + limit=limit, + offset=offset, + sub_trajectory=sub_trajectory) + pymeos_initialize() + prisms = [] + for row in result: + temporal_geometry = json.loads(Temporal.as_mfjson( + TGeomPointSeq(str(row[3]).replace("'", "")), False)) + if 'crs' in temporal_geometry and crs is None: + crs = temporal_geometry['crs'] + if 'trs' in temporal_geometry and trs is None: + trs = temporal_geometry['trs'] + temporal_geometry = pmdb_provider\ + .convert_temporalgeometry_to_old_version( + temporal_geometry) + temporal_geometry['id'] = row[2] + + if (leaf_ != '' and leaf_ is not None) or \ + (sub_trajectory or sub_trajectory == 'true'): + if row[4] is not None: + temporal_geometry_filter = json.loads( + Temporal.as_mfjson( + TGeomPointSeq(str(row[4]).replace("'", "")), + False)) + temporal_geometry['datetimes'] = \ + temporal_geometry_filter['datetimes'] + temporal_geometry['coordinates'] = \ + temporal_geometry_filter['coordinates'] + else: + continue + # temporalGeometry['datetimes'] = [] + # temporalGeometry['coordinates'] = [] + prisms.append(temporal_geometry) + content["geometrySequence"] = prisms + except (Exception, psycopg2.Error) as error: + msg = str(error) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'ConnectingError', msg) + + if crs is not None: + content['crs'] = crs + else: + content['crs'] = { + "type": "Name", + "properties": "urn:ogc:def:crs:OGC:1.3:CRS84" + } + + if trs is not None: + content['trs'] = trs + else: + content['trs'] = { + "type": "Name", + "properties": "urn:ogc:data:time:iso8601" + } + + # TODO: translate titles + uri = '{}/{}/items/{}/tgsequence'.format( + api.get_collections_url(), collection_id, mfeature_id) + + serialized_query_params = '' + for k, v in request.params.items(): + if k not in ('f', 'offset'): + serialized_query_params += '&' + serialized_query_params += urllib.parse.quote(k, safe='') + serialized_query_params += '=' + serialized_query_params += urllib.parse.quote(str(v), 
safe=',') + + content['links'] = [ + {'href': '{}?offset={}{}'.format( + uri, offset, serialized_query_params), + 'rel': request.get_linkrel(F_JSON), + 'type': FORMAT_TYPES[F_JSON]}] + + if len(content['geometrySequence']) == limit: + next_ = offset + limit + content['links'].append( + {'href': '{}?offset={}{}'.format( + uri, next_, serialized_query_params), + 'type': 'application/geo+json', 'rel': 'next'}) + + content['timeStamp'] = datetime.utcnow().strftime( + '%Y-%m-%dT%H:%M:%S.%fZ') + + content['numberMatched'] = number_matched + content['numberReturned'] = len(content["geometrySequence"]) + return headers, HTTPStatus.OK, to_json(content, api.pretty_print) + + +def manage_collection_item_tGeometry( + api: API, request: APIRequest, + action, dataset, identifier, + tGeometry=None) -> Tuple[dict, int, str]: + """ + Adds Temporal Geometry item to a moving feature + + :param request: A request object + :param dataset: dataset name + :param identifier: moving feature's id + :param tGeometry: Temporal Geometry's id + + :returns: tuple of headers, status code, content + """ + + if not request.is_valid(PLUGINS['formatter'].keys()): + return api.get_format_exception(request) + + # Set Content-Language to system locale until provider locale + # has been determined + headers = request.get_response_headers(SYSTEM_LOCALE) + + pmdb_provider = PostgresMobilityDB() + excuted, feature_list = get_list_of_features_id() + + if excuted is False: + msg = str(feature_list) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'ConnectingError', msg) + + if [dataset, identifier] not in feature_list: + msg = 'Feature not found' + LOGGER.error(msg) + return api.get_exception( + HTTPStatus.NOT_FOUND, + headers, request.format, 'NotFound', msg) + + collection_id = dataset + mfeature_id = identifier + tGeometry_id = tGeometry + if action == 'create': + if not request.data: + msg = 'No data found' + LOGGER.error(msg) + return api.get_exception( + 
HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + data = request.data + try: + # Parse bytes data, if applicable + data = data.decode() + LOGGER.debug(data) + except (UnicodeDecodeError, AttributeError): + pass + + try: + data = json.loads(data) + except (json.decoder.JSONDecodeError, TypeError) as err: + # Input does not appear to be valid JSON + LOGGER.error(err) + msg = 'invalid request data' + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + if check_required_field_temporal_geometries(data) is False: + # TODO not all processes require input + msg = 'The required tag (e.g., type,prisms) \ + is missing from the request data.' + return api.get_exception( + HTTPStatus.NOT_IMPLEMENTED, + headers, request.format, 'MissingParameterValue', msg) + + LOGGER.debug('Creating item') + try: + pmdb_provider.connect() + if data['type'] == 'MovingGeometryCollection': + for tGeometry in data['prisms']: + tGeometry_id = pmdb_provider.\ + post_temporalgeometry( + collection_id, mfeature_id, tGeometry) + + else: + tGeometry_id = pmdb_provider.post_temporalgeometry( + collection_id, mfeature_id, data) + except (Exception, psycopg2.Error) as error: + msg = str(error) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'ConnectingError', msg) + finally: + pmdb_provider.disconnect() + + headers['Location'] = '{}/{}/items/{}/tgsequence/{}'.format( + api.get_collections_url(), dataset, mfeature_id, tGeometry_id) + + return headers, HTTPStatus.CREATED, '' + + if action == 'delete': + LOGGER.debug('Deleting item') + + try: + pmdb_provider.connect() + pmdb_provider.delete_temporalgeometry( + "AND tgeometry_id ='{0}'".format(tGeometry_id)) + + except (Exception, psycopg2.Error) as error: + msg = str(error) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'ConnectingError', msg) + finally: + pmdb_provider.disconnect() + + return 
headers, HTTPStatus.NO_CONTENT, '' + + +def get_collection_items_tGeometry_velocity( + api: API, request: APIRequest, + dataset, identifier, tGeometry) -> Tuple[dict, int, str]: + """ + Get a time-to-velocity curve of a temporal primitive geometry + + :param request: A request object + :param dataset: dataset name + :param identifier: moving feature's id + :param tGeometry: Temporal Geometry's id + + :returns: tuple of headers, status code, content + """ + + headers = request.get_response_headers(SYSTEM_LOCALE) + collection_id = dataset + mfeature_id = identifier + tgeometry_id = tGeometry + pmdb_provider = PostgresMobilityDB() + datetime_ = request.params.get('datetime') + try: + datetime_ = validate_datetime(datetime_, return_type=False) + except ValueError as err: + msg = str(err) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + leaf_ = request.params.get('leaf') + try: + leaf_ = validate_leaf(leaf_) + except ValueError as err: + msg = str(err) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + sub_temporal_value = request.params.get('subTemporalValue') + if sub_temporal_value is None: + sub_temporal_value = False + + try: + pmdb_provider.connect() + content = pmdb_provider.get_velocity( + collection_id, mfeature_id, tgeometry_id, + datetime_, leaf_, sub_temporal_value) + except (Exception, psycopg2.Error) as error: + msg = str(error) + return api.get_exception( + HTTPStatus.INTERNAL_SERVER_ERROR, + headers, request.format, 'Server Internal Error', msg) + finally: + pmdb_provider.disconnect() + + return headers, HTTPStatus.OK, content + + +def get_collection_items_tGeometry_distance( + api: API, request: APIRequest, + dataset, identifier, tGeometry) -> Tuple[dict, int, str]: + """ + Get a time-to-distance curve of a temporal primitive geometry + + :param request: A request object + :param dataset: dataset name + :param identifier: 
moving feature's id + :param tGeometry: Temporal Geometry's id + + :returns: tuple of headers, status code, content + """ + + headers = request.get_response_headers(SYSTEM_LOCALE) + collection_id = str(dataset) + mfeature_id = str(identifier) + tgeometry_id = str(tGeometry) + pmdb_provider = PostgresMobilityDB() + datetime_ = request.params.get('datetime') + try: + datetime_ = validate_datetime(datetime_, return_type=False) + except ValueError as err: + msg = str(err) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + leaf_ = request.params.get('leaf') + try: + leaf_ = validate_leaf(leaf_) + except ValueError as err: + msg = str(err) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + sub_temporal_value = request.params.get('subTemporalValue') + if sub_temporal_value is None: + sub_temporal_value = False + + try: + pmdb_provider.connect() + content = pmdb_provider.get_distance( + collection_id, mfeature_id, tgeometry_id, + datetime_, leaf_, sub_temporal_value) + except (Exception, psycopg2.Error) as error: + msg = str(error) + return api.get_exception( + HTTPStatus.INTERNAL_SERVER_ERROR, + headers, request.format, 'ConnectingError', msg) + finally: + pmdb_provider.disconnect() + + return headers, HTTPStatus.OK, content + + +def get_collection_items_tGeometry_acceleration( + api: API, request: APIRequest, + dataset, identifier, tGeometry) -> Tuple[dict,int, str]: + """ + Get a time-to-acceleration curve of a temporal primitive geometry + + :param request: A request object + :param dataset: dataset name + :param identifier: moving feature's id + :param tGeometry: Temporal Geometry's id + + :returns: tuple of headers, status code, content + """ + headers = request.get_response_headers(SYSTEM_LOCALE) + collection_id = dataset + mfeature_id = identifier + tgeometry_id = tGeometry + pmdb_provider = PostgresMobilityDB() + datetime_ = 
request.params.get('datetime') + try: + datetime_ = validate_datetime(datetime_, return_type=False) + except ValueError as err: + msg = str(err) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + leaf_ = request.params.get('leaf') + try: + leaf_ = validate_leaf(leaf_) + except ValueError as err: + msg = str(err) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + sub_temporal_value = request.params.get('subTemporalValue') + if sub_temporal_value is None: + sub_temporal_value = False + + try: + pmdb_provider.connect() + content = pmdb_provider.get_acceleration( + collection_id, mfeature_id, tgeometry_id, + datetime_, leaf_, sub_temporal_value) + except (Exception, psycopg2.Error) as error: + msg = str(error) + return api.get_exception( + HTTPStatus.INTERNAL_SERVER_ERROR, + headers, request.format, 'ConnectingError', msg) + finally: + pmdb_provider.disconnect() + + return headers, HTTPStatus.OK, content + + +def get_collection_items_tProperty(api: API, request: APIRequest, + dataset, + identifier) -> Tuple[dict, int, str]: + """ + Get temporal Properties of collection item + + :param request: A request object + :param dataset: dataset name + :param identifier: item identifier + + :returns: tuple of headers, status code, content + """ + + if not request.is_valid(): + return api.get_format_exception(request) + headers = request.get_response_headers(SYSTEM_LOCALE) + + excuted, feature_list = get_list_of_features_id() + if excuted is False: + msg = str(feature_list) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'ConnectingError', msg) + + if [dataset, identifier] not in feature_list: + msg = 'Feature not found' + LOGGER.error(msg) + return api.get_exception( + HTTPStatus.NOT_FOUND, + headers, request.format, 'NotFound', msg) + + collection_id = dataset + mfeature_id = identifier + LOGGER.debug('Processing query 
parameters') + + LOGGER.debug('Processing offset parameter') + try: + offset = int(request.params.get('offset')) + if offset < 0: + msg = 'offset value should be positive or zero' + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + except TypeError as err: + LOGGER.warning(err) + offset = 0 + except ValueError: + msg = 'offset value should be an integer' + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + LOGGER.debug('Processing limit parameter') + try: + limit = int(request.params.get('limit')) + # TODO: We should do more validation, against the min and max + # allowed by the server configuration + if limit <= 0: + msg = 'limit value should be strictly positive' + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + if limit > 10000: + msg = 'limit value should be less than or equal to 10000' + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + except TypeError as err: + LOGGER.warning(err) + limit = int(api.config['server']['limit']) + except ValueError: + msg = 'limit value should be an integer' + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + LOGGER.debug('Processing datetime parameter') + datetime_ = request.params.get('datetime') + try: + datetime_ = validate_datetime(datetime_) + except ValueError as err: + msg = str(err) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + sub_temporal_value = request.params.get('subTemporalValue') + if sub_temporal_value is None: + sub_temporal_value = False + + LOGGER.debug('Querying provider') + LOGGER.debug('offset: {}'.format(offset)) + LOGGER.debug('limit: {}'.format(limit)) + LOGGER.debug('datetime: {}'.format(datetime_)) + + pmdb_provider = 
PostgresMobilityDB() + content = { + "temporalProperties": [], + "links": [] + } + + try: + pmdb_provider.connect() + result, number_matched, number_returned = pmdb_provider.\ + get_temporalproperties(collection_id=collection_id, + mfeature_id=mfeature_id, + datetime=datetime_, + limit=limit, offset=offset, + sub_temporal_value=sub_temporal_value) + temporal_properties = [] + if sub_temporal_value is False or sub_temporal_value == "false": + for row in result: + temporal_property = row[3] if row[3] is not None else {} + temporal_property['name'] = row[2] + + temporal_properties.append(temporal_property) + else: + split_groups = {} + for i in range(len(result)): + group_id = str(result[i][4]) + if group_id not in split_groups: + split_groups[group_id] = [] + split_groups[group_id].append(i) + pymeos_initialize() + for key, group_row_index in split_groups.items(): + group = {} + group["datetimes"] = [] + for row_index in group_row_index: + row = result[int(row_index)] + tproperties_name = row[2] + group[tproperties_name] \ + = row[3] if row[3] is not None else {} + if row[5] is not None or row[6] is not None: + temporal_property_value = Temporal.as_mfjson( + TFloatSeq(str(row[5]).replace("'", "")), + False) if row[5] \ + is not None else Temporal.as_mfjson( + TTextSeq(str(row[6]).replace("'", "")), + False) + temporal_property_value = pmdb_provider.\ + convert_temporalproperty_value_to_base_version( + json.loads(temporal_property_value)) + + if 'datetimes' in temporal_property_value: + group["datetimes"] = \ + temporal_property_value.pop( + "datetimes", None) + group[tproperties_name].update( + temporal_property_value) + temporal_properties.append(group) + content["temporalProperties"] = temporal_properties + except (Exception, psycopg2.Error) as error: + msg = str(error) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'ConnectingError', msg) + + # TODO: translate titles + uri = '{}/{}/items/{}/tProperties'.format( + 
api.get_collections_url(), collection_id, mfeature_id) + + serialized_query_params = '' + for k, v in request.params.items(): + if k not in ('f', 'offset'): + serialized_query_params += '&' + serialized_query_params += urllib.parse.quote(k, safe='') + serialized_query_params += '=' + serialized_query_params += urllib.parse.quote(str(v), safe=',') + + content['links'] = [ + {'href': '{}?offset={}{}'.format( + uri, offset, serialized_query_params), + 'rel': request.get_linkrel(F_JSON), + 'type': FORMAT_TYPES[F_JSON]}] + + if len(content['temporalProperties']) == limit: + next_ = offset + limit + content['links'].append( + {'href': '{}?offset={}{}'.format( + uri, next_, serialized_query_params), + 'type': 'application/geo+json', 'rel': 'next', }) + + content['timeStamp'] = datetime.utcnow().strftime( + '%Y-%m-%dT%H:%M:%S.%fZ') + + content['numberMatched'] = number_matched + content['numberReturned'] = number_returned + return headers, HTTPStatus.OK, to_json(content, api.pretty_print) + + +def manage_collection_item_tProperty( + api: API, request: APIRequest, + action, dataset, identifier, + tProperty=None) -> Tuple[dict, int, str]: + """ + Adds Temporal Property item to a moving feature + + :param request: A request object + :param dataset: dataset name + :param identifier: moving feature's id + :param tProperty: Temporal Property's id + + :returns: tuple of headers, status code, content + """ + + if not request.is_valid(PLUGINS['formatter'].keys()): + return api.get_format_exception(request) + + # Set Content-Language to system locale until provider locale + # has been determined + headers = request.get_response_headers(SYSTEM_LOCALE) + + pmdb_provider = PostgresMobilityDB() + excuted, feature_list = get_list_of_features_id() + + if excuted is False: + msg = str(feature_list) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'ConnectingError', msg) + + if [dataset, identifier] not in feature_list: + msg = 'Feature not found' + 
LOGGER.error(msg) + return api.get_exception( + HTTPStatus.NOT_FOUND, + headers, request.format, 'NotFound', msg) + + collection_id = dataset + mfeature_id = identifier + tProperties_name = tProperty + if action == 'create': + if not request.data: + msg = 'No data found' + LOGGER.error(msg) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + data = request.data + try: + # Parse bytes data, if applicable + data = data.decode() + LOGGER.debug(data) + except (UnicodeDecodeError, AttributeError): + pass + + try: + if not isinstance(data, list): + data = json.loads(data) + else: + for d in data: + _ = json.loads(d) + except (json.decoder.JSONDecodeError, TypeError) as err: + # Input does not appear to be valid JSON + LOGGER.error(err) + msg = 'invalid request data' + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + if check_required_field_temporal_property(data) is False: + # TODO not all processes require input + msg = 'The required tag (e.g., datetimes,interpolation) \ + is missing from the request data.' + return api.get_exception( + HTTPStatus.NOT_IMPLEMENTED, + headers, request.format, 'MissingParameterValue', msg) + + LOGGER.debug('Creating item') + try: + pmdb_provider.connect() + # temporalProperties = data['temporalProperties'] + temporal_properties = data + temporal_properties = [temporal_properties] if not isinstance( + temporal_properties, list) else temporal_properties + + can_post = pmdb_provider.check_temporalproperty_can_post( + collection_id, mfeature_id, temporal_properties) + tProperties_name_list = [] + if can_post: + for temporalProperty in temporal_properties: + tProperties_name_list.extend( + pmdb_provider. 
post_temporalproperties( + collection_id, mfeature_id, temporalProperty)) + else: + return headers, HTTPStatus.BAD_REQUEST, '' + except (Exception, psycopg2.Error) as error: + msg = str(error) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'ConnectingError', msg) + finally: + pmdb_provider.disconnect() + + location_list = [] + for tProperties_name in tProperties_name_list: + location_list.append('{}/{}/items/{}/tProperties/{}'.format( + api.get_collections_url(), dataset, mfeature_id, + tProperties_name)) + headers['Locations'] = location_list + + return headers, HTTPStatus.CREATED, '' + + if action == 'delete': + LOGGER.debug('Deleting item') + + try: + pmdb_provider.connect() + pmdb_provider.delete_temporalproperties( + """AND collection_id ='{0}' AND mfeature_id ='{1}' + AND tproperties_name ='{2}'""".format( + collection_id, mfeature_id, tProperties_name)) + + except (Exception, psycopg2.Error) as error: + msg = str(error) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'ConnectingError', msg) + finally: + pmdb_provider.disconnect() + + return headers, HTTPStatus.NO_CONTENT, '' + + +def get_collection_items_tProperty_value(api: API, request: APIRequest, + dataset, + identifier, + tProperty) \ + -> Tuple[dict, int, str]: + """ + Get temporal Properties of collection item + + :param request: A request object + :param dataset: dataset name + :param identifier: item identifier + :param tProperty: Temporal Property + + :returns: tuple of headers, status code, content + """ + + if not request.is_valid(): + return api.get_format_exception(request) + headers = request.get_response_headers(SYSTEM_LOCALE) + + excuted, tproperty_list = get_list_of_tproperties_name() + if excuted is False: + msg = str(tproperty_list) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'ConnectingError', msg) + + if [dataset, identifier, tProperty] not in tproperty_list: + msg = 'Temporal 
Property not found' + LOGGER.error(msg) + return api.get_exception( + HTTPStatus.NOT_FOUND, + headers, request.format, 'NotFound', msg) + + collection_id = dataset + mfeature_id = identifier + tProperty_name = tProperty + LOGGER.debug('Processing query parameters') + + LOGGER.debug('Processing offset parameter') + try: + offset = int(request.params.get('offset')) + if offset < 0: + msg = 'offset value should be positive or zero' + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + except TypeError as err: + LOGGER.warning(err) + offset = 0 + except ValueError: + msg = 'offset value should be an integer' + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + LOGGER.debug('Processing limit parameter') + try: + limit = int(request.params.get('limit')) + # TODO: We should do more validation, against the min and max + # allowed by the server configuration + if limit <= 0: + msg = 'limit value should be strictly positive' + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + if limit > 10000: + msg = 'limit value should be less than or equal to 10000' + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + except TypeError as err: + LOGGER.warning(err) + limit = int(api.config['server']['limit']) + except ValueError: + msg = 'limit value should be an integer' + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + LOGGER.debug('Processing leaf parameter') + leaf_ = request.params.get('leaf') + try: + leaf_ = validate_leaf(leaf_) + except ValueError as err: + msg = str(err) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + sub_temporal_value = request.params.get('subTemporalValue') + if sub_temporal_value is None: + 
sub_temporal_value = False + + if (leaf_ != '' and leaf_ is not None) and \ + (sub_temporal_value or sub_temporal_value == 'true'): + msg = 'Cannot use both parameter `subTemporalValue` \ + and `leaf` at the same time' + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + LOGGER.debug('Processing datetime parameter') + datetime_ = request.params.get('datetime') + try: + datetime_ = validate_datetime(datetime_) + except ValueError as err: + msg = str(err) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + LOGGER.debug('Querying provider') + LOGGER.debug('offset: {}'.format(offset)) + LOGGER.debug('limit: {}'.format(limit)) + LOGGER.debug('leaf: {}'.format(leaf_)) + LOGGER.debug('datetime: {}'.format(datetime_)) + + pmdb_provider = PostgresMobilityDB() + content = {} + + try: + pmdb_provider.connect() + result = pmdb_provider.get_temporalproperties_value( + collection_id=collection_id, mfeature_id=mfeature_id, + tProperty_name=tProperty_name, + datetime=datetime_, leaf=leaf_, + sub_temporal_value=sub_temporal_value) + pymeos_initialize() + value_sequence = [] + for row in result: + content = row[3] + if row[5] is not None or row[6] is not None: + temporal_property_value = Temporal.as_mfjson( + TFloatSeq(str(row[5]).replace("'", "")), + False) if row[5] is not None else Temporal.as_mfjson( + TTextSeq(str(row[6]).replace("'", "")), + False) + value_sequence.append( + pmdb_provider. 
+ convert_temporalproperty_value_to_base_version( + json.loads( + temporal_property_value))) + content["valueSequence"] = value_sequence + except (Exception, psycopg2.Error) as error: + msg = str(error) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'ConnectingError', msg) + + # TODO: translate titles + return headers, HTTPStatus.OK, to_json(content, api.pretty_print) + + +def manage_collection_item_tProperty_value( + api: API, request: APIRequest, + action, dataset, identifier, + tProperty=None, tvalue=None) -> Tuple[dict, int, str]: + """ + Adds Temporal Property Value item to a Temporal Property + + :param request: A request object + :param dataset: dataset name + :param identifier: moving feature's id + :param tProperty: Temporal Property's id + + :returns: tuple of headers, status code, content + """ + + if not request.is_valid(PLUGINS['formatter'].keys()): + return api.get_format_exception(request) + + # Set Content-Language to system locale until provider locale + # has been determined + headers = request.get_response_headers(SYSTEM_LOCALE) + + pmdb_provider = PostgresMobilityDB() + excuted, tproperty_list = get_list_of_tproperties_name() + if excuted is False: + msg = str(tproperty_list) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'ConnectingError', msg) + if [dataset, identifier, tProperty] not in tproperty_list: + msg = 'Temporal Property not found' + LOGGER.error(msg) + return api.get_exception( + HTTPStatus.NOT_FOUND, + headers, request.format, 'NotFound', msg) + + collection_id = dataset + mfeature_id = identifier + tProperty_name = tProperty + tvalue_id = tvalue + if action == 'create': + if not request.data: + msg = 'No data found' + LOGGER.error(msg) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + data = request.data + try: + # Parse bytes data, if applicable + data = data.decode() + LOGGER.debug(data) + except 
(UnicodeDecodeError, AttributeError): + pass + + try: + data = json.loads(data) + except (json.decoder.JSONDecodeError, TypeError) as err: + # Input does not appear to be valid JSON + LOGGER.error(err) + msg = 'invalid request data' + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'InvalidParameterValue', msg) + + if check_required_field_temporal_value(data) is False: + # TODO not all processes require input + msg = 'The required tag (e.g., datetimes,value) \ + is missing from the request data.' + return api.get_exception( + HTTPStatus.NOT_IMPLEMENTED, + headers, request.format, 'MissingParameterValue', msg) + + LOGGER.debug('Creating item') + try: + pmdb_provider.connect() + can_post = pmdb_provider.check_temporalproperty_can_post( + collection_id, mfeature_id, [data], tProperty_name) + if can_post: + pValue_id = pmdb_provider.post_temporalvalue( + collection_id, mfeature_id, tProperty_name, data) + else: + return headers, HTTPStatus.BAD_REQUEST, '' + except (Exception, psycopg2.Error) as error: + msg = str(error) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'ConnectingError', msg) + finally: + pmdb_provider.disconnect() + headers['Location'] = '{}/{}/items/{}/tProperties/{}/pvalue/{}'\ + .format(api.get_collections_url(), dataset, mfeature_id, + tProperty_name, pValue_id) + + return headers, HTTPStatus.CREATED, '' + + if action == 'delete': + LOGGER.debug('Deleting item') + + try: + pmdb_provider.connect() + pmdb_provider.delete_temporalvalue( + "AND tvalue_id ='{0}'".format(tvalue_id)) + + except (Exception, psycopg2.Error) as error: + msg = str(error) + return api.get_exception( + HTTPStatus.BAD_REQUEST, + headers, request.format, 'ConnectingError', msg) + finally: + pmdb_provider.disconnect() + + return headers, HTTPStatus.NO_CONTENT, '' + + +def validate_bbox(value=None) -> list: + """ + Helper function to validate bbox parameter + + :param value: `list` of minx, miny, maxx, maxy + + 
:returns: bbox as `list` of `float` values
+    """
+
+    if value is None:
+        LOGGER.debug('bbox is empty')
+        return []
+
+    bbox = value.split(',')
+
+    if len(bbox) != 4 and len(bbox) != 6:
+        msg = 'bbox should be 4 values (minx,miny,maxx,maxy) or \
+            6 values (minx,miny,minz,maxx,maxy,maxz)'
+        LOGGER.debug(msg)
+        raise ValueError(msg)
+
+    try:
+        bbox = [float(c) for c in bbox]
+    except ValueError as err:
+        msg = 'bbox values must be numbers'
+        err.args = (msg,)
+        LOGGER.debug(msg)
+        raise
+
+    if len(bbox) == 4:
+        if bbox[1] > bbox[3]:
+            msg = 'miny should be less than maxy'
+            LOGGER.debug(msg)
+            raise ValueError(msg)
+
+        if bbox[0] > bbox[2]:
+            msg = 'minx is greater than maxx (possibly antimeridian bbox)'
+            LOGGER.debug(msg)
+            raise ValueError(msg)
+
+    if len(bbox) == 6:
+        if bbox[2] > bbox[5]:
+            msg = 'minz should be less than maxz'
+            LOGGER.debug(msg)
+            raise ValueError(msg)
+
+        if bbox[1] > bbox[4]:
+            msg = 'miny should be less than maxy'
+            LOGGER.debug(msg)
+            raise ValueError(msg)
+
+        if bbox[0] > bbox[3]:
+            msg = 'minx is greater than maxx (possibly antimeridian bbox)'
+            LOGGER.debug(msg)
+            raise ValueError(msg)
+
+    return bbox
+
+
+def validate_leaf(leaf_=None) -> str:
+    """
+    Helper function to validate the leaf parameter
+
+    :param leaf_: `str` of comma-separated datetime values,
+                  expected in strictly ascending order
+
+    :returns: `str` of normalized leaf timestamps, if valid
+    """
+
+    # TODO: pass datetime to query as a `datetime` object
+    # we would need to ensure partial dates work accordingly
+    # as well as setting '..' 
values to `None` so that underlying
+    # providers can just assume a `datetime.datetime` object
+    #
+    # NOTE: needs testing when passing partials from API to backend
+
+    unix_epoch = datetime(1970, 1, 1, 0, 0, 0)
+    dateparse_ = partial(dateparse, default=unix_epoch)
+
+    leaf_invalid = False
+
+    if leaf_ is not None:
+        LOGGER.debug('detected leaf_')
+        LOGGER.debug('Validating time windows')
+        leaf_list = leaf_.split(',')
+
+        leaf_ = ''
+        if (len(leaf_list) > 0):
+            datetime_ = dateparse_(leaf_list[0])
+            leaf_ = datetime_.strftime('%Y-%m-%d %H:%M:%S.%f')
+
+        for i in range(1, len(leaf_list)):
+            datetime_pre = dateparse_(leaf_list[i - 1])
+            datetime_ = dateparse_(leaf_list[i])
+
+            if datetime_pre != '..':
+                if datetime_pre.tzinfo is None:
+                    datetime_pre = datetime_pre.replace(tzinfo=pytz.UTC)
+
+            if datetime_ != '..':
+                if datetime_.tzinfo is None:
+                    datetime_ = datetime_.replace(tzinfo=pytz.UTC)
+
+            if datetime_pre >= datetime_:
+                leaf_invalid = True
+                break
+            leaf_ += ',' + datetime_.strftime('%Y-%m-%d %H:%M:%S.%f')
+
+    if leaf_invalid:
+        msg = 'invalid leaf'
+        LOGGER.debug(msg)
+        raise ValueError(msg)
+    return leaf_
+
+
+def validate_datetime(datetime_=None, return_type=True) -> str:
+    """
+    Helper function to validate the datetime parameter
+
+    :param datetime_: `str` of datetime instant or interval
+    :param return_type: `bool`, pair format for instants when True
+
+    :returns: `str` of normalized datetime input, if valid
+    """
+
+    # TODO: pass datetime to query as a `datetime` object
+    # we would need to ensure partial dates work accordingly
+    # as well as setting '..' 
values to `None` so that underlying + # providers can just assume a `datetime.datetime` object + # + # NOTE: needs testing when passing partials from API to backend + + datetime_invalid = False + + if datetime_ is not None and datetime_ != '': + dateparse_begin = partial(dateparse, default=datetime.min) + dateparse_end = partial(dateparse, default=datetime.max) + unix_epoch = datetime(1970, 1, 1, 0, 0, 0) + dateparse_ = partial(dateparse, default=unix_epoch) + + if '/' in datetime_: # envelope + LOGGER.debug('detected time range') + LOGGER.debug('Validating time windows') + + # normalize "" to ".." (actually changes datetime_) + datetime_ = re.sub(r'^/', '../', datetime_) + datetime_ = re.sub(r'/$', '/..', datetime_) + + datetime_begin, datetime_end = datetime_.split('/') + if datetime_begin != '..': + datetime_begin = dateparse_begin(datetime_begin) + if datetime_begin.tzinfo is None: + datetime_begin = datetime_begin.replace( + tzinfo=pytz.UTC) + else: + datetime_begin = datetime(1, 1, 1, 0, 0, 0).replace( + tzinfo=pytz.UTC) + + if datetime_end != '..': + datetime_end = dateparse_end(datetime_end) + if datetime_end.tzinfo is None: + datetime_end = datetime_end.replace(tzinfo=pytz.UTC) + else: + datetime_end = datetime(9999, 1, 1, 0, 0, 0).replace( + tzinfo=pytz.UTC) + + datetime_invalid = any([ + (datetime_begin > datetime_end) + ]) + datetime_ = datetime_begin.strftime( + '%Y-%m-%d %H:%M:%S.%f') + ',' + \ + datetime_end.strftime('%Y-%m-%d %H:%M:%S.%f') + else: # time instant + LOGGER.debug('detected time instant') + datetime__ = dateparse_(datetime_) + if datetime__ != '..': + if datetime__.tzinfo is None: + datetime__ = datetime__.replace(tzinfo=pytz.UTC) + datetime_invalid = any([ + (datetime__ == '..') + ]) + if return_type: + datetime_ = datetime__.strftime( + '%Y-%m-%d %H:%M:%S.%f') + ',' + \ + datetime__.strftime('%Y-%m-%d %H:%M:%S.%f') + else: + datetime_ = datetime__.strftime('%Y-%m-%d %H:%M:%S.%f') + + if datetime_invalid: + msg = 'datetime parameter 
out of range' + LOGGER.debug(msg) + raise ValueError(msg) + return datetime_ + + +def get_list_of_collections_id(): + pmdb_provider = PostgresMobilityDB() + try: + pmdb_provider.connect() + result = pmdb_provider.get_collections_list() + collections_id = [] + for row in result: + collections_id.append(row[0]) + return True, collections_id + except (Exception, psycopg2.Error) as error: + return False, error + finally: + pmdb_provider.disconnect() + + +def get_list_of_features_id(): + pmdb_provider = PostgresMobilityDB() + try: + pmdb_provider.connect() + result = pmdb_provider.get_features_list() + features_list = [] + for row in result: + features_list.append([row[0], row[1]]) + return True, features_list + except (Exception, psycopg2.Error) as error: + return False, error + finally: + pmdb_provider.disconnect() + + +def get_list_of_tproperties_name(): + pmdb_provider = PostgresMobilityDB() + try: + pmdb_provider.connect() + result = pmdb_provider.get_tProperties_name_list() + tproperties_name_list = [] + for row in result: + tproperties_name_list.append([row[0], row[1], row[2]]) + return True, tproperties_name_list + except (Exception, psycopg2.Error) as error: + return False, error + finally: + pmdb_provider.disconnect() + + +def check_required_field_feature(feature): + if 'type' in feature: + if feature['type'] == 'FeatureCollection': + return True + + if 'type' not in feature or 'temporalGeometry' not in feature: + return False + + if check_required_field_temporal_geometries( + feature['temporalGeometry']) is False: + return False + + if 'temporalProperties' in feature: + if check_required_field_temporal_property( + feature['temporalProperties']) is False: + return False + + if 'geometry' in feature: + if check_required_field_geometries(feature['geometry']) is False: + return False + + if 'crs' in feature: + if check_required_field_crs(feature['crs']) is False: + return False + + if 'trs' in feature: + if check_required_field_trs(feature['trs']) is False: + 
return False + + return True + + +def check_required_field_geometries(geometry): + if (check_required_field_geometry_array(geometry) is False + and check_required_field_geometry_single(geometry) is False): + return False + return True + + +def check_required_field_geometry_array(geometry): + if 'type' not in geometry or 'geometries' not in geometry: + return False + + geometries = geometry['geometries'] + geometries = [geometries] if not isinstance(geometries, list) else geometries + for l_geometry in geometries: + if check_required_field_geometry_single(l_geometry) is False: + return False + + return True + + +def check_required_field_geometry_single(geometry): + if ('type' not in geometry + or 'coordinates' not in geometry): + return False + return True + + +def check_required_field_temporal_geometries(temporal_geometries): + if (check_required_field_temporal_geometry_array(temporal_geometries) is False + and check_required_field_temporal_geometry_single(temporal_geometries) is False): + return False + return True + + +def check_required_field_temporal_geometry_array(temporal_geometries): + if ('type' not in temporal_geometries + or 'prisms' not in temporal_geometries): + return False + prisms = temporal_geometries['prisms'] + prisms = [prisms] if not isinstance(prisms, list) else prisms + for temporal_geometry in prisms: + if check_required_field_temporal_geometry_single(temporal_geometry) is False: + return False + + if 'crs' in temporal_geometries: + if check_required_field_crs(temporal_geometry['crs']) is False: + return False + + if 'trs' in temporal_geometries: + if check_required_field_trs(temporal_geometry['trs']) is False: + return False + + return True + + +def check_required_field_temporal_geometry_single(temporal_geometry): + if ('type' not in temporal_geometry + or 'datetimes' not in temporal_geometry + or 'coordinates' not in temporal_geometry): + return False + + if 'crs' in temporal_geometry: + if check_required_field_crs(temporal_geometry['crs']) 
is False: + return False + + if 'trs' in temporal_geometry: + if check_required_field_trs(temporal_geometry['trs']) is False: + return False + + return True + + +def check_required_field_temporal_property(temporal_properties): + temporal_properties = [temporal_properties] \ + if not isinstance(temporal_properties, list) \ + else temporal_properties + + for temporal_property in temporal_properties: + if ('datetimes' not in temporal_property): + return False + + for tproperties_name in temporal_property: + if (tproperties_name != 'datetimes' + and ('values' not in temporal_property[tproperties_name] + or 'interpolation' not in temporal_property[tproperties_name])): + return False + return True + + +def check_required_field_temporal_value(temporalValue): + if ('datetimes' not in temporalValue + or 'values' not in temporalValue + or 'interpolation' not in temporalValue): + return False + return True + + +def check_required_field_crs(crs): + if ('type' not in crs + or 'properties' not in crs): + return False + return True + + +def check_required_field_trs(trs): + if ('type' not in trs + or 'properties' not in trs): + return False + return True + +# fmt: off +def get_oas_30(cfg: dict, locale: str) -> tuple[list[dict[str, str]], dict[str, dict]]: + """ + Get OpenAPI fragments + + :param cfg: `dict` of configuration + :param locale: `str` of locale + + :returns: `tuple` of `list` of tag objects, and `dict` of path objects + """ + from pygeoapi.openapi import OPENAPI_YAML + + paths = {} + collections_collectionId_path = '/collections/{collectionId}' + paths[collections_collectionId_path] = { + "get": { + "operationId": "accessMetadata", + "summary": "Access metadata about the collection", + "description": "A user can access metadata with id `collectionId`.\n", + "tags": ["MovingFeatureCollection"], + "parameters": [{"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId"}], + "responses": { + "200": {"$ref": 
f"{OPENAPI_YAML['movingfeature']}#/components/responses/Collection"}, + "404": {"description": "A collection with the specified id was not found."}, + "500": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError"} + } + }, + "delete": { + "operationId": "deleteCollection", + "summary": "Delete the collection", + "description": "The collection catalog with id `collectionId` and including metadata and moving features SHOULD be deleted.\n", + "tags": ["MovingFeatureCollection"], + "parameters": [{"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId"}], + "responses": { + "204": {"description": "Successfully deleted."}, + "404": {"description": "A collection with the specified name was not found."}, + "500": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError"} + } + }, + "put": { + "operationId": "replaceMetadata", + "summary": "Replace metadata about the collection", + "description": "A user SHOULD replace metadata with id `collectionId`.\n\nThe request body schema is the same the POST's one. 
\n\nHowever, `updateFrequency` property is NOT updated.\n", + "tags": ["MovingFeatureCollection"], + "parameters": [{"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId"}], + "requestBody": { + "content": { + "application/json": { + "schema": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/schemas/collection-body"} + } + } + }, + "responses": { + "204": {"description": "Successfully replaced."}, + "404": {"description": "A collection with the specified name was not found."}, + "500": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError"} + } + } + } + + collections_collectionId_items_path = '/collections/{collectionId}/items' + paths[collections_collectionId_items_path] = { + "get": { + "operationId": "retrieveMovingFeatures", + "summary": "Retrieve moving feature collection", + "description": "A user can retrieve moving feature collection to access the static information of the moving feature by simple filtering and a limit.\n\nSpecifically, if the `subTrajectory` parameter is \"true\", it will return the temporal geometry within the time interval specified by `datetime` parameter.\n", + "tags": ["MovingFeatures"], + "parameters": [ + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/bbox"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/datetime"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/limit"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/subtrajectory"} + ], + "responses": { + "200": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/MovingFeatures"}, + "404": {"description": "A collection with the specified id was not found."}, + "500": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError"} + } + }, + "post": { + "operationId": "insertMovingFeatures", + "summary": "Insert moving features", + 
"description": "A user SHOULD insert a set of moving features or a moving feature into a collection with id `collectionId`.\n\nThe request body schema SHALL follows the [MovingFeature object](https://docs.opengeospatial.org/is/19-045r3/19-045r3.html#mfeature) or \n[MovingFeatureCollection object](https://docs.opengeospatial.org/is/19-045r3/19-045r3.html#mfeaturecollection) in the OGC MF-JSON.\n", + "tags": ["MovingFeatures"], + "parameters": [{"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId"}], + "requestBody": { + "content": { + "application/json": { + "schema": { + "oneOf": [ + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/schemas/movingFeature-mfjson"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/schemas/movingFeatureCollection"} + ] + }, + "example": { + "type": "Feature", + "crs": { + "type": "Name", + "properties": {"name": "urn:ogc:def:crs:OGC:1.3:CRS84"} + }, + "trs": { + "type": "Link", + "properties": { + "type": "OGCDEF", + "href": "http://www.opengis.net/def/uom/ISO-8601/0/Gregorian", + } + }, + "temporalGeometry": { + "type": "MovingPoint", + "datetimes": ["2011-07-14T22:01:01Z", "2011-07-14T22:01:02Z", "2011-07-14T22:01:03Z", "2011-07-14T22:01:04Z", "2011-07-14T22:01:05Z"], + "coordinates": [[139.757083, 35.627701, 0.5], [139.757399, 35.627701, 2.0], [139.757555, 35.627688, 4.0], [139.757651, 35.627596, 4.0], [139.757716, 35.627483, 4.0]], + "interpolation": "Linear", + "base": { + "type": "glTF", + "href": "http://www.opengis.net/spec/movingfeatures/json/1.0/prism/example/car3dmodel.gltf" + }, + "orientations": [ + {"scales": [1, 1, 1], "angles": [0, 0, 0]}, + {"scales": [1, 1, 1], "angles": [0, 355, 0]}, + {"scales": [1, 1, 1], "angles": [0, 0, 330]}, + {"scales": [1, 1, 1], "angles": [0, 0, 300]}, + {"scales": [1, 1, 1], "angles": [0, 0, 270]} + ] + }, + "temporalProperties": [ + { + "datetimes": ["2011-07-14T22:01:01.450Z", "2011-07-14T23:01:01.450Z", "2011-07-15T00:01:01.450Z"], + "length": { + 
"type": "Measure", + "form": "http://qudt.org/vocab/quantitykind/Length", + "values": [1.0, 2.4, 1.0], + "interpolation": "Linear", + }, + "discharge": { + "type": "Measure", + "form": "MQS", + "values": [3.0, 4.0, 5.0], + "interpolation": "Step" + } + } + ], + "geometry": { + "type": "LineString", + "coordinates": [[139.757083, 35.627701, 0.5], [139.757399, 35.627701, 2.0], [139.757555, 35.627688, 4.0], [139.757651, 35.627596, 4.0], [139.757716, 35.627483, 4.0]] + }, + "properties": { + "name": "car1", + "state": "test1", + "video": "http://www.opengis.net/spec/movingfeatures/json/1.0/prism/example/video.mpeg", + }, + "bbox": [139.757083, 35.627483, 0.0, 139.757716, 35.627701, 4.5], + "time": ["2011-07-14T22:01:01Z", "2011-07-15T01:11:22Z"], + "id": "mf-1" + } + } + } + }, + "responses": { + "201": { + "description": "Successful create a set of moving features or a moving feature into a specific collection.\n", + "headers": { + "Locations": { + "description": "A list of URI of the newly added resources", + "schema": {"type": "array","items": {"type": "string"}} + } + } + }, + "400": {"description": "A query parameter was not validly used."}, + "404": {"description": "A collection with the specified id was not found."}, + "500": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError"} + } + } + } + + collections_collectionId_items_mFeatureId_path = '/collections/{collectionId}/items/{mFeatureId}' + paths[collections_collectionId_items_mFeatureId_path] = { + "get": { + "operationId": "accessMovingFeature", + "summary": "Access the static data of the moving feature", + "description": "A user can access a static data of a moving feature with id `mFeatureId`.\n\nThe static data of a moving feature is not included temporal geometries and temporal properties.\n", + "tags": ["MovingFeatures"], + "parameters": [ + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId"}, + {"$ref": 
f"{OPENAPI_YAML['movingfeature']}#/components/parameters/mFeatureId"} + ], + "responses": { + "200": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/MovingFeature"}, + "404": {"description": "- A collection with the specified id was not found.\n- Or a moving feature with the specified id was not found.\n"}, + "500": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError"} + } + }, + "delete": { + "operationId": "deleteMovingFeature", + "summary": "Delete a single moving feature", + "description": "The moving feature with id `mFeatureId` and including temporal geometries and properties SHOULD be deleted.\n", + "tags": ["MovingFeatures"], + "parameters": [ + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/mFeatureId"} + ], + "responses": { + "204": {"description": "Successfully deleted."}, + "404": {"description": "- A collection with the specified id was not found.\n- Or a moving feature with the specified id was not found.\n"}, + "500": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError"} + } + } + } + collections_collectionId_items_mFeatureId_tgsequence_path = '/collections/{collectionId}/items/{mFeatureId}/tgsequence' + paths[collections_collectionId_items_mFeatureId_tgsequence_path] = { + "get": { + "operationId": "retrieveTemporalGeometrySequence", + "summary": "Retrieve the movement data of the single moving feature", + "description": "A user can retrieve only the movement data of a moving feature with id `mFeatureId` by simple filtering and a limit.\n", + "tags": ["TemporalGeometry"], + "parameters": [ + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/mFeatureId"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/bbox"}, + {"$ref": 
f"{OPENAPI_YAML['movingfeature']}#/components/parameters/datetime"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/limit"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/leaf"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/subtrajectory"} + ], + "responses": { + "200": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/TemporalGeometrySequence"}, + "500": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError"} + } + }, + "post": { + "operationId": "insertTemporalPrimitiveGeometry", + "summary": "Add movement data into the moving feature", + "description": "A user SHOULD add more movement data into a moving feature with id `mFeatureId`.\n\nThe request body schema SHALL follows the [TemporalPrimitiveGeometry object](https://docs.ogc.org/is/19-045r3/19-045r3.html#tprimitive) in the OGC MF-JSON.\n", + "tags": ["TemporalGeometry"], + "parameters": [ + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/mFeatureId"} + ], + "requestBody": { + "content": { + "application/json": { + "schema": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/schemas/temporalPrimitiveGeometry"}, + "example": { + "type": "MovingPoint", + "datetimes": ["2011-07-14T22:01:06Z","2011-07-14T22:01:07Z","2011-07-14T22:01:08Z","2011-07-14T22:01:09Z","2011-07-14T22:01:10Z"], + "coordinates": [ [139.757083, 35.627701, 0.5], [139.757399, 35.627701, 2.0], [139.757555, 35.627688, 4.0], [139.757651, 35.627596, 4.0], [139.757716, 35.627483, 4.0] ], + "interpolation": "Linear", + "base": { + "type": "glTF", + "href": "https://www.opengis.net/spec/movingfeatures/json/1.0/prism/example/car3dmodel.gltf" + }, + "orientations": [ + {"scales":[1,1,1], "angles":[0,0,0]}, + {"scales":[1,1,1], "angles":[0,355,0]}, + {"scales":[1,1,1], "angles":[0,0,330]}, + {"scales":[1,1,1], "angles":[0,0,300]}, + 
{"scales":[1,1,1], "angles":[0,0,270]} + ] + } + } + } + }, + "responses": { + "201": { + "description": "Successful add more movement data into a specified moving feature.\n", + "headers": { + "Location": { + "description": "A URI of the newly added resource", + "schema": {"type": "string"} + } + } + }, + "400": {"description": "A query parameter was not validly used."}, + "404": {"description": "- A collection with the specified id was not found.\n- Or a moving feature with the specified id was not found.\n"}, + "500": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError"} + } + } + } + + collections_collectionId_items_mFeatureId_tgsequence_tGeometryId_path = '/collections/{collectionId}/items/{mFeatureId}/tgsequence/{tGeometryId}' + paths[collections_collectionId_items_mFeatureId_tgsequence_tGeometryId_path] = { + "delete": { + "operationId": "deleteTemporalPrimitiveGeometry", + "summary": "Delete a singe temporal primitive geometry", + "description": "The temporal primitive geometry with id `tGeometryId` SHOULD be deleted.\n", + "tags": ["TemporalGeometry"], + "parameters": [ + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/mFeatureId"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/tGeometryId"} + ], + "responses": { + "204": {"description": "Successfully deleted."}, + "404": {"description": "- A collection with the specified id was not found.\n- Or a moving feature with the specified id was not found.\n- Or a temporal primitive geometry with the specified id was not found.\n"}, + "500": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError"} + } + } + } + + collections_collectionId_items_mFeatureId_tgsequence_tGeometryId_distance_path = '/collections/{collectionId}/items/{mFeatureId}/tgsequence/{tGeometryId}/distance' + 
paths[collections_collectionId_items_mFeatureId_tgsequence_tGeometryId_distance_path] = { + "get": { + "operationId": "getDistanceOfTemporalPrimitiveGeometry", + "summary": "Get a time-to-distance curve of a temporal primitive geometry", + "description": "A user can get time-to-distance curve of a temporal primitive geometry with id `tGeometryId`.\n", + "tags": ["TemporalGeometryQuery"], + "parameters": [ + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/mFeatureId"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/tGeometryId"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/datetime"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/leaf"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/subtemporalvalue"} + ], + "responses": { + "200": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/DistanceQuery"}, + "400": {"description": "A query parameter was not validly used."}, + "500": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError"} + } + } + } + + collections_collectionId_items_mFeatureId_tgsequence_tGeometryId_velocity_path = '/collections/{collectionId}/items/{mFeatureId}/tgsequence/{tGeometryId}/velocity' + paths[collections_collectionId_items_mFeatureId_tgsequence_tGeometryId_velocity_path] = { + "get": { + "operationId": "getVelocityOfTemporalPrimitiveGeometry", + "summary": "Get a time-to-velocity curve of a temporal primitive geometry", + "description": "A user can get time-to-velocity curve of a temporal primitive geometry with id `tGeometryId`.\n", + "tags": ["TemporalGeometryQuery"], + "parameters": [ + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/mFeatureId"}, + {"$ref": 
f"{OPENAPI_YAML['movingfeature']}#/components/parameters/tGeometryId"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/datetime"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/leaf"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/subtemporalvalue"} + ], + "responses": { + "200": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/VelocityQuery"}, + "400": {"description": "A query parameter was not validly used."}, + "500": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError"} + } + } + } + + collections_collectionId_items_mFeatureId_tgsequence_tGeometryId_acceleration_path = '/collections/{collectionId}/items/{mFeatureId}/tgsequence/{tGeometryId}/acceleration' + paths[collections_collectionId_items_mFeatureId_tgsequence_tGeometryId_acceleration_path] = { + "get": { + "operationId": "getAccelerationOfTemporalPrimitiveGeometry", + "summary": "Get a time-to-acceleration curve of a temporal primitive geometry", + "description": "A user can get time-to-acceleration curve of a temporal primitive geometry with id `tGeometryId`.\n", + "tags": ["TemporalGeometryQuery"], + "parameters": [ + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/mFeatureId"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/tGeometryId"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/datetime"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/leaf"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/subtemporalvalue"} + ], + "responses": { + "200": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/AccelerationQuery" }, + "400": {"description": "A query parameter was not validly used."}, + "500": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError"} + } + } + } + + 
collections_collectionId_items_mFeatureId_tproperties_path = '/collections/{collectionId}/items/{mFeatureId}/tproperties' + paths[collections_collectionId_items_mFeatureId_tproperties_path] = { + "get": { + "operationId": "retrieveTemporalProperties", + "summary": "Retrieve a set of the temporal property data", + "description": "A user can retrieve the static information of the temporal property data that included a single moving feature with id `mFeatureId`.\n\nThe static data of a temporal property is not included temporal values (property `valueSequence`).\n\nAlso a user can retrieve the sub sequence of the temporal information of the temporal property data for the specified time interval with `subTemporalValue` query parameter. \nIn this case, `temporalProperties` property schema SHALL follows the [TemporalProperties object](https://docs.ogc.org/is/19-045r3/19-045r3.html#tproperties) in the OGC MF-JSON.\n", + "tags": ["TemporalProperty"], + "parameters": [ + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/mFeatureId"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/datetime"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/limit"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/subtemporalvalue"} + ], + "responses": { + "200": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/TemporalProperties"}, + "400": {"description": "A query parameter was not validly used."}, + "500": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError"} + } + }, + "post": { + "operationId": "insertTemporalProperty", + "summary": "Add temporal property data", + "description": "A user SHOULD add new temporal property data into a moving feature with id `mFeatureId`.\n\nThe request body schema SHALL follows the [TemporalProperties 
object](https://docs.opengeospatial.org/is/19-045r3/19-045r3.html#tproperties) in the OGC MF-JSON.\n", + "tags": ["TemporalProperty"], + "parameters": [ + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/mFeatureId"} + ], + "requestBody": { + "content": { + "application/json": { + "schema": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/schemas/temporalProperties-mfjson"}, + "example": [ + {"datetimes": ["2011-07-14T22:01:01.450Z","2011-07-14T23:01:01.450Z","2011-07-15T00:01:01.450Z"], + "length": {"type": "Measure","form": "http://qudt.org/vocab/quantitykind/Length","values": [1,2.4,1],"interpolation": "Linear"}, + "discharge": {"type": "Measure","form": "MQS","values": [3,4,5],"interpolation": "Step"}} + ] + } + } + }, + "responses": { + "201": { + "description": "Successful add more temporal property into a specified moving feature.\n", + "headers": { + "Locations": { + "description": "A list of URI of the newly added resources", + "schema": {"type": "array","items": {"type": "string"} + } + } + } + }, + "404": {"description": "- A collection with the specified id was not found.\n- Or a moving feature with the specified id was not found.\n"}, + "500": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError"} + } + } + } + + collections_collectionId_items_mFeatureId_tproperties_tPropertyName_path = '/collections/{collectionId}/items/{mFeatureId}/tproperties/{tPropertyName}' + paths[collections_collectionId_items_mFeatureId_tproperties_tPropertyName_path] = { + "get": { + "operationId": "retrieveTemporalProperty", + "summary": "Retrieve a temporal property", + "description": "A user can retrieve only the temporal values with a specified name `tPropertyName` of temporal property.\n", + "tags": ["TemporalProperty"], + "parameters": [ + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId"}, + {"$ref": 
f"{OPENAPI_YAML['movingfeature']}#/components/parameters/mFeatureId"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/tPropertyName"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/datetime"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/leaf"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/subtemporalvalue"} + ], + "responses": { + "200": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/TemporalProperty"}, + "400": {"description": "A query parameter was not validly used."}, + "500": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError"} + } + }, + "post": { + "operationId": "insertTemporalPrimitiveValue", + "summary": "Add temporal primitive value data", + "description": "A user SHOULD add more temporal primitive value data into a temporal property with id `tPropertyName`.\n", + "tags": ["TemporalProperty"], + "parameters": [ + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/mFeatureId"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/tPropertyName"} + ], + "requestBody": { + "content": { + "application/json": { + "schema": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/schemas/temporalPrimitiveValue"}, + "example": { + "datetimes": ["2011-07-15T08:00:00Z","2011-07-15T08:00:01Z","2011-07-15T08:00:02Z"], + "values": [0,20,50], + "interpolation": "Linear" + } + } + } + }, + "responses": { + "201": { + "description": "Successful add more temporal primitive value data into a specified temporal property.\n", + "headers": { + "Location": { + "description": "A URI of the newly added resource", + "schema": {"type": "string"} + } + } + }, + "404": {"description": "- A collection with the specified id was not found.\n- Or a moving feature with the specified id was not found.\n- Or a temporal property with the specified 
id was not found.\n"}, + "500": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError"} + } + }, + "delete": { + "operationId": "deleteTemporalProperty", + "summary": "Delete a specified temporal property", + "description": "The temporal property with id `tPropertyName` SHOULD be deleted.\n", + "tags": ["TemporalProperty"], + "parameters": [ + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/mFeatureId"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/tPropertyName"} + ], + "responses": { + "204": {"description": "Successfully deleted."}, + "404": {"description": "- A collection with the specified id was not found.\n- Or a moving feature with the specified id was not found.\n- Or a temporal property with the specified id was not found.\n"}, + "500": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError"} + } + } + } + + collections_collectionId_items_mFeatureId_tproperties_tPropertyName_tValueId_path = '/collections/{collectionId}/items/{mFeatureId}/tproperties/{tPropertyName}/{tValueId}' + paths[collections_collectionId_items_mFeatureId_tproperties_tPropertyName_tValueId_path] = { + "delete": { + "operationId": "deleteTemporalPrimitiveValue", + "summary": "Delete a singe temporal primitive value", + "description": "The temporal primitive value with id `tValueId` SHOULD be deleted.\n", + "tags": ["TemporalProperty"], + "parameters": [ + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/collectionId"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/mFeatureId"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/tPropertyName"}, + {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/parameters/tValueId"} + ], + "responses": { + "204": {"description": "Successfully deleted."}, + "404": {"description": "- A collection with the specified id was not 
found.\n- Or a moving feature with the specified id was not found.\n- Or a temporal property with the specified id was not found.\n- Or a temporal primitive primitive with the specified id was not found.\n"}, + "500": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError"} + } + } + } + + return [{'name': 'MovingFeatureCollection'}], {'paths': paths} +# fmt: on diff --git a/pygeoapi/flask_app.py b/pygeoapi/flask_app.py index 4e4f0097e..6cb9e0f2b 100644 --- a/pygeoapi/flask_app.py +++ b/pygeoapi/flask_app.py @@ -45,9 +45,10 @@ import pygeoapi.api.processes as processes_api import pygeoapi.api.stac as stac_api import pygeoapi.api.tiles as tiles_api +import pygeoapi.api.movingfeatures as movingfeatures from pygeoapi.openapi import load_openapi_document from pygeoapi.config import get_config -from pygeoapi.util import get_mimetype, get_api_rules +from pygeoapi.util import get_mimetype, get_api_rules, filter_dict_by_key_value CONFIG = get_config() @@ -179,6 +180,16 @@ def openapi(): """ OpenAPI endpoint + :returns: HTTP response + """ + return get_response(api_.openapi_(request)) + + +@BLUEPRINT.route('/api') +def api(): + """ + OpenAPI endpoint + :returns: HTTP response """ @@ -221,8 +232,10 @@ def get_tilematrix_sets(): return execute_from_flask(tiles_api.tilematrixsets, request) -@BLUEPRINT.route('/collections') -@BLUEPRINT.route('/collections/') +@BLUEPRINT.route('/collections', + methods=['GET', 'POST']) +@BLUEPRINT.route('/collections/', + methods=['GET', 'PUT', 'DELETE']) def collections(collection_id=None): """ OGC API collections endpoint @@ -232,7 +245,33 @@ def collections(collection_id=None): :returns: HTTP response """ - return get_response(api_.describe_collections(request, collection_id)) + if collection_id is None: + if request.method == 'GET': # list items + return get_response(api_.describe_collections(request)) + elif request.method == 'POST': # filter or manage items + return execute_from_flask( + 
movingfeatures.manage_collection, request, 'create') + else: + collections = filter_dict_by_key_value(api_.config['resources'], + 'type', 'collection') + # collection in config + if collection_id in collections: + return get_response( + api_.describe_collections( + request, collection_id)) + # moving feature collection + else: + if request.method == 'DELETE': + return execute_from_flask( + movingfeatures.manage_collection, request, 'delete', + collection_id) + elif request.method == 'PUT': + return execute_from_flask( + movingfeatures.manage_collection, request, 'update', + collection_id) + else: + return execute_from_flask( + movingfeatures.get_collection, request, collection_id) @BLUEPRINT.route('/collections//schema') @@ -258,16 +297,14 @@ def collection_queryables(collection_id=None): :returns: HTTP response """ - return execute_from_flask(itemtypes_api.get_collection_queryables, request, - collection_id) + return execute_from_flask(itemtypes_api.get_collection_queryables, + request, collection_id) @BLUEPRINT.route('/collections//items', - methods=['GET', 'POST', 'OPTIONS'], - provide_automatic_options=False) + methods=['GET', 'POST']) @BLUEPRINT.route('/collections//items/', - methods=['GET', 'PUT', 'DELETE', 'OPTIONS'], - provide_automatic_options=False) + methods=['GET', 'DELETE'],) def collection_items(collection_id, item_id=None): """ OGC API collections items endpoint @@ -278,42 +315,79 @@ def collection_items(collection_id, item_id=None): :returns: HTTP response """ - if item_id is None: - if request.method == 'GET': # list items - return execute_from_flask(itemtypes_api.get_collection_items, - request, collection_id, - skip_valid_check=True) - elif request.method == 'POST': # filter or manage items - if request.content_type is not None: - if request.content_type == 'application/geo+json': - return execute_from_flask( + collections = filter_dict_by_key_value(api_.config['resources'], + 'type', 'collection') + # collection in config + if collection_id in 
collections: + if item_id is None: + if request.method == 'GET': # list items + return execute_from_flask(itemtypes_api.get_collection_items, + request, collection_id, + skip_valid_check=True) + elif request.method == 'POST': # filter or manage items + if request.content_type is not None: + if request.content_type == 'application/geo+json': + return execute_from_flask( itemtypes_api.manage_collection_item, request, 'create', collection_id, skip_valid_check=True) - else: - return execute_from_flask( + else: + return execute_from_flask( itemtypes_api.post_collection_items, request, collection_id, skip_valid_check=True) + elif request.method == 'OPTIONS': + return execute_from_flask( + itemtypes_api.manage_collection_item, request, + 'options', collection_id, skip_valid_check=True) + + elif request.method == 'DELETE': + return execute_from_flask( + itemtypes_api.manage_collection_item, + request, + 'delete', + collection_id, + item_id, + skip_valid_check=True) + elif request.method == 'PUT': + return execute_from_flask( + itemtypes_api.manage_collection_item, + request, + 'update', + collection_id, + item_id, + skip_valid_check=True) elif request.method == 'OPTIONS': return execute_from_flask( - itemtypes_api.manage_collection_item, request, 'options', - collection_id, skip_valid_check=True) + itemtypes_api.manage_collection_item, + request, + 'options', + collection_id, + item_id, + skip_valid_check=True) + else: + return execute_from_flask(itemtypes_api.get_collection_item, + request, collection_id, item_id) - elif request.method == 'DELETE': - return execute_from_flask(itemtypes_api.manage_collection_item, - request, 'delete', collection_id, item_id, - skip_valid_check=True) - elif request.method == 'PUT': - return execute_from_flask(itemtypes_api.manage_collection_item, - request, 'update', collection_id, item_id, - skip_valid_check=True) - elif request.method == 'OPTIONS': - return execute_from_flask(itemtypes_api.manage_collection_item, - request, 'options', 
collection_id, item_id, - skip_valid_check=True) else: - return execute_from_flask(itemtypes_api.get_collection_item, request, - collection_id, item_id) + if item_id is None: + if request.method == 'GET': # list items + return execute_from_flask( + movingfeatures.get_collection_items, request, + collection_id) + elif request.method == 'POST': # filter or manage items + return execute_from_flask( + movingfeatures.manage_collection_item, request, + 'create', collection_id) + + elif request.method == 'DELETE': + return execute_from_flask( + movingfeatures.manage_collection_item, request, + 'delete', collection_id, + item_id) + else: + return execute_from_flask( + movingfeatures.get_collection_item, request, + collection_id, item_id) @BLUEPRINT.route('/collections//coverage') @@ -547,6 +621,190 @@ def stac_catalog_path(path): return execute_from_flask(stac_api.get_stac_path, request, path) +@BLUEPRINT.route( + '/collections//items//tgsequence', + methods=['GET', 'POST']) +@BLUEPRINT.route( + '/collections//items//tgsequence/', # noqa + methods=['DELETE']) +def collection_items_tgeometries(collection_id, item_id, tGeometry_id=None): + """ + OGC API collections items endpoint + + :param collection_id: collection identifier + :param item_id: item identifier + + :returns: HTTP response + """ + + if tGeometry_id is None: + if request.method == 'GET': # list items + return execute_from_flask( + movingfeatures.get_collection_items_tGeometry, request, + collection_id, + item_id) + elif request.method == 'POST': # filter or manage items + return execute_from_flask( + movingfeatures.manage_collection_item_tGeometry, request, + 'create', + collection_id, + item_id) + + elif request.method == 'DELETE': + return execute_from_flask( + movingfeatures.manage_collection_item_tGeometry, request, + 'delete', + collection_id, + item_id, + tGeometry_id) + + +@BLUEPRINT.route( + '/collections//items//tgsequence//velocity', # noqa + methods=['GET']) +def 
collection_items_tgeometries_velocity( + collection_id, item_id, tGeometry_id): + """ + OGC API collections items endpoint + + :param collection_id: collection identifier + :param item_id: item identifier + + :returns: HTTP response + """ + + if request.method == 'GET': # list items + return execute_from_flask( + movingfeatures + .get_collection_items_tGeometry_velocity, request, + collection_id, + item_id, + tGeometry_id) + + +@BLUEPRINT.route( + '/collections//items//tgsequence//distance', # noqa + methods=['GET']) +def collection_items_tgeometries_distance( + collection_id, item_id, tGeometry_id): + """ + OGC API collections items endpoint + + :param collection_id: collection identifier + :param item_id: item identifier + + :returns: HTTP response + """ + + if request.method == 'GET': # list items + return execute_from_flask( + movingfeatures + .get_collection_items_tGeometry_distance, request, + collection_id, + item_id, + tGeometry_id) + + +@BLUEPRINT.route( + '/collections//items//tgsequence//acceleration', # noqa + methods=['GET']) +def collection_items_tgeometries_acceleration(collection_id, item_id, + tGeometry_id): + """ + OGC API collections items endpoint + + :param collection_id: collection identifier + :param item_id: item identifier + + :returns: HTTP response + """ + + if request.method == 'GET': # list items + return execute_from_flask( + movingfeatures + .get_collection_items_tGeometry_acceleration, request, + collection_id, + item_id, + tGeometry_id) + + +@BLUEPRINT.route( + '/collections//items//tproperties', + methods=['GET', 'POST']) +@BLUEPRINT.route( + '/collections//items//tproperties/', # noqa + methods=['GET', 'POST', 'DELETE']) +def collection_items_tproperties(collection_id, item_id, tProperty_id=None): + """ + OGC API collections items endpoint + + :param collection_id: collection identifier + :param item_id: item identifier + + :returns: HTTP response + """ + + if tProperty_id is None: + if request.method == 'GET': # list items + 
return execute_from_flask( + movingfeatures.get_collection_items_tProperty, request, + collection_id, + item_id) + elif request.method == 'POST': # filter or manage items + return execute_from_flask( + movingfeatures.manage_collection_item_tProperty, request, + 'create', + collection_id, + item_id) + else: + if request.method == 'GET': # list items + return execute_from_flask( + movingfeatures.get_collection_items_tProperty_value, request, + collection_id, + item_id, + tProperty_id) + elif request.method == 'POST': # filter or manage items + return execute_from_flask( + movingfeatures + .manage_collection_item_tProperty_value, request, + 'create', + collection_id, + item_id, + tProperty_id) + elif request.method == 'DELETE': # filter or manage items + return execute_from_flask( + movingfeatures + .manage_collection_item_tProperty, request, + 'delete', + collection_id, + item_id, + tProperty_id) + + +@BLUEPRINT.route( + '/collections//items//tproperties//', # noqa + methods=['DELETE']) +def collection_items_tproperties_values(collection_id, item_id, + tProperty_id, tValue_id=None): + """ + OGC API collections items endpoint + + :param collection_id: collection identifier + :param item_id: item identifier + + :returns: HTTP response + """ + + if request.method == 'DELETE': # filter or manage items + return execute_from_flask( + movingfeatures.manage_collection_item_tProperty_value, request, + 'delete', + collection_id, + item_id, + tProperty_id, + tValue_id) + + @ADMIN_BLUEPRINT.route('/admin/config', methods=['GET', 'PUT', 'PATCH']) def admin_config(): """ diff --git a/pygeoapi/openapi.py b/pygeoapi/openapi.py index 3cf5c0b8f..8078490d9 100644 --- a/pygeoapi/openapi.py +++ b/pygeoapi/openapi.py @@ -53,13 +53,14 @@ OPENAPI_YAML = { 'oapif-1': 'https://schemas.opengis.net/ogcapi/features/part1/1.0/openapi/ogcapi-features-1.yaml', # noqa - 'oapif-2': 'https://schemas.opengis.net/ogcapi/features/part2/1.0/openapi/ogcapi-features-2.yaml', # noqa + 'oapif-2': 
'https://schemas.opengis.net/ogcapi/features/part2/1.0/openapi/ogcapi-features-2.yaml', # noqa 'oapip': 'https://schemas.opengis.net/ogcapi/processes/part1/1.0/openapi', 'oacov': 'https://raw.githubusercontent.com/tomkralidis/ogcapi-coverages-1/fix-cis/yaml-unresolved', # noqa 'oapir': 'https://raw.githubusercontent.com/opengeospatial/ogcapi-records/master/core/openapi', # noqa - 'oaedr': 'https://schemas.opengis.net/ogcapi/edr/1.0/openapi', # noqa + 'oaedr': 'https://schemas.opengis.net/ogcapi/edr/1.0/openapi', # noqa 'oapit': 'https://schemas.opengis.net/ogcapi/tiles/part1/1.0/openapi/ogcapi-tiles-1.yaml', # noqa - 'pygeoapi': 'https://raw.githubusercontent.com/geopython/pygeoapi/master/pygeoapi/schemas/config/pygeoapi-config-0.x.yml' # noqa + 'pygeoapi': 'https://raw.githubusercontent.com/geopython/pygeoapi/master/pygeoapi/schemas/config/pygeoapi-config-0.x.yml', # noqa + 'movingfeature': 'https://schemas.opengis.net/ogcapi/movingfeatures/part1/1.0/openapi/ogcapi-movingfeatures-1.bundled.yaml' # noqa } THISDIR = os.path.dirname(os.path.realpath(__file__)) @@ -154,7 +155,7 @@ def get_oas_30(cfg: dict, fail_on_invalid_collection: bool = True) -> dict: osl = get_ogc_schemas_location(cfg['server']) OPENAPI_YAML['oapif-1'] = os.path.join(osl, 'ogcapi/features/part1/1.0/openapi/ogcapi-features-1.yaml') # noqa - OPENAPI_YAML['oapif-2'] = os.path.join(osl, 'ogcapi/features/part2/1.0/openapi/ogcapi-features-2.yaml') # noqa + OPENAPI_YAML['oapif-2'] = os.path.join(osl, 'ogcapi/features/part2/1.0/openapi/ogcapi-features-2.yaml') # noqa LOGGER.debug('setting up server info') oas = { @@ -267,6 +268,37 @@ def get_oas_30(cfg: dict, fail_on_invalid_collection: bool = True) -> dict: '400': {'$ref': f"{OPENAPI_YAML['oapif-1']}#/components/responses/InvalidParameter"}, # noqa '500': {'$ref': f"{OPENAPI_YAML['oapif-1']}#/components/responses/ServerError"} # noqa } + }, + "post": { + "operationId": "registerMetadata", + "summary": "Register metadata about a collection of moving 
features", # noqa + "description": "A user SHOULD register metadata about a collection of moving features into the system.\n", # noqa + "tags": ["MovingFeatureCollection"], + "requestBody": { + "content": { + "application/json": { + "schema": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/schemas/collection-body"}, + "example": { + "title": "moving_feature_collection_sample", + "updateFrequency": 1000, + "description": "example", + "itemType": "movingfeature" + } + } + } + }, + "responses": { + "201": { + "description": "Successful create a collection to manage moving features.", # noqa + "headers": { + "Location": { + "description": "A URI of the newly added resource", # noqa + "schema": {"type": "string"} + } + } + }, + "500": {"$ref": f"{OPENAPI_YAML['movingfeature']}#/components/responses/ServerError"} + } } } @@ -311,30 +343,30 @@ def get_oas_30(cfg: dict, fail_on_invalid_collection: bool = True) -> dict: ], 'properties': { 'queryable': { - 'description': 'the token that may be used in a CQL predicate', # noqa + 'description': 'the token that may be used in a CQL predicate', # noqa 'type': 'string' }, 'title': { - 'description': 'a human readable title for the queryable', # noqa + 'description': 'a human readable title for the queryable', # noqa 'type': 'string' }, 'description': { - 'description': 'a human-readable narrative describing the queryable', # noqa + 'description': 'a human-readable narrative describing the queryable', # noqa 'type': 'string' }, 'language': { - 'description': 'the language used for the title and description', # noqa + 'description': 'the language used for the title and description', # noqa 'type': 'string', 'default': [ 'en' ] }, 'type': { - 'description': 'the data type of the queryable', # noqa + 'description': 'the data type of the queryable', # noqa 'type': 'string' }, 'type-ref': { - 'description': 'a reference to the formal definition of the type', # noqa + 'description': 'a reference to the formal definition of the 
type', # noqa 'type': 'string', 'format': 'url' } @@ -401,7 +433,7 @@ def get_oas_30(cfg: dict, fail_on_invalid_collection: bool = True) -> dict: oas['components']['responses'].update({ 'Tiles': { - 'description': 'Retrieves the tiles description for this collection', # noqa + 'description': 'Retrieves the tiles description for this collection', # noqa 'content': { 'application/json': { 'schema': { @@ -436,7 +468,7 @@ def get_oas_30(cfg: dict, fail_on_invalid_collection: bool = True) -> dict: 'tileMatrixSetLinks': { 'type': 'array', 'items': { - '$ref': '#/components/schemas/tilematrixsetlink' # noqa + '$ref': '#/components/schemas/tilematrixsetlink' # noqa } }, 'links': { diff --git a/pygeoapi/provider/postgresql_mobilitydb.py b/pygeoapi/provider/postgresql_mobilitydb.py new file mode 100644 index 000000000..07afe047d --- /dev/null +++ b/pygeoapi/provider/postgresql_mobilitydb.py @@ -0,0 +1,1348 @@ +import json +import datetime +import psycopg2 +from functools import partial +from dateutil.parser import parse as dateparse +import pytz +from pymeos import (Temporal, TFloatSeq, TFloatSeqSet, pymeos_initialize) +from pygeoapi.util import format_datetime +from pymeos_cffi import (tfloat_from_mfjson, ttext_from_mfjson, + tgeompoint_from_mfjson) + +class PostgresMobilityDB: + host = 'mobilitydb' + port = 5432 + db = 'mobilitydb' + user = 'docker' + password = 'docker' + connection = None + + def __init__(self, datasource=None): + """ + PostgresMobilityDB Class constructor + + :param datasource: datasource definition (default None) + host - database host address + port - connection port number + db - table name + user - user name used to authenticate + password - password used to authenticate + """ + + self.connection = None + if datasource is not None: + self.host = datasource['host'] + self.port = int(datasource['port']) + self.db = datasource['dbname'] + self.user = datasource['user'] + self.password = datasource['password'] + + def connect(self): + """ + Connection 
of database + """ + + # Set the connection parameters to PostgreSQL + self.connection = psycopg2.connect(host=self.host, + database=self.db, + user=self.user, + password=self.password, + port=self.port) + self.connection.autocommit = True + # Register MobilityDB data types (old library 'python-mobilitydb') + # register(self.connection) + + def disconnect(self): + """ + Close the connection + """ + + if self.connection: + self.connection.close() + + def get_collections_list(self): + """ + Query moving features collection list + GET /collections + + :returns: JSON FeatureCollection + """ + with self.connection.cursor() as cur: + select_query = "SELECT collection_id FROM collection" + cur.execute(select_query) + result = cur.fetchall() + return result + + def get_collections(self): + """ + Query moving features collections + + :returns: JSON FeatureCollections + """ + with self.connection.cursor() as cur: + select_query = """select collection.collection_id, + collection.collection_property, extentLifespan, + extentTGeometry from (select collection.collection_id, + collection.collection_property, + extent(mfeature.lifespan) as extentLifespan, + extent(tgeometry.tgeometry_property) as extentTGeometry + from collection + left outer join mfeature + on collection.collection_id = mfeature.collection_id + left outer join tgeometry + on mfeature.collection_id = tgeometry.collection_id + and mfeature.mfeature_id = tgeometry.mfeature_id + group by collection.collection_id, collection.collection_property) + collection """ + + cur.execute(select_query) + result = cur.fetchall() + return result + + def get_collection(self, collection_id): + """ + Query specific moving features collection + GET /collections/{collectionId} + + :param collection_id: local identifier of a collection + + :returns: JSON FeatureCollection + """ + with self.connection.cursor() as cur: + select_query = ("""select collection.collection_id, + collection.collection_property, extentLifespan, + extentTGeometry 
from (select collection.collection_id, + collection.collection_property, + extent(mfeature.lifespan) as extentLifespan, + extent(tgeometry.tgeometry_property) as extentTGeometry + from collection + left outer join mfeature + on collection.collection_id = mfeature.collection_id + left outer join tgeometry + on mfeature.collection_id = tgeometry.collection_id + and mfeature.mfeature_id = tgeometry.mfeature_id + where collection.collection_id ='{0}' + group by collection.collection_id, + collection.collection_property) + collection """ + .format(collection_id)) + + cur.execute(select_query) + result = cur.fetchall() + return result + + def get_features_list(self): + """ + Query all moving features + + :returns: JSON MovingFeatures + """ + + with self.connection.cursor() as cur: + select_query = "SELECT collection_id, mfeature_id FROM mfeature" + cur.execute(select_query) + result = cur.fetchall() + return result + + def get_features( + self, collection_id, bbox='', datetime='', limit=10, offset=0, + sub_trajectory=False): + """ + Retrieve the moving feature collection to access + the static information of the moving feature + /collections/{collectionId}/items + + :param collection_id: local identifier of a collection + :param bbox: bounding box [lowleft1,lowleft2,min(optional), + upright1,upright2,max(optional)] + :param datetime: either a date-time or an interval(datestamp or extent) + :param limit: number of items (default 10) [optional] + :param offset: starting record to return (default 0) + :param sub_trajectory: If specified true, This operation returns only a + subsequence of temporal geometry within a time + interval contained in the + datetime parameter (default False)[optional] + + :returns: JSON MovingFeatures + """ + + with self.connection.cursor() as cur: + bbox_restriction = "" + if bbox != '' and bbox is not None: + s_bbox = ','.join(str(x) for x in bbox) + if len(bbox) == 4: + bbox_restriction = " and box2d(stboxx(" + \ + s_bbox + ")) &&& 
box2d(extentTGeometry) " + elif len(bbox) == 6: + bbox_restriction = " and box3d(stboxz(" + \ + s_bbox + ")) &&& box3d(extentTGeometry) " + + datetime_restriction = "" + if datetime != '' and datetime is not None: + if sub_trajectory is False or sub_trajectory == "false": + datetime_restriction = ( + """ and((lifespan && tstzspan('[{0}]')) + or (extentTPropertiesValueFloat::tstzspan && + tstzspan('[{0}]')) or + (extentTPropertiesValueText::tstzspan && + tstzspan('[{0}]')) or + (extentTGeometry::tstzspan && tstzspan('[{0}]')))""" + .format(datetime)) + limit_restriction = " LIMIT " + \ + str(limit) + " OFFSET " + str(offset) + + # sub_trajectory is false + select_query = ( + """select mfeature.collection_id, mfeature.mfeature_id, + st_asgeojson(mfeature.mf_geometry) as mf_geometry, + mfeature.mf_property, mfeature.lifespan, extentTGeometry, + extentTPropertiesValueFloat, extentTPropertiesValueText + from (select mfeature.collection_id, mfeature.mfeature_id, + mfeature.mf_geometry, mfeature.mf_property, mfeature.lifespan, + extent(tgeometry.tgeometry_property) as extentTGeometry + from mfeature left outer join tgeometry + on mfeature.collection_id = tgeometry.collection_id + and mfeature.mfeature_id = tgeometry.mfeature_id + where mfeature.collection_id ='{0}' + group by mfeature.collection_id, mfeature.mfeature_id, + mfeature.mf_geometry, mfeature.mf_property, mfeature.lifespan) + mfeature left outer join + (select mfeature.collection_id, mfeature.mfeature_id, + extent(tvalue.pvalue_float) + as extentTPropertiesValueFloat, + extent(tvalue.pvalue_text) as extentTPropertiesValueText + from mfeature left outer join tvalue + on mfeature.collection_id = tvalue.collection_id + and mfeature.mfeature_id = tvalue.mfeature_id + where mfeature.collection_id ='{0}' + group by mfeature.collection_id, mfeature.mfeature_id) + tvalue ON + mfeature.collection_id = tvalue.collection_id + and mfeature.mfeature_id = tvalue.mfeature_id + where 1=1 {1} {2}""" .format( + collection_id, 
bbox_restriction, datetime_restriction)) + + cur.execute(select_query) + result = cur.fetchall() + number_matched = len(result) + + select_query += limit_restriction + cur.execute(select_query) + result = cur.fetchall() + number_returned = len(result) + + if sub_trajectory or sub_trajectory == "true": + sub_trajectory_field = ("""atTime(tgeometry.tgeometry_property, + tstzspan('[{0}]'))""" + .format(datetime)) + # sub_trajectory is true + select_geometry_query = ( + """select mfeature.collection_id, + mfeature.mfeature_id, mfeature.mf_geometry, + mfeature.mf_property, mfeature.lifespan, + extentTGeometry, tgeometry.tgeometry_id, + tgeometry_property from (select mfeature.collection_id, + mfeature.mfeature_id, st_asgeojson(mfeature.mf_geometry) + as mf_geometry, mfeature.mf_property, mfeature.lifespan, + extentTGeometry from (select mfeature.collection_id, + mfeature.mfeature_id, mfeature.mf_geometry, + mfeature.mf_property, mfeature.lifespan, + extent(tgeometry.tgeometry_property) + as extentTGeometry from mfeature left outer join tgeometry + on mfeature.collection_id = tgeometry.collection_id + and mfeature.mfeature_id = tgeometry.mfeature_id + where mfeature.collection_id ='{0}' + group by mfeature.collection_id, mfeature.mfeature_id, + mfeature.mf_geometry, mfeature.mf_property, mfeature.lifespan) + mfeature where 1=1 {1} {2}) mfeature + left outer join (select tgeometry.collection_id, + tgeometry.mfeature_id, tgeometry.tgeometry_id, {3} + as tgeometry_property from tgeometry + where tgeometry.collection_id ='{0}' and {3} is not null) + tgeometry ON mfeature.collection_id = tgeometry.collection_id + and mfeature.mfeature_id = tgeometry.mfeature_id where 1=1 """. 
+ format( + collection_id, bbox_restriction, + limit_restriction, + sub_trajectory_field)) + + cur.execute(select_geometry_query) + result = cur.fetchall() + + return result, number_matched, number_returned + + def get_feature(self, collection_id, mfeature_id): + """ + Access the static data of the moving feature + + :param collection_id: local identifier of a collection + :param mfeature_id: local identifier of a moving feature + + :returns: JSON MovingFeature + """ + with self.connection.cursor() as cur: + # cur = self.connection.cursor() + select_query = ( + """select mfeature.collection_id, mfeature.mfeature_id, + st_asgeojson(mfeature.mf_geometry) as mf_geometry, + mfeature.mf_property, mfeature.lifespan, extentTGeometry + from (select mfeature.collection_id, mfeature.mfeature_id, + mfeature.mf_geometry, mfeature.mf_property, mfeature.lifespan, + extent(tgeometry.tgeometry_property) as extentTGeometry + from mfeature left outer join tgeometry + on mfeature.collection_id = tgeometry.collection_id + and mfeature.mfeature_id = tgeometry.mfeature_id + where mfeature.collection_id ='{0}' + AND mfeature.mfeature_id='{1}' + group by mfeature.collection_id, mfeature.mfeature_id, + mfeature.mf_geometry, mfeature.mf_property, + mfeature.lifespan) mfeature """ .format( + collection_id, mfeature_id)) + cur.execute(select_query) + result = cur.fetchall() + return result + + def get_temporalgeometries( + self, collection_id, mfeature_id, bbox='', leaf='', datetime='', + limit=10, offset=0, sub_trajectory=False): + """ + Retrieve only the movement data of a moving feature + + :param collection_id: local identifier of a collection + :param mfeature_id: local identifier of a moving feature + :param bbox: bounding box [lowleft1,lowleft2,min(optional), + upright1,upright2,max(optional)] + :param leaf: only features that have a temporal geometry and + property that intersects the given + date-time are selected [optional] + :param datetime: either a date-time or an 
interval(datestamp or extent) + :param limit: number of items (default 10) [optional] + :param offset: starting record to return (default 0) + :param sub_trajectory: If specified true, This operation returns only a + subsequence of temporal geometry within a time + interval contained in the + datetime parameter (default False) [optional] + + :returns: JSON TemporalGeometry + """ + with self.connection.cursor() as cur: + tgeometry_property = 'null' + + bbox_restriction = "" + if bbox != '' and bbox is not None: + s_bbox = ','.join(str(x) for x in bbox) + if len(bbox) == 4: + bbox_restriction = " and box2d(stboxx(" + s_bbox + \ + ")) &&& box2d(stbox(tgeometry_property))" + elif len(bbox) == 6: + bbox_restriction = " and box3d(stboxz(" + s_bbox + \ + ")) &&& box3d(stbox(tgeometry_property))" + + datetime_restriction = "" + if datetime != '' and datetime is not None: + datetime_restriction = (""" and atTime(tgeometry_property, + tstzspan('[{0}]')) is not null """.format(datetime)) + + if leaf != '' and leaf is not None: + tgeometry_property = ("""atTime(tgeometry_property, + tstzset('{0}'))""".format('{' + leaf + '}')) + elif sub_trajectory or sub_trajectory == "true": + tgeometry_property = ("""atTime(tgeometry_property, + tstzspan('[{0}]'))""".format(datetime)) + + select_query = ( + """SELECT collection_id, mfeature_id, tgeometry_id, + tgeometry_property, {0} + FROM tgeometry WHERE collection_id ='{1}' + AND mfeature_id='{2}' {3} {4}""" + .format(tgeometry_property, collection_id, + mfeature_id, bbox_restriction, + datetime_restriction)) + + cur.execute(select_query) + result = cur.fetchall() + number_matched = len(result) + + select_query += " LIMIT " + str(limit) + " OFFSET " + str(offset) + cur.execute(select_query) + result = cur.fetchall() + number_returned = len(result) + + return result, number_matched, number_returned + + def get_tProperties_name_list(self): + """ + Query all tProperties name list + + :returns: MF-JSON tProperties + """ + with 
self.connection.cursor() as cur: + select_query = """SELECT collection_id, mfeature_id, + tproperties_name FROM tproperties""" + cur.execute(select_query) + result = cur.fetchall() + return result + + def get_temporalproperties( + self, collection_id, mfeature_id, datetime='', limit=10, + offset=0, sub_temporal_value=False): + """ + Retrieve the static information of the temporal property data + that included a single moving feature + + :param collection_id: local identifier of a collection + :param mfeature_id: local identifier of a moving feature + :param datetime: either a date-time or an interval(datestamp or extent) + :param limit: number of items (default 10) [optional] + :param offset: starting record to return (default 0) + :param sub_temporal_value: only features with a temporal property + intersecting the given time interval + will return (default False) [optional] + + :returns: MF-JSON TemporalProperties or temporalProperty + """ + with self.connection.cursor() as cur: + datetime_restriction = '' + if datetime != '' and datetime is not None: + if sub_temporal_value is False \ + or sub_temporal_value == "false": + datetime_restriction = (""" and (atTime(pvalue_float, + tstzspan('[{0}]')) is not null + or atTime(pvalue_text, tstzspan('[{0}]')) is not null)""" + .format(datetime)) + + limit_restriction = " LIMIT " + \ + str(limit) + " OFFSET " + str(offset) + select_query = ("""select distinct on (tproperties.collection_id, + tproperties.mfeature_id, tproperties.tproperties_name) + tproperties.collection_id, tproperties.mfeature_id, + tproperties.tproperties_name, tproperties.tproperty + from tproperties left outer join tvalue + on tproperties.collection_id = tvalue.collection_id + and tproperties.mfeature_id = tvalue.mfeature_id + and tproperties.tproperties_name = tvalue.tproperties_name + WHERE tproperties.collection_id ='{0}' + AND tproperties.mfeature_id='{1}' {2}""". 
format( + collection_id, mfeature_id, datetime_restriction)) + + cur.execute(select_query) + result = cur.fetchall() + number_matched = len(result) + + select_query += limit_restriction + cur.execute(select_query) + result = cur.fetchall() + number_returned = len(result) + + if sub_temporal_value or sub_temporal_value == "true": + subTemporalValue_float_field = ( + """atTime(tvalue.pvalue_float, + tstzspan('[{0}]'))""" .format(datetime)) + subTemporalValue_text_field = ( + """atTime(tvalue.pvalue_text, + tstzspan('[{0}]'))""" .format(datetime)) + + select_temporalvalue_query = ( + """select tproperties.collection_id, + tproperties.mfeature_id, tproperties.tproperties_name, + tproperties.tproperty, datetime_group, pvalue_float, pvalue_text + from (select distinct on (tproperties.collection_id, + tproperties.mfeature_id, tproperties.tproperties_name) + tproperties.collection_id, tproperties.mfeature_id, + tproperties.tproperties_name, tproperties.tproperty + from tproperties left outer join tvalue + on tproperties.collection_id = tvalue.collection_id + and tproperties.mfeature_id = tvalue.mfeature_id + and tproperties.tproperties_name = tvalue.tproperties_name + where tproperties.collection_id ='{0}' + AND tproperties.mfeature_id='{1}' {2} {3}) tproperties + left outer join (select tvalue.collection_id, + tvalue.mfeature_id, tvalue.tproperties_name, + tvalue.datetime_group, {4} as pvalue_float, + {5} as pvalue_text from tvalue + where tvalue.collection_id ='{0}' + AND tvalue.mfeature_id='{1}' and ({4} is not null + or {5} is not null)) tvalue + on tproperties.collection_id = tvalue.collection_id + and tproperties.mfeature_id = tvalue.mfeature_id + and tproperties.tproperties_name = tvalue.tproperties_name + where 1=1 order by datetime_group""". 
+ format( + collection_id, mfeature_id, + datetime_restriction, + limit_restriction, + subTemporalValue_float_field, + subTemporalValue_text_field)) + + cur.execute(select_temporalvalue_query) + result = cur.fetchall() + + return result, number_matched, number_returned + + def get_temporalproperties_value( + self, collection_id, mfeature_id, tProperty_name, datetime='', + leaf='', sub_temporal_value=False): + """ + Retrieve temporal values with a specified name + {tPropertyName} of temporal property. + + :param collection_id: local identifier of a collection + :param mfeature_id: local identifier of a moving feature + :param tProperty_name: local identifier of a temporal property + :param datetime: either a date-time or an interval(datestamp or extent) + :param leaf: only features that have a temporal geometry and + property that intersects the given + date-time are selected [optional] + :param sub_temporal_value: only features with a temporal property + intersecting the given time interval + will return (default False) [optional] + + :returns: JSON TemporalPropertyValue + """ + with self.connection.cursor() as cur: + datetime_restriction = "" + if datetime != '' and datetime is not None: + datetime_restriction = ( + """ and (atTime(tvalue.pvalue_float, + tstzspan('[{0}]')) is not null + or atTime(tvalue.pvalue_text, + tstzspan('[{0}]')) is not null) """ .format(datetime)) + float_field = 'pvalue_float' + text_field = 'pvalue_text' + if leaf != '' and leaf is not None: + float_field = "atTime(tvalue.pvalue_float, \ + tstzset('{" + leaf + "}'))" + text_field = "atTime(tvalue.pvalue_text, \ + tstzset('{" + leaf + "}'))" + elif sub_temporal_value or sub_temporal_value == "true": + float_field = "atTime(tvalue.pvalue_float, \ + tstzspan('[" + datetime + "]'))" + text_field = "atTime(tvalue.pvalue_text, \ + tstzspan('[" + datetime + "]'))" + + select_query = ( + """select tproperties.collection_id, tproperties.mfeature_id, + tproperties.tproperties_name, 
tproperties.tproperty, + datetime_group, pvalue_float, pvalue_text + from (select tproperties.collection_id, tproperties.mfeature_id, + tproperties.tproperties_name, tproperties.tproperty + from tproperties where tproperties.collection_id ='{0}' + AND tproperties.mfeature_id='{1}' + AND tproperties.tproperties_name='{2}') tproperties + left outer join (select tproperties.collection_id, + tproperties.mfeature_id, tproperties.tproperties_name, + tvalue.datetime_group, {3} as pvalue_float, {4} as pvalue_text + from tproperties left outer join tvalue + on tproperties.collection_id = tvalue.collection_id + and tproperties.mfeature_id = tvalue.mfeature_id + and tproperties.tproperties_name = tvalue.tproperties_name + where tproperties.collection_id ='{0}' + AND tproperties.mfeature_id='{1}' + AND tproperties.tproperties_name='{2}' {5}) tvalue + on tproperties.collection_id = tvalue.collection_id + and tproperties.mfeature_id = tvalue.mfeature_id + and tproperties.tproperties_name = tvalue.tproperties_name + where 1=1 order by datetime_group""" + .format(collection_id, mfeature_id, tProperty_name, + float_field, text_field, datetime_restriction)) + cur.execute(select_query) + result = cur.fetchall() + return result + + def post_collection(self, collection_property): + """ + Register metadata about a collection of moving features + + :param collection_property: metadata about a collection + title - human-readable title of the collection + updateFrequency - a time interval of sampling location + description - any description + itemType - indicator about the type of the items in the + moving features collection (default "movingfeature") + + :returns: Collection ID + """ + with self.connection.cursor() as cur: + cur.execute( + "INSERT INTO collection(collection_property) \ + VALUES ('{0}') RETURNING collection_id". 
    def post_movingfeature(self, collection_id, movingfeature):
        """
        Insert a set of moving features or a moving feature into a collection

        :param collection_id: local identifier of a collection
        :param movingfeature: MovingFeature object or
                              MovingFeatureCollection object

        :returns: MovingFeature ID
        """
        with self.connection.cursor() as cur:
            # Shallow copy so the top-level pops below do not mutate the
            # caller's object.
            g_movingfeature = dict(movingfeature)
            # "time" (lifespan) becomes a quoted tstzspan literal, or a
            # bare SQL NULL when absent.
            lifespan = g_movingfeature.pop("time", None)
            if lifespan is not None:
                lifespan = "'[" + self.validate_lifespan(lifespan) + "]'"
            else:
                lifespan = "NULL"
            # Temporal geometries/properties are stripped here and inserted
            # via their dedicated helpers once the feature row exists.
            temporal_geometries = g_movingfeature.pop("temporalGeometry", None)
            temporal_properties = g_movingfeature.pop(
                "temporalProperties", None)

            # NOTE(review): values are interpolated into the SQL text;
            # confirm inputs are sanitized upstream (SQL-injection risk).
            if 'geometry' in g_movingfeature:
                geometry = g_movingfeature.pop("geometry", None)
                cur.execute(
                    """INSERT INTO mfeature(collection_id, mf_geometry,
                    mf_property, lifespan) VALUES ('{0}',
                    ST_GeomFromGeoJSON('{1}'), '{2}', {3})
                    RETURNING mfeature_id"""
                    .format(collection_id, json.dumps(geometry),
                            json.dumps(g_movingfeature), lifespan))
            else:
                cur.execute(
                    """INSERT INTO mfeature(collection_id,
                    mf_property, lifespan)
                    VALUES ('{0}', '{1}', {2}) RETURNING mfeature_id"""
                    .format(
                        collection_id, json.dumps(g_movingfeature), lifespan))
            mfeature_id = cur.fetchone()[0]

            # Wrap single objects in a list so the single and collection
            # cases share one insert loop.
            if temporal_geometries is not None:
                temporal_geometries = [temporal_geometries] if not isinstance(
                    temporal_geometries, list) else temporal_geometries
                for temporal_geometry in temporal_geometries:
                    self.post_temporalgeometry(
                        collection_id, mfeature_id, temporal_geometry)

            if temporal_properties is not None:
                temporal_properties = [temporal_properties] if not isinstance(
                    temporal_properties, list) else temporal_properties
                for temporal_property in temporal_properties:
                    self.post_temporalproperties(
                        collection_id, mfeature_id, temporal_property)

            return mfeature_id
    def post_temporalgeometry(
            self, collection_id, mfeature_id, temporal_geometry):
        """
        Add movement data into the moving feature

        :param collection_id: local identifier of a collection
        :param mfeature_id: local identifier of a moving feature
        :param temporal_geometry: TemporalPrimitiveGeometry object
                                  in the OGC MF-JSON
        :returns: TemporalGeometry ID
        """

        with self.connection.cursor() as cur:
            # PyMEOS parses (and thereby validates) the MF-JSON before
            # anything is written.
            pymeos_initialize()
            # Normalize to the newer MF-JSON shape (no 'Z' suffix, explicit
            # lower_inc/upper_inc) expected by tgeompoint_from_mfjson.
            temporal_geometry = self.convert_temporalgeometry_to_new_version(
                temporal_geometry)
            value = Temporal._factory(
                tgeompoint_from_mfjson(json.dumps(temporal_geometry)))
            # The same serialized value is stored in both columns
            # (tgeometry_property and tgeog_property).
            # NOTE(review): string interpolation into SQL -- confirm inputs
            # are sanitized upstream (SQL-injection risk).
            cur.execute(
                """INSERT INTO tgeometry(collection_id, mfeature_id,
                tgeometry_property, tgeog_property)
                VALUES ('{0}', '{1}', '{2}', '{3}') RETURNING tgeometry_id"""
                .format(collection_id, mfeature_id, str(value), str(value)))

            tgeometry_id = cur.fetchone()[0]

            return tgeometry_id
values + temporal_value_data['interpolation'] = interpolation + + insert_query = ( + """INSERT INTO tproperties(collection_id, mfeature_id, + tproperties_name, tproperty) + VALUES ('{0}', '{1}', '{2}', '{3}') + ON CONFLICT (collection_id, mfeature_id, + tproperties_name) + DO UPDATE SET tproperty = EXCLUDED.tproperty""" + .format(collection_id, mfeature_id, + tproperties_name, json.dumps( + g_temporal_property[tproperties_name]))) + cur.execute(insert_query) + + if temporal_value_data: + self.post_temporalvalue( + collection_id, mfeature_id, tproperties_name, + temporal_value_data) + + tproperties_name_list.append(tproperties_name) + + # TODO replace g_temporal_property + return tproperties_name_list + + def post_temporalvalue( + self, collection_id, mfeature_id, tproperties_name, + temporal_value_data): + """ + Add more temporal values data into a temporal property + + :param collection_id: local identifier of a collection + :param mfeature_id: local identifier of a moving feature + :param tproperties_name: local identifier of a temporal property + :param temporal_value_data: temporal primitive value + datetimes - array of strings + values - number or string or boolean + interpolation - Enum: "Discrete" "Step" "Linear" "Regression" + + :returns: Temporal Primitive Value + """ + with self.connection.cursor() as cur: + + datetimes = temporal_value_data['datetimes'] + values = temporal_value_data['values'] + interpolation = temporal_value_data['interpolation'] + temporal_value = self.create_temporalproperty_value( + datetimes, values, interpolation) + + datetime_group = self.get_temporalvalue_group( + collection_id, mfeature_id, datetimes) + dataType = temporal_value["type"] + pvalue_column = "" + value = None + + pymeos_initialize() + if dataType == 'MovingFloat': + pvalue_column = "pValue_float" + value = Temporal._factory( + tfloat_from_mfjson(json.dumps(temporal_value))) + else: + pvalue_column = "pValue_text" + value = Temporal._factory( + 
ttext_from_mfjson(json.dumps(temporal_value))) + + insert_querry = ( + """INSERT INTO tvalue(collection_id, mfeature_id, + tproperties_name, datetime_group, {0}) + VALUES ('{1}', '{2}', '{3}', {4}, '{5}') + RETURNING tvalue_id""" + .format( + pvalue_column, collection_id, mfeature_id, + tproperties_name, datetime_group, str(value))) + + cur.execute(insert_querry) + tvalue_id = cur.fetchone()[0] + + return tvalue_id + + def put_collection(self, collection_id, collection_property): + """ + Replace metadata about the collection + + :param collection_id: local identifier of a collection + :param collection_property: metadata about a collection + title - human-readable title of the collection + updateFrequency - a time interval of sampling location + description - any description + itemType - indicator about the type of the items in the + moving features collection (default "movingfeature") + """ + with self.connection.cursor() as cur: + cur.execute( + "UPDATE collection set collection_property = '{0}' \ + WHERE collection_id = '{1}'" .format( + json.dumps(collection_property), collection_id)) + + def delete_collection(self, restriction): + """ + Delete records associated with a collection id + + :param restriction: moving feature collection id + """ + + with self.connection.cursor() as cur: + cur.execute( + "DELETE FROM tvalue WHERE 1=1 {0}".format(restriction)) + cur.execute( + "DELETE FROM tproperties WHERE 1=1 {0}".format(restriction)) + cur.execute( + "DELETE FROM tgeometry WHERE 1=1 {0}".format(restriction)) + cur.execute( + "DELETE FROM mfeature WHERE 1=1 {0}".format(restriction)) + cur.execute( + "DELETE FROM collection WHERE 1=1 {0}".format(restriction)) + + def delete_movingfeature(self, restriction): + """ + Delete records associated with a moving feature id + + :param restriction: moving feature id + """ + with self.connection.cursor() as cur: + cur.execute( + "DELETE FROM tvalue WHERE 1=1 {0}".format(restriction)) + cur.execute( + "DELETE FROM tproperties 
WHERE 1=1 {0}".format(restriction)) + cur.execute( + "DELETE FROM tgeometry WHERE 1=1 {0}".format(restriction)) + cur.execute( + "DELETE FROM mfeature WHERE 1=1 {0}".format(restriction)) + + def delete_temporalgeometry(self, restriction): + """ + Delete the temporal geometry record with the given restriction. + + :param restriction: temporal geometry id + """ + with self.connection.cursor() as cur: + cur.execute( + "DELETE FROM tgeometry WHERE 1=1 {0}".format(restriction)) + + def delete_temporalproperties(self, restriction): + """ + Delete the temporal properties record with the given restriction. + + :param restriction: temporal properties id + """ + + with self.connection.cursor() as cur: + cur.execute( + "DELETE FROM tvalue WHERE 1=1 {0}".format(restriction)) + cur.execute( + "DELETE FROM tproperties WHERE 1=1 {0}".format(restriction)) + + def delete_temporalvalue(self, restriction): + """ + Delete the temporal value record with the given restriction. + + :param restriction: temporal value id + """ + + with self.connection.cursor() as cur: + cur.execute( + "DELETE FROM tvalue WHERE 1=1 {0}".format(restriction)) + + def convert_temporalgeometry_to_new_version(self, temporal_geometry): + """ + Convert temporal geometory to new version + + :param temporal_geometry: MF-JSON TemporalPrimitiveGeometry (object) or + MF-JSON TemporalComplexGeometry + + :returns: temporalGeometry object + """ + + if 'datetimes' in temporal_geometry: + datetimes = temporal_geometry['datetimes'] + for i in range(len(datetimes)): + datetimes[i] = datetimes[i].replace('Z', '') + temporal_geometry['datetimes'] = datetimes + + if 'lower_inc' not in temporal_geometry: + temporal_geometry['lower_inc'] = True + if 'upper_inc' not in temporal_geometry: + temporal_geometry['upper_inc'] = True + return temporal_geometry + + def convert_temporalgeometry_to_old_version(self, temporal_geometry): + """ + Convert temporal geometory to old version + + :param temporal_geometry: MF-JSON 
TemporalPrimitiveGeometry (object) or + MF-JSON TemporalComplexGeometry + + :returns: temporalGeometry object + """ + + if 'datetimes' in temporal_geometry: + datetimes = temporal_geometry['datetimes'] + for i in range(len(datetimes)): + datetimes[i] = datetimes[i].split('+')[0] + 'Z' + temporal_geometry['datetimes'] = datetimes + + if 'lower_inc' in temporal_geometry: + del temporal_geometry['lower_inc'] + if 'upper_inc' in temporal_geometry: + del temporal_geometry['upper_inc'] + + return temporal_geometry + + def create_temporalproperty_value(self, datetimes, values, interpolation): + """ + Create temporal property value + + :param datetimes: array of strings + :param values: number or string or boolean + :param interpolation: Enum: "Discrete" "Step" "Linear" "Regression" + + :returns: temporalValue object + """ + + for i in range(len(datetimes)): + if isinstance(datetimes[i], int): + datetimes[i] = datetime.datetime.fromtimestamp( + datetimes[i] / 1e3).strftime("%Y/%m/%dT%H:%M:%S.%f") + else: + datetimes[i] = datetimes[i].replace('Z', '') + + if all( + [isinstance(item, int) or isinstance(item, float) + for item in values]): + dataType = 'MovingFloat' + else: + dataType = 'MovingText' + temporal_value = { + "type": dataType, + "lower_inc": True, + "upper_inc": True, + 'datetimes': datetimes, + 'values': values, + 'interpolation': interpolation + } + return temporal_value + + def convert_temporalproperty_value_to_base_version( + self, temporal_property_value): + """ + Convert temporal property value to base version + + :param temporal_property_value: database type(tText,tFloat) + temporalPropertyValue object + + :returns: JSON temporalPropertyValue + """ + + if 'type' in temporal_property_value: + del temporal_property_value['type'] + + if 'datetimes' in temporal_property_value: + datetimes = temporal_property_value['datetimes'] + for i in range(len(datetimes)): + datetimes[i] = datetimes[i].split('+')[0] + 'Z' + temporal_property_value['datetimes'] = datetimes 
+ + if 'lower_inc' in temporal_property_value: + del temporal_property_value['lower_inc'] + if 'upper_inc' in temporal_property_value: + del temporal_property_value['upper_inc'] + return temporal_property_value + + def validate_lifespan(self, datetime_=None) -> str: + """ + Validate datetime lifespan + + :param datetime_: either a date-time or an interval. (default None) + + :returns: start and end datetype string + """ + + datetime_for_return = '' + if datetime_ is not None and datetime_ != []: + dateparse_begin = partial(dateparse, default=datetime.datetime.min) + dateparse_end = partial(dateparse, default=datetime.datetime.max) + + datetime_begin = datetime_[0] + datetime_end = datetime_[-1] + datetime_begin = dateparse_begin(datetime_begin) + if datetime_begin.tzinfo is None: + datetime_begin = datetime_begin.replace( + tzinfo=pytz.UTC) + + datetime_end = dateparse_end(datetime_end) + if datetime_end.tzinfo is None: + datetime_end = datetime_end.replace(tzinfo=pytz.UTC) + + datetime_invalid = any([ + (datetime_begin > datetime_end) + ]) + + if not datetime_invalid: + datetime_for_return = datetime_begin.strftime( + '%Y-%m-%d %H:%M:%S.%f') + ',' + \ + datetime_end.strftime('%Y-%m-%d %H:%M:%S.%f') + return datetime_for_return + + def check_temporalproperty_can_post( + self, collection_id, mfeature_id, temporal_properties, + tproperties_name=None): + """ + Check temporalProperties object can be POSTed + + :param collection_id: local identifier of a collection + :param mfeature_id: local identifier of a moving feature + :param temporal_properties: temporalProperties object + :param tproperties_name: temporal property name (default None) + + :returns: True or False + """ + + with self.connection.cursor() as cur: + for temporal_property in temporal_properties: + g_temporal_property = dict(temporal_property) + if 'datetimes' in g_temporal_property: + datetimes = g_temporal_property["datetimes"] + for i in range(len(datetimes)): + if isinstance(datetimes[i], int): + 
    def check_temporalproperty_can_post(
            self, collection_id, mfeature_id, temporal_properties,
            tproperties_name=None):
        """
        Check whether a temporalProperties object can be POSTed, i.e. none
        of its datetimes intersect values already stored for the target
        properties.

        :param collection_id: local identifier of a collection
        :param mfeature_id: local identifier of a moving feature
        :param temporal_properties: temporalProperties object
        :param tproperties_name: temporal property name (default None)

        :returns: True when no stored value intersects, False otherwise
        """

        with self.connection.cursor() as cur:
            for temporal_property in temporal_properties:
                g_temporal_property = dict(temporal_property)
                # NOTE(review): when 'datetimes' is absent, `datetimes`
                # below is unbound (NameError) or stale from a previous
                # loop iteration -- confirm every input carries 'datetimes'.
                if 'datetimes' in g_temporal_property:
                    datetimes = g_temporal_property["datetimes"]
                    # Normalize timestamps the same way the POST path does.
                    for i in range(len(datetimes)):
                        if isinstance(datetimes[i], int):
                            datetimes[i] = datetime.datetime.fromtimestamp(
                                datetimes[i] / 1e3)\
                                .strftime("%Y/%m/%dT%H:%M:%S.%f")
                        else:
                            datetimes[i] = datetimes[i].replace('Z', '')

                # Either the single given property name, or every property
                # present in the object.
                tproperties_name_list = []
                if tproperties_name is not None:
                    tproperties_name_list = [tproperties_name]
                else:
                    for tproperties_name in g_temporal_property:
                        tproperties_name_list.append(tproperties_name)

                # Count stored value rows whose float/text time span
                # overlaps the incoming timestamps.
                # NOTE(review): ids and datetimes are interpolated into the
                # SQL text directly -- confirm they are sanitized upstream.
                select_query = (
                    """select collection_id, mfeature_id, tproperties_name,
                    count(datetime_group) as intersect_count
                    from tvalue where collection_id ='{0}'
                    and mfeature_id='{1}' and tproperties_name in ({2})
                    and ((pvalue_float::tstzspan && tstzset('{3}')::tstzspan)
                    or (pvalue_text::tstzspan && tstzset('{3}')::tstzspan))
                    group by collection_id, mfeature_id, tproperties_name"""
                    .format(collection_id, mfeature_id,
                            "'" + "', '".join(tproperties_name_list) + "'",
                            "{" + ", ".join(datetimes) + "}"))
                cur.execute(select_query)
                rows = cur.fetchall()

                # Any non-zero intersect_count forbids the POST.
                for row in rows:
                    if int(row[3]) > 0:
                        return False
            return True
    def get_temporalvalue_group(
            self, collection_id, mfeature_id, datetimes):
        """
        Get the datetime_group id for a set of timestamps: the id of the
        existing tvalue group with exactly the same timestamps, or the
        next free id (max existing group + 1) when there is none.

        :param collection_id: local identifier of a collection
        :param mfeature_id: local identifier of a moving feature
        :param datetimes: array of strings or epoch-millisecond ints

        :returns: ID of the group that summarizes same datetime in tproperty
        """

        with self.connection.cursor() as cur:
            # Normalize timestamps the same way the POST/check paths do.
            for i in range(len(datetimes)):
                if isinstance(datetimes[i], int):
                    datetimes[i] = datetime.datetime.fromtimestamp(
                        datetimes[i] / 1e3).strftime("%Y/%m/%dT%H:%M:%S.%f")
                else:
                    datetimes[i] = datetimes[i].replace('Z', '')

            # temp2: group whose float/text timestamp set equals the given
            # set; temp3: next free group id (max + 1). COALESCE picks the
            # matching group when it exists, the next free id otherwise.
            # NOTE(review): ids and datetimes are interpolated into the SQL
            # text directly -- confirm they are sanitized upstream.
            select_query = (
                """select temp1.collection_id, temp1.mfeature_id,
                COALESCE(temp2.datetime_group, temp3.max_datetime_group)
                from (select collection_id, mfeature_id from tvalue
                where collection_id ='{0}' and mfeature_id='{1}') temp1
                left outer join (select collection_id, mfeature_id,
                datetime_group from tvalue
                where collection_id ='{0}' and mfeature_id='{1}'
                and (set(timestamps(pvalue_float)) = tstzset('{2}')
                or set(timestamps(pvalue_text)) = tstzset('{2}'))) temp2
                on temp1.collection_id = temp2.collection_id
                and temp1.mfeature_id = temp2.mfeature_id
                left outer join (select collection_id, mfeature_id,
                COALESCE(max(datetime_group), 0) + 1 as max_datetime_group
                from tvalue where collection_id ='{0}'
                and mfeature_id='{1}'
                group by collection_id, mfeature_id ) temp3
                on temp1.collection_id = temp3.collection_id
                and temp1.mfeature_id = temp3.mfeature_id """
                .format(collection_id, mfeature_id,
                        "{" + ", ".join(datetimes) + "}"))
            cur.execute(select_query)
            result = cur.fetchall()
            # Third column of the first row is the resolved group id; 1 is
            # the first group id when the feature has no values yet.
            if len(result) > 0:
                return result[0][2]
        return 1
elif (leaf != '' or leaf is not None) and \ + (not sub_temporal_value or sub_temporal_value == "false"): + # only leaf query parameter is used + leaf_condition = "tstzset('{"+leaf+"}')" + select_query = \ + f"""SELECT atTime(speed(tgeog_property),{leaf_condition}) AS speed + FROM tgeometry + WHERE collection_id = '{collection_id}' + and mfeature_id = '{mfeature_id}' + and tgeometry_id = '{tgeometry_id}'""" + elif (leaf == '' or leaf is None) and \ + (sub_temporal_value or sub_temporal_value == "true"): + # only sub_temporal_value query parameter is used + select_query = \ + f"""SELECT atTime(speed(tgeog_property), tstzspan('[{datetime}]')) AS speed + FROM tgeometry + WHERE collection_id = '{collection_id}' + and mfeature_id = '{mfeature_id}' + and tgeometry_id = '{tgeometry_id}'""" + else: + print("Not valid query parameters") + + cur.execute(select_query) + result = cur.fetchall() + + return self.to_tproperties(result, name, form, leaf) + + def get_distance(self, collection_id, mfeature_id, tgeometry_id, + datetime='', leaf='', sub_temporal_value=False): + """ + Get temporal property of distance + + :param collection_id: local identifier of a collection + :param mfeature_id: local identifier of a moving feature + :param tgeometry_id: local identifier of a geometry + :param datetime: either a date-time or an interval(datestamp or extent) + :param leaf: array of strings (default None) + only features that have a temporal geometry and property + that intersects the given date-time are selected [optional] + :param sub_temporal_value: boolean, only features with a temporal property + intersecting the given time interval + will return (default False) [optional] + + :returns: TemporalProperty of distance + """ + + form = "MTR" + name = "distance" + with self.connection.cursor() as cur: + if (leaf == '' or leaf is None) and \ + (not sub_temporal_value or sub_temporal_value == "false"): + # no optional query parameters are used -> time-to-velocity curve returns + 
select_query = \ + f"""SELECT cumulativeLength(tgeog_property) AS distance + FROM tgeometry + WHERE collection_id = '{collection_id}' + and mfeature_id = '{mfeature_id}' + and tgeometry_id = '{tgeometry_id}'""" + elif (leaf != '' or leaf is not None) and \ + (not sub_temporal_value or sub_temporal_value == "false"): + # only leaf query parameter is used + leaf_condition = "tstzset('{"+leaf+"}')" + select_query = \ + f"""SELECT atTime(cumulativeLength(tgeog_property),{leaf_condition}) AS distance + FROM tgeometry + WHERE collection_id = '{collection_id}' + and mfeature_id = '{mfeature_id}' + and tgeometry_id = '{tgeometry_id}'""" + elif (leaf == '' or leaf is None) and \ + (sub_temporal_value or sub_temporal_value == "true"): + # only sub_temporal_value query parameter is used + select_query = \ + f"""SELECT atTime(cumulativeLength(tgeog_property), tstzspan('[{datetime}]')) AS distance + FROM tgeometry + WHERE collection_id = '{collection_id}' + and mfeature_id = '{mfeature_id}' + and tgeometry_id = '{tgeometry_id}'""" + else: + print("Not valid query parameters") + + cur.execute(select_query) + result = cur.fetchall() + + return self.to_tproperties(result, name, form, leaf) + + def get_acceleration(self, collection_id, mfeature_id, tgeometry_id, + datetime='', leaf='', sub_temporal_value=False): + """ + Get temporal property of acceleration + + :param collection_id: local identifier of a collection + :param mfeature_id: local identifier of a moving feature + :param tgeometry_id: local identifier of a geometry + :param datetime: either a date-time or an interval(datestamp or extent) + :param leaf: array of strings (default None) + only features that have a temporal geometry and property + that intersects the given date-time are selected [optional] + :param sub_temporal_value: boolean, only features with a temporal property + intersecting the given time interval + will return (default False) [optional] + + :returns: TemporalProperty of acceleration + """ + + 
    def get_acceleration(self, collection_id, mfeature_id, tgeometry_id,
                         datetime='', leaf='', sub_temporal_value=False):
        """
        Get temporal property of acceleration, derived client-side as the
        finite difference of the speed curve.

        :param collection_id: local identifier of a collection
        :param mfeature_id: local identifier of a moving feature
        :param tgeometry_id: local identifier of a geometry
        :param datetime: either a date-time or an interval(datestamp or extent)
        :param leaf: array of strings (default None); not used in the query
                     here, unlike get_velocity/get_distance
        :param sub_temporal_value: boolean (default False); not used in the
                                   query here

        :returns: TemporalProperty of acceleration
        """

        tProperty = {
            "name": "acceleration",
            "type": "TReal",
            "form": "MTS",
            "valueSequence": []
        }
        with self.connection.cursor() as cur:
            # Only the speed curve is fetched; acceleration is computed in
            # calculate_acceleration below.
            select_query = \
                f"""SELECT speed(tgeog_property) AS speed
                FROM tgeometry
                WHERE collection_id = '{collection_id}'
                and mfeature_id = '{mfeature_id}'
                and tgeometry_id = '{tgeometry_id}'"""
            cur.execute(select_query)
            result = cur.fetchall()

            pymeos_initialize()
            for each_row in result:
                each_row_converted = TFloatSeqSet(each_row[0])
                interpolation = each_row_converted.interpolation().to_string()

                each_time = [
                    each_val.time().start_timestamp().strftime('%Y-%m-%dT%H:%M:%S.%fZ')
                    for each_val in each_row_converted.instants()]
                if interpolation == "Step":
                    # Stepwise speed is flat between jumps, so its
                    # derivative is taken as zero at every instant.
                    each_values = [0 for each_val in each_row_converted.instants()]
                else:
                    each_values = [each_val.value() for each_val in each_row_converted.instants()]

                value_sequence = self.calculate_acceleration(each_values, each_time, datetime)
                if value_sequence.get("values"):
                    # NOTE(review): `datetime` defaults to '' which is not
                    # None, so this branch always labels the result
                    # "Discrete" and calculate_acceleration will call
                    # strptime('') and raise -- confirm callers always pass
                    # either a real datetime or None.
                    if datetime is not None:
                        value_sequence["interpolation"] = "Discrete"
                    elif interpolation == "Linear":
                        value_sequence["interpolation"] = "Step"
                    else:
                        value_sequence["interpolation"] = interpolation
                    tProperty["valueSequence"].append(value_sequence)
        return tProperty

    def to_tproperties(self, results, name, form, leaf):
        """
        Convert speed/distance rows from the database into a
        TemporalProperty object.

        :param results: temporal property rows returned by the query
        :param name: temporal property name ("velocity" or "distance")
        :param form: a unit of measurement code
        :param leaf: array of strings (not referenced in this body; kept
                     for signature parity with the callers)

        :returns: TemporalProperty object
        """
        tProperty = {
            "name": name,
            "type": "TReal",
            "form": form,
            "valueSequence": []
        }

        pymeos_initialize()
        for each_row in results:
            # speed() rows parse as a sequence set; cumulativeLength()
            # rows parse as a single sequence.
            each_row_converted = None
            if name == "velocity":
                each_row_converted = TFloatSeqSet(each_row[0])
            else:
                each_row_converted = TFloatSeq(each_row[0])

            each_values = [each_val.value() for each_val in each_row_converted.instants()]
            each_time = [
                each_val.time().start_timestamp().strftime('%Y-%m-%dT%H:%M:%S.%fZ')
                for each_val in each_row_converted.instants()]
            interpolation = each_row_converted.interpolation().to_string()

            value_sequence = {
                "datetimes": each_time,
                "values": each_values,
                "interpolation": interpolation
            }
            tProperty["valueSequence"].append(value_sequence)
        return tProperty
each_val.time().start_timestamp().strftime('%Y-%m-%dT%H:%M:%S.%fZ') + for each_val in each_row_converted.instants()] + interpolation = each_row_converted.interpolation().to_string() + + value_sequence = { + "datetimes": each_time, + "values": each_values, + "interpolation": interpolation + } + tProperty["valueSequence"].append(value_sequence) + return tProperty + + + def calculate_acceleration(self, velocities, times, chk_dtime): + """ + Calculate acceleration + + :param velocities: interpolation value list + :param times: interpolation datetime list + :param chk_dtime: array of strings + + :returns: valueSequence object + """ + + value_sequence = {} + time_format = '%Y-%m-%d %H:%M:%S.%f' + time_format2 = '%Y-%m-%dT%H:%M:%S.%fZ' + if chk_dtime is not None: + chk_time = datetime.datetime.strptime(chk_dtime, time_format) + + for i in range(1, len(velocities)): + time1 = datetime.datetime.strptime(times[i - 1], time_format2) + time2 = datetime.datetime.strptime(times[i], time_format2) + if chk_time <= time2 and chk_time >= time1: + delta_v = velocities[i] - velocities[i - 1] + delta_t = (time2 - time1).total_seconds() + acceleration = delta_v / delta_t + value_sequence["values"] = [acceleration] + value_sequence["datetimes"] = [format_datetime(chk_dtime)] + break + else: + value_sequence["values"] = [] + value_sequence["datetimes"] = [] + for i in range(1, len(velocities)): + delta_v = velocities[i] - velocities[i - 1] + time1 = datetime.datetime.strptime(times[i - 1], time_format2) + time2 = datetime.datetime.strptime(times[i], time_format2) + delta_t = (time2 - time1).total_seconds() + acceleration = delta_v / delta_t + value_sequence["values"].append(acceleration) + value_sequence["datetimes"].append(times[i]) + + return value_sequence diff --git a/requirements-provider.txt b/requirements-provider.txt index 74bc473fc..b94a7fd24 100644 --- a/requirements-provider.txt +++ b/requirements-provider.txt @@ -8,7 +8,7 @@ GDAL<=3.8.4 geoalchemy2 geopandas netCDF4 
@pytest.fixture()
def api_():
    """API object wired with the MF-API test config and OpenAPI document."""
    with open(get_test_file_path('../pygeoapi-test-config-mfapi.yml')) as fh:
        config = yaml_load(fh)
    with open(get_test_file_path('../pygeoapi-test-openapi-mfapi.yml')) as fh:
        openapi = yaml_load(fh)
    return API(config, openapi)


@pytest.fixture(scope="session")
def context():
    """Session-wide dict used to pass created ids between tests."""
    return {}


@pytest.fixture()
def collection_property():
    """Minimal collection metadata for creation requests."""
    return {
        "title": "moving_feature_collection_sample",
        "updateFrequency": 1000,
        "description": "example"
    }


@pytest.fixture()
def update_collection_property():
    """Collection metadata variant used for update requests."""
    return {
        "title": "moving_feature_collection_sample",
        "updateFrequency": 1000,
        "description": "test_update"
    }


@pytest.fixture()
def movingfeature():
    """A complete OGC MF-JSON MovingFeature payload (feature 'mf-1')."""
    return {
        "type": "Feature",
        "crs": {
            "type": "Name",
            "properties": {"name": "urn:ogc:def:crs:OGC:1.3:CRS84"}
        },
        "trs": {
            "type": "Link",
            "properties": {
                "type": "OGCDEF",
                "href": "http://www.opengis.net/def/uom/ISO-8601/0/Gregorian"
            }
        },
        "temporalGeometry": {
            "type": "MovingPoint",
            "datetimes": [
                "2011-07-14T22:01:01Z",
                "2011-07-14T22:01:02Z",
                "2011-07-14T22:01:03Z",
                "2011-07-14T22:01:04Z",
                "2011-07-14T22:01:05Z"
            ],
            "coordinates": [
                [139.757083, 35.627701, 0.5],
                [139.757399, 35.627701, 2],
                [139.757555, 35.627688, 4],
                [139.757651, 35.627596, 4],
                [139.757716, 35.627483, 4]
            ],
            "interpolation": "Linear",
            "base": {
                "type": "glTF",
                "href": "http://www.opengis.net/spec/movingfeatures/json/1.0/prism/example/car3dmodel.gltf"  # noqa
            },
            "orientations": [
                {"scales": [1, 1, 1], "angles": [0, 0, 0]},
                {"scales": [1, 1, 1], "angles": [0, 355, 0]},
                {"scales": [1, 1, 1], "angles": [0, 0, 330]},
                {"scales": [1, 1, 1], "angles": [0, 0, 300]},
                {"scales": [1, 1, 1], "angles": [0, 0, 270]}
            ]
        },
        "temporalProperties": [
            {
                "datetimes": [
                    "2011-07-14T22:01:01.450Z",
                    "2011-07-14T23:01:01.450Z",
                    "2011-07-15T00:01:01.450Z"
                ],
                "length": {
                    "type": "Measure",
                    "form": "http://www.qudt.org/qudt/owl/1.0.0/quantity/Length",  # noqa
                    "values": [1, 2.4, 1],
                    "interpolation": "Linear"
                },
                "discharge": {
                    "type": "Measure",
                    "form": "MQS",
                    "values": [3, 4, 5],
                    "interpolation": "Step"
                }
            },
            {
                # Epoch-millisecond datetimes exercise the int branch of
                # the provider's timestamp normalization.
                "datetimes": [1465621816590, 1465711526300],
                "camera": {
                    "type": "Image",
                    "values": [
                        "http://www.opengis.net/spec/movingfeatures/json/1.0/prism/example/image1",  # noqa
                        "iVBORw0KGgoAAAANSUhEU......"
                    ],
                    "interpolation": "Discrete"
                },
                "labels": {
                    "type": "Text",
                    "values": ["car", "human"],
                    "interpolation": "Discrete"
                }
            }
        ],
        "geometry": {
            "type": "LineString",
            "coordinates": [
                [139.757083, 35.627701, 0.5],
                [139.757399, 35.627701, 2],
                [139.757555, 35.627688, 4],
                [139.757651, 35.627596, 4],
                [139.757716, 35.627483, 4]
            ]
        },
        "properties": {
            "name": "car1",
            "state": "test1",
            "video": "http://www.opengis.net/spec/movingfeatures/json/1.0/prism/example/video.mpeg"  # noqa
        },
        "bbox": [139.757083, 35.627483, 0, 139.757716, 35.627701, 4.5],
        "time": ["2011-07-14T22:01:01Z", "2011-07-15T01:11:22Z"],
        "id": "mf-1"
    }


@pytest.fixture()
def temporalgeometry():
    """A TemporalPrimitiveGeometry continuing mf-1 (22:01:06 - 22:01:10)."""
    return {
        "type": "MovingPoint",
        "datetimes": [
            "2011-07-14T22:01:06Z",
            "2011-07-14T22:01:07Z",
            "2011-07-14T22:01:08Z",
            "2011-07-14T22:01:09Z",
            "2011-07-14T22:01:10Z"
        ],
        "coordinates": [
            [139.757083, 35.627701, 0.5],
            [139.757399, 35.627701, 2],
            [139.757555, 35.627688, 4],
            [139.757651, 35.627596, 4],
            [139.757716, 35.627483, 4]
        ],
        "interpolation": "Linear",
        "base": {
            "type": "glTF",
            "href": "https://www.opengis.net/spec/movingfeatures/json/1.0/prism/example/car3dmodel.gltf"  # noqa
        },
        "orientations": [
            {"scales": [1, 1, 1], "angles": [0, 0, 0]},
            {"scales": [1, 1, 1], "angles": [0, 355, 0]},
            {"scales": [1, 1, 1], "angles": [0, 0, 330]},
            {"scales": [1, 1, 1], "angles": [0, 0, 300]},
            {"scales": [1, 1, 1], "angles": [0, 0, 270]}
        ]
    }
"form": "http://www.qudt.org/qudt/owl/1.0.0/quantity/Length", + "values": [ + 1, + 2.4, + 1 + ], + "interpolation": "Linear" + }, + "discharge": { + "type": "Measure", + "form": "MQS", + "values": [ + 3, + 4, + 5 + ], + "interpolation": "Step" + } + }, + { + "datetimes": [ + "2011-07-16T22:01:01.450Z", + "2011-07-16T23:01:01.450Z" + ], + "camera": { + "type": "Image", + "values": [ + "http://www.opengis.net/spec/movingfeatures/json/1.0/prism/example/image1", # noqa + "iVBORw0KGgoAAAANSUhEU......" + ], + "interpolation": "Discrete" + }, + "labels": { + "type": "Text", + "values": [ + "car", + "human" + ], + "interpolation": "Discrete" + } + } + ] + + +@pytest.fixture() +def temporalvalue_data(): + return { + "datetimes": [ + "2011-07-18T08:00:00Z", + "2011-07-18T08:00:01Z", + "2011-07-18T08:00:02Z" + ], + "values": [ + 0, + 20, + 50 + ], + "interpolation": "Linear" + } + + +def test_manage_collection_create( + api_, + collection_property, + context): + + # missing request data + req = mock_api_request() + rsp_headers, code, response = manage_collection(api_, req, 'create') + assert code == HTTPStatus.BAD_REQUEST + + # invalid request data + req = mock_api_request(data='Invalid data. 
Valid data is JSON') + rsp_headers, code, response = manage_collection(api_, req, 'create') + assert code == HTTPStatus.BAD_REQUEST + + # successful request data + req = mock_api_request(data=json.dumps(collection_property)) + rsp_headers, code, response = manage_collection(api_, req, 'create') + assert code == HTTPStatus.CREATED + assert response == '' + assert rsp_headers['Content-Type'] == 'application/json' + assert 'Location' in rsp_headers + + location = rsp_headers['Location'] + collection_id = location.split('/')[-1] + assert collection_id is not None + context['collection_id'] = collection_id + + +def test_manage_collection_item_create( + api_, movingfeature, context): + + # collection not found + req = mock_api_request() + rsp_headers, code, response = manage_collection_item( + api_, req, 'create', '00000000-0000-0000-0000-000000000000') + assert code == HTTPStatus.NOT_FOUND + + # no data found + req = mock_api_request() + rsp_headers, code, response = manage_collection_item( + api_, req, 'create', context['collection_id']) + assert code == HTTPStatus.BAD_REQUEST + + # invalid request data + req = mock_api_request(data='Invalid data. Valid data is JSON') + rsp_headers, code, response = manage_collection_item( + api_, req, 'create', context['collection_id']) + assert code == HTTPStatus.BAD_REQUEST + + # The required tag (e.g., type,temporalgeometry) + # is missing from the request data. 
+ missing_data = dict(movingfeature) + del missing_data['temporalGeometry'] + + req = mock_api_request(data=json.dumps(missing_data)) + rsp_headers, code, response = manage_collection_item( + api_, req, 'create', context['collection_id']) + assert code == HTTPStatus.NOT_IMPLEMENTED + + # successful request data + req = mock_api_request(data=json.dumps(movingfeature)) + rsp_headers, code, response = manage_collection_item( + api_, req, 'create', context['collection_id']) + + assert code == HTTPStatus.CREATED + assert response == '' + assert rsp_headers['Content-Type'] == 'application/json' + assert 'Location' in rsp_headers + + location = rsp_headers['Location'] + mfeature_id = location.split('/')[-1] + assert mfeature_id is not None + context['mfeature_id'] = mfeature_id + + +def test_manage_collection_item_tGeometry_create( + api_, temporalgeometry, context): + + # feature not found + req = mock_api_request() + rsp_headers, code, response = manage_collection_item_tGeometry( + api_, req, 'create', '00000000-0000-0000-0000-000000000000', + '00000000-0000-0000-0000-000000000000') + assert code == HTTPStatus.NOT_FOUND + + # no data found + req = mock_api_request() + rsp_headers, code, response = manage_collection_item_tGeometry( + api_, req, 'create', context['collection_id'], context['mfeature_id']) + assert code == HTTPStatus.BAD_REQUEST + + # invalid request data + req = mock_api_request(data='Invalid data. Valid data is JSON') + rsp_headers, code, response = manage_collection_item_tGeometry( + api_, req, 'create', context['collection_id'], context['mfeature_id']) + assert code == HTTPStatus.BAD_REQUEST + + # The required tag (e.g., type,prisms) + # is missing from the request data. 
+ missing_data = dict(temporalgeometry) + del missing_data['type'] + + req = mock_api_request(data=json.dumps(missing_data)) + rsp_headers, code, response = manage_collection_item_tGeometry( + api_, req, 'create', context['collection_id'], context['mfeature_id']) + assert code == HTTPStatus.NOT_IMPLEMENTED + + # successful request data + req = mock_api_request(data=json.dumps(temporalgeometry)) + rsp_headers, code, response = manage_collection_item_tGeometry( + api_, req, 'create', context['collection_id'], context['mfeature_id']) + + assert code == HTTPStatus.CREATED + assert response == '' + assert rsp_headers['Content-Type'] == 'application/json' + assert 'Location' in rsp_headers + + location = rsp_headers['Location'] + tgeometry_id = location.split('/')[-1] + assert tgeometry_id is not None + context['tgeometry_id'] = tgeometry_id + + +def test_manage_collection_item_tProperty_create( + api_, temporalproperties, context): + + # feature not found + req = mock_api_request() + rsp_headers, code, response = manage_collection_item_tProperty( + api_, req, 'create', '00000000-0000-0000-0000-000000000000', + '00000000-0000-0000-0000-000000000000') + assert code == HTTPStatus.NOT_FOUND + + # no data found + req = mock_api_request() + rsp_headers, code, response = manage_collection_item_tProperty( + api_, req, 'create', context['collection_id'], context['mfeature_id']) + assert code == HTTPStatus.BAD_REQUEST + + # invalid request data + req = mock_api_request(data='Invalid data. Valid data is JSON') + rsp_headers, code, response = manage_collection_item_tProperty( + api_, req, 'create', context['collection_id'], context['mfeature_id']) + assert code == HTTPStatus.BAD_REQUEST + + # The required tag (e.g., datetimes,interpolation) + # is missing from the request data. 
+ missing_data = [] + for temporalproperty in temporalproperties: + missing_data.append(dict(temporalproperty)) + del missing_data[0]['datetimes'] + + req = mock_api_request(data=json.dumps(missing_data, indent=2)) + rsp_headers, code, response = manage_collection_item_tProperty( + api_, req, 'create', context['collection_id'], context['mfeature_id']) + assert code == HTTPStatus.NOT_IMPLEMENTED + + # successful request data + req = mock_api_request(data=json.dumps(temporalproperties, indent=2)) + rsp_headers, code, response = manage_collection_item_tProperty( + api_, req, 'create', context['collection_id'], context['mfeature_id']) + + assert code == HTTPStatus.CREATED + assert response == '' + assert rsp_headers['Content-Type'] == 'application/json' + assert 'Locations' in rsp_headers + + location = rsp_headers['Locations'] + assert len(location) == 4 + tProperty_name = location[-1].split('/')[-1] + assert tProperty_name is not None + context['tProperty_name'] = tProperty_name + + +def test_manage_collection_item_tProperty_value_create( + api_, temporalvalue_data, context): + + # temporal property not found + req = mock_api_request() + rsp_headers, code, response = manage_collection_item_tProperty_value( + api_, req, 'create', '00000000-0000-0000-0000-000000000000', + '00000000-0000-0000-0000-000000000000', '') + assert code == HTTPStatus.NOT_FOUND + + # no data found + req = mock_api_request() + rsp_headers, code, response = manage_collection_item_tProperty_value( + api_, req, 'create', context['collection_id'], context['mfeature_id'], + context['tProperty_name']) + assert code == HTTPStatus.BAD_REQUEST + + # invalid request data + req = mock_api_request(data='Invalid data. 
Valid data is JSON') + rsp_headers, code, response = manage_collection_item_tProperty_value( + api_, req, 'create', context['collection_id'], context['mfeature_id'], + context['tProperty_name']) + assert code == HTTPStatus.BAD_REQUEST + + # The required tag (e.g., datetimes,interpolation) + # is missing from the request data. + missing_data = dict(temporalvalue_data) + del missing_data['datetimes'] + + req = mock_api_request(data=json.dumps(missing_data)) + rsp_headers, code, response = manage_collection_item_tProperty_value( + api_, req, 'create', context['collection_id'], context['mfeature_id'], + context['tProperty_name']) + assert code == HTTPStatus.NOT_IMPLEMENTED + + # successful request data + req = mock_api_request(data=json.dumps(temporalvalue_data)) + rsp_headers, code, response = manage_collection_item_tProperty_value( + api_, req, 'create', context['collection_id'], context['mfeature_id'], + context['tProperty_name']) + + assert code == HTTPStatus.CREATED + assert response == '' + assert rsp_headers['Content-Type'] == 'application/json' + assert 'Location' in rsp_headers + + location = rsp_headers['Location'] + tvalue_id = location.split('/')[-1] + assert tvalue_id is not None + context['tvalue_id'] = tvalue_id + + +def test_manage_collection_update( + api_, + update_collection_property, + context): + + # missing request data + req = mock_api_request() + rsp_headers, code, response = manage_collection( + api_, req, 'update', context['collection_id']) + assert code == HTTPStatus.BAD_REQUEST + + # invalid request data + req = mock_api_request(data='Invalid data. 
Valid data is JSON') + rsp_headers, code, response = manage_collection( + api_, req, 'update', context['collection_id']) + assert code == HTTPStatus.BAD_REQUEST + + # successful request data + req = mock_api_request(data=json.dumps(update_collection_property)) + rsp_headers, code, response = manage_collection( + api_, req, 'update', context['collection_id']) + + assert code == HTTPStatus.NO_CONTENT + assert response == '' + + +def test_get_collection_items(api_, context): + + # not found + req = mock_api_request() + rsp_headers, code, response = get_collection_items( + api_, req, '00000000-0000-0000-0000-000000000000') + assert code == HTTPStatus.NOT_FOUND + + # offset value should be positive or zero + req = mock_api_request({'offset': -1}) + rsp_headers, code, response = get_collection_items( + api_, req, context['collection_id']) + assert code == HTTPStatus.BAD_REQUEST + + # offset value should be an integer + req = mock_api_request({'offset': 'one'}) + rsp_headers, code, response = get_collection_items( + api_, req, context['collection_id']) + assert code == HTTPStatus.BAD_REQUEST + + # limit value should be strictly positive + req = mock_api_request({'offset': 0, 'limit': 0}) + rsp_headers, code, response = get_collection_items( + api_, req, context['collection_id']) + assert code == HTTPStatus.BAD_REQUEST + + # limit value should be less than or equal to 10000 + req = mock_api_request({'offset': 0, 'limit': 10001}) + rsp_headers, code, response = get_collection_items( + api_, req, context['collection_id']) + assert code == HTTPStatus.BAD_REQUEST + + # limit value should be an integer + req = mock_api_request({'offset': 0, 'limit': 'one'}) + rsp_headers, code, response = get_collection_items( + api_, req, context['collection_id']) + assert code == HTTPStatus.BAD_REQUEST + + # bbox values must be numbers + req = mock_api_request( + {'offset': 0, 'limit': 10, 'bbox': 'one,two,three,four'}) + rsp_headers, code, response = get_collection_items( + api_, req, 
context['collection_id']) + assert code == HTTPStatus.BAD_REQUEST + + # bbox should be 4 values (minx,miny,maxx,maxy) or 6 values + # (minx,miny,minz,maxx,maxy,maxz) + req = mock_api_request( + {'offset': 0, 'limit': 10, 'bbox': '100,30,0,200,40'}) + rsp_headers, code, response = get_collection_items( + api_, req, context['collection_id']) + assert code == HTTPStatus.BAD_REQUEST + + # minx is greater than maxx (possibly antimeridian bbox) + req = mock_api_request( + {'offset': 0, 'limit': 10, 'bbox': '200,30,0,100,40,10'}) + rsp_headers, code, response = get_collection_items( + api_, req, context['collection_id']) + assert code == HTTPStatus.BAD_REQUEST + + # datetime parameter out of range + req = mock_api_request({'offset': 0, + 'limit': 10, + 'bbox': '100,30,0,200,40,10', + 'datetime': '2011-07-14T23:01:01.000Z/2011-07-14T22:01:01.000Z'}) # noqa + rsp_headers, code, response = get_collection_items( + api_, req, context['collection_id']) + assert code == HTTPStatus.BAD_REQUEST + + req = mock_api_request({'offset': 0, + 'limit': 10, + 'bbox': '100,30,0,200,40,10', + 'datetime': '2011-07-14T22:01:01.000Z/2011-07-14T23:01:01.000Z', # noqa + 'subTrajectory': 'true'}) + rsp_headers, code, response = get_collection_items( + api_, req, context['collection_id']) + assert code == HTTPStatus.OK + + assert rsp_headers['Content-Type'] == 'application/json' + collection = json.loads(response) + + # check response data + assert 'type' in collection + assert 'features' in collection + assert len(collection['features']) == 1 + + mfeature = collection['features'][0] + assert 'id' in mfeature + assert 'type' in mfeature + assert mfeature['type'] == 'Feature' + assert 'properties' in mfeature + + assert 'geometry' in mfeature + assert 'type' in mfeature['geometry'] + assert 'coordinates' in mfeature['geometry'] + + assert 'temporalGeometry' in mfeature + assert len(mfeature['temporalGeometry']) == 2 + temporal_geometry = mfeature['temporalGeometry'][0] + assert 'type' in 
temporal_geometry + assert temporal_geometry['type'] == 'MovingPoint' + assert 'datetimes' in temporal_geometry + assert 'interpolation' in temporal_geometry + assert 'id' in temporal_geometry + + assert 'bbox' in mfeature + assert mfeature['bbox'] == [ + 139.757083, + 35.627483, + 0.5, + 139.757716, + 35.627701, + 4] + assert 'time' in mfeature + assert mfeature['time'] == ["2011-07-14T22:01:01Z", "2011-07-15T01:11:22Z"] + + assert 'crs' in collection + assert 'trs' in collection + + assert 'links' in collection + assert len(collection['links']) == 1 + + assert 'timeStamp' in collection + assert 'numberMatched' in collection + assert collection['numberMatched'] == 1 + assert 'numberReturned' in collection + assert collection['numberReturned'] == 1 + + +def test_get_collection(api_, context): + + # not found + req = mock_api_request() + rsp_headers, code, response = get_collection( + api_, req, '00000000-0000-0000-0000-000000000000') + assert code == HTTPStatus.NOT_FOUND + + # successful data + req = mock_api_request() + rsp_headers, code, response = get_collection( + api_, req, context['collection_id']) + assert code == HTTPStatus.OK + + assert rsp_headers['Content-Type'] == 'application/json' + collection = json.loads(response) + + assert 'id' in collection + assert 'itemType' in collection + assert collection['itemType'] == 'movingfeature' + + assert 'title' in collection + assert collection['title'] == 'moving_feature_collection_sample' + assert 'updateFrequency' in collection + assert collection['updateFrequency'] == 1000 + assert 'description' in collection + assert collection['description'] == 'test_update' + + assert 'extent' in collection + assert collection['extent']['spatial']['bbox'] == [ + 139.757083, 35.627483, 0.5, 139.757716, 35.627701, 4] + assert collection['extent']['spatial']['crs'] == \ + 'http://www.opengis.net/def/crs/OGC/1.3/CRS84' + assert collection['extent']['temporal']['interval'] == \ + ["2011-07-14T22:01:01Z", "2011-07-15T01:11:22Z"] + 
assert collection['extent']['temporal']['trs'] == \ + 'http://www.opengis.net/def/uom/ISO-8601/0/Gregorian' + + assert 'links' in collection + assert len(collection['links']) == 1 + + +def test_get_collection_item(api_, context): + + # not found + req = mock_api_request() + rsp_headers, code, response = get_collection_item( + api_, req, '00000000-0000-0000-0000-000000000000', + '00000000-0000-0000-0000-000000000000') + assert code == HTTPStatus.NOT_FOUND + + # successful data + rsp_headers, code, response = get_collection_item( + api_, req, context['collection_id'], context['mfeature_id']) + assert code == HTTPStatus.OK + + assert rsp_headers['Content-Type'] == 'application/json' + mfeature = json.loads(response) + + assert 'id' in mfeature + assert 'type' in mfeature + assert mfeature['type'] == 'Feature' + assert 'properties' in mfeature + + assert 'geometry' in mfeature + assert 'type' in mfeature['geometry'] + assert 'coordinates' in mfeature['geometry'] + + assert 'crs' in mfeature + assert 'trs' in mfeature + + assert 'bbox' in mfeature + assert mfeature['bbox'] == [ + 139.757083, + 35.627483, + 0.5, + 139.757716, + 35.627701, + 4] + assert 'time' in mfeature + assert mfeature['time'] == ["2011-07-14T22:01:01Z", "2011-07-15T01:11:22Z"] + + assert 'links' in mfeature + assert len(mfeature['links']) == 1 + + +def test_get_collection_items_tGeometry(api_, context): + + # not found + req = mock_api_request() + rsp_headers, code, response = get_collection_items_tGeometry( + api_, req, '00000000-0000-0000-0000-000000000000', + '00000000-0000-0000-0000-000000000000') + assert code == HTTPStatus.NOT_FOUND + + # offset value should be positive or zero + req = mock_api_request({'offset': -1}) + rsp_headers, code, response = get_collection_items_tGeometry( + api_, req, context['collection_id'], context['mfeature_id']) + assert code == HTTPStatus.BAD_REQUEST + + # offset value should be an integer + req = mock_api_request({'offset': 'one'}) + rsp_headers, code, response 
= get_collection_items_tGeometry( + api_, req, context['collection_id'], context['mfeature_id']) + assert code == HTTPStatus.BAD_REQUEST + + # limit value should be strictly positive + req = mock_api_request({'offset': 0, 'limit': 0}) + rsp_headers, code, response = get_collection_items_tGeometry( + api_, req, context['collection_id'], context['mfeature_id']) + assert code == HTTPStatus.BAD_REQUEST + + # limit value should be less than or equal to 10000 + req = mock_api_request({'offset': 0, 'limit': 10001}) + rsp_headers, code, response = get_collection_items_tGeometry( + api_, req, context['collection_id'], context['mfeature_id']) + assert code == HTTPStatus.BAD_REQUEST + + # limit value should be an integer + req = mock_api_request({'offset': 0, 'limit': 'one'}) + rsp_headers, code, response = get_collection_items_tGeometry( + api_, req, context['collection_id'], context['mfeature_id']) + assert code == HTTPStatus.BAD_REQUEST + + # bbox values must be numbers + req = mock_api_request( + {'offset': 0, 'limit': 10, 'bbox': 'one,two,three,four'}) + rsp_headers, code, response = get_collection_items_tGeometry( + api_, req, context['collection_id'], context['mfeature_id']) + assert code == HTTPStatus.BAD_REQUEST + + # bbox should be 4 values (minx,miny,maxx,maxy) or 6 values + # (minx,miny,minz,maxx,maxy,maxz) + req = mock_api_request( + {'offset': 0, 'limit': 10, 'bbox': '100,30,0,200,40'}) + rsp_headers, code, response = get_collection_items_tGeometry( + api_, req, context['collection_id'], context['mfeature_id']) + assert code == HTTPStatus.BAD_REQUEST + + # minx is greater than maxx (possibly antimeridian bbox) + req = mock_api_request( + {'offset': 0, 'limit': 10, 'bbox': '200,30,0,100,40,10'}) + rsp_headers, code, response = get_collection_items_tGeometry( + api_, req, context['collection_id'], context['mfeature_id']) + assert code == HTTPStatus.BAD_REQUEST + + # invalid leaf + req = mock_api_request({'offset': 0, + 'limit': 10, + 'bbox': '100,30,0,200,40,10', 
+ 'leaf': '2011-07-14T22:01:01.000Z,2011-07-14T22:01:01.000Z'}) # noqa + rsp_headers, code, response = get_collection_items_tGeometry( + api_, req, context['collection_id'], context['mfeature_id']) + + assert code == HTTPStatus.BAD_REQUEST + + # cannot use both parameter `subTrajectory` and `leaf` at the same time + req = mock_api_request({'offset': 0, + 'limit': 10, + 'bbox': '100,30,0,200,40,10', + 'leaf': '2011-07-14T22:01:01.000Z', + 'subTrajectory': True}) + rsp_headers, code, response = get_collection_items_tGeometry( + api_, req, context['collection_id'], context['mfeature_id']) + assert code == HTTPStatus.BAD_REQUEST + + # datetime parameter out of range + req = mock_api_request({'offset': 0, + 'limit': 10, + 'bbox': '100,30,0,200,40,10', + 'leaf': '2011-07-14T22:01:01.000Z', + 'datetime': '2011-07-14T23:01:01.000Z/2011-07-14T22:01:01.000Z'}) # noqa + rsp_headers, code, response = get_collection_items_tGeometry( + api_, req, context['collection_id'], context['mfeature_id']) + assert code == HTTPStatus.BAD_REQUEST + + # successful data + req = mock_api_request({'offset': 0, + 'limit': 10, + 'bbox': '100,30,0,200,40,10', + 'leaf': '2011-07-14T22:01:01.000Z', + 'datetime': '2011-07-14T22:01:01.000Z/2011-07-14T23:01:01.000Z'}) # noqa + rsp_headers, code, response = get_collection_items_tGeometry( + api_, req, context['collection_id'], context['mfeature_id']) + + assert code == HTTPStatus.OK + + assert rsp_headers['Content-Type'] == 'application/json' + temporal_geometries = json.loads(response) + + assert 'geometrySequence' in temporal_geometries + assert len(temporal_geometries["geometrySequence"]) == 1 + + temporal_geometry = temporal_geometries['geometrySequence'][0] + assert 'id' in temporal_geometry + assert 'datetimes' in temporal_geometry + assert temporal_geometry['datetimes'] == ["2011-07-14T22:01:01+09"] + assert 'coordinates' in temporal_geometry + assert temporal_geometry['coordinates'] == [[139.757083, 35.627701, 0.5]] + assert 'type' in 
temporal_geometry + assert temporal_geometry['type'] == 'MovingPoint' + assert 'interpolation' in temporal_geometry + assert temporal_geometry['interpolation'] == 'Linear' + + assert 'crs' in temporal_geometries + assert 'trs' in temporal_geometries + assert 'links' in temporal_geometries + assert len(temporal_geometries['links']) == 1 + + assert 'timeStamp' in temporal_geometries + assert 'numberMatched' in temporal_geometries + assert temporal_geometries['numberMatched'] == 2 + assert 'numberReturned' in temporal_geometries + assert temporal_geometries['numberReturned'] == 1 + + +def test_get_collection_items_tGeometry_velocity(api_, context): + + # successful data + req = mock_api_request({'date-time': '2011-07-14T22:01:08Z'}) + rsp_headers, code, response = get_collection_items_tGeometry_velocity( + api_, req, context['collection_id'], context['mfeature_id'], + context['tgeometry_id']) + assert code == HTTPStatus.OK + + assert rsp_headers['Content-Type'] == 'application/json' + temporal_properties = response + + assert 'name' in temporal_properties + assert temporal_properties['name'] == 'velocity' + assert 'type' in temporal_properties + assert temporal_properties['type'] == 'TReal' + assert 'form' in temporal_properties + assert temporal_properties['form'] == 'MTS' + + assert 'valueSequence' in temporal_properties + assert len(temporal_properties['valueSequence']) == 1 + value_sequence = temporal_properties['valueSequence'][0] + + assert 'datetimes' in value_sequence + assert value_sequence['datetimes'] == ["2011-07-14T22:01:08.000000Z"] + assert 'values' in value_sequence + assert value_sequence['values'] == [0.00013296616111996862] + assert 'interpolation' in value_sequence + assert value_sequence['interpolation'], 1 == "Discrete" + + +def test_get_collection_items_tGeometry_distance(api_, context): + + # successful data + req = mock_api_request({'date-time': '2011-07-14T22:01:08Z'}) + rsp_headers, code, response = get_collection_items_tGeometry_distance( + 
api_, req, context['collection_id'], context['mfeature_id'], + context['tgeometry_id']) + assert code == HTTPStatus.OK + + assert rsp_headers['Content-Type'] == 'application/json' + temporal_properties = response + + assert 'name' in temporal_properties + assert temporal_properties['name'] == 'distance' + assert 'type' in temporal_properties + assert temporal_properties['type'] == 'TReal' + assert 'form' in temporal_properties + assert temporal_properties['form'] == 'MTR' + + assert 'valueSequence' in temporal_properties + assert len(temporal_properties['valueSequence']) == 1 + value_sequence = temporal_properties['valueSequence'][0] + + assert 'datetimes' in value_sequence + assert value_sequence['datetimes'] == ["2011-07-14T22:01:08.000000Z"] + assert 'values' in value_sequence + assert value_sequence['values'] == [3.5000000394115824] + assert 'interpolation' in value_sequence + assert value_sequence['interpolation'], 1 == "Discrete" + + +def test_get_collection_items_tGeometry_acceleration(api_, context): + + # successful data + req = mock_api_request({'date-time': '2011-07-14T22:01:08Z'}) + rsp_headers, code, response = \ + get_collection_items_tGeometry_acceleration( + api_, req, context['collection_id'], context['mfeature_id'], + context['tgeometry_id']) + assert code == HTTPStatus.OK + + assert rsp_headers['Content-Type'] == 'application/json' + temporal_properties = response + + assert 'name' in temporal_properties + assert temporal_properties['name'] == 'acceleration' + assert 'type' in temporal_properties + assert temporal_properties['type'] == 'TReal' + assert 'form' in temporal_properties + assert temporal_properties['form'] == 'MTS' + + assert 'valueSequence' in temporal_properties + assert len(temporal_properties['valueSequence']) == 1 + value_sequence = temporal_properties['valueSequence'][0] + + assert 'datetimes' in value_sequence + assert value_sequence['datetimes'] == ["2011-07-14T22:01:08.000000Z"] + assert 'values' in value_sequence + assert 
value_sequence['values'] == [0] + assert 'interpolation' in value_sequence + assert value_sequence['interpolation'], 1 == "Discrete" + + +def test_get_collection_items_tProperty(api_, context): + + # not found + req = mock_api_request() + rsp_headers, code, response = get_collection_items_tProperty( + api_, req, '00000000-0000-0000-0000-000000000000', + '00000000-0000-0000-0000-000000000000') + assert code == HTTPStatus.NOT_FOUND + + # offset value should be positive or zero + req = mock_api_request({'offset': -1}) + rsp_headers, code, response = get_collection_items_tProperty( + api_, req, context['collection_id'], context['mfeature_id']) + assert code == HTTPStatus.BAD_REQUEST + + # offset value should be an integer + req = mock_api_request({'offset': 'one'}) + rsp_headers, code, response = get_collection_items_tProperty( + api_, req, context['collection_id'], context['mfeature_id']) + assert code == HTTPStatus.BAD_REQUEST + + # limit value should be strictly positive + req = mock_api_request({'offset': 0, 'limit': 0}) + rsp_headers, code, response = get_collection_items_tProperty( + api_, req, context['collection_id'], context['mfeature_id']) + assert code == HTTPStatus.BAD_REQUEST + + # limit value should be less than or equal to 10000 + req = mock_api_request({'offset': 0, 'limit': 10001}) + rsp_headers, code, response = get_collection_items_tProperty( + api_, req, context['collection_id'], context['mfeature_id']) + assert code == HTTPStatus.BAD_REQUEST + + # limit value should be an integer + req = mock_api_request({'offset': 0, 'limit': 'one'}) + rsp_headers, code, response = get_collection_items_tProperty( + api_, req, context['collection_id'], context['mfeature_id']) + assert code == HTTPStatus.BAD_REQUEST + + # datetime parameter out of range + req = mock_api_request({'offset': 0, 'limit': 10, + 'datetime': '2011-07-17T22:01:01.450Z/2011-07-16T00:01:01.450Z'}) # noqa + rsp_headers, code, response = get_collection_items_tProperty( + api_, req, 
context['collection_id'], context['mfeature_id']) + assert code == HTTPStatus.BAD_REQUEST + + # successful data + req = mock_api_request({'offset': 0, + 'limit': 10, + 'datetime': '2011-07-16T22:01:01.450Z/2011-07-17T00:01:01.450Z', # noqa + 'subTemporalValue': 'true'}) + rsp_headers, code, response = get_collection_items_tProperty( + api_, req, context['collection_id'], context['mfeature_id']) + assert code == HTTPStatus.OK + + assert rsp_headers['Content-Type'] == 'application/json' + result = json.loads(response) + + assert 'temporalProperties' in result + temporal_properties = result['temporalProperties'] + assert len(temporal_properties) == 2 + + temporal_property = temporal_properties[0] + assert 'datetimes' in temporal_property + assert 'discharge' in temporal_property + assert 'form' in temporal_property['discharge'] + assert temporal_property['discharge']['form'] == 'MQS' + assert 'type' in temporal_property['discharge'] + assert temporal_property['discharge']['type'] == 'Measure' + assert 'values' in temporal_property['discharge'] + assert temporal_property['discharge']['values'] == [3, 4, 5] + + assert 'length' in temporal_property + assert 'form' in temporal_property['length'] + assert temporal_property['length']['form'] == \ + 'http://www.qudt.org/qudt/owl/1.0.0/quantity/Length' + assert 'type' in temporal_property['length'] + assert temporal_property['length']['type'] == 'Measure' + assert 'values' in temporal_property['length'] + assert temporal_property['length']['values'] == [1, 2.4, 1] + + assert 'links' in result + assert len(result['links']) == 1 + + assert 'timeStamp' in result + assert 'numberMatched' in result + assert result['numberMatched'] == 4 + assert 'numberReturned' in result + assert result['numberReturned'] == 4 + + +def test_get_collection_items_tProperty_value(api_, context): + + # not found + req = mock_api_request() + rsp_headers, code, response = get_collection_items_tProperty_value( + api_, req, 
'00000000-0000-0000-0000-000000000000', + '00000000-0000-0000-0000-000000000000', '') + assert code == HTTPStatus.NOT_FOUND + + # offset value should be positive or zero + req = mock_api_request({'offset': -1}) + rsp_headers, code, response = get_collection_items_tProperty_value( + api_, req, context['collection_id'], context['mfeature_id'], + context['tProperty_name']) + assert code == HTTPStatus.BAD_REQUEST + + # offset value should be an integer + req = mock_api_request({'offset': 'one'}) + rsp_headers, code, response = get_collection_items_tProperty_value( + api_, req, context['collection_id'], context['mfeature_id'], + context['tProperty_name']) + assert code == HTTPStatus.BAD_REQUEST + + # limit value should be strictly positive + req = mock_api_request({'offset': 0, 'limit': 0}) + rsp_headers, code, response = get_collection_items_tProperty_value( + api_, req, context['collection_id'], context['mfeature_id'], + context['tProperty_name']) + assert code == HTTPStatus.BAD_REQUEST + + # limit value should be less than or equal to 10000 + req = mock_api_request({'offset': 0, 'limit': 10001}) + rsp_headers, code, response = get_collection_items_tProperty_value( + api_, req, context['collection_id'], context['mfeature_id'], + context['tProperty_name']) + assert code == HTTPStatus.BAD_REQUEST + + # limit value should be an integer + req = mock_api_request({'offset': 0, 'limit': 'one'}) + rsp_headers, code, response = get_collection_items_tProperty_value( + api_, req, context['collection_id'], context['mfeature_id'], + context['tProperty_name']) + assert code == HTTPStatus.BAD_REQUEST + + # invalid leaf + req = mock_api_request({'offset': 0, 'limit': 10, + 'leaf': '2011-07-14T22:01:01.000Z,2011-07-14T22:01:01.000Z'}) # noqa + rsp_headers, code, response = get_collection_items_tProperty_value( + api_, req, context['collection_id'], context['mfeature_id'], + context['tProperty_name']) + assert code == HTTPStatus.BAD_REQUEST + + # cannot use both parameter 
`subTemporalValue` + # and `leaf` at the same time + req = mock_api_request({'offset': 0, + 'limit': 10, + 'leaf': '2011-07-16T22:01:01.450Z', + 'subTemporalValue': True}) + rsp_headers, code, response = get_collection_items_tProperty_value( + api_, req, context['collection_id'], context['mfeature_id'], + context['tProperty_name']) + assert code == HTTPStatus.BAD_REQUEST + + # datetime parameter out of range + req = mock_api_request({'offset': 0, + 'limit': 10, + 'leaf': '2011-07-16T22:01:01.450Z', + 'datetime': '2011-07-17T22:01:01.450Z/2011-07-16T00:01:01.450Z'}) # noqa + rsp_headers, code, response = get_collection_items_tProperty_value( + api_, req, context['collection_id'], context['mfeature_id'], + context['tProperty_name']) + assert code == HTTPStatus.BAD_REQUEST + + # successful data + req = mock_api_request({'offset': 0, + 'limit': 10, + 'leaf': '2011-07-16T22:01:01.450Z', + 'datetime': '2011-07-16T22:01:01.450Z/2011-07-17T00:01:01.450Z'}) # noqa + rsp_headers, code, response = get_collection_items_tProperty_value( + api_, req, context['collection_id'], context['mfeature_id'], + context['tProperty_name']) + + assert code == HTTPStatus.OK + + assert rsp_headers['Content-Type'] == 'application/json' + tProperty_value = json.loads(response) + + assert 'type' in tProperty_value + assert tProperty_value['type'] == 'Text' + assert 'valueSequence' in tProperty_value + assert len(tProperty_value["valueSequence"]) == 1 + + valueSequence = tProperty_value['valueSequence'][0] + assert 'values' in valueSequence + assert valueSequence['values'] == ["car"] + assert 'datetimes' in valueSequence + assert valueSequence['datetimes'] == ["2011-07-16T22:01:01.45Z"] + assert 'interpolation' in valueSequence + assert valueSequence['interpolation'] == 'Discrete' + + +def test_manage_collection_item_tProperty_value_delete( + api_, context): + + # feature not found + req = mock_api_request() + rsp_headers, code, response = manage_collection_item_tProperty_value( + api_, req, 
'delete', '00000000-0000-0000-0000-000000000000', + '00000000-0000-0000-0000-000000000000', '', + '00000000-0000-0000-0000-000000000000') + assert code == HTTPStatus.NOT_FOUND + + # successful delete + req = mock_api_request() + rsp_headers, code, response = manage_collection_item_tProperty_value( + api_, req, 'delete', context['collection_id'], context['mfeature_id'], + context['tProperty_name'], context['tvalue_id']) + + assert code == HTTPStatus.NO_CONTENT + assert response == '' + assert rsp_headers['Content-Type'] == 'application/json' + + +def test_manage_collection_item_tProperty_delete( + api_, context): + + # feature not found + req = mock_api_request() + rsp_headers, code, response = manage_collection_item_tProperty( + api_, req, 'delete', '00000000-0000-0000-0000-000000000000', + '00000000-0000-0000-0000-000000000000', '') + assert code == HTTPStatus.NOT_FOUND + + # successful delete + req = mock_api_request() + rsp_headers, code, response = manage_collection_item_tProperty( + api_, req, 'delete', context['collection_id'], context['mfeature_id'], + context['tProperty_name']) + + assert code == HTTPStatus.NO_CONTENT + assert response == '' + assert rsp_headers['Content-Type'] == 'application/json' + + +def test_manage_collection_item_tGeometry_delete( + api_, context): + + # feature not found + req = mock_api_request() + rsp_headers, code, response = manage_collection_item_tGeometry( + api_, req, 'delete', '00000000-0000-0000-0000-000000000000', + '00000000-0000-0000-0000-000000000000', + '00000000-0000-0000-0000-000000000000') + assert code == HTTPStatus.NOT_FOUND + + # successful delete + req = mock_api_request() + rsp_headers, code, response = manage_collection_item_tGeometry( + api_, req, 'delete', context['collection_id'], context['mfeature_id'], + context['tgeometry_id']) + + assert code == HTTPStatus.NO_CONTENT + assert response == '' + assert rsp_headers['Content-Type'] == 'application/json' + + +def test_manage_collection_item_delete( + api_, 
context): + + # collection not found + req = mock_api_request() + rsp_headers, code, response = manage_collection_item( + api_, req, 'delete', '00000000-0000-0000-0000-000000000000', + '00000000-0000-0000-0000-000000000000') + assert code == HTTPStatus.NOT_FOUND + + # successful delete + req = mock_api_request() + rsp_headers, code, response = manage_collection_item( + api_, req, 'delete', context['collection_id'], context['mfeature_id']) + assert code == HTTPStatus.NO_CONTENT + assert response == '' + assert rsp_headers['Content-Type'] == 'application/json' + + # check feature + pmdb_provider = PostgresMobilityDB() + pmdb_provider.connect() + result = pmdb_provider.get_feature(context.get('collection_id'), + context.get('mfeature_id')) + assert len(result) == 0 + + +def test_manage_collection_delete( + api_, + context): + + # successful delete + req = mock_api_request() + rsp_headers, code, response = manage_collection( + api_, req, 'delete', context['collection_id']) + assert code == HTTPStatus.NO_CONTENT + assert response == '' + assert rsp_headers['Content-Type'] == 'application/json' + + # check collection + pmdb_provider = PostgresMobilityDB() + pmdb_provider.connect() + result = pmdb_provider.get_collection(context.get('collection_id')) + assert len(result) == 0 diff --git a/tests/data/mf-api.sql b/tests/data/mf-api.sql new file mode 100644 index 000000000..8ad44bac9 --- /dev/null +++ b/tests/data/mf-api.sql @@ -0,0 +1,51 @@ +CREATE EXTENSION IF NOT EXISTS PostGIS; +CREATE EXTENSION IF NOT EXISTS mobilitydb CASCADE; +CREATE EXTENSION IF NOT EXISTS "uuid-ossp"; +-- Table collection +CREATE TABLE public.collection ( +collection_id uuid NOT NULL DEFAULT uuid_generate_v4(), +collection_property jsonb NULL, +PRIMARY KEY (collection_id) +); +-- Table MovingFeature +CREATE TABLE public.mfeature ( +collection_id uuid NOT NULL, +mfeature_id uuid NOT NULL DEFAULT uuid_generate_v4(), +mf_geometry geometry NULL, +mf_property jsonb NULL, +lifespan tstzspan NULL, +PRIMARY 
KEY (collection_id, mfeature_id), +FOREIGN KEY (collection_id) REFERENCES collection(collection_id) +); +-- Table TemporalGeometry +CREATE TABLE public.tgeometry ( +collection_id uuid NOT NULL, +mfeature_id uuid NOT NULL, +tgeometry_id uuid NOT NULL DEFAULT uuid_generate_v4(), +tgeometry_property tgeompoint NULL, +tgeog_property tgeompoint NULL, +PRIMARY KEY (collection_id, mfeature_id, tgeometry_id), +FOREIGN KEY (collection_id, mfeature_id) REFERENCES mfeature(collection_id, mfeature_id) +); +-- Table TemporalProperty +CREATE TABLE public.tproperties ( +collection_id uuid NOT NULL, +mfeature_id uuid NOT NULL, +tproperties_name text NOT NULL, +tproperty jsonb NULL, +PRIMARY KEY (collection_id, mfeature_id, tproperties_name), +FOREIGN KEY (collection_id, mfeature_id) REFERENCES mfeature(collection_id, mfeature_id) +); + +-- Table TemporalPropertyValue +CREATE TABLE public.tvalue ( +collection_id uuid NOT NULL, +mfeature_id uuid NOT NULL, +tproperties_name text NOT NULL, +tvalue_id uuid NOT NULL DEFAULT uuid_generate_v4(), +datetime_group int4 NOT NULL, +pvalue_float tfloat NULL, +pvalue_text ttext NULL, +PRIMARY KEY (collection_id, mfeature_id, tproperties_name, tvalue_id), +FOREIGN KEY (collection_id, mfeature_id, tproperties_name) REFERENCES tproperties(collection_id, mfeature_id, tproperties_name) +); diff --git a/tests/data/mfapi_moving_feature.json b/tests/data/mfapi_moving_feature.json new file mode 100644 index 000000000..7ed99fa7a --- /dev/null +++ b/tests/data/mfapi_moving_feature.json @@ -0,0 +1,219 @@ +{ + "type": "Feature", + "crs": { + "type": "Name", + "properties": { + "name": "urn:ogc:def:crs:OGC:1.3:CRS84" + } + }, + "trs": { + "type": "Link", + "properties": { + "type": "OGCDEF", + "href": "http://www.opengis.net/def/uom/ISO-8601/0/Gregorian" + } + }, + "temporalGeometry": { + "type": "MovingPoint", + "datetimes": [ + "2011-07-14T22:01:01Z", + "2011-07-14T22:01:02Z", + "2011-07-14T22:01:03Z", + "2011-07-14T22:01:04Z", + "2011-07-14T22:01:05Z" + ], 
+ "coordinates": [ + [ + 139.757083, + 35.627701, + 0.5 + ], + [ + 139.757399, + 35.627701, + 2 + ], + [ + 139.757555, + 35.627688, + 4 + ], + [ + 139.757651, + 35.627596, + 4 + ], + [ + 139.757716, + 35.627483, + 4 + ] + ], + "interpolation": "Linear", + "base": { + "type": "glTF", + "href": "http://www.opengis.net/spec/movingfeatures/json/1.0/prism/example/car3dmodel.gltf" + }, + "orientations": [ + { + "scales": [ + 1, + 1, + 1 + ], + "angles": [ + 0, + 0, + 0 + ] + }, + { + "scales": [ + 1, + 1, + 1 + ], + "angles": [ + 0, + 355, + 0 + ] + }, + { + "scales": [ + 1, + 1, + 1 + ], + "angles": [ + 0, + 0, + 330 + ] + }, + { + "scales": [ + 1, + 1, + 1 + ], + "angles": [ + 0, + 0, + 300 + ] + }, + { + "scales": [ + 1, + 1, + 1 + ], + "angles": [ + 0, + 0, + 270 + ] + } + ] + }, + "temporalProperties": [ + { + "datetimes": [ + "2011-07-14T22:01:01.450Z", + "2011-07-14T23:01:01.450Z", + "2011-07-15T00:01:01.450Z" + ], + "length": { + "type": "Measure", + "form": "http://www.qudt.org/qudt/owl/1.0.0/quantity/Length", + "values": [ + 1, + 2.4, + 1 + ], + "interpolation": "Linear" + }, + "discharge": { + "type": "Measure", + "form": "MQS", + "values": [ + 3, + 4, + 5 + ], + "interpolation": "Step" + } + }, + { + "datetimes": [ + 1465621816590, + 1465711526300 + ], + "camera": { + "type": "Image", + "values": [ + "http://www.opengis.net/spec/movingfeatures/json/1.0/prism/example/image1", + "iVBORw0KGgoAAAANSUhEU......" 
+ ], + "interpolation": "Discrete" + }, + "labels": { + "type": "Text", + "values": [ + "car", + "human" + ], + "interpolation": "Discrete" + } + } + ], + "geometry": { + "type": "LineString", + "coordinates": [ + [ + 139.757083, + 35.627701, + 0.5 + ], + [ + 139.757399, + 35.627701, + 2 + ], + [ + 139.757555, + 35.627688, + 4 + ], + [ + 139.757651, + 35.627596, + 4 + ], + [ + 139.757716, + 35.627483, + 4 + ] + ] + }, + "properties": { + "name": "car1", + "state": "test1", + "video": "http://www.opengis.net/spec/movingfeatures/json/1.0/prism/example/video.mpeg" + }, + "bbox": [ + 139.757083, + 35.627483, + 0, + 139.757716, + 35.627701, + 4.5 + ], + "time": [ + "2011-07-14T22:01:01Z", + "2011-07-15T01:11:22Z" + ], + "id": "mf-1" +} \ No newline at end of file diff --git a/tests/data/mfapi_temporal_geometry.json b/tests/data/mfapi_temporal_geometry.json new file mode 100644 index 000000000..386504de6 --- /dev/null +++ b/tests/data/mfapi_temporal_geometry.json @@ -0,0 +1,104 @@ +{ + "type": "MovingPoint", + "datetimes": [ + "2011-07-14T22:01:06Z", + "2011-07-14T22:01:07Z", + "2011-07-14T22:01:08Z", + "2011-07-14T22:01:09Z", + "2011-07-14T22:01:10Z" + ], + "coordinates": [ + [ + 139.757083, + 35.627701, + 0.5 + ], + [ + 139.757399, + 35.627701, + 2 + ], + [ + 139.757555, + 35.627688, + 4 + ], + [ + 139.757651, + 35.627596, + 4 + ], + [ + 139.757716, + 35.627483, + 4 + ] + ], + "interpolation": "Linear", + "base": { + "type": "glTF", + "href": "https://www.opengis.net/spec/movingfeatures/json/1.0/prism/example/car3dmodel.gltf" + }, + "orientations": [ + { + "scales": [ + 1, + 1, + 1 + ], + "angles": [ + 0, + 0, + 0 + ] + }, + { + "scales": [ + 1, + 1, + 1 + ], + "angles": [ + 0, + 355, + 0 + ] + }, + { + "scales": [ + 1, + 1, + 1 + ], + "angles": [ + 0, + 0, + 330 + ] + }, + { + "scales": [ + 1, + 1, + 1 + ], + "angles": [ + 0, + 0, + 300 + ] + }, + { + "scales": [ + 1, + 1, + 1 + ], + "angles": [ + 0, + 0, + 270 + ] + } + ] +} \ No newline at end of file diff --git 
a/tests/data/mfapi_temporal_properties.json b/tests/data/mfapi_temporal_properties.json new file mode 100644 index 000000000..85f1e2a01 --- /dev/null +++ b/tests/data/mfapi_temporal_properties.json @@ -0,0 +1,51 @@ +[ + { + "datetimes": [ + "2011-07-16T22:01:01.450Z", + "2011-07-16T23:01:01.450Z", + "2011-07-17T00:01:01.450Z" + ], + "length": { + "type": "Measure", + "form": "http://www.qudt.org/qudt/owl/1.0.0/quantity/Length", + "values": [ + 1, + 2.4, + 1 + ], + "interpolation": "Linear" + }, + "discharge": { + "type": "Measure", + "form": "MQS", + "values": [ + 3, + 4, + 5 + ], + "interpolation": "Step" + } + }, + { + "datetimes": [ + "2011-07-16T22:01:01.450Z", + "2011-07-16T23:01:01.450Z" + ], + "camera": { + "type": "Image", + "values": [ + "http://www.opengis.net/spec/movingfeatures/json/1.0/prism/example/image1", + "iVBORw0KGgoAAAANSUhEU......" + ], + "interpolation": "Discrete" + }, + "labels": { + "type": "Text", + "values": [ + "car", + "human" + ], + "interpolation": "Discrete" + } + } +] \ No newline at end of file diff --git a/tests/data/mfapi_temporal_property_value_data.json b/tests/data/mfapi_temporal_property_value_data.json new file mode 100644 index 000000000..ead9d40c6 --- /dev/null +++ b/tests/data/mfapi_temporal_property_value_data.json @@ -0,0 +1,13 @@ +{ + "datetimes": [ + "2011-07-18T08:00:00Z", + "2011-07-18T08:00:01Z", + "2011-07-18T08:00:02Z" + ], + "values": [ + 0, + 20, + 50 + ], + "interpolation": "Linear" +} \ No newline at end of file diff --git a/tests/pygeoapi-test-config-mfapi.yml b/tests/pygeoapi-test-config-mfapi.yml new file mode 100644 index 000000000..307c4ee6a --- /dev/null +++ b/tests/pygeoapi-test-config-mfapi.yml @@ -0,0 +1,94 @@ +# +# Authors: Tom Kralidis +# +# Copyright (c) 2020 Tom Kralidis +# +# Permission is hereby granted, free of charge, to any person +# obtaining a copy of this software and associated documentation +# files (the "Software"), to deal in the Software without +# restriction, including without 
limitation the rights to use, +# copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the +# Software is furnished to do so, subject to the following +# conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +# OTHER DEALINGS IN THE SOFTWARE. +# +# ================================================================= + +server: + bind: + host: 0.0.0.0 + port: 8085 + url: http://localhost:8085 + mimetype: application/json; charset=UTF-8 + encoding: utf-8 + gzip: false + languages: + # First language is the default language + - en-US + - fr-CA + cors: true + pretty_print: true + limit: 10 + # templates: + # path: /path/to/Jinja2/templates + # static: /path/to/static/folder # css/js/img + map: + url: https://tile.openstreetmap.org/{z}/{x}/{y}.png + attribution: '© OpenStreetMap contributors' +# manager: +# name: TinyDB +# connection: /tmp/pygeoapi-process-manager.db +# output_dir: /tmp/ + ogc_schemas_location: /etc/pygeoapi/pygeoapi/pygeoapi/schema + +logging: + level: ERROR + logfile: /etc/pygeoapi/pygeoapi/pygeoapi-mf-api/log/pygeoapi.log + +metadata: + identification: + title: + en: Movingfeatures data server + description: + en: Access to data about moving features + keywords: + en: + - geospatial + - data + - api + keywords_type: theme + terms_of_service: https://creativecommons.org/licenses/by/4.0/ + url: https://example.org + license: + name: CC-BY 4.0 license 
+ url: https://creativecommons.org/licenses/by/4.0/ + provider: + name: AIST, AIRC + url: https://www.airc.aist.go.jp/en/ + contact: + name: KIM, Taehoon + position: Researcher + address: 2-4-7, Aomi, Koto-ku, Tokyo + city: Tokyo + stateorprovince: Tokyo + postalcode: 135-0064 + country: Japan + phone: +xx-xxx-xxx-xxxx + fax: +xx-xxx-xxx-xxxx + email: kim.taehoon@aist.go.jp + url: https://github.com/aistairc/mf-api + hours: Mo-Fr 08:00-17:00 + instructions: During hours of service. Off on weekends. + role: pointOfContact \ No newline at end of file diff --git a/tests/pygeoapi-test-openapi-mfapi.yml b/tests/pygeoapi-test-openapi-mfapi.yml new file mode 100644 index 000000000..a6e7db671 --- /dev/null +++ b/tests/pygeoapi-test-openapi-mfapi.yml @@ -0,0 +1,2416 @@ +openapi: 3.0.3 +info: + title: 'Building Blocks specified in OGC API - Moving Features - Part 1: Core' + version: 0.1.0 + description: This is the OpenAPI definition of Moving Features API specification that conforms to the OGC Moving Features Encoding Extension - JSON. + contact: + name: OGC Moving Features SWG + email: moving-features.swg-bounces@lists.ogc.org + license: + name: OGC License + url: https://www.ogc.org/ogc/Document + x-logo: + url: https://www.ogc.org/pub/www/files/OGC_Logo_2D_Blue_x_0_0.png + backgroundColor: '#FFFFFF' + altText: OGC logo + href: https://www.ogc.org/contacts +tags: + - name: Capabilities + description: Essential characteristics of the information available from the API. + - name: MovingFeatureCollection + description: Collections of moving features to be logically managed by a user. + - name: MovingFeatures + description: Moving feature data, including the temporal geometry, temporal properties, etc. + - name: TemporalGeometry + description: The spatial change over time (temporal geometry), representing the movement of the rigid or nonrigid body of a feature. + - name: TemporalGeometryQuery + description: Queryable resources for the temporal primitive geometry. 
+ - name: TemporalProperty + description: The thematic change over time (temporal property), representing the variation of the value of any descriptive characteristic of a feature. +paths: + /: + get: + operationId: getLandingPage + summary: Landing page + description: The landing page provides links to the API definition, the conformance statements and to the feature collections in this dataset. + tags: + - Capabilities + responses: + '200': + $ref: '#/components/responses/LandingPage' + '500': + $ref: '#/components/responses/ServerError' + /conformance: + get: + operationId: getConformance + summary: Information about specifications that this API conforms to + description: A list of all conformance classes specified in a standard that the server conforms to. + tags: + - Capabilities + responses: + '200': + $ref: '#/components/responses/Conformance' + '500': + $ref: '#/components/responses/ServerError' + /api: + get: + operationId: getAPIList + summary: API definition + description: A list of all API definition + tags: + - Capabilities + responses: + '500': + $ref: '#/components/responses/ServerError' + /collections: + get: + operationId: searchCatalog + summary: Retrieve catalogs of moving features collection + description: | + A user can retrieve catalogs to access collections by simple filtering and a limit. + tags: + - Capabilities + responses: + '200': + $ref: '#/components/responses/Collections' + '500': + $ref: '#/components/responses/ServerError' + post: + operationId: registerMetadata + summary: Register metadata about a collection of moving features + description: | + A user SHOULD register metadata about a collection of moving features into the system. 
+ tags: + - MovingFeatureCollection + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/collection-2' + example: + title: moving_feature_collection_sample + updateFrequency: 1000 + description: example + responses: + '201': + description: Successful create a collection to manage moving features. + headers: + Location: + description: A URI of the newly added resource + schema: + type: string + example: https://data.example.org/collections/mfc1 + content: + application/json: + schema: + type: object + required: + - id + properties: + id: + type: string + description: Newly added resource ID + example: + id: mfc-1 + '500': + $ref: '#/components/responses/ServerError' + /collections/{collectionId}: + get: + operationId: accessMetadata + summary: Access metadata about the collection + description: | + A user can access metadata with id `collectionId`. + tags: + - MovingFeatureCollection + parameters: + - $ref: '#/components/parameters/collectionId' + responses: + '200': + $ref: '#/components/responses/Collection' + '404': + description: A collection with the specified id was not found. + '500': + $ref: '#/components/responses/ServerError' + delete: + operationId: deleteCollection + summary: Delete the collection + description: | + The collection catalog with id `collectionId` and including metadata and moving features SHOULD be deleted. + tags: + - MovingFeatureCollection + parameters: + - $ref: '#/components/parameters/collectionId' + responses: + '204': + description: Successfully deleted. + '404': + description: A collection with the specified name was not found. + '500': + $ref: '#/components/responses/ServerError' + put: + operationId: replaceMetadata + summary: Replace metadata about the collection + description: | + A user SHOULD replace metadata with id `collectionId`. + + The request body schema is the same the POST's one. + + However, `updateFrequency` property is NOT updated. 
+ tags: + - MovingFeatureCollection + parameters: + - $ref: '#/components/parameters/collectionId' + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/collection-2' + example: + title: moving_feature_collection_sample + updateFrequency: 1000 + description: example + responses: + '204': + description: Successfully replaced. + '404': + description: A collection with the specified name was not found. + '500': + $ref: '#/components/responses/ServerError' + /collections/{collectionId}/items: + get: + operationId: retrieveMovingFeatures + summary: Retrieve moving feature collection + description: | + A user can retrieve moving feature collection to access the static information of the moving feature by simple filtering and a limit. + + Specifically, if the `subTrajectory` parameter is "true", it will return the temporal geometry within the time interval specified by `datetime` parameter. + tags: + - MovingFeatures + parameters: + - $ref: '#/components/parameters/collectionId' + - $ref: '#/components/parameters/bbox' + - $ref: '#/components/parameters/datetime' + - $ref: '#/components/parameters/limit' + - $ref: '#/components/parameters/subtrajectory-description' + responses: + '200': + $ref: '#/components/responses/MovingFeatures' + '404': + description: A collection with the specified id was not found. + '500': + $ref: '#/components/responses/ServerError' + post: + operationId: insertMovingFeatures + summary: Insert moving features + description: | + A user SHOULD insert a set of moving features or a moving feature into a collection with id `collectionId`. + + The request body schema SHALL follows the [MovingFeature object](https://docs.opengeospatial.org/is/19-045r3/19-045r3.html#mfeature) or + [MovingFeatureCollection object](https://docs.opengeospatial.org/is/19-045r3/19-045r3.html#mfeaturecollection) in the OGC MF-JSON. 
+ tags: + - MovingFeatures + parameters: + - $ref: '#/components/parameters/collectionId' + requestBody: + content: + application/json: + schema: + oneOf: + - $ref: '#/components/schemas/movingFeature-2' + - $ref: '#/components/schemas/movingFeatureCollection' + example: + type: Feature + crs: + type: Name + properties: + name: urn:ogc:def:crs:OGC:1.3:CRS84 + trs: + type: Link + properties: + type: OGCDEF + href: http://www.opengis.net/def/uom/ISO-8601/0/Gregorian + temporalGeometry: + type: MovingPoint + datetimes: + - '2011-07-14T22:01:01Z' + - '2011-07-14T22:01:02Z' + - '2011-07-14T22:01:03Z' + - '2011-07-14T22:01:04Z' + - '2011-07-14T22:01:05Z' + coordinates: + - - 139.757083 + - 35.627701 + - 0.5 + - - 139.757399 + - 35.627701 + - 2 + - - 139.757555 + - 35.627688 + - 4 + - - 139.757651 + - 35.627596 + - 4 + - - 139.757716 + - 35.627483 + - 4 + interpolation: Linear + base: + type: glTF + href: http://www.opengis.net/spec/movingfeatures/json/1.0/prism/example/car3dmodel.gltf + orientations: + - scales: + - 1 + - 1 + - 1 + angles: + - 0 + - 0 + - 0 + - scales: + - 1 + - 1 + - 1 + angles: + - 0 + - 355 + - 0 + - scales: + - 1 + - 1 + - 1 + angles: + - 0 + - 0 + - 330 + - scales: + - 1 + - 1 + - 1 + angles: + - 0 + - 0 + - 300 + - scales: + - 1 + - 1 + - 1 + angles: + - 0 + - 0 + - 270 + temporalProperties: + - datetimes: + - '2011-07-14T22:01:01.450Z' + - '2011-07-14T23:01:01.450Z' + - '2011-07-15T00:01:01.450Z' + length: + type: Measure + form: http://www.qudt.org/qudt/owl/1.0.0/quantity/Length + values: + - 1 + - 2.4 + - 1 + interpolation: Linear + discharge: + type: Measure + form: MQS + values: + - 3 + - 4 + - 5 + interpolation: Step + - datetimes: + - 1465621816590 + - 1465711526300 + camera: + type: Image + values: + - http://www.opengis.net/spec/movingfeatures/json/1.0/prism/example/image1 + - iVBORw0KGgoAAAANSUhEU...... 
+ interpolation: Discrete + labels: + type: Text + values: + - car + - human + interpolation: Discrete + geometry: + type: LineString + coordinates: + - - 139.757083 + - 35.627701 + - 0.5 + - - 139.757399 + - 35.627701 + - 2 + - - 139.757555 + - 35.627688 + - 4 + - - 139.757651 + - 35.627596 + - 4 + - - 139.757716 + - 35.627483 + - 4 + properties: + name: car1 + state: test1 + video: http://www.opengis.net/spec/movingfeatures/json/1.0/prism/example/video.mpeg + bbox: + - 139.757083 + - 35.627483 + - 0 + - 139.757716 + - 35.627701 + - 4.5 + time: + - '2011-07-14T22:01:01Z' + - '2011-07-15T01:11:22Z' + id: mf-1 + responses: + '201': + description: | + Successful create a set of moving features or a moving feature into a specific collection. + headers: + Location: + description: A list of URI of the newly added resources + schema: + type: array + items: + type: string + example: + - https://data.example.org/collections/mfc-1/items/mf-1 + - https://data.example.org/collections/mfc-1/items/109301273 + '400': + description: A query parameter was not validly used. + '404': + description: A collection with the specified id was not found. + '500': + $ref: '#/components/responses/ServerError' + /collections/{collectionId}/items/{mFeatureId}: + get: + operationId: accessMovingFeature + summary: Access the static data of the moving feature + description: | + A user can access a static data of a moving feature with id `mFeatureId`. + + The static data of a moving feature is not included temporal geometries and temporal properties. + tags: + - MovingFeatures + parameters: + - $ref: '#/components/parameters/collectionId' + - $ref: '#/components/parameters/mFeatureId' + responses: + '200': + $ref: '#/components/responses/MovingFeature' + '404': + description: | + - A collection with the specified id was not found. + - Or a moving feature with the specified id was not found. 
+ '500': + $ref: '#/components/responses/ServerError' + delete: + operationId: deleteMovingFeature + summary: Delete a single moving feature + description: | + The moving feature with id `mFeatureId` and including temporal geometries and properties SHOULD be deleted. + tags: + - MovingFeatures + parameters: + - $ref: '#/components/parameters/collectionId' + - $ref: '#/components/parameters/mFeatureId' + responses: + '204': + description: Successfully deleted. + '404': + description: | + - A collection with the specified id was not found. + - Or a moving feature with the specified id was not found. + '500': + $ref: '#/components/responses/ServerError' + /collections/{collectionId}/items/{mFeatureId}/tgsequence: + get: + operationId: retrieveTemporalGeometrySequence + summary: Retrieve the movement data of the single moving feature + description: | + A user can retrieve only the movement data of a moving feature with id `mFeatureId` by simple filtering and a limit. + tags: + - TemporalGeometry + parameters: + - $ref: '#/components/parameters/collectionId' + - $ref: '#/components/parameters/mFeatureId' + - $ref: '#/components/parameters/bbox' + - $ref: '#/components/parameters/datetime' + - $ref: '#/components/parameters/limit' + - $ref: '#/components/parameters/leaf-description' + - $ref: '#/components/parameters/subtrajectory-description' + responses: + '200': + $ref: '#/components/responses/TemporalGeometrySequence' + '500': + $ref: '#/components/responses/ServerError' + post: + operationId: insertTemporalPrimitiveGeometry + summary: Add movement data into the moving feature + description: | + A user SHOULD add more movement data into a moving feature with id `mFeatureId`. + + The request body schema SHALL follows the [TemporalPrimitiveGeometry object](https://docs.ogc.org/is/19-045r3/19-045r3.html#tprimitive) in the OGC MF-JSON. 
+ tags: + - TemporalGeometry + parameters: + - $ref: '#/components/parameters/collectionId' + - $ref: '#/components/parameters/mFeatureId' + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/temporalPrimitiveGeometry' + example: + type: MovingPoint + datetimes: + - '2011-07-14T22:01:06Z' + - '2011-07-14T22:01:07Z' + - '2011-07-14T22:01:08Z' + - '2011-07-14T22:01:09Z' + - '2011-07-14T22:01:10Z' + coordinates: + - - 139.757083 + - 35.627701 + - 0.5 + - - 139.757399 + - 35.627701 + - 2 + - - 139.757555 + - 35.627688 + - 4 + - - 139.757651 + - 35.627596 + - 4 + - - 139.757716 + - 35.627483 + - 4 + interpolation: Linear + base: + type: glTF + href: https://www.opengis.net/spec/movingfeatures/json/1.0/prism/example/car3dmodel.gltf + orientations: + - scales: + - 1 + - 1 + - 1 + angles: + - 0 + - 0 + - 0 + - scales: + - 1 + - 1 + - 1 + angles: + - 0 + - 355 + - 0 + - scales: + - 1 + - 1 + - 1 + angles: + - 0 + - 0 + - 330 + - scales: + - 1 + - 1 + - 1 + angles: + - 0 + - 0 + - 300 + - scales: + - 1 + - 1 + - 1 + angles: + - 0 + - 0 + - 270 + responses: + '201': + description: | + Successful add more movement data into a specified moving feature. + headers: + Location: + description: A URI of the newly added resource + schema: + type: string + example: https://data.example.org/collections/mfc-1/items/mf-1/tgsequence/tg-2 + '400': + description: A query parameter was not validly used. + '404': + description: | + - A collection with the specified id was not found. + - Or a moving feature with the specified id was not found. + '500': + $ref: '#/components/responses/ServerError' + /collections/{collectionId}/items/{mFeatureId}/tgsequence/{tGeometryId}: + delete: + operationId: deleteTemporalPrimitiveGeometry + summary: Delete a singe temporal primitive geometry + description: | + The temporal primitive geometry with id `tGeometryId` SHOULD be deleted. 
+ tags: + - TemporalGeometry + parameters: + - $ref: '#/components/parameters/collectionId' + - $ref: '#/components/parameters/mFeatureId' + - $ref: '#/components/parameters/tGeometryId' + responses: + '204': + description: Successfully deleted. + '404': + description: | + - A collection with the specified id was not found. + - Or a moving feature with the specified id was not found. + - Or a temporal primitive geometry with the specified id was not found. + '500': + $ref: '#/components/responses/ServerError' + /collections/{collectionId}/items/{mFeatureId}/tgsequence/{tGeometryId}/distance: + get: + operationId: getDistanceOfTemporalPrimitiveGeometry + summary: Get a time-to-distance curve of a temporal primitive geometry + description: | + A user can get time-to-distance curve of a temporal primitive geometry with id `tGeometryId`. + + When `date-time` query parameter is provided, this operation will return a single distance value according to the specified date and time in the `data-time` parameter. + tags: + - TemporalGeometryQuery + parameters: + - $ref: '#/components/parameters/collectionId' + - $ref: '#/components/parameters/mFeatureId' + - $ref: '#/components/parameters/tGeometryId' + - $ref: '#/components/parameters/date-time' + responses: + '200': + $ref: '#/components/responses/DistanceQuery' + '400': + description: A query parameter was not validly used. + '500': + $ref: '#/components/responses/ServerError' + /collections/{collectionId}/items/{mFeatureId}/tgsequence/{tGeometryId}/velocity: + get: + operationId: getVelocityOfTemporalPrimitiveGeometry + summary: Get a time-to-velocity curve of a temporal primitive geometry + description: | + A user can get time-to-velocity curve of a temporal primitive geometry with id `tGeometryId`. + + When `date-time` query parameter is provided, this operation will return a single velocity value according to the specified date and time in the `data-time` parameter. 
+ tags: + - TemporalGeometryQuery + parameters: + - $ref: '#/components/parameters/collectionId' + - $ref: '#/components/parameters/mFeatureId' + - $ref: '#/components/parameters/tGeometryId' + - $ref: '#/components/parameters/date-time' + responses: + '200': + $ref: '#/components/responses/VelocityQuery' + '400': + description: A query parameter was not validly used. + '500': + $ref: '#/components/responses/ServerError' + /collections/{collectionId}/items/{mFeatureId}/tgsequence/{tGeometryId}/acceleration: + get: + operationId: getAccelerationOfTemporalPrimitiveGeometry + summary: Get a time-to-acceleration curve of a temporal primitive geometry + description: | + A user can get time-to-acceleration curve of a temporal primitive geometry with id `tGeometryId`. + + When `date-time` query parameter is provided, this operation will return a single acceleration value according to the specified date and time in the `data-time` parameter. + tags: + - TemporalGeometryQuery + parameters: + - $ref: '#/components/parameters/collectionId' + - $ref: '#/components/parameters/mFeatureId' + - $ref: '#/components/parameters/tGeometryId' + - $ref: '#/components/parameters/date-time' + responses: + '200': + $ref: '#/components/responses/AccelerationQuery' + '400': + description: A query parameter was not validly used. + '500': + $ref: '#/components/responses/ServerError' + /collections/{collectionId}/items/{mFeatureId}/tproperties: + get: + operationId: retrieveTemporalProperties + summary: Retrieve a set of the temporal property data + description: | + A user can retrieve the static information of the temporal property data that included a single moving feature with id `mFeatureId`. + + The static data of a temporal property is not included temporal values (property `valueSequence`). + + Also a user can retrieve the sub sequence of the temporal information of the temporal property data for the specified time interval with `subTemporalValue` query parameter. 
+ In this case, `temporalProperties` property schema SHALL follow the [TemporalProperties object](https://docs.ogc.org/is/19-045r3/19-045r3.html#tproperties) in the OGC MF-JSON. + tags: + - TemporalProperty + parameters: + - $ref: '#/components/parameters/collectionId' + - $ref: '#/components/parameters/mFeatureId' + - $ref: '#/components/parameters/datetime' + - $ref: '#/components/parameters/limit' + - $ref: '#/components/parameters/subtemporalvalue-description' + responses: + '200': + $ref: '#/components/responses/TemporalProperties' + '400': + description: A query parameter was not validly used. + '500': + $ref: '#/components/responses/ServerError' + post: + operationId: insertTemporalProperty + summary: Add temporal property data + description: | + A user SHOULD add new temporal property data into a moving feature with id `mFeatureId`. + + The request body schema SHALL follow the [TemporalProperties object](https://docs.opengeospatial.org/is/19-045r3/19-045r3.html#tproperties) in the OGC MF-JSON. + tags: + - TemporalProperty + parameters: + - $ref: '#/components/parameters/collectionId' + - $ref: '#/components/parameters/mFeatureId' + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/temporalProperties' + example: + - datetimes: + - '2011-07-14T22:01:01.450Z' + - '2011-07-14T23:01:01.450Z' + - '2011-07-15T00:01:01.450Z' + length: + type: Measure + form: http://www.qudt.org/qudt/owl/1.0.0/quantity/Length + values: + - 1 + - 2.4 + - 1 + interpolation: Linear + discharge: + type: Measure + form: MQS + values: + - 3 + - 4 + - 5 + interpolation: Step + - datetimes: + - '2011-07-14T22:01:01.450Z' + - '2011-07-14T23:01:01.450Z' + camera: + type: Image + values: + - http://www.opengis.net/spec/movingfeatures/json/1.0/prism/example/image1 + - iVBORw0KGgoAAAANSUhEU...... 
+ interpolation: Discrete + labels: + type: Text + values: + - car + - human + interpolation: Discrete + responses: + '201': + description: | + Successful add more temporal property into a specified moving feature. + headers: + Locations: + description: A list of URI of the newly added resources + schema: + type: array + items: + type: string + example: + - https://data.example.org/collections/mfc-1/items/mf-1/tproperties/length + - https://data.example.org/collections/mfc-1/items/mf-1/tproperties/discharge + - https://data.example.org/collections/mfc-1/items/mf-1/tproperties/camera + - https://data.example.org/collections/mfc-1/items/mf-1/tproperties/labels + '404': + description: | + - A collection with the specified id was not found. + - Or a moving feature with the specified id was not found. + '500': + $ref: '#/components/responses/ServerError' + /collections/{collectionId}/items/{mFeatureId}/tproperties/{tPropertyName}: + get: + operationId: retrieveTemporalProperty + summary: Retrieve a temporal property + description: | + A user can retrieve only the temporal values with a specified name `tPropertyName` of temporal property. + tags: + - TemporalProperty + parameters: + - $ref: '#/components/parameters/collectionId' + - $ref: '#/components/parameters/mFeatureId' + - $ref: '#/components/parameters/tPropertyName' + - $ref: '#/components/parameters/datetime' + - $ref: '#/components/parameters/leaf-description' + - $ref: '#/components/parameters/subtemporalvalue-description' + responses: + '200': + $ref: '#/components/responses/TemporalProperty' + '400': + description: A query parameter was not validly used. + '500': + $ref: '#/components/responses/ServerError' + post: + operationId: insertTemporalPrimitiveValue + summary: Add temporal primitive value data + description: | + A user SHOULD add more temporal primitive value data into a temporal property with id `tPropertyName`. 
+ tags: + - TemporalProperty + parameters: + - $ref: '#/components/parameters/collectionId' + - $ref: '#/components/parameters/mFeatureId' + - $ref: '#/components/parameters/tPropertyName' + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/temporalPrimitiveValue' + example: + datetimes: + - '2011-07-15T08:00:00Z' + - '2011-07-15T08:00:01Z' + - '2011-07-15T08:00:02Z' + values: + - 0 + - 20 + - 50 + interpolation: Linear + responses: + '201': + description: | + Successful add more temporal primitive value data into a specified temporal property. + '404': + description: | + - A collection with the specified id was not found. + - Or a moving feature with the specified id was not found. + - Or a temporal property with the specified id was not found. + '500': + $ref: '#/components/responses/ServerError' + delete: + operationId: deleteTemporalProperty + summary: Delete a specified temporal property + description: | + The temporal property with id `tPropertyName` SHOULD be deleted. + tags: + - TemporalProperty + parameters: + - $ref: '#/components/parameters/collectionId' + - $ref: '#/components/parameters/mFeatureId' + - $ref: '#/components/parameters/tPropertyName' + responses: + '204': + description: Successfully deleted. + '404': + description: | + - A collection with the specified id was not found. + - Or a moving feature with the specified id was not found. + - Or a temporal property with the specified id was not found. 
+ '500': + $ref: '#/components/responses/ServerError' +components: + schemas: + link: + type: object + required: + - href + - rel + properties: + href: + type: string + example: http://data.example.com/buildings/123 + rel: + type: string + example: alternate + type: + type: string + example: application/geo+json + hreflang: + type: string + example: en + title: + type: string + example: Trierer Strasse 70, 53115 Bonn + length: + type: integer + landingPage: + type: object + required: + - links + properties: + title: + type: string + example: Moving features data server + description: + type: string + example: Access to data about moving features + links: + type: array + items: + $ref: '#/components/schemas/link' + exception: + type: object + required: + - code + properties: + code: + type: string + description: + type: string + confClasses: + type: object + required: + - conformsTo + properties: + conformsTo: + type: array + items: + type: string + example: + - http://www.opengis.net/spec/ogcapi-movingfeatures-1/1.0/conf/common + - http://www.opengis.net/spec/ogcapi-movingfeatures-1/1.0/conf/mf-collection + - http://www.opengis.net/spec/ogcapi-movingfeatures-1/1.0/conf/movingfeatures + extent: + description: |- + The extent of the features in the collection. In the Core only spatial and temporal + extents are specified. Extensions may add additional members to represent other + extents, for example, thermal or pressure ranges. + + An array of extents is provided for each extent type (spatial, temporal). The first item + in the array describes the overall extent of the data. All subsequent items describe more + precise extents, e.g., to identify clusters of data. Clients only interested in the + overall extent will only need to access the first extent in the array. + type: object + properties: + spatial: + description: The spatial extent of the features in the collection. 
+ type: object + properties: + bbox: + description: |- + One or more bounding boxes that describe the spatial extent of the dataset. + In the Core only a single bounding box is supported. + + Extensions may support additional areas. + The first bounding box describes the overall spatial + extent of the data. All subsequent bounding boxes describe + more precise bounding boxes, e.g., to identify clusters of data. + Clients only interested in the overall spatial extent will + only need to access the first bounding box in the array. + type: array + minItems: 1 + items: + description: |- + Each bounding box is provided as four or six numbers, depending on + whether the coordinate reference system includes a vertical axis + (height or depth): + + * Lower left corner, coordinate axis 1 + * Lower left corner, coordinate axis 2 + * Minimum value, coordinate axis 3 (optional) + * Upper right corner, coordinate axis 1 + * Upper right corner, coordinate axis 2 + * Maximum value, coordinate axis 3 (optional) + + If the value consists of four numbers, the coordinate reference system is + WGS 84 longitude/latitude (http://www.opengis.net/def/crs/OGC/1.3/CRS84) + unless a different coordinate reference system is specified in `crs`. + + If the value consists of six numbers, the coordinate reference system is WGS 84 + longitude/latitude/ellipsoidal height (http://www.opengis.net/def/crs/OGC/0/CRS84h) + unless a different coordinate reference system is specified in `crs`. + + For WGS 84 longitude/latitude the values are in most cases the sequence of + minimum longitude, minimum latitude, maximum longitude and maximum latitude. + However, in cases where the box spans the antimeridian the first value + (west-most box edge) is larger than the third value (east-most box edge). + + If the vertical axis is included, the third and the sixth number are + the bottom and the top of the 3-dimensional bounding box. 
+ + If a feature has multiple spatial geometry properties, it is the decision of the + server whether only a single spatial geometry property is used to determine + the extent or all relevant geometries. + type: array + oneOf: + - minItems: 4 + maxItems: 4 + - minItems: 6 + maxItems: 6 + items: + type: number + example: + - -180 + - -90 + - 180 + - 90 + crs: + description: |- + Coordinate reference system of the coordinates in the spatial extent + (property `bbox`). The default reference system is WGS 84 longitude/latitude. + In the Core the only other supported coordinate reference system is + WGS 84 longitude/latitude/ellipsoidal height for coordinates with height. + Extensions may support additional coordinate reference systems and add + additional enum values. + type: string + enum: + - http://www.opengis.net/def/crs/OGC/1.3/CRS84 + - http://www.opengis.net/def/crs/OGC/0/CRS84h + default: http://www.opengis.net/def/crs/OGC/1.3/CRS84 + temporal: + description: The temporal extent of the features in the collection. + type: object + properties: + interval: + description: |- + One or more time intervals that describe the temporal extent of the dataset. + In the Core only a single time interval is supported. + + Extensions may support multiple intervals. + The first time interval describes the overall + temporal extent of the data. All subsequent time intervals describe + more precise time intervals, e.g., to identify clusters of data. + Clients only interested in the overall temporal extent will only need + to access the first time interval in the array (a pair of lower and upper + bound instants). + type: array + minItems: 1 + items: + description: |- + Begin and end times of the time interval. The timestamps are in the + temporal coordinate reference system specified in `trs`. By default + this is the Gregorian calendar. + + The value `null` at start or end is supported and indicates a half-bounded interval. 
+ type: array + minItems: 2 + maxItems: 2 + items: + type: string + format: date-time + nullable: true + example: + - '2011-11-11T12:22:11Z' + - null + trs: + description: |- + Coordinate reference system of the coordinates in the temporal extent + (property `interval`). The default reference system is the Gregorian calendar. + In the Core this is the only supported temporal coordinate reference system. + Extensions may support additional temporal coordinate reference systems and add + additional enum values. + type: string + enum: + - http://www.opengis.net/def/uom/ISO-8601/0/Gregorian + default: http://www.opengis.net/def/uom/ISO-8601/0/Gregorian + collection: + type: object + required: + - id + - links + - itemType + properties: + id: + description: identifier of the collection used, for example, in URIs + type: string + example: address + title: + description: human readable title of the collection + type: string + example: address + description: + description: a description of the features in the collection + type: string + example: An address. + links: + type: array + items: + $ref: '#/components/schemas/link' + example: + - href: https://data.example.com/buildings + rel: item + - href: https://example.com/concepts/buildings.html + rel: describedby + type: text/html + extent: + $ref: '#/components/schemas/extent' + itemType: + description: indicator about the type of the items in the collection + type: string + default: movingfeature + crs: + description: the list of coordinate reference systems supported by the service + type: array + items: + type: string + default: + - https://www.opengis.net/def/crs/OGC/1.3/CRS84 + example: + - https://www.opengis.net/def/crs/OGC/1.3/CRS84 + - https://www.opengis.net/def/crs/EPSG/0/4326 + updateFrequency: + description: a time interval of sampling location. The unit is millisecond. 
+ type: number + collections: + type: object + required: + - collections + - links + properties: + collections: + type: array + items: + $ref: '#/components/schemas/collection' + links: + type: array + items: + $ref: '#/components/schemas/link' + collection-2: + type: object + required: + - itemType + properties: + title: + description: human readable title of the collection + type: string + updateFrequency: + description: a time interval of sampling location. The unit is millisecond. + type: number + description: + description: any description + type: string + itemType: + description: indicator about the type of the items in the moving features collection (the default value is 'movingfeature'). + type: string + default: movingfeature + motionCurve: + description: MF-JSON Prism encoding MotionCurve Object + title: MF-JSON MotionCurve + oneOf: + - type: string + enum: + - Discrete + - Step + - Linear + - Quadratic + - Cubic + default: Linear + - type: string + format: uri + namedCRS: + description: MF-JSON Prism encoding NamedCRS Object + title: MF-JSON NamedCRS + type: object + nullable: true + required: + - type + - properties + properties: + type: + type: string + enum: + - Name + properties: + type: object + additionalProperties: false + required: + - name + properties: + name: + type: string + default: urn:ogc:def:crs:OGC:1.3:CRS84 + linkedCRS: + description: MF-JSON Prism encoding LinkedCRS Object + title: MF-JSON LinkedCRS + type: object + nullable: true + required: + - type + - properties + properties: + type: + type: string + enum: + - Link + properties: + type: object + required: + - href + - type + properties: + href: + type: string + format: uri + type: + type: string + crs: + description: MF-JSON Prism encoding CoordinateReferenceSystem Object + title: MF-JSON CRS + oneOf: + - $ref: '#/components/schemas/namedCRS' + - $ref: '#/components/schemas/linkedCRS' + trs: + description: The "trs" member in MovingFeature object + title: MF-JSON TRS + oneOf: + - 
$ref: '#/components/schemas/linkedCRS' + - type: object + nullable: true + required: + - type + - properties + properties: + type: + type: string + enum: + - Name + properties: + type: object + additionalProperties: false + required: + - name + properties: + name: + type: string + default: urn:ogc:data:time:iso8601 + temporalPrimitiveGeometry: + description: MF-JSON Prism encoding TemporalPrimitiveGeometry Object + title: MF-JSON TemporalPrimitiveGeometry + type: object + required: + - type + - coordinates + - datetimes + properties: + type: + type: string + enum: + - MovingPoint + - MovingLineString + - MovingPolygon + - MovingPointCloud + coordinates: + type: array + minItems: 2 + items: + oneOf: + - title: pointGeoJSON coordinates + type: array + minItems: 2 + items: + type: number + - title: linestringGeoJSON coordinates + type: array + minItems: 2 + items: + type: array + minItems: 2 + items: + type: number + - title: polygonGeoJSON coordinates + type: array + items: + type: array + minItems: 4 + items: + type: array + minItems: 2 + items: + type: number + - title: multipointGeoJSON coordinates + type: array + items: + type: array + minItems: 2 + items: + type: number + datetimes: + type: array + uniqueItems: true + minItems: 2 + items: + type: string + interpolation: + $ref: '#/components/schemas/motionCurve' + base: + type: object + nullable: true + required: + - href + - type + properties: + href: + type: string + format: uri + type: + type: string + orientations: + type: array + nullable: true + items: + type: object + required: + - scales + - angles + properties: + scales: + type: array + oneOf: + - minItems: 2 + maxItems: 2 + - minItems: 3 + maxItems: 3 + items: + type: number + angles: + type: array + oneOf: + - minItems: 2 + maxItems: 2 + - minItems: 3 + maxItems: 3 + items: + type: number + crs: + $ref: '#/components/schemas/crs' + trs: + $ref: '#/components/schemas/trs' + temporalComplexGeometry: + description: MF-JSON Prism encoding 
TemporalComplexGeometry Object + title: MF-JSON TemporalComplexGeometry + type: object + required: + - type + - prisms + properties: + type: + type: string + default: MovingGeometryCollection + prisms: + type: array + items: + $ref: '#/components/schemas/temporalPrimitiveGeometry' + crs: + $ref: '#/components/schemas/crs' + trs: + $ref: '#/components/schemas/trs' + temporalGeometry: + description: MF-JSON Prism encoding TemporalGeometry Object + title: MF-JSON TemporalGeometry + oneOf: + - $ref: '#/components/schemas/temporalPrimitiveGeometry' + - $ref: '#/components/schemas/temporalComplexGeometry' + parametricValues: + description: MF-JSON Prism encoding ParametricValues Object + title: MF-JSON ParametricValues + type: object + required: + - datetimes + properties: + datetimes: + type: array + uniqueItems: true + minItems: 2 + items: + type: string + format: date-time + patternProperties: + ^S_: + oneOf: + - type: object + required: + - type + - values + properties: + type: + type: string + enum: + - Measure + values: + type: array + nullable: true + items: + type: number + interpolation: + oneOf: + - type: string + enum: + - Discrete + - Step + - Linear + - Regression + default: Linear + - type: string + format: uri + description: + type: string + form: + oneOf: + - type: string + minLength: 3 + maxLength: 3 + - type: string + format: uri + - type: object + required: + - type + - values + properties: + type: + type: string + enum: + - Text + values: + type: array + nullable: true + items: + oneOf: + - type: string + - type: boolean + interpolation: + oneOf: + - type: string + enum: + - Discrete + - Step + - Linear + - Regression + default: Linear + - type: string + format: uri + description: + type: string + - type: object + required: + - type + - values + properties: + type: + type: string + enum: + - Image + values: + type: array + nullable: true + items: + type: string + interpolation: + oneOf: + - type: string + enum: + - Discrete + - Step + - Linear + - 
Regression + default: Linear + - type: string + format: uri + description: + type: string + temporalProperties: + description: MF-JSON Prism encoding TemporalProperties Object + title: MF-JSON TemporalProperties + type: array + nullable: true + items: + $ref: '#/components/schemas/parametricValues' + bbox: + description: MF-JSON Prism encoding BoundingBox Object + title: MF-JSON BoundingBox + type: array + minItems: 4 + nullable: true + items: + type: number + lifeSpan: + description: MF-JSON Prism encoding LifeSpan Object + title: MF-JSON LifeSpan + type: array + minItems: 2 + maxItems: 2 + uniqueItems: true + items: + type: string + nullable: true + pointGeoJSON: + type: object + required: + - type + - coordinates + properties: + type: + type: string + enum: + - Point + coordinates: + type: array + minItems: 2 + items: + type: number + multipointGeoJSON: + type: object + required: + - type + - coordinates + properties: + type: + type: string + enum: + - MultiPoint + coordinates: + type: array + items: + type: array + minItems: 2 + items: + type: number + linestringGeoJSON: + type: object + required: + - type + - coordinates + properties: + type: + type: string + enum: + - LineString + coordinates: + type: array + minItems: 2 + items: + type: array + minItems: 2 + items: + type: number + multilinestringGeoJSON: + type: object + required: + - type + - coordinates + properties: + type: + type: string + enum: + - MultiLineString + coordinates: + type: array + items: + type: array + minItems: 2 + items: + type: array + minItems: 2 + items: + type: number + polygonGeoJSON: + type: object + required: + - type + - coordinates + properties: + type: + type: string + enum: + - Polygon + coordinates: + type: array + items: + type: array + minItems: 4 + items: + type: array + minItems: 2 + items: + type: number + multipolygonGeoJSON: + type: object + required: + - type + - coordinates + properties: + type: + type: string + enum: + - MultiPolygon + coordinates: + type: array + 
items: + type: array + items: + type: array + minItems: 4 + items: + type: array + minItems: 2 + items: + type: number + geometryGeoJSON: + oneOf: + - $ref: '#/components/schemas/pointGeoJSON' + - $ref: '#/components/schemas/multipointGeoJSON' + - $ref: '#/components/schemas/linestringGeoJSON' + - $ref: '#/components/schemas/multilinestringGeoJSON' + - $ref: '#/components/schemas/polygonGeoJSON' + - $ref: '#/components/schemas/multipolygonGeoJSON' + - $ref: '#/components/schemas/geometrycollectionGeoJSON' + geometrycollectionGeoJSON: + type: object + required: + - type + - geometries + properties: + type: + type: string + enum: + - GeometryCollection + geometries: + type: array + items: + $ref: '#/components/schemas/geometryGeoJSON' + movingFeature: + type: object + required: + - id + - type + properties: + type: + type: string + enum: + - Feature + temporalGeometry: + $ref: '#/components/schemas/temporalGeometry' + temporalProperties: + $ref: '#/components/schemas/temporalProperties' + crs: + $ref: '#/components/schemas/crs' + trs: + $ref: '#/components/schemas/trs' + bbox: + $ref: '#/components/schemas/bbox' + time: + $ref: '#/components/schemas/lifeSpan' + geometry: + $ref: '#/components/schemas/geometryGeoJSON' + properties: + type: object + nullable: true + id: + description: An identifier for the feature + oneOf: + - type: string + - type: integer + links: + type: array + items: + $ref: '#/components/schemas/link' + movingFeatures: + type: object + required: + - type + - features + properties: + type: + type: string + enum: + - FeatureCollection + features: + type: array + nullable: true + items: + $ref: '#/components/schemas/movingFeature' + crs: + $ref: '#/components/schemas/crs' + trs: + $ref: '#/components/schemas/trs' + bbox: + $ref: '#/components/schemas/bbox' + time: + $ref: '#/components/schemas/lifeSpan' + links: + type: array + items: + $ref: '#/components/schemas/link' + timeStamp: + type: string + format: date-time + numberMatched: + type: integer 
+ minimum: 0 + numberReturned: + type: integer + minimum: 0 + movingFeature-2: + description: MF-JSON Prism encoding MovingFeature Object + title: MF-JSON MovingFeature + type: object + required: + - type + - temporalGeometry + properties: + type: + type: string + enum: + - Feature + temporalGeometry: + $ref: '#/components/schemas/temporalGeometry' + temporalProperties: + $ref: '#/components/schemas/temporalProperties' + crs: + $ref: '#/components/schemas/crs' + trs: + $ref: '#/components/schemas/trs' + bbox: + $ref: '#/components/schemas/bbox' + time: + $ref: '#/components/schemas/lifeSpan' + geometry: + $ref: '#/components/schemas/geometryGeoJSON' + properties: + type: object + nullable: true + id: + description: An identifier for the feature + oneOf: + - type: string + - type: integer + movingFeatureCollection: + description: MF-JSON Prism encoding MovingFeatureCollection Object + title: MF-JSON MovingFeatureCollection + type: object + required: + - type + - features + properties: + type: + type: string + enum: + - FeatureCollection + features: + type: array + minItems: 1 + items: + $ref: '#/components/schemas/movingFeature-2' + crs: + $ref: '#/components/schemas/crs' + trs: + $ref: '#/components/schemas/trs' + bbox: + $ref: '#/components/schemas/bbox' + time: + $ref: '#/components/schemas/lifeSpan' + label: + type: string + nullable: true + temporalGeometrySequence: + type: object + required: + - type + - geometrySequence + properties: + type: + type: string + enum: + - TemporalGeometrySequence + geometrySequence: + type: array + items: + $ref: '#/components/schemas/temporalPrimitiveGeometry' + links: + type: array + items: + $ref: '#/components/schemas/link' + timeStamp: + type: string + format: date-time + numberMatched: + type: integer + minimum: 0 + numberReturned: + type: integer + minimum: 0 + temporalPrimitiveValue: + type: object + required: + - datetimes + - values + - interpolation + properties: + datetimes: + type: array + uniqueItems: true + 
minItems: 2 + items: + type: string + format: date-time + values: + oneOf: + - type: number + - type: string + - type: boolean + interpolation: + type: string + enum: + - Discrete + - Step + - Linear + - Regression + temporalProperty: + type: object + required: + - name + - type + properties: + name: + type: string + type: + type: string + enum: + - TBoolean + - TText + - TInteger + - TReal + - TImage + form: + oneOf: + - type: string + format: uri + - type: string + minLength: 3 + maxLength: 3 + valueSequence: + type: array + uniqueItems: true + items: + $ref: '#/components/schemas/temporalPrimitiveValue' + description: + type: string + links: + type: array + items: + $ref: '#/components/schemas/link' + temporalProperties-2: + type: object + required: + - temporalProperties + properties: + temporalProperties: + oneOf: + - $ref: '#/components/schemas/temporalProperties' + - type: array + items: + $ref: '#/components/schemas/temporalProperty' + links: + type: array + items: + $ref: '#/components/schemas/link' + timeStamp: + type: string + format: date-time + numberMatched: + type: integer + minimum: 0 + numberReturned: + type: integer + minimum: 0 + responses: + LandingPage: + description: The links to the API capabilities. + content: + application/json: + schema: + $ref: '#/components/schemas/landingPage' + ServerError: + description: A server error occurred. + content: + application/json: + schema: + $ref: '#/components/schemas/exception' + example: + code: '500' + description: Server Internal Error + Conformance: + description: The URIs of all requirements classes supported by the server. + content: + application/json: + schema: + $ref: '#/components/schemas/confClasses' + Collections: + description: A list of catalogs about collections of moving features. 
+ content: + application/json: + schema: + $ref: '#/components/schemas/collections' + example: + collections: + - id: mfc-1 + title: MovingFeatureCollection_1 + description: a collection of moving features to manage data in a distinct (physical or logical) space + itemType: movingfeature + updateFrequency: 1000 + extent: + spatial: + bbox: + - -180 + - -90 + - 190 + - 90 + crs: http://www.opengis.net/def/crs/OGC/1.3/CRS84 + temporal: + interval: + - '2011-11-11T12:22:11Z' + - '2012-11-24T12:32:43Z' + trs: http://www.opengis.net/def/uom/ISO-8601/0/Gregorian + links: + - href: https://data.example.org/collections/mfc-1 + rel: self + type: application/json + links: + - href: https://data.example.org/collections + rel: self + type: application/json + Collection: + description: The metadata being returned. + content: + application/json: + schema: + $ref: '#/components/schemas/collection' + example: + id: mfc-1 + title: moving_feature_collection_sample + itemType: movingfeature + updateFrequency: 1000 + extent: + spatial: + bbox: + - -180 + - -90 + - 190 + - 90 + crs: + - http://www.opengis.net/def/crs/OGC/1.3/CRS84 + temporal: + interval: + - '2011-11-11T12:22:11Z' + - '2012-11-24T12:32:43Z' + trs: + - http://www.opengis.net/def/uom/ISO-8601/0/Gregorian + links: + - href: https://data.example.org/collections/mfc-1 + rel: self + type: application/json + MovingFeatures: + description: A list of static data of moving feature. 
+ content: + application/json: + schema: + $ref: '#/components/schemas/movingFeatures' + example: + type: FeatureCollection + features: + - id: mf-1 + type: Feature + geometry: + type: LineString + coordinates: + - - 139.757083 + - 35.627701 + - 0.5 + - - 139.757399 + - 35.627701 + - 2 + - - 139.757555 + - 35.627688 + - 4 + - - 139.757651 + - 35.627596 + - 4 + - - 139.757716 + - 35.627483 + - 4 + properties: + label: car + state: test1 + video: http://www.opengis.net/spec/movingfeatures/json/1.0/prism/example/video.mpeg + bbox: + - 139.757083 + - 35.627483 + - 0 + - 139.757716 + - 35.627701 + - 4.5 + time: + - '2011-07-14T22:01:01Z' + - '2011-07-15T01:11:22Z' + crs: + type: Name + properties: urn:ogc:def:crs:OGC:1.3:CRS84 + trs: + type: Name + properties: urn:ogc:data:time:iso8601 + crs: + type: Name + properties: urn:ogc:def:crs:OGC:1.3:CRS84 + trs: + type: Name + properties: urn:ogc:data:time:iso8601 + links: + - href: https://data.example.org/collections/mfc-1/items + rel: self + type: application/geo+json + - href: https://data.example.org/collections/mfc-1/items&offset=1&limit=1 + rel: next + type: application/geo+json + timeStamp: '2020-01-01T12:00:00Z' + numberMatched: 100 + numberReturned: 1 + MovingFeature: + description: A moving feature static data. 
+ content: + application/geo+json: + schema: + $ref: '#/components/schemas/movingFeature' + example: + id: mf-1 + type: Feature + geometry: + type: LineString + coordinates: + - - 139.757083 + - 35.627701 + - 0.5 + - - 139.757399 + - 35.627701 + - 2 + - - 139.757555 + - 35.627688 + - 4 + - - 139.757651 + - 35.627596 + - 4 + - - 139.757716 + - 35.627483 + - 4 + properties: + name: car1 + state: test1 + video: http://www.opengis.net/spec/movingfeatures/json/1.0/prism/example/video.mpeg + bbox: + - 139.757083 + - 35.627483 + - 0 + - 139.757716 + - 35.627701 + - 4.5 + time: + - '2011-07-14T22:01:01Z' + - '2011-07-15T01:11:22Z' + crs: + type: Name + properties: urn:ogc:def:crs:OGC:1.3:CRS84 + trs: + type: Name + properties: urn:ogc:data:time:iso8601 + TemporalGeometrySequence: + description: A TemporalGeometrySequence data. + content: + application/json: + schema: + $ref: '#/components/schemas/temporalGeometrySequence' + example: + type: TemporalGeometrySequence + geometrySequence: + - id: tg-1 + type: MovingPoint + datetimes: + - '2011-07-14T22:01:02Z' + - '2011-07-14T22:01:03Z' + - '2011-07-14T22:01:04Z' + coordinates: + - - 139.757399 + - 35.627701 + - 2 + - - 139.757555 + - 35.627688 + - 4 + - - 139.757651 + - 35.627596 + - 4 + interpolation: Linear + base: + type: glTF + href: https://www.opengis.net/spec/movingfeatures/json/1.0/prism/example/car3dmodel.gltf + orientations: + - scales: + - 1 + - 1 + - 1 + angles: + - 0 + - 355 + - 0 + - scales: + - 1 + - 1 + - 1 + angles: + - 0 + - 0 + - 330 + - scales: + - 1 + - 1 + - 1 + angles: + - 0 + - 0 + - 300 + crs: + type: Name + properties: urn:ogc:def:crs:OGC:1.3:CRS84 + trs: + type: Name + properties: urn:ogc:data:time:iso8601 + links: + - href: https://data.example.org/collections/mfc-1/items/mf-1/tgsequence + rel: self + type: application/json + - href: https://data.example.org/collections/mfc-1/items/mf-1/tgsequence&offset=10&limit=1 + rel: next + type: application/json + timeStamp: '2021-09-01T12:00:00Z' + 
numberMatched: 100 + numberReturned: 1 + DistanceQuery: + description: A temporal property data that represents a time-to-distance curve of specified temporal primitive geometry. + content: + application/json: + schema: + $ref: '#/components/schemas/temporalProperty' + example: + name: distance + type: TReal + form: MTR + valueSequence: + - datetimes: + - '2011-07-15T08:00:00Z' + - '2011-07-15T08:00:01Z' + - '2011-07-15T08:00:02Z' + values: + - 0 + - 10 + - 20 + interpolation: Linear + VelocityQuery: + description: A temporal property data that represents a time-to-velocity curve of specified temporal primitive geometry. + content: + application/json: + schema: + $ref: '#/components/schemas/temporalProperty' + example: + name: velocity + type: TReal + form: KMH + valueSequence: + - datetimes: + - '2011-07-15T08:00:00Z' + - '2011-07-15T08:00:01Z' + - '2011-07-15T08:00:02Z' + values: + - 0 + - 10 + - 20 + interpolation: Linear + AccelerationQuery: + description: A temporal property data that represents a time-to-acceleration curve of specified temporal primitive geometry. + content: + application/json: + schema: + $ref: '#/components/schemas/temporalProperty' + example: + name: acceleration + type: TReal + form: KMH + valueSequence: + - datetimes: + - '2011-07-15T08:00:00Z' + - '2011-07-15T08:00:01Z' + - '2011-07-15T08:00:02Z' + values: + - 0 + - 10 + - 20 + interpolation: Linear + TemporalProperties: + description: A list of static (or temporal) data of TemporalProperty. 
+ content: + application/json: + schema: + $ref: '#/components/schemas/temporalProperties-2' + example: + temporalProperties: + - datetimes: + - '2011-07-14T22:01:06.000Z' + - '2011-07-14T22:01:07.000Z' + - '2011-07-14T22:01:08.000Z' + length: + type: Measure + form: http://www.qudt.org/qudt/owl/1.0.0/quantity/Length + values: + - 1 + - 2.4 + - 1 + interpolation: Linear + speed: + type: Measure + form: KMH + values: + - 65 + - 70 + - 80 + interpolation: Linear + links: + - href: https://data.example.org/collections/mfc-1/items/mf-1/tproperties + rel: self + type: application/json + - href: https://data.example.org/collections/mfc-1/items/mf-1/tproperties&offset=2&limit=2 + rel: next + type: application/json + timeStamp: '2021-09-01T12:00:00Z' + numberMatched: 10 + numberReturned: 2 + TemporalProperty: + description: A (subsequence of) the temporal property data. + content: + application/json: + schema: + $ref: '#/components/schemas/temporalProperty' + example: + name: speed + type: TReal + form: KMH + valueSequence: + - datetimes: + - '2011-07-15T08:00:00Z' + - '2011-07-15T08:00:01Z' + - '2011-07-15T08:00:02Z' + values: + - 0 + - 20 + - 50 + interpolation: Linear + parameters: + collectionId: + name: collectionId + in: path + description: local identifier of a collection + required: true + schema: + type: string + bbox: + name: bbox + in: query + description: |- + Only features that have a geometry that intersects the bounding box are selected. 
+ The bounding box is provided as four or six numbers, depending on whether the + coordinate reference system includes a vertical axis (height or depth): + + * Lower left corner, coordinate axis 1 + * Lower left corner, coordinate axis 2 + * Minimum value, coordinate axis 3 (optional) + * Upper right corner, coordinate axis 1 + * Upper right corner, coordinate axis 2 + * Maximum value, coordinate axis 3 (optional) + + If the value consists of four numbers, the coordinate reference system is + WGS 84 longitude/latitude (http://www.opengis.net/def/crs/OGC/1.3/CRS84) + unless a different coordinate reference system is specified in the parameter `bbox-crs`. + + If the value consists of six numbers, the coordinate reference system is WGS 84 + longitude/latitude/ellipsoidal height (http://www.opengis.net/def/crs/OGC/0/CRS84h) + unless a different coordinate reference system is specified in the parameter `bbox-crs`. + + The query parameter `bbox-crs` is specified in OGC API - Features - Part 2: Coordinate + Reference Systems by Reference. + + For WGS 84 longitude/latitude the values are in most cases the sequence of + minimum longitude, minimum latitude, maximum longitude and maximum latitude. + However, in cases where the box spans the antimeridian the first value + (west-most box edge) is larger than the third value (east-most box edge). + + If the vertical axis is included, the third and the sixth number are the + bottom and the top of the 3-dimensional bounding box. + + If a feature has multiple spatial geometry properties, it is the decision of the + server whether only a single spatial geometry property is used to determine + the extent or all relevant geometries. + required: false + schema: + type: array + oneOf: + - minItems: 4 + maxItems: 4 + - minItems: 6 + maxItems: 6 + items: + type: number + style: form + explode: false + datetime: + name: datetime + in: query + description: |- + Either a date-time or an interval. Date and time expressions adhere to RFC 3339. 
+ Intervals may be bounded or half-bounded (double-dots at start or end). + + Examples: + + * A date-time: "2018-02-12T23:20:50Z" + * A bounded interval: "2018-02-12T00:00:00Z/2018-03-18T12:31:12Z" + * Half-bounded intervals: "2018-02-12T00:00:00Z/.." or "../2018-03-18T12:31:12Z" + + Only features that have temporal information that intersects the value of `datetime` are selected. + + If a feature has multiple temporal properties, it is the decision of the server whether only a single temporal property is used to determine the extent or all relevant temporal properties. + required: true + schema: + type: string + style: form + explode: false + limit: + name: limit + in: query + description: |- + The optional limit parameter limits the number of items that are presented in the response document. + + Only items are counted that are on the first level of the collection in the response document. + Nested objects contained within the explicitly requested items shall not be counted. + + Minimum = 1. Maximum = 10000. Default = 10. + required: false + schema: + type: integer + minimum: 1 + maximum: 10000 + default: 10 + style: form + explode: false + subtrajectory-description: + name: subTrajectory + in: query + required: false + description: |- + The `subTrajectory` parameter is a boolean value used with the `datetime` parameter. + If the `subTrajectory` is "true", + + * the `datetime` must be a bounded interval, not half-bounded intervals or a date-time. + * the `datetime` represents a specified time interval (new start time and new end time) + * only features with a temporal geometry intersecting the given time interval will return. + + The `subTrajectory` query implements *subTrajectory* operation, which is defined in the [OGC Moving Feature Access](https://docs.ogc.org/is/16-120r3/16-120r3.html). 
+ This operation returns only a subsequence of temporal geometry within a time interval contained in the `datetime` parameter, using interpolated trajectory according to the `interpolation` property. + + If the `subTrajectory` parameter is provided with a `bbox` parameter, it will only apply to resources that intersect with a `bbox` parameter. + + The `subTrajectory` parameter must not be used with the `leaf` parameter. + Only one of these parameters can be used in the HTTP GET operation. + schema: + type: boolean + style: form + explode: false + mFeatureId: + name: mFeatureId + in: path + description: local identifier of a moving feature + required: true + schema: + type: string + leaf-description: + name: leaf + in: query + required: false + description: |- + The `leaf` is provided as a sequence of monotonic increasing instants with date-time strings. + Only features that have a temporal geometry and property that intersects the given date-time are selected. + + The `leaf` operation implements *_pointAtTime_* operation which defined in the OGC Moving Feature Access. + This operation returns only temporal geometry coordinates (or temporal property values) + at each date-time included in the `leaf` parameter, using interpolated trajectory according to the `interpolation` property. + + If the `leaf` parameter is provided with a `bbox` or (and) a `datetime` parameter, + it will only apply to resources that intersect with a `bbox` or (and) a `datetime` parameter. + + The `leaf` parameter shall not be used with the `subTrajectory` and `subTemporalValue` parameter. + Only one of those parameters can be used in the HTTP GET operation. 
+ schema: + type: array + uniqueItems: true + minItems: 1 + items: + type: string + format: date-time + style: form + explode: false + tGeometryId: + name: tGeometryId + in: path + description: local identifier of a temporal primitive geometry + required: true + schema: + type: string + date-time: + name: date-time + in: query + description: |- + A date-time. Date and time expressions adhere to RFC 3339. + + Examples: + + * A date-time: "2018-02-12T23:20:50Z" + + The date-time parameter defines the specified date and time to return the temporal value from the time-to-distance (or time-to-velocity or time-to-acceleration) curve. + required: false + schema: + type: string + format: date-time + style: form + explode: false + subtemporalvalue-description: + name: subTemporalValue + in: query + required: false + description: |- + The `subTemporalValue` parameter is a boolean value used with the `datetime` parameter. + If the `subTemporalValue` is "true", + + * the `datetime` must be a bounded interval, not half-bounded intervals or a date-time. + * the `datetime` represents a specified time interval (new start time and new end time) + * only features with a temporal property intersecting the given time interval will return. + * it returns only the subsequence of temporal property value within a time interval contained in the `subTemporalValue` parameter, using an interpolated time-to-value curve of temporal property according to the `interpolation` property. + + The `subTemporalValue` parameter must not be used with the `leaf` parameter. + Only one of these parameters can be used in the HTTP GET operation. 
+ schema: + type: boolean + style: form + explode: false + tPropertyName: + name: tPropertyName + in: path + description: local identifier of a temporal property + required: true + schema: + type: string diff --git a/tests/test_postgresql_mobilitydb.py b/tests/test_postgresql_mobilitydb.py new file mode 100644 index 000000000..f3f82d39a --- /dev/null +++ b/tests/test_postgresql_mobilitydb.py @@ -0,0 +1,855 @@ +from pygeoapi.provider.postgresql_mobilitydb import PostgresMobilityDB +import pytest + + +@pytest.fixture(scope="session") +def context(): + return {} + + +@pytest.fixture() +def collection_property(): + return { + "title": "moving_feature_collection_sample", + "updateFrequency": 1000, + "description": "example" + } + + +@pytest.fixture() +def update_collection_property(): + return { + "title": "moving_feature_collection_sample", + "updateFrequency": 1000, + "description": "test_update" + } + + +@pytest.fixture() +def movingfeature(): + return { + "type": "Feature", + "crs": { + "type": "Name", + "properties": { + "name": "urn:ogc:def:crs:OGC:1.3:CRS84" + } + }, + "trs": { + "type": "Link", + "properties": { + "type": "OGCDEF", + "href": "http://www.opengis.net/def/uom/ISO-8601/0/Gregorian" + } + }, + "temporalGeometry": { + "type": "MovingPoint", + "datetimes": [ + "2011-07-14T22:01:01Z", + "2011-07-14T22:01:02Z", + "2011-07-14T22:01:03Z", + "2011-07-14T22:01:04Z", + "2011-07-14T22:01:05Z" + ], + "coordinates": [ + [ + 139.757083, + 35.627701, + 0.5 + ], + [ + 139.757399, + 35.627701, + 2 + ], + [ + 139.757555, + 35.627688, + 4 + ], + [ + 139.757651, + 35.627596, + 4 + ], + [ + 139.757716, + 35.627483, + 4 + ] + ], + "interpolation": "Linear", + "base": { + "type": "glTF", + "href": "http://www.opengis.net/spec/movingfeatures/json/1.0/prism/example/car3dmodel.gltf" # noqa + }, + "orientations": [ + { + "scales": [ + 1, + 1, + 1 + ], + "angles": [ + 0, + 0, + 0 + ] + }, + { + "scales": [ + 1, + 1, + 1 + ], + "angles": [ + 0, + 355, + 0 + ] + }, + { + 
"scales": [ + 1, + 1, + 1 + ], + "angles": [ + 0, + 0, + 330 + ] + }, + { + "scales": [ + 1, + 1, + 1 + ], + "angles": [ + 0, + 0, + 300 + ] + }, + { + "scales": [ + 1, + 1, + 1 + ], + "angles": [ + 0, + 0, + 270 + ] + } + ] + }, + "temporalProperties": [ + { + "datetimes": [ + "2011-07-14T22:01:01.450Z", + "2011-07-14T23:01:01.450Z", + "2011-07-15T00:01:01.450Z" + ], + "length": { + "type": "Measure", + "form": "http://www.qudt.org/qudt/owl/1.0.0/quantity/Length", # noqa + "values": [ + 1, + 2.4, + 1 + ], + "interpolation": "Linear" + }, + "discharge": { + "type": "Measure", + "form": "MQS", + "values": [ + 3, + 4, + 5 + ], + "interpolation": "Step" + } + }, + { + "datetimes": [ + 1465621816590, + 1465711526300 + ], + "camera": { + "type": "Image", + "values": [ + "http://www.opengis.net/spec/movingfeatures/json/1.0/prism/example/image1", # noqa + "iVBORw0KGgoAAAANSUhEU......" + ], + "interpolation": "Discrete" + }, + "labels": { + "type": "Text", + "values": [ + "car", + "human" + ], + "interpolation": "Discrete" + } + } + ], + "geometry": { + "type": "LineString", + "coordinates": [ + [ + 139.757083, + 35.627701, + 0.5 + ], + [ + 139.757399, + 35.627701, + 2 + ], + [ + 139.757555, + 35.627688, + 4 + ], + [ + 139.757651, + 35.627596, + 4 + ], + [ + 139.757716, + 35.627483, + 4 + ] + ] + }, + "properties": { + "name": "car1", + "state": "test1", + "video": "http://www.opengis.net/spec/movingfeatures/json/1.0/prism/example/video.mpeg" # noqa + }, + "bbox": [ + 139.757083, + 35.627483, + 0, + 139.757716, + 35.627701, + 4.5 + ], + "time": [ + "2011-07-14T22:01:01Z", + "2011-07-15T01:11:22Z" + ], + "id": "mf-1" + } + + +@pytest.fixture() +def temporalgeometry(): + return { + "type": "MovingPoint", + "datetimes": [ + "2011-07-14T22:01:06Z", + "2011-07-14T22:01:07Z", + "2011-07-14T22:01:08Z", + "2011-07-14T22:01:09Z", + "2011-07-14T22:01:10Z" + ], + "coordinates": [ + [ + 139.757083, + 35.627701, + 0.5 + ], + [ + 139.757399, + 35.627701, + 2 + ], + [ + 139.757555, + 
35.627688, + 4 + ], + [ + 139.757651, + 35.627596, + 4 + ], + [ + 139.757716, + 35.627483, + 4 + ] + ], + "interpolation": "Linear", + "base": { + "type": "glTF", + "href": "https://www.opengis.net/spec/movingfeatures/json/1.0/prism/example/car3dmodel.gltf" # noqa + }, + "orientations": [ + { + "scales": [ + 1, + 1, + 1 + ], + "angles": [ + 0, + 0, + 0 + ] + }, + { + "scales": [ + 1, + 1, + 1 + ], + "angles": [ + 0, + 355, + 0 + ] + }, + { + "scales": [ + 1, + 1, + 1 + ], + "angles": [ + 0, + 0, + 330 + ] + }, + { + "scales": [ + 1, + 1, + 1 + ], + "angles": [ + 0, + 0, + 300 + ] + }, + { + "scales": [ + 1, + 1, + 1 + ], + "angles": [ + 0, + 0, + 270 + ] + } + ] + } + + +@pytest.fixture() +def temporalproperties(): + return [ + { + "datetimes": [ + "2011-07-16T22:01:01.450Z", + "2011-07-16T23:01:01.450Z", + "2011-07-17T00:01:01.450Z" + ], + "length": { + "type": "Measure", + "form": "http://www.qudt.org/qudt/owl/1.0.0/quantity/Length", + "values": [ + 1, + 2.4, + 1 + ], + "interpolation": "Linear" + }, + "discharge": { + "type": "Measure", + "form": "MQS", + "values": [ + 3, + 4, + 5 + ], + "interpolation": "Step" + } + }, + { + "datetimes": [ + "2011-07-16T22:01:01.450Z", + "2011-07-16T23:01:01.450Z" + ], + "camera": { + "type": "Image", + "values": [ + "http://www.opengis.net/spec/movingfeatures/json/1.0/prism/example/image1", # noqa + "iVBORw0KGgoAAAANSUhEU......" 
+ ], + "interpolation": "Discrete" + }, + "labels": { + "type": "Text", + "values": [ + "car", + "human" + ], + "interpolation": "Discrete" + } + } + ] + + +@pytest.fixture() +def temporalvalue_data(): + return { + "datetimes": [ + "2011-07-18T08:00:00Z", + "2011-07-18T08:00:01Z", + "2011-07-18T08:00:02Z" + ], + "values": [ + 0, + 20, + 50 + ], + "interpolation": "Linear" + } + + +def test_query_post_collection(context, collection_property): + pmdb_provider = PostgresMobilityDB() + pmdb_provider.connect() + collection_id = pmdb_provider.post_collection(collection_property) + + assert collection_id is not None + context['collection_id'] = collection_id + + +def test_query_post_movingfeature(context, movingfeature): + pmdb_provider = PostgresMobilityDB() + pmdb_provider.connect() + mfeature_id = \ + pmdb_provider.post_movingfeature(context.get('collection_id'), + movingfeature) + + assert mfeature_id is not None + context['mfeature_id'] = mfeature_id + + +def test_query_post_temporalgeometry(context, temporalgeometry): + + pmdb_provider = PostgresMobilityDB() + pmdb_provider.connect() + tgeometry_id = \ + pmdb_provider.post_temporalgeometry(context.get('collection_id'), + context.get('mfeature_id'), + temporalgeometry) + + assert tgeometry_id is not None + context['tgeometry_id'] = tgeometry_id + + +def test_query_post_temporalproperties(context, temporalproperties): + pmdb_provider = PostgresMobilityDB() + pmdb_provider.connect() + l_temporal_properties = [temporalproperties] if not isinstance( + temporalproperties, list) else temporalproperties + + canPost = pmdb_provider.check_temporalproperty_can_post( + context.get('collection_id'), + context.get('mfeature_id'), + l_temporal_properties) + + tProperty_name_list = [] + if canPost: + for temporal_property in l_temporal_properties: + tProperty_name_list.extend(pmdb_provider. 
+ post_temporalproperties( + context.get('collection_id'), + context.get('mfeature_id'), + temporal_property)) + + assert len(tProperty_name_list) == 4 + tProperty_name = tProperty_name_list[-1] + assert tProperty_name is not None + context['tProperty_name'] = tProperty_name + + +def test_query_post_temporalvalue(context, temporalvalue_data): + + pmdb_provider = PostgresMobilityDB() + pmdb_provider.connect() + tvalue_id = pmdb_provider.post_temporalvalue(context.get('collection_id'), + context.get('mfeature_id'), + context.get('tProperty_name'), + temporalvalue_data) + + assert tvalue_id is not None + context['tvalue_id'] = tvalue_id + + +def test_query_put_collection(context, update_collection_property): + pmdb_provider = PostgresMobilityDB() + pmdb_provider.connect() + pmdb_provider.put_collection(context.get('collection_id'), + update_collection_property) + + result = pmdb_provider.get_collection(context.get('collection_id')) + collection = result[0] + assert collection[1].get('description') == 'test_update' + + +def test_query_get_collections_list(): + pmdb_provider = PostgresMobilityDB() + pmdb_provider.connect() + result = pmdb_provider.get_collections_list() + + assert result + assert len(result) > 0 + collection = result[0] + l_collection_id = collection[0] + assert l_collection_id is not None + + +def test_query_get_collections(): + pmdb_provider = PostgresMobilityDB() + pmdb_provider.connect() + result = pmdb_provider.get_collections() + + assert result + assert len(result) > 0 + collection = result[0] + l_collection_id = collection[0] + assert l_collection_id is not None + collection_property = collection[1] + assert collection_property is not None + + +def test_query_get_collection(context): + pmdb_provider = PostgresMobilityDB() + pmdb_provider.connect() + result = pmdb_provider.get_collection(context.get('collection_id')) + + assert result + assert len(result) == 1 + collection = result[0] + l_collection_id = collection[0] + assert l_collection_id is 
not None + collection_property = collection[1] + assert collection_property is not None + extentLifespan = collection[2] + assert extentLifespan is not None + extentTGeometry = collection[3] + assert extentTGeometry is not None + + +def test_query_get_features_list(): + pmdb_provider = PostgresMobilityDB() + pmdb_provider.connect() + result = pmdb_provider.get_features_list() + + assert result + assert len(result) > 0 + mfeature = result[0] + l_collection_id = mfeature[0] + assert l_collection_id is not None + l_mfeature_id = mfeature[1] + assert l_mfeature_id is not None + + +def test_query_get_tProperties_name_list(): + pmdb_provider = PostgresMobilityDB() + pmdb_provider.connect() + result = pmdb_provider.get_tProperties_name_list() + + assert result + assert len(result) > 0 + tPropertie = result[0] + l_collection_id = tPropertie[0] + assert l_collection_id is not None + l_mfeature_id = tPropertie[1] + assert l_mfeature_id is not None + tproperties_name = tPropertie[2] + assert tproperties_name is not None + + +def test_query_get_features( + context, + bbox=[ + 100, + 30, + 0, + 200, + 40, + 10], + datetime='2011-07-14 22:01:01.000,2011-07-14 22:01:01.000', + limit=10, offset=0, sub_trajectory=False): + pmdb_provider = PostgresMobilityDB() + pmdb_provider.connect() + result, number_matched, number_returned = pmdb_provider.get_features( + context.get('collection_id'), bbox, datetime, limit, offset, + sub_trajectory) + + assert result + assert number_matched + assert number_returned + assert len(result) > 0 + mfeature = result[0] + l_collection_id = mfeature[0] + assert l_collection_id is not None + l_mfeature_id = mfeature[1] + assert l_mfeature_id is not None + mf_geometry = mfeature[2] + assert mf_geometry is not None + mf_property = mfeature[3] + assert mf_property is not None + lifespan = mfeature[4] + assert lifespan is not None + extent_tGeometry = mfeature[5] + assert extent_tGeometry is not None + extent_tProperties_value_float = mfeature[6] + assert 
extent_tProperties_value_float is not None + extent_tProperties_value_text = mfeature[7] + assert extent_tProperties_value_text is not None + + +def test_query_get_feature(context): + pmdb_provider = PostgresMobilityDB() + pmdb_provider.connect() + result = pmdb_provider.get_feature(context.get('collection_id'), + context.get('mfeature_id')) + + assert result + assert len(result) > 0 + mfeature = result[0] + l_collection_id = mfeature[0] + assert l_collection_id is not None + l_mfeature_id = mfeature[1] + assert l_mfeature_id is not None + mf_geometry = mfeature[2] + assert mf_geometry is not None + mf_property = mfeature[3] + assert mf_property is not None + lifespan = mfeature[4] + assert lifespan is not None + extent_tGeometry = mfeature[5] + assert extent_tGeometry is not None + + +def test_query_get_temporalgeometries( + context, + bbox=[ + 100, + 30, + 0, + 200, + 40, + 10], + leaf='2011-07-14 22:01:01.000', + datetime='2011-07-14 22:01:01.000,2011-07-14 22:01:01.000', + limit=10, + offset=0, + sub_trajectory=False): + pmdb_provider = PostgresMobilityDB() + pmdb_provider.connect() + result, number_matched, number_returned = \ + pmdb_provider.get_temporalgeometries( + context.get('collection_id'), context.get('mfeature_id'), bbox, + leaf, datetime, limit, offset, sub_trajectory) + + assert result + assert number_matched + assert number_returned + assert len(result) > 0 + tgeometry = result[0] + l_collection_id = tgeometry[0] + assert l_collection_id is not None + l_mfeature_id = tgeometry[1] + assert l_mfeature_id is not None + tgeometry_id = tgeometry[2] + assert tgeometry_id is not None + + +def test_query_get_temporalproperties( + context, + datetime='2011-07-14 22:01:01.450,2011-07-14 22:01:01.450', + limit=10, + offset=0, + sub_temporal_value=True): + pmdb_provider = PostgresMobilityDB() + pmdb_provider.connect() + result, number_matched, number_returned = \ + pmdb_provider.get_temporalproperties( + context.get('collection_id'), 
context.get('mfeature_id'), + datetime, limit, offset, sub_temporal_value) + + assert result + assert number_matched + assert number_returned + assert len(result) > 0 + tproperties = result[0] + l_collection_id = tproperties[0] + assert l_collection_id is not None + l_mfeature_id = tproperties[1] + assert l_mfeature_id is not None + tgeometry_id = tproperties[2] + assert tgeometry_id is not None + tproperty = tproperties[3] + assert tproperty is not None + + +def test_query_get_temporalproperties_value( + context, + datetime='2011-07-16 22:01:01.450,2011-07-16 22:01:01.450', + leaf='2011-07-16 22:01:01.450', + sub_temporal_value=False): + pmdb_provider = PostgresMobilityDB() + pmdb_provider.connect() + result = pmdb_provider.get_temporalproperties_value( + context.get('collection_id'), context.get('mfeature_id'), + context.get('tProperty_name'), datetime, leaf, sub_temporal_value) + + assert result + assert len(result) > 0 + tpropertiesvalue = result[0] + l_collection_id = tpropertiesvalue[0] + assert l_collection_id is not None + l_mfeature_id = tpropertiesvalue[1] + assert l_mfeature_id is not None + tgeometry_id = tpropertiesvalue[2] + assert tgeometry_id is not None + tproperty = tpropertiesvalue[3] + assert tproperty is not None + datetime_group = tpropertiesvalue[4] + assert datetime_group is not None + pvalue_float = tpropertiesvalue[5] + pvalue_text = tpropertiesvalue[6] + assert pvalue_float is not None or pvalue_text is not None + + +def test_query_get_velocity(context, + datetime='2011-07-14 22:01:01.450'): + pmdb_provider = PostgresMobilityDB() + pmdb_provider.connect() + tProperties = pmdb_provider.get_velocity( + context.get('collection_id'), context.get('mfeature_id'), + context.get('tgeometry_id'), datetime) + + assert tProperties + name = tProperties.get('name') + assert name is not None + type = tProperties.get('type') + assert type is not None + form = tProperties.get('form') + assert form is not None + value_sequence = 
tProperties.get('valueSequence')
+    assert value_sequence is not None
+
+
+def test_query_get_distance(context,
+                            datetime='2011-07-14 22:01:01.450'):
+    pmdb_provider = PostgresMobilityDB()
+    pmdb_provider.connect()
+    tProperties = pmdb_provider.get_distance(
+        context.get('collection_id'), context.get('mfeature_id'),
+        context.get('tgeometry_id'), datetime)
+
+    assert tProperties
+    name = tProperties.get('name')
+    assert name is not None
+    type = tProperties.get('type')
+    assert type is not None
+    form = tProperties.get('form')
+    assert form is not None
+    value_sequence = tProperties.get('valueSequence')
+    assert value_sequence is not None
+
+
+def test_query_get_acceleration(context,
+                                datetime='2011-07-14 22:01:01.450'):
+    pmdb_provider = PostgresMobilityDB()
+    pmdb_provider.connect()
+    tProperties = pmdb_provider.get_acceleration(
+        context.get('collection_id'), context.get('mfeature_id'),
+        context.get('tgeometry_id'), datetime)
+
+    assert tProperties
+    name = tProperties.get('name')
+    assert name is not None
+    type = tProperties.get('type')
+    assert type is not None
+    form = tProperties.get('form')
+    assert form is not None
+    value_sequence = tProperties.get('valueSequence')
+    assert value_sequence is not None
+
+
+def test_query_delete_temporalvalue(context):
+    restriction = "AND tvalue_id ='{0}'".format(
+        context.get('tvalue_id'))
+
+    pmdb_provider = PostgresMobilityDB()
+    pmdb_provider.connect()
+    pmdb_provider.delete_temporalvalue(restriction)
+
+    assert True
+
+
+def test_query_delete_temporalproperties(context):
+    restriction = """AND collection_id ='{0}' AND mfeature_id ='{1}'
+        AND tproperties_name ='{2}'""".format(
+        context.get('collection_id'),
+        context.get('mfeature_id'),
+        context.get('tProperty_name'))
+
+    pmdb_provider = PostgresMobilityDB()
+    pmdb_provider.connect()
+    pmdb_provider.delete_temporalproperties(restriction)
+
+    assert True
+
+
+def test_query_delete_temporalgeometry(context):
+    restriction = "AND tgeometry_id 
='{0}'".format(context.get('tgeometry_id')) + + pmdb_provider = PostgresMobilityDB() + pmdb_provider.connect() + pmdb_provider.delete_temporalgeometry(restriction) + + assert True + + +def test_query_delete_movingfeature(context): + restriction = "AND mfeature_id ='{0}'".format(context.get('mfeature_id')) + + pmdb_provider = PostgresMobilityDB() + pmdb_provider.connect() + pmdb_provider.delete_movingfeature(restriction) + + result = pmdb_provider.get_feature(context.get('collection_id'), + context.get('mfeature_id')) + assert len(result) == 0 + + +def test_query_delete_collection(context): + restriction = "AND collection_id ='{0}'".format( + context.get('collection_id')) + + pmdb_provider = PostgresMobilityDB() + pmdb_provider.connect() + pmdb_provider.delete_collection(restriction) + + result = pmdb_provider.get_collection(context.get('collection_id')) + assert len(result) == 0