diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml index c94189a7c..c8b2a6b48 100644 --- a/.github/FUNDING.yml +++ b/.github/FUNDING.yml @@ -1,4 +1,4 @@ # These are supported funding model platforms #github: # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2] -custom: ['https://www.paypal.com/cgi-bin/webscr?cmd=_s-xclick&hosted_button_id=WT27AS28UFSNW&source=url'] # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2'] +custom: ['https://github.com/geopython/pygeoapi/wiki/Sponsorship'] # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2'] diff --git a/.github/workflows/containers.yml b/.github/workflows/containers.yml index 67544fed2..04897a440 100644 --- a/.github/workflows/containers.yml +++ b/.github/workflows/containers.yml @@ -12,7 +12,7 @@ on: branches: [master] env: - DOCKER_REPOSITORY: geopython/pygeoapi + DOCKER_REPOSITORY: ${{ secrets.DOCKER_REPOSITORY || 'geopython/pygeoapi' }} # DOCKER_TEST_IMAGE: geopython/pygeoapi:test jobs: diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index bfbd6bf9e..3805b9aa0 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -91,20 +91,19 @@ jobs: pip3 install -r requirements-starlette.txt pip3 install -r requirements-dev.txt pip3 install -r requirements-provider.txt + pip3 install -r requirements-manager.txt pip3 install -r requirements-django.txt python3 setup.py install - pip3 install --upgrade numpy elasticsearch - pip3 install --upgrade numpy "sqlalchemy<2" pip3 install --global-option=build_ext --global-option="-I/usr/include/gdal" GDAL==`gdal-config --version` #pip3 install --upgrade rasterio==1.1.8 - name: setup test data ⚙️ run: | python3 tests/load_es_data.py tests/data/ne_110m_populated_places_simple.geojson geonameid - python3 tests/load_es_data.py tests/cite/canada-hydat-daily-mean-02HC003.geojson IDENTIFIER python3 tests/load_mongo_data.py tests/data/ne_110m_populated_places_simple.geojson gunzip < tests/data/hotosm_bdi_waterways.sql.gz | psql postgresql://postgres:${{ secrets.DatabasePassword || 'postgres' }}@localhost:5432/test psql postgresql://postgres:${{ secrets.DatabasePassword || 'postgres' }}@localhost:5432/test -f tests/data/dummy_data.sql psql postgresql://postgres:${{ secrets.DatabasePassword || 'postgres' }}@localhost:5432/test -f tests/data/dummy_types_data.sql + psql postgresql://postgres:${{ secrets.DatabasePassword || 'postgres' }}@localhost:5432/test -f tests/data/postgres_manager_full_structure.backup.sql docker ps python3 tests/load_oracle_data.py - name: run unit tests ⚙️ @@ -128,6 +127,7 @@ jobs: pytest tests/test_ogr_shapefile_provider.py pytest tests/test_ogr_sqlite_provider.py pytest tests/test_ogr_wfs_provider.py + pytest tests/test_postgresql_manager.py # pytest tests/test_ogr_wfs_provider_live.py # NOTE: these are skipped in the file but listed here for completeness pytest tests/test_openapi.py pytest tests/test_oracle_provider.py diff --git a/.github/workflows/vulnerabilities.yml b/.github/workflows/vulnerabilities.yml index de36b8624..f87d2789c 100644 --- a/.github/workflows/vulnerabilities.yml +++ b/.github/workflows/vulnerabilities.yml @@ -26,7 +26,7 @@ jobs: with: string: ${{ github.repository }} - name: Checkout pygeoapi - uses: actions/checkout@v4 + uses: actions/checkout@master - name: Scan vulnerabilities with trivy uses: aquasecurity/trivy-action@master with: @@ -41,6 +41,9 @@ jobs: docker buildx build -t ${{ steps.string.outputs.lowercase }}:${{ github.sha }} --platform linux/amd64 --no-cache 
-f Dockerfile . - name: Scan locally built Docker image for vulnerabilities with trivy uses: aquasecurity/trivy-action@master + env: + TRIVY_DB_REPOSITORY: public.ecr.aws/aquasecurity/trivy-db:2 + TRIVY_JAVA_DB_REPOSITORY: public.ecr.aws/aquasecurity/trivy-java-db:1 with: scan-type: image exit-code: 1 diff --git a/Dockerfile b/Dockerfile index 98e7ffc18..04f1a20db 100644 --- a/Dockerfile +++ b/Dockerfile @@ -34,7 +34,7 @@ # # ================================================================= -FROM ubuntu:jammy-20240627.1 +FROM ubuntu:jammy-20240911.1 LABEL maintainer="Just van den Broecke " @@ -98,7 +98,6 @@ ENV TZ=${TZ} \ python3-greenlet \ python3-pip \ python3-tz \ - python3-unicodecsv \ python3-yaml \ ${ADD_DEB_PACKAGES}" diff --git a/docker/default.config.yml b/docker/default.config.yml index bc20d7b58..81d9e2883 100644 --- a/docker/default.config.yml +++ b/docker/default.config.yml @@ -48,8 +48,8 @@ server: limit: 10 # templates: /path/to/templates map: - url: https://maps.wikimedia.org/osm-intl/{z}/{x}/{y}.png - attribution: 'Wikimedia maps | Map data © OpenStreetMap contributors' + url: https://tile.openstreetmap.org/{z}/{x}/{y}.png + attribution: '© OpenStreetMap contributors' ogc_schemas_location: /schemas.opengis.net logging: @@ -200,7 +200,7 @@ resources: hreflang: nl-NL extents: spatial: - bbox: [50.7539, 7.21097, 53.4658, 3.37087] + bbox: [3.37,50.75,7.21,53.47] crs: http://www.opengis.net/def/crs/OGC/1.3/CRS84 temporal: begin: @@ -301,7 +301,7 @@ resources: hreflang: en-US extents: spatial: - bbox: [36.0, 17.0, 46.0, 18.0] + bbox: [5.0,36.0,20.0,46.0] crs: http://www.opengis.net/def/crs/OGC/1.3/CRS84 temporal: begin: @@ -438,7 +438,7 @@ resources: hreflang: nl-NL extents: spatial: - bbox: [50.7539, 7.21097, 53.4658, 3.37087] + bbox: [3.37,50.75,7.21,53.47] crs: http://www.opengis.net/def/crs/OGC/1.3/CRS84 temporal: begin: @@ -481,7 +481,7 @@ resources: hreflang: nl-NL extents: spatial: - bbox: [50.7539, 7.21097, 53.4658, 3.37087] + bbox: [3.37,50.75,7.21,53.47] crs: http://www.opengis.net/def/crs/OGC/1.3/CRS84 temporal: begin: diff --git a/docs/source/_templates/indexsidebar.html b/docs/source/_templates/indexsidebar.html index 8000f32dd..a782c3ba6 100644 --- a/docs/source/_templates/indexsidebar.html +++ b/docs/source/_templates/indexsidebar.html @@ -16,11 +16,14 @@ OGC Reference Implementation + + OGC Reference Implementation + OSGeo Project - - FOSS4G Conference + + FOSS4G Conference

diff --git a/docs/source/conf.py b/docs/source/conf.py index dff89f13c..6a9e11d61 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -112,7 +112,7 @@ def __getattr__(cls, name): # built documents. # # The short X.Y version. -version = '0.18.dev0' +version = '0.19.dev0' # The full version, including alpha/beta/rc tags. release = version diff --git a/docs/source/configuration.rst b/docs/source/configuration.rst index 0c81b7126..2fa41d7f0 100644 --- a/docs/source/configuration.rst +++ b/docs/source/configuration.rst @@ -57,8 +57,8 @@ For more information related to API design rules (the ``api_rules`` property in static: /path/to/static/folder # path to static folder containing css, js, images and other static files referenced by the template map: # leaflet map setup for HTML pages - url: https://maps.wikimedia.org/osm-intl/{z}/{x}/{y}.png - attribution: 'Wikimedia maps | Map data © OpenStreetMap contributors' + url: https://tile.openstreetmap.org/{z}/{x}/{y}.png + attribution: '© OpenStreetMap contributors' ogc_schemas_location: /opt/schemas.opengis.net # local copy of https://schemas.opengis.net manager: # optional OGC API - Processes asynchronous job management @@ -206,6 +206,7 @@ default. temporal: # optional begin: 2000-10-30T18:24:39Z # start datetime in RFC3339 end: 2007-10-30T08:57:29Z # end datetime in RFC3339 + trs: http://www.opengis.net/def/uom/ISO-8601/0/Gregorian # TRS providers: # list of 1..n required connections information # provider name # see pygeoapi.plugin for supported providers @@ -240,7 +241,7 @@ default. option_name: option_value hello-world: # name of process - type: collection # REQUIRED (collection, process, or stac-collection) + type: process # REQUIRED (collection, process, or stac-collection) processor: name: HelloWorld # Python path of process definition diff --git a/docs/source/data-publishing/ogcapi-coverages.rst b/docs/source/data-publishing/ogcapi-coverages.rst index 6e06b2e30..76ed0de39 100644 --- a/docs/source/data-publishing/ogcapi-coverages.rst +++ b/docs/source/data-publishing/ogcapi-coverages.rst @@ -72,9 +72,12 @@ The `Xarray`_ provider plugin reads and extracts `NetCDF`_ and `Zarr`_ data. data: tests/data/coads_sst.nc # optionally specify x/y/time fields, else provider will attempt # to derive automagically - x_field: lat x_field: lon + y_field: lat time_field: time + # optionally specify the coordinate reference system of your dataset + # else pygeoapi assumes it is WGS84 (EPSG:4326). + storage_crs: 4326 format: name: netcdf mimetype: application/x-netcdf @@ -96,6 +99,11 @@ The `Xarray`_ provider plugin reads and extracts `NetCDF`_ and `Zarr`_ data. be sure to provide the full S3 URL. Any parameters required to open the dataset using fsspec can be added to the config file under `options` and `s3`. +.. note:: + When providing a `storage_crs` value in the xarray configuration, specify the + coordinate reference system using any valid input for + `pyproj.CRS.from_user_input`_. + Data access examples -------------------- @@ -146,3 +154,4 @@ Data access examples .. _`NetCDF`: https://en.wikipedia.org/wiki/NetCDF .. _`Zarr`: https://zarr.readthedocs.io/en/stable .. _`GDAL raster driver short name`: https://gdal.org/drivers/raster/index.html +.. 
_`pyproj.CRS.from_user_input`: https://pyproj4.github.io/pyproj/stable/api/crs/coordinate_system.html#pyproj.crs.CoordinateSystem.from_user_input diff --git a/docs/source/data-publishing/ogcapi-edr.rst b/docs/source/data-publishing/ogcapi-edr.rst index 1014b40d3..83a2a76bd 100644 --- a/docs/source/data-publishing/ogcapi-edr.rst +++ b/docs/source/data-publishing/ogcapi-edr.rst @@ -44,9 +44,12 @@ The `xarray-edr`_ provider plugin reads and extracts `NetCDF`_ and `Zarr`_ data data: tests/data/coads_sst.nc # optionally specify x/y/time fields, else provider will attempt # to derive automagically - x_field: lat x_field: lon + y_field: lat time_field: time + # optionally specify the coordinate reference system of your dataset + # else pygeoapi assumes it is WGS84 (EPSG:4326). + storage_crs: 4326 format: name: netcdf mimetype: application/x-netcdf @@ -81,25 +84,36 @@ The `xarray-edr`_ provider plugin reads and extracts `NetCDF`_ and `Zarr`_ data S3 URL. Any parameters required to open the dataset using fsspec can be added to the config file under `options` and `s3`, as shown above. +.. note:: + When providing a `storage_crs` value in the EDR configuration, specify the + coordinate reference system using any valid input for + `pyproj.CRS.from_user_input`_. + Data access examples -------------------- * list all collections + * http://localhost:5000/collections * overview of dataset + * http://localhost:5000/collections/foo * dataset position query + * http://localhost:5000/collections/foo/position?coords=POINT(-75%2045) * dataset position query for a specific parameter + * http://localhost:5000/collections/foo/position?coords=POINT(-75%2045)¶meter-name=SST * dataset position query for a specific parameter and time step + * http://localhost:5000/collections/foo/position?coords=POINT(-75%2045)¶meter-name=SST&datetime=2000-01-16 .. _`xarray`: https://docs.xarray.dev/en/stable/ .. _`NetCDF`: https://en.wikipedia.org/wiki/NetCDF .. _`Zarr`: https://zarr.readthedocs.io/en/stable +.. _`pyproj.CRS.from_user_input`: https://pyproj4.github.io/pyproj/stable/api/crs/coordinate_system.html#pyproj.crs.CoordinateSystem.from_user_input .. _`OGC Environmental Data Retrieval (EDR) (API)`: https://github.com/opengeospatial/ogcapi-coverages diff --git a/docs/source/data-publishing/ogcapi-features.rst b/docs/source/data-publishing/ogcapi-features.rst index a8e2c9a55..12478c1d2 100644 --- a/docs/source/data-publishing/ogcapi-features.rst +++ b/docs/source/data-publishing/ogcapi-features.rst @@ -21,16 +21,18 @@ parameters. `CSV`_,✅/✅,results/hits,❌,❌,❌,✅,❌,❌,✅ `Elasticsearch`_,✅/✅,results/hits,✅,✅,✅,✅,✅,✅,✅ - `ERDDAP Tabledap Service`_,❌/❌,results/hits,✅,✅,❌,❌,❌,❌,❌ + `ERDDAP Tabledap Service`_,❌/❌,results/hits,✅,✅,❌,❌,❌,❌,✅ `ESRI Feature Service`_,✅/✅,results/hits,✅,✅,✅,✅,❌,❌,✅ `GeoJSON`_,✅/✅,results/hits,❌,❌,❌,✅,❌,❌,✅ `MongoDB`_,✅/❌,results,✅,✅,✅,✅,❌,❌,✅ `OGR`_,✅/❌,results/hits,✅,❌,❌,✅,❌,❌,✅ `Oracle`_,✅/✅,results/hits,✅,❌,✅,✅,❌,❌,✅ + `Parquet`_,✅/✅,results/hits,✅,✅,❌,✅,❌,❌,✅ `PostgreSQL`_,✅/✅,results/hits,✅,✅,✅,✅,✅,❌,✅ `SQLiteGPKG`_,✅/❌,results/hits,✅,❌,❌,✅,❌,❌,✅ `SensorThings API`_,✅/✅,results/hits,✅,✅,✅,✅,❌,❌,✅ `Socrata`_,✅/✅,results/hits,✅,✅,✅,✅,❌,❌,✅ + `TinyDB`_,✅/✅,results/hits,✅,✅,✅,✅,❌,✅,✅ .. note:: @@ -70,20 +72,6 @@ definition. - http://www.opengis.net/def/crs/EPSG/0/4326 storage_crs: http://www.opengis.net/def/crs/EPSG/0/28992 - -GeoJSON -^^^^^^^ - -To publish a GeoJSON file, the file must be a valid GeoJSON FeatureCollection. - -.. 
code-block:: yaml - - providers: - - type: feature - name: GeoJSON - data: tests/data/file.json - id_field: id - .. _Elasticsearch: Elasticsearch @@ -125,15 +113,43 @@ The ES provider also has the support for the CQL queries as indicated in the tab .. seealso:: :ref:`cql` for more details on how to use Common Query Language (CQL) to filter the collection with specific queries. +.. _ERDDAP Tabledap Service: + +ERDDAP Tabledap Service +^^^^^^^^^^^^^^^^^^^^^^^ + +.. note:: + Requires Python package `requests`_ + +To publish from an ERDDAP `Tabledap`_ service, the following are required in your index: + +.. code-block:: yaml + + providers: + - type: feature + name: ERDDAPTabledap + data: http://osmc.noaa.gov/erddap/tabledap/OSMC_Points + id_field: PLATFORM_CODE + time_field: time + options: + filters: "¶meter=\"SLP\"&platform!=\"C-MAN%20WEATHER%20STATIONS\"&platform!=\"TIDE GAUGE STATIONS (GENERIC)\"" + max_age_hours: 12 + +.. note:: + If the ``datetime`` parameter is passed by the client, this overrides the ``options.max_age_hours`` setting. ESRI Feature Service ^^^^^^^^^^^^^^^^^^^^ -To publish an `ESRI Feature Service`_ or `ESRI Map Service`_ specify the URL for the service layer in the ``data`` field. +To publish an ESRI `Feature Service`_ or `Map Service`_ specify the URL for the service layer in the ``data`` field. * ``id_field`` will often be ``OBJECTID``, ``objectid``, or ``FID``. * If the map or feature service is not shared publicly, the ``username`` and ``password`` fields can be set in the - configuration to authenticate into the service. + configuration to authenticate to the service. +* If the map or feature service is self-hosted and not shared publicly, the ``token_service`` and optional ``referer`` fields + can be set in the configuration to authenticate to the service. + +To publish from an ArcGIS online hosted service: .. code-block:: yaml @@ -146,7 +162,64 @@ To publish an `ESRI Feature Service`_ or `ESRI Map Service`_ specify the URL for crs: 4326 # Optional crs (default is EPSG:4326) username: username # Optional ArcGIS username password: password # Optional ArcGIS password + token_service: https://your.server.com/arcgis/sharing/rest/generateToken # optional URL to your generateToken service + referer: https://your.server.com # optional referer, defaults to https://www.arcgis.com if not set + +To publish from a self-hosted service that is not publicly accessible, the ``token_service`` field is required: +.. code-block:: yaml + + providers: + - type: feature + name: ESRI + data: https://your.server.com/arcgis/rest/services/your-layer/MapServer/0 + id_field: objectid + time_field: date_in_your_device_time_zone # Optional time field + crs: 4326 # Optional crs (default is EPSG:4326) + username: username # Optional ArcGIS username + password: password # Optional ArcGIS password + token_service: https://your.server.com/arcgis/sharing/rest/generateToken # Optional url to your generateToken service + referer: https://your.server.com # Optional referer, defaults to https://www.arcgis.com if not set + +GeoJSON +^^^^^^^ + +To publish a GeoJSON file, the file must be a valid GeoJSON FeatureCollection. + +.. code-block:: yaml + + providers: + - type: feature + name: GeoJSON + data: tests/data/file.json + id_field: id + +MongoDB +^^^^^^^ + +.. note:: + Requires Python package pymongo + +.. note:: + Mongo 5 or greater is supported. + +MongoDB (`website `_) is a powerful and versatile NoSQL database that provides numerous advantages, making it a preferred choice for many applications. 
One of the main reasons to use MongoDB is its ability to handle large volumes of unstructured data, making it ideal for managing diverse data types such as text, geospatial, and multimedia data. Additionally, MongoDB's flexible document model allows for easy schema evolution, enabling developers to iterate quickly and adapt to changing requirements. + +`MongoDB GeoJSON `_ support is available, thus a GeoJSON file can be added to MongoDB using the following command: + +`mongoimport --db test -c points --file "path/to/file.geojson" --jsonArray` + +Here `test` is the name of the database and `points` is the target collection name. + +* each document must be a GeoJSON Feature, with a valid geometry. + +.. code-block:: yaml + + providers: + - type: feature + name: MongoDB + data: mongodb://localhost:27017/testdb + collection: testplaces OGR ^^^ @@ -249,34 +322,6 @@ The OGR provider requires a recent (3+) version of GDAL to be installed. The `crs` query parameter is used as follows: e.g. ``http://localhost:5000/collections/foo/items?crs=http%3A%2F%2Fwww.opengis.net%2Fdef%2Fcrs%2FEPSG%2F0%2F28992``. - -MongoDB -^^^^^^^ - -.. note:: - Requires Python package pymongo - -.. note:: - Mongo 5 or greater is supported. - -`MongoDB `_ is a powerful and versatile NoSQL database that provides numerous advantages, making it a preferred choice for many applications. One of the main reasons to use MongoDB is its ability to handle large volumes of unstructured data, making it ideal for managing diverse data types such as text, geospatial, and multimedia data. Additionally, MongoDB's flexible document model allows for easy schema evolution, enabling developers to iterate quickly and adapt to changing requirements. - -`GeoJSON `_ support is available officially by MongoDB , thus a GeoJSON file can be added to MongoDB using following command - -`mongoimport --db test -c points --file "path/to/file.geojson" --jsonArray` - -Here `test` is the name of database , `points` is the target collection name. - -* each document must be a GeoJSON Feature, with a valid geometry. - -.. code-block:: yaml - - providers: - - type: feature - name: MongoDB - data: mongodb://localhost:27017/testdb - collection: testplaces - .. _Oracle: Oracle @@ -365,7 +410,7 @@ configure mandatory properties. When this is activated, the provider throws an e is not in the query uri. Extra properties -"""""""""""""""""""" +"""""""""""""""" .. code-block:: yaml providers: @@ -388,7 +433,7 @@ Extra properties is a list of strings which are added as fields for data retriev can be used to return expressions computed by the database. Session Pooling -"""""""""""""""" +""""""""""""""" Configured using environment variables. .. code-block:: bash export ORACLE_POOL_MIN=2 export ORACLE_POOL_MAX=10 -The ``ORACLE_POOL_MIN`` and ``ORACLE_POOL_MAX`` environment variables are used to trigger session pool creation in the Oracle Provider and the ``DatabaseConnection`` class. See https://python-oracledb.readthedocs.io/en/latest/api_manual/module.html#oracledb.create_pool for documentation of the ``create_pool`` function. +The ``ORACLE_POOL_MIN`` and ``ORACLE_POOL_MAX`` environment variables are used to trigger session pool creation in the Oracle Provider and the ``DatabaseConnection`` class. Authentication via user + password or wallet is supported; for an example configuration, see the Oracle connection examples above. See https://python-oracledb.readthedocs.io/en/latest/api_manual/module.html#oracledb.create_pool for documentation of the ``create_pool`` function; a minimal sketch of this mechanism follows below.
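For illustration only, here is a minimal sketch of what these two settings amount to. This is not pygeoapi's internal code; it uses ``oracledb.create_pool`` as documented by python-oracledb, and the credentials and DSN are placeholders:

.. code-block:: python

    # sketch: how ORACLE_POOL_MIN/ORACLE_POOL_MAX-style settings map onto
    # python-oracledb session pooling; credentials and DSN are placeholders
    import os

    import oracledb

    pool = oracledb.create_pool(
        user='scott', password='tiger',   # placeholder credentials
        dsn='localhost:1521/XEPDB1',      # placeholder DSN
        min=int(os.environ['ORACLE_POOL_MIN']),
        max=int(os.environ['ORACLE_POOL_MAX']),
        increment=1)

    # each request then borrows a pooled connection instead of opening
    # a standalone connection
    with pool.acquire() as conn:
        with conn.cursor() as cur:
            cur.execute('SELECT 1 FROM dual')
            print(cur.fetchone())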
If none or only one of the environment variables is set, session pooling will not be activated and standalone connections are established at every request. @@ -410,6 +455,40 @@ useful e.g. for authorization at row level or manipulation of the explain plan w An example and more information about this feature can be found in the test class in tests/test_oracle_provider.py. +.. _Parquet: + +Parquet +^^^^^^^ + +.. note:: + Requires Python package pyarrow + +To publish a GeoParquet file (with a geometry column), the geopandas package is also required. + +.. note:: + Reading data directly from a public s3 bucket is also supported. + +.. code-block:: yaml + + providers: + - type: feature + name: Parquet + data: + source: ./tests/data/parquet/random.parquet + id_field: id + time_field: time + x_field: + - minlon + - maxlon + y_field: + - minlat + - maxlat + +For GeoParquet data, the `x_field` and `y_field` must be specified in the provider definition, +and they must be arrays of two column names that contain the x and y coordinates of the +bounding box of each geometry. If the geometries in the data are all points, the `x_field` and `y_field` +can be strings instead of arrays and refer to a single column each. + .. _PostgreSQL: PostgreSQL @@ -593,31 +672,25 @@ To publish a `Socrata Open Data API (SODA)`_ endpoint, pygeoapi heavily relies o token: my_token # Optional app token -.. _ERDDAP Tabledap Service: - -ERDDAP Tabledap Service -^^^^^^^^^^^^^^^^^^^^^^^ +TinyDB +^^^^^^ .. note:: - Requires Python package `requests`_ + Requires Python package tinydb -To publish from an ERDDAP `Tabledap`_ service, the following are required in your index: +To publish a TinyDB (`see website `_) index, the following are required in your index: + +* indexes must be documents of valid GeoJSON Features .. code-block:: yaml providers: - type: feature - name: ERDDAPTabledap - data: http://osmc.noaa.gov/erddap/tabledap/OSMC_Points - id_field: PLATFORM_CODE - time_field: time - options: - filters: "&parameter=\"SLP\"&platform!=\"C-MAN%20WEATHER%20STATIONS\"&platform!=\"TIDE GAUGE STATIONS (GENERIC)\"" - max_age_hours: 12 - - -.. note:: - If the ``datetime`` parameter is passed by the client, this overrides the ``options.max_age_hours`` setting.
+ editable: true|false # optional, default is false + name: TinyDB + data: /path/to/file.db + id_field: identifier + time_field: datetimefield Controlling the order of properties ----------------------------------- @@ -636,36 +709,52 @@ Data access examples -------------------- * list all collections + * http://localhost:5000/collections * overview of dataset + * http://localhost:5000/collections/foo * queryables + * http://localhost:5000/collections/foo/queryables * browse features + * http://localhost:5000/collections/foo/items * paging + * http://localhost:5000/collections/foo/items?offset=10&limit=10 * CSV outputs + * http://localhost:5000/collections/foo/items?f=csv * query features (spatial) + * http://localhost:5000/collections/foo/items?bbox=-180,-90,180,90 * query features (spatial with bbox-crs) + * http://localhost:5000/collections/foo/items?bbox=120000,450000,130000,460000&bbox-crs=http%3A%2F%2Fwww.opengis.net%2Fdef%2Fcrs%2FEPSG%2F0%2F28992 * query features (attribute) + * http://localhost:5000/collections/foo/items?propertyname=foo * query features (temporal) + * http://localhost:5000/collections/foo/items?datetime=2020-04-10T14:11:00Z * query features (temporal) and sort ascending by a property (if no +/- indicated, + is assumed) + * http://localhost:5000/collections/foo/items?datetime=2020-04-10T14:11:00Z&sortby=+datetime * query features (temporal) and sort descending by a property + * http://localhost:5000/collections/foo/items?datetime=2020-04-10T14:11:00Z&sortby=-datetime * query features in a given (and supported) CRS + * http://localhost:5000/collections/foo/items?crs=http%3A%2F%2Fwww.opengis.net%2Fdef%2Fcrs%2FEPSG%2F0%2F32633 * query features in a given bounding BBOX and return in given CRS + * http://localhost:5000/collections/foo/items?bbox=120000,450000,130000,460000&bbox-crs=http%3A%2F%2Fwww.opengis.net%2Fdef%2Fcrs%2FEPSG%2F0%2F28992&crs=http%3A%2F%2Fwww.opengis.net%2Fdef%2Fcrs%2FEPSG%2F0%2F32633 * fetch a specific feature + * http://localhost:5000/collections/foo/items/123 * fetch a specific feature in a given (and supported) CRS + * http://localhost:5000/collections/foo/items/123?crs=http%3A%2F%2Fwww.opengis.net%2Fdef%2Fcrs%2FEPSG%2F0%2F32633 .. note:: @@ -682,8 +771,8 @@ Data access examples provider `id_field` values support slashes (i.e. ``my/cool/identifier``). The client request would then be responsible for encoding the identifier accordingly (i.e. ``http://localhost:5000/collections/foo/items/my%2Fcool%2Fidentifier``) -.. _`ESRI Feature Service`: https://enterprise.arcgis.com/en/server/latest/publish-services/windows/what-is-a-feature-service-.htm -.. _`ESRI Map Service`: https://enterprise.arcgis.com/en/server/latest/publish-services/windows/what-is-a-map-service.htm +.. _`Feature Service`: https://enterprise.arcgis.com/en/server/latest/publish-services/windows/what-is-a-feature-service-.htm +.. _`Map Service`: https://enterprise.arcgis.com/en/server/latest/publish-services/windows/what-is-a-map-service.htm .. _`Google Cloud SQL`: https://cloud.google.com/sql .. _`OGC API - Features`: https://www.ogc.org/standards/ogcapi-features .. _`Socrata Open Data API (SODA)`: https://dev.socrata.com diff --git a/docs/source/data-publishing/ogcapi-maps.rst b/docs/source/data-publishing/ogcapi-maps.rst index 8e3dc7c04..03846eeaa 100644 --- a/docs/source/data-publishing/ogcapi-maps.rst +++ b/docs/source/data-publishing/ogcapi-maps.rst @@ -18,7 +18,7 @@ parameters. 
:header: Provider, bbox, width/height :align: left - `MapScript`,✅,✅ + `MapScript`_,✅,✅ `WMSFacade`_,✅,✅ @@ -30,7 +30,7 @@ Connection examples MapScript ^^^^^^^^^ -`MapScript`_ is MapServer's scripting interface to map rendering. +MapScript (`see website`_) is MapServer's scripting interface to map rendering. To publish a map via MapScript, the path to data is required, as well as the layer type (`options.type`). To style the data, set `options.style`. If @@ -51,7 +51,7 @@ Currently supported style files (`options.style`): .. code-block:: yaml providers: - - type: map + - type: map name: MapScript data: /path/to/data.shp options: @@ -59,7 +59,7 @@ Currently supported style files (`options.style`): layer: foo_name style: ./foo.sld format: - name: png + name: png mimetype: image/png WMSFacade @@ -71,14 +71,15 @@ required. An optional style name can be defined via `options.style`. .. code-block:: yaml providers: - - type: map + - type: map name: WMSFacade data: https://demo.mapserver.org/cgi-bin/msautotest options: layer: world_latlong style: default + version: 1.3.0 format: - name: png + name: png mimetype: image/png @@ -86,17 +87,23 @@ Data visualization examples --------------------------- * list all collections + * http://localhost:5000/collections * overview of dataset + * http://localhost:5000/collections/foo * map (default format) + * http://localhost:5000/collections/foo/map * map with bbox subset + * http://localhost:5000/collections/foo/map?bbox=-142,42,-52,84 * map with bbox and temporal subset + * http://localhost:5000/collections/foo/map?bbox=-142,42,-52,84&datetime=2020-04-10T14:11:00Z * map with bbox and bbox-crs + * http://localhost:5000/collections/foo/map?bbox-crs=http%3A%2F%2Fwww.opengis.net%2Fdef%2Fcrs%2FEPSG%2F0%2F3857&bbox=4.022369384765626%2C50.690447870569436%2C4.681549072265626%2C51.00260125274477&width=800&height=600&transparent .. _`OGC API - Maps`: https://ogcapi.ogc.org/maps -.. _`MapScript`: https://mapserver.org/mapscript/index.html +.. _`see website`: https://mapserver.org/mapscript/index.html diff --git a/docs/source/data-publishing/ogcapi-processes.rst b/docs/source/data-publishing/ogcapi-processes.rst index c11f9f338..6cd214c5c 100644 --- a/docs/source/data-publishing/ogcapi-processes.rst +++ b/docs/source/data-publishing/ogcapi-processes.rst @@ -14,15 +14,47 @@ The pygeoapi offers two processes: a default ``hello-world`` process which allow Configuration ------------- +The below configuration is an example of a process defined within the pygeoapi internal plugin registry: + .. code-block:: yaml processes: - - # enabled by default + # enabled by default hello-world: processor: name: HelloWorld +The below configuration is an example of a process defined as part of a custom Python process: + +.. code-block:: yaml + + processes: + # enabled by default + hello-world: + processor: + # refer to a process in the standard PYTHONPATH + # e.g. my_package/my_module/my_file.py (class MyProcess) + # the MyProcess class must subclass from pygeoapi.process.base.BaseProcessor + name: my_package.my_module.my_file.MyProcess + +See :ref:`example-custom-pygeoapi-processing-plugin` for processing plugin examples. + +Processing and response handling +-------------------------------- + +pygeoapi processing plugins must return a tuple of media type and native outputs. Multipart +responses are not supported at this time, and it is up to the process plugin implementor to return a single +payload defining multiple artifacts (or references to them). 
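As a concrete illustration, a minimal processor sketch follows. The module path ``my_package.my_module.my_file`` matches the hypothetical configuration example above, and the process metadata is heavily abbreviated (a real process declares full inputs, outputs and examples); see :ref:`example-custom-pygeoapi-processing-plugin` for a complete example:

.. code-block:: python

    # my_package/my_module/my_file.py: a minimal processing plugin sketch
    from pygeoapi.process.base import BaseProcessor, ProcessorExecuteError

    # abbreviated metadata for illustration only
    PROCESS_METADATA = {
        'version': '0.1.0',
        'id': 'my-process',
        'title': {'en': 'My process'},
        'description': {'en': 'Echoes a name input'},
        'jobControlOptions': ['sync-execute', 'async-execute'],
        'inputs': {},
        'outputs': {}
    }


    class MyProcess(BaseProcessor):
        def __init__(self, processor_def):
            super().__init__(processor_def, PROCESS_METADATA)

        def execute(self, data, outputs=None):
            name = data.get('name')
            if name is None:
                raise ProcessorExecuteError('Cannot process without a name')

            # the required return value: a (media type, native output) tuple
            return 'application/json', {'id': 'echo', 'value': f'Hello {name}'}

        def __repr__(self):
            return f'<MyProcess> {self.name}'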
+ +By default (or via the OGC API - Processes ``response: raw`` execution parameter), pygeoapi provides +processing responses in their native encoding and media type, as defined by a given +plugin (which needs to set the response content type and payload accordingly). + +pygeoapi also supports a JSON-based response type (via the OGC API - Processes ``response: document`` +execution parameter). When this mode is requested, the response will always be a JSON encoding, embedding +the resulting payload (part of which may be Base64 encoded for binary data, for example). + + Asynchronous support -------------------- @@ -33,15 +65,27 @@ an asynchronous design pattern. This means that when a job is submitted in asyn mode, the server responds immediately with a reference to the job, which allows the client to periodically poll the server for the processing status of a given job. -pygeoapi provides asynchronous support by providing a 'manager' concept which, well, +In keeping with the OGC API - Processes specification, asynchronous process execution +can be requested by including the ``Prefer: respond-async`` HTTP header in the request. + +Job management is required for asynchronous functionality. + +Job management +-------------- + +pygeoapi provides job management by providing a 'manager' concept which, well, manages job execution. The manager concept is implemented as part of the pygeoapi :ref:`plugins` architecture. pygeoapi provides a default manager implementation based on `TinyDB`_ for simplicity. Custom manager plugins can be developed for more advanced job management capabilities (e.g. Kubernetes, databases, etc.). -In keeping with the OGC API - Processes specification, asynchronous process execution -can be requested by including the ``Prefer: respond-async`` HTTP header in the request +Job managers +------------ + +TinyDB +^^^^^^ +TinyDB is the default job manager for pygeoapi when enabled. .. code-block:: yaml @@ -52,11 +96,12 @@ can be requested by including the ``Prefer: respond-async`` HTTP header in the r output_dir: /tmp/ MongoDB --------------------- -As an alternative to the default a manager employing `MongoDB`_ can be used. -The connection to an installed `MongoDB`_ instance must be provided in the configuration. -`MongoDB`_ uses the localhost and port 27017 by default. Jobs are stored in a collection named -job_manager_pygeoapi. +^^^^^^^ + +As an alternative to the default, a manager employing `MongoDB`_ can be used. +The connection to a `MongoDB`_ instance must be provided in the configuration. +`MongoDB`_ uses ``localhost`` and port ``27017`` by default. Jobs are stored in a collection named +``job_manager_pygeoapi``. .. code-block:: yaml @@ -66,11 +111,34 @@ job_manager_pygeoapi. connection: mongodb://host:port output_dir: /tmp/ +PostgreSQL +^^^^^^^^^^ + +As another alternative to the default, a manager employing `PostgreSQL`_ can be used. +The connection to a `PostgreSQL`_ database must be provided in the configuration. +`PostgreSQL`_ uses ``localhost`` and port ``5432`` by default. Jobs are stored in a table named ``jobs``. + +.. 
code-block:: yaml + + server: + manager: + name: PostgreSQL + connection: + host: localhost + port: 5432 + database: test + user: postgres + password: ${POSTGRESQL_PASSWORD:-postgres} + # Alternative accepted connection definition: + # connection: postgresql://postgres:postgres@localhost:5432/test + # connection: postgresql://postgres:${POSTGRESQL_PASSWORD:-postgres}@localhost:5432/test + output_dir: /tmp + Putting it all together ----------------------- -To summarize how pygeoapi processes and managers work together:: +To summarize how pygeoapi processes and managers work together: * process plugins implement the core processing / workflow functionality * manager plugins control and manage how processes are executed diff --git a/docs/source/data-publishing/ogcapi-tiles.rst b/docs/source/data-publishing/ogcapi-tiles.rst index a06e1739f..ec41b3b0f 100644 --- a/docs/source/data-publishing/ogcapi-tiles.rst +++ b/docs/source/data-publishing/ogcapi-tiles.rst @@ -106,8 +106,8 @@ Following block shows how to configure pygeoapi to read Mapbox vector tiles from zoom: min: 0 max: 15 - schemes: - - WebMercatorQuad # this option is needed in the MVT-proxy provider + schemes: + - WebMercatorQuad # this option is needed in the MVT-proxy provider format: name: pbf mimetype: application/vnd.mapbox-vector-tile @@ -124,8 +124,8 @@ Following code block shows how to configure pygeoapi to read Mapbox vector tiles zoom: min: 0 max: 15 - schemes: - - WebMercatorQuad + schemes: + - WebMercatorQuad format: name: pbf mimetype: application/vnd.mapbox-vector-tile diff --git a/docs/source/installation.rst b/docs/source/installation.rst index c3ff21b7b..e3354da08 100644 --- a/docs/source/installation.rst +++ b/docs/source/installation.rst @@ -11,6 +11,13 @@ Requirements and dependencies pygeoapi runs on Python 3. +.. note:: + + The exact Python version requirements are aligned with the Python version shipped by the Ubuntu release that + pygeoapi supports. For example, as of 2024-07, support is bound to Ubuntu 22.04 (Jammy), which ships + Python 3.10. Ensure you have a Python version that is compatible with the Ubuntu version specified in + pygeoapi's `Dockerfile`_. + Core dependencies are included as part of a given pygeoapi installation procedure. More specific requirements details are described below depending on the platform. @@ -32,7 +39,7 @@ For developers and the truly impatient vi example-config.yml # edit as required export PYGEOAPI_CONFIG=example-config.yml export PYGEOAPI_OPENAPI=example-openapi.yml - pygeoapi openapi generate $PYGEOAPI_CONFIG > $PYGEOAPI_OPENAPI + pygeoapi openapi generate $PYGEOAPI_CONFIG --output-file $PYGEOAPI_OPENAPI pygeoapi serve curl http://localhost:5000 @@ -142,3 +149,4 @@ onto your system. .. _`Docker image`: https://github.com/geopython/pygeoapi/pkgs/container/pygeoapi +.. _`Dockerfile`: https://github.com/geopython/pygeoapi/blob/master/Dockerfile diff --git a/docs/source/introduction.rst b/docs/source/introduction.rst index a98b218c2..d41ff33f4 100644 --- a/docs/source/introduction.rst +++ b/docs/source/introduction.rst @@ -14,12 +14,12 @@ Features * OGC API - Features * OGC API - Environmental Data Retrieval * OGC API - Tiles + * OGC API - Processes + * additionally implements * OGC API - Coverages * OGC API - Maps - * OGC API - Processes * OGC API - Records * SpatioTemporal Asset Library @@ -52,7 +52,7 @@ Standards are at the core of pygeoapi. 
Below is the project's standards support `OGC API - Coverages`_,Implementing `OGC API - Maps`_,Implementing `OGC API - Tiles`_,Reference Implementation - `OGC API - Processes`_,Implementing + `OGC API - Processes`_,Compliant `OGC API - Records`_,Implementing `OGC API - Environmental Data Retrieval`_,Reference Implementation `SpatioTemporal Asset Catalog`_,Implementing diff --git a/docs/source/plugins.rst b/docs/source/plugins.rst index 0a4138498..cadf93a73 100644 --- a/docs/source/plugins.rst +++ b/docs/source/plugins.rst @@ -240,15 +240,16 @@ The below template provides a minimal example (let's call the file ``mycoolraste super().__init__(provider_def) self.num_bands = 4 self.axes = ['Lat', 'Long'] - self.fields = self.get_fields() + self.get_fields() def get_fields(self): # generate a JSON Schema of coverage band metadata - return { + self._fields = { 'b1': { 'type': 'number' } } + return self._fields def query(self, bands=[], subsets={}, format_='json', **kwargs): # process bands and subsets parameters @@ -272,6 +273,8 @@ implementation. Each base class documents the functions, arguments and return types required for implementation. +.. _example-custom-pygeoapi-processing-plugin: + Example: custom pygeoapi processing plugin ------------------------------------------ diff --git a/locale/bs/LC_MESSAGES/messages.po b/locale/bs/LC_MESSAGES/messages.po index 40c6573f6..b140d4b2c 100644 --- a/locale/bs/LC_MESSAGES/messages.po +++ b/locale/bs/LC_MESSAGES/messages.po @@ -656,3 +656,27 @@ msgstr "" msgid "not specified" msgstr "" + +msgid "Position" +msgstr "" + +msgid "Cube" +msgstr "" + +msgid "Area" +msgstr "" + +msgid "Corridor" +msgstr "" + +msgid "Trajectory" +msgstr "" + +msgid "Radius" +msgstr "" + +msgid "Locations" +msgstr "" + +msgid "Instances" +msgstr "" diff --git a/locale/de/LC_MESSAGES/messages.po b/locale/de/LC_MESSAGES/messages.po index e7d1229b6..770816f1c 100644 --- a/locale/de/LC_MESSAGES/messages.po +++ b/locale/de/LC_MESSAGES/messages.po @@ -706,3 +706,27 @@ msgstr "" msgid "not specified" msgstr "" + +msgid "Position" +msgstr "" + +msgid "Cube" +msgstr "" + +msgid "Area" +msgstr "" + +msgid "Corridor" +msgstr "" + +msgid "Trajectory" +msgstr "" + +msgid "Radius" +msgstr "" + +msgid "Locations" +msgstr "" + +msgid "Instances" +msgstr "" diff --git a/locale/en/LC_MESSAGES/messages.po b/locale/en/LC_MESSAGES/messages.po index 4f2e08a03..d9ff71247 100644 --- a/locale/en/LC_MESSAGES/messages.po +++ b/locale/en/LC_MESSAGES/messages.po @@ -708,3 +708,27 @@ msgstr "" msgid "not specified" msgstr "" + +msgid "Position" +msgstr "" + +msgid "Cube" +msgstr "" + +msgid "Area" +msgstr "" + +msgid "Corridor" +msgstr "" + +msgid "Trajectory" +msgstr "" + +msgid "Radius" +msgstr "" + +msgid "Locations" +msgstr "" + +msgid "Instances" +msgstr "" diff --git a/locale/es/LC_MESSAGES/messages.po b/locale/es/LC_MESSAGES/messages.po index 9cd90fc2a..7c31304c3 100644 --- a/locale/es/LC_MESSAGES/messages.po +++ b/locale/es/LC_MESSAGES/messages.po @@ -521,3 +521,27 @@ msgstr "" msgid "not specified" msgstr "" + +msgid "Position" +msgstr "" + +msgid "Cube" +msgstr "" + +msgid "Area" +msgstr "" + +msgid "Corridor" +msgstr "" + +msgid "Trajectory" +msgstr "" + +msgid "Radius" +msgstr "" + +msgid "Locations" +msgstr "" + +msgid "Instances" +msgstr "" diff --git a/locale/fr/LC_MESSAGES/messages.po b/locale/fr/LC_MESSAGES/messages.po index 72ab27727..ea63df8c7 100644 --- a/locale/fr/LC_MESSAGES/messages.po +++ b/locale/fr/LC_MESSAGES/messages.po @@ -715,3 +715,27 @@ msgstr "" msgid "not specified" msgstr 
"" + +msgid "Position" +msgstr "" + +msgid "Cube" +msgstr "" + +msgid "Area" +msgstr "" + +msgid "Corridor" +msgstr "" + +msgid "Trajectory" +msgstr "" + +msgid "Radius" +msgstr "" + +msgid "Locations" +msgstr "" + +msgid "Instances" +msgstr "" diff --git a/locale/sr/LC_MESSAGES/messages.po b/locale/sr/LC_MESSAGES/messages.po index 9a043592e..71d7210f0 100644 --- a/locale/sr/LC_MESSAGES/messages.po +++ b/locale/sr/LC_MESSAGES/messages.po @@ -656,3 +656,27 @@ msgstr "" msgid "not specified" msgstr "" + +msgid "Position" +msgstr "" + +msgid "Cube" +msgstr "" + +msgid "Area" +msgstr "" + +msgid "Corridor" +msgstr "" + +msgid "Trajectory" +msgstr "" + +msgid "Radius" +msgstr "" + +msgid "Locations" +msgstr "" + +msgid "Instances" +msgstr "" diff --git a/pygeoapi/__init__.py b/pygeoapi/__init__.py index e66240e72..9c5d35688 100644 --- a/pygeoapi/__init__.py +++ b/pygeoapi/__init__.py @@ -30,7 +30,7 @@ # # ================================================================= -__version__ = '0.18.dev0' +__version__ = '0.19.dev0' import click try: diff --git a/pygeoapi/api/__init__.py b/pygeoapi/api/__init__.py index 8b64ba2d3..b47541f23 100644 --- a/pygeoapi/api/__init__.py +++ b/pygeoapi/api/__init__.py @@ -80,6 +80,7 @@ CHARSET = ['utf-8'] F_JSON = 'json' +F_COVERAGEJSON = 'json' F_HTML = 'html' F_JSONLD = 'jsonld' F_GZIP = 'gzip' @@ -714,6 +715,13 @@ def landing_page(self, LOGGER.debug('Creating links') # TODO: put title text in config or translatable files? fcm['links'] = [{ + 'rel': 'about', + 'type': 'text/html', + 'title': l10n.translate( + self.config['metadata']['identification']['title'], + request.locale), + 'href': self.config['metadata']['identification']['url'] + }, { 'rel': request.get_linkrel(F_JSON), 'type': FORMAT_TYPES[F_JSON], 'title': l10n.translate('This document as JSON', request.locale), @@ -1202,6 +1210,7 @@ def describe_collections(self, request: Union[APIRequest, Any], if edr: # TODO: translate LOGGER.debug('Adding EDR links') + collection['data_queries'] = {} parameters = p.get_fields() if parameters: collection['parameter_names'] = {} @@ -1222,6 +1231,14 @@ def describe_collections(self, request: Union[APIRequest, Any], } for qt in p.get_query_types(): + data_query = { + 'link': { + 'href': f'{self.get_collections_url()}/{k}/{qt}', + 'rel': 'data' + } + } + collection['data_queries'][qt] = data_query + title1 = l10n.translate('query for this collection as JSON', request.locale) # noqa title1 = f'{qt} {title1}' title2 = l10n.translate('query for this collection as HTML', request.locale) # noqa @@ -1366,6 +1383,7 @@ def get_collection_schema(self, request: Union[APIRequest, Any], self.config['resources'][dataset]['title'], request.locale) schema['collections_path'] = self.get_collections_url() + schema['dataset_path'] = f'{self.get_collections_url()}/{dataset}' content = render_j2_template(self.tpl_config, 'collections/schema.html', @@ -1423,7 +1441,8 @@ def get_format_exception(self, request) -> Tuple[dict, int, str]: # Content-Language is in the system locale (ignore language settings) headers = request.get_response_headers(SYSTEM_LOCALE, **self.api_headers) - msg = f'Invalid format: {request.format}' + msg = 'Invalid format requested' + LOGGER.error(f'{msg}: {request.format}') return self.get_exception( HTTPStatus.BAD_REQUEST, headers, request.format, 'InvalidParameterValue', msg) diff --git a/pygeoapi/api/environmental_data_retrieval.py b/pygeoapi/api/environmental_data_retrieval.py index 494e695d1..c13c83205 100644 --- a/pygeoapi/api/environmental_data_retrieval.py +++ 
b/pygeoapi/api/environmental_data_retrieval.py @@ -41,10 +41,12 @@ from http import HTTPStatus import logging from typing import Tuple +import urllib -from shapely.errors import WKTReadingError +from shapely.errors import ShapelyError from shapely.wkt import loads as shapely_loads +from pygeoapi import l10n from pygeoapi.plugin import load_plugin, PLUGINS from pygeoapi.provider.base import ProviderGenericError from pygeoapi.util import ( @@ -52,7 +54,8 @@ to_json, filter_dict_by_key_value ) -from . import APIRequest, API, F_HTML, validate_datetime, validate_bbox +from . import (APIRequest, API, F_COVERAGEJSON, F_HTML, F_JSONLD, + validate_datetime, validate_bbox) LOGGER = logging.getLogger(__name__) @@ -88,6 +91,27 @@ def get_collection_edr_query(api: API, request: APIRequest, return api.get_exception( HTTPStatus.NOT_FOUND, headers, request.format, 'NotFound', msg) + LOGGER.debug('Loading provider') + try: + p = load_plugin('provider', get_provider_by_type( + collections[dataset]['providers'], 'edr')) + except ProviderGenericError as err: + return api.get_exception( + err.http_status_code, headers, request.format, + err.ogc_exception_code, err.message) + + if instance is not None and not p.get_instance(instance): + msg = 'Invalid instance identifier' + return api.get_exception( + HTTPStatus.BAD_REQUEST, headers, + request.format, 'InvalidParameterValue', msg) + + if query_type not in p.get_query_types(): + msg = 'Unsupported query type' + return api.get_exception( + HTTPStatus.BAD_REQUEST, headers, request.format, + 'InvalidParameterValue', msg) + LOGGER.debug('Processing query parameters') LOGGER.debug('Processing datetime parameter') @@ -124,7 +148,7 @@ def get_collection_edr_query(api: API, request: APIRequest, if wkt: try: wkt = shapely_loads(wkt) - except WKTReadingError: + except ShapelyError: msg = 'invalid coords parameter' return api.get_exception( HTTPStatus.BAD_REQUEST, headers, request.format, @@ -144,27 +168,6 @@ def get_collection_edr_query(api: API, request: APIRequest, LOGGER.debug('Processing z parameter') z = request.params.get('z') - LOGGER.debug('Loading provider') - try: - p = load_plugin('provider', get_provider_by_type( - collections[dataset]['providers'], 'edr')) - except ProviderGenericError as err: - return api.get_exception( - err.http_status_code, headers, request.format, - err.ogc_exception_code, err.message) - - if instance is not None and not p.get_instance(instance): - msg = 'Invalid instance identifier' - return api.get_exception( - HTTPStatus.BAD_REQUEST, headers, - request.format, 'InvalidParameterValue', msg) - - if query_type not in p.get_query_types(): - msg = 'Unsupported query type' - return api.get_exception( - HTTPStatus.BAD_REQUEST, headers, request.format, - 'InvalidParameterValue', msg) - if parameternames and not any((fld in parameternames) for fld in p.get_fields().keys()): msg = 'Invalid parameter-name' @@ -195,6 +198,36 @@ def get_collection_edr_query(api: API, request: APIRequest, err.ogc_exception_code, err.message) if request.format == F_HTML: # render + uri = f'{api.get_collections_url()}/{dataset}/{query_type}' + serialized_query_params = '' + for k, v in request.params.items(): + if k != 'f': + serialized_query_params += '&' + serialized_query_params += urllib.parse.quote(k, safe='') + serialized_query_params += '=' + serialized_query_params += urllib.parse.quote(str(v), safe=',') + + data['query_type'] = query_type.capitalize() + data['query_path'] = uri + data['dataset_path'] = '/'.join(uri.split('/')[:-1]) + data['collections_path'] 
= api.get_collections_url() + + data['links'] = [{ + 'rel': 'collection', + 'title': collections[dataset]['title'], + 'href': data['dataset_path'] + }, { + 'type': 'application/prs.coverage+json', + 'rel': request.get_linkrel(F_COVERAGEJSON), + 'title': l10n.translate('This document as CoverageJSON', request.locale), # noqa + 'href': f'{uri}?f={F_COVERAGEJSON}{serialized_query_params}' + }, { + 'type': 'application/ld+json', + 'rel': 'alternate', + 'title': l10n.translate('This document as JSON-LD', request.locale), # noqa + 'href': f'{uri}?f={F_JSONLD}{serialized_query_params}' + }] + content = render_j2_template(api.tpl_config, 'collections/edr/query.html', data, api.default_locale) @@ -305,11 +338,9 @@ def get_oas_30(cfg: dict, locale: str) -> tuple[list[dict[str, str]], dict[str, 'tags': [k], 'operationId': f'queryLOCATIONSBYID{k.capitalize()}', 'parameters': [ - {'$ref': f"{OPENAPI_YAML['oaedr']}/parameters/{spatial_parameter}.yaml"}, # noqa {'$ref': f"{OPENAPI_YAML['oaedr']}/parameters/locationId.yaml"}, # noqa {'$ref': f"{OPENAPI_YAML['oapif-1']}#/components/parameters/datetime"}, # noqa {'$ref': f"{OPENAPI_YAML['oaedr']}/parameters/parameter-name.yaml"}, # noqa - {'$ref': f"{OPENAPI_YAML['oaedr']}/parameters/z.yaml"}, # noqa {'$ref': '#/components/parameters/f'} ], 'responses': { diff --git a/pygeoapi/api/itemtypes.py b/pygeoapi/api/itemtypes.py index 33cb517b3..008b28cb7 100644 --- a/pygeoapi/api/itemtypes.py +++ b/pygeoapi/api/itemtypes.py @@ -121,23 +121,22 @@ def get_collection_queryables(api: API, request: Union[APIRequest, Any], HTTPStatus.NOT_FOUND, headers, request.format, 'NotFound', msg) LOGGER.debug('Creating collection queryables') - try: - LOGGER.debug('Loading feature provider') - p = load_plugin('provider', get_provider_by_type( - api.config['resources'][dataset]['providers'], 'feature')) - except ProviderTypeError: + + p = None + for pt in ['feature', 'coverage', 'record']: try: - LOGGER.debug('Loading coverage provider') + LOGGER.debug(f'Loading {pt} provider') p = load_plugin('provider', get_provider_by_type( - api.config['resources'][dataset]['providers'], 'coverage')) # noqa + api.config['resources'][dataset]['providers'], pt)) + break except ProviderTypeError: - LOGGER.debug('Loading record provider') - p = load_plugin('provider', get_provider_by_type( - api.config['resources'][dataset]['providers'], 'record')) - except ProviderGenericError as err: + LOGGER.debug(f'Provider type {pt} not found') + + if p is None: + msg = 'queryables not available for this collection' return api.get_exception( - err.http_status_code, headers, request.format, - err.ogc_exception_code, err.message) + HTTPStatus.BAD_REQUEST, headers, request.format, + 'NoApplicableError', msg) queryables = { 'type': 'object', @@ -182,6 +181,7 @@ def get_collection_queryables(api: API, request: Union[APIRequest, Any], api.config['resources'][dataset]['title'], request.locale) queryables['collections_path'] = api.get_collections_url() + queryables['dataset_path'] = f'{api.get_collections_url()}/{dataset}' content = render_j2_template(api.tpl_config, 'collections/queryables.html', @@ -380,8 +380,12 @@ def get_collection_items( LOGGER.debug('processing property parameters') for k, v in request.params.items(): - if k not in reserved_fieldnames and k in list(p.fields.keys()): - LOGGER.debug(f'Adding property filter {k}={v}') + if k not in reserved_fieldnames: + if k in list(p.fields.keys()): + LOGGER.debug(f'Adding property filter {k}={v}') + else: + LOGGER.debug(f'Adding additional property filter 
{k}={v}') + properties.append((k, v)) LOGGER.debug('processing sort parameter') @@ -444,7 +448,8 @@ def get_collection_items( geometry_column_name=provider_def.get('geom_field'), ) except Exception: - msg = f'Bad CQL string : {cql_text}' + msg = 'Bad CQL text' + LOGGER.error(f'{msg}: {cql_text}') return api.get_exception( HTTPStatus.BAD_REQUEST, headers, request.format, 'InvalidParameterValue', msg) @@ -531,17 +536,23 @@ def get_collection_items( 'href': f'{uri}?offset={prev}{serialized_query_params}' }) - if 'numberMatched' in content: - if content['numberMatched'] > (limit + offset): - next_ = offset + limit - next_href = f'{uri}?offset={next_}{serialized_query_params}' - content['links'].append( - { - 'type': 'application/geo+json', - 'rel': 'next', - 'title': l10n.translate('Items (next)', request.locale), - 'href': next_href - }) + next_link = False + + if content.get('numberMatched', -1) > (limit + offset): + next_link = True + elif len(content['features']) == limit: + next_link = True + + if next_link: + next_ = offset + limit + next_href = f'{uri}?offset={next_}{serialized_query_params}' + content['links'].append( + { + 'type': 'application/geo+json', + 'rel': 'next', + 'title': l10n.translate('Items (next)', request.locale), + 'href': next_href + }) content['links'].append( { @@ -836,7 +847,7 @@ def post_collection_items( if (request_headers.get( 'Content-Type') or request_headers.get( 'content-type')) != 'application/query-cql-json': - msg = ('Invalid body content-type') + msg = 'Invalid body content-type' return api.get_exception( HTTPStatus.BAD_REQUEST, headers, request.format, 'InvalidHeaderValue', msg) @@ -872,7 +883,8 @@ def post_collection_items( geometry_column_name=provider_def.get('geom_field') ) except Exception: - msg = f'Bad CQL string : {data}' + msg = 'Bad CQL text' + LOGGER.error(f'{msg}: {data}') return api.get_exception( HTTPStatus.BAD_REQUEST, headers, request.format, 'InvalidParameterValue', msg) @@ -881,7 +893,8 @@ def post_collection_items( try: filter_ = CQLModel.parse_raw(data) except Exception: - msg = f'Bad CQL string : {data}' + msg = 'Bad CQL text' + LOGGER.error(f'{msg}: {data}') return api.get_exception( HTTPStatus.BAD_REQUEST, headers, request.format, 'InvalidParameterValue', msg) diff --git a/pygeoapi/api/processes.py b/pygeoapi/api/processes.py index 8ba759970..b2a577757 100644 --- a/pygeoapi/api/processes.py +++ b/pygeoapi/api/processes.py @@ -46,6 +46,7 @@ import json import logging from typing import Tuple +import urllib.parse from pygeoapi import l10n from pygeoapi.util import ( @@ -162,7 +163,7 @@ def describe_processes(api: API, request: APIRequest, 'type': FORMAT_TYPES[F_HTML], 'rel': 'http://www.opengis.net/def/rel/ogc/1.0/job-list', 'href': f'{jobs_url}?f={F_HTML}', - 'title': l10n.translate('Jobs for this process as HTML', request.locale), # noqa + 'title': l10n.translate('Jobs list as HTML', request.locale), # noqa 'hreflang': api.default_locale } p2['links'].append(link) @@ -171,7 +172,7 @@ def describe_processes(api: API, request: APIRequest, 'type': FORMAT_TYPES[F_JSON], 'rel': 'http://www.opengis.net/def/rel/ogc/1.0/job-list', 'href': f'{jobs_url}?f={F_JSON}', - 'title': l10n.translate('Jobs for this process as HTML', request.locale), # noqa + 'title': l10n.translate('Jobs list as JSON', request.locale), # noqa 'hreflang': api.default_locale } p2['links'].append(link) @@ -240,10 +241,51 @@ def get_jobs(api: API, request: APIRequest, headers = request.get_response_headers(SYSTEM_LOCALE, **api.api_headers) + LOGGER.debug('Processing 
limit parameter') + try: + limit = int(request.params.get('limit')) + + if limit <= 0: + msg = 'limit value should be strictly positive' + return api.get_exception( + HTTPStatus.BAD_REQUEST, headers, request.format, + 'InvalidParameterValue', msg) + except TypeError: + limit = int(api.config['server']['limit']) + LOGGER.debug('returning all jobs') + except ValueError: + msg = 'limit value should be an integer' + return api.get_exception( + HTTPStatus.BAD_REQUEST, headers, request.format, + 'InvalidParameterValue', msg) + + LOGGER.debug('Processing offset parameter') + try: + offset = int(request.params.get('offset')) + if offset < 0: + msg = 'offset value should be positive or zero' + return api.get_exception( + HTTPStatus.BAD_REQUEST, headers, request.format, + 'InvalidParameterValue', msg) + except TypeError as err: + LOGGER.warning(err) + offset = 0 + except ValueError: + msg = 'offset value should be an integer' + return api.get_exception( + HTTPStatus.BAD_REQUEST, headers, request.format, + 'InvalidParameterValue', msg) + if job_id is None: - jobs = sorted(api.manager.get_jobs(), + jobs_data = api.manager.get_jobs(limit=limit, offset=offset) + # TODO: For pagination to work, the provider has to do the sorting. + # Here we do sort again in case the provider doesn't support + # pagination yet and always returns all jobs. + jobs = sorted(jobs_data['jobs'], key=lambda k: k['job_start_datetime'], reverse=True) + numberMatched = jobs_data['numberMatched'] + else: try: jobs = [api.manager.get_job(job_id)] @@ -251,6 +293,7 @@ def get_jobs(api: API, request: APIRequest, return api.get_exception( HTTPStatus.NOT_FOUND, headers, request.format, 'InvalidParameterValue', job_id) + numberMatched = 1 serialized_jobs = { 'jobs': [], @@ -309,6 +352,44 @@ def get_jobs(api: API, request: APIRequest, serialized_jobs['jobs'].append(job2) + serialized_query_params = '' + for k, v in request.params.items(): + if k not in ('f', 'offset'): + serialized_query_params += '&' + serialized_query_params += urllib.parse.quote(k, safe='') + serialized_query_params += '=' + serialized_query_params += urllib.parse.quote(str(v), safe=',') + + uri = f'{api.base_url}/jobs' + + if offset > 0: + prev = max(0, offset - limit) + serialized_jobs['links'].append( + { + 'href': f'{uri}?offset={prev}{serialized_query_params}', + 'type': FORMAT_TYPES[F_JSON], + 'rel': 'prev', + 'title': l10n.translate('Items (prev)', request.locale), + }) + + next_link = False + + if numberMatched > (limit + offset): + next_link = True + elif len(jobs) == limit: + next_link = True + + if next_link: + next_ = offset + limit + next_href = f'{uri}?offset={next_}{serialized_query_params}' + serialized_jobs['links'].append( + { + 'href': next_href, + 'rel': 'next', + 'type': FORMAT_TYPES[F_JSON], + 'title': l10n.translate('Items (next)', request.locale), + }) + if job_id is None: j2_template = 'jobs/index.html' else: @@ -318,6 +399,7 @@ def get_jobs(api: API, request: APIRequest, if request.format == F_HTML: data = { 'jobs': serialized_jobs, + 'offset': offset, 'now': datetime.now(timezone.utc).strftime(DATETIME_FORMAT) } response = render_j2_template(api.tpl_config, j2_template, data, @@ -379,6 +461,8 @@ def execute_process(api: API, request: APIRequest, requested_outputs = data.get('outputs') LOGGER.debug(f'outputs: {requested_outputs}') + requested_response = data.get('response', 'raw') + subscriber = None subscriber_dict = data.get('subscriber') if subscriber_dict: @@ -407,10 +491,14 @@ def execute_process(api: API, request: APIRequest, result = 
@@ -407,10 +491,14 @@ def execute_process(api: API, request: APIRequest,
         result = api.manager.execute_process(
             process_id, data_dict, execution_mode=execution_mode,
             requested_outputs=requested_outputs,
-            subscriber=subscriber)
+            subscriber=subscriber,
+            requested_response=requested_response)
         job_id, mime_type, outputs, status, additional_headers = result
         headers.update(additional_headers or {})
-        headers['Location'] = f'{api.base_url}/jobs/{job_id}'
+
+        if api.manager.is_async:
+            headers['Location'] = f'{api.base_url}/jobs/{job_id}'
+
     except ProcessorExecuteError as err:
         return api.get_exception(
             err.http_status_code, headers,
@@ -420,11 +508,11 @@ def execute_process(api: API, request: APIRequest,
 
     if status == JobStatus.failed:
         response = outputs
 
-    if data.get('response', 'raw') == 'raw':
+    if requested_response == 'raw':
         headers['Content-Type'] = mime_type
         response = outputs
     elif status not in (JobStatus.failed, JobStatus.accepted):
-        response['outputs'] = [outputs]
+        response = outputs
 
     if status == JobStatus.accepted:
         http_status = HTTPStatus.CREATED
@@ -433,7 +521,7 @@ def execute_process(api: API, request: APIRequest,
     else:
         http_status = HTTPStatus.OK
 
-    if mime_type == 'application/json':
+    if mime_type == 'application/json' or requested_response == 'document':
         response2 = to_json(response, api.pretty_print)
     else:
         response2 = response
@@ -512,9 +600,7 @@ def get_job_result(api: API, request: APIRequest,
     return headers, HTTPStatus.OK, content
 
 
-def delete_job(
-        api: API, request: APIRequest, job_id
-) -> Tuple[dict, int, str]:
+def delete_job(api: API, request: APIRequest, job_id) -> Tuple[dict, int, str]:
     """
     Delete a process job
 
@@ -522,6 +608,7 @@ def delete_job(
 
     :returns: tuple of headers, status code, content
     """
+
     response_headers = request.get_response_headers(
         SYSTEM_LOCALE, **api.api_headers)
     try:
@@ -555,7 +642,7 @@ def delete_job(
         )
         LOGGER.info(response)
         # TODO: this response does not have any headers
-        return {}, http_status, response
+        return {}, http_status, to_json(response, api.pretty_print)
 
 
 def get_oas_30(cfg: dict, locale: str) -> tuple[list[dict[str, str]], dict[str, dict]]:  # noqa
diff --git a/pygeoapi/flask_app.py b/pygeoapi/flask_app.py
index cc140e276..296107019 100644
--- a/pygeoapi/flask_app.py
+++ b/pygeoapi/flask_app.py
@@ -77,7 +77,7 @@ if CONFIG['server'].get('cors', False):
     try:
         from flask_cors import CORS
-        CORS(APP)
+        CORS(APP, expose_headers=['*'])
     except ModuleNotFoundError:
         print('Python package flask-cors required for CORS support')
 
@@ -279,11 +279,7 @@ def collection_items(collection_id, item_id=None):
     """
 
     if item_id is None:
-        if request.method == 'GET':  # list items
-            return execute_from_flask(itemtypes_api.get_collection_items,
-                                      request, collection_id,
-                                      skip_valid_check=True)
-        elif request.method == 'POST':  # filter or manage items
+        if request.method == 'POST':  # filter or manage items
             if request.content_type is not None:
                 if request.content_type == 'application/geo+json':
                     return execute_from_flask(
@@ -298,6 +294,10 @@ def collection_items(collection_id, item_id=None):
             return execute_from_flask(
                 itemtypes_api.manage_collection_item, request, 'options',
                 collection_id, skip_valid_check=True)
+        else:  # GET: list items
+            return execute_from_flask(itemtypes_api.get_collection_items,
+                                      request, collection_id,
+                                      skip_valid_check=True)
 
     elif request.method == 'DELETE':
         return execute_from_flask(itemtypes_api.manage_collection_item,
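# Illustrative sketch (hypothetical, not part of this changeset):
# flask-cors takes its options as lowercase keyword arguments (the same
# setting is also honoured app-wide via the CORS_EXPOSE_HEADERS config
# key); a wildcard exposes all response headers, e.g. Location, to
# browser clients.
from flask import Flask
from flask_cors import CORS

app = Flask(__name__)
CORS(app, expose_headers=['*'])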
diff --git a/pygeoapi/formatter/csv_.py b/pygeoapi/formatter/csv_.py
index 664bc8807..51a6ded17 100644
--- a/pygeoapi/formatter/csv_.py
+++ b/pygeoapi/formatter/csv_.py
@@ -27,11 +27,10 @@
 #
 # =================================================================
 
+import csv
 import io
 import logging
 
-import unicodecsv as csv
-
 from pygeoapi.formatter.base import BaseFormatter, FormatterSerializationError
 
 LOGGER = logging.getLogger(__name__)
@@ -83,10 +82,10 @@ def write(self, options: dict = {}, data: dict = None) -> str:
                 # TODO: implement wkt geometry serialization
                 LOGGER.debug('not a point geometry, skipping')
 
         LOGGER.debug(f'CSV fields: {fields}')
 
         try:
-            output = io.BytesIO()
+            output = io.StringIO()
             writer = csv.DictWriter(output, fields)
             writer.writeheader()
@@ -101,7 +101,7 @@ def write(self, options: dict = {}, data: dict = None) -> str:
             LOGGER.error(err)
             raise FormatterSerializationError('Error writing CSV output')
 
-        return output.getvalue()
+        return output.getvalue().encode('utf-8')
 
     def __repr__(self):
         return f'<CSVFormatter> {self.name}'
diff --git a/pygeoapi/openapi.py b/pygeoapi/openapi.py
index 274e19dad..fe390b742 100644
--- a/pygeoapi/openapi.py
+++ b/pygeoapi/openapi.py
@@ -134,6 +134,52 @@ def gen_response_object(description: str, media_type: str,
     return response
 
 
+def gen_contact(cfg: dict) -> dict:
+    """
+    Generates an OpenAPI contact object with OGC extensions
+    based on OGC API - Records contact
+
+    :param cfg: `dict` of configuration
+
+    :returns: `dict` of OpenAPI contact object
+    """
+
+    contact = {
+        'name': cfg['metadata']['provider']['name'],
+        'url': cfg['metadata']['provider']['url'],
+        'email': cfg['metadata']['contact']['email']
+    }
+
+    contact['x-ogc-serviceContact'] = {
+        'name': cfg['metadata']['contact']['name'],
+        'position': cfg['metadata']['contact']['position'],
+        'addresses': [{
+            'deliveryPoint': [cfg['metadata']['contact']['address']],
+            'city': cfg['metadata']['contact']['city'],
+            'administrativeArea': cfg['metadata']['contact']['stateorprovince'],  # noqa
+            'postalCode': cfg['metadata']['contact']['postalcode'],
+            'country': cfg['metadata']['contact']['country']
+        }],
+        'phones': [{
+            'type': 'main', 'value': cfg['metadata']['contact']['phone']
+        }, {
+            'type': 'fax', 'value': cfg['metadata']['contact']['fax']
+        }],
+        'emails': [{
+            'value': cfg['metadata']['contact']['email']
+        }],
+        'contactInstructions': cfg['metadata']['contact']['instructions'],
+        'links': [{
+            'type': 'text/html',
+            'href': cfg['metadata']['contact']['url']
+        }],
+        'hoursOfService': cfg['metadata']['contact']['hours'],
+        'roles': [cfg['metadata']['contact']['role']]
+    }
+
+    return contact
+
+
 def get_oas_30(cfg: dict, fail_on_invalid_collection: bool = True) -> dict:
     """
     Generates an OpenAPI 3.0 Document
@@ -167,11 +213,7 @@ def get_oas_30(cfg: dict, fail_on_invalid_collection: bool = True) -> dict:
             'x-keywords': l10n.translate(cfg['metadata']['identification']['keywords'], locale_),  # noqa
             'termsOfService':
                 cfg['metadata']['identification']['terms_of_service'],
-            'contact': {
-                'name': cfg['metadata']['provider']['name'],
-                'url': cfg['metadata']['provider']['url'],
-                'email': cfg['metadata']['contact']['email']
-            },
+            'contact': gen_contact(cfg),
             'license': {
                 'name': cfg['metadata']['license']['name'],
                 'url': cfg['metadata']['license']['url']
@@ -903,6 +945,17 @@ def load_openapi_document() -> dict:
 
     pygeoapi_openapi = os.environ.get('PYGEOAPI_OPENAPI')
 
+    if pygeoapi_openapi is None:
+        msg = 'PYGEOAPI_OPENAPI environment variable not set'
+        LOGGER.error(msg)
+        raise RuntimeError(msg)
+
+    if not os.path.exists(pygeoapi_openapi):
+        msg = (f'OpenAPI document {pygeoapi_openapi} does not exist. '
+               'Please generate it before starting pygeoapi')
+        LOGGER.error(msg)
+        raise RuntimeError(msg)
+
     with open(pygeoapi_openapi, encoding='utf8') as ff:
         if pygeoapi_openapi.endswith(('.yaml', '.yml')):
             openapi_ = yaml_load(ff)
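# Illustrative sketch (hypothetical, not part of this changeset)
# exercising the stricter load_openapi_document() above: a missing or
# unset PYGEOAPI_OPENAPI now aborts startup with a RuntimeError instead
# of failing later inside open(). The path below is an assumption.
import os

os.environ['PYGEOAPI_OPENAPI'] = '/tmp/missing-openapi.yml'

from pygeoapi.openapi import load_openapi_document

try:
    load_openapi_document()
except RuntimeError as err:
    print(f'startup aborted: {err}')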
diff --git a/pygeoapi/plugin.py b/pygeoapi/plugin.py
index 8e922f2a9..ee4617225 100644
--- a/pygeoapi/plugin.py
+++ b/pygeoapi/plugin.py
@@ -2,7 +2,7 @@
 #
 # Authors: Tom Kralidis
 #
-# Copyright (c) 2023 Tom Kralidis
+# Copyright (c) 2024 Tom Kralidis
 #
 # Permission is hereby granted, free of charge, to any person
 # obtaining a copy of this software and associated documentation
@@ -55,11 +55,13 @@
     'MVT-proxy': 'pygeoapi.provider.mvt_proxy.MVTProxyProvider',  # noqa: E501
     'OracleDB': 'pygeoapi.provider.oracle.OracleProvider',
     'OGR': 'pygeoapi.provider.ogr.OGRProvider',
+    'Parquet': 'pygeoapi.provider.parquet.ParquetProvider',
     'PostgreSQL': 'pygeoapi.provider.postgresql.PostgreSQLProvider',
     'rasterio': 'pygeoapi.provider.rasterio_.RasterioProvider',
     'SensorThings': 'pygeoapi.provider.sensorthings.SensorThingsProvider',
     'SQLiteGPKG': 'pygeoapi.provider.sqlite.SQLiteGPKGProvider',
     'Socrata': 'pygeoapi.provider.socrata.SODAServiceProvider',
+    'TinyDB': 'pygeoapi.provider.tinydb_.TinyDBProvider',
     'TinyDBCatalogue': 'pygeoapi.provider.tinydb_.TinyDBCatalogueProvider',
     'WMSFacade': 'pygeoapi.provider.wms_facade.WMSFacadeProvider',
     'WMTSFacade': 'pygeoapi.provider.wmts_facade.WMTSFacadeProvider',
@@ -77,7 +79,8 @@
     'process_manager': {
         'Dummy': 'pygeoapi.process.manager.dummy.DummyManager',
         'MongoDB': 'pygeoapi.process.manager.mongodb_.MongoDBManager',
-        'TinyDB': 'pygeoapi.process.manager.tinydb_.TinyDBManager'
+        'TinyDB': 'pygeoapi.process.manager.tinydb_.TinyDBManager',
+        'PostgreSQL': 'pygeoapi.process.manager.postgresql.PostgreSQLManager'
     }
 }
diff --git a/pygeoapi/process/manager/base.py b/pygeoapi/process/manager/base.py
index 548883b1d..df5c41a8c 100644
--- a/pygeoapi/process/manager/base.py
+++ b/pygeoapi/process/manager/base.py
@@ -54,6 +54,7 @@
     JobStatus,
     ProcessExecutionMode,
     RequestedProcessExecutionMode,
+    RequestedResponse,
     Subscriber
 )
 
@@ -107,14 +108,21 @@ def get_processor(self, process_id: str) -> BaseProcessor:
         else:
             return load_plugin('process', process_conf['processor'])
 
-    def get_jobs(self, status: JobStatus = None) -> list:
+    def get_jobs(self,
+                 status: JobStatus = None,
+                 limit: Optional[int] = None,
+                 offset: Optional[int] = None
+                 ) -> dict:
         """
         Get process jobs, optionally filtered by status
 
         :param status: job status (accepted, running, successful,
                        failed, results) (default is all)
+        :param limit: number of jobs to return
+        :param offset: pagination offset
 
-        :returns: `list` of jobs (identifier, status, process identifier)
+        :returns: `dict` of a list of jobs (identifier, status, process
+                  identifier) and numberMatched
         """
 
         raise NotImplementedError()
@@ -187,6 +195,7 @@ def _execute_handler_async(self, p: BaseProcessor, job_id: str,
                                data_dict: dict,
                                requested_outputs: Optional[dict] = None,
                                subscriber: Optional[Subscriber] = None,
+                               requested_response: Optional[RequestedResponse] = RequestedResponse.raw.value  # noqa
                                ) -> Tuple[str, None, JobStatus]:
         """
         This private execution handler executes a process in a background
@@ -197,27 +206,34 @@ def _execute_handler_async(self, p: BaseProcessor, job_id: str,
         :param p: `pygeoapi.process` object
         :param job_id: job identifier
         :param data_dict: `dict` of data parameters
-        :param requested_outputs: `dict` specify the subset of required
-                                  outputs - defaults to all outputs.
- The value of any key may be an object and include the property - `transmissionMode` - defaults to `value`. - Note: 'optional' is for backward compatibility. + :param requested_outputs: `dict` optionally specifying the subset of + required outputs - defaults to all outputs. + The value of any key may be an object and + include the property `transmissionMode` + (defaults to `value`) + Note: 'optional' is for backward + compatibility. :param subscriber: optional `Subscriber` specifying callback URLs + :param requested_response: `RequestedResponse` optionally specifying + raw or document (default is `raw`) :returns: tuple of None (i.e. initial response payload) and JobStatus.accepted (i.e. initial job status) """ - _process = dummy.Process( - target=self._execute_handler_sync, - args=(p, job_id, data_dict, requested_outputs, subscriber) - ) + + args = (p, job_id, data_dict, requested_outputs, subscriber, + requested_response) + + _process = dummy.Process(target=self._execute_handler_sync, args=args) _process.start() + return 'application/json', None, JobStatus.accepted def _execute_handler_sync(self, p: BaseProcessor, job_id: str, data_dict: dict, requested_outputs: Optional[dict] = None, subscriber: Optional[Subscriber] = None, + requested_response: Optional[RequestedResponse] = RequestedResponse.raw.value # noqa ) -> Tuple[str, Any, JobStatus]: """ Synchronous execution handler @@ -229,15 +245,27 @@ def _execute_handler_sync(self, p: BaseProcessor, job_id: str, :param p: `pygeoapi.process` object :param job_id: job identifier :param data_dict: `dict` of data parameters - :param requested_outputs: `dict` specify the subset of required - outputs - defaults to all outputs. - The value of any key may be an object and include the property - `transmissionMode` - defaults to `value`. - Note: 'optional' is for backward compatibility. + :param requested_outputs: `dict` optionally specifying the subset of + required outputs - defaults to all outputs. + The value of any key may be an object and + include the property `transmissionMode` + (defaults to `value`) + Note: 'optional' is for backward + compatibility. 
:param subscriber: optional `Subscriber` specifying callback URLs + :param requested_response: `RequestedResponse` optionally specifying + raw or document (default is `raw`) :returns: tuple of MIME type, response payload and status """ + + extra_execute_parameters = {} + + # only pass requested_outputs if supported, + # otherwise this breaks existing processes + if p.supports_outputs: + extra_execute_parameters['outputs'] = requested_outputs + self._send_in_progress_notification(subscriber) try: @@ -248,13 +276,12 @@ def _execute_handler_sync(self, p: BaseProcessor, job_id: str, job_filename = None current_status = JobStatus.running - jfmt, outputs = p.execute( - data_dict, - # only pass requested_outputs if supported, - # otherwise this breaks existing processes - **({'outputs': requested_outputs} - if p.supports_outputs else {}) - ) + jfmt, outputs = p.execute(data_dict, **extra_execute_parameters) + + if requested_response == RequestedResponse.document.value: + outputs = { + 'outputs': [outputs] + } self.update_job(job_id, { 'status': current_status.value, @@ -330,7 +357,8 @@ def execute_process( data_dict: dict, execution_mode: Optional[RequestedProcessExecutionMode] = None, requested_outputs: Optional[dict] = None, - subscriber: Optional[Subscriber] = None + subscriber: Optional[Subscriber] = None, + requested_response: Optional[RequestedResponse] = RequestedResponse.raw.value # noqa ) -> Tuple[str, Any, JobStatus, Optional[Dict[str, str]]]: """ Default process execution handler @@ -339,12 +367,17 @@ def execute_process( :param data_dict: `dict` of data parameters :param execution_mode: `str` optionally specifying sync or async processing. - :param requested_outputs: `dict` optionally specify the subset of - required outputs - defaults to all outputs. - The value of any key may be an object and include the property - `transmissionMode` - defaults to `value`. - Note: 'optional' is for backward compatibility. + :param requested_outputs: `dict` optionally specifying the subset of + required outputs - defaults to all outputs. + The value of any key may be an object and + include the property `transmissionMode` + (default is `value`) + Note: 'optional' is for backward + compatibility. 
:param subscriber: `Subscriber` optionally specifying callback urls + :param requested_response: `RequestedResponse` optionally specifying + raw or document (default is `raw`) + :raises UnknownProcessError: if the input process_id does not correspond to a known process @@ -356,6 +389,9 @@ def execute_process( job_id = str(uuid.uuid1()) processor = self.get_processor(process_id) processor.set_job_id(job_id) + extra_execute_handler_parameters = { + 'requested_response': requested_response + } if execution_mode == RequestedProcessExecutionMode.respond_async: job_control_options = processor.metadata.get( @@ -406,6 +442,11 @@ def execute_process( } self.add_job(job_metadata) + # only pass subscriber if supported, otherwise this breaks + # existing managers + if self.supports_subscribing: + extra_execute_handler_parameters['subscriber'] = subscriber + # TODO: handler's response could also be allowed to include more HTTP # headers mime_type, outputs, status = handler( @@ -413,10 +454,7 @@ def execute_process( job_id, data_dict, requested_outputs, - # only pass subscriber if supported, otherwise this breaks existing - # managers - **({'subscriber': subscriber} if self.supports_subscribing else {}) - ) + **extra_execute_handler_parameters) return job_id, mime_type, outputs, status, response_headers diff --git a/pygeoapi/process/manager/dummy.py b/pygeoapi/process/manager/dummy.py index 8360154a0..7c3a7037b 100644 --- a/pygeoapi/process/manager/dummy.py +++ b/pygeoapi/process/manager/dummy.py @@ -33,8 +33,9 @@ from pygeoapi.process.manager.base import BaseManager from pygeoapi.util import ( - RequestedProcessExecutionMode, JobStatus, + RequestedProcessExecutionMode, + RequestedResponse, Subscriber ) @@ -55,17 +56,21 @@ def __init__(self, manager_def: dict): super().__init__(manager_def) - def get_jobs(self, status: JobStatus = None) -> list: + def get_jobs(self, status: JobStatus = None, limit=None, offset=None + ) -> dict: """ Get process jobs, optionally filtered by status :param status: job status (accepted, running, successful, failed, results) (default is all) + :param limit: number of jobs to return + :param offset: pagination offset - :returns: `list` of jobs (identifier, status, process identifier) + :returns: dict of list of jobs (identifier, status, process identifier) + and numberMatched """ - return [] + return {'jobs': [], 'numberMatched': 0} def execute_process( self, @@ -73,7 +78,8 @@ def execute_process( data_dict: dict, execution_mode: Optional[RequestedProcessExecutionMode] = None, requested_outputs: Optional[dict] = None, - subscriber: Optional[Subscriber] = None + subscriber: Optional[Subscriber] = None, + requested_response: Optional[RequestedResponse] = RequestedResponse.raw.value # noqa ) -> Tuple[str, str, Any, JobStatus, Optional[Dict[str, str]]]: """ Default process execution handler @@ -81,9 +87,19 @@ def execute_process( :param process_id: process identifier :param data_dict: `dict` of data parameters :param execution_mode: requested execution mode - + :param requested_outputs: `dict` optionally specify the subset of + required outputs - defaults to all outputs. + The value of any key may be an object and include the property + `transmissionMode` - defaults to `value`. + Note: 'optional' is for backward compatibility. 
+        :param subscriber: `Subscriber` optionally specifying callback urls
+        :param requested_response: `RequestedResponse` optionally specifying
+                                   raw or document (default is `raw`)
+
+        :raises UnknownProcessError: if the input process_id does not
+                                     correspond to a known process
         :returns: tuple of job_id, MIME type, response payload, status and
-                  optionally additional HTTP headers to include in the
+                  optionally additional HTTP headers to include in the final
                   response
         """
@@ -111,6 +127,12 @@ def execute_process(
             current_status = JobStatus.failed
             LOGGER.exception(err)
             self._send_failed_notification(subscriber)
+
+        if requested_response == RequestedResponse.document.value:
+            outputs = {
+                'outputs': [outputs]
+            }
+
         job_id = str(uuid.uuid1())
         return job_id, jfmt, outputs, current_status, response_headers
diff --git a/pygeoapi/process/manager/mongodb_.py b/pygeoapi/process/manager/mongodb_.py
index 66886f973..2b64806cf 100644
--- a/pygeoapi/process/manager/mongodb_.py
+++ b/pygeoapi/process/manager/mongodb_.py
@@ -70,7 +70,7 @@ def destroy(self):
                          exc_info=(traceback))
             return False
 
-    def get_jobs(self, status=None):
+    def get_jobs(self, status=None, limit=None, offset=None):
         try:
             self._connect()
             database = self.db.job_manager_pygeoapi
@@ -80,7 +80,10 @@ def get_jobs(self, status=None):
             else:
                 jobs = list(collection.find({}))
             LOGGER.info("JOBMANAGER - MongoDB jobs queried")
-            return jobs
+            return {
+                'jobs': jobs,
+                'numberMatched': len(jobs)
+            }
         except Exception:
             LOGGER.error("JOBMANAGER - get_jobs error",
                          exc_info=(traceback))
diff --git a/pygeoapi/process/manager/postgresql.py b/pygeoapi/process/manager/postgresql.py
new file mode 100644
index 000000000..72f3b75c4
--- /dev/null
+++ b/pygeoapi/process/manager/postgresql.py
@@ -0,0 +1,303 @@
+# =================================================================
+#
+# Authors: Francesco Martinelli
+#
+# Copyright (c) 2024 Francesco Martinelli
+#
+# Permission is hereby granted, free of charge, to any person
+# obtaining a copy of this software and associated documentation
+# files (the "Software"), to deal in the Software without
+# restriction, including without limitation the rights to use,
+# copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the
+# Software is furnished to do so, subject to the following
+# conditions:
+#
+# The above copyright notice and this permission notice shall be
+# included in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+# OTHER DEALINGS IN THE SOFTWARE.
+#
+# =================================================================
+
+# Requires a PostgreSQL database with the pygeoapi jobs table structure.
+#
+# Create the database, e.g.:
+#
+# CREATE DATABASE test
+#     WITH TEMPLATE = template0
+#     ENCODING = 'UTF8'
+#     LOCALE = 'en_US.UTF-8';
+# ALTER DATABASE test OWNER TO postgres;
+#
+# Import the dump:
+# psql -U postgres -h 127.0.0.1 -p 5432 test <
+#     tests/data/postgres_manager_full_structure.backup.sql
+
+import json
+import logging
+from pathlib import Path
+from typing import Any, Tuple
+
+from sqlalchemy import insert, update, delete
+from sqlalchemy.engine import make_url
+from sqlalchemy.orm import Session
+
+from pygeoapi.process.base import (
+    JobNotFoundError,
+    JobResultNotFoundError,
+    ProcessorGenericError
+)
+from pygeoapi.process.manager.base import BaseManager
+from pygeoapi.provider.postgresql import get_engine, get_table_model
+from pygeoapi.util import JobStatus
+
+
+LOGGER = logging.getLogger(__name__)
+
+
+class PostgreSQLManager(BaseManager):
+    """PostgreSQL Manager"""
+
+    def __init__(self, manager_def: dict):
+        """
+        Initialize object
+
+        :param manager_def: manager definition
+
+        :returns: `pygeoapi.process.manager.postgresql.PostgreSQLManager`
+        """
+
+        super().__init__(manager_def)
+        self.is_async = True
+        self.id_field = 'identifier'
+        self.supports_subscribing = True
+        self.connection = manager_def['connection']
+
+        try:
+            self.db_search_path = tuple(self.connection.get('search_path',
+                                                            ['public']))
+        except Exception:
+            self.db_search_path = ('public',)
+
+        try:
+            LOGGER.debug('Connecting to database')
+            if isinstance(self.connection, str):
+                _url = make_url(self.connection)
+                self._engine = get_engine(
+                    _url.host,
+                    _url.port,
+                    _url.database,
+                    _url.username,
+                    _url.password)
+            else:
+                self._engine = get_engine(**self.connection)
+        except Exception as err:
+            msg = 'Connection to database failed'
+            LOGGER.error(f'{msg}: {err}')
+            raise ProcessorGenericError(msg)
+
+        try:
+            LOGGER.debug('Getting table model')
+            self.table_model = get_table_model(
+                'jobs',
+                self.id_field,
+                self.db_search_path,
+                self._engine
+            )
+        except Exception as err:
+            msg = 'Table model fetch failed'
+            LOGGER.error(f'{msg}: {err}')
+            raise ProcessorGenericError(msg)
+
+    def get_jobs(self, status: JobStatus = None, limit=None, offset=None
+                 ) -> dict:
+        """
+        Get jobs
+
+        :param status: job status (accepted, running, successful,
+                       failed, results) (default is all)
+        :param limit: number of jobs to return
+        :param offset: pagination offset
+
+        :returns: dict of list of jobs (identifier, status, process
+                  identifier) and numberMatched
+        """
+
+        LOGGER.debug('Querying for jobs')
+        with Session(self._engine) as session:
+            results = session.query(self.table_model)
+            if status is not None:
+                column = getattr(self.table_model, 'status')
+                results = results.filter(column == status.value)
+
+            jobs = [r.__dict__ for r in results.all()]
+            return {
+                'jobs': jobs,
+                'numberMatched': len(jobs)
+            }
+
+    def add_job(self, job_metadata: dict) -> str:
+        """
+        Add a job
+
+        :param job_metadata: `dict` of job metadata
+
+        :returns: identifier of added job
+        """
+
+        LOGGER.debug('Adding job')
+        with Session(self._engine) as session:
+            try:
+                session.execute(insert(self.table_model)
+                                .values(**job_metadata))
+                session.commit()
+            except Exception as err:
+                session.rollback()
+                msg = 'Insert failed'
+                LOGGER.error(f'{msg}: {err}')
+                raise ProcessorGenericError(msg)
+
+        return job_metadata['identifier']
+
+    def update_job(self, job_id: str, update_dict: dict) -> bool:
+        """
+        Updates a job
+
+        :param job_id: job identifier
+        :param update_dict: `dict` of property updates
+
+        :returns: `bool` of status result
+        """
+
+        rowcount = 0
+
+        LOGGER.debug('Updating job')
+        with Session(self._engine) as session:
+            try:
+                column = getattr(self.table_model, self.id_field)
+                stmt = (
+                    update(self.table_model)
+                    .where(column == job_id)
+                    .values(**update_dict)
+                )
+                result = session.execute(stmt)
+                session.commit()
+                rowcount = result.rowcount
+            except Exception as err:
+                session.rollback()
+                msg = 'Update failed'
+                LOGGER.error(f'{msg}: {err}')
+                raise ProcessorGenericError(msg)
+
+        return rowcount == 1
+
+    def get_job(self, job_id: str) -> dict:
+        """
+        Get a single job
+
+        :param job_id: job identifier
+
+        :raises JobNotFoundError: if the job_id does not correspond to a
+                                  known job
+        :returns: `dict`  # `pygeoapi.process.manager.Job`
+        """
+
+        LOGGER.debug('Querying for job')
+        with Session(self._engine) as session:
+            column = getattr(self.table_model, self.id_field)
+            results = session.query(self.table_model).filter(column == job_id)
+
+            first = results.first()
+
+        if first is not None:
+            return first.__dict__
+        else:
+            raise JobNotFoundError()
+
+    def delete_job(self, job_id: str) -> bool:
+        """
+        Deletes a job
+
+        :param job_id: job identifier
+
+        :raises JobNotFoundError: if the job_id does not correspond to a
+                                  known job
+        :returns: `bool` of status result
+        """
+
+        rowcount = 0
+
+        # get result file, if present, for deletion
+        job_result = self.get_job(job_id)
+        location = job_result.get('location')
+
+        LOGGER.debug('Deleting job')
+        with Session(self._engine) as session:
+            try:
+                column = getattr(self.table_model, self.id_field)
+                stmt = (
+                    delete(self.table_model)
+                    .where(column == job_id)
+                )
+                result = session.execute(stmt)
+                session.commit()
+                rowcount = result.rowcount
+            except Exception as err:
+                session.rollback()
+                msg = 'Delete failed'
+                LOGGER.error(f'{msg}: {err}')
+                raise ProcessorGenericError(msg)
+
+        # delete result file if present
+        if None not in [location, self.output_dir]:
+            try:
+                Path(location).unlink()
+            except FileNotFoundError:
+                pass
+
+        return rowcount == 1
+
+    def get_job_result(self, job_id: str) -> Tuple[str, Any]:
+        """
+        Get a job's status and the actual output of executing the process
+
+        :param job_id: job identifier
+
+        :raises JobNotFoundError: if the job_id does not correspond to a
+                                  known job
+        :raises JobResultNotFoundError: if the job-related result cannot
+                                        be returned
+        :returns: `tuple` of mimetype and raw output
+        """
+
+        job_result = self.get_job(job_id)
+        location = job_result.get('location')
+        mimetype = job_result.get('mimetype')
+        job_status = JobStatus[job_result['status']]
+
+        if job_status != JobStatus.successful:
+            # Job is incomplete
+            return (None,)
+        if not location:
+            LOGGER.warning(f'job {job_id!r} - unknown result location')
+            raise JobResultNotFoundError()
+        else:
+            try:
+                location = Path(location)
+                with location.open(encoding='utf-8') as fh:
+                    result = json.load(fh)
+            except (TypeError, FileNotFoundError, json.JSONDecodeError):
+                raise JobResultNotFoundError()
+            else:
+                return mimetype, result
+
+    def __repr__(self):
+        return f'<PostgreSQLManager> {self.name}'
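# Illustrative sketch (hypothetical, not part of this changeset):
# driving the new PostgreSQL manager directly. The definition mirrors
# what pygeoapi builds from its server.manager configuration; the
# connection values and output_dir are assumptions for a local test
# database prepared with the dump referenced above.
from pygeoapi.process.manager.postgresql import PostgreSQLManager

manager_def = {
    'name': 'PostgreSQL',
    'connection': {
        'host': 'localhost',
        'port': 5432,
        'database': 'test',
        'user': 'postgres',
        'password': 'postgres'
    },
    'output_dir': '/tmp'
}

manager = PostgreSQLManager(manager_def)
jobs = manager.get_jobs()
print(jobs['numberMatched'], len(jobs['jobs']))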
diff --git a/pygeoapi/process/manager/tinydb_.py b/pygeoapi/process/manager/tinydb_.py
index 3966e9dd1..2f022a33c 100644
--- a/pygeoapi/process/manager/tinydb_.py
+++ b/pygeoapi/process/manager/tinydb_.py
@@ -82,20 +82,35 @@ def destroy(self) -> bool:
 
         return True
 
-    def get_jobs(self, status: JobStatus = None) -> list:
+    def get_jobs(self, status: JobStatus = None, limit=None, offset=None
+                 ) -> dict:
         """
         Get jobs
 
         :param status: job status (accepted, running, successful,
                        failed, results) (default is all)
+        :param limit: number of jobs to return
+        :param offset: pagination offset
 
-        :returns: 'list` of jobs (identifier, status, process identifier)
+        :returns: dict of list of jobs (identifier, status, process
+                  identifier) and numberMatched
         """
 
         with self._db() as db:
             jobs_list = db.all()
 
-        return jobs_list
+        number_matched = len(jobs_list)
+
+        if offset:
+            jobs_list = jobs_list[offset:]
+
+        if limit:
+            jobs_list = jobs_list[:limit]
+
+        return {
+            'jobs': jobs_list,
+            'numberMatched': number_matched
+        }
 
     def add_job(self, job_metadata: dict) -> str:
         """
diff --git a/pygeoapi/provider/base.py b/pygeoapi/provider/base.py
index 39dcc2a4a..5f9456870 100644
--- a/pygeoapi/provider/base.py
+++ b/pygeoapi/provider/base.py
@@ -73,7 +73,7 @@ def __init__(self, provider_def):
         self.title_field = provider_def.get('title_field')
         self.properties = provider_def.get('properties', [])
         self.file_types = provider_def.get('file_types', [])
-        self.fields = {}
+        self._fields = {}
         self.filename = None
 
         # for coverage providers
@@ -85,13 +85,31 @@ def get_fields(self):
         """
         Get provider field information (names, types)
 
-        Example response: {'field1': 'string', 'field2': 'number'}}
+        Example response:
+            {'field1': {'type': 'string'}, 'field2': {'type': 'number'}}
 
         :returns: dict of field names and their associated JSON Schema types
         """
 
         raise NotImplementedError()
 
+    @property
+    def fields(self) -> dict:
+        """
+        Read-only access to provider field information (names, types)
+
+        Example response:
+            {'field1': {'type': 'string'}, 'field2': {'type': 'number'}}
+
+        :returns: dict of dicts (field names and their
+                  associated JSON Schema definitions)
+        """
+
+        if hasattr(self, '_fields'):
+            return self._fields
+        else:
+            return self.get_fields()
+
     def get_schema(self, schema_type: SchemaType = SchemaType.item):
         """
         Get provider schema model
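# Illustrative sketch (hypothetical, not part of this changeset) of the
# provider refactor applied throughout this patch: get_fields() fills
# the _fields cache at most once and the base class exposes it through
# a read-only fields property, so repeated field lookups no longer
# rescan the backend.
class ExampleProvider:
    def __init__(self):
        self._fields = {}

    @property
    def fields(self) -> dict:
        return self._fields if hasattr(self, '_fields') else self.get_fields()

    def get_fields(self) -> dict:
        if not self._fields:
            # expensive backend scan happens only on the first call
            self._fields['name'] = {'type': 'string'}
        return self._fields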
diff --git a/pygeoapi/provider/base_edr.py b/pygeoapi/provider/base_edr.py
index 3e7a259cb..021b2f403 100644
--- a/pygeoapi/provider/base_edr.py
+++ b/pygeoapi/provider/base_edr.py
@@ -29,10 +29,14 @@
 
 import logging
 
-from pygeoapi.provider.base import BaseProvider
+from pygeoapi.provider.base import BaseProvider, ProviderInvalidDataError
 
 LOGGER = logging.getLogger(__name__)
 
+EDR_QUERY_TYPES = ['position', 'radius', 'area', 'cube',
+                   'trajectory', 'corridor', 'items',
+                   'locations', 'instances']
+
 
 class BaseEDRProvider(BaseProvider):
     """Base EDR Provider"""
@@ -55,6 +59,11 @@ def __init__(self, provider_def):
     @classmethod
     def register(cls):
         def inner(fn):
+            if fn.__name__ not in EDR_QUERY_TYPES:
+                msg = 'Invalid EDR query type'
+                LOGGER.error(msg)
+                raise ProviderInvalidDataError(msg)
+
             cls.query_types.append(fn.__name__)
             return fn
         return inner
diff --git a/pygeoapi/provider/csv_.py b/pygeoapi/provider/csv_.py
index a8800ff0f..15aad908a 100644
--- a/pygeoapi/provider/csv_.py
+++ b/pygeoapi/provider/csv_.py
@@ -54,7 +54,7 @@ def __init__(self, provider_def):
         super().__init__(provider_def)
         self.geometry_x = provider_def['geometry']['x_field']
         self.geometry_y = provider_def['geometry']['y_field']
-        self.fields = self.get_fields()
+        self.get_fields()
 
     def get_fields(self):
         """
@@ -62,32 +62,31 @@ def get_fields(self):
 
         :returns: dict of fields
         """
-
-        LOGGER.debug('Treating all columns as string types')
-        with open(self.data) as ff:
-            LOGGER.debug('Serializing DictReader')
-            data_ = csv.DictReader(ff)
-            fields = {}
-
-            row = next(data_)
-
-            for key, value in row.items():
-                LOGGER.debug(f'key: {key}, value: {value}')
-                value2 = get_typed_value(value)
-                if key in [self.geometry_x, self.geometry_y]:
-                    continue
-                if key == self.id_field:
-                    type_ = 'string'
-                elif isinstance(value2, float):
-                    type_ = 'number'
-                elif isinstance(value2, int):
-                    type_ = 'integer'
-                else:
-                    type_ = 'string'
-
-                fields[key] = {'type': type_}
-
-        return fields
+        if not self._fields:
+            LOGGER.debug('Treating all columns as string types')
+            with open(self.data) as ff:
+                LOGGER.debug('Serializing DictReader')
+                data_ = csv.DictReader(ff)
+
+                row = next(data_)
+
+                for key, value in row.items():
+                    LOGGER.debug(f'key: {key}, value: {value}')
+                    value2 = get_typed_value(value)
+                    if key in [self.geometry_x, self.geometry_y]:
+                        continue
+                    if key == self.id_field:
+                        type_ = 'string'
+                    elif isinstance(value2, float):
+                        type_ = 'number'
+                    elif isinstance(value2, int):
+                        type_ = 'integer'
+                    else:
+                        type_ = 'string'
+
+                    self._fields[key] = {'type': type_}
+
+        return self._fields
 
     def _load(self, offset=0, limit=10, resulttype='results',
               identifier=None, bbox=[], datetime_=None, properties=[],
diff --git a/pygeoapi/provider/csw_facade.py b/pygeoapi/provider/csw_facade.py
index cfb5bb826..69cd0ddee 100644
--- a/pygeoapi/provider/csw_facade.py
+++ b/pygeoapi/provider/csw_facade.py
@@ -69,7 +69,8 @@ def __init__(self, provider_def):
             'language': ('dc:language', 'language')
         }
 
-        self.fields = self.get_fields()
+        self._fields = {}
+        self.get_fields()
 
     def get_fields(self):
         """
@@ -78,17 +79,17 @@ def get_fields(self):
 
         :returns: dict of fields
         """
 
-        fields = {}
-        date_fields = ['date', 'created', 'updated']
+        if not self._fields:
+            date_fields = ['date', 'created', 'updated']
 
-        for key in self.record_mappings.keys():
-            LOGGER.debug(f'key: {key}')
-            fields[key] = {'type': 'string'}
+            for key in self.record_mappings.keys():
+                LOGGER.debug(f'key: {key}')
+                self._fields[key] = {'type': 'string'}
 
-            if key in date_fields:
-                fields[key]['format'] = 'date-time'
+                if key in date_fields:
+                    self._fields[key]['format'] = 'date-time'
 
-        return fields
+        return self._fields
 
     @crs_transform
     def query(self, offset=0, limit=10, resulttype='results',
diff --git a/pygeoapi/provider/elasticsearch_.py b/pygeoapi/provider/elasticsearch_.py
index cf5b4fd99..5cd90a8bb 100644
--- a/pygeoapi/provider/elasticsearch_.py
+++ b/pygeoapi/provider/elasticsearch_.py
@@ -87,7 +87,7 @@ def __init__(self, provider_def):
 
         LOGGER.debug('Grabbing field information')
         try:
-            self.fields = self.get_fields()
+            self.get_fields()
         except exceptions.NotFoundError as err:
             LOGGER.error(err)
             raise ProviderQueryError(err)
@@ -98,38 +98,40 @@ def get_fields(self):
 
         :returns: dict of fields
         """
+        if not self._fields:
+            ii = self.es.indices.get(index=self.index_name,
+                                     allow_no_indices=False)
 
-        fields_ = {}
-        ii = self.es.indices.get(index=self.index_name, allow_no_indices=False)
-
-        LOGGER.debug(f'Response: {ii}')
-        try:
-            if '*' not in self.index_name:
-                p = ii[self.index_name]['mappings']['properties']['properties']
-            else:
-                LOGGER.debug('Wildcard index; setting from first match')
-                index_name_ = list(ii.keys())[0]
-                p = ii[index_name_]['mappings']['properties']['properties']
-        except KeyError:
-            LOGGER.warning('Trying for alias')
-            alias_name = next(iter(ii))
-            p = ii[alias_name]['mappings']['properties']['properties']
-        except IndexError:
-            LOGGER.warning('could not get fields; returning empty set')
-            return {}
-
-        for k, v in p['properties'].items():
-            if 'type' in v:
-                if v['type'] == 'text':
-                    fields_[k] = {'type': 'string'}
-                elif v['type'] == 'date':
-                    fields_[k] = {'type': 'string', 'format': 'date'}
-                elif v['type'] in ('float', 'long'):
-                    fields_[k] = {'type': 'number', 'format': v['type']}
+            LOGGER.debug(f'Response: {ii}')
+            try:
+                if '*' not in self.index_name:
+                    mappings = ii[self.index_name]['mappings']
+                    p = mappings['properties']['properties']
                 else:
-                    fields_[k] = {'type': v['type']}
-
-        return fields_
+                    LOGGER.debug('Wildcard index; setting from first match')
+                    index_name_ = list(ii.keys())[0]
+                    p = ii[index_name_]['mappings']['properties']['properties']
+            except KeyError:
+                LOGGER.warning('Trying for alias')
+                alias_name = next(iter(ii))
+                p = ii[alias_name]['mappings']['properties']['properties']
+            except IndexError:
+                LOGGER.warning('could not get fields; returning empty set')
+                return {}
+
+            for k, v in p['properties'].items():
+                if 'type' in v:
+                    if v['type'] == 'text':
+                        self._fields[k] = {'type': 'string'}
+                    elif v['type'] == 'date':
+                        self._fields[k] = {'type': 'string', 'format': 'date'}
+                    elif v['type'] in ('float', 'long'):
+                        self._fields[k] = {'type': 'number',
+                                           'format': v['type']}
+                    else:
+                        self._fields[k] = {'type': v['type']}
+
+        return self._fields
 
     @crs_transform
     def query(self, offset=0, limit=10, resulttype='results',
diff --git a/pygeoapi/provider/erddap.py b/pygeoapi/provider/erddap.py
index a81876e1f..2fc71c064 100644
--- a/pygeoapi/provider/erddap.py
+++ b/pygeoapi/provider/erddap.py
@@ -51,6 +51,7 @@
 
 from pygeoapi.provider.base import (
     BaseProvider, ProviderNotFoundError, ProviderQueryError)
+from pygeoapi.util import crs_transform
 
 LOGGER = logging.getLogger(__name__)
 
@@ -61,25 +62,27 @@ def __init__(self, provider_def):
 
         LOGGER.debug('Setting provider query filters')
         self.filters = self.options.get('filters')
-        self.fields = self.get_fields()
+        self.get_fields()
 
     def get_fields(self):
-        LOGGER.debug('Fetching one feature for field definitions')
-        properties = self.query(limit=1)['features'][0]['properties']
+        if not self._fields:
+            LOGGER.debug('Fetching one feature for field definitions')
+            properties = self.query(limit=1)['features'][0]['properties']
 
-        for key, value in properties.items():
-            LOGGER.debug(f'Field: {key}={value}')
+            for key, value in properties.items():
+                LOGGER.debug(f'Field: {key}={value}')
 
-            data_type = type(value).__name__
+                data_type = type(value).__name__
 
-            if data_type == 'str':
-                data_type = 'string'
-            if data_type == 'float':
-                data_type = 'number'
-            properties[key] = {'type': data_type}
+                if data_type == 'str':
+                    data_type = 'string'
+                if data_type == 'float':
+                    data_type = 'number'
+                self._fields[key] = {'type': data_type}
 
-        return properties
+        return self._fields
 
+    @crs_transform
     def query(self, offset=0, limit=10, resulttype='results',
               bbox=[], datetime_=None, properties=[], sortby=[],
               select_properties=[], skip_geometry=False, q=None,
@@ -164,6 +167,7 @@ def query(self, offset=0, limit=10, resulttype='results',
             'numberReturned': returned
         }
 
+    @crs_transform
     def get(self, identifier, **kwargs):
         query_params = []
 
diff --git a/pygeoapi/provider/esri.py b/pygeoapi/provider/esri.py
index 0d22a8805..47d74e2b9 100644
--- a/pygeoapi/provider/esri.py
+++ b/pygeoapi/provider/esri.py
@@ -62,8 +62,9 @@ def __init__(self, provider_def):
         self.crs = provider_def.get('crs', '4326')
         self.username = provider_def.get('username')
         self.password = provider_def.get('password')
+        self.token_url = provider_def.get('token_service', GENERATE_TOKEN_URL)
+        self.token_referer = provider_def.get('referer', ARCGIS_URL)
         self.token = None
-
         self.session = Session()
 
         self.login()
@@ -76,7 +77,7 @@ def get_fields(self):
 
         :returns: `dict` of fields
         """
-        if not self.fields:
+        if not self._fields:
             # Load fields
             params = {'f': 'pjson'}
             resp =
self.get_response(self.data, params=params) @@ -102,9 +103,9 @@ def get_fields(self): raise ProviderTypeError(msg) for _ in resp['fields']: - self.fields.update({_['name']: {'type': _['type']}}) + self._fields.update({_['name']: {'type': _['type']}}) - return self.fields + return self._fields @crs_transform def query(self, offset=0, limit=10, resulttype='results', @@ -194,16 +195,15 @@ def login(self): msg = 'Missing ESRI login information, not setting token' LOGGER.debug(msg) return - params = { 'f': 'pjson', 'username': self.username, 'password': self.password, - 'referer': ARCGIS_URL + 'referer': self.token_referer } LOGGER.debug('Logging in') - with self.session.post(GENERATE_TOKEN_URL, data=params) as r: + with self.session.post(self.token_url, data=params) as r: self.token = r.json().get('token') # https://enterprise.arcgis.com/en/server/latest/administer/windows/about-arcgis-tokens.htm self.session.headers.update({ diff --git a/pygeoapi/provider/geojson.py b/pygeoapi/provider/geojson.py index 180cf0746..257166a7c 100644 --- a/pygeoapi/provider/geojson.py +++ b/pygeoapi/provider/geojson.py @@ -68,7 +68,7 @@ def __init__(self, provider_def): """initializer""" super().__init__(provider_def) - self.fields = self.get_fields() + self.get_fields() def get_fields(self): """ @@ -77,23 +77,24 @@ def get_fields(self): :returns: dict of fields """ - fields = {} - LOGGER.debug('Treating all columns as string types') - if os.path.exists(self.data): - with open(self.data) as src: - data = json.loads(src.read()) - for key, value in data['features'][0]['properties'].items(): - if isinstance(value, float): - type_ = 'number' - elif isinstance(value, int): - type_ = 'integer' - else: - type_ = 'string' - - fields[key] = {'type': type_} - else: - LOGGER.warning(f'File {self.data} does not exist.') - return fields + if not self._fields: + LOGGER.debug('Treating all columns as string types') + if os.path.exists(self.data): + with open(self.data) as src: + data = json.loads(src.read()) + for key, value in data['features'][0]['properties'].items(): + if isinstance(value, float): + type_ = 'number' + elif isinstance(value, int): + type_ = 'integer' + else: + type_ = 'string' + + self._fields[key] = {'type': type_} + else: + LOGGER.warning(f'File {self.data} does not exist.') + + return self._fields def _load(self, skip_geometry=None, properties=[], select_properties=[]): """Load and validate the source GeoJSON file diff --git a/pygeoapi/provider/mongo.py b/pygeoapi/provider/mongo.py index 1656de496..ca258018c 100644 --- a/pygeoapi/provider/mongo.py +++ b/pygeoapi/provider/mongo.py @@ -66,7 +66,7 @@ def __init__(self, provider_def): self.featuredb = dbclient.get_default_database() self.collection = provider_def['collection'] self.featuredb[self.collection].create_index([("geometry", GEOSPHERE)]) - self.fields = self.get_fields() + self.get_fields() def get_fields(self): """ @@ -75,25 +75,24 @@ def get_fields(self): :returns: dict of fields """ - pipeline = [ - {"$project": {"properties": 1}}, - {"$unwind": "$properties"}, - {"$group": {"_id": "$properties", "count": {"$sum": 1}}}, - {"$project": {"_id": 1}} - ] + if not self._fields: + pipeline = [ + {"$project": {"properties": 1}}, + {"$unwind": "$properties"}, + {"$group": {"_id": "$properties", "count": {"$sum": 1}}}, + {"$project": {"_id": 1}} + ] - result = list(self.featuredb[self.collection].aggregate(pipeline)) + result = list(self.featuredb[self.collection].aggregate(pipeline)) - # prepare a dictionary with fields - # set the field type to 'string'. 
- # by operating without a schema, mongo can query any data type. - fields = {} + # prepare a dictionary with fields + # set the field type to 'string'. + # by operating without a schema, mongo can query any data type. + for i in result: + for key in result[0]['_id'].keys(): + self._fields[key] = {'type': 'string'} - for i in result: - for key in result[0]['_id'].keys(): - fields[key] = {'type': 'string'} - - return fields + return self._fields def _get_feature_list(self, filterObj, sortList=[], skip=0, maxitems=1, skip_geometry=False): diff --git a/pygeoapi/provider/ogr.py b/pygeoapi/provider/ogr.py index 1584e6dcc..3132c2cb8 100644 --- a/pygeoapi/provider/ogr.py +++ b/pygeoapi/provider/ogr.py @@ -188,7 +188,7 @@ def __init__(self, provider_def): self.conn = None LOGGER.debug('Grabbing field information') - self.fields = self.get_fields() + self.get_fields() def _list_open_options(self): return [ @@ -260,43 +260,43 @@ def get_fields(self): :returns: dict of fields """ - fields = {} - try: - layer_defn = self._get_layer().GetLayerDefn() - for fld in range(layer_defn.GetFieldCount()): - field_defn = layer_defn.GetFieldDefn(fld) - fieldName = field_defn.GetName() - fieldTypeCode = field_defn.GetType() - fieldType = field_defn.GetFieldTypeName(fieldTypeCode) + if not self._fields: + try: + layer_defn = self._get_layer().GetLayerDefn() + for fld in range(layer_defn.GetFieldCount()): + field_defn = layer_defn.GetFieldDefn(fld) + fieldName = field_defn.GetName() + fieldTypeCode = field_defn.GetType() + fieldType = field_defn.GetFieldTypeName(fieldTypeCode) - fieldName2 = fieldType.lower() + fieldName2 = fieldType.lower() - if fieldName2 == 'integer64': - fieldName2 = 'integer' - elif fieldName2 == 'real': - fieldName2 = 'number' + if fieldName2 == 'integer64': + fieldName2 = 'integer' + elif fieldName2 == 'real': + fieldName2 = 'number' - fields[fieldName] = {'type': fieldName2} + self._fields[fieldName] = {'type': fieldName2} - if fieldName2 == 'datetime': - fields[fieldName] = { - 'type': 'string', - 'format': 'date-time' - } + if fieldName2 == 'datetime': + self._fields[fieldName] = { + 'type': 'string', + 'format': 'date-time' + } - # fieldWidth = layer_defn.GetFieldDefn(fld).GetWidth() - # GetPrecision = layer_defn.GetFieldDefn(fld).GetPrecision() + # fieldWidth = layer_defn.GetFieldDefn(fld).GetWidth() + # GetPrecision = layer_defn.GetFieldDefn(fld).GetPrecision() # noqa - except RuntimeError as err: - LOGGER.error(err) - raise ProviderConnectionError(err) - except Exception as err: - LOGGER.error(err) + except RuntimeError as err: + LOGGER.error(err) + raise ProviderConnectionError(err) + except Exception as err: + LOGGER.error(err) - finally: - self._close() + finally: + self._close() - return fields + return self._fields def query(self, offset=0, limit=10, resulttype='results', bbox=[], datetime_=None, properties=[], sortby=[], diff --git a/pygeoapi/provider/oracle.py b/pygeoapi/provider/oracle.py index 16ce2ced5..a302d2513 100644 --- a/pygeoapi/provider/oracle.py +++ b/pygeoapi/provider/oracle.py @@ -66,17 +66,31 @@ def create_pool(cls, conn_dict, oracle_pool_min, oracle_pool_max): """Initialize the connection pool for the class Lock is implemented before function call at __init__""" dsn = cls._make_dsn(conn_dict) + + connect_kwargs = { + 'dsn': dsn, + 'min': oracle_pool_min, + 'max': oracle_pool_max, + 'increment': 1 + } + # Create the pool + if conn_dict.get("external_auth") == "wallet": + # If Auth is via Wallet you need to save a wallet under + # the directory returned by this bash 
command if apache is used:
+            # cat /etc/passwd | grep apache
+            # unless another directory is specified in the sqlnet.ora file
+            LOGGER.debug("Connection pool from wallet.")
+            connect_kwargs["externalauth"] = True
+            connect_kwargs["homogeneous"] = False
 
-        p = oracledb.create_pool(
-            user=conn_dict["user"],
-            password=conn_dict["password"],
-            dsn=dsn,
-            min=oracle_pool_min,
-            max=oracle_pool_max,
-            increment=1,
-        )
-        LOGGER.debug("Connection pool created successfully.")
+        else:
+            LOGGER.debug("Connection pool from user and password.")
+            connect_kwargs["user"] = conn_dict["user"]
+            connect_kwargs["password"] = conn_dict["password"]
+
+        p = oracledb.create_pool(**connect_kwargs)
+        LOGGER.debug("Connection pool created successfully")
 
         return p
 
@@ -435,12 +449,12 @@ def get_fields(self):
         """
         LOGGER.debug("Get available fields/properties")
 
-        if not self.fields:
+        if not self._fields:
             with DatabaseConnection(
                 self.conn_dic, self.table, properties=self.properties
             ) as db:
-                self.fields = db.fields
-        return self.fields
+                self._fields = db.fields
+        return self._fields
 
     def _get_where_clauses(
         self,
@@ -633,6 +647,19 @@ def query(
 
         :returns: GeoJSON FeaturesCollection
         """
+        LOGGER.debug(f"properties contains: {properties}")
+
+        # NOTE: properties contains field keys plus extra params,
+        #       so they need to be split up here
+        filtered_properties = []
+        extra_params = {}
+        for (key, value) in properties:
+            if key in self.fields.keys():
+                filtered_properties.append((key, value))
+            else:
+                extra_params[key] = value
+
+        properties = filtered_properties
 
         # Check mandatory filter properties
         property_dict = dict(properties)
@@ -790,6 +817,7 @@ def query(
             q,
             language,
             filterq,
+            extra_params=extra_params
         )
 
         # Clean up placeholders that aren't used by the
diff --git a/pygeoapi/provider/parquet.py b/pygeoapi/provider/parquet.py
new file mode 100644
index 000000000..70ec81e80
--- /dev/null
+++ b/pygeoapi/provider/parquet.py
@@ -0,0 +1,458 @@
+# =================================================================
+#
+# Authors: Leo Ghignone
+#
+# Copyright (c) 2024 Leo Ghignone
+#
+# Permission is hereby granted, free of charge, to any person
+# obtaining a copy of this software and associated documentation
+# files (the "Software"), to deal in the Software without
+# restriction, including without limitation the rights to use,
+# copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the
+# Software is furnished to do so, subject to the following
+# conditions:
+#
+# The above copyright notice and this permission notice shall be
+# included in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+# OTHER DEALINGS IN THE SOFTWARE.
+# +# ================================================================= + +from itertools import chain +import json +import logging + +from dateutil.parser import isoparse +import geopandas as gpd +import pyarrow +import pyarrow.compute as pc +import pyarrow.dataset +import s3fs + +from pygeoapi.provider.base import ( + BaseProvider, + ProviderConnectionError, + ProviderGenericError, + ProviderItemNotFoundError, + ProviderQueryError, +) +from pygeoapi.util import crs_transform + +LOGGER = logging.getLogger(__name__) + + +def arrow_to_pandas_type(arrow_type): + pd_type = arrow_type.to_pandas_dtype() + try: + # Needed for specific types such as dtype(' pc.scalar(minx)) + & (pc.field(self.miny) > pc.scalar(miny)) + & (pc.field(self.maxx) < pc.scalar(maxx)) + & (pc.field(self.maxy) < pc.scalar(maxy)) + ) + + if datetime_ is not None: + if self.time_field is None: + msg = ( + 'Dataset does not have a time field, ' + 'querying by datetime is not supported.' + ) + raise ProviderQueryError(msg) + timefield = pc.field(self.time_field) + if '/' in datetime_: + begin, end = datetime_.split('/') + if begin != '..': + begin = isoparse(begin) + filter = filter & (timefield >= begin) + if end != '..': + end = isoparse(end) + filter = filter & (timefield <= end) + else: + target_time = isoparse(datetime_) + filter = filter & (timefield == target_time) + + if properties: + LOGGER.debug('processing properties') + for name, value in properties: + field = self.ds.schema.field(name) + pd_type = arrow_to_pandas_type(field.type) + expr = pc.field(name) == pc.scalar(pd_type(value)) + + filter = filter & expr + + if len(select_properties) == 0: + select_properties = self.ds.schema.names + else: # Load id and geometry together with any specified columns + if self.has_geometry and 'geometry' not in select_properties: + select_properties.append('geometry') + if self.id_field not in select_properties: + select_properties.insert(0, self.id_field) + + if skip_geometry: + select_properties.remove('geometry') + + # Make response based on resulttype specified + if resulttype == 'hits': + LOGGER.debug('hits only specified') + result = self._response_feature_hits(filter) + elif resulttype == 'results': + LOGGER.debug('results specified') + result = self._response_feature_collection( + filter, offset, limit, columns=select_properties + ) + else: + LOGGER.error(f'Invalid resulttype: {resulttype}') + + except RuntimeError as err: + LOGGER.error(err) + raise ProviderQueryError(err) + except ProviderConnectionError as err: + LOGGER.error(err) + raise ProviderConnectionError(err) + except Exception as err: + LOGGER.error(err) + raise ProviderGenericError(err) + + return result + + @crs_transform + def get(self, identifier, **kwargs): + """ + Get Feature by id + + :param identifier: feature id + + :returns: a single feature + """ + result = None + try: + LOGGER.debug(f'Fetching identifier {identifier}') + id_type = arrow_to_pandas_type( + self.ds.schema.field(self.id_field).type) + batches = self._read_parquet( + filter=( + pc.field(self.id_field) == pc.scalar(id_type(identifier)) + ) + ) + + for batch in batches: + if batch.num_rows > 0: + assert ( + batch.num_rows == 1 + ), f'Multiple items found with ID {identifier}' + row = batch.to_pandas() + break + else: + raise ProviderItemNotFoundError(f'ID {identifier} not found') + + if self.has_geometry: + geom = gpd.GeoSeries.from_wkb(row['geometry'], crs=self.crs) + else: + geom = [None] + gdf = gpd.GeoDataFrame(row, geometry=geom) + LOGGER.debug('results computed') + + # Grab the 
collection from geopandas geo_interface
+            result = gdf.__geo_interface__['features'][0]
+
+        except RuntimeError as err:
+            LOGGER.error(err)
+            raise ProviderQueryError(err)
+        except ProviderConnectionError as err:
+            LOGGER.error(err)
+            raise ProviderConnectionError(err)
+        except ProviderItemNotFoundError as err:
+            LOGGER.error(err)
+            raise ProviderItemNotFoundError(err)
+        except Exception as err:
+            LOGGER.error(err)
+            raise ProviderGenericError(err)
+
+        return result
+
+    def __repr__(self):
+        return f'<ParquetProvider> {self.data}'
+
+    def _response_feature_collection(self, filter, offset, limit,
+                                     columns=None):
+        """
+        Assembles output from query as
+        GeoJSON FeatureCollection structure.
+
+        :returns: GeoJSON FeatureCollection
+        """
+
+        LOGGER.debug(f'offset:{offset}, limit:{limit}')
+
+        try:
+            batches, scanner = self._read_parquet(
+                filter=filter, columns=columns, return_scanner=True
+            )
+
+            # Discard batches until offset is reached
+            counted = 0
+            for batch in batches:
+                if counted + batch.num_rows > offset:
+                    # Slice current batch to start from the requested row
+                    batch = batch.slice(offset=offset - counted)
+                    # Build a new generator yielding the current batch
+                    # and all following ones
+                    batches = chain([batch], batches)
+                    break
+                else:
+                    counted += batch.num_rows
+
+            # batches is a generator; it will now be either fully spent
+            # or set to the new generator starting from offset
+
+            # Get the next `limit+1` rows.
+            # The extra row is used to check if a "next" link is needed
+            # (when numberMatched > offset + limit)
+            batches_list = []
+            read = 0
+
+            for batch in batches:
+                read += batch.num_rows
+                if read > limit:
+                    batches_list.append(batch.slice(0, limit + 1))
+                    break
+                else:
+                    batches_list.append(batch)
+
+            # Passing schema from scanner in case no rows are returned
+            table = pyarrow.Table.from_batches(
+                batches_list, schema=scanner.projected_schema
+            )
+
+            rp = table.to_pandas()
+
+            number_matched = offset + len(rp)
+
+            # Remove the extra row
+            if len(rp) > limit:
+                rp = rp.iloc[:-1]
+
+            if 'geometry' not in rp.columns:
+                # We need a null geometry column to create a GeoDataFrame
+                rp['geometry'] = None
+                geom = gpd.GeoSeries.from_wkb(rp['geometry'])
+            else:
+                geom = gpd.GeoSeries.from_wkb(rp['geometry'], crs=self.crs)
+
+            gdf = gpd.GeoDataFrame(rp, geometry=geom)
+            LOGGER.debug('results computed')
+            result = gdf.__geo_interface__
+
+            # Add numberMatched to generate "next" link
+            result['numberMatched'] = number_matched
+
+            return result
+
+        except RuntimeError as error:
+            LOGGER.error(error)
+            raise error
+
+    def _response_feature_hits(self, filter):
+        """
+        Assembles GeoJSON hits from row count
+
+        :returns: GeoJSON FeatureCollection
+        """
+
+        try:
+            scanner = pyarrow.dataset.Scanner.from_dataset(self.ds,
+                                                           filter=filter)
+            return {
+                'type': 'FeatureCollection',
+                'numberMatched': scanner.count_rows(),
+                'features': [],
+            }
+        except Exception as error:
+            LOGGER.error(error)
+            raise error
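# Illustrative sketch (hypothetical, not part of this changeset) of the
# batch-slicing technique used in _response_feature_collection above:
# skip whole record batches until the offset is reached, then read
# limit + 1 rows so a further page can be detected without a full count.
# Plain lists stand in for pyarrow record batches.
from itertools import chain


def paginate(batches, offset, limit):
    counted = 0
    for batch in batches:
        if counted + len(batch) > offset:
            # re-prepend the sliced batch to the remaining generator
            batches = chain([batch[offset - counted:]], batches)
            break
        counted += len(batch)
    rows, taken = [], 0
    for batch in batches:
        rows.extend(batch[:limit + 1 - taken])
        taken = len(rows)
        if taken > limit:
            break
    has_next = len(rows) > limit
    return rows[:limit], has_next


print(paginate(iter([[1, 2, 3], [4, 5], [6, 7, 8]]), 2, 4))
# -> ([3, 4, 5, 6], True)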
diff --git a/pygeoapi/provider/postgresql.py b/pygeoapi/provider/postgresql.py
index bf0a5799f..eb4b21454 100644
--- a/pygeoapi/provider/postgresql.py
+++ b/pygeoapi/provider/postgresql.py
@@ -62,7 +62,8 @@
 import shapely
 from sqlalchemy import create_engine, MetaData, PrimaryKeyConstraint, asc, desc
 from sqlalchemy.engine import URL
-from sqlalchemy.exc import InvalidRequestError, OperationalError
+from sqlalchemy.exc import ConstraintColumnNotFoundError, \
+    InvalidRequestError, OperationalError
 from sqlalchemy.ext.automap import automap_base
 from sqlalchemy.orm import Session, load_only
 from sqlalchemy.sql.expression import and_
@@ -124,7 +125,7 @@ def __init__(self, provider_def):
         )
         LOGGER.debug(f'DB connection: {repr(self._engine.url)}')
 
-        self.fields = self.get_fields()
+        self.get_fields()
 
     def query(self, offset=0, limit=10, resulttype='results',
               bbox=[], datetime_=None, properties=[], sortby=[],
@@ -204,8 +205,6 @@ def get_fields(self):
 
         LOGGER.debug('Get available fields/properties')
 
-        fields = {}
-
         # sql-schema only allows these types, so we need to map from sqlalchemy
         # string, number, integer, object, array, boolean, null,
         # https://json-schema.org/understanding-json-schema/reference/type.html
@@ -248,17 +247,18 @@ def _column_format_to_json_schema_format(column_type):
             LOGGER.debug('No string format detected')
             return None
 
-        for column in self.table_model.__table__.columns:
-            LOGGER.debug(f'Testing {column.name}')
-            if column.name == self.geom:
-                continue
+        if not self._fields:
+            for column in self.table_model.__table__.columns:
+                LOGGER.debug(f'Testing {column.name}')
+                if column.name == self.geom:
+                    continue
 
-            fields[str(column.name)] = {
-                'type': _column_type_to_json_schema_type(column.type),
-                'format': _column_format_to_json_schema_format(column.type)
-            }
+                self._fields[str(column.name)] = {
+                    'type': _column_type_to_json_schema_type(column.type),
+                    'format': _column_format_to_json_schema_format(column.type)
+                }
 
-        return fields
+        return self._fields
 
     def get(self, identifier, crs_transform_spec=None, **kwargs):
         """
@@ -516,7 +516,7 @@ def get_table_model(
     sqlalchemy_table_def = metadata.tables[f'{schema}.{table_name}']
     try:
         sqlalchemy_table_def.append_constraint(PrimaryKeyConstraint(id_field))
-    except KeyError:
+    except (ConstraintColumnNotFoundError, KeyError):
         raise ProviderQueryError(
             f"No such id_field column ({id_field}) on {schema}.{table_name}.")
 
diff --git a/pygeoapi/provider/rasterio_.py b/pygeoapi/provider/rasterio_.py
index 2bf25e3fb..3b0fbc2c7 100644
--- a/pygeoapi/provider/rasterio_.py
+++ b/pygeoapi/provider/rasterio_.py
@@ -59,38 +59,39 @@ def __init__(self, provider_def):
             self.axes = self._coverage_properties['axes']
             self.crs = self._coverage_properties['bbox_crs']
             self.num_bands = self._coverage_properties['num_bands']
-            self.fields = self.get_fields()
+            self.get_fields()
             self.native_format = provider_def['format']['name']
         except Exception as err:
             LOGGER.warning(err)
             raise ProviderConnectionError(err)
 
     def get_fields(self):
-        fields = {}
-
-        for i, dtype in zip(self._data.indexes, self._data.dtypes):
-            LOGGER.debug(f'Adding field for band {i}')
-            i2 = str(i)
-
-            parameter = _get_parameter_metadata(
-                self._data.profile['driver'], self._data.tags(i))
-
-            name = parameter['description']
-            units = parameter.get('unit_label')
-
-            dtype2 = dtype
-            if dtype.startswith('float'):
-                dtype2 = 'number'
-
-            fields[i2] = {
-                'title': name,
-                'type': dtype2,
-                '_meta': self._data.tags(i)
-            }
-            if units is not None:
-                fields[i2]['x-ogc-unit'] = units
+        if not self._fields:
+            for i, dtype in zip(self._data.indexes, self._data.dtypes):
+                LOGGER.debug(f'Adding field for band {i}')
+                i2 = str(i)
+
+                parameter = _get_parameter_metadata(
+                    self._data.profile['driver'], self._data.tags(i))
+
+                name = parameter['description']
+                units = parameter.get('unit_label')
+
+                dtype2 = dtype
+                if dtype.startswith('float'):
+                    dtype2 = 'number'
+                elif dtype.startswith('int'):
+                    dtype2 = 'integer'
+
+                self._fields[i2] = {
+                    'title': name,
+                    'type': dtype2,
+                    '_meta': self._data.tags(i)
+                }
+                if units is not None:
+                    self._fields[i2]['x-ogc-unit'] = units
 
-        return fields
+        return self._fields
 
     def query(self, properties=[], subsets={}, bbox=None, bbox_crs=4326,
               datetime_=None, format_='json', **kwargs):
@@ -241,16 +242,15 @@ def query(self, properties=[], subsets={}, bbox=None, bbox_crs=4326,
                 out_meta['units'] = _data.units
 
         LOGGER.debug('Serializing data in memory')
-        with MemoryFile() as memfile:
-            with memfile.open(**out_meta) as dest:
-                dest.write(out_image)
-
-            if format_ == 'json':
-                LOGGER.debug('Creating output in CoverageJSON')
-                out_meta['bands'] = args['indexes']
-                return self.gen_covjson(out_meta, out_image)
-
-            else:  # return data in native format
+        if format_ == 'json':
+            LOGGER.debug('Creating output in CoverageJSON')
+            out_meta['bands'] = args['indexes']
+            return self.gen_covjson(out_meta, out_image)
+
+        else:  # return data in native format
+            with MemoryFile() as memfile:
+                with memfile.open(**out_meta) as dest:
+                    dest.write(out_image)
                 LOGGER.debug('Returning data in native format')
                 return memfile.read()
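# Illustrative sketch (hypothetical, not part of this changeset):
# because the $expand mapping now lives on the provider class (see
# expand = _EXPAND below), a deployment can presumably supply a trimmed
# mapping in a small subclass instead of patching the module-level
# dict; the reduced entries here are assumptions.
from pygeoapi.provider.sensorthings import SensorThingsProvider


class LeanSensorThingsProvider(SensorThingsProvider):
    expand = {
        'Things': 'Locations',
        'Observations': 'Datastream'
    }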
self._fields def query(self, properties=[], subsets={}, bbox=None, bbox_crs=4326, datetime_=None, format_='json', **kwargs): @@ -241,16 +242,15 @@ def query(self, properties=[], subsets={}, bbox=None, bbox_crs=4326, out_meta['units'] = _data.units LOGGER.debug('Serializing data in memory') - with MemoryFile() as memfile: - with memfile.open(**out_meta) as dest: - dest.write(out_image) - - if format_ == 'json': - LOGGER.debug('Creating output in CoverageJSON') - out_meta['bands'] = args['indexes'] - return self.gen_covjson(out_meta, out_image) - - else: # return data in native format + if format_ == 'json': + LOGGER.debug('Creating output in CoverageJSON') + out_meta['bands'] = args['indexes'] + return self.gen_covjson(out_meta, out_image) + + else: # return data in native format + with MemoryFile() as memfile: + with memfile.open(**out_meta) as dest: + dest.write(out_image) LOGGER.debug('Returning data in native format') return memfile.read() diff --git a/pygeoapi/provider/sensorthings.py b/pygeoapi/provider/sensorthings.py index 6b497b44b..defd48c43 100644 --- a/pygeoapi/provider/sensorthings.py +++ b/pygeoapi/provider/sensorthings.py @@ -30,14 +30,14 @@ # ================================================================= from json.decoder import JSONDecodeError -import os import logging from requests import Session +from pygeoapi.config import get_config from pygeoapi.provider.base import ( BaseProvider, ProviderQueryError, ProviderConnectionError) from pygeoapi.util import ( - yaml_load, url_join, get_provider_default, crs_transform, get_base_url) + url_join, get_provider_default, crs_transform, get_base_url) LOGGER = logging.getLogger(__name__) @@ -51,10 +51,10 @@ _EXPAND = { 'Things': 'Locations,Datastreams', 'Observations': 'Datastream,FeatureOfInterest', + 'ObservedProperties': 'Datastreams/Thing/Locations', 'Datastreams': """ Sensor ,ObservedProperty - ,Thing ,Thing/Locations ,Observations( $select=@iot.id; @@ -71,6 +71,7 @@ class SensorThingsProvider(BaseProvider): """SensorThings API (STA) Provider""" + expand = EXPAND def __init__(self, provider_def): """ @@ -82,63 +83,11 @@ def __init__(self, provider_def): :returns: pygeoapi.provider.sensorthings.SensorThingsProvider """ LOGGER.debug('Setting SensorThings API (STA) provider') - + self.linked_entity = {} super().__init__(provider_def) - self.data.rstrip('/') - try: - self.entity = provider_def['entity'] - self._url = url_join(self.data, self.entity) - except KeyError: - LOGGER.debug('Attempting to parse Entity from provider data') - if not self._get_entity(self.data): - raise RuntimeError('Entity type required') - self.entity = self._get_entity(self.data) - self._url = self.data - self.data = self._url.rstrip(f'/{self.entity}') - LOGGER.debug(f'STA endpoint: {self.data}, Entity: {self.entity}') - - # Default id - if self.id_field: - LOGGER.debug(f'Using id field: {self.id_field}') - else: - LOGGER.debug('Using default @iot.id for id field') - self.id_field = '@iot.id' - - # Create intra-links - self.links = {} - self.intralink = provider_def.get('intralink', False) - if self.intralink and provider_def.get('rel_link'): - # For pytest - self.rel_link = provider_def['rel_link'] - elif self.intralink: - # Read from pygeoapi config - with open(os.getenv('PYGEOAPI_CONFIG'), encoding='utf8') as fh: - CONFIG = yaml_load(fh) - self.rel_link = get_base_url(CONFIG) - - for (name, rs) in CONFIG['resources'].items(): - pvs = rs.get('providers') - p = get_provider_default(pvs) - e = p.get('entity') or self._get_entity(p['data']) - if any([ - 
not pvs, # No providers in resource - not p.get('intralink'), # No configuration for intralinks - not e, # No STA entity found - self.data not in p.get('data') # No common STA endpoint - ]): - continue - - if p.get('uri_field'): - LOGGER.debug(f'Linking {e} with field: {p["uri_field"]}') - else: - LOGGER.debug(f'Linking {e} with collection: {name}') - - self.links[e] = { - 'cnm': name, # OAPI collection name, - 'cid': p.get('id_field', '@iot.id'), # OAPI id_field - 'uri': p.get('uri_field') # STA uri_field - } + self._generate_mappings(provider_def) + LOGGER.debug(f'STA endpoint: {self.data}, Entity: {self.entity}') # Start session self.http = Session() @@ -150,7 +99,7 @@ def get_fields(self): :returns: dict of fields """ - if not self.fields: + if not self._fields: r = self._get_response(self._url, {'$top': 1}) try: results = r['value'][0] @@ -161,11 +110,11 @@ def get_fields(self): for (n, v) in results.items(): if isinstance(v, (int, float)) or \ (isinstance(v, (dict, list)) and n in ENTITY): - self.fields[n] = {'type': 'number'} + self._fields[n] = {'type': 'number'} elif isinstance(v, str): - self.fields[n] = {'type': 'string'} + self._fields[n] = {'type': 'string'} - return self.fields + return self._fields @crs_transform def query(self, offset=0, limit=10, resulttype='results', @@ -272,17 +221,19 @@ def _load(self, offset=0, limit=10, resulttype='results', return fc - def _make_feature(self, entity, select_properties=[], skip_geometry=False): + def _make_feature(self, feature, select_properties=[], skip_geometry=False, + entity=None): """ Private function: Create feature from entity - :param entity: `dict` of STA entity + :param feature: `dict` of STA entity :param select_properties: list of property names :param skip_geometry: bool of whether to skip geometry (default False) + :param entity: SensorThings entity name :returns: dict of GeoJSON Feature """ - _ = entity.pop(self.id_field) + _ = feature.pop(self.id_field) id = f"'{_}'" if isinstance(_, str) else str(_) f = { 'type': 'Feature', 'id': id, 'properties': {}, 'geometry': None @@ -290,28 +241,35 @@ def _make_feature(self, entity, select_properties=[], skip_geometry=False): # Make geometry if not skip_geometry: - f['geometry'] = self._geometry(entity) + f['geometry'] = self._geometry(feature, entity) # Fill properties block try: f['properties'] = self._expand_properties( - entity, select_properties) + feature, select_properties, entity) except KeyError as err: LOGGER.error(err) raise ProviderQueryError(err) return f - def _get_response(self, url, params={}): + def _get_response(self, url, params={}, entity=None, expand=None): """ Private function: Get STA response :param url: request url :param params: query parameters + :param entity: SensorThings entity name + :param expand: SensorThings expand query + :returns: STA response """ - params.update({'$expand': EXPAND[self.entity]}) + if expand: + params.update({'$expand': expand}) + else: + entity_ = entity or self.entity + params.update({'$expand': self.expand[entity_]}) r = self.http.get(url, params=params) @@ -327,13 +285,15 @@ def _get_response(self, url, params={}): return response - def _make_filter(self, properties, bbox=[], datetime_=None): + def _make_filter(self, properties, bbox=[], datetime_=None, + entity=None): """ Private function: Make STA filter from query properties :param properties: list of tuples (name, value) :param bbox: bounding box [minx,miny,maxx,maxy] :param datetime_: temporal (datestamp or extent) + :param entity: SensorThings entity name :returns: 
STA $filter string of properties """ @@ -345,16 +305,8 @@ def _make_filter(self, properties, bbox=[], datetime_=None): ret.append(f'{name} eq {value}') if bbox: - minx, miny, maxx, maxy = bbox - bbox_ = f'POLYGON (({minx} {miny}, {maxx} {miny}, \ - {maxx} {maxy}, {minx} {maxy}, {minx} {miny}))' - if self.entity == 'Things': - loc = 'Locations/location' - elif self.entity == 'Datastreams': - loc = 'Thing/Locations/location' - elif self.entity == 'Observations': - loc = 'FeatureOfInterest/feature' - ret.append(f"st_within({loc}, geography'{bbox_}')") + entity_ = entity or self.entity + ret.append(self._make_bbox(bbox, entity_)) if datetime_ is not None: if self.time_field is None: @@ -373,6 +325,20 @@ def _make_filter(self, properties, bbox=[], datetime_=None): return ' and '.join(ret) + @staticmethod + def _make_bbox(bbox, entity): + minx, miny, maxx, maxy = bbox + bbox_ = f'POLYGON(({minx} {miny},{maxx} {miny},{maxx} {maxy},{minx} {maxy},{minx} {miny}))' # noqa + if entity == 'Things': + loc = 'Locations/location' + elif entity == 'Datastreams': + loc = 'Thing/Locations/location' + elif entity == 'Observations': + loc = 'FeatureOfInterest/feature' + elif entity == 'ObservedProperties': + loc = 'Datastreams/observedArea' + return f"st_within({loc},geography'{bbox_}')" + def _make_orderby(self, sortby): """ Private function: Make STA filter from query properties @@ -393,79 +359,85 @@ def _make_orderby(self, sortby): return ','.join(ret) - def _geometry(self, entity): + def _geometry(self, feature, entity=None): """ Private function: Retrieve STA geometry - :param entity: SensorThings entity + :param feature: SensorThings entity + :param entity: SensorThings entity name :returns: GeoJSON Geometry for feature """ + entity_ = entity or self.entity try: - if self.entity == 'Things': - return entity['Locations'][0]['location'] + if entity_ == 'Things': + return feature['Locations'][0]['location'] - elif self.entity == 'Observations': - return entity['FeatureOfInterest'].pop('feature') + elif entity_ == 'Observations': + return feature['FeatureOfInterest'].pop('feature') - elif self.entity == 'Datastreams': + elif entity_ == 'Datastreams': try: - return entity['Observations'][0]['FeatureOfInterest'].pop('feature') # noqa + return feature['Observations'][0]['FeatureOfInterest'].pop('feature') # noqa except (KeyError, IndexError): - return entity['Thing'].pop('Locations')[0]['location'] + return feature['Thing'].pop('Locations')[0]['location'] + + elif entity_ == 'ObservedProperties': + return feature['Datastreams'][0]['Thing']['Locations'][0]['location'] # noqa except (KeyError, IndexError): LOGGER.warning('No geometry found') return None - def _expand_properties(self, entity, keys=(), uri=''): + def _expand_properties(self, feature, keys=(), uri='', + entity=None): """ Private function: Parse STA entity into feature - :param entity: SensorThings entity + :param feature: `dict` of SensorThings entity :param keys: keys used in properties block :param uri: uri of STA entity + :param entity: SensorThings entity name :returns: dict of SensorThings feature properties """ - LOGGER.debug('Adding extra properties') - # Properties filter & display keys = (() if not self.properties and not keys else set(self.properties) | set(keys)) - if self.entity == 'Things': - self._expand_location(entity) - elif 'Thing' in entity.keys(): - self._expand_location(entity['Thing']) + entity = entity or self.entity + if entity == 'Things': + self._expand_location(feature) + elif 'Thing' in feature.keys(): + 
self._expand_location(feature['Thing']) # Retain URI if present - if entity.get('properties') and self.uri_field: - uri = entity['properties'] + if feature.get('properties') and self.uri_field: + uri = feature['properties'] # Create intra links - LOGGER.debug('Creating intralinks') - for k, v in entity.items(): - if k in self.links: - entity[k] = [self._get_uri(_v, **self.links[k]) for _v in v] + for k, v in feature.items(): + if k in self.linked_entity: + feature[k] = [self._get_uri(_v, **self.linked_entity[k]) + for _v in v] LOGGER.debug(f'Created link for {k}') - elif f'{k}s' in self.links: - entity[k] = self._get_uri(v, **self.links[f'{k}s']) + elif f'{k}s' in self.linked_entity: + feature[k] = \ + self._get_uri(v, **self.linked_entity[f'{k}s']) LOGGER.debug(f'Created link for {k}') # Make properties block - LOGGER.debug('Making properties block') - if entity.get('properties'): - entity.update(entity.pop('properties')) + if feature.get('properties'): + feature.update(feature.pop('properties')) if keys: - ret = {k: entity.pop(k) for k in keys} - entity = ret + ret = {k: feature.pop(k) for k in keys} + feature = ret if self.uri_field is not None and uri != '': - entity[self.uri_field] = uri + feature[self.uri_field] = uri - return entity + return feature @staticmethod def _expand_location(entity): @@ -517,5 +489,68 @@ def _get_entity(uri): else: return '' + def _generate_mappings(self, provider_def: dict): + """ + Generate mappings for the STA entity and set up intra-links. + + This function sets up the necessary mappings and configurations for + the STA entity based on the provided provider definition. + + :param provider_def: `dict` of provider definition containing + configuration details for the STA entity. + """ + self.data.rstrip('/') + try: + self.entity = provider_def['entity'] + self._url = url_join(self.data, self.entity) + except KeyError: + LOGGER.debug('Attempting to parse Entity from provider data') + if not self._get_entity(self.data): + raise RuntimeError('Entity type required') + self.entity = self._get_entity(self.data) + self._url = self.data + self.data = self._url.rstrip(f'/{self.entity}') + + # Default id + if self.id_field: + LOGGER.debug(f'Using id field: {self.id_field}') + else: + LOGGER.debug('Using default @iot.id for id field') + self.id_field = '@iot.id' + + # Create intra-links + self.intralink = provider_def.get('intralink', False) + if self.intralink and provider_def.get('rel_link'): + # For pytest + self.rel_link = provider_def['rel_link'] + + elif self.intralink: + # Read from pygeoapi config + CONFIG = get_config() + self.rel_link = get_base_url(CONFIG) + + for name, rs in CONFIG['resources'].items(): + pvs = rs.get('providers') + p = get_provider_default(pvs) + e = p.get('entity') or self._get_entity(p['data']) + if any([ + not pvs, # No providers in resource + not p.get('intralink'), # No configuration for intralinks + not e, # No STA entity found + self.data not in p.get('data') # No common STA endpoint + ]): + continue + + if p.get('uri_field'): + LOGGER.debug(f'Linking {e} with field: {p["uri_field"]}') + else: + LOGGER.debug(f'Linking {e} with collection: {name}') + + self.linked_entity[e] = { + 'cnm': name, # OAPI collection name, + 'cid': p.get('id_field', '@iot.id'), # OAPI id_field + 'uri': p.get('uri_field') # STA uri_field + } + def __repr__(self): return f' {self.data}, {self.entity}' diff --git a/pygeoapi/provider/socrata.py b/pygeoapi/provider/socrata.py index 0f402a735..9d2292549 100644 --- a/pygeoapi/provider/socrata.py +++ 
b/pygeoapi/provider/socrata.py @@ -75,7 +75,7 @@ def get_fields(self): :returns: dict of fields """ - if not self.fields: + if not self._fields: try: [dataset] = self.client.datasets(ids=[self.resource_id]) @@ -87,9 +87,9 @@ def get_fields(self): fields = self.properties or resource[FIELD_NAME] for field in fields: idx = resource[FIELD_NAME].index(field) - self.fields[field] = {'type': resource[DATA_TYPE][idx]} + self._fields[field] = {'type': resource[DATA_TYPE][idx]} - return self.fields + return self._fields @crs_transform def query(self, offset=0, limit=10, resulttype='results', diff --git a/pygeoapi/provider/sqlite.py b/pygeoapi/provider/sqlite.py index bb046ac86..9ba796bb9 100644 --- a/pygeoapi/provider/sqlite.py +++ b/pygeoapi/provider/sqlite.py @@ -88,7 +88,7 @@ def get_fields(self): :returns: dict of fields """ - if not self.fields: + if not self._fields: results = self.cursor.execute( f'PRAGMA table_info({self.table})').fetchall() for item in results: @@ -100,9 +100,9 @@ def get_fields(self): json_type = 'string' if json_type is not None: - self.fields[item['name']] = {'type': json_type} + self._fields[item['name']] = {'type': json_type} - return self.fields + return self._fields def __get_where_clauses(self, properties=[], bbox=[]): """ diff --git a/pygeoapi/provider/tinydb_.py b/pygeoapi/provider/tinydb_.py index 3bac4c059..bb86d3ae2 100644 --- a/pygeoapi/provider/tinydb_.py +++ b/pygeoapi/provider/tinydb_.py @@ -2,7 +2,7 @@ # # Authors: Tom Kralidis # -# Copyright (c) 2023 Tom Kralidis +# Copyright (c) 2024 Tom Kralidis # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation @@ -32,17 +32,19 @@ import os import uuid +from dateutil.parser import parse as parse_date from shapely.geometry import shape from tinydb import TinyDB, Query, where from pygeoapi.provider.base import (BaseProvider, ProviderConnectionError, ProviderItemNotFoundError) +from pygeoapi.util import crs_transform, get_typed_value LOGGER = logging.getLogger(__name__) -class TinyDBCatalogueProvider(BaseProvider): - """TinyDB Catalogue Provider""" +class TinyDBProvider(BaseProvider): + """TinyDB Provider""" def __init__(self, provider_def): """ @@ -50,15 +52,13 @@ def __init__(self, provider_def): :param provider_def: provider definition - :returns: pygeoapi.provider.tinydb_.TinyDBCatalogueProvider + :returns: pygeoapi.provider.tinydb_.TinyDBProvider """ - self.excludes = [ - '_metadata-anytext', - ] - super().__init__(provider_def) + self._excludes = [] + LOGGER.debug(f'Connecting to TinyDB db at {self.data}') if not os.path.exists(self.data): @@ -74,7 +74,7 @@ def __init__(self, provider_def): else: self.db = TinyDB(self.data) - self.fields = self.get_fields() + self.get_fields() def get_fields(self): """ @@ -83,22 +83,39 @@ def get_fields(self): :returns: dict of fields """ - fields = {} - - try: - r = self.db.all()[0] - except IndexError as err: - LOGGER.debug(err) - return fields - - for p in r['properties'].keys(): - if p not in self.excludes: - fields[p] = {'type': 'string'} - - fields['q'] = {'type': 'string'} - - return fields - + if not self._fields: + try: + r = self.db.all()[0] + except IndexError as err: + LOGGER.debug(err) + return {} + + for key, value in r['properties'].items(): + if key not in self._excludes: + typed_value = get_typed_value(str(value)) + if isinstance(typed_value, float): + typed_value_type = 'number' + elif isinstance(typed_value, int): + typed_value_type = 'integer' + else: + typed_value_type = 'string' + + 
self._fields[key] = {'type': typed_value_type} + + try: + LOGGER.debug('Attempting to detect date types') + _ = parse_date(value) + if len(value) > 11: + self._fields[key]['format'] = 'date-time' + else: + self._fields[key]['format'] = 'date' + except Exception: + LOGGER.debug('No date types detected') + pass + + return self._fields + + @crs_transform def query(self, offset=0, limit=10, resulttype='results', bbox=[], datetime_=None, properties=[], sortby=[], select_properties=[], skip_geometry=False, q=None, **kwargs): @@ -164,11 +181,9 @@ def query(self, offset=0, limit=10, resulttype='results', if properties: LOGGER.debug('processing properties') for prop in properties: - QUERY.append(f"(Q.properties['{prop[0]}']=='{prop[1]}')") + QUERY.append(f"(Q.properties['{prop[0]}']=={prop[1]})") - if q is not None: - for t in q.split(): - QUERY.append(f"(Q.properties['_metadata-anytext'].search('{t}', flags=re.IGNORECASE))") # noqa + QUERY = self._add_search_query(QUERY, q) QUERY_STRING = '&'.join(QUERY) LOGGER.debug(f'QUERY_STRING: {QUERY_STRING}') @@ -188,7 +203,7 @@ def query(self, offset=0, limit=10, resulttype='results', return feature_collection for r in results: - for e in self.excludes: + for e in self._excludes: try: del r['properties'][e] except KeyError: @@ -219,6 +234,7 @@ def query(self, offset=0, limit=10, resulttype='results', return feature_collection + @crs_transform def get(self, identifier, **kwargs): """ Get TinyDB document by id @@ -235,7 +251,7 @@ def get(self, identifier, **kwargs): if record is None: raise ProviderItemNotFoundError('record does not exist') - for e in self.excludes: + for e in self._excludes: try: del record['properties'][e] except KeyError: @@ -259,14 +275,7 @@ def create(self, item): identifier = str(uuid.uuid4()) json_data["id"] = identifier - try: - json_data['properties']['_metadata-anytext'] = ''.join([ - json_data['properties']['title'], - json_data['properties']['description'] - ]) - except KeyError: - LOGGER.debug('Missing title and description') - json_data['properties']['_metadata_anytext'] = '' + json_data = self._add_extra_fields(json_data) LOGGER.debug(f'Inserting data with identifier {identifier}') result = self.db.insert(json_data) @@ -306,17 +315,71 @@ def delete(self, identifier): return True - def _bbox(input_bbox, record_bbox): + def _add_extra_fields(self, json_data: dict) -> dict: """ - Test whether one bbox intersects another + Helper function to add extra fields to an item payload - :param input_bbox: `list` of minx,miny,maxx,maxy - :param record_bbox: `list` of minx,miny,maxx,maxy + :param json_data: `dict` of JSON data - :returns: `bool` of result + :returns: `dict` of updated JSON data """ - return True + return json_data + + def _add_search_query(self, query: list, search_term: str = None) -> str: + """ + Helper function to add extra query predicates + + :param query: `list` of query predicates + :param search_term: `str` of search term + + :returns: `list` of updated query predicates + """ + + return query + + def __repr__(self): + return f' {self.data}' + + +class TinyDBCatalogueProvider(TinyDBProvider): + """TinyDB Catalogue Provider""" + + def __init__(self, provider_def): + super().__init__(provider_def) + + LOGGER.debug('Refreshing fields') + self._excludes = ['_metadata-anytext'] + self._fields = {} + self.get_fields() + + def get_fields(self): + fields = super().get_fields() + + fields['q'] = {'type': 'string'} + + return fields + + def _add_extra_fields(self, json_data: dict) -> dict: + LOGGER.debug('Adding catalogue 
anytext property') + try: + json_data['properties']['_metadata-anytext'] = ''.join([ + json_data['properties']['title'], + json_data['properties']['description'] + ]) + except KeyError: + LOGGER.debug('Missing title and description') + json_data['properties']['_metadata_anytext'] = '' + + return json_data + + def _add_search_query(self, query: list, search_term: str = None) -> str: + if search_term is not None: + LOGGER.debug('catalogue q= query') + for t in search_term.split(): + query.append(f"(Q.properties['_metadata-anytext'].search('{t}', flags=re.IGNORECASE))") # noqa + + return query def __repr__(self): return f' {self.data}' diff --git a/pygeoapi/provider/wms_facade.py b/pygeoapi/provider/wms_facade.py index 8771b6395..fa3ffd064 100644 --- a/pygeoapi/provider/wms_facade.py +++ b/pygeoapi/provider/wms_facade.py @@ -84,7 +84,9 @@ def query(self, style=None, bbox=[-180, -90, 180, 90], width=500, self._transparent = 'TRUE' - if crs in [4326, 'CRS;84']: + version = self.options.get('version', '1.3.0') + + if crs in [4326, 'CRS;84'] and version == '1.3.0': LOGGER.debug('Swapping 4326 axis order to WMS 1.3 mode (yx)') bbox2 = ','.join(str(c) for c in [bbox[1], bbox[0], bbox[3], bbox[2]]) @@ -106,12 +108,14 @@ def query(self, style=None, bbox=[-180, -90, 180, 90], width=500, if not transparent: self._transparent = 'FALSE' + crs_param = 'crs' if version == '1.3.0' else 'srs' + params = { - 'version': '1.3.0', + 'version': version, 'service': 'WMS', 'request': 'GetMap', 'bbox': bbox2, - 'crs': CRS_CODES[crs], + crs_param: CRS_CODES[crs], 'layers': self.options['layer'], 'styles': self.options.get('style', 'default'), 'width': width, @@ -128,7 +132,7 @@ def query(self, style=None, bbox=[-180, -90, 180, 90], width=500, else: request_url = '?'.join([self.data, urlencode(params)]) - LOGGER.debug(f'WMS 1.3.0 request url: {request_url}') + LOGGER.debug(f'WMS {version} request url: {request_url}') response = requests.get(request_url) diff --git a/pygeoapi/provider/xarray_.py b/pygeoapi/provider/xarray_.py index 9a82daec0..9ed2726b1 100644 --- a/pygeoapi/provider/xarray_.py +++ b/pygeoapi/provider/xarray_.py @@ -37,12 +37,16 @@ import xarray import fsspec import numpy as np +import pyproj +from pyproj.exceptions import CRSError + +from pygeoapi.api import DEFAULT_STORAGE_CRS from pygeoapi.provider.base import (BaseProvider, ProviderConnectionError, ProviderNoDataError, ProviderQueryError) -from pygeoapi.util import read_data +from pygeoapi.util import get_crs_from_uri, read_data LOGGER = logging.getLogger(__name__) @@ -81,35 +85,43 @@ def __init__(self, provider_def): else: data_to_open = self.data - self._data = open_func(data_to_open) + try: + self._data = open_func(data_to_open) + except ValueError as err: + # Manage non-cf-compliant time dimensions + if 'time' in str(err): + self._data = open_func(self.data, decode_times=False) + else: + raise err + + self.storage_crs = self._parse_storage_crs(provider_def) self._coverage_properties = self._get_coverage_properties() - self.axes = [self._coverage_properties['x_axis_label'], - self._coverage_properties['y_axis_label'], - self._coverage_properties['time_axis_label']] + self.axes = self._coverage_properties['axes'] - self.fields = self.get_fields() + self.get_fields() except Exception as err: LOGGER.warning(err) raise ProviderConnectionError(err) def get_fields(self): - fields = {} - - for key, value in self._data.variables.items(): - if len(value.shape) >= 3: - LOGGER.debug('Adding variable') - dtype = value.dtype - if 
dtype.name.startswith('float'): - dtype = 'number' - - fields[key] = { - 'type': dtype, - 'title': value.attrs['long_name'], - 'x-ogc-unit': value.attrs.get('units') - } + if not self._fields: + for key, value in self._data.variables.items(): + if key not in self._data.coords: + LOGGER.debug('Adding variable') + dtype = value.dtype + if dtype.name.startswith('float'): + dtype = 'number' + elif dtype.name.startswith('int'): + dtype = 'integer' + + self._fields[key] = { + 'type': dtype, + 'title': value.attrs.get('long_name'), + 'x-ogc-unit': value.attrs.get('units') + } - return fields + return self._fields def query(self, properties=[], subsets={}, bbox=[], bbox_crs=4326, datetime_=None, format_='json', **kwargs): @@ -138,9 +150,9 @@ def query(self, properties=[], subsets={}, bbox=[], bbox_crs=4326, data = self._data[[*properties]] - if any([self._coverage_properties['x_axis_label'] in subsets, - self._coverage_properties['y_axis_label'] in subsets, - self._coverage_properties['time_axis_label'] in subsets, + if any([self._coverage_properties.get('x_axis_label') in subsets, + self._coverage_properties.get('y_axis_label') in subsets, + self._coverage_properties.get('time_axis_label') in subsets, datetime_ is not None]): LOGGER.debug('Creating spatio-temporal subset') @@ -159,18 +171,36 @@ def query(self, properties=[], subsets={}, bbox=[], bbox_crs=4326, self._coverage_properties['y_axis_label'] in subsets, len(bbox) > 0]): msg = 'bbox and subsetting by coordinates are exclusive' - LOGGER.warning(msg) + LOGGER.error(msg) raise ProviderQueryError(msg) else: - query_params[self._coverage_properties['x_axis_label']] = \ - slice(bbox[0], bbox[2]) - query_params[self._coverage_properties['y_axis_label']] = \ - slice(bbox[1], bbox[3]) + x_axis_label = self._coverage_properties['x_axis_label'] + x_coords = data.coords[x_axis_label] + if x_coords.values[0] > x_coords.values[-1]: + LOGGER.debug( + 'Reversing slicing of x axis from high to low' + ) + query_params[x_axis_label] = slice(bbox[2], bbox[0]) + else: + query_params[x_axis_label] = slice(bbox[0], bbox[2]) + y_axis_label = self._coverage_properties['y_axis_label'] + y_coords = data.coords[y_axis_label] + if y_coords.values[0] > y_coords.values[-1]: + LOGGER.debug( + 'Reversing slicing of y axis from high to low' + ) + query_params[y_axis_label] = slice(bbox[3], bbox[1]) + else: + query_params[y_axis_label] = slice(bbox[1], bbox[3]) LOGGER.debug('bbox_crs is not currently handled') if datetime_ is not None: - if self._coverage_properties['time_axis_label'] in subsets: + if self._coverage_properties['time_axis_label'] is None: + msg = 'Dataset does not contain a time axis' + LOGGER.error(msg) + raise ProviderQueryError(msg) + elif self._coverage_properties['time_axis_label'] in subsets: msg = 'datetime and temporal subsetting are exclusive' LOGGER.error(msg) raise ProviderQueryError(msg) @@ -192,13 +222,15 @@ def query(self, properties=[], subsets={}, bbox=[], bbox_crs=4326, LOGGER.warning(err) raise ProviderQueryError(err) - if (any([data.coords[self.x_field].size == 0, - data.coords[self.y_field].size == 0, - data.coords[self.time_field].size == 0])): + if any(size == 0 for size in data.sizes.values()): msg = 'No data found' LOGGER.warning(msg) raise ProviderNoDataError(msg) + if format_ == 'json': + # json does not support float32 + data = _convert_float32_to_float64(data) + out_meta = { 'bbox': [ data.coords[self.x_field].values[0], @@ -206,18 +238,20 @@ def query(self, properties=[], subsets={}, bbox=[], bbox_crs=4326, 
data.coords[self.x_field].values[-1], data.coords[self.y_field].values[-1] ], - "time": [ - _to_datetime_string(data.coords[self.time_field].values[0]), - _to_datetime_string(data.coords[self.time_field].values[-1]) - ], "driver": "xarray", "height": data.sizes[self.y_field], "width": data.sizes[self.x_field], - "time_steps": data.sizes[self.time_field], "variables": {var_name: var.attrs for var_name, var in data.variables.items()} } + if self.time_field is not None: + out_meta['time'] = [ + _to_datetime_string(data.coords[self.time_field].values[0]), + _to_datetime_string(data.coords[self.time_field].values[-1]), + ] + out_meta["time_steps"] = data.sizes[self.time_field] + LOGGER.debug('Serializing data in memory') if format_ == 'json': LOGGER.debug('Creating output in CoverageJSON') @@ -226,9 +260,11 @@ def query(self, properties=[], subsets={}, bbox=[], bbox_crs=4326, LOGGER.debug('Returning data in native zarr format') return _get_zarr_data(data) else: # return data in native format - with tempfile.TemporaryFile() as fp: + with tempfile.NamedTemporaryFile() as fp: LOGGER.debug('Returning data in native NetCDF format') - fp.write(data.to_netcdf()) + data.to_netcdf( + fp.name + ) # we need to pass a string to be able to use the "netcdf4" engine # noqa fp.seek(0) return fp.read() @@ -238,14 +274,18 @@ def gen_covjson(self, metadata, data, fields): :param metadata: coverage metadata :param data: rasterio DatasetReader object - :param fields: fields dict + :param fields: fields :returns: dict of CoverageJSON representation """ LOGGER.debug('Creating CoverageJSON domain') minx, miny, maxx, maxy = metadata['bbox'] - mint, maxt = metadata['time'] + + selected_fields = { + key: value for key, value in self.fields.items() + if key in fields + } try: tmp_min = data.coords[self.y_field].values[0] @@ -276,11 +316,6 @@ def gen_covjson(self, metadata, data, fields): 'start': maxy, 'stop': miny, 'num': metadata['height'] - }, - self.time_field: { - 'start': mint, - 'stop': maxt, - 'num': metadata['time_steps'] } }, 'referencing': [{ @@ -295,7 +330,15 @@ def gen_covjson(self, metadata, data, fields): 'ranges': {} } - for key, value in self.fields.items(): + if self.time_field is not None: + mint, maxt = metadata['time'] + cj['domain']['axes'][self.time_field] = { + 'start': mint, + 'stop': maxt, + 'num': metadata['time_steps'], + } + + for key, value in selected_fields.items(): parameter = { 'type': 'Parameter', 'description': value['title'], @@ -313,21 +356,25 @@ def gen_covjson(self, metadata, data, fields): cj['parameters'][key] = parameter data = data.fillna(None) - data = _convert_float32_to_float64(data) try: - for key, value in self.fields.items(): + for key, value in selected_fields.items(): cj['ranges'][key] = { 'type': 'NdArray', 'dataType': value['type'], 'axisNames': [ - 'y', 'x', self._coverage_properties['time_axis_label'] + 'y', 'x' ], 'shape': [metadata['height'], - metadata['width'], - metadata['time_steps']] + metadata['width']] } cj['ranges'][key]['values'] = data[key].values.flatten().tolist() # noqa + + if self.time_field is not None: + cj['ranges'][key]['axisNames'].append( + self._coverage_properties['time_axis_label'] + ) + cj['ranges'][key]['shape'].append(metadata['time_steps']) except IndexError as err: LOGGER.warning(err) raise ProviderQueryError('Invalid query parameter') @@ -337,6 +384,7 @@ def gen_covjson(self, metadata, data, fields): def _get_coverage_properties(self): """ Helper function to normalize coverage properties + :param provider_def: provider definition 
:returns: `dict` of coverage properties """ @@ -372,48 +420,61 @@ def _get_coverage_properties(self): self._data.coords[self.x_field].values[-1], self._data.coords[self.y_field].values[-1], ], - 'time_range': [ - _to_datetime_string( - self._data.coords[self.time_field].values[0] - ), - _to_datetime_string( - self._data.coords[self.time_field].values[-1] - ) - ], 'bbox_crs': 'http://www.opengis.net/def/crs/OGC/1.3/CRS84', 'crs_type': 'GeographicCRS', 'x_axis_label': self.x_field, 'y_axis_label': self.y_field, - 'time_axis_label': self.time_field, 'width': self._data.sizes[self.x_field], 'height': self._data.sizes[self.y_field], - 'time': self._data.sizes[self.time_field], - 'time_duration': self.get_time_coverage_duration(), 'bbox_units': 'degrees', - 'resx': np.abs(self._data.coords[self.x_field].values[1] - - self._data.coords[self.x_field].values[0]), - 'resy': np.abs(self._data.coords[self.y_field].values[1] - - self._data.coords[self.y_field].values[0]), - 'restime': self.get_time_resolution() + 'resx': np.abs( + self._data.coords[self.x_field].values[1] + - self._data.coords[self.x_field].values[0] + ), + 'resy': np.abs( + self._data.coords[self.y_field].values[1] + - self._data.coords[self.y_field].values[0] + ), } - if 'crs' in self._data.variables.keys(): - try: - properties['bbox_crs'] = f'http://www.opengis.net/def/crs/OGC/1.3/{self._data.crs.epsg_code}' # noqa - - properties['inverse_flattening'] = self._data.crs.\ - inverse_flattening - + if self.time_field is not None: + properties['time_axis_label'] = self.time_field + properties['time_range'] = [ + _to_datetime_string( + self._data.coords[self.time_field].values[0] + ), + _to_datetime_string( + self._data.coords[self.time_field].values[-1] + ), + ] + properties['time'] = self._data.sizes[self.time_field] + properties['time_duration'] = self.get_time_coverage_duration() + properties['restime'] = self.get_time_resolution() + + # Update properties based on the xarray's CRS + epsg_code = self.storage_crs.to_epsg() + LOGGER.debug(f'{epsg_code}') + if epsg_code == 4326 or self.storage_crs == 'OGC:CRS84': + pass + LOGGER.debug('Confirmed default of WGS 84') + else: + properties['bbox_crs'] = \ + f'https://www.opengis.net/def/crs/EPSG/0/{epsg_code}' + properties['inverse_flattening'] = \ + self.storage_crs.ellipsoid.inverse_flattening + if self.storage_crs.is_projected: properties['crs_type'] = 'ProjectedCRS' - except AttributeError: - pass + + LOGGER.debug(f'properties: {properties}') properties['axes'] = [ properties['x_axis_label'], - properties['y_axis_label'], - properties['time_axis_label'] + properties['y_axis_label'] ] + if self.time_field is not None: + properties['axes'].append(properties['time_axis_label']) + return properties @staticmethod @@ -440,7 +501,8 @@ def get_time_resolution(self): :returns: time resolution string """ - if self._data[self.time_field].size > 1: + if self.time_field is not None \ + and self._data[self.time_field].size > 1: time_diff = (self._data[self.time_field][1] - self._data[self.time_field][0]) @@ -457,6 +519,9 @@ def get_time_coverage_duration(self): :returns: time coverage duration string """ + if self.time_field is None: + return None + dur = self._data[self.time_field][-1] - self._data[self.time_field][0] ms_difference = dur.values.astype('timedelta64[ms]').astype(np.double) @@ -472,6 +537,71 @@ def get_time_coverage_duration(self): return ', '.join(times) + def _parse_grid_mapping(self): + """ + Identifies grid_mapping. 
+ + :returns: name of xarray data variable that contains CRS information. + """ + LOGGER.debug('Parsing grid mapping...') + spatiotemporal_dims = (self.time_field, self.y_field, self.x_field) + LOGGER.debug(spatiotemporal_dims) + grid_mapping_name = None + for var_name, var in self._data.variables.items(): + if all(dim in var.dims for dim in spatiotemporal_dims): + try: + grid_mapping_name = self._data[var_name].attrs['grid_mapping'] # noqa + LOGGER.debug(f'Grid mapping: {grid_mapping_name}') + except KeyError as err: + LOGGER.debug(err) + LOGGER.debug('No grid mapping information found.') + return grid_mapping_name + + def _parse_storage_crs( + self, + provider_def: dict + ) -> pyproj.CRS: + """ + Parse the storage CRS from an xarray dataset. + + :param provider_def: provider definition + + :returns: `pyproj.CRS` instance parsed from dataset + """ + storage_crs = None + + try: + storage_crs = provider_def['storage_crs'] + crs_function = pyproj.CRS.from_user_input + except KeyError as err: + LOGGER.debug(err) + LOGGER.debug('No storage_crs found. Attempting to parse the CRS.') + + if storage_crs is None: + grid_mapping = self._parse_grid_mapping() + if grid_mapping is not None: + storage_crs = self._data[grid_mapping].attrs + crs_function = pyproj.CRS.from_cf + elif 'crs' in self._data.variables.keys(): + storage_crs = self._data['crs'].attrs + crs_function = pyproj.CRS.from_dict + else: + storage_crs = DEFAULT_STORAGE_CRS + crs_function = get_crs_from_uri + LOGGER.debug('Failed to parse dataset CRS. Assuming WGS84.') + + LOGGER.debug(f'Parsing CRS {storage_crs} with {crs_function}') + try: + crs = crs_function(storage_crs) + except CRSError as err: + LOGGER.debug(f'Unable to parse projection with pyproj: {err}') + LOGGER.debug('Assuming default WGS84.') + crs = get_crs_from_uri(DEFAULT_STORAGE_CRS) + + LOGGER.debug(crs) + + return crs + def _to_datetime_string(datetime_obj): """ @@ -554,7 +684,7 @@ def _convert_float32_to_float64(data): for var_name in data.variables: if data[var_name].dtype == 'float32': og_attrs = data[var_name].attrs - data[var_name] = data[var_name].astype('float64') + data[var_name] = data[var_name].astype('float64', copy=False) data[var_name].attrs = og_attrs return data diff --git a/pygeoapi/provider/xarray_edr.py b/pygeoapi/provider/xarray_edr.py index e724f1d64..f5bf543f2 100644 --- a/pygeoapi/provider/xarray_edr.py +++ b/pygeoapi/provider/xarray_edr.py @@ -81,14 +81,14 @@ def position(self, **kwargs): wkt = kwargs.get('wkt') if wkt is not None: LOGGER.debug('Processing WKT') - LOGGER.debug(f'Geometry type: {wkt.type}') - if wkt.type == 'Point': + LOGGER.debug(f'Geometry type: {wkt.geom_type}') + if wkt.geom_type == 'Point': query_params[self._coverage_properties['x_axis_label']] = wkt.x query_params[self._coverage_properties['y_axis_label']] = wkt.y - elif wkt.type == 'LineString': + elif wkt.geom_type == 'LineString': query_params[self._coverage_properties['x_axis_label']] = wkt.xy[0] # noqa query_params[self._coverage_properties['y_axis_label']] = wkt.xy[1] # noqa - elif wkt.type == 'Polygon': + elif wkt.geom_type == 'Polygon': query_params[self._coverage_properties['x_axis_label']] = slice(wkt.bounds[0], wkt.bounds[2]) # noqa query_params[self._coverage_properties['y_axis_label']] = slice(wkt.bounds[1], wkt.bounds[3]) # noqa pass @@ -109,7 +109,7 @@ def position(self, **kwargs): try: if select_properties: - self.fields = {k: v for k, v in self.fields.items() if k in select_properties} # noqa + self._fields = {k: v for k, v in self._fields.items() if k in 
select_properties} # noqa data = self._data[[*select_properties]] else: data = self._data @@ -206,7 +206,7 @@ def cube(self, **kwargs): LOGGER.debug(f'query parameters: {query_params}') try: if select_properties: - self.fields = {k: v for k, v in self.fields.items() if k in select_properties} # noqa + self._fields = {k: v for k, v in self._fields.items() if k in select_properties} # noqa data = self._data[[*select_properties]] else: data = self._data diff --git a/pygeoapi/schemas/config/pygeoapi-config-0.x.yml b/pygeoapi/schemas/config/pygeoapi-config-0.x.yml index 5d567c93a..ecc146c8c 100644 --- a/pygeoapi/schemas/config/pygeoapi-config-0.x.yml +++ b/pygeoapi/schemas/config/pygeoapi-config-0.x.yml @@ -411,6 +411,10 @@ properties: type: [string, 'null'] format: date-time nullable: true + trs: + type: string + description: temporal reference system of features + default: 'http://www.opengis.net/def/uom/ISO-8601/0/Gregorian' required: - spatial providers: diff --git a/pygeoapi/starlette_app.py b/pygeoapi/starlette_app.py index b2ddf4858..af8798a05 100644 --- a/pygeoapi/starlette_app.py +++ b/pygeoapi/starlette_app.py @@ -334,11 +334,7 @@ async def collection_items(request: Request, collection_id=None, item_id=None): if 'item_id' in request.path_params: item_id = request.path_params['item_id'] if item_id is None: - if request.method == 'GET': # list items - return await execute_from_starlette( - itemtypes_api.get_collection_items, request, collection_id, - skip_valid_check=True) - elif request.method == 'POST': # filter or manage items + if request.method == 'POST': # filter or manage items content_type = request.headers.get('content-type') if content_type is not None: if content_type == 'application/geo+json': @@ -357,6 +353,10 @@ async def collection_items(request: Request, collection_id=None, item_id=None): itemtypes_api.manage_collection_item, request, 'options', collection_id, skip_valid_check=True, ) + else: # GET: list items + return await execute_from_starlette( + itemtypes_api.get_collection_items, request, collection_id, + skip_valid_check=True) elif request.method == 'DELETE': return await execute_from_starlette( @@ -742,7 +742,12 @@ async def __call__(self, scope: Scope, # CORS: optionally enable from config. 
if CONFIG['server'].get('cors', False): from starlette.middleware.cors import CORSMiddleware - APP.add_middleware(CORSMiddleware, allow_origins=['*']) + APP.add_middleware( + CORSMiddleware, + allow_origins=['*'], + allow_methods=['*'], + expose_headers=['*'] + ) try: OGC_SCHEMAS_LOCATION = Path(CONFIG['server']['ogc_schemas_location']) diff --git a/pygeoapi/static/css/default.css b/pygeoapi/static/css/default.css index 2f87fc9e3..a8bed5164 100644 --- a/pygeoapi/static/css/default.css +++ b/pygeoapi/static/css/default.css @@ -27,6 +27,15 @@ main { height: 400px; } +#coverages-map { + width: 100%; + height: 80vh; +} + +.c3-tooltip-container { + z-index: 300; +} + /* cancel mini-css header>button uppercase */ header button, header [type="button"], header .button, header [role="button"] { text-transform: none; diff --git a/pygeoapi/templates/_base.html b/pygeoapi/templates/_base.html index b53701054..0e892deaf 100644 --- a/pygeoapi/templates/_base.html +++ b/pygeoapi/templates/_base.html @@ -70,7 +70,7 @@ {% set links_found = namespace(json=0, jsonld=0) %} {% for link in data['links'] %} - {% if link['rel'] == 'alternate' and link['type'] and link['type'] in ['application/json', 'application/geo+json'] %} + {% if link['rel'] == 'alternate' and link['type'] and link['type'] in ['application/json', 'application/geo+json', 'application/prs.coverage+json'] %} {% set links_found.json = 1 %} {% trans %}json{% endtrans %} {% elif link['rel'] == 'alternate' and link['type'] and link['type'] == 'application/ld+json' %} @@ -102,9 +102,13 @@ -
{% trans %}Powered by {% endtrans %} {{ version }}
+
+
+ {% trans %}Powered by {% endtrans %} + + {{ version }} +
+
{% block extrafoot %} {% endblock %} + {% if data.type == "Coverage" or data.type == "CoverageCollection" %} + + + + {% elif data.type == "Feature" or data.type == "FeatureCollection" %} + + + + {% endif %} {% endblock %} {% block body %}
-
+ {% if data.features or data.coverages or data.ranges or data.references %} +
+ {% else %} +
+

{% trans %}No items{% endtrans %}

+
+ {% endif %}
{% endblock %} {% block extrafoot %} {% if data %} {% endif %} {% endblock %} diff --git a/pygeoapi/templates/collections/items/index.html b/pygeoapi/templates/collections/items/index.html index 6153d2d00..d2049becd 100644 --- a/pygeoapi/templates/collections/items/index.html +++ b/pygeoapi/templates/collections/items/index.html @@ -22,7 +22,6 @@

{% for l in data['links'] if l.rel == 'collection' %} {{ l['title'] }} {% endfor %}

-

{% trans %}Items in this collection{% endtrans %}.

{% if data['features'] %}
@@ -35,7 +34,9 @@

{% for l in data['links'] if l.rel == 'collection' %} {{ l['title'] }} {% endfor %}
- {% trans %}Warning: Higher limits not recommended!{% endtrans %}
+ {% if data['numberMatched'] %}
+

{% trans %}Items in this collection{% endtrans %}: {{ data['numberMatched'] }}

+ {% endif %}
@@ -47,6 +48,7 @@

{% for l in data['links'] if l.rel == 'collection' %} {{ l['title'] }} {% endfor %} +

{% trans %}Warning: Higher limits not recommended!{% endtrans %}

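The items/index.html hunk above surfaces `numberMatched` in the listing. For reference, a minimal sketch of the OGC API - Features style payload the template consumes; the dict below is illustrative, not pygeoapi's actual response object:

```python
# Illustrative GeoJSON FeatureCollection shaped like the `data` dict the
# items template renders; `numberMatched`/`numberReturned` follow
# OGC API - Features. All values here are made up for the example.
feature_collection = {
    'type': 'FeatureCollection',
    'features': [{
        'type': 'Feature',
        'id': '371',
        'properties': {'name': 'Oslo'},
        'geometry': {'type': 'Point', 'coordinates': [10.75, 59.91]},
    }],
    'numberMatched': 11,   # total features matching the query
    'numberReturned': 1,   # features on this page after limit/offset
}

# The template guard `{% if data['numberMatched'] %}` only prints the total
# when the provider supplied one
if feature_collection.get('numberMatched'):
    print(f"Items in this collection: {feature_collection['numberMatched']}")
```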
diff --git a/pygeoapi/templates/collections/items/item.html b/pygeoapi/templates/collections/items/item.html index 9c98492f3..705f9057c 100644 --- a/pygeoapi/templates/collections/items/item.html +++ b/pygeoapi/templates/collections/items/item.html @@ -1,5 +1,5 @@ {% extends "_base.html" %} -{% set ptitle = data['properties'][data['title_field']] or '_(Item) '.format(data['id']) %} +{% set ptitle = data['properties'][data['title_field']] or data['id'] | string %} {% block desc %}{{ data.get('properties',{}).get('description', {}) | string | truncate(250) }}{% endblock %} {% block tags %}{{ data['properties'].get('themes', [{}])[0].get('concepts', []) | join(',') }}{% endblock %} {# Optionally renders an img element, otherwise standard value or link rendering #} diff --git a/pygeoapi/templates/collections/queryables.html b/pygeoapi/templates/collections/queryables.html index df4387135..3a5af610e 100644 --- a/pygeoapi/templates/collections/queryables.html +++ b/pygeoapi/templates/collections/queryables.html @@ -2,8 +2,8 @@ {% block title %}{{ super() }} {{ data['title'] }} {% endblock %} {% block crumbs %}{{ super() }} / {% trans %}Collections{% endtrans %} -/ {{ data['title'] | truncate( 25 ) }} -/ {% trans %}Queryables{% endtrans %} +/ {{ data['title'] | truncate( 25 ) }} +/ {% trans %}Queryables{% endtrans %} {% endblock %} {% block body %}
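The item.html change above falls back to the feature id, coerced with Jinja's `string` filter so that numeric ids render as the page title. A small standalone sketch of that behaviour (the sample data is hypothetical):

```python
# Demonstrates the `or data['id'] | string` fallback from item.html:
# Jinja filters bind tighter than `or`, so a feature with no usable title
# property falls back to the stringified id.
from jinja2 import Environment

template = Environment().from_string(
    "{% set ptitle = data['properties'][data['title_field']]"
    " or data['id'] | string %}{{ ptitle }}"
)

# Hypothetical feature: no name property, integer id
print(template.render(data={'properties': {'name': None},
                            'title_field': 'name',
                            'id': 790}))  # -> 790
```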
diff --git a/pygeoapi/templates/collections/schema.html b/pygeoapi/templates/collections/schema.html index e839d36f5..6a0383373 100644 --- a/pygeoapi/templates/collections/schema.html +++ b/pygeoapi/templates/collections/schema.html @@ -2,8 +2,8 @@ {% block title %}{{ super() }} {{ data['title'] }} {% endblock %} {% block crumbs %}{{ super() }} / {% trans %}Collections{% endtrans %} -/ {{ data['title'] | truncate( 25 ) }} -/ {% trans %}Schema{% endtrans %} +/ {{ data['title'] | truncate( 25 ) }} +/ {% trans %}Schema{% endtrans %} {% endblock %} {% block body %}
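The jobs/index.html hunk below adds limit/offset paging controls; the prev/next links follow the pagination that `tests/api/test_processes.py` exercises (`/jobs?offset=10` after a full default page, `/jobs?offset=0&limit=10` going back). A hedged sketch of the offset arithmetic, with an illustrative helper name and link format:

```python
# Sketch of prev/next link generation for the jobs listing; the helper
# name and exact href format are illustrative, not pygeoapi internals.
def paging_links(base_url: str, offset: int, limit: int,
                 returned: int) -> list:
    links = []
    if offset > 0:
        # step back one page, clamped at the start
        prev_offset = max(offset - limit, 0)
        links.append({'rel': 'prev',
                      'href': f'{base_url}/jobs?offset={prev_offset}&limit={limit}'})
    if returned == limit:
        # a full page suggests more jobs may follow
        links.append({'rel': 'next',
                      'href': f'{base_url}/jobs?offset={offset + limit}&limit={limit}'})
    return links

# e.g. 11 jobs with the default limit of 10: page one gets only a next link
print(paging_links('http://localhost:5000', offset=0, limit=10, returned=10))
```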
diff --git a/pygeoapi/templates/jobs/index.html b/pygeoapi/templates/jobs/index.html index 05c12d918..1c1903520 100644 --- a/pygeoapi/templates/jobs/index.html +++ b/pygeoapi/templates/jobs/index.html @@ -48,5 +48,38 @@

+
+
+ {% trans %}Limit{% endtrans %}: + + +
+
+
+
+ {% for link in data['jobs']['links'] %} + {% if link['rel'] == 'prev' and data['offset'] > 0 %} + {% trans %}Prev{% endtrans %} + {% elif link['rel'] == 'next' and data['jobs']['jobs'] %} + {% trans %}Next{% endtrans %} + {% endif %} + {% endfor %} +
+
+

{% endblock %} diff --git a/pygeoapi/templates/landing_page.html b/pygeoapi/templates/landing_page.html index e104d3dc3..e135ddb86 100644 --- a/pygeoapi/templates/landing_page.html +++ b/pygeoapi/templates/landing_page.html @@ -40,6 +40,15 @@

{{ config['metadata']['identification']['title'] }}

{% endif %} + diff --git a/pygeoapi/util.py b/pygeoapi/util.py index 4a1ddfd57..b0db50cd8 100644 --- a/pygeoapi/util.py +++ b/pygeoapi/util.py @@ -597,6 +597,11 @@ class RequestedProcessExecutionMode(Enum): respond_async = 'respond-async' +class RequestedResponse(Enum): + raw = 'raw' + document = 'document' + + class JobStatus(Enum): """ Enum for the job status options specified in the WPS 2.0 specification diff --git a/requirements-manager.txt b/requirements-manager.txt new file mode 100644 index 000000000..6ccfa4b1a --- /dev/null +++ b/requirements-manager.txt @@ -0,0 +1,2 @@ +psycopg2 + diff --git a/requirements-provider.txt b/requirements-provider.txt index b42210f72..74bc473fc 100644 --- a/requirements-provider.txt +++ b/requirements-provider.txt @@ -6,11 +6,13 @@ elasticsearch-dsl fiona GDAL<=3.8.4 geoalchemy2 +geopandas netCDF4 -numpy +numpy==2.0.1 oracledb pandas psycopg2 +pyarrow pygeofilter[backend-sqlalchemy] pygeoif pygeometa diff --git a/requirements.txt b/requirements.txt index 22a16a49a..82244eb4b 100644 --- a/requirements.txt +++ b/requirements.txt @@ -14,6 +14,5 @@ PyYAML rasterio requests shapely -SQLAlchemy<2.0.0 +SQLAlchemy tinydb -unicodecsv diff --git a/setup.py b/setup.py index 9ff9c3180..7013179aa 100644 --- a/setup.py +++ b/setup.py @@ -155,6 +155,7 @@ def get_package_version(): maintainer='Tom Kralidis', maintainer_email='tomkralidis@gmail.com', url='https://pygeoapi.io', + python_requires='>=3.10', install_requires=read('requirements.txt').splitlines(), packages=find_packages(exclude=['pygeoapi.tests']), include_package_data=True, diff --git a/tests/api/test_api.py b/tests/api/test_api.py index c11afca28..faad9ed09 100644 --- a/tests/api/test_api.py +++ b/tests/api/test_api.py @@ -271,8 +271,9 @@ def test_apirules_active(config_with_rules, rules_api): assert response.status_code == 200 assert response.is_json links = response.json['links'] + assert links[0]['rel'] == 'about' assert all( - href.startswith(base_url) for href in (rel['href'] for rel in links) # noqa + href.startswith(base_url) for href in (rel['href'] for rel in links[1:]) # noqa ) # Test Starlette @@ -303,8 +304,9 @@ def test_apirules_active(config_with_rules, rules_api): response = starlette_client.get(starlette_prefix, follow_redirects=True) # noqa assert response.status_code == 200 links = response.json()['links'] + assert links[0]['rel'] == 'about' assert all( - href.startswith(base_url) for href in (rel['href'] for rel in links) # noqa + href.startswith(base_url) for href in (rel['href'] for rel in links[1:]) # noqa ) @@ -387,6 +389,9 @@ def test_api(config, api_, openapi): assert rsp_headers['Content-Language'] == 'en-US' assert code == HTTPStatus.BAD_REQUEST + response = json.loads(response) + assert response['description'] == 'Invalid format requested' + assert api_.get_collections_url() == 'http://localhost:5000/collections' @@ -510,14 +515,17 @@ def test_root(config, api_): assert isinstance(root, dict) assert 'links' in root - assert root['links'][0]['rel'] == 'self' - assert root['links'][0]['type'] == FORMAT_TYPES[F_JSON] - assert root['links'][0]['href'].endswith('?f=json') + assert root['links'][0]['rel'] == 'about' + assert root['links'][0]['type'] == 'text/html' + assert root['links'][0]['href'] == 'http://example.org' + assert root['links'][1]['rel'] == 'self' + assert root['links'][1]['type'] == FORMAT_TYPES[F_JSON] + assert root['links'][1]['href'].endswith('?f=json') assert any(link['href'].endswith('f=jsonld') and link['rel'] == 'alternate' for link in root['links']) assert 
any(link['href'].endswith('f=html') and link['rel'] == 'alternate' for link in root['links']) - assert len(root['links']) == 11 + assert len(root['links']) == 12 assert 'title' in root assert root['title'] == 'pygeoapi default instance' assert 'description' in root @@ -567,7 +575,7 @@ def test_conformance(config, api_): assert isinstance(root, dict) assert 'conformsTo' in root - assert len(root['conformsTo']) == 37 + assert len(root['conformsTo']) == 42 assert 'http://www.opengis.net/spec/ogcapi-features-2/1.0/conf/crs' \ in root['conformsTo'] @@ -596,7 +604,7 @@ def test_describe_collections(config, api_): collections = json.loads(response) assert len(collections) == 2 - assert len(collections['collections']) == 9 + assert len(collections['collections']) == 10 assert len(collections['links']) == 3 rsp_headers, code, response = api_.describe_collections(req, 'foo') diff --git a/tests/api/test_itemtypes.py b/tests/api/test_itemtypes.py index 20822836c..ae19c28d6 100644 --- a/tests/api/test_itemtypes.py +++ b/tests/api/test_itemtypes.py @@ -62,6 +62,11 @@ def test_get_collection_queryables(config, api_): api_, req, 'notfound') assert code == HTTPStatus.NOT_FOUND + req = mock_api_request() + rsp_headers, code, response = get_collection_queryables( + api_, req, 'mapserver_world_map') + assert code == HTTPStatus.BAD_REQUEST + req = mock_api_request({'f': 'html'}) rsp_headers, code, response = get_collection_queryables(api_, req, 'obs') assert rsp_headers['Content-Type'] == FORMAT_TYPES[F_HTML] @@ -74,6 +79,14 @@ def test_get_collection_queryables(config, api_): assert 'properties' in queryables assert len(queryables['properties']) == 5 + req = mock_api_request({'f': 'json'}) + rsp_headers, code, response = get_collection_queryables(api_, req, 'canada-metadata') # noqa + assert rsp_headers['Content-Type'] == 'application/schema+json' + queryables = json.loads(response) + + assert 'properties' in queryables + assert len(queryables['properties']) == 10 + # test with provider filtered properties api_.config['resources']['obs']['providers'][0]['properties'] = ['stn_id'] @@ -573,6 +586,13 @@ def test_get_collection_item(config, api_): assert 'prev' not in feature['links'] assert 'next' not in feature['links'] + req = mock_api_request() + rsp_headers, code, response = get_collection_item(api_, req, 'norway_pop', + '790') + feature = json.loads(response) + + assert feature['properties']['name'] == 'Ålesund' + def test_get_collection_item_json_ld(config, api_): req = mock_api_request({'f': 'jsonld'}) diff --git a/tests/api/test_processes.py b/tests/api/test_processes.py index a33972889..62256e4b1 100644 --- a/tests/api/test_processes.py +++ b/tests/api/test_processes.py @@ -39,7 +39,7 @@ from pygeoapi.api import FORMAT_TYPES, F_HTML, F_JSON from pygeoapi.api.processes import ( - describe_processes, execute_process, delete_job, get_job_result, + describe_processes, execute_process, delete_job, get_job_result, get_jobs ) from tests.util import mock_api_request @@ -198,6 +198,12 @@ def test_execute_process(config, api_): 'failedUri': 'https://example.com/failed', } } + req_body_8 = { + 'inputs': { + 'name': 'Test document' + }, + 'response': 'document' + } cleanup_jobs = set() @@ -346,6 +352,14 @@ def test_execute_process(config, api_): cleanup_jobs.add(tuple(['hello-world', rsp_headers['Location'].split('/')[-1]])) + req = mock_api_request(data=req_body_8) + rsp_headers, code, response = execute_process(api_, req, 'hello-world') + + response = json.loads(response) + assert code == HTTPStatus.OK + assert 
'outputs' in response + assert isinstance(response['outputs'], list) + # Cleanup time.sleep(2) # Allow time for any outstanding async jobs for _, job_id in cleanup_jobs: @@ -386,7 +400,10 @@ def test_delete_job(api_): job_id = _execute_a_job(api_) rsp_headers, code, response = delete_job(api_, mock_api_request(), job_id) + data = json.loads(response) + assert code == HTTPStatus.OK + assert data['message'] == 'Job dismissed' rsp_headers, code, response = delete_job(api_, mock_api_request(), job_id) assert code == HTTPStatus.NOT_FOUND @@ -425,4 +442,51 @@ def test_get_job_result(api_): ) assert code == HTTPStatus.OK assert rsp_headers['Content-Type'] == 'application/json' - assert json.loads(response)['value'] == "Hello Sync Test!" + assert json.loads(response)['value'] == 'Hello Sync Test!' + + +def test_get_jobs_single(api_): + job_id = _execute_a_job(api_) + headers, code, response = get_jobs(api_, mock_api_request(), job_id=job_id) + assert code == HTTPStatus.OK + + job = json.loads(response) + assert job['jobID'] == job_id + assert job['status'] == 'successful' + + +def test_get_jobs_pagination(api_): + # generate test jobs for querying + for _ in range(11): + _execute_a_job(api_) + + # test default pagination limit + headers, code, response = get_jobs(api_, mock_api_request(), job_id=None) + job_response = json.loads(response) + assert len(job_response['jobs']) == 10 + assert next( + link for link in job_response['links'] if link['rel'] == 'next' + )['href'].endswith('/jobs?offset=10') + + headers, code, response = get_jobs( + api_, + mock_api_request({'limit': 10, 'offset': 9}), + job_id=None) + job_response_offset = json.loads(response) + # check to get 1 same job id with an offset of 9 and limit of 10 + same_job_ids = {job['jobID'] for job in job_response['jobs']}.intersection( + {job['jobID'] for job in job_response_offset['jobs']} + ) + assert len(same_job_ids) == 1 + assert next( + link for link in job_response_offset['links'] if link['rel'] == 'prev' + )['href'].endswith('/jobs?offset=0&limit=10') + + # test custom limit + headers, code, response = get_jobs( + api_, + mock_api_request({'limit': 20}), + job_id=None) + job_response = json.loads(response) + # might be more than 11 due to test interaction + assert len(job_response['jobs']) > 10 diff --git a/tests/cite/README.md b/tests/cite/README.md index 4341e8cbf..e81afe17f 100644 --- a/tests/cite/README.md +++ b/tests/cite/README.md @@ -1,12 +1,20 @@ # CITE testing for OGC API capabilities - OGC API - Features +- OGC API - Tiles +- OGC API - Environmental Data Retrieval - OGC API - Processes ## Test data ### OGC API - Features -Test data used is a subset of the [Canadian National Water Data Archive](https://www.canada.ca/en/environment-climate-change/services/water-overview/quantity/monitoring/survey/data-products-services/national-archive-hydat.html) as extracted from the [MSC GeoMet OGC API](https://eccc-msc.github.io/open-data/msc-geomet/web-services_en/#ogc-api-features) service. +Test data used is in `tests/data/canada-hydat-daily-mean-02HC003.tinydb`. + +### OGC API - Tiles +Test data used is in `tests/data/tiles/ne_110m_lakes`. + +### OGC API - Environmental Data Retrieval +TODO ### OGC API - Processes The `hello-world` test process that is provided with pygeoapi by default is used. @@ -23,7 +31,6 @@ rm -f /tmp/pygeoapi-process-manager.db* pip3 install gunicorn cd tests/cite . 
cite.env -python3 ../load_es_data.py ./canada-hydat-daily-mean-02hc003.geojson IDENTIFIER pygeoapi openapi generate $PYGEOAPI_CONFIG --output-file $PYGEOAPI_OPENAPI gunicorn pygeoapi.flask_app:APP -b 0.0.0.0:5001 --access-logfile '-' ``` diff --git a/tests/cite/canada-hydat-daily-mean-02HC003.geojson b/tests/cite/canada-hydat-daily-mean-02HC003.geojson deleted file mode 100644 index 13f534411..000000000 --- a/tests/cite/canada-hydat-daily-mean-02HC003.geojson +++ /dev/null @@ -1,1240 +0,0 @@ -{ - "features": [ - { - "geometry": { - "coordinates": [ - -79.52039337158203, - 43.69894027709961 - ], - "type": "Point" - }, - "id": "02HC003.1975-10-03", - "properties": { - "DATE": "1975-10-03", - "FLOW": 2.039999961853028, - "FLOW_SYMBOL_EN": null, - "FLOW_SYMBOL_FR": null, - "IDENTIFIER": "02HC003.1975-10-03", - "LEVEL": null, - "LEVEL_SYMBOL_EN": null, - "LEVEL_SYMBOL_FR": null, - "PROV_TERR_STATE_LOC": "ON", - "STATION_NAME": "HUMBER RIVER AT WESTON", - "STATION_NUMBER": "02HC003" - }, - "type": "Feature" - }, - { - "geometry": { - "coordinates": [ - -79.52039337158203, - 43.69894027709961 - ], - "type": "Point" - }, - "id": "02HC003.1975-10-02", - "properties": { - "DATE": "1975-10-02", - "FLOW": 2.059999942779541, - "FLOW_SYMBOL_EN": null, - "FLOW_SYMBOL_FR": null, - "IDENTIFIER": "02HC003.1975-10-02", - "LEVEL": null, - "LEVEL_SYMBOL_EN": null, - "LEVEL_SYMBOL_FR": null, - "PROV_TERR_STATE_LOC": "ON", - "STATION_NAME": "HUMBER RIVER AT WESTON", - "STATION_NUMBER": "02HC003" - }, - "type": "Feature" - }, - { - "geometry": { - "coordinates": [ - -79.52039337158203, - 43.69894027709961 - ], - "type": "Point" - }, - "id": "02HC003.1975-10-01", - "properties": { - "DATE": "1975-10-01", - "FLOW": 2.140000104904175, - "FLOW_SYMBOL_EN": null, - "FLOW_SYMBOL_FR": null, - "IDENTIFIER": "02HC003.1975-10-01", - "LEVEL": null, - "LEVEL_SYMBOL_EN": null, - "LEVEL_SYMBOL_FR": null, - "PROV_TERR_STATE_LOC": "ON", - "STATION_NAME": "HUMBER RIVER AT WESTON", - "STATION_NUMBER": "02HC003" - }, - "type": "Feature" - }, - { - "geometry": { - "coordinates": [ - -79.52039337158203, - 43.69894027709961 - ], - "type": "Point" - }, - "id": "02HC003.1975-10-07", - "properties": { - "DATE": "1975-10-07", - "FLOW": 1.940000057220459, - "FLOW_SYMBOL_EN": null, - "FLOW_SYMBOL_FR": null, - "IDENTIFIER": "02HC003.1975-10-07", - "LEVEL": null, - "LEVEL_SYMBOL_EN": null, - "LEVEL_SYMBOL_FR": null, - "PROV_TERR_STATE_LOC": "ON", - "STATION_NAME": "HUMBER RIVER AT WESTON", - "STATION_NUMBER": "02HC003" - }, - "type": "Feature" - }, - { - "geometry": { - "coordinates": [ - -79.52039337158203, - 43.69894027709961 - ], - "type": "Point" - }, - "id": "02HC003.1975-10-06", - "properties": { - "DATE": "1975-10-06", - "FLOW": 1.9600000381469729, - "FLOW_SYMBOL_EN": null, - "FLOW_SYMBOL_FR": null, - "IDENTIFIER": "02HC003.1975-10-06", - "LEVEL": null, - "LEVEL_SYMBOL_EN": null, - "LEVEL_SYMBOL_FR": null, - "PROV_TERR_STATE_LOC": "ON", - "STATION_NAME": "HUMBER RIVER AT WESTON", - "STATION_NUMBER": "02HC003" - }, - "type": "Feature" - }, - { - "geometry": { - "coordinates": [ - -79.52039337158203, - 43.69894027709961 - ], - "type": "Point" - }, - "id": "02HC003.1975-10-05", - "properties": { - "DATE": "1975-10-05", - "FLOW": 1.909999966621399, - "FLOW_SYMBOL_EN": null, - "FLOW_SYMBOL_FR": null, - "IDENTIFIER": "02HC003.1975-10-05", - "LEVEL": null, - "LEVEL_SYMBOL_EN": null, - "LEVEL_SYMBOL_FR": null, - "PROV_TERR_STATE_LOC": "ON", - "STATION_NAME": "HUMBER RIVER AT WESTON", - "STATION_NUMBER": "02HC003" - }, - "type": "Feature" - }, - 
{ - "geometry": { - "coordinates": [ - -79.52039337158203, - 43.69894027709961 - ], - "type": "Point" - }, - "id": "02HC003.1975-10-04", - "properties": { - "DATE": "1975-10-04", - "FLOW": 2.0, - "FLOW_SYMBOL_EN": null, - "FLOW_SYMBOL_FR": null, - "IDENTIFIER": "02HC003.1975-10-04", - "LEVEL": null, - "LEVEL_SYMBOL_EN": null, - "LEVEL_SYMBOL_FR": null, - "PROV_TERR_STATE_LOC": "ON", - "STATION_NAME": "HUMBER RIVER AT WESTON", - "STATION_NUMBER": "02HC003" - }, - "type": "Feature" - }, - { - "geometry": { - "coordinates": [ - -79.52039337158203, - 43.69894027709961 - ], - "type": "Point" - }, - "id": "02HC003.1983-12-31", - "properties": { - "DATE": "1983-12-31", - "FLOW": 2.5999999046325684, - "FLOW_SYMBOL_EN": "Ice Conditions", - "FLOW_SYMBOL_FR": "Conditions \u00e0 glace", - "IDENTIFIER": "02HC003.1983-12-31", - "LEVEL": null, - "LEVEL_SYMBOL_EN": null, - "LEVEL_SYMBOL_FR": null, - "PROV_TERR_STATE_LOC": "ON", - "STATION_NAME": "HUMBER RIVER AT WESTON", - "STATION_NUMBER": "02HC003" - }, - "type": "Feature" - }, - { - "geometry": { - "coordinates": [ - -79.52039337158203, - 43.69894027709961 - ], - "type": "Point" - }, - "id": "02HC003.1983-12-30", - "properties": { - "DATE": "1983-12-30", - "FLOW": 2.650000095367432, - "FLOW_SYMBOL_EN": "Ice Conditions", - "FLOW_SYMBOL_FR": "Conditions \u00e0 glace", - "IDENTIFIER": "02HC003.1983-12-30", - "LEVEL": null, - "LEVEL_SYMBOL_EN": null, - "LEVEL_SYMBOL_FR": null, - "PROV_TERR_STATE_LOC": "ON", - "STATION_NAME": "HUMBER RIVER AT WESTON", - "STATION_NUMBER": "02HC003" - }, - "type": "Feature" - }, - { - "geometry": { - "coordinates": [ - -79.52039337158203, - 43.69894027709961 - ], - "type": "Point" - }, - "id": "02HC003.2017-05-23", - "properties": { - "DATE": "2017-05-23", - "FLOW": 7.079999923706055, - "FLOW_SYMBOL_EN": null, - "FLOW_SYMBOL_FR": null, - "IDENTIFIER": "02HC003.2017-05-23", - "LEVEL": 2.3320000171661377, - "LEVEL_SYMBOL_EN": null, - "LEVEL_SYMBOL_FR": null, - "PROV_TERR_STATE_LOC": "ON", - "STATION_NAME": "HUMBER RIVER AT WESTON", - "STATION_NUMBER": "02HC003" - }, - "type": "Feature" - }, - { - "geometry": { - "coordinates": [ - -79.52039337158203, - 43.69894027709961 - ], - "type": "Point" - }, - "id": "02HC003.1955-09-09", - "properties": { - "DATE": "1955-09-09", - "FLOW": 1.4700000286102295, - "FLOW_SYMBOL_EN": "Estimated", - "FLOW_SYMBOL_FR": "Estim\u00e9", - "IDENTIFIER": "02HC003.1955-09-09", - "LEVEL": null, - "LEVEL_SYMBOL_EN": null, - "LEVEL_SYMBOL_FR": null, - "PROV_TERR_STATE_LOC": "ON", - "STATION_NAME": "HUMBER RIVER AT WESTON", - "STATION_NUMBER": "02HC003" - }, - "type": "Feature" - }, - { - "geometry": { - "coordinates": [ - -79.52039337158203, - 43.69894027709961 - ], - "type": "Point" - }, - "id": "02HC003.1955-09-08", - "properties": { - "DATE": "1955-09-08", - "FLOW": 1.4700000286102295, - "FLOW_SYMBOL_EN": "Estimated", - "FLOW_SYMBOL_FR": "Estim\u00e9", - "IDENTIFIER": "02HC003.1955-09-08", - "LEVEL": null, - "LEVEL_SYMBOL_EN": null, - "LEVEL_SYMBOL_FR": null, - "PROV_TERR_STATE_LOC": "ON", - "STATION_NAME": "HUMBER RIVER AT WESTON", - "STATION_NUMBER": "02HC003" - }, - "type": "Feature" - }, - { - "geometry": { - "coordinates": [ - -79.52039337158203, - 43.69894027709961 - ], - "type": "Point" - }, - "id": "02HC003.1955-09-07", - "properties": { - "DATE": "1955-09-07", - "FLOW": 1.4700000286102295, - "FLOW_SYMBOL_EN": "Estimated", - "FLOW_SYMBOL_FR": "Estim\u00e9", - "IDENTIFIER": "02HC003.1955-09-07", - "LEVEL": null, - "LEVEL_SYMBOL_EN": null, - "LEVEL_SYMBOL_FR": null, - "PROV_TERR_STATE_LOC": "ON", 
- "STATION_NAME": "HUMBER RIVER AT WESTON", - "STATION_NUMBER": "02HC003" - }, - "type": "Feature" - }, - { - "geometry": { - "coordinates": [ - -79.52039337158203, - 43.69894027709961 - ], - "type": "Point" - }, - "id": "02HC003.1955-09-06", - "properties": { - "DATE": "1955-09-06", - "FLOW": 1.4700000286102295, - "FLOW_SYMBOL_EN": "Estimated", - "FLOW_SYMBOL_FR": "Estim\u00e9", - "IDENTIFIER": "02HC003.1955-09-06", - "LEVEL": null, - "LEVEL_SYMBOL_EN": null, - "LEVEL_SYMBOL_FR": null, - "PROV_TERR_STATE_LOC": "ON", - "STATION_NAME": "HUMBER RIVER AT WESTON", - "STATION_NUMBER": "02HC003" - }, - "type": "Feature" - }, - { - "geometry": { - "coordinates": [ - -79.52039337158203, - 43.69894027709961 - ], - "type": "Point" - }, - "id": "02HC003.1955-09-05", - "properties": { - "DATE": "1955-09-05", - "FLOW": 1.4700000286102295, - "FLOW_SYMBOL_EN": "Estimated", - "FLOW_SYMBOL_FR": "Estim\u00e9", - "IDENTIFIER": "02HC003.1955-09-05", - "LEVEL": null, - "LEVEL_SYMBOL_EN": null, - "LEVEL_SYMBOL_FR": null, - "PROV_TERR_STATE_LOC": "ON", - "STATION_NAME": "HUMBER RIVER AT WESTON", - "STATION_NUMBER": "02HC003" - }, - "type": "Feature" - }, - { - "geometry": { - "coordinates": [ - -79.52039337158203, - 43.69894027709961 - ], - "type": "Point" - }, - "id": "02HC003.1955-09-04", - "properties": { - "DATE": "1955-09-04", - "FLOW": 1.4700000286102295, - "FLOW_SYMBOL_EN": "Estimated", - "FLOW_SYMBOL_FR": "Estim\u00e9", - "IDENTIFIER": "02HC003.1955-09-04", - "LEVEL": null, - "LEVEL_SYMBOL_EN": null, - "LEVEL_SYMBOL_FR": null, - "PROV_TERR_STATE_LOC": "ON", - "STATION_NAME": "HUMBER RIVER AT WESTON", - "STATION_NUMBER": "02HC003" - }, - "type": "Feature" - }, - { - "geometry": { - "coordinates": [ - -79.52039337158203, - 43.69894027709961 - ], - "type": "Point" - }, - "id": "02HC003.1955-09-03", - "properties": { - "DATE": "1955-09-03", - "FLOW": 1.4700000286102295, - "FLOW_SYMBOL_EN": "Estimated", - "FLOW_SYMBOL_FR": "Estim\u00e9", - "IDENTIFIER": "02HC003.1955-09-03", - "LEVEL": null, - "LEVEL_SYMBOL_EN": null, - "LEVEL_SYMBOL_FR": null, - "PROV_TERR_STATE_LOC": "ON", - "STATION_NAME": "HUMBER RIVER AT WESTON", - "STATION_NUMBER": "02HC003" - }, - "type": "Feature" - }, - { - "geometry": { - "coordinates": [ - -79.52039337158203, - 43.69894027709961 - ], - "type": "Point" - }, - "id": "02HC003.1955-09-02", - "properties": { - "DATE": "1955-09-02", - "FLOW": 1.4700000286102295, - "FLOW_SYMBOL_EN": "Estimated", - "FLOW_SYMBOL_FR": "Estim\u00e9", - "IDENTIFIER": "02HC003.1955-09-02", - "LEVEL": null, - "LEVEL_SYMBOL_EN": null, - "LEVEL_SYMBOL_FR": null, - "PROV_TERR_STATE_LOC": "ON", - "STATION_NAME": "HUMBER RIVER AT WESTON", - "STATION_NUMBER": "02HC003" - }, - "type": "Feature" - }, - { - "geometry": { - "coordinates": [ - -79.52039337158203, - 43.69894027709961 - ], - "type": "Point" - }, - "id": "02HC003.1955-09-01", - "properties": { - "DATE": "1955-09-01", - "FLOW": 1.4700000286102295, - "FLOW_SYMBOL_EN": "Estimated", - "FLOW_SYMBOL_FR": "Estim\u00e9", - "IDENTIFIER": "02HC003.1955-09-01", - "LEVEL": null, - "LEVEL_SYMBOL_EN": null, - "LEVEL_SYMBOL_FR": null, - "PROV_TERR_STATE_LOC": "ON", - "STATION_NAME": "HUMBER RIVER AT WESTON", - "STATION_NUMBER": "02HC003" - }, - "type": "Feature" - }, - { - "geometry": { - "coordinates": [ - -79.52039337158203, - 43.69894027709961 - ], - "type": "Point" - }, - "id": "02HC003.1979-11-22", - "properties": { - "DATE": "1979-11-22", - "FLOW": 5.230000019073486, - "FLOW_SYMBOL_EN": null, - "FLOW_SYMBOL_FR": null, - "IDENTIFIER": "02HC003.1979-11-22", - "LEVEL": 
null, - "LEVEL_SYMBOL_EN": null, - "LEVEL_SYMBOL_FR": null, - "PROV_TERR_STATE_LOC": "ON", - "STATION_NAME": "HUMBER RIVER AT WESTON", - "STATION_NUMBER": "02HC003" - }, - "type": "Feature" - }, - { - "geometry": { - "coordinates": [ - -79.52039337158203, - 43.69894027709961 - ], - "type": "Point" - }, - "id": "02HC003.1979-11-23", - "properties": { - "DATE": "1979-11-23", - "FLOW": 9.260000228881836, - "FLOW_SYMBOL_EN": null, - "FLOW_SYMBOL_FR": null, - "IDENTIFIER": "02HC003.1979-11-23", - "LEVEL": null, - "LEVEL_SYMBOL_EN": null, - "LEVEL_SYMBOL_FR": null, - "PROV_TERR_STATE_LOC": "ON", - "STATION_NAME": "HUMBER RIVER AT WESTON", - "STATION_NUMBER": "02HC003" - }, - "type": "Feature" - }, - { - "geometry": { - "coordinates": [ - -79.52039337158203, - 43.69894027709961 - ], - "type": "Point" - }, - "id": "02HC003.1979-11-20", - "properties": { - "DATE": "1979-11-20", - "FLOW": 3.390000104904175, - "FLOW_SYMBOL_EN": null, - "FLOW_SYMBOL_FR": null, - "IDENTIFIER": "02HC003.1979-11-20", - "LEVEL": null, - "LEVEL_SYMBOL_EN": null, - "LEVEL_SYMBOL_FR": null, - "PROV_TERR_STATE_LOC": "ON", - "STATION_NAME": "HUMBER RIVER AT WESTON", - "STATION_NUMBER": "02HC003" - }, - "type": "Feature" - }, - { - "geometry": { - "coordinates": [ - -79.52039337158203, - 43.69894027709961 - ], - "type": "Point" - }, - "id": "02HC003.1979-11-21", - "properties": { - "DATE": "1979-11-21", - "FLOW": 3.3299999237060547, - "FLOW_SYMBOL_EN": null, - "FLOW_SYMBOL_FR": null, - "IDENTIFIER": "02HC003.1979-11-21", - "LEVEL": null, - "LEVEL_SYMBOL_EN": null, - "LEVEL_SYMBOL_FR": null, - "PROV_TERR_STATE_LOC": "ON", - "STATION_NAME": "HUMBER RIVER AT WESTON", - "STATION_NUMBER": "02HC003" - }, - "type": "Feature" - }, - { - "geometry": { - "coordinates": [ - -79.52039337158203, - 43.69894027709961 - ], - "type": "Point" - }, - "id": "02HC003.1979-11-26", - "properties": { - "DATE": "1979-11-26", - "FLOW": 25.799999237060547, - "FLOW_SYMBOL_EN": null, - "FLOW_SYMBOL_FR": null, - "IDENTIFIER": "02HC003.1979-11-26", - "LEVEL": null, - "LEVEL_SYMBOL_EN": null, - "LEVEL_SYMBOL_FR": null, - "PROV_TERR_STATE_LOC": "ON", - "STATION_NAME": "HUMBER RIVER AT WESTON", - "STATION_NUMBER": "02HC003" - }, - "type": "Feature" - }, - { - "geometry": { - "coordinates": [ - -79.52039337158203, - 43.69894027709961 - ], - "type": "Point" - }, - "id": "02HC003.1979-11-27", - "properties": { - "DATE": "1979-11-27", - "FLOW": 24.399999618530277, - "FLOW_SYMBOL_EN": null, - "FLOW_SYMBOL_FR": null, - "IDENTIFIER": "02HC003.1979-11-27", - "LEVEL": null, - "LEVEL_SYMBOL_EN": null, - "LEVEL_SYMBOL_FR": null, - "PROV_TERR_STATE_LOC": "ON", - "STATION_NAME": "HUMBER RIVER AT WESTON", - "STATION_NUMBER": "02HC003" - }, - "type": "Feature" - }, - { - "geometry": { - "coordinates": [ - -79.52039337158203, - 43.69894027709961 - ], - "type": "Point" - }, - "id": "02HC003.1979-11-24", - "properties": { - "DATE": "1979-11-24", - "FLOW": 20.5, - "FLOW_SYMBOL_EN": null, - "FLOW_SYMBOL_FR": null, - "IDENTIFIER": "02HC003.1979-11-24", - "LEVEL": null, - "LEVEL_SYMBOL_EN": null, - "LEVEL_SYMBOL_FR": null, - "PROV_TERR_STATE_LOC": "ON", - "STATION_NAME": "HUMBER RIVER AT WESTON", - "STATION_NUMBER": "02HC003" - }, - "type": "Feature" - }, - { - "geometry": { - "coordinates": [ - -79.52039337158203, - 43.69894027709961 - ], - "type": "Point" - }, - "id": "02HC003.1979-11-25", - "properties": { - "DATE": "1979-11-25", - "FLOW": 17.899999618530273, - "FLOW_SYMBOL_EN": null, - "FLOW_SYMBOL_FR": null, - "IDENTIFIER": "02HC003.1979-11-25", - "LEVEL": null, - 
"LEVEL_SYMBOL_EN": null, - "LEVEL_SYMBOL_FR": null, - "PROV_TERR_STATE_LOC": "ON", - "STATION_NAME": "HUMBER RIVER AT WESTON", - "STATION_NUMBER": "02HC003" - }, - "type": "Feature" - }, - { - "geometry": { - "coordinates": [ - -79.52039337158203, - 43.69894027709961 - ], - "type": "Point" - }, - "id": "02HC003.1979-11-28", - "properties": { - "DATE": "1979-11-28", - "FLOW": 18.600000381469727, - "FLOW_SYMBOL_EN": null, - "FLOW_SYMBOL_FR": null, - "IDENTIFIER": "02HC003.1979-11-28", - "LEVEL": null, - "LEVEL_SYMBOL_EN": null, - "LEVEL_SYMBOL_FR": null, - "PROV_TERR_STATE_LOC": "ON", - "STATION_NAME": "HUMBER RIVER AT WESTON", - "STATION_NUMBER": "02HC003" - }, - "type": "Feature" - }, - { - "geometry": { - "coordinates": [ - -79.52039337158203, - 43.69894027709961 - ], - "type": "Point" - }, - "id": "02HC003.1979-11-29", - "properties": { - "DATE": "1979-11-29", - "FLOW": 17.5, - "FLOW_SYMBOL_EN": null, - "FLOW_SYMBOL_FR": null, - "IDENTIFIER": "02HC003.1979-11-29", - "LEVEL": null, - "LEVEL_SYMBOL_EN": null, - "LEVEL_SYMBOL_FR": null, - "PROV_TERR_STATE_LOC": "ON", - "STATION_NAME": "HUMBER RIVER AT WESTON", - "STATION_NUMBER": "02HC003" - }, - "type": "Feature" - }, - { - "geometry": { - "coordinates": [ - -79.52039337158203, - 43.69894027709961 - ], - "type": "Point" - }, - "id": "02HC003.1967-02-09", - "properties": { - "DATE": "1967-02-09", - "FLOW": 4.53000020980835, - "FLOW_SYMBOL_EN": "Ice Conditions", - "FLOW_SYMBOL_FR": "Conditions \u00e0 glace", - "IDENTIFIER": "02HC003.1967-02-09", - "LEVEL": null, - "LEVEL_SYMBOL_EN": null, - "LEVEL_SYMBOL_FR": null, - "PROV_TERR_STATE_LOC": "ON", - "STATION_NAME": "HUMBER RIVER AT WESTON", - "STATION_NUMBER": "02HC003" - }, - "type": "Feature" - }, - { - "geometry": { - "coordinates": [ - -79.52039337158203, - 43.69894027709961 - ], - "type": "Point" - }, - "id": "02HC003.1967-02-08", - "properties": { - "DATE": "1967-02-08", - "FLOW": 5.659999847412109, - "FLOW_SYMBOL_EN": "Ice Conditions", - "FLOW_SYMBOL_FR": "Conditions \u00e0 glace", - "IDENTIFIER": "02HC003.1967-02-08", - "LEVEL": null, - "LEVEL_SYMBOL_EN": null, - "LEVEL_SYMBOL_FR": null, - "PROV_TERR_STATE_LOC": "ON", - "STATION_NAME": "HUMBER RIVER AT WESTON", - "STATION_NUMBER": "02HC003" - }, - "type": "Feature" - }, - { - "geometry": { - "coordinates": [ - -79.52039337158203, - 43.69894027709961 - ], - "type": "Point" - }, - "id": "02HC003.1967-02-01", - "properties": { - "DATE": "1967-02-01", - "FLOW": 3.259999990463257, - "FLOW_SYMBOL_EN": "Ice Conditions", - "FLOW_SYMBOL_FR": "Conditions \u00e0 glace", - "IDENTIFIER": "02HC003.1967-02-01", - "LEVEL": null, - "LEVEL_SYMBOL_EN": null, - "LEVEL_SYMBOL_FR": null, - "PROV_TERR_STATE_LOC": "ON", - "STATION_NAME": "HUMBER RIVER AT WESTON", - "STATION_NUMBER": "02HC003" - }, - "type": "Feature" - }, - { - "geometry": { - "coordinates": [ - -79.52039337158203, - 43.69894027709961 - ], - "type": "Point" - }, - "id": "02HC003.1967-02-03", - "properties": { - "DATE": "1967-02-03", - "FLOW": 3.109999895095825, - "FLOW_SYMBOL_EN": "Ice Conditions", - "FLOW_SYMBOL_FR": "Conditions \u00e0 glace", - "IDENTIFIER": "02HC003.1967-02-03", - "LEVEL": null, - "LEVEL_SYMBOL_EN": null, - "LEVEL_SYMBOL_FR": null, - "PROV_TERR_STATE_LOC": "ON", - "STATION_NAME": "HUMBER RIVER AT WESTON", - "STATION_NUMBER": "02HC003" - }, - "type": "Feature" - }, - { - "geometry": { - "coordinates": [ - -79.52039337158203, - 43.69894027709961 - ], - "type": "Point" - }, - "id": "02HC003.1967-02-02", - "properties": { - "DATE": "1967-02-02", - "FLOW": 2.8299999237060547, - 
"FLOW_SYMBOL_EN": "Ice Conditions", - "FLOW_SYMBOL_FR": "Conditions \u00e0 glace", - "IDENTIFIER": "02HC003.1967-02-02", - "LEVEL": null, - "LEVEL_SYMBOL_EN": null, - "LEVEL_SYMBOL_FR": null, - "PROV_TERR_STATE_LOC": "ON", - "STATION_NAME": "HUMBER RIVER AT WESTON", - "STATION_NUMBER": "02HC003" - }, - "type": "Feature" - }, - { - "geometry": { - "coordinates": [ - -79.52039337158203, - 43.69894027709961 - ], - "type": "Point" - }, - "id": "02HC003.1967-02-05", - "properties": { - "DATE": "1967-02-05", - "FLOW": 3.9600000381469727, - "FLOW_SYMBOL_EN": "Ice Conditions", - "FLOW_SYMBOL_FR": "Conditions \u00e0 glace", - "IDENTIFIER": "02HC003.1967-02-05", - "LEVEL": null, - "LEVEL_SYMBOL_EN": null, - "LEVEL_SYMBOL_FR": null, - "PROV_TERR_STATE_LOC": "ON", - "STATION_NAME": "HUMBER RIVER AT WESTON", - "STATION_NUMBER": "02HC003" - }, - "type": "Feature" - }, - { - "geometry": { - "coordinates": [ - -79.52039337158203, - 43.69894027709961 - ], - "type": "Point" - }, - "id": "02HC003.1967-02-04", - "properties": { - "DATE": "1967-02-04", - "FLOW": 3.400000095367432, - "FLOW_SYMBOL_EN": "Ice Conditions", - "FLOW_SYMBOL_FR": "Conditions \u00e0 glace", - "IDENTIFIER": "02HC003.1967-02-04", - "LEVEL": null, - "LEVEL_SYMBOL_EN": null, - "LEVEL_SYMBOL_FR": null, - "PROV_TERR_STATE_LOC": "ON", - "STATION_NAME": "HUMBER RIVER AT WESTON", - "STATION_NUMBER": "02HC003" - }, - "type": "Feature" - }, - { - "geometry": { - "coordinates": [ - -79.52039337158203, - 43.69894027709961 - ], - "type": "Point" - }, - "id": "02HC003.1967-02-07", - "properties": { - "DATE": "1967-02-07", - "FLOW": 6.369999885559082, - "FLOW_SYMBOL_EN": "Ice Conditions", - "FLOW_SYMBOL_FR": "Conditions \u00e0 glace", - "IDENTIFIER": "02HC003.1967-02-07", - "LEVEL": null, - "LEVEL_SYMBOL_EN": null, - "LEVEL_SYMBOL_FR": null, - "PROV_TERR_STATE_LOC": "ON", - "STATION_NAME": "HUMBER RIVER AT WESTON", - "STATION_NUMBER": "02HC003" - }, - "type": "Feature" - }, - { - "geometry": { - "coordinates": [ - -79.52039337158203, - 43.69894027709961 - ], - "type": "Point" - }, - "id": "02HC003.1967-02-06", - "properties": { - "DATE": "1967-02-06", - "FLOW": 4.53000020980835, - "FLOW_SYMBOL_EN": "Ice Conditions", - "FLOW_SYMBOL_FR": "Conditions \u00e0 glace", - "IDENTIFIER": "02HC003.1967-02-06", - "LEVEL": null, - "LEVEL_SYMBOL_EN": null, - "LEVEL_SYMBOL_FR": null, - "PROV_TERR_STATE_LOC": "ON", - "STATION_NAME": "HUMBER RIVER AT WESTON", - "STATION_NUMBER": "02HC003" - }, - "type": "Feature" - }, - { - "geometry": { - "coordinates": [ - -79.52039337158203, - 43.69894027709961 - ], - "type": "Point" - }, - "id": "02HC003.1995-01-29", - "properties": { - "DATE": "1995-01-29", - "FLOW": 3.309999942779541, - "FLOW_SYMBOL_EN": "Ice Conditions", - "FLOW_SYMBOL_FR": "Conditions \u00e0 glace", - "IDENTIFIER": "02HC003.1995-01-29", - "LEVEL": null, - "LEVEL_SYMBOL_EN": null, - "LEVEL_SYMBOL_FR": null, - "PROV_TERR_STATE_LOC": "ON", - "STATION_NAME": "HUMBER RIVER AT WESTON", - "STATION_NUMBER": "02HC003" - }, - "type": "Feature" - }, - { - "geometry": { - "coordinates": [ - -79.52039337158203, - 43.69894027709961 - ], - "type": "Point" - }, - "id": "02HC003.1987-03-13", - "properties": { - "DATE": "1987-03-13", - "FLOW": 9.720000267028809, - "FLOW_SYMBOL_EN": null, - "FLOW_SYMBOL_FR": null, - "IDENTIFIER": "02HC003.1987-03-13", - "LEVEL": null, - "LEVEL_SYMBOL_EN": null, - "LEVEL_SYMBOL_FR": null, - "PROV_TERR_STATE_LOC": "ON", - "STATION_NAME": "HUMBER RIVER AT WESTON", - "STATION_NUMBER": "02HC003" - }, - "type": "Feature" - }, - { - "geometry": { - 
"coordinates": [ - -79.52039337158203, - 43.69894027709961 - ], - "type": "Point" - }, - "id": "02HC003.2017-05-27", - "properties": { - "DATE": "2017-05-27", - "FLOW": 17.299999237060547, - "FLOW_SYMBOL_EN": null, - "FLOW_SYMBOL_FR": null, - "IDENTIFIER": "02HC003.2017-05-27", - "LEVEL": 2.5420000553131104, - "LEVEL_SYMBOL_EN": null, - "LEVEL_SYMBOL_FR": null, - "PROV_TERR_STATE_LOC": "ON", - "STATION_NAME": "HUMBER RIVER AT WESTON", - "STATION_NUMBER": "02HC003" - }, - "type": "Feature" - }, - { - "geometry": { - "coordinates": [ - -79.52039337158203, - 43.69894027709961 - ], - "type": "Point" - }, - "id": "02HC003.1987-03-12", - "properties": { - "DATE": "1987-03-12", - "FLOW": 12.600000381469728, - "FLOW_SYMBOL_EN": null, - "FLOW_SYMBOL_FR": null, - "IDENTIFIER": "02HC003.1987-03-12", - "LEVEL": null, - "LEVEL_SYMBOL_EN": null, - "LEVEL_SYMBOL_FR": null, - "PROV_TERR_STATE_LOC": "ON", - "STATION_NAME": "HUMBER RIVER AT WESTON", - "STATION_NUMBER": "02HC003" - }, - "type": "Feature" - }, - { - "geometry": { - "coordinates": [ - -79.52039337158203, - 43.69894027709961 - ], - "type": "Point" - }, - "id": "02HC003.1987-03-11", - "properties": { - "DATE": "1987-03-11", - "FLOW": 16.200000762939453, - "FLOW_SYMBOL_EN": null, - "FLOW_SYMBOL_FR": null, - "IDENTIFIER": "02HC003.1987-03-11", - "LEVEL": null, - "LEVEL_SYMBOL_EN": null, - "LEVEL_SYMBOL_FR": null, - "PROV_TERR_STATE_LOC": "ON", - "STATION_NAME": "HUMBER RIVER AT WESTON", - "STATION_NUMBER": "02HC003" - }, - "type": "Feature" - }, - { - "geometry": { - "coordinates": [ - -79.52039337158203, - 43.69894027709961 - ], - "type": "Point" - }, - "id": "02HC003.1987-03-10", - "properties": { - "DATE": "1987-03-10", - "FLOW": 29.100000381469727, - "FLOW_SYMBOL_EN": null, - "FLOW_SYMBOL_FR": null, - "IDENTIFIER": "02HC003.1987-03-10", - "LEVEL": null, - "LEVEL_SYMBOL_EN": null, - "LEVEL_SYMBOL_FR": null, - "PROV_TERR_STATE_LOC": "ON", - "STATION_NAME": "HUMBER RIVER AT WESTON", - "STATION_NUMBER": "02HC003" - }, - "type": "Feature" - }, - { - "geometry": { - "coordinates": [ - -79.52039337158203, - 43.69894027709961 - ], - "type": "Point" - }, - "id": "02HC003.1987-03-17", - "properties": { - "DATE": "1987-03-17", - "FLOW": 7.440000057220459, - "FLOW_SYMBOL_EN": null, - "FLOW_SYMBOL_FR": null, - "IDENTIFIER": "02HC003.1987-03-17", - "LEVEL": null, - "LEVEL_SYMBOL_EN": null, - "LEVEL_SYMBOL_FR": null, - "PROV_TERR_STATE_LOC": "ON", - "STATION_NAME": "HUMBER RIVER AT WESTON", - "STATION_NUMBER": "02HC003" - }, - "type": "Feature" - }, - { - "geometry": { - "coordinates": [ - -79.52039337158203, - 43.69894027709961 - ], - "type": "Point" - }, - "id": "02HC003.1987-03-16", - "properties": { - "DATE": "1987-03-16", - "FLOW": 6.989999771118164, - "FLOW_SYMBOL_EN": null, - "FLOW_SYMBOL_FR": null, - "IDENTIFIER": "02HC003.1987-03-16", - "LEVEL": null, - "LEVEL_SYMBOL_EN": null, - "LEVEL_SYMBOL_FR": null, - "PROV_TERR_STATE_LOC": "ON", - "STATION_NAME": "HUMBER RIVER AT WESTON", - "STATION_NUMBER": "02HC003" - }, - "type": "Feature" - }, - { - "geometry": { - "coordinates": [ - -79.52039337158203, - 43.69894027709961 - ], - "type": "Point" - }, - "id": "02HC003.2017-05-26", - "properties": { - "DATE": "2017-05-26", - "FLOW": 30.399999618530277, - "FLOW_SYMBOL_EN": null, - "FLOW_SYMBOL_FR": null, - "IDENTIFIER": "02HC003.2017-05-26", - "LEVEL": 2.691999912261963, - "LEVEL_SYMBOL_EN": null, - "LEVEL_SYMBOL_FR": null, - "PROV_TERR_STATE_LOC": "ON", - "STATION_NAME": "HUMBER RIVER AT WESTON", - "STATION_NUMBER": "02HC003" - }, - "type": "Feature" - }, - 
{ - "geometry": { - "coordinates": [ - -79.52039337158203, - 43.69894027709961 - ], - "type": "Point" - }, - "id": "02HC003.1987-03-15", - "properties": { - "DATE": "1987-03-15", - "FLOW": 7.210000038146973, - "FLOW_SYMBOL_EN": null, - "FLOW_SYMBOL_FR": null, - "IDENTIFIER": "02HC003.1987-03-15", - "LEVEL": null, - "LEVEL_SYMBOL_EN": null, - "LEVEL_SYMBOL_FR": null, - "PROV_TERR_STATE_LOC": "ON", - "STATION_NAME": "HUMBER RIVER AT WESTON", - "STATION_NUMBER": "02HC003" - }, - "type": "Feature" - }, - { - "geometry": { - "coordinates": [ - -79.52039337158203, - 43.69894027709961 - ], - "type": "Point" - }, - "id": "02HC003.1983-11-17", - "properties": { - "DATE": "1983-11-17", - "FLOW": 13.0, - "FLOW_SYMBOL_EN": null, - "FLOW_SYMBOL_FR": null, - "IDENTIFIER": "02HC003.1983-11-17", - "LEVEL": null, - "LEVEL_SYMBOL_EN": null, - "LEVEL_SYMBOL_FR": null, - "PROV_TERR_STATE_LOC": "ON", - "STATION_NAME": "HUMBER RIVER AT WESTON", - "STATION_NUMBER": "02HC003" - }, - "type": "Feature" - }, - { - "geometry": { - "coordinates": [ - -79.52039337158203, - 43.69894027709961 - ], - "type": "Point" - }, - "id": "02HC003.2014-07-07", - "properties": { - "DATE": "2014-07-07", - "FLOW": 6.110000133514404, - "FLOW_SYMBOL_EN": null, - "FLOW_SYMBOL_FR": null, - "IDENTIFIER": "02HC003.2014-07-07", - "LEVEL": 2.2950000762939453, - "LEVEL_SYMBOL_EN": null, - "LEVEL_SYMBOL_FR": null, - "PROV_TERR_STATE_LOC": "ON", - "STATION_NAME": "HUMBER RIVER AT WESTON", - "STATION_NUMBER": "02HC003" - }, - "type": "Feature" - } - ], - "links": [ - { - "href": "https://geo.weather.gc.ca/geomet/features/collections/hydrometric-daily-mean/items?f=json", - "rel": "self", - "title": "This document as GeoJSON", - "type": "application/geo+json" - }, - { - "href": "https://geo.weather.gc.ca/geomet/features/collections/hydrometric-daily-mean/items?f=html", - "rel": "alternate", - "title": "This document as HTML", - "type": "text/html" - }, - { - "href": "https://geo.weather.gc.ca/geomet/features/collections/hydrometric-daily-mean/items/?offset=0", - "rel": "prev", - "title": "items (prev)", - "type": "application/geo+json" - }, - { - "href": "https://geo.weather.gc.ca/geomet/features/collections/hydrometric-daily-mean/items/?offset=500", - "rel": "next", - "title": "items (next)", - "type": "application/geo+json" - }, - { - "href": "https://geo.weather.gc.ca/geomet/features/collections/hydrometric-daily-mean", - "rel": "collection", - "title": "Daily Mean of Water Level or Flow", - "type": "application/json" - } - ], - "numberMatched": 26516, - "numberReturned": 50, - "timeStamp": "2019-12-23T14:08:36.974150Z", - "type": "FeatureCollection" -} diff --git a/tests/cite/cite.config.yml b/tests/cite/cite.config.yml index cc0eb7344..22d39510d 100644 --- a/tests/cite/cite.config.yml +++ b/tests/cite/cite.config.yml @@ -96,14 +96,51 @@ resources: temporal: begin: 1850-01-01T00:00:00Z end: null # or empty - providers: - type: feature - name: Elasticsearch - data: http://localhost:9200/canada-hydat-daily-mean-02hc003 + name: TinyDB + data: ../data/canada-hydat-daily-mean-02hc003.tinydb id_field: IDENTIFIER time_field: DATE + lakes: + type: collection + title: Large Lakes + description: lakes of the world, public domain + keywords: + - lakes + crs: + - CRS84 + links: + - type: text/html + rel: canonical + title: information + href: http://www.naturalearthdata.com/ + hreflang: en-US + extents: + spatial: + bbox: [-180,-90,180,90] + crs: http://www.opengis.net/def/crs/OGC/1.3/CRS84 + temporal: + begin: 2011-11-11T00:00:00Z + end: null # or empty + 
providers: + - type: feature + name: GeoJSON + data: ../data/ne_110m_lakes.geojson + id_field: id + - type: tile + name: MVT-tippecanoe + data: ../data/tiles/ne_110m_lakes + options: + bounds: [[-124.953634,-16.536406],[109.929807,66.969298]] + zoom: + min: 0 + max: 5 + format: + name: pbf + mimetype: application/vnd.mapbox-vector-tile + hello-world: type: process processor: diff --git a/tests/data/README.md b/tests/data/README.md index 3cebdae30..96b2fed8f 100644 --- a/tests/data/README.md +++ b/tests/data/README.md @@ -101,3 +101,8 @@ This directory provides test data to demonstrate functionality. - Notes - `items.geojson` tests pygeoapi's capability to serialize all geometry types for individual collection items in [JSON-LD formats](https://docs.pygeoapi.io/en/latest/configuration.html#linked-data), including GeoSPARQL WKT and schema.org/geo - The features represent the range of GeoJSON geometry types, instead of real locations. Additionally, each feature has a uri defined in the properties block + +### `canada-hydat-daily-mean-02HC003.tinydb` +- source: [Canadian National Water Data Archive](https://www.canada.ca/en/environment-climate-change/services/water-overview/quantity/monitoring/survey/data-products-services/national-archive-hydat.html) as extracted from the [MSC GeoMet OGC API](https://eccc-msc.github.io/open-data/msc-geomet/web-services_en/#ogc-api-features) service +- URL: https://www.canada.ca/en/environment-climate-change/services/water-overview/quantity/monitoring/survey/data-products-services/national-archive-hydat.html +- License: https://eccc-msc.github.io/open-data/licence/readme_en diff --git a/tests/data/admin/admin-put.json b/tests/data/admin/admin-put.json index c09827254..c3b2a0c94 100644 --- a/tests/data/admin/admin-put.json +++ b/tests/data/admin/admin-put.json @@ -13,8 +13,8 @@ "pretty_print": true, "limit": 10, "map": { - "url": "https://maps.wikimedia.org/osm-intl/{z}/{x}/{y}.png", - "attribution": "Wikimedia maps | Map data © OpenStreetMap contributors" + "url": "https://tile.openstreetmap.org/{z}/{x}/{y}.png", + "attribution": "© OpenStreetMap contributors" } }, "logging": { @@ -69,4 +69,4 @@ } } } - \ No newline at end of file + diff --git a/tests/data/canada-hydat-daily-mean-02HC003.tinydb b/tests/data/canada-hydat-daily-mean-02HC003.tinydb new file mode 100644 index 000000000..f37010195 --- /dev/null +++ b/tests/data/canada-hydat-daily-mean-02HC003.tinydb @@ -0,0 +1 @@ +{"_default": {"1": {"geometry": {"coordinates": [-79.52039337158203, 43.69894027709961], "type": "Point"}, "id": "02HC003.1975-10-03", "properties": {"DATE": "1975-10-03", "FLOW": 2.039999961853028, "FLOW_SYMBOL_EN": null, "FLOW_SYMBOL_FR": null, "IDENTIFIER": "02HC003.1975-10-03", "LEVEL": null, "LEVEL_SYMBOL_EN": null, "LEVEL_SYMBOL_FR": null, "PROV_TERR_STATE_LOC": "ON", "STATION_NAME": "HUMBER RIVER AT WESTON", "STATION_NUMBER": "02HC003"}, "type": "Feature"}, "2": {"geometry": {"coordinates": [-79.52039337158203, 43.69894027709961], "type": "Point"}, "id": "02HC003.1975-10-02", "properties": {"DATE": "1975-10-02", "FLOW": 2.059999942779541, "FLOW_SYMBOL_EN": null, "FLOW_SYMBOL_FR": null, "IDENTIFIER": "02HC003.1975-10-02", "LEVEL": null, "LEVEL_SYMBOL_EN": null, "LEVEL_SYMBOL_FR": null, "PROV_TERR_STATE_LOC": "ON", "STATION_NAME": "HUMBER RIVER AT WESTON", "STATION_NUMBER": "02HC003"}, "type": "Feature"}, "3": {"geometry": {"coordinates": [-79.52039337158203, 43.69894027709961], "type": "Point"}, "id": "02HC003.1975-10-01", "properties": {"DATE": "1975-10-01", "FLOW": 
2.140000104904175, "FLOW_SYMBOL_EN": null, "FLOW_SYMBOL_FR": null, "IDENTIFIER": "02HC003.1975-10-01", "LEVEL": null, "LEVEL_SYMBOL_EN": null, "LEVEL_SYMBOL_FR": null, "PROV_TERR_STATE_LOC": "ON", "STATION_NAME": "HUMBER RIVER AT WESTON", "STATION_NUMBER": "02HC003"}, "type": "Feature"}, "4": {"geometry": {"coordinates": [-79.52039337158203, 43.69894027709961], "type": "Point"}, "id": "02HC003.1975-10-07", "properties": {"DATE": "1975-10-07", "FLOW": 1.940000057220459, "FLOW_SYMBOL_EN": null, "FLOW_SYMBOL_FR": null, "IDENTIFIER": "02HC003.1975-10-07", "LEVEL": null, "LEVEL_SYMBOL_EN": null, "LEVEL_SYMBOL_FR": null, "PROV_TERR_STATE_LOC": "ON", "STATION_NAME": "HUMBER RIVER AT WESTON", "STATION_NUMBER": "02HC003"}, "type": "Feature"}, "5": {"geometry": {"coordinates": [-79.52039337158203, 43.69894027709961], "type": "Point"}, "id": "02HC003.1975-10-06", "properties": {"DATE": "1975-10-06", "FLOW": 1.9600000381469729, "FLOW_SYMBOL_EN": null, "FLOW_SYMBOL_FR": null, "IDENTIFIER": "02HC003.1975-10-06", "LEVEL": null, "LEVEL_SYMBOL_EN": null, "LEVEL_SYMBOL_FR": null, "PROV_TERR_STATE_LOC": "ON", "STATION_NAME": "HUMBER RIVER AT WESTON", "STATION_NUMBER": "02HC003"}, "type": "Feature"}, "6": {"geometry": {"coordinates": [-79.52039337158203, 43.69894027709961], "type": "Point"}, "id": "02HC003.1975-10-05", "properties": {"DATE": "1975-10-05", "FLOW": 1.909999966621399, "FLOW_SYMBOL_EN": null, "FLOW_SYMBOL_FR": null, "IDENTIFIER": "02HC003.1975-10-05", "LEVEL": null, "LEVEL_SYMBOL_EN": null, "LEVEL_SYMBOL_FR": null, "PROV_TERR_STATE_LOC": "ON", "STATION_NAME": "HUMBER RIVER AT WESTON", "STATION_NUMBER": "02HC003"}, "type": "Feature"}, "7": {"geometry": {"coordinates": [-79.52039337158203, 43.69894027709961], "type": "Point"}, "id": "02HC003.1975-10-04", "properties": {"DATE": "1975-10-04", "FLOW": 2.0, "FLOW_SYMBOL_EN": null, "FLOW_SYMBOL_FR": null, "IDENTIFIER": "02HC003.1975-10-04", "LEVEL": null, "LEVEL_SYMBOL_EN": null, "LEVEL_SYMBOL_FR": null, "PROV_TERR_STATE_LOC": "ON", "STATION_NAME": "HUMBER RIVER AT WESTON", "STATION_NUMBER": "02HC003"}, "type": "Feature"}, "8": {"geometry": {"coordinates": [-79.52039337158203, 43.69894027709961], "type": "Point"}, "id": "02HC003.1983-12-31", "properties": {"DATE": "1983-12-31", "FLOW": 2.5999999046325684, "FLOW_SYMBOL_EN": "Ice Conditions", "FLOW_SYMBOL_FR": "Conditions \u00e0 glace", "IDENTIFIER": "02HC003.1983-12-31", "LEVEL": null, "LEVEL_SYMBOL_EN": null, "LEVEL_SYMBOL_FR": null, "PROV_TERR_STATE_LOC": "ON", "STATION_NAME": "HUMBER RIVER AT WESTON", "STATION_NUMBER": "02HC003"}, "type": "Feature"}, "9": {"geometry": {"coordinates": [-79.52039337158203, 43.69894027709961], "type": "Point"}, "id": "02HC003.1983-12-30", "properties": {"DATE": "1983-12-30", "FLOW": 2.650000095367432, "FLOW_SYMBOL_EN": "Ice Conditions", "FLOW_SYMBOL_FR": "Conditions \u00e0 glace", "IDENTIFIER": "02HC003.1983-12-30", "LEVEL": null, "LEVEL_SYMBOL_EN": null, "LEVEL_SYMBOL_FR": null, "PROV_TERR_STATE_LOC": "ON", "STATION_NAME": "HUMBER RIVER AT WESTON", "STATION_NUMBER": "02HC003"}, "type": "Feature"}, "10": {"geometry": {"coordinates": [-79.52039337158203, 43.69894027709961], "type": "Point"}, "id": "02HC003.2017-05-23", "properties": {"DATE": "2017-05-23", "FLOW": 7.079999923706055, "FLOW_SYMBOL_EN": null, "FLOW_SYMBOL_FR": null, "IDENTIFIER": "02HC003.2017-05-23", "LEVEL": 2.3320000171661377, "LEVEL_SYMBOL_EN": null, "LEVEL_SYMBOL_FR": null, "PROV_TERR_STATE_LOC": "ON", "STATION_NAME": "HUMBER RIVER AT WESTON", "STATION_NUMBER": "02HC003"}, "type": "Feature"}, "11": 
{"geometry": {"coordinates": [-79.52039337158203, 43.69894027709961], "type": "Point"}, "id": "02HC003.1955-09-09", "properties": {"DATE": "1955-09-09", "FLOW": 1.4700000286102295, "FLOW_SYMBOL_EN": "Estimated", "FLOW_SYMBOL_FR": "Estim\u00e9", "IDENTIFIER": "02HC003.1955-09-09", "LEVEL": null, "LEVEL_SYMBOL_EN": null, "LEVEL_SYMBOL_FR": null, "PROV_TERR_STATE_LOC": "ON", "STATION_NAME": "HUMBER RIVER AT WESTON", "STATION_NUMBER": "02HC003"}, "type": "Feature"}, "12": {"geometry": {"coordinates": [-79.52039337158203, 43.69894027709961], "type": "Point"}, "id": "02HC003.1955-09-08", "properties": {"DATE": "1955-09-08", "FLOW": 1.4700000286102295, "FLOW_SYMBOL_EN": "Estimated", "FLOW_SYMBOL_FR": "Estim\u00e9", "IDENTIFIER": "02HC003.1955-09-08", "LEVEL": null, "LEVEL_SYMBOL_EN": null, "LEVEL_SYMBOL_FR": null, "PROV_TERR_STATE_LOC": "ON", "STATION_NAME": "HUMBER RIVER AT WESTON", "STATION_NUMBER": "02HC003"}, "type": "Feature"}, "13": {"geometry": {"coordinates": [-79.52039337158203, 43.69894027709961], "type": "Point"}, "id": "02HC003.1955-09-07", "properties": {"DATE": "1955-09-07", "FLOW": 1.4700000286102295, "FLOW_SYMBOL_EN": "Estimated", "FLOW_SYMBOL_FR": "Estim\u00e9", "IDENTIFIER": "02HC003.1955-09-07", "LEVEL": null, "LEVEL_SYMBOL_EN": null, "LEVEL_SYMBOL_FR": null, "PROV_TERR_STATE_LOC": "ON", "STATION_NAME": "HUMBER RIVER AT WESTON", "STATION_NUMBER": "02HC003"}, "type": "Feature"}, "14": {"geometry": {"coordinates": [-79.52039337158203, 43.69894027709961], "type": "Point"}, "id": "02HC003.1955-09-06", "properties": {"DATE": "1955-09-06", "FLOW": 1.4700000286102295, "FLOW_SYMBOL_EN": "Estimated", "FLOW_SYMBOL_FR": "Estim\u00e9", "IDENTIFIER": "02HC003.1955-09-06", "LEVEL": null, "LEVEL_SYMBOL_EN": null, "LEVEL_SYMBOL_FR": null, "PROV_TERR_STATE_LOC": "ON", "STATION_NAME": "HUMBER RIVER AT WESTON", "STATION_NUMBER": "02HC003"}, "type": "Feature"}, "15": {"geometry": {"coordinates": [-79.52039337158203, 43.69894027709961], "type": "Point"}, "id": "02HC003.1955-09-05", "properties": {"DATE": "1955-09-05", "FLOW": 1.4700000286102295, "FLOW_SYMBOL_EN": "Estimated", "FLOW_SYMBOL_FR": "Estim\u00e9", "IDENTIFIER": "02HC003.1955-09-05", "LEVEL": null, "LEVEL_SYMBOL_EN": null, "LEVEL_SYMBOL_FR": null, "PROV_TERR_STATE_LOC": "ON", "STATION_NAME": "HUMBER RIVER AT WESTON", "STATION_NUMBER": "02HC003"}, "type": "Feature"}, "16": {"geometry": {"coordinates": [-79.52039337158203, 43.69894027709961], "type": "Point"}, "id": "02HC003.1955-09-04", "properties": {"DATE": "1955-09-04", "FLOW": 1.4700000286102295, "FLOW_SYMBOL_EN": "Estimated", "FLOW_SYMBOL_FR": "Estim\u00e9", "IDENTIFIER": "02HC003.1955-09-04", "LEVEL": null, "LEVEL_SYMBOL_EN": null, "LEVEL_SYMBOL_FR": null, "PROV_TERR_STATE_LOC": "ON", "STATION_NAME": "HUMBER RIVER AT WESTON", "STATION_NUMBER": "02HC003"}, "type": "Feature"}, "17": {"geometry": {"coordinates": [-79.52039337158203, 43.69894027709961], "type": "Point"}, "id": "02HC003.1955-09-03", "properties": {"DATE": "1955-09-03", "FLOW": 1.4700000286102295, "FLOW_SYMBOL_EN": "Estimated", "FLOW_SYMBOL_FR": "Estim\u00e9", "IDENTIFIER": "02HC003.1955-09-03", "LEVEL": null, "LEVEL_SYMBOL_EN": null, "LEVEL_SYMBOL_FR": null, "PROV_TERR_STATE_LOC": "ON", "STATION_NAME": "HUMBER RIVER AT WESTON", "STATION_NUMBER": "02HC003"}, "type": "Feature"}, "18": {"geometry": {"coordinates": [-79.52039337158203, 43.69894027709961], "type": "Point"}, "id": "02HC003.1955-09-02", "properties": {"DATE": "1955-09-02", "FLOW": 1.4700000286102295, "FLOW_SYMBOL_EN": "Estimated", "FLOW_SYMBOL_FR": 
"Estim\u00e9", "IDENTIFIER": "02HC003.1955-09-02", "LEVEL": null, "LEVEL_SYMBOL_EN": null, "LEVEL_SYMBOL_FR": null, "PROV_TERR_STATE_LOC": "ON", "STATION_NAME": "HUMBER RIVER AT WESTON", "STATION_NUMBER": "02HC003"}, "type": "Feature"}, "19": {"geometry": {"coordinates": [-79.52039337158203, 43.69894027709961], "type": "Point"}, "id": "02HC003.1955-09-01", "properties": {"DATE": "1955-09-01", "FLOW": 1.4700000286102295, "FLOW_SYMBOL_EN": "Estimated", "FLOW_SYMBOL_FR": "Estim\u00e9", "IDENTIFIER": "02HC003.1955-09-01", "LEVEL": null, "LEVEL_SYMBOL_EN": null, "LEVEL_SYMBOL_FR": null, "PROV_TERR_STATE_LOC": "ON", "STATION_NAME": "HUMBER RIVER AT WESTON", "STATION_NUMBER": "02HC003"}, "type": "Feature"}, "20": {"geometry": {"coordinates": [-79.52039337158203, 43.69894027709961], "type": "Point"}, "id": "02HC003.1979-11-22", "properties": {"DATE": "1979-11-22", "FLOW": 5.230000019073486, "FLOW_SYMBOL_EN": null, "FLOW_SYMBOL_FR": null, "IDENTIFIER": "02HC003.1979-11-22", "LEVEL": null, "LEVEL_SYMBOL_EN": null, "LEVEL_SYMBOL_FR": null, "PROV_TERR_STATE_LOC": "ON", "STATION_NAME": "HUMBER RIVER AT WESTON", "STATION_NUMBER": "02HC003"}, "type": "Feature"}, "21": {"geometry": {"coordinates": [-79.52039337158203, 43.69894027709961], "type": "Point"}, "id": "02HC003.1979-11-23", "properties": {"DATE": "1979-11-23", "FLOW": 9.260000228881836, "FLOW_SYMBOL_EN": null, "FLOW_SYMBOL_FR": null, "IDENTIFIER": "02HC003.1979-11-23", "LEVEL": null, "LEVEL_SYMBOL_EN": null, "LEVEL_SYMBOL_FR": null, "PROV_TERR_STATE_LOC": "ON", "STATION_NAME": "HUMBER RIVER AT WESTON", "STATION_NUMBER": "02HC003"}, "type": "Feature"}, "22": {"geometry": {"coordinates": [-79.52039337158203, 43.69894027709961], "type": "Point"}, "id": "02HC003.1979-11-20", "properties": {"DATE": "1979-11-20", "FLOW": 3.390000104904175, "FLOW_SYMBOL_EN": null, "FLOW_SYMBOL_FR": null, "IDENTIFIER": "02HC003.1979-11-20", "LEVEL": null, "LEVEL_SYMBOL_EN": null, "LEVEL_SYMBOL_FR": null, "PROV_TERR_STATE_LOC": "ON", "STATION_NAME": "HUMBER RIVER AT WESTON", "STATION_NUMBER": "02HC003"}, "type": "Feature"}, "23": {"geometry": {"coordinates": [-79.52039337158203, 43.69894027709961], "type": "Point"}, "id": "02HC003.1979-11-21", "properties": {"DATE": "1979-11-21", "FLOW": 3.3299999237060547, "FLOW_SYMBOL_EN": null, "FLOW_SYMBOL_FR": null, "IDENTIFIER": "02HC003.1979-11-21", "LEVEL": null, "LEVEL_SYMBOL_EN": null, "LEVEL_SYMBOL_FR": null, "PROV_TERR_STATE_LOC": "ON", "STATION_NAME": "HUMBER RIVER AT WESTON", "STATION_NUMBER": "02HC003"}, "type": "Feature"}, "24": {"geometry": {"coordinates": [-79.52039337158203, 43.69894027709961], "type": "Point"}, "id": "02HC003.1979-11-26", "properties": {"DATE": "1979-11-26", "FLOW": 25.799999237060547, "FLOW_SYMBOL_EN": null, "FLOW_SYMBOL_FR": null, "IDENTIFIER": "02HC003.1979-11-26", "LEVEL": null, "LEVEL_SYMBOL_EN": null, "LEVEL_SYMBOL_FR": null, "PROV_TERR_STATE_LOC": "ON", "STATION_NAME": "HUMBER RIVER AT WESTON", "STATION_NUMBER": "02HC003"}, "type": "Feature"}, "25": {"geometry": {"coordinates": [-79.52039337158203, 43.69894027709961], "type": "Point"}, "id": "02HC003.1979-11-27", "properties": {"DATE": "1979-11-27", "FLOW": 24.399999618530277, "FLOW_SYMBOL_EN": null, "FLOW_SYMBOL_FR": null, "IDENTIFIER": "02HC003.1979-11-27", "LEVEL": null, "LEVEL_SYMBOL_EN": null, "LEVEL_SYMBOL_FR": null, "PROV_TERR_STATE_LOC": "ON", "STATION_NAME": "HUMBER RIVER AT WESTON", "STATION_NUMBER": "02HC003"}, "type": "Feature"}, "26": {"geometry": {"coordinates": [-79.52039337158203, 43.69894027709961], "type": "Point"}, "id": 
"02HC003.1979-11-24", "properties": {"DATE": "1979-11-24", "FLOW": 20.5, "FLOW_SYMBOL_EN": null, "FLOW_SYMBOL_FR": null, "IDENTIFIER": "02HC003.1979-11-24", "LEVEL": null, "LEVEL_SYMBOL_EN": null, "LEVEL_SYMBOL_FR": null, "PROV_TERR_STATE_LOC": "ON", "STATION_NAME": "HUMBER RIVER AT WESTON", "STATION_NUMBER": "02HC003"}, "type": "Feature"}, "27": {"geometry": {"coordinates": [-79.52039337158203, 43.69894027709961], "type": "Point"}, "id": "02HC003.1979-11-25", "properties": {"DATE": "1979-11-25", "FLOW": 17.899999618530273, "FLOW_SYMBOL_EN": null, "FLOW_SYMBOL_FR": null, "IDENTIFIER": "02HC003.1979-11-25", "LEVEL": null, "LEVEL_SYMBOL_EN": null, "LEVEL_SYMBOL_FR": null, "PROV_TERR_STATE_LOC": "ON", "STATION_NAME": "HUMBER RIVER AT WESTON", "STATION_NUMBER": "02HC003"}, "type": "Feature"}, "28": {"geometry": {"coordinates": [-79.52039337158203, 43.69894027709961], "type": "Point"}, "id": "02HC003.1979-11-28", "properties": {"DATE": "1979-11-28", "FLOW": 18.600000381469727, "FLOW_SYMBOL_EN": null, "FLOW_SYMBOL_FR": null, "IDENTIFIER": "02HC003.1979-11-28", "LEVEL": null, "LEVEL_SYMBOL_EN": null, "LEVEL_SYMBOL_FR": null, "PROV_TERR_STATE_LOC": "ON", "STATION_NAME": "HUMBER RIVER AT WESTON", "STATION_NUMBER": "02HC003"}, "type": "Feature"}, "29": {"geometry": {"coordinates": [-79.52039337158203, 43.69894027709961], "type": "Point"}, "id": "02HC003.1979-11-29", "properties": {"DATE": "1979-11-29", "FLOW": 17.5, "FLOW_SYMBOL_EN": null, "FLOW_SYMBOL_FR": null, "IDENTIFIER": "02HC003.1979-11-29", "LEVEL": null, "LEVEL_SYMBOL_EN": null, "LEVEL_SYMBOL_FR": null, "PROV_TERR_STATE_LOC": "ON", "STATION_NAME": "HUMBER RIVER AT WESTON", "STATION_NUMBER": "02HC003"}, "type": "Feature"}, "30": {"geometry": {"coordinates": [-79.52039337158203, 43.69894027709961], "type": "Point"}, "id": "02HC003.1967-02-09", "properties": {"DATE": "1967-02-09", "FLOW": 4.53000020980835, "FLOW_SYMBOL_EN": "Ice Conditions", "FLOW_SYMBOL_FR": "Conditions \u00e0 glace", "IDENTIFIER": "02HC003.1967-02-09", "LEVEL": null, "LEVEL_SYMBOL_EN": null, "LEVEL_SYMBOL_FR": null, "PROV_TERR_STATE_LOC": "ON", "STATION_NAME": "HUMBER RIVER AT WESTON", "STATION_NUMBER": "02HC003"}, "type": "Feature"}, "31": {"geometry": {"coordinates": [-79.52039337158203, 43.69894027709961], "type": "Point"}, "id": "02HC003.1967-02-08", "properties": {"DATE": "1967-02-08", "FLOW": 5.659999847412109, "FLOW_SYMBOL_EN": "Ice Conditions", "FLOW_SYMBOL_FR": "Conditions \u00e0 glace", "IDENTIFIER": "02HC003.1967-02-08", "LEVEL": null, "LEVEL_SYMBOL_EN": null, "LEVEL_SYMBOL_FR": null, "PROV_TERR_STATE_LOC": "ON", "STATION_NAME": "HUMBER RIVER AT WESTON", "STATION_NUMBER": "02HC003"}, "type": "Feature"}, "32": {"geometry": {"coordinates": [-79.52039337158203, 43.69894027709961], "type": "Point"}, "id": "02HC003.1967-02-01", "properties": {"DATE": "1967-02-01", "FLOW": 3.259999990463257, "FLOW_SYMBOL_EN": "Ice Conditions", "FLOW_SYMBOL_FR": "Conditions \u00e0 glace", "IDENTIFIER": "02HC003.1967-02-01", "LEVEL": null, "LEVEL_SYMBOL_EN": null, "LEVEL_SYMBOL_FR": null, "PROV_TERR_STATE_LOC": "ON", "STATION_NAME": "HUMBER RIVER AT WESTON", "STATION_NUMBER": "02HC003"}, "type": "Feature"}, "33": {"geometry": {"coordinates": [-79.52039337158203, 43.69894027709961], "type": "Point"}, "id": "02HC003.1967-02-03", "properties": {"DATE": "1967-02-03", "FLOW": 3.109999895095825, "FLOW_SYMBOL_EN": "Ice Conditions", "FLOW_SYMBOL_FR": "Conditions \u00e0 glace", "IDENTIFIER": "02HC003.1967-02-03", "LEVEL": null, "LEVEL_SYMBOL_EN": null, "LEVEL_SYMBOL_FR": null, 
"PROV_TERR_STATE_LOC": "ON", "STATION_NAME": "HUMBER RIVER AT WESTON", "STATION_NUMBER": "02HC003"}, "type": "Feature"}, "34": {"geometry": {"coordinates": [-79.52039337158203, 43.69894027709961], "type": "Point"}, "id": "02HC003.1967-02-02", "properties": {"DATE": "1967-02-02", "FLOW": 2.8299999237060547, "FLOW_SYMBOL_EN": "Ice Conditions", "FLOW_SYMBOL_FR": "Conditions \u00e0 glace", "IDENTIFIER": "02HC003.1967-02-02", "LEVEL": null, "LEVEL_SYMBOL_EN": null, "LEVEL_SYMBOL_FR": null, "PROV_TERR_STATE_LOC": "ON", "STATION_NAME": "HUMBER RIVER AT WESTON", "STATION_NUMBER": "02HC003"}, "type": "Feature"}, "35": {"geometry": {"coordinates": [-79.52039337158203, 43.69894027709961], "type": "Point"}, "id": "02HC003.1967-02-05", "properties": {"DATE": "1967-02-05", "FLOW": 3.9600000381469727, "FLOW_SYMBOL_EN": "Ice Conditions", "FLOW_SYMBOL_FR": "Conditions \u00e0 glace", "IDENTIFIER": "02HC003.1967-02-05", "LEVEL": null, "LEVEL_SYMBOL_EN": null, "LEVEL_SYMBOL_FR": null, "PROV_TERR_STATE_LOC": "ON", "STATION_NAME": "HUMBER RIVER AT WESTON", "STATION_NUMBER": "02HC003"}, "type": "Feature"}, "36": {"geometry": {"coordinates": [-79.52039337158203, 43.69894027709961], "type": "Point"}, "id": "02HC003.1967-02-04", "properties": {"DATE": "1967-02-04", "FLOW": 3.400000095367432, "FLOW_SYMBOL_EN": "Ice Conditions", "FLOW_SYMBOL_FR": "Conditions \u00e0 glace", "IDENTIFIER": "02HC003.1967-02-04", "LEVEL": null, "LEVEL_SYMBOL_EN": null, "LEVEL_SYMBOL_FR": null, "PROV_TERR_STATE_LOC": "ON", "STATION_NAME": "HUMBER RIVER AT WESTON", "STATION_NUMBER": "02HC003"}, "type": "Feature"}, "37": {"geometry": {"coordinates": [-79.52039337158203, 43.69894027709961], "type": "Point"}, "id": "02HC003.1967-02-07", "properties": {"DATE": "1967-02-07", "FLOW": 6.369999885559082, "FLOW_SYMBOL_EN": "Ice Conditions", "FLOW_SYMBOL_FR": "Conditions \u00e0 glace", "IDENTIFIER": "02HC003.1967-02-07", "LEVEL": null, "LEVEL_SYMBOL_EN": null, "LEVEL_SYMBOL_FR": null, "PROV_TERR_STATE_LOC": "ON", "STATION_NAME": "HUMBER RIVER AT WESTON", "STATION_NUMBER": "02HC003"}, "type": "Feature"}, "38": {"geometry": {"coordinates": [-79.52039337158203, 43.69894027709961], "type": "Point"}, "id": "02HC003.1967-02-06", "properties": {"DATE": "1967-02-06", "FLOW": 4.53000020980835, "FLOW_SYMBOL_EN": "Ice Conditions", "FLOW_SYMBOL_FR": "Conditions \u00e0 glace", "IDENTIFIER": "02HC003.1967-02-06", "LEVEL": null, "LEVEL_SYMBOL_EN": null, "LEVEL_SYMBOL_FR": null, "PROV_TERR_STATE_LOC": "ON", "STATION_NAME": "HUMBER RIVER AT WESTON", "STATION_NUMBER": "02HC003"}, "type": "Feature"}, "39": {"geometry": {"coordinates": [-79.52039337158203, 43.69894027709961], "type": "Point"}, "id": "02HC003.1995-01-29", "properties": {"DATE": "1995-01-29", "FLOW": 3.309999942779541, "FLOW_SYMBOL_EN": "Ice Conditions", "FLOW_SYMBOL_FR": "Conditions \u00e0 glace", "IDENTIFIER": "02HC003.1995-01-29", "LEVEL": null, "LEVEL_SYMBOL_EN": null, "LEVEL_SYMBOL_FR": null, "PROV_TERR_STATE_LOC": "ON", "STATION_NAME": "HUMBER RIVER AT WESTON", "STATION_NUMBER": "02HC003"}, "type": "Feature"}, "40": {"geometry": {"coordinates": [-79.52039337158203, 43.69894027709961], "type": "Point"}, "id": "02HC003.1987-03-13", "properties": {"DATE": "1987-03-13", "FLOW": 9.720000267028809, "FLOW_SYMBOL_EN": null, "FLOW_SYMBOL_FR": null, "IDENTIFIER": "02HC003.1987-03-13", "LEVEL": null, "LEVEL_SYMBOL_EN": null, "LEVEL_SYMBOL_FR": null, "PROV_TERR_STATE_LOC": "ON", "STATION_NAME": "HUMBER RIVER AT WESTON", "STATION_NUMBER": "02HC003"}, "type": "Feature"}, "41": {"geometry": {"coordinates": 
[-79.52039337158203, 43.69894027709961], "type": "Point"}, "id": "02HC003.2017-05-27", "properties": {"DATE": "2017-05-27", "FLOW": 17.299999237060547, "FLOW_SYMBOL_EN": null, "FLOW_SYMBOL_FR": null, "IDENTIFIER": "02HC003.2017-05-27", "LEVEL": 2.5420000553131104, "LEVEL_SYMBOL_EN": null, "LEVEL_SYMBOL_FR": null, "PROV_TERR_STATE_LOC": "ON", "STATION_NAME": "HUMBER RIVER AT WESTON", "STATION_NUMBER": "02HC003"}, "type": "Feature"}, "42": {"geometry": {"coordinates": [-79.52039337158203, 43.69894027709961], "type": "Point"}, "id": "02HC003.1987-03-12", "properties": {"DATE": "1987-03-12", "FLOW": 12.600000381469728, "FLOW_SYMBOL_EN": null, "FLOW_SYMBOL_FR": null, "IDENTIFIER": "02HC003.1987-03-12", "LEVEL": null, "LEVEL_SYMBOL_EN": null, "LEVEL_SYMBOL_FR": null, "PROV_TERR_STATE_LOC": "ON", "STATION_NAME": "HUMBER RIVER AT WESTON", "STATION_NUMBER": "02HC003"}, "type": "Feature"}, "43": {"geometry": {"coordinates": [-79.52039337158203, 43.69894027709961], "type": "Point"}, "id": "02HC003.1987-03-11", "properties": {"DATE": "1987-03-11", "FLOW": 16.200000762939453, "FLOW_SYMBOL_EN": null, "FLOW_SYMBOL_FR": null, "IDENTIFIER": "02HC003.1987-03-11", "LEVEL": null, "LEVEL_SYMBOL_EN": null, "LEVEL_SYMBOL_FR": null, "PROV_TERR_STATE_LOC": "ON", "STATION_NAME": "HUMBER RIVER AT WESTON", "STATION_NUMBER": "02HC003"}, "type": "Feature"}, "44": {"geometry": {"coordinates": [-79.52039337158203, 43.69894027709961], "type": "Point"}, "id": "02HC003.1987-03-10", "properties": {"DATE": "1987-03-10", "FLOW": 29.100000381469727, "FLOW_SYMBOL_EN": null, "FLOW_SYMBOL_FR": null, "IDENTIFIER": "02HC003.1987-03-10", "LEVEL": null, "LEVEL_SYMBOL_EN": null, "LEVEL_SYMBOL_FR": null, "PROV_TERR_STATE_LOC": "ON", "STATION_NAME": "HUMBER RIVER AT WESTON", "STATION_NUMBER": "02HC003"}, "type": "Feature"}, "45": {"geometry": {"coordinates": [-79.52039337158203, 43.69894027709961], "type": "Point"}, "id": "02HC003.1987-03-17", "properties": {"DATE": "1987-03-17", "FLOW": 7.440000057220459, "FLOW_SYMBOL_EN": null, "FLOW_SYMBOL_FR": null, "IDENTIFIER": "02HC003.1987-03-17", "LEVEL": null, "LEVEL_SYMBOL_EN": null, "LEVEL_SYMBOL_FR": null, "PROV_TERR_STATE_LOC": "ON", "STATION_NAME": "HUMBER RIVER AT WESTON", "STATION_NUMBER": "02HC003"}, "type": "Feature"}, "46": {"geometry": {"coordinates": [-79.52039337158203, 43.69894027709961], "type": "Point"}, "id": "02HC003.1987-03-16", "properties": {"DATE": "1987-03-16", "FLOW": 6.989999771118164, "FLOW_SYMBOL_EN": null, "FLOW_SYMBOL_FR": null, "IDENTIFIER": "02HC003.1987-03-16", "LEVEL": null, "LEVEL_SYMBOL_EN": null, "LEVEL_SYMBOL_FR": null, "PROV_TERR_STATE_LOC": "ON", "STATION_NAME": "HUMBER RIVER AT WESTON", "STATION_NUMBER": "02HC003"}, "type": "Feature"}, "47": {"geometry": {"coordinates": [-79.52039337158203, 43.69894027709961], "type": "Point"}, "id": "02HC003.2017-05-26", "properties": {"DATE": "2017-05-26", "FLOW": 30.399999618530277, "FLOW_SYMBOL_EN": null, "FLOW_SYMBOL_FR": null, "IDENTIFIER": "02HC003.2017-05-26", "LEVEL": 2.691999912261963, "LEVEL_SYMBOL_EN": null, "LEVEL_SYMBOL_FR": null, "PROV_TERR_STATE_LOC": "ON", "STATION_NAME": "HUMBER RIVER AT WESTON", "STATION_NUMBER": "02HC003"}, "type": "Feature"}, "48": {"geometry": {"coordinates": [-79.52039337158203, 43.69894027709961], "type": "Point"}, "id": "02HC003.1987-03-15", "properties": {"DATE": "1987-03-15", "FLOW": 7.210000038146973, "FLOW_SYMBOL_EN": null, "FLOW_SYMBOL_FR": null, "IDENTIFIER": "02HC003.1987-03-15", "LEVEL": null, "LEVEL_SYMBOL_EN": null, "LEVEL_SYMBOL_FR": null, "PROV_TERR_STATE_LOC": "ON", 
"STATION_NAME": "HUMBER RIVER AT WESTON", "STATION_NUMBER": "02HC003"}, "type": "Feature"}, "49": {"geometry": {"coordinates": [-79.52039337158203, 43.69894027709961], "type": "Point"}, "id": "02HC003.1983-11-17", "properties": {"DATE": "1983-11-17", "FLOW": 13.0, "FLOW_SYMBOL_EN": null, "FLOW_SYMBOL_FR": null, "IDENTIFIER": "02HC003.1983-11-17", "LEVEL": null, "LEVEL_SYMBOL_EN": null, "LEVEL_SYMBOL_FR": null, "PROV_TERR_STATE_LOC": "ON", "STATION_NAME": "HUMBER RIVER AT WESTON", "STATION_NUMBER": "02HC003"}, "type": "Feature"}, "50": {"geometry": {"coordinates": [-79.52039337158203, 43.69894027709961], "type": "Point"}, "id": "02HC003.2014-07-07", "properties": {"DATE": "2014-07-07", "FLOW": 6.110000133514404, "FLOW_SYMBOL_EN": null, "FLOW_SYMBOL_FR": null, "IDENTIFIER": "02HC003.2014-07-07", "LEVEL": 2.2950000762939453, "LEVEL_SYMBOL_EN": null, "LEVEL_SYMBOL_FR": null, "PROV_TERR_STATE_LOC": "ON", "STATION_NAME": "HUMBER RIVER AT WESTON", "STATION_NUMBER": "02HC003"}, "type": "Feature"}}} \ No newline at end of file diff --git a/tests/data/postgres_manager_full_structure.backup.sql b/tests/data/postgres_manager_full_structure.backup.sql new file mode 100644 index 000000000..804b34b67 --- /dev/null +++ b/tests/data/postgres_manager_full_structure.backup.sql @@ -0,0 +1,68 @@ +-- +-- PostgreSQL database dump +-- + +-- Dumped from database version 14.12 (Ubuntu 14.12-1.pgdg20.04+1) +-- Dumped by pg_dump version 16.3 (Ubuntu 16.3-1.pgdg20.04+1) + +SET statement_timeout = 0; +SET lock_timeout = 0; +SET idle_in_transaction_session_timeout = 0; +SET client_encoding = 'UTF8'; +SET standard_conforming_strings = on; +SELECT pg_catalog.set_config('search_path', '', false); +SET check_function_bodies = false; +SET xmloption = content; +SET client_min_messages = warning; +SET row_security = off; + +-- +-- Name: public; Type: SCHEMA; Schema: -; Owner: postgres +-- + +ALTER SCHEMA public OWNER TO postgres; + +SET default_tablespace = ''; + +SET default_table_access_method = heap; + +-- +-- Name: jobs; Type: TABLE; Schema: public; Owner: postgres +-- + +CREATE TABLE public.jobs ( + type character varying DEFAULT 'process'::character varying NOT NULL, + identifier character varying NOT NULL, + process_id character varying NOT NULL, + job_start_datetime timestamp without time zone, + job_end_datetime timestamp without time zone, + status character varying NOT NULL, + location character varying, + mimetype character varying, + message character varying, + progress integer NOT NULL +); + + +ALTER TABLE public.jobs OWNER TO postgres; + +-- +-- Name: jobs jobs_pkey; Type: CONSTRAINT; Schema: public; Owner: postgres +-- + +ALTER TABLE ONLY public.jobs + ADD CONSTRAINT jobs_pkey PRIMARY KEY (identifier); + + +-- +-- Name: SCHEMA public; Type: ACL; Schema: -; Owner: postgres +-- + +REVOKE USAGE ON SCHEMA public FROM PUBLIC; +GRANT ALL ON SCHEMA public TO PUBLIC; + + +-- +-- PostgreSQL database dump complete +-- + diff --git a/tests/data/random.parquet b/tests/data/random.parquet new file mode 100644 index 000000000..f8168f471 Binary files /dev/null and b/tests/data/random.parquet differ diff --git a/tests/data/random_nogeom.parquet b/tests/data/random_nogeom.parquet new file mode 100644 index 000000000..e29695dae Binary files /dev/null and b/tests/data/random_nogeom.parquet differ diff --git a/tests/pygeoapi-test-config-admin.yml b/tests/pygeoapi-test-config-admin.yml index aba8b9146..c7bdbc49d 100644 --- a/tests/pygeoapi-test-config-admin.yml +++ b/tests/pygeoapi-test-config-admin.yml @@ -41,10 +41,8 @@ server: 
pretty_print: true limit: 10 map: - url: https://maps.wikimedia.org/osm-intl/{z}/{x}/{y}.png - attribution: Wikimedia - maps | Map data © OpenStreetMap - contributors + url: https://tile.openstreetmap.org/{z}/{x}/{y}.png + attribution: '© OpenStreetMap contributors' # manager: # name: TinyDB # connection: /tmp/pygeoapi-process-manager.db diff --git a/tests/pygeoapi-test-config-apirules.yml b/tests/pygeoapi-test-config-apirules.yml index 598903de6..6aba4c71c 100644 --- a/tests/pygeoapi-test-config-apirules.yml +++ b/tests/pygeoapi-test-config-apirules.yml @@ -46,8 +46,8 @@ server: limit: 10 # templates: /path/to/templates map: - url: https://maps.wikimedia.org/osm-intl/{z}/{x}/{y}.png - attribution: 'Wikimedia maps | Map data © OpenStreetMap contributors' + url: https://tile.openstreetmap.org/{z}/{x}/{y}.png + attribution: '© OpenStreetMap contributors' manager: name: TinyDB connection: /tmp/pygeoapi-test-process-manager.db diff --git a/tests/pygeoapi-test-config-enclosure.yml b/tests/pygeoapi-test-config-enclosure.yml index e4011622a..aa65b0595 100644 --- a/tests/pygeoapi-test-config-enclosure.yml +++ b/tests/pygeoapi-test-config-enclosure.yml @@ -44,8 +44,8 @@ server: limit: 10 # templates: /path/to/templates map: - url: https://maps.wikimedia.org/osm-intl/{z}/{x}/{y}.png - attribution: 'Wikimedia maps | Map data © OpenStreetMap contributors' + url: https://tile.openstreetmap.org/{z}/{x}/{y}.png + attribution: '© OpenStreetMap contributors' manager: name: TinyDB connection: /tmp/pygeoapi-test-process-manager.db diff --git a/tests/pygeoapi-test-config-envvars.yml b/tests/pygeoapi-test-config-envvars.yml index 0b2d4c8e9..f84fc79ac 100644 --- a/tests/pygeoapi-test-config-envvars.yml +++ b/tests/pygeoapi-test-config-envvars.yml @@ -41,8 +41,8 @@ server: limit: 10 # templates: /path/to/templates map: - url: https://maps.wikimedia.org/osm-intl/{z}/{x}/{y}.png - attribution: 'Wikimedia maps | Map data © OpenStreetMap contributors' + url: https://tile.openstreetmap.org/{z}/{x}/{y}.png + attribution: '© OpenStreetMap contributors' logging: level: DEBUG diff --git a/tests/pygeoapi-test-config-hidden-resources.yml b/tests/pygeoapi-test-config-hidden-resources.yml index 5be7a676a..3682bc2c9 100644 --- a/tests/pygeoapi-test-config-hidden-resources.yml +++ b/tests/pygeoapi-test-config-hidden-resources.yml @@ -44,8 +44,8 @@ server: limit: 10 # templates: /path/to/templates map: - url: https://maps.wikimedia.org/osm-intl/{z}/{x}/{y}.png - attribution: 'Wikimedia maps | Map data © OpenStreetMap contributors' + url: https://tile.openstreetmap.org/{z}/{x}/{y}.png + attribution: '© OpenStreetMap contributors' manager: name: TinyDB connection: /tmp/pygeoapi-test-process-manager.db diff --git a/tests/pygeoapi-test-config-ogr.yml b/tests/pygeoapi-test-config-ogr.yml index 703e0a7fa..e4237941a 100644 --- a/tests/pygeoapi-test-config-ogr.yml +++ b/tests/pygeoapi-test-config-ogr.yml @@ -41,8 +41,8 @@ server: limit: 10 # templates: /path/to/templates map: - url: https://maps.wikimedia.org/osm-intl/{z}/{x}/{y}.png - attribution: 'Wikimedia maps | Map data © OpenStreetMap contributors' + url: https://tile.openstreetmap.org/{z}/{x}/{y}.png + attribution: '© OpenStreetMap contributors' logging: level: DEBUG diff --git a/tests/pygeoapi-test-config-postgresql-manager.yml b/tests/pygeoapi-test-config-postgresql-manager.yml new file mode 100644 index 000000000..e0fe947d0 --- /dev/null +++ b/tests/pygeoapi-test-config-postgresql-manager.yml @@ -0,0 +1,113 @@ +# 
================================================================= +# +# Authors: Francesco Martinelli +# +# Copyright (c) 2024 Francesco Martinelli +# +# Permission is hereby granted, free of charge, to any person +# obtaining a copy of this software and associated documentation +# files (the "Software"), to deal in the Software without +# restriction, including without limitation the rights to use, +# copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the +# Software is furnished to do so, subject to the following +# conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +# OTHER DEALINGS IN THE SOFTWARE. +# +# ================================================================= + +server: + bind: + host: 0.0.0.0 + port: 5000 + url: http://localhost:5000/ + mimetype: application/json; charset=UTF-8 + encoding: utf-8 + gzip: false + languages: + # First language is the default language + - en-US + - fr-CA + cors: true + pretty_print: true + limit: 10 + # templates: /path/to/templates + map: + url: https://tile.openstreetmap.org/{z}/{x}/{y}.png + attribution: '© OpenStreetMap contributors' + manager: + name: PostgreSQL + connection: + host: localhost + port: 5432 + database: test + user: postgres + password: ${POSTGRESQL_PASSWORD:-postgres} + # Alternative accepted connection definition: + # connection: postgresql://postgres:${POSTGRESQL_PASSWORD:-postgres}@localhost:5432/test + output_dir: /tmp + +logging: + level: DEBUG + #logfile: /tmp/pygeoapi.log + +metadata: + identification: + title: + en: pygeoapi instance to test Process Manager backed by PostgreSQL + fr: test instance de pygeoapi + description: + en: pygeoapi provides an API to geospatial data + fr: pygeoapi fournit une API aux données géospatiales + keywords: + en: + - geospatial + - data + - api + fr: + - géospatiale + - données + - api + keywords_type: theme + terms_of_service: https://creativecommons.org/licenses/by/4.0/ + url: http://example.org + license: + name: CC-BY 4.0 license + url: https://creativecommons.org/licenses/by/4.0/ + provider: + name: Organization Name + url: https://pygeoapi.io + contact: + name: Lastname, Firstname + position: Position Title + address: Mailing Address + city: City + stateorprovince: Administrative Area + postalcode: Zip or Postal Code + country: Country + phone: +xx-xxx-xxx-xxxx + fax: +xx-xxx-xxx-xxxx + email: you@example.org + url: Contact URL + hours: Hours of Service + instructions: During hours of service. Off on weekends. 
+ role: pointOfContact + +resources: + + hello-world: + type: process + processor: + name: HelloWorld + diff --git a/tests/pygeoapi-test-config.yml b/tests/pygeoapi-test-config.yml index 588d06301..58b62484f 100644 --- a/tests/pygeoapi-test-config.yml +++ b/tests/pygeoapi-test-config.yml @@ -44,8 +44,8 @@ server: limit: 10 # templates: /path/to/templates map: - url: https://maps.wikimedia.org/osm-intl/{z}/{x}/{y}.png - attribution: 'Wikimedia maps | Map data © OpenStreetMap contributors' + url: https://tile.openstreetmap.org/{z}/{x}/{y}.png + attribution: '© OpenStreetMap contributors' manager: name: TinyDB connection: /tmp/pygeoapi-test-process-manager.db @@ -398,6 +398,44 @@ resources: name: png mimetype: image/png + canada-metadata: + type: collection + title: + en: Open Canada sample data + fr: Exemple de donn\u00e9es Canada Ouvert + description: + en: Sample metadata records from open.canada.ca + fr: Exemples d'enregistrements de m\u00e9tadonn\u00e9es sur ouvert.canada.ca + keywords: + en: + - canada + - open data + fr: + - canada + - donn\u00e9es ouvertes + links: + - type: text/html + rel: canonical + title: information + href: https://open.canada.ca/en/open-data + hreflang: en-CA + - type: text/html + rel: alternate + title: informations + href: https://ouvert.canada.ca/fr/donnees-ouvertes + hreflang: fr-CA + extents: + spatial: + bbox: [-180,-90,180,90] + crs: http://www.opengis.net/def/crs/OGC/1.3/CRS84 + providers: + - type: record + name: TinyDBCatalogue + data: tests/data/open.canada.ca/sample-records.tinydb + id_field: externalId + time_field: created + title_field: title + hello-world: type: process processor: diff --git a/tests/test_oracle_provider.py b/tests/test_oracle_provider.py index d84b298e7..64620ee0f 100644 --- a/tests/test_oracle_provider.py +++ b/tests/test_oracle_provider.py @@ -62,8 +62,11 @@ def process_query( q, language, filterq, + extra_params ): sql = "ID = 10 AND :foo != :bar" + if extra_params.get("custom-auth") == "forbidden": + sql = f"{sql} AND 'auth' = 'you are not allowed'" if sql_query.find(" WHERE ") == -1: sql_query = sql_query.replace("#WHERE#", f" WHERE {sql}") @@ -632,6 +635,15 @@ def test_query_mandatory_properties_must_be_specified(config): p.query(properties=[("id", "123")]) +def test_extra_params_are_passed_to_sql_manipulator(config_manipulator): + extra_params = [("custom-auth", "forbidden")] + + p = OracleProvider(config_manipulator) + response = p.query(properties=extra_params) + + assert not response['features'] + + @pytest.fixture() def database_connection_pool(config_db_conn): os.environ["ORACLE_POOL_MIN"] = "2" # noqa: F841 diff --git a/tests/test_parquet_provider.py b/tests/test_parquet_provider.py new file mode 100644 index 000000000..eaa28a848 --- /dev/null +++ b/tests/test_parquet_provider.py @@ -0,0 +1,211 @@ +# ================================================================= +# +# Authors: Leo Ghignone +# +# Copyright (c) 2024 Leo Ghignone +# +# Permission is hereby granted, free of charge, to any person +# obtaining a copy of this software and associated documentation +# files (the "Software"), to deal in the Software without +# restriction, including without limitation the rights to use, +# copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the +# Software is furnished to do so, subject to the following +# conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +# OTHER DEALINGS IN THE SOFTWARE. +# +# ================================================================= + +import pytest + +from pygeoapi.provider.base import ProviderItemNotFoundError +from pygeoapi.provider.parquet import ParquetProvider + +from .util import get_test_file_path + +path = get_test_file_path( + 'data/random.parquet') + +path_nogeom = get_test_file_path( + 'data/random_nogeom.parquet') + + +@pytest.fixture() +def config_parquet(): + return { + 'name': 'Parquet', + 'type': 'feature', + 'data': { + 'source_type': 'Parquet', + 'source': path, + }, + 'id_field': 'id', + 'time_field': 'time', + 'x_field': 'lon', + 'y_field': 'lat', + } + + +@pytest.fixture() +def config_parquet_nogeom_notime(): + return { + 'name': 'ParquetNoGeomNoTime', + 'type': 'feature', + 'data': { + 'source_type': 'Parquet', + 'source': path_nogeom, + }, + 'id_field': 'id' + } + + +def test_get_fields(config_parquet): + """Testing field types""" + + p = ParquetProvider(config_parquet) + results = p.get_fields() + assert results['lat']['type'] == 'number' + assert results['lon']['format'] == 'double' + assert results['time']['format'] == 'date-time' + + +def test_get(config_parquet): + """Testing query for a specific object""" + + p = ParquetProvider(config_parquet) + result = p.get('42') + assert result['id'] == '42' + assert result['properties']['lon'] == 4.947447 + + +def test_get_not_existing_feature_raise_exception( + config_parquet +): + """Testing query for a not existing object""" + p = ParquetProvider(config_parquet) + with pytest.raises(ProviderItemNotFoundError): + p.get(-1) + + +def test_query_hits(config_parquet): + """Testing query on entire collection for hits""" + + p = ParquetProvider(config_parquet) + feature_collection = p.query(resulttype='hits') + assert feature_collection.get('type') == 'FeatureCollection' + features = feature_collection.get('features') + assert len(features) == 0 + hits = feature_collection.get('numberMatched') + assert hits is not None + assert hits == 100 + + +def test_query_bbox_hits(config_parquet): + """Testing query for a valid JSON object with geometry""" + + p = ParquetProvider(config_parquet) + feature_collection = p.query( + bbox=[100, -50, 150, 0], + resulttype='hits') + assert feature_collection.get('type') == 'FeatureCollection' + features = feature_collection.get('features') + assert len(features) == 0 + hits = feature_collection.get('numberMatched') + assert hits is not None + assert hits == 6 + + +def test_query_with_limit(config_parquet): + """Testing query for a valid JSON object with geometry""" + + p = ParquetProvider(config_parquet) + feature_collection = p.query(limit=2, resulttype='results') + assert feature_collection.get('type') == 'FeatureCollection' + features = feature_collection.get('features') + assert len(features) == 2 + hits = feature_collection.get('numberMatched') + assert hits > 2 + feature = features[0] + properties = feature.get('properties') + assert properties is not None + geometry = feature.get('geometry') + assert geometry is not None + + +def 
test_query_with_offset(config_parquet): + """Testing query with offset and limit""" + + p = ParquetProvider(config_parquet) + feature_collection = p.query(offset=20, limit=10, resulttype='results') + assert feature_collection.get('type') == 'FeatureCollection' + features = feature_collection.get('features') + assert len(features) == 10 + hits = feature_collection.get('numberMatched') + assert hits > 30 + feature = features[0] + properties = feature.get('properties') + assert properties is not None + assert feature['id'] == '21' + assert properties['lat'] == 66.264988 + geometry = feature.get('geometry') + assert geometry is not None + + +def test_query_with_property(config_parquet): + """Testing query for a valid JSON object with property filter""" + + p = ParquetProvider(config_parquet) + feature_collection = p.query( + resulttype='results', + properties=[('lon', -12.855022)]) + assert feature_collection.get('type') == 'FeatureCollection' + features = feature_collection.get('features') + assert len(features) == 1 + for feature in features: + assert feature['properties']['lon'] == -12.855022 + + +def test_query_with_skip_geometry(config_parquet): + """Testing query with skip_geometry enabled""" + + p = ParquetProvider(config_parquet) + feature_collection = p.query(skip_geometry=True) + for feature in feature_collection['features']: + assert feature.get('geometry') is None + + +def test_query_with_datetime(config_parquet): + """Testing query for a valid JSON object with time""" + + p = ParquetProvider(config_parquet) + feature_collection = p.query( + datetime_='2022-05-01T00:00:00Z/2022-05-31T23:59:59Z') + assert feature_collection.get('type') == 'FeatureCollection' + features = feature_collection.get('features') + assert len(features) == 7 + for feature in feature_collection['features']: + time = feature['properties'][config_parquet['time_field']] + assert time.year == 2022 + assert time.month == 5 + + +def test_query_nogeom(config_parquet_nogeom_notime): + """Testing query for a valid JSON object without geometry""" + + p = ParquetProvider(config_parquet_nogeom_notime) + feature_collection = p.query(resulttype='results') + assert feature_collection.get('type') == 'FeatureCollection' + assert len(feature_collection.get('features')) > 0 + for feature in feature_collection['features']: + assert feature.get('geometry') is None diff --git a/tests/test_postgresql_manager.py b/tests/test_postgresql_manager.py new file mode 100644 index 000000000..db20fba46 --- /dev/null +++ b/tests/test_postgresql_manager.py @@ -0,0 +1,186 @@ +# ================================================================= +# +# Authors: Francesco Martinelli +# +# Copyright (c) 2024 Francesco Martinelli +# +# Permission is hereby granted, free of charge, to any person +# obtaining a copy of this software and associated documentation +# files (the "Software"), to deal in the Software without +# restriction, including without limitation the rights to use, +# copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the +# Software is furnished to do so, subject to the following +# conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +# OTHER DEALINGS IN THE SOFTWARE. +# +# ================================================================= + +# See pygeoapi/process/manager/postgresql.py +# for instructions on setting up database structure. + +import json + +import pytest +from werkzeug.wrappers import Request +from werkzeug.test import create_environ + +from .util import get_test_file_path +from pygeoapi.api import API, APIRequest +import pygeoapi.api.processes as processes_api +from pygeoapi.util import yaml_load + + +@pytest.fixture() +def config(): + with open(get_test_file_path( + 'pygeoapi-test-config-postgresql-manager.yml') + ) as fh: + return yaml_load(fh) + + +@pytest.fixture() +def openapi(): + with open(get_test_file_path('pygeoapi-test-openapi.yml')) as fh: + return yaml_load(fh) + + +@pytest.fixture() +def api_(config, openapi): + return API(config, openapi) + + +def _create_execute_request(name, message, locales): + data = { + "response": "raw", + "inputs": { + "name": name, + "message": message + } + } + environ = create_environ( + base_url='http://localhost:5000/processes/hello-world/execution', + method="POST", json=data) + req = Request(environ) + return APIRequest.with_data(req, locales) + + +def _create_job_request(job_id, locales): + environ = create_environ( + base_url=f'http://localhost:5000/jobs/{job_id}', + query_string="f=json", + method="GET") + req = Request(environ) + return APIRequest.with_data(req, locales) + + +def _create_results_request(job_id, locales): + environ = create_environ( + base_url=f'http://localhost:5000/jobs/{job_id}/results', + query_string="f=json", + method="GET") + req = Request(environ) + return APIRequest.with_data(req, locales) + + +def _create_delete_request(job_id, locales): + environ = create_environ( + base_url=f'http://localhost:5000/jobs/{job_id}', + query_string="f=json", + method="DELETE") + req = Request(environ) + return APIRequest.with_data(req, locales) + + +def test_api_connection_rfc3986(config, openapi): + connection = config['server']['manager']['connection'] + connection_string = ( + f"postgresql://{connection['user']}:{connection['password']}" + f"@{connection['host']}:{connection['port']}/{connection['database']}") + config['server']['manager']['connection'] = connection_string + API(config, openapi) + + +def test_job_sync_hello_world(api_, config): + """ + Create a new job for hello-world, + which implicitly tests add_job() and update_job(); + then: + -) get the job info, which tests get_job(), + -) get the job results, which tests get_job_result(), + -) get all present jobs, which tests get_jobs(), + -) delete the newly inserted job, which tests delete_job(). + """ + process_id = "hello-world" + + # Create new job + req = _create_execute_request("World", "Hello", api_.locales) + headers, http_status, response = processes_api.execute_process( + api_, req, process_id) + assert http_status == 200 + out_json = json.loads(response) + assert out_json["id"] == "echo" + assert out_json["value"] == "Hello World! 
Hello" + + # Save job_id for later use + job_id = headers['Location'].split('/')[-1] + mimetype = headers['Content-Type'] + + # Get job info + req = _create_job_request(job_id, api_.locales) + headers, http_status, response = processes_api.get_jobs( + api_, req, job_id) + assert http_status == 200 + out_json = json.loads(response) + assert out_json["type"] == "process" + assert out_json["processID"] == process_id + assert out_json["jobID"] == job_id + + # Get job results + req = _create_results_request(job_id, api_.locales) + headers, http_status, response = processes_api.get_job_result( + api_, req, job_id) + assert http_status == 200 + assert mimetype == headers['Content-Type'] + out_json = json.loads(response) + assert out_json["id"] == "echo" + assert out_json["value"] == "Hello World! Hello" + + # Get all present jobs + req = _create_job_request(None, api_.locales) + headers, http_status, response = processes_api.get_jobs( + api_, req, None) + assert http_status == 200 + # check the inserted job is in the list + out_json = json.loads(response) + jobs = out_json["jobs"] + assert any(job["jobID"] == job_id for job in jobs) + + # Delete the inserted job + req = _create_delete_request(job_id, api_.locales) + headers, http_status, response = processes_api.delete_job( + api_, req, job_id) + assert http_status == 200 + out_json = json.loads(response) + assert out_json["jobID"] == job_id + assert out_json["status"] == "dismissed" + + # Try again to delete the inserted job + req = _create_delete_request(job_id, api_.locales) + headers, http_status, response = processes_api.get_jobs( + api_, req, job_id) + assert http_status == 404 + out_json = json.loads(response) + assert out_json["code"] == "InvalidParameterValue" + assert out_json["description"] == job_id diff --git a/tests/test_postgresql_provider.py b/tests/test_postgresql_provider.py index a0679798f..6a63e7105 100644 --- a/tests/test_postgresql_provider.py +++ b/tests/test_postgresql_provider.py @@ -556,7 +556,7 @@ def test_get_collection_items_postgresql_cql_bad_cql(pg_api_, bad_cql): assert code == HTTPStatus.BAD_REQUEST error_response = json.loads(response) assert error_response['code'] == 'InvalidParameterValue' - assert error_response['description'] == f'Bad CQL string : {bad_cql}' + assert error_response['description'] == 'Bad CQL text' def test_post_collection_items_postgresql_cql(pg_api_): @@ -642,7 +642,7 @@ def test_post_collection_items_postgresql_cql_bad_cql(pg_api_, bad_cql): assert code == HTTPStatus.BAD_REQUEST error_response = json.loads(response) assert error_response['code'] == 'InvalidParameterValue' - assert error_response['description'].startswith('Bad CQL string') + assert error_response['description'] == 'Bad CQL text' def test_get_collection_items_postgresql_crs(pg_api_): diff --git a/tests/test_tinydb_manager_for_parallel_requests.py b/tests/test_tinydb_manager_for_parallel_requests.py index a1da59ce9..393cea7cd 100644 --- a/tests/test_tinydb_manager_for_parallel_requests.py +++ b/tests/test_tinydb_manager_for_parallel_requests.py @@ -39,6 +39,7 @@ from werkzeug.test import create_environ from pygeoapi.api import API, APIRequest +import pygeoapi.api.processes as processes_api from pygeoapi.util import yaml_load from .util import get_test_file_path @@ -61,7 +62,8 @@ def api_(config, openapi): def _execute_process(api, request, process_id, index, processes_out): - headers, http_status, response = api.execute_process(request, process_id) + headers, http_status, response = processes_api.execute_process( + api, 
request, process_id) processes_out[index] = {"headers": headers, "http_status": http_status, "response": response} @@ -112,6 +114,7 @@ def test_async_hello_world_process_parallel(api_, config): # Test if jobs are registered and run correctly db = TinyDB(index_name) query = Query() + assert len(processes_out.values()) == NUM_PROCS for process_out in processes_out.values(): try: assert process_out['http_status'] == 200 diff --git a/tests/test_tinydb_provider.py b/tests/test_tinydb_provider.py new file mode 100644 index 000000000..c90f8ae0c --- /dev/null +++ b/tests/test_tinydb_provider.py @@ -0,0 +1,191 @@ +# ================================================================= +# +# Authors: Tom Kralidis +# +# Copyright (c) 2024 Tom Kralidis +# +# Permission is hereby granted, free of charge, to any person +# obtaining a copy of this software and associated documentation +# files (the "Software"), to deal in the Software without +# restriction, including without limitation the rights to use, +# copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the +# Software is furnished to do so, subject to the following +# conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +# OTHER DEALINGS IN THE SOFTWARE. 
+# +# ================================================================= + +import json +import shutil + +import pytest + +from pygeoapi.provider.base import ProviderItemNotFoundError +from pygeoapi.provider.tinydb_ import TinyDBProvider + +from .util import get_test_file_path + +path = get_test_file_path('tests/data/canada-hydat-daily-mean-02HC003.tinydb') + + +@pytest.fixture() +def data(): + return json.dumps({ + 'type': 'Feature', + 'geometry': { + 'type': 'Polygon', + 'coordinates': [[ + [100.0, 0.0], [101.0, 0.0], [101.0, 1.0], + [100.0, 1.0], [100.0, 0.0] + ]] + }, + 'properties': { + 'identifier': 123, + 'title': 'test item', + 'description': 'test item' + } + }) + + +@pytest.fixture() +def data_no_id(): + return json.dumps({ + 'type': 'Feature', + 'geometry': { + 'type': 'Polygon', + 'coordinates': [[ + [100.0, 0.0], [101.0, 0.0], [101.0, 1.0], + [100.0, 1.0], [100.0, 0.0] + ]] + }, + 'properties': { + 'title': 'test item', + 'description': 'test item' + } + }) + + +@pytest.fixture() +def config(tmp_path): + tmp_file = tmp_path / 'sample-features.tinydb' + shutil.copy(path, tmp_file) + return { + 'name': 'TinyDB', + 'type': 'feature', + 'data': tmp_file, + 'id_field': 'IDENTIFIER', + 'time_field': 'DATE' + } + + +def test_query(config): + p = TinyDBProvider(config) + + fields = p.get_fields() + assert len(fields) == 11 + assert fields['FLOW']['type'] == 'number' + assert fields['DATE']['type'] == 'string' + assert fields['DATE']['format'] == 'date' + + results = p.query() + assert len(results['features']) == 10 + assert results['numberMatched'] == 50 + assert results['numberReturned'] == 10 + assert results['features'][0]['id'] == '02HC003.1975-10-03' + assert results['features'][0]['properties']['STATION_NUMBER'] == '02HC003' + + results = p.query(properties=[('FLOW', 2.039999961853028)]) + assert len(results['features']) == 1 + assert results['numberMatched'] == 1 + assert results['numberReturned'] == 1 + + results = p.query(limit=1) + assert len(results['features']) == 1 + assert results['features'][0]['id'] == '02HC003.1975-10-03' + + results = p.query(datetime_='2017/..') + assert len(results['features']) == 3 + assert results['features'][0]['id'] == '02HC003.2017-05-23' + + results = p.query(datetime_='../2017') + assert len(results['features']) == 10 + assert results['features'][0]['id'] == '02HC003.1975-10-03' + + results = p.query(datetime_='1987-11-11/2000-11-11') + assert len(results['features']) == 1 + assert results['features'][0]['id'] == '02HC003.1995-01-29' + + results = p.query(bbox=[-154, 42, -52, 84]) + assert len(results['features']) == 10 + assert results['features'][0]['id'] == '02HC003.1975-10-03' + + results = p.query(offset=1, limit=1) + assert len(results['features']) == 1 + assert results['features'][0]['id'] == '02HC003.1975-10-02' + + results = p.query(offset=2, limit=2) + assert len(results['features']) == 2 + assert results['features'][0]['id'] == '02HC003.1975-10-01' + + results = p.query(sortby=[{'property': 'DATE', 'order': '+'}]) + assert results['features'][0]['id'] == '02HC003.1955-09-01' + + results = p.query(sortby=[{'property': 'DATE', 'order': '-'}]) + assert results['features'][0]['id'] == '02HC003.2017-05-27' + + +def test_get(config): + p = TinyDBProvider(config) + + result = p.get('02HC003.1975-10-02') + assert result['id'] == '02HC003.1975-10-02' + assert result['properties']['FLOW'] == 2.059999942779541 + + +def test_get_not_existing_item_raise_exception(config): + """Testing query for a not existing object""" + p = 
TinyDBProvider(config) + with pytest.raises(ProviderItemNotFoundError): + p.get('404') + + +def test_transactions_create(config, data): + """Testing transactional capabilities""" + + p = TinyDBProvider(config) + + new_id = p.create(data) + assert new_id == 123 + + assert p.update(123, data) + + assert p.delete(123) + + +def test_transactions_create_no_id(config, data_no_id): + """Testing transactional capabilities with incoming feature without ID""" + + p = TinyDBProvider(config) + + new_id = p.create(data_no_id) + assert new_id is not None + + data_got = p.get(new_id) + assert data_got['id'] == new_id + assert data_got['geometry'] == json.loads(data_no_id)['geometry'] + + assert p.update(new_id, json.dumps(data_got)) + + assert p.delete(new_id) diff --git a/tests/test_util.py b/tests/test_util.py index c71ce80a0..d15aac321 100644 --- a/tests/test_util.py +++ b/tests/test_util.py @@ -172,7 +172,7 @@ def test_path_basename(): def test_filter_dict_by_key_value(config): collections = util.filter_dict_by_key_value(config['resources'], 'type', 'collection') - assert len(collections) == 9 + assert len(collections) == 10 notfound = util.filter_dict_by_key_value(config['resources'], 'type', 'foo') diff --git a/tests/test_xarray_zarr_provider.py b/tests/test_xarray_zarr_provider.py index 5163b32a6..ec014e655 100644 --- a/tests/test_xarray_zarr_provider.py +++ b/tests/test_xarray_zarr_provider.py @@ -30,6 +30,7 @@ from numpy import float64, int64 import pytest +import xarray as xr from pygeoapi.provider.xarray_ import XarrayProvider from pygeoapi.util import json_serial @@ -53,6 +54,20 @@ def config(): } +@pytest.fixture() +def config_no_time(tmp_path): + ds = xr.open_zarr(path) + ds = ds.sel(time=ds.time[0]) + ds = ds.drop_vars('time') + ds.to_zarr(tmp_path / 'no_time.zarr') + return { + 'name': 'zarr', + 'type': 'coverage', + 'data': str(tmp_path / 'no_time.zarr'), + 'format': {'name': 'zarr', 'mimetype': 'application/zip'}, + } + + def test_provider(config): p = XarrayProvider(config) @@ -85,3 +100,14 @@ def test_numpy_json_serial(): d = float64(500.00000005) assert json_serial(d) == 500.00000005 + + +def test_no_time(config_no_time): + p = XarrayProvider(config_no_time) + + assert len(p.fields) == 4 + assert p.axes == ['lon', 'lat'] + + coverage = p.query(format='json') + + assert sorted(coverage['domain']['axes'].keys()) == ['x', 'y']
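
Usage note: the new PostgreSQL process manager accepts its connection either as the structured mapping shown in tests/pygeoapi-test-config-postgresql-manager.yml or as an RFC3986 connection string, which is exactly what test_api_connection_rfc3986 above exercises. Below is a minimal standalone sketch of that equivalence; it assumes the test config and OpenAPI document shipped with the tests and a PostgreSQL instance loaded from tests/data/postgres_manager_full_structure.backup.sql, and it is illustrative rather than part of the test suite.

    # Minimal sketch: swap the structured manager connection for the
    # equivalent RFC3986 string (mirrors test_api_connection_rfc3986)
    from pygeoapi.api import API
    from pygeoapi.util import yaml_load

    with open('tests/pygeoapi-test-config-postgresql-manager.yml') as fh:
        config = yaml_load(fh)
    with open('tests/pygeoapi-test-openapi.yml') as fh:
        openapi = yaml_load(fh)

    # Structured mapping form, as declared under server.manager.connection
    conn = config['server']['manager']['connection']

    # Equivalent RFC3986 string form, also accepted by the manager
    config['server']['manager']['connection'] = (
        f"postgresql://{conn['user']}:{conn['password']}"
        f"@{conn['host']}:{conn['port']}/{conn['database']}"
    )

    # Constructing the API wires up the manager with either form
    API(config, openapi)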