From 9bc4aff92e8814adfe4d728f60255addc3a05000 Mon Sep 17 00:00:00 2001 From: Matthias Mohr Date: Wed, 7 Sep 2022 13:53:48 +0200 Subject: [PATCH 01/14] Update processes to new data cube concepts --- CHANGELOG.md | 10 +++ add_dimension.json | 9 +-- aggregate_spatial.json | 57 ++++++++++++----- aggregate_temporal.json | 16 ++++- aggregate_temporal_period.json | 16 ++++- anomaly.json | 21 +++++- apply.json | 8 +-- apply_dimension.json | 14 ++-- apply_kernel.json | 26 ++++++-- apply_neighborhood.json | 50 +++++++++++++-- climatological_normal.json | 14 +++- ..._raster_cube.json => create_data_cube.json | 12 ++-- dimension_labels.json | 4 +- drop_dimension.json | 6 +- filter_bands.json | 16 ++++- filter_bbox.json | 38 ++++++++++- filter_spatial.json | 59 ++++++++++++++--- filter_temporal.json | 14 +++- load_collection.json | 19 ++++-- mask.json | 33 +++++++++- mask_polygon.json | 45 ++++++++++--- merge_cubes.json | 10 +-- meta/subtype-schemas.json | 14 +++- ndvi.json | 25 +++++++- proposals/aggregate_spatial_window.json | 26 ++++++-- .../ard_normalized_radar_backscatter.json | 34 ++++++++-- proposals/ard_surface_reflectance.json | 34 ++++++++-- proposals/atmospheric_correction.json | 34 ++++++++-- proposals/cloud_detection.json | 34 ++++++++-- proposals/filter_labels.json | 6 +- proposals/fit_class_random_forest.json | 14 +++- proposals/fit_curve.json | 8 +-- proposals/fit_regr_random_forest.json | 14 +++- proposals/flatten_dimensions.json | 4 +- proposals/inspect.json | 2 +- proposals/load_result.json | 15 ++++- proposals/load_uploaded_files.json | 2 +- proposals/predict_curve.json | 10 +-- proposals/reduce_spatial.json | 19 ++++-- proposals/resample_cube_temporal.json | 23 +++++-- proposals/sar_backscatter.json | 32 ++++++++-- proposals/unflatten_dimension.json | 4 +- proposals/vector_buffer.json | 14 +++- proposals/vector_to_random_points.json | 14 +++- proposals/vector_to_regular_points.json | 14 +++- reduce_dimension.json | 8 +-- rename_dimension.json | 6 +- 
rename_labels.json | 4 +- resample_cube_spatial.json | 39 +++++++++-- resample_spatial.json | 24 ++++++- save_result.json | 14 ++-- tests/package.json | 2 +- tests/testHelpers.js | 64 ++++++++++++++++++- trim_cube.json | 10 +-- 54 files changed, 856 insertions(+), 208 deletions(-) rename create_raster_cube.json => create_data_cube.json (53%) diff --git a/CHANGELOG.md b/CHANGELOG.md index c11339c8..6cdb52fb 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,16 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +## Changes for vector cubes + +- Update the processes based on `raster-cube` to work with `datacube` instead +- Renamed `create_raster_cube` to `create_data_cube` +- `add_dimension`: Added new dimension type `vector` +- New definition for `aggregate_spatial`: + - Allows more than 3 input dimensions + - Allow not exporting statistics by changing the parameter `target_dimension` + - Clarified what the resulting vector cube looks like + ## Unreleased / Draft ### Added diff --git a/add_dimension.json b/add_dimension.json index a3b07075..a7c76d13 100644 --- a/add_dimension.json +++ b/add_dimension.json @@ -11,7 +11,7 @@ "description": "A data cube to add the dimension to.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube" } }, { @@ -39,9 +39,10 @@ "schema": { "type": "string", "enum": [ + "bands", "spatial", "temporal", - "bands", + "vector", "other" ] }, @@ -53,7 +54,7 @@ "description": "The data cube with a newly added dimension. The new dimension has exactly one dimension label. All other dimensions remain unchanged.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube" } }, "exceptions": { "DimensionExists": { "message": "A dimension with the specified name already exists."
} } -} \ No newline at end of file +} diff --git a/aggregate_spatial.json b/aggregate_spatial.json index 4020610c..2b406295 100644 --- a/aggregate_spatial.json +++ b/aggregate_spatial.json @@ -1,7 +1,7 @@ { "id": "aggregate_spatial", "summary": "Zonal statistics for geometries", - "description": "Aggregates statistics for one or more geometries (e.g. zonal statistics for polygons) over the spatial dimensions. The number of total and valid pixels is returned together with the calculated values.\n\nAn 'unbounded' aggregation over the full extent of the horizontal spatial dimensions can be computed with the process ``reduce_spatial()``.\n\nThis process passes a list of values to the reducer. The list of values has an undefined order, therefore processes such as ``last()`` and ``first()`` that depend on the order of the values will lead to unpredictable results.", + "description": "Aggregates statistics for one or more geometries (e.g. zonal statistics for polygons) over the spatial dimensions. The given data cube can have multiple additional dimensions and for all these dimensions results will be computed individually.\n\nAn 'unbounded' aggregation over the full extent of the horizontal spatial dimensions can be computed with the process ``reduce_spatial()``.\n\nThis process passes a list of values to the reducer. The list of values has an undefined order, therefore processes such as ``last()`` and ``first()`` that depend on the order of the values will lead to unpredictable results.", "categories": [ "cubes", "aggregate & resample" @@ -9,19 +9,39 @@ "parameters": [ { "name": "data",
Otherwise, this process fails with the `TooManyDimensions` exception.\n\nThe data cube implicitly gets restricted to the bounds of the geometries as if ``filter_spatial()`` would have been used with the same values for the corresponding parameters immediately before this process.", + "description": "A raster data cube with at least two spatial dimensions.\n\nThe data cube implicitly gets restricted to the bounds of the geometries as if ``filter_spatial()`` would have been used with the same values for the corresponding parameters immediately before this process.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube", + "dimensions": [ + { + "type": "spatial", + "axis": [ + "x", + "y" + ] + } + ] } }, { "name": "geometries", - "description": "Geometries as GeoJSON on which the aggregation will be based. Vector properties are preserved for vector data cubes and all GeoJSON Features.\n\nOne value will be computed per GeoJSON `Feature`, `Geometry` or `GeometryCollection`. For a `FeatureCollection` multiple values will be computed, one value per contained `Feature`. For example, a single value will be computed for a `MultiPolygon`, but two values will be computed for a `FeatureCollection` containing two polygons.\n\n- For **polygons**, the process considers all pixels for which the point at the pixel center intersects with the corresponding polygon (as defined in the Simple Features standard by the OGC).\n- For **points**, the process considers the closest pixel center.\n- For **lines** (line strings), the process considers all the pixels whose centers are closest to at least one point on the line.\n\nThus, pixels may be part of multiple geometries and be part of multiple aggregations.\n\nTo maximize interoperability, a nested `GeometryCollection` should be avoided. Furthermore, a `GeometryCollection` composed of a single type of geometries should be avoided in favour of the corresponding multi-part type (e.g. 
`MultiPolygon`).", - "schema": { - "type": "object", - "subtype": "geojson" - } + "description": "Geometries for which the aggregation will be computed. Vector properties are preserved for vector data cubes and all GeoJSON Features.\n\nOne value will be computed per label in the dimension of type `vector`, GeoJSON `Feature`, `Geometry` or `GeometryCollection`. For a `FeatureCollection` multiple values will be computed, one value per contained `Feature`. For example, a single value will be computed for a `MultiPolygon`, but two values will be computed for a `FeatureCollection` containing two polygons.\n\n- For **polygons**, the process considers all pixels for which the point at the pixel center intersects with the corresponding polygon (as defined in the Simple Features standard by the OGC).\n- For **points**, the process considers the closest pixel center.\n- For **lines** (line strings), the process considers all the pixels whose centers are closest to at least one point on the line.\n\nThus, pixels may be part of multiple geometries and be part of multiple aggregations.\n\nTo maximize interoperability, a nested `GeometryCollection` should be avoided. Furthermore, a `GeometryCollection` composed of a single type of geometries should be avoided in favour of the corresponding multi-part type (e.g. `MultiPolygon`).", + "schema": [ + { + "type": "object", + "subtype": "geojson" + }, + { + "type": "object", + "subtype": "datacube", + "dimensions": [ + { + "type": "vector" + } + ] + } + ] }, { "name": "reducer", @@ -60,11 +80,14 @@ }, { "name": "target_dimension", - "description": "The name of a new dimensions that is used to store the results. A new dimension will be created with the given name and type `other` (see ``add_dimension()``). Defaults to the dimension name `result`. 
Fails with a `TargetDimensionExists` exception if a dimension with the specified name exists.", + "description": "By default (which is `null`), the process only computes the results and doesn't add a new dimension. If this parameter contains a new dimension name, the computation also stores information about the total count of pixels (valid + invalid pixels) and the number of valid pixels (see ``is_valid()``) for each computed value. These values are added as a new dimension. The new dimension of type `other` has the dimension labels `value`, `total_count` and `valid_count`.", "schema": { - "type": "string" + "type": [ + "string", + "null" + ] }, - "default": "result", + "default": null, "optional": true }, { @@ -78,16 +101,18 @@ } ], "returns": { - "description": "A vector data cube with the computed results and restricted to the bounds of the geometries.\n\nThe computed value is used for the dimension with the name that was specified in the parameter `target_dimension`.\n\nThe computation also stores information about the total count of pixels (valid + invalid pixels) and the number of valid pixels (see ``is_valid()``) for each geometry. These values are added as a new dimension with a dimension name derived from `target_dimension` by adding the suffix `_meta`. The new dimension has the dimension labels `total_count` and `valid_count`.", + "description": "A vector data cube with the computed results and restricted to the bounds of the geometries. The spatial dimensions are replaced by a vector dimension and if `target_dimension` is not `null`, a new dimension is added.", "schema": { "type": "object", - "subtype": "vector-cube" + "subtype": "datacube", + "dimensions": [ + { + "type": "vector" + } + ] } }, "exceptions": { - "TooManyDimensions": { - "message": "The number of dimensions must be reduced to three for `aggregate_spatial`." - }, "TargetDimensionExists": { "message": "A dimension with the specified target dimension name already exists."
} diff --git a/aggregate_temporal.json b/aggregate_temporal.json index b68b366c..d63099b7 100644 --- a/aggregate_temporal.json +++ b/aggregate_temporal.json @@ -12,7 +12,12 @@ "description": "A data cube.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube", + "dimensions": [ + { + "type": "temporal" + } + ] } }, { @@ -162,7 +167,12 @@ "description": "A new data cube with the same dimensions. The dimension properties (name, type, labels, reference system and resolution) remain unchanged, except for the resolution and dimension labels of the given temporal dimension.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube", + "dimensions": [ + { + "type": "temporal" + } + ] } }, "examples": [ @@ -234,4 +244,4 @@ "title": "Aggregation explained in the openEO documentation" } ] -} \ No newline at end of file +} diff --git a/aggregate_temporal_period.json b/aggregate_temporal_period.json index 832e72aa..ce6ec410 100644 --- a/aggregate_temporal_period.json +++ b/aggregate_temporal_period.json @@ -13,7 +13,12 @@ "description": "The source data cube.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube", + "dimensions": [ + { + "type": "temporal" + } + ] } }, { @@ -97,7 +102,12 @@ "description": "A new data cube with the same dimensions. The dimension properties (name, type, labels, reference system and resolution) remain unchanged, except for the resolution and dimension labels of the given temporal dimension. 
The specified temporal dimension has the following dimension labels (`YYYY` = four-digit year, `MM` = two-digit month, `DD` two-digit day of month):\n\n* `hour`: `YYYY-MM-DD-00` - `YYYY-MM-DD-23`\n* `day`: `YYYY-001` - `YYYY-365`\n* `week`: `YYYY-01` - `YYYY-52`\n* `dekad`: `YYYY-00` - `YYYY-36`\n* `month`: `YYYY-01` - `YYYY-12`\n* `season`: `YYYY-djf` (December - February), `YYYY-mam` (March - May), `YYYY-jja` (June - August), `YYYY-son` (September - November).\n* `tropical-season`: `YYYY-ndjfma` (November - April), `YYYY-mjjaso` (May - October).\n* `year`: `YYYY`\n* `decade`: `YYY0`\n* `decade-ad`: `YYY1`\n\nThe dimension labels in the new data cube are complete for the whole extent of the source data cube. For example, if `period` is set to `day` and the source data cube has two dimension labels at the beginning of the year (`2020-01-01`) and the end of a year (`2020-12-31`), the process returns a data cube with 365 dimension labels (`2020-001`, `2020-002`, ..., `2020-365`). In contrast, if `period` is set to `day` and the source data cube has just one dimension label `2020-01-05`, the process returns a data cube with just a single dimension label (`2020-005`).", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube", + "dimensions": [ + { + "type": "temporal" + } + ] } }, "exceptions": { @@ -118,4 +128,4 @@ "title": "Aggregation explained in the openEO documentation" } ] -} \ No newline at end of file +} diff --git a/anomaly.json b/anomaly.json index 3f369087..7a3890b7 100644 --- a/anomaly.json +++ b/anomaly.json @@ -13,7 +13,12 @@ "description": "A data cube with exactly one temporal dimension and the following dimension labels for the given period (`YYYY` = four-digit year, `MM` = two-digit month, `DD` two-digit day of month):\n\n* `hour`: `YYYY-MM-DD-00` - `YYYY-MM-DD-23`\n* `day`: `YYYY-001` - `YYYY-365`\n* `week`: `YYYY-01` - `YYYY-52`\n* `dekad`: `YYYY-00` - `YYYY-36`\n* `month`: `YYYY-01` - `YYYY-12`\n* `season`: `YYYY-djf` 
(December - February), `YYYY-mam` (March - May), `YYYY-jja` (June - August), `YYYY-son` (September - November).\n* `tropical-season`: `YYYY-ndjfma` (November - April), `YYYY-mjjaso` (May - October).\n* `year`: `YYYY`\n* `decade`: `YYY0`\n* `decade-ad`: `YYY1`\n* `single-period` / `climatology-period`: Any\n\n``aggregate_temporal_period()`` can compute such a data cube.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube", + "dimensions": [ + { + "type": "temporal" + } + ] } }, { @@ -21,7 +26,12 @@ "description": "A data cube with normals, e.g. daily, monthly or yearly values computed from a process such as ``climatological_normal()``. Must contain exactly one temporal dimension with the following dimension labels for the given period:\n\n* `hour`: `00` - `23`\n* `day`: `001` - `365`\n* `week`: `01` - `52`\n* `dekad`: `00` - `36`\n* `month`: `01` - `12`\n* `season`: `djf` (December - February), `mam` (March - May), `jja` (June - August), `son` (September - November)\n* `tropical-season`: `ndjfma` (November - April), `mjjaso` (May - October)\n* `year`: Four-digit year numbers\n* `decade`: Four-digit year numbers, the last digit being a `0`\n* `decade-ad`: Four-digit year numbers, the last digit being a `1`\n* `single-period` / `climatology-period`: A single dimension label with any name is expected.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube", + "dimensions": [ + { + "type": "temporal" + } + ] } }, { @@ -50,7 +60,12 @@ "description": "A data cube with the same dimensions. 
The dimension properties (name, type, labels, reference system and resolution) remain unchanged.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube", + "dimensions": [ + { + "type": "temporal" + } + ] } } } diff --git a/apply.json b/apply.json index d0be1e1d..20995c88 100644 --- a/apply.json +++ b/apply.json @@ -1,7 +1,7 @@ { "id": "apply", - "summary": "Apply a process to each pixel", - "description": "Applies a process to each pixel value in the data cube (i.e. a local operation). In contrast, the process ``apply_dimension()`` applies a process to all pixel values along a particular dimension.", + "summary": "Apply a process to each value", + "description": "Applies a process to each value in the data cube (i.e. a local operation). In contrast, the process ``apply_dimension()`` applies a process to all values along a particular dimension.", "categories": [ "cubes" ], @@ -11,7 +11,7 @@ "description": "A data cube.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube" } }, { @@ -60,7 +60,7 @@ "description": "A data cube with the newly computed values and the same dimensions. The dimension properties (name, type, labels, reference system and resolution) remain unchanged.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube" } }, "links": [ diff --git a/apply_dimension.json b/apply_dimension.json index 796393f4..5f9a984a 100644 --- a/apply_dimension.json +++ b/apply_dimension.json @@ -1,7 +1,7 @@ { "id": "apply_dimension", - "summary": "Apply a process to pixels along a dimension", - "description": "Applies a process to all pixel values along a dimension of a raster data cube. For example, if the temporal dimension is specified the process will work on a time series of pixel values.\n\nThe process ``reduce_dimension()`` also applies a process to pixel values along a dimension, but drops the dimension afterwards. 
The process ``apply()`` applies a process to each pixel value in the data cube.\n\nThe target dimension is the source dimension if not specified otherwise in the `target_dimension` parameter. The pixel values in the target dimension get replaced by the computed pixel values. The name, type and reference system are preserved.\n\nThe dimension labels are preserved when the target dimension is the source dimension and the number of pixel values in the source dimension is equal to the number of values computed by the process. Otherwise, the dimension labels will be incrementing integers starting from zero, which can be changed using ``rename_labels()`` afterwards. The number of labels will equal to the number of values computed by the process.", + "summary": "Apply a process to all values along a dimension", + "description": "Applies a process to all values along a dimension of a data cube. For example, if the temporal dimension is specified the process will work on the values of a time series.\n\nThe process ``reduce_dimension()`` also applies a process to values along a dimension, but drops the dimension afterwards. The process ``apply()`` applies a process to each value in the data cube.\n\nThe target dimension is the source dimension if not specified otherwise in the `target_dimension` parameter. The values in the target dimension get replaced by the computed values. The name, type and reference system are preserved.\n\nThe dimension labels are preserved when the target dimension is the source dimension and the number of values in the source dimension is equal to the number of values computed by the process. Otherwise, the dimension labels will be incrementing integers starting from zero, which can be changed using ``rename_labels()`` afterwards. 
The number of labels will be equal to the number of values computed by the process.", "categories": [ "cubes" ], @@ -11,12 +11,12 @@ "description": "A data cube.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube" } }, { "name": "process", - "description": "Process to be applied on all pixel values. The specified process needs to accept an array and must return an array with at least one element. A process may consist of multiple sub-processes.", + "description": "Process to be applied on all values along the given dimension. The specified process needs to accept an array and must return an array with at least one element. A process may consist of multiple sub-processes.", "schema": { "type": "object", "subtype": "process-graph", @@ -83,10 +83,10 @@ } ], "returns": {
The source dimension is not the target dimension and the latter does not exist:\n - The number of dimensions remain unchanged, but the source dimension is replaced with the target dimension.\n - The target dimension has the specified name and the type other. All other dimension properties are set as defined in the list below.\n\nUnless otherwise stated above, for the given (target) dimension the following applies:\n\n- the number of dimension labels is equal to the number of values computed by the process,\n- the dimension labels are incrementing integers starting from zero,\n- the resolution changes, and\n- the reference system is undefined.", + "description": "A data cube with the newly computed values.\n\nAll dimensions stay the same, except for the dimensions specified in corresponding parameters. There are three cases how the dimensions can change:\n\n1. The source dimension is the target dimension:\n - The (number of) dimensions remain unchanged as the source dimension is the target dimension.\n - The source dimension properties name and type remain unchanged.\n - The dimension labels, the reference system and the resolution are preserved only if the number of values in the source dimension is equal to the number of values computed by the process. Otherwise, all other dimension properties change as defined in the list below.\n2. The source dimension is not the target dimension and the latter exists:\n - The number of dimensions decreases by one as the source dimension is dropped.\n - The target dimension properties name and type remain unchanged. All other dimension properties change as defined in the list below.\n3. The source dimension is not the target dimension and the latter does not exist:\n - The number of dimensions remain unchanged, but the source dimension is replaced with the target dimension.\n - The target dimension has the specified name and the type other. 
All other dimension properties are set as defined in the list below.\n\nUnless otherwise stated above, for the given (target) dimension the following applies:\n\n- the number of dimension labels is equal to the number of values computed by the process,\n- the dimension labels are incrementing integers starting from zero,\n- the resolution changes, and\n- the reference system is undefined.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube" } }, "exceptions": { @@ -101,4 +101,4 @@ "title": "Apply explained in the openEO documentation" } ] -} \ No newline at end of file +} diff --git a/apply_kernel.json b/apply_kernel.json index 20d940c2..cf16dc78 100644 --- a/apply_kernel.json +++ b/apply_kernel.json @@ -1,7 +1,7 @@ { "id": "apply_kernel", "summary": "Apply a spatial convolution with a kernel", - "description": "Applies a 2D convolution (i.e. a focal operation with a weighted kernel) on the horizontal spatial dimensions (axes `x` and `y`) of the data cube.\n\nEach value in the kernel is multiplied with the corresponding pixel value and all products are summed up afterwards. The sum is then multiplied with the factor.\n\nThe process can't handle non-numerical or infinite numerical values in the data cube. Boolean values are converted to integers (`false` = 0, `true` = 1), but all other non-numerical or infinite values are replaced with zeroes by default (see parameter `replace_invalid`).\n\nFor cases requiring more generic focal operations or non-numerical values, see ``apply_neighborhood()``.", + "description": "Applies a 2D convolution (i.e. a focal operation with a weighted kernel) on the horizontal spatial dimensions (axes `x` and `y`) of a raster data cube.\n\nEach value in the kernel is multiplied with the corresponding pixel value and all products are summed up afterwards. The sum is then multiplied with the factor.\n\nThe process can't handle non-numerical or infinite numerical values in the data cube. 
Boolean values are converted to integers (`false` = 0, `true` = 1), but all other non-numerical or infinite values are replaced with zeroes by default (see parameter `replace_invalid`).\n\nFor cases requiring more generic focal operations or non-numerical values, see ``apply_neighborhood()``.", "categories": [ "cubes", "math > image filter" @@ -9,10 +9,19 @@ "parameters": [ { "name": "data", - "description": "A data cube.", + "description": "A raster data cube.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube", + "dimensions": [ + { + "type": "spatial", + "axis": [ + "x", + "y" + ] + } + ] } }, { @@ -73,7 +82,16 @@ "description": "A data cube with the newly computed values and the same dimensions. The dimension properties (name, type, labels, reference system and resolution) remain unchanged.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube", + "dimensions": [ + { + "type": "spatial", + "axis": [ + "x", + "y" + ] + } + ] } }, "exceptions": { diff --git a/apply_neighborhood.json b/apply_neighborhood.json index 4966f28e..3b89adf4 100644 --- a/apply_neighborhood.json +++ b/apply_neighborhood.json @@ -8,10 +8,19 @@ "parameters": [ { "name": "data", - "description": "A data cube.", + "description": "A raster data cube.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube", + "dimensions": [ + { + "type": "spatial", + "axis": [ + "x", + "y" + ] + } + ] } }, { @@ -23,10 +32,19 @@ "parameters": [ { "name": "data", - "description": "A subset of the data cube as specified in `context` and `overlap`.", + "description": "A subset of the data cube as specified in `size` and `overlap`.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube", + "dimensions": [ + { + "type": "spatial", + "axis": [ + "x", + "y" + ] + } + ] } }, { @@ -43,7 +61,16 @@ "description": "The data cube with the newly computed values and the same dimensions. 
The dimension properties (name, type, labels, reference system and resolution) must remain unchanged, otherwise a `DataCubePropertiesImmutable` exception will be thrown.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube", + "dimensions": [ + { + "type": "spatial", + "axis": [ + "x", + "y" + ] + } + ] } } } @@ -184,10 +211,19 @@ } ], "returns": { - "description": "A data cube with the newly computed values and the same dimensions. The dimension properties (name, type, labels, reference system and resolution) remain unchanged.", + "description": "A raster data cube with the newly computed values and the same dimensions. The dimension properties (name, type, labels, reference system and resolution) remain unchanged.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube", + "dimensions": [ + { + "type": "spatial", + "axis": [ + "x", + "y" + ] + } + ] } }, "examples": [ diff --git a/climatological_normal.json b/climatological_normal.json index 33cd2d60..e1b1e0c9 100644 --- a/climatological_normal.json +++ b/climatological_normal.json @@ -12,7 +12,12 @@ "description": "A data cube with exactly one temporal dimension. The data cube must span at least the temporal interval specified in the parameter `climatology-period`.\n\nSeasonal periods may span two consecutive years, e.g. temporal winter that includes months December, January and February. If the required months before the actual climate period are available, the season is taken into account. If not available, the first season is not taken into account and the seasonal mean is based on one year less than the other seasonal normals. The incomplete season at the end of the last year is never taken into account.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube", + "dimensions": [ + { + "type": "temporal" + } + ] } }, { @@ -56,7 +61,12 @@ "description": "A data cube with the same dimensions. 
The dimension properties (name, type, labels, reference system and resolution) remain unchanged, except for the resolution and dimension labels of the temporal dimension. The temporal dimension has the following dimension labels:\n\n* `day`: `001` - `365`\n* `month`: `01` - `12`\n* `climatology-period`: `climatology-period`\n* `season`: `djf` (December - February), `mam` (March - May), `jja` (June - August), `son` (September - November)\n* `tropical-season`: `ndjfma` (November - April), `mjjaso` (May - October)", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube", + "dimensions": [ + { + "type": "temporal" + } + ] } }, "links": [ diff --git a/create_raster_cube.json b/create_data_cube.json similarity index 53% rename from create_raster_cube.json rename to create_data_cube.json index 576728ee..55f0aede 100644 --- a/create_raster_cube.json +++ b/create_data_cube.json @@ -1,16 +1,16 @@ { - "id": "create_raster_cube", - "summary": "Create an empty raster data cube", - "description": "Creates a new raster data cube without dimensions. Dimensions can be added with ``add_dimension()``.", + "id": "create_data_cube", + "summary": "Create an empty data cube", + "description": "Creates a new data cube without dimensions. 
Dimensions can be added with ``add_dimension()``.", "categories": [ "cubes" ], "parameters": [], "returns": { - "description": "An empty raster data cube with zero dimensions.", + "description": "An empty data cube with no dimensions.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube" } }, "links": [ @@ -20,4 +20,4 @@ "title": "Data Cubes explained in the openEO documentation" } ] -} \ No newline at end of file +} diff --git a/dimension_labels.json b/dimension_labels.json index 37a5908d..15c5ba0f 100644 --- a/dimension_labels.json +++ b/dimension_labels.json @@ -11,7 +11,7 @@ "description": "The data cube.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube" } }, { @@ -39,4 +39,4 @@ "message": "A dimension with the specified name does not exist." } } -} \ No newline at end of file +} diff --git a/drop_dimension.json b/drop_dimension.json index 90212dd9..eaee1d4c 100644 --- a/drop_dimension.json +++ b/drop_dimension.json @@ -11,7 +11,7 @@ "description": "The data cube to drop a dimension from.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube" } }, { @@ -26,7 +26,7 @@ "description": "A data cube without the specified dimension. The number of dimensions decreases by one, but the dimension properties (name, type, labels, reference system and resolution) for all other dimensions remain unchanged.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube" } }, "exceptions": { @@ -37,4 +37,4 @@ "message": "A dimension with the specified name does not exist." 
} } -} \ No newline at end of file +} diff --git a/filter_bands.json b/filter_bands.json index ee9c9aae..24ccf023 100644 --- a/filter_bands.json +++ b/filter_bands.json @@ -12,7 +12,12 @@ "description": "A data cube with bands.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube", + "dimensions": [ + { + "type": "bands" + } + ] } }, { @@ -62,7 +67,12 @@ "description": "A data cube limited to a subset of its original bands. The dimensions and dimension properties (name, type, labels, reference system and resolution) remain unchanged, except that the dimension of type `bands` has less (or the same) dimension labels.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube", + "dimensions": [ + { + "type": "bands" + } + ] } }, "exceptions": { @@ -85,4 +95,4 @@ "title": "Filters explained in the openEO documentation" } ] -} \ No newline at end of file +} diff --git a/filter_bbox.json b/filter_bbox.json index 8cc2103a..dae5141c 100644 --- a/filter_bbox.json +++ b/filter_bbox.json @@ -12,7 +12,23 @@ "description": "A data cube.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube", + "dimensions": [ + [ + { + "type": "spatial", + "axis": [ + "x", + "y" + ] + } + ], + [ + { + "type": "vector" + } + ] + ] } }, { @@ -94,7 +110,23 @@ "description": "A data cube restricted to the bounding box. 
The dimensions and dimension properties (name, type, labels, reference system and resolution) remain unchanged, except that the spatial dimensions have less (or the same) dimension labels.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube", + "dimensions": [ + [ + { + "type": "spatial", + "axis": [ + "x", + "y" + ] + } + ], + [ + { + "type": "vector" + } + ] + ] } }, "links": [ @@ -124,4 +156,4 @@ "title": "Simple Features standard by the OGC" } ] -} \ No newline at end of file +} diff --git a/filter_spatial.json b/filter_spatial.json index b807b8df..54e5b50a 100644 --- a/filter_spatial.json +++ b/filter_spatial.json @@ -12,23 +12,66 @@ "description": "A data cube.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube", + "dimensions": [ + [ + { + "type": "spatial", + "axis": [ + "x", + "y" + ] + } + ], + [ + { + "type": "vector" + } + ] + ] } }, { "name": "geometries", - "description": "One or more geometries used for filtering, specified as GeoJSON.", - "schema": { - "type": "object", - "subtype": "geojson" - } + "description": "One or more geometries used for filtering, given as GeoJSON or vector data cube.\n\nLimits the data cube to the bounding box of the given geometries. No implicit masking gets applied. To mask the pixels of the data cube use ``mask_polygon()``.", + "schema": [ + { + "type": "object", + "subtype": "geojson" + }, + { + "type": "object", + "subtype": "datacube", + "dimensions": [ + { + "type": "vector" + } + ] + } + ] } ], "returns": { "description": "A data cube restricted to the specified geometries. 
The dimensions and dimension properties (name, type, labels, reference system and resolution) remain unchanged, except that the spatial dimensions have less (or the same) dimension labels.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube", + "dimensions": [ + [ + { + "type": "spatial", + "axis": [ + "x", + "y" + ] + } + ], + [ + { + "type": "vector" + } + ] + ] } }, "links": [ @@ -43,4 +86,4 @@ "title": "Simple Features standard by the OGC" } ] -} \ No newline at end of file +} diff --git a/filter_temporal.json b/filter_temporal.json index bd7ea0b3..0ba2274e 100644 --- a/filter_temporal.json +++ b/filter_temporal.json @@ -12,7 +12,12 @@ "description": "A data cube.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube", + "dimensions": [ + { + "type": "temporal" + } + ] } }, { @@ -76,7 +81,12 @@ "description": "A data cube restricted to the specified temporal extent. The dimensions and dimension properties (name, type, labels, reference system and resolution) remain unchanged, except that the temporal dimensions (determined by `dimensions` parameter) may have less dimension labels.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube", + "dimensions": [ + { + "type": "temporal" + } + ] } }, "exceptions": { diff --git a/load_collection.json b/load_collection.json index 160888be..9f69f494 100644 --- a/load_collection.json +++ b/load_collection.json @@ -1,7 +1,7 @@ { "id": "load_collection", "summary": "Load a collection", - "description": "Loads a collection from the current back-end by its id and returns it as a processable data cube. The data that is added to the data cube can be restricted with the parameters `spatial_extent`, `temporal_extent`, `bands` and `properties`. 
If no data is available for the given extents, a `NoDataAvailable` exception is thrown.\n\n**Remarks:**\n\n* The bands (and all dimensions that specify nominal dimension labels) are expected to be ordered as specified in the metadata if the `bands` parameter is set to `null`.\n* If no additional parameter is specified this would imply that the whole data set is expected to be loaded. Due to the large size of many data sets, this is not recommended and may be optimized by back-ends to only load the data that is actually required after evaluating subsequent processes such as filters. This means that the pixel values should be processed only after the data has been limited to the required extent and as a consequence also to a manageable size.", + "description": "Loads a collection from the current back-end by its id and returns it as a processable data cube. The data that is added to the data cube can be restricted with the parameters `spatial_extent`, `temporal_extent`, `bands` and `properties`. If no data is available for the given extents, a `NoDataAvailable` exception is thrown.\n\n**Remarks:**\n\n* The bands (and all dimensions that specify nominal dimension labels) are expected to be ordered as specified in the metadata if the `bands` parameter is set to `null`.\n* If no additional parameter is specified this would imply that the whole data set is expected to be loaded. Due to the large size of many data sets, this is not recommended and may be optimized by back-ends to only load the data that is actually required after evaluating subsequent processes such as filters. 
This means that the values in the data cube should be processed only after the data has been limited to the required extent and as a consequence also to a manageable size.", "categories": [ "cubes", "import" @@ -18,7 +18,7 @@ }, { "name": "spatial_extent", - "description": "Limits the data to load from the collection to the specified bounding box or polygons.\n\nThe process puts a pixel into the data cube if the point at the pixel center intersects with the bounding box or any of the polygons (as defined in the Simple Features standard by the OGC).\n\nThe GeoJSON can be one of the following feature types:\n\n* A `Polygon` or `MultiPolygon` geometry,\n* a `Feature` with a `Polygon` or `MultiPolygon` geometry,\n* a `FeatureCollection` containing at least one `Feature` with `Polygon` or `MultiPolygon` geometries, or\n* a `GeometryCollection` containing `Polygon` or `MultiPolygon` geometries. To maximize interoperability, `GeometryCollection` should be avoided in favour of one of the alternatives above.\n\nSet this parameter to `null` to set no limit for the spatial extent. Be careful with this when loading large datasets! It is recommended to use this parameter instead of using ``filter_bbox()`` or ``filter_spatial()`` directly after loading unbounded data.", + "description": "Limits the data to load from the collection to the specified bounding box or polygons.\n\nThe process puts the corresponding value into the data cube if the point at the pixel center intersects with the bounding box or any of the polygons (as defined in the Simple Features standard by the OGC).\n\nThe GeoJSON can be one of the following feature types:\n\n* A `Polygon` or `MultiPolygon` geometry,\n* a `Feature` with a `Polygon` or `MultiPolygon` geometry,\n* a `FeatureCollection` containing at least one `Feature` with `Polygon` or `MultiPolygon` geometries, or\n* a `GeometryCollection` containing `Polygon` or `MultiPolygon` geometries. 
To maximize interoperability, `GeometryCollection` should be avoided in favour of one of the alternatives above.\n\nSet this parameter to `null` to set no limit for the spatial extent. Be careful with this when loading large datasets! It is recommended to use this parameter instead of using ``filter_bbox()`` or ``filter_spatial()`` directly after loading unbounded data.", "schema": [ { "title": "Bounding Box", @@ -93,10 +93,21 @@ }, { "title": "GeoJSON", - "description": "Limits the data cube to the bounding box of the given geometry. All pixels inside the bounding box that do not intersect with any of the polygons will be set to no data (`null`).", + "description": "Limits the data cube to the bounding box of the given geometries. All pixels inside the bounding box that do not intersect with any of the polygons will be set to no data (`null`).", "type": "object", "subtype": "geojson" }, + { + "title": "Vector data cube", + "description": "Limits the data cube to the bounding box of the given geometries in the vector data cube. All pixels inside the bounding box that do not intersect with any of the polygons will be set to no data (`null`).", + "type": "object", + "subtype": "datacube", + "dimensions": [ + { + "type": "vector" + } + ] + }, { "title": "No filter", "description": "Don't filter spatially. All data is included in the data cube.", @@ -219,7 +230,7 @@ "description": "A data cube for further processing. 
The dimensions and dimension properties (name, type, labels, reference system and resolution) correspond to the collection's metadata, but the dimension labels are restricted as specified in the parameters.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube" } }, "exceptions": { diff --git a/mask.json b/mask.json index 515c81cb..06e43d67 100644 --- a/mask.json +++ b/mask.json @@ -12,7 +12,16 @@ "description": "A raster data cube.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube", + "dimensions": [ + { + "type": "spatial", + "axis": [ + "x", + "y" + ] + } + ] } }, { @@ -20,7 +29,16 @@ "description": "A mask as a raster data cube. Every pixel in `data` must have a corresponding element in `mask`.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube", + "dimensions": [ + { + "type": "spatial", + "axis": [ + "x", + "y" + ] + } + ] } }, { @@ -42,7 +60,16 @@ "description": "A masked raster data cube with the same dimensions. The dimension properties (name, type, labels, reference system and resolution) remain unchanged.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube", + "dimensions": [ + { + "type": "spatial", + "axis": [ + "x", + "y" + ] + } + ] } } } diff --git a/mask_polygon.json b/mask_polygon.json index c1f59d4e..4e358fef 100644 --- a/mask_polygon.json +++ b/mask_polygon.json @@ -12,16 +12,36 @@ "description": "A raster data cube.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube", + "dimensions": [ + { + "type": "spatial", + "axis": [ + "x", + "y" + ] + } + ] } }, { "name": "mask", - "description": "A GeoJSON object containing at least one polygon. 
The provided feature types can be one of the following:\n\n* A `Polygon` or `MultiPolygon` geometry,\n* a `Feature` with a `Polygon` or `MultiPolygon` geometry,\n* a `FeatureCollection` containing at least one `Feature` with `Polygon` or `MultiPolygon` geometries, or\n* a `GeometryCollection` containing `Polygon` or `MultiPolygon` geometries. To maximize interoperability, `GeometryCollection` should be avoided in favour of one of the alternatives above.", - "schema": { - "type": "object", - "subtype": "geojson" - } + "description": "A GeoJSON object or a vector data cube containing at least one polygon. The provided vector data can be one of the following:\n\n* A `Polygon` or `MultiPolygon` geometry,\n* a `Feature` with a `Polygon` or `MultiPolygon` geometry,\n* a `FeatureCollection` containing at least one `Feature` with `Polygon` or `MultiPolygon` geometries, or\n* a `GeometryCollection` containing `Polygon` or `MultiPolygon` geometries. To maximize interoperability, `GeometryCollection` should be avoided in favour of one of the alternatives above.", + "schema": [ + { + "type": "object", + "subtype": "geojson" + }, + { + "type": "object", + "subtype": "datacube", + "dimensions": [ + { + "type": "vector" + } + ] + } + ] }, { "name": "replacement", @@ -57,7 +77,16 @@ "description": "A masked raster data cube with the same dimensions. The dimension properties (name, type, labels, reference system and resolution) remain unchanged.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube", + "dimensions": [ + { + "type": "spatial", + "axis": [ + "x", + "y" + ] + } + ] } }, "links": [ @@ -67,4 +96,4 @@ "title": "Simple Features standard by the OGC" } ] -} \ No newline at end of file +} diff --git a/merge_cubes.json b/merge_cubes.json index 28b4803b..79deb389 100644 --- a/merge_cubes.json +++ b/merge_cubes.json @@ -1,7 +1,7 @@ { "id": "merge_cubes", "summary": "Merge two data cubes", - "description": "The data cubes have to be compatible. 
A merge operation without overlap should be reversible with (a set of) filter operations for each of the two cubes. The process performs the join on overlapping dimensions, with the same name and type.\n\nAn overlapping dimension has the same name, type, reference system and resolution in both dimensions, but can have different labels. One of the dimensions can have different labels, for all other dimensions the labels must be equal. If data overlaps, the parameter `overlap_resolver` must be specified to resolve the overlap.\n\n**Examples for merging two data cubes:**\n\n1. Data cubes with the dimensions (`x`, `y`, `t`, `bands`) have the same dimension labels in `x`, `y` and `t`, but the labels for the dimension `bands` are `B1` and `B2` for the first cube and `B3` and `B4`. An overlap resolver is *not needed*. The merged data cube has the dimensions `x`, `y`, `t` and `bands` and the dimension `bands` has four dimension labels: `B1`, `B2`, `B3`, `B4`.\n2. Data cubes with the dimensions (`x`, `y`, `t`, `bands`) have the same dimension labels in `x`, `y` and `t`, but the labels for the dimension `bands` are `B1` and `B2` for the first data cube and `B2` and `B3` for the second. An overlap resolver is *required* to resolve overlap in band `B2`. The merged data cube has the dimensions `x`, `y`, `t` and `bands` and the dimension `bands` has three dimension labels: `B1`, `B2`, `B3`.\n3. Data cubes with the dimensions (`x`, `y`, `t`) have the same dimension labels in `x`, `y` and `t`. There are two options:\n 1. Keep the overlapping values separately in the merged data cube: An overlap resolver is *not needed*, but for each data cube you need to add a new dimension using ``add_dimension()``. The new dimensions must be equal, except that the labels for the new dimensions must differ by name. 
The merged data cube has the same dimensions and labels as the original data cubes, plus the dimension added with ``add_dimension()``, which has the two dimension labels after the merge.\n 2. Combine the overlapping values into a single value: An overlap resolver is *required* to resolve the overlap for all pixels. The merged data cube has the same dimensions and labels as the original data cubes, but all pixel values have been processed by the overlap resolver.\n4. A data cube with dimensions (`x`, `y`, `t` / `bands`) or (`x`, `y`, `t`, `bands`) and another data cube with dimensions (`x`, `y`) have the same dimension labels in `x` and `y`. Merging them will join dimensions `x` and `y`, so the lower dimension cube is merged with each time step and band available in the higher dimensional cube. This can for instance be used to apply a digital elevation model to a spatio-temporal data cube. An overlap resolver is *required* to resolve the overlap for all pixels.\n\nAfter the merge, the dimensions with a natural/inherent label order (with a reference system this is each spatial and temporal dimensions) still have all dimension labels sorted. For other dimensions where there is no inherent order, including bands, the dimension labels keep the order in which they are present in the original data cubes and the dimension labels of `cube2` are appended to the dimension labels of `cube1`.", + "description": "The data cubes have to be compatible. A merge operation without overlap should be reversible with (a set of) filter operations for each of the two cubes. The process performs the join on overlapping dimensions, with the same name and type.\n\nAn overlapping dimension has the same name, type, reference system and resolution in both dimensions, but can have different labels. One of the dimensions can have different labels, for all other dimensions the labels must be equal. 
If data overlaps, the parameter `overlap_resolver` must be specified to resolve the overlap.\n\n**Examples for merging two data cubes:**\n\n1. Data cubes with the dimensions (`x`, `y`, `t`, `bands`) have the same dimension labels in `x`, `y` and `t`, but the labels for the dimension `bands` are `B1` and `B2` for the first cube and `B3` and `B4`. An overlap resolver is *not needed*. The merged data cube has the dimensions `x`, `y`, `t` and `bands` and the dimension `bands` has four dimension labels: `B1`, `B2`, `B3`, `B4`.\n2. Data cubes with the dimensions (`x`, `y`, `t`, `bands`) have the same dimension labels in `x`, `y` and `t`, but the labels for the dimension `bands` are `B1` and `B2` for the first data cube and `B2` and `B3` for the second. An overlap resolver is *required* to resolve overlap in band `B2`. The merged data cube has the dimensions `x`, `y`, `t` and `bands` and the dimension `bands` has three dimension labels: `B1`, `B2`, `B3`.\n3. Data cubes with the dimensions (`x`, `y`, `t`) have the same dimension labels in `x`, `y` and `t`. There are two options:\n 1. Keep the overlapping values separately in the merged data cube: An overlap resolver is *not needed*, but for each data cube you need to add a new dimension using ``add_dimension()``. The new dimensions must be equal, except that the labels for the new dimensions must differ by name. The merged data cube has the same dimensions and labels as the original data cubes, plus the dimension added with ``add_dimension()``, which has the two dimension labels after the merge.\n 2. Combine the overlapping values into a single value: An overlap resolver is *required* to resolve the overlap for all values. The merged data cube has the same dimensions and labels as the original data cubes, but all values have been processed by the overlap resolver.\n4. 
A data cube with dimensions (`x`, `y`, `t` / `bands`) or (`x`, `y`, `t`, `bands`) and another data cube with dimensions (`x`, `y`) have the same dimension labels in `x` and `y`. Merging them will join dimensions `x` and `y`, so the lower dimension cube is merged with each time step and band available in the higher dimensional cube. This can for instance be used to apply a digital elevation model to a spatio-temporal data cube. An overlap resolver is *required* to resolve the overlap for all pixels.\n\nAfter the merge, the dimensions with a natural/inherent label order (with a reference system this is each spatial and temporal dimensions) still have all dimension labels sorted. For other dimensions where there is no inherent order, including bands, the dimension labels keep the order in which they are present in the original data cubes and the dimension labels of `cube2` are appended to the dimension labels of `cube1`.", "categories": [ "cubes" ], @@ -11,7 +11,7 @@ "description": "The first data cube.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube" } }, { @@ -19,7 +19,7 @@ "description": "The second data cube.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube" } }, { @@ -77,7 +77,7 @@ "description": "The merged data cube. 
See the process description for details regarding the dimensions and dimension properties (name, type, labels, reference system and resolution).", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube" } }, "exceptions": { @@ -92,4 +92,4 @@ "title": "Background information on reduction operators (binary reducers) by Wikipedia" } ] -} \ No newline at end of file +} diff --git a/meta/subtype-schemas.json b/meta/subtype-schemas.json index 3dc15b36..b21917b0 100644 --- a/meta/subtype-schemas.json +++ b/meta/subtype-schemas.json @@ -1,6 +1,6 @@ { "$schema": "http://json-schema.org/draft-07/schema#", - "$id": "http://processes.openeo.org/1.2.0/meta/subtype-schemas.json", + "$id": "https://processes.openeo.org/1.2.0/meta/subtype-schemas.json", "title": "Subtype Schemas", "description": "This file defines the schemas for subtypes we define for openEO processes.", "definitions": { @@ -112,6 +112,12 @@ "description": "A collection identifier from the list of supported collections.", "pattern": "^[\\w\\-\\.~/]+$" }, + "datacube": { + "type": "object", + "subtype": "datacube", + "title": "Data Cube", + "description": "A data cube that consists of an arbitrary number of dimensions and doesn't require any dimension type specifically." + }, "date": { "type": "string", "subtype": "date", @@ -290,7 +296,8 @@ "type": "object", "subtype": "raster-cube", "title": "Raster data cube", - "description": "A raster data cube, an image collection stored at the back-end. Different back-ends have different internal representations for this data structure." + "description": "A raster data cube, which is a data cube with two dimensions of type spatial (x and y). This has been deprecated in favour of `datacube`.", + "deprecated": true }, "temporal-interval": { "type": "array", @@ -417,7 +424,8 @@ "type": "object", "subtype": "vector-cube", "title": "Vector data cube", - "description": "A vector data cube, a vector collection stored at the back-end. 
Different back-ends have different internal representations for this data structure" + "description": "A vector data cube, which is a data cube with a dimension of type vector. This has been deprecated in favour of `datacube`.", + "deprecated": true }, "wkt2-definition": { "type": "string", diff --git a/ndvi.json b/ndvi.json index e86a27e6..5bb952d4 100644 --- a/ndvi.json +++ b/ndvi.json @@ -13,7 +13,19 @@ "description": "A raster data cube with two bands that have the common names `red` and `nir` assigned.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube", + "dimensions": [ + { + "type": "spatial", + "axis": [ + "x", + "y" + ] + }, + { + "type": "bands" + } + ] } }, { @@ -56,7 +68,16 @@ "description": "A raster data cube containing the computed NDVI values. The structure of the data cube differs depending on the value passed to `target_band`:\n\n* `target_band` is `null`: The data cube does not contain the dimension of type `bands`, the number of dimensions decreases by one. The dimension properties (name, type, labels, reference system and resolution) for all other dimensions remain unchanged.\n* `target_band` is a string: The data cube keeps the same dimensions. The dimension properties remain unchanged, but the number of dimension labels for the dimension of type `bands` increases by one. The additional label is named as specified in `target_band`.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube", + "dimensions": [ + { + "type": "spatial", + "axis": [ + "x", + "y" + ] + } + ] } }, "exceptions": { diff --git a/proposals/aggregate_spatial_window.json b/proposals/aggregate_spatial_window.json index 77230275..5bc3e03c 100644 --- a/proposals/aggregate_spatial_window.json +++ b/proposals/aggregate_spatial_window.json @@ -13,7 +13,16 @@ "description": "A raster data cube with exactly two horizontal spatial dimensions and an arbitrary number of additional dimensions. 
The process is applied to all additional dimensions individually.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube", + "dimensions": [ + { + "type": "spatial", + "axis": [ + "x", + "y" + ] + } + ] } }, { @@ -103,10 +112,19 @@ } ], "returns": { - "description": "A data cube with the newly computed values and the same dimensions.\n\nThe resolution will change depending on the chosen values for the `size` and `boundary` parameter. It usually decreases for the dimensions which have the corresponding parameter `size` set to values greater than 1.\n\nThe dimension labels will be set to the coordinate at the center of the window. The other dimension properties (name, type and reference system) remain unchanged.", + "description": "A raster data cube with the newly computed values and the same dimensions.\n\nThe resolution will change depending on the chosen values for the `size` and `boundary` parameter. It usually decreases for the dimensions which have the corresponding parameter `size` set to values greater than 1.\n\nThe dimension labels will be set to the coordinate at the center of the window. 
The other dimension properties (name, type and reference system) remain unchanged.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube", + "dimensions": [ + { + "type": "spatial", + "axis": [ + "x", + "y" + ] + } + ] } }, "links": [ @@ -116,4 +134,4 @@ "title": "Aggregation explained in the openEO documentation" } ] -} \ No newline at end of file +} diff --git a/proposals/ard_normalized_radar_backscatter.json b/proposals/ard_normalized_radar_backscatter.json index e643845f..ec60de44 100644 --- a/proposals/ard_normalized_radar_backscatter.json +++ b/proposals/ard_normalized_radar_backscatter.json @@ -13,8 +13,20 @@ "name": "data", "description": "The source data cube containing SAR input.", "schema": { - "subtype": "raster-cube", - "type": "object" + "type": "object", + "subtype": "datacube", + "dimensions": [ + { + "type": "spatial", + "axis": [ + "x", + "y" + ] + }, + { + "type": "bands" + } + ] } }, { @@ -73,8 +85,20 @@ "returns": { "description": "Backscatter values expressed as gamma0 in linear scale.\n\nIn addition to the bands `contributing_area` and `ellipsoid_incidence_angle` that can optionally be added with corresponding parameters, the following bands are always added to the data cube:\n\n- `mask`: A data mask that indicates which values are valid (1), invalid (0) or contain no-data (null).\n- `local_incidence_angle`: A band with DEM-based local incidence angles in degrees.\n\nThe data returned is CARD4L compliant with corresponding metadata.", "schema": { - "subtype": "raster-cube", - "type": "object" + "type": "object", + "subtype": "datacube", + "dimensions": [ + { + "type": "spatial", + "axis": [ + "x", + "y" + ] + }, + { + "type": "bands" + } + ] } }, "exceptions": { @@ -128,4 +152,4 @@ "result": true } } -} \ No newline at end of file +} diff --git a/proposals/ard_surface_reflectance.json b/proposals/ard_surface_reflectance.json index 38aa758b..01328f10 100644 --- a/proposals/ard_surface_reflectance.json +++ 
b/proposals/ard_surface_reflectance.json @@ -13,8 +13,20 @@ "description": "The source data cube containing multi-spectral optical top of the atmosphere (TOA) reflectances. There must be a single dimension of type `bands` available.", "name": "data", "schema": { - "subtype": "raster-cube", - "type": "object" + "type": "object", + "subtype": "datacube", + "dimensions": [ + { + "type": "spatial", + "axis": [ + "x", + "y" + ] + }, + { + "type": "bands" + } + ] } }, { @@ -83,8 +95,20 @@ "returns": { "description": "Data cube containing bottom of atmosphere reflectances for each spectral band in the source data cube, with atmospheric disturbances like clouds and cloud shadows removed. No-data values (null) are directly set in the bands. Depending on the methods used, several additional bands will be added to the data cube:\n\nData cube containing bottom of atmosphere reflectances for each spectral band in the source data cube, with atmospheric disturbances like clouds and cloud shadows removed. Depending on the methods used, several additional bands will be added to the data cube:\n\n- `date` (optional): Specifies per-pixel acquisition timestamps.\n- `incomplete-testing` (required): Identifies pixels with a value of 1 for which the per-pixel tests (at least saturation, cloud and cloud shadows, see CARD4L specification for details) have not all been successfully completed. Otherwise, the value is 0.\n- `saturation` (required) / `saturation_{band}` (optional): Indicates where pixels in the input spectral bands are saturated (1) or not (0). If the saturation is given per band, the band names are `saturation_{band}` with `{band}` being the band name from the source data cube.\n- `cloud`, `shadow` (both required),`aerosol`, `haze`, `ozone`, `water_vapor` (all optional): Indicates the probability of pixels being an atmospheric disturbance such as clouds. 
All bands have values between 0 (clear) and 1, which describes the probability that it is an atmospheric disturbance.\n- `snow-ice` (optional): Points to a file that indicates whether a pixel is assessed as being snow/ice (1) or not (0). All values describe the probability and must be between 0 and 1.\n- `land-water` (optional): Indicates whether a pixel is assessed as being land (1) or water (0). All values describe the probability and must be between 0 and 1.\n- `incidence-angle` (optional): Specifies per-pixel incidence angles in degrees.\n- `azimuth` (optional): Specifies per-pixel azimuth angles in degrees.\n- `sun-azimuth:` (optional): Specifies per-pixel sun azimuth angles in degrees.\n- `sun-elevation` (optional): Specifies per-pixel sun elevation angles in degrees.\n- `terrain-shadow` (optional): Indicates with a value of 1 whether a pixel is not directly illuminated due to terrain shadowing. Otherwise, the value is 0.\n- `terrain-occlusion` (optional): Indicates with a value of 1 whether a pixel is not visible to the sensor due to terrain occlusion during off-nadir viewing. 
Otherwise, the value is 0.\n- `terrain-illumination` (optional): Contains coefficients used for terrain illumination correction are provided for each pixel.\n\nThe data returned is CARD4L compliant with corresponding metadata.", "schema": { - "subtype": "raster-cube", - "type": "object" + "type": "object", + "subtype": "datacube", + "dimensions": [ + { + "type": "spatial", + "axis": [ + "x", + "y" + ] + }, + { + "type": "bands" + } + ] } }, "links": [ @@ -94,4 +118,4 @@ "title": "CEOS CARD4L specification" } ] -} \ No newline at end of file +} diff --git a/proposals/atmospheric_correction.json b/proposals/atmospheric_correction.json index 9b537322..d366f1ed 100644 --- a/proposals/atmospheric_correction.json +++ b/proposals/atmospheric_correction.json @@ -12,8 +12,20 @@ "description": "Data cube containing multi-spectral optical top of atmosphere reflectances to be corrected.", "name": "data", "schema": { - "subtype": "raster-cube", - "type": "object" + "type": "object", + "subtype": "datacube", + "dimensions": [ + { + "type": "spatial", + "axis": [ + "x", + "y" + ] + }, + { + "type": "bands" + } + ] } }, { @@ -63,8 +75,20 @@ "returns": { "description": "Data cube containing bottom of atmosphere reflectances.", "schema": { - "subtype": "raster-cube", - "type": "object" + "type": "object", + "subtype": "datacube", + "dimensions": [ + { + "type": "spatial", + "axis": [ + "x", + "y" + ] + }, + { + "type": "bands" + } + ] } }, "exceptions": { @@ -79,4 +103,4 @@ "title": "Atmospheric correction explained by EO4GEO body of knowledge." 
} ] -} \ No newline at end of file +} diff --git a/proposals/cloud_detection.json b/proposals/cloud_detection.json index f9025c5b..d695720e 100644 --- a/proposals/cloud_detection.json +++ b/proposals/cloud_detection.json @@ -12,8 +12,20 @@ "description": "The source data cube containing multi-spectral optical top of the atmosphere (TOA) reflectances on which to perform cloud detection.", "name": "data", "schema": { - "subtype": "raster-cube", - "type": "object" + "type": "object", + "subtype": "datacube", + "dimensions": [ + { + "type": "spatial", + "axis": [ + "x", + "y" + ] + }, + { + "type": "bands" + } + ] } }, { @@ -49,8 +61,20 @@ "returns": { "description": "A data cube with bands for the atmospheric disturbances. Each of the masks contains values between 0 and 1. The data cube has the same spatial and temporal dimensions as the source data cube and a dimension that contains a dimension label for each of the supported/considered atmospheric disturbance.", "schema": { - "subtype": "raster-cube", - "type": "object" + "type": "object", + "subtype": "datacube", + "dimensions": [ + { + "type": "spatial", + "axis": [ + "x", + "y" + ] + }, + { + "type": "bands" + } + ] } }, "links": [ @@ -60,4 +84,4 @@ "title": "Cloud mask explained by EO4GEO body of knowledge." } ] -} \ No newline at end of file +} diff --git a/proposals/filter_labels.json b/proposals/filter_labels.json index 01d77035..cd077306 100644 --- a/proposals/filter_labels.json +++ b/proposals/filter_labels.json @@ -13,7 +13,7 @@ "description": "A data cube.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube" } }, { @@ -74,7 +74,7 @@ "description": "A data cube with the same dimensions. 
The dimension properties (name, type, labels, reference system and resolution) remain unchanged, except that the given dimension has less (or the same) dimension labels.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube" } }, "exceptions": { @@ -115,4 +115,4 @@ "title": "Filters explained in the openEO documentation" } ] -} \ No newline at end of file +} diff --git a/proposals/fit_class_random_forest.json b/proposals/fit_class_random_forest.json index a9a549d9..11f0c9b9 100644 --- a/proposals/fit_class_random_forest.json +++ b/proposals/fit_class_random_forest.json @@ -12,7 +12,12 @@ "description": "The predictors for the classification model as a vector data cube. Aggregated to the features (vectors) of the target input variable.", "schema": { "type": "object", - "subtype": "vector-cube" + "subtype": "datacube", + "dimensions": [ + { + "type": "vector" + } + ] } }, { @@ -20,7 +25,12 @@ "description": "The training sites for the classification model as a vector data cube. This is associated with the target variable for the Random Forest model. The geometry has to associated with a value to predict (e.g. 
fractional forest canopy cover).", "schema": { "type": "object", - "subtype": "vector-cube" + "subtype": "datacube", + "dimensions": [ + { + "type": "vector" + } + ] } }, { diff --git a/proposals/fit_curve.json b/proposals/fit_curve.json index 3b5df7e1..9d97dfda 100644 --- a/proposals/fit_curve.json +++ b/proposals/fit_curve.json @@ -13,7 +13,7 @@ "description": "A data cube.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube" } }, { @@ -30,7 +30,7 @@ { "title": "Data Cube with optimal values from a previous result of this process.", "type": "object", - "subtype": "raster-cube" + "subtype": "datacube" } ] }, @@ -80,7 +80,7 @@ "description": "A data cube with the optimal values for the parameters.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube" } }, "exceptions": { @@ -91,4 +91,4 @@ "message": "A dimension with the specified name does not exist." } } -} \ No newline at end of file +} diff --git a/proposals/fit_regr_random_forest.json b/proposals/fit_regr_random_forest.json index e75028b2..f2a97ca7 100644 --- a/proposals/fit_regr_random_forest.json +++ b/proposals/fit_regr_random_forest.json @@ -12,7 +12,12 @@ "description": "The predictors for the regression model as a vector data cube. Aggregated to the features (vectors) of the target input variable.", "schema": { "type": "object", - "subtype": "vector-cube" + "subtype": "datacube", + "dimensions": [ + { + "type": "vector" + } + ] } }, { @@ -20,7 +25,12 @@ "description": "The training sites for the regression model as a vector data cube. This is associated with the target variable for the Random Forest model. The geometry has to associated with a value to predict (e.g. 
fractional forest canopy cover).", "schema": { "type": "object", - "subtype": "vector-cube" + "subtype": "datacube", + "dimensions": [ + { + "type": "vector" + } + ] } }, { diff --git a/proposals/flatten_dimensions.json b/proposals/flatten_dimensions.json index 05e54212..da3647ab 100644 --- a/proposals/flatten_dimensions.json +++ b/proposals/flatten_dimensions.json @@ -12,7 +12,7 @@ "description": "A data cube.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube" } }, { @@ -47,7 +47,7 @@ "description": "A data cube with the new shape. The dimension properties (name, type, labels, reference system and resolution) for all other dimensions remain unchanged.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube" } }, "exceptions": { diff --git a/proposals/inspect.json b/proposals/inspect.json index b0a0335d..9d7f2190 100644 --- a/proposals/inspect.json +++ b/proposals/inspect.json @@ -1,7 +1,7 @@ { "id": "inspect", "summary": "Add information to the logs", - "description": "This process can be used to add runtime information to the logs, e.g. for debugging purposes. This process should be used with caution and it is recommended to remove the process in production workflows. For example, logging each pixel or array individually in a process such as ``apply()`` or ``reduce_dimension()`` could lead to a (too) large number of log entries. Several data structures (e.g. data cubes) are too large to log and will only return summaries of their contents.\n\nThe data provided in the parameter `data` is returned without changes.", + "description": "This process can be used to add runtime information to the logs, e.g. for debugging purposes. This process should be used with caution and it is recommended to remove the process in production workflows. For example, logging each value or array individually in a process such as ``apply()`` or ``reduce_dimension()`` could lead to a (too) large number of log entries. 
Several data structures (e.g. data cubes) are too large to log and will only return summaries of their contents.\n\nThe data provided in the parameter `data` is returned without changes.", "categories": [ "development" ], diff --git a/proposals/load_result.json b/proposals/load_result.json index fa056b48..5e305fa4 100644 --- a/proposals/load_result.json +++ b/proposals/load_result.json @@ -1,7 +1,7 @@ { "id": "load_result", "summary": "Load batch job results", - "description": "Loads batch job results and returns them as a processable data cube. A batch job result can be loaded by ID or URL:\n\n* **ID**: The identifier for a finished batch job. The job must have been submitted by the authenticated user on the back-end currently connected to.\n* **URL**: The URL to the STAC metadata for a batch job result. This is usually a signed URL that is provided by some back-ends since openEO API version 1.1.0 through the `canonical` link relation in the batch job result metadata.\n\nIf supported by the underlying metadata and file format, the data that is added to the data cube can be restricted with the parameters `spatial_extent`, `temporal_extent` and `bands`.\n\n**Remarks:**\n\n* The bands (and all dimensions that specify nominal dimension labels) are expected to be ordered as specified in the metadata if the `bands` parameter is set to `null`.\n* If no additional parameter is specified this would imply that the whole data set is expected to be loaded. Due to the large size of many data sets, this is not recommended and may be optimized by back-ends to only load the data that is actually required after evaluating subsequent processes such as filters. This means that the pixel values should be processed only after the data has been limited to the required extent and as a consequence also to a manageable size.", + "description": "Loads batch job results and returns them as a processable data cube. 
A batch job result can be loaded by ID or URL:\n\n* **ID**: The identifier for a finished batch job. The job must have been submitted by the authenticated user on the back-end currently connected to.\n* **URL**: The URL to the STAC metadata for a batch job result. This is usually a signed URL that is provided by some back-ends since openEO API version 1.1.0 through the `canonical` link relation in the batch job result metadata.\n\nIf supported by the underlying metadata and file format, the data that is added to the data cube can be restricted with the parameters `spatial_extent`, `temporal_extent` and `bands`.\n\n**Remarks:**\n\n* The bands (and all dimensions that specify nominal dimension labels) are expected to be ordered as specified in the metadata if the `bands` parameter is set to `null`.\n* If no additional parameter is specified this would imply that the whole data set is expected to be loaded. Due to the large size of many data sets, this is not recommended and may be optimized by back-ends to only load the data that is actually required after evaluating subsequent processes such as filters. This means that the values should be processed only after the data has been limited to the required extent and as a consequence also to a manageable size.", "categories": [ "cubes", "import" @@ -108,6 +108,17 @@ "type": "object", "subtype": "geojson" }, + { + "title": "Vector data cube", + "description": "Limits the data cube to the bounding box of the given geometries in the vector data cube. All pixels inside the bounding box that do not intersect with any of the polygons will be set to no data (`null`).", + "type": "object", + "subtype": "datacube", + "dimensions": [ + { + "type": "vector" + } + ] + }, { "title": "No filter", "description": "Don't filter spatially. 
All data is included in the data cube.", @@ -196,7 +207,7 @@ "description": "A data cube for further processing.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube" } } } diff --git a/proposals/load_uploaded_files.json b/proposals/load_uploaded_files.json index bf811b4e..039994ff 100644 --- a/proposals/load_uploaded_files.json +++ b/proposals/load_uploaded_files.json @@ -44,7 +44,7 @@ "description": "A data cube for further processing.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube" } }, "exceptions": { diff --git a/proposals/predict_curve.json b/proposals/predict_curve.json index 52adcc5e..9fb5d341 100644 --- a/proposals/predict_curve.json +++ b/proposals/predict_curve.json @@ -13,15 +13,15 @@ "description": "A data cube to predict values for.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube" } }, { "name": "parameters", - "description": "A data cube with optimal values from a result of e.g. ``fit_curve()``.", + "description": "A data cube with optimal values, e.g. computed by the process ``fit_curve()``.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube" } }, { @@ -101,7 +101,7 @@ "description": "A data cube with the predicted values.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube" } }, "exceptions": { @@ -109,4 +109,4 @@ "message": "A dimension with the specified name does not exist." 
} } -} \ No newline at end of file +} diff --git a/proposals/reduce_spatial.json b/proposals/reduce_spatial.json index d9a2fb56..77d64d71 100644 --- a/proposals/reduce_spatial.json +++ b/proposals/reduce_spatial.json @@ -11,10 +11,19 @@ "parameters": [ { "name": "data", - "description": "A data cube.", + "description": "A raster data cube.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube", + "dimensions": [ + { + "type": "spatial", + "axis": [ + "x", + "y" + ] + } + ] } }, { @@ -63,10 +72,10 @@ } ], "returns": { - "description": "A data cube with the newly computed values. It is missing the horizontal spatial dimensions, the number of dimensions decreases by two. The dimension properties (name, type, labels, reference system and resolution) for all other dimensions remain unchanged.", + "description": "A raster data cube with the newly computed values. It is missing the horizontal spatial dimensions, the number of dimensions decreases by two. The dimension properties (name, type, labels, reference system and resolution) for all other dimensions remain unchanged.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube" } }, "links": [ @@ -76,4 +85,4 @@ "title": "Reducers explained in the openEO documentation" } ] -} \ No newline at end of file +} diff --git a/proposals/resample_cube_temporal.json b/proposals/resample_cube_temporal.json index 2bd38dde..5ebb8f47 100644 --- a/proposals/resample_cube_temporal.json +++ b/proposals/resample_cube_temporal.json @@ -13,7 +13,12 @@ "description": "A data cube with one or more temporal dimensions.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube", + "dimensions": [ + { + "type": "temporal" + } + ] } }, { @@ -21,7 +26,12 @@ "description": "A data cube that describes the temporal target resolution.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube", + "dimensions": [ + { + "type": "temporal" + } + ] } }, { 
@@ -53,7 +63,12 @@ "description": "A raster data cube with the same dimensions and the same dimension properties (name, type, labels, reference system and resolution) for all non-temporal dimensions. For the temporal dimension, the name and type remain unchanged, but the dimension labels, resolution and reference system may change.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube", + "dimensions": [ + { + "type": "temporal" + } + ] } }, "exceptions": { @@ -71,4 +86,4 @@ "title": "Resampling explained in the openEO documentation" } ] -} \ No newline at end of file +} diff --git a/proposals/sar_backscatter.json b/proposals/sar_backscatter.json index 77fdf73e..03d13d29 100644 --- a/proposals/sar_backscatter.json +++ b/proposals/sar_backscatter.json @@ -12,8 +12,20 @@ "name": "data", "description": "The source data cube containing SAR input.", "schema": { - "subtype": "raster-cube", - "type": "object" + "type": "object", + "subtype": "datacube", + "dimensions": [ + { + "type": "spatial", + "axis": [ + "x", + "y" + ] + }, + { + "type": "bands" + } + ] } }, { @@ -112,8 +124,20 @@ "returns": { "description": "Backscatter values corresponding to the chosen parametrization. The values are given in linear scale.", "schema": { - "subtype": "raster-cube", - "type": "object" + "type": "object", + "subtype": "datacube", + "dimensions": [ + { + "type": "spatial", + "axis": [ + "x", + "y" + ] + }, + { + "type": "bands" + } + ] } }, "exceptions": { diff --git a/proposals/unflatten_dimension.json b/proposals/unflatten_dimension.json index 1cbf2d1d..990e7469 100644 --- a/proposals/unflatten_dimension.json +++ b/proposals/unflatten_dimension.json @@ -12,7 +12,7 @@ "description": "A data cube that is consistently structured so that operation can execute flawlessly (e.g. 
the dimension labels need to contain the `label_separator` exactly 1 time for two target dimensions, 2 times for three target dimensions etc.).", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube" } }, { @@ -48,7 +48,7 @@ "description": "A data cube with the new shape. The dimension properties (name, type, labels, reference system and resolution) for all other dimensions remain unchanged.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube" } }, "exceptions": { diff --git a/proposals/vector_buffer.json b/proposals/vector_buffer.json index 204a54b7..9ee12fa6 100644 --- a/proposals/vector_buffer.json +++ b/proposals/vector_buffer.json @@ -17,7 +17,12 @@ }, { "type": "object", - "subtype": "vector-cube" + "subtype": "datacube", + "dimensions": [ + { + "type": "vector" + } + ] } ] }, @@ -36,7 +41,12 @@ "description": "Returns a vector data cube with the computed new geometries.", "schema": { "type": "object", - "subtype": "vector-cube" + "subtype": "datacube", + "dimensions": [ + { + "type": "vector" + } + ] } } } diff --git a/proposals/vector_to_random_points.json b/proposals/vector_to_random_points.json index afe340ef..4aa6a3d9 100644 --- a/proposals/vector_to_random_points.json +++ b/proposals/vector_to_random_points.json @@ -18,7 +18,12 @@ }, { "type": "object", - "subtype": "vector-cube" + "subtype": "datacube", + "dimensions": [ + { + "type": "vector" + } + ] } ] }, @@ -80,7 +85,12 @@ "description": "Returns a vector data cube with the sampled points.", "schema": { "type": "object", - "subtype": "vector-cube" + "subtype": "datacube", + "dimensions": [ + { + "type": "vector" + } + ] } }, "exceptions": { diff --git a/proposals/vector_to_regular_points.json b/proposals/vector_to_regular_points.json index 3fd105f6..ce281485 100644 --- a/proposals/vector_to_regular_points.json +++ b/proposals/vector_to_regular_points.json @@ -18,7 +18,12 @@ }, { "type": "object", - "subtype": "vector-cube" + "subtype": 
"datacube", + "dimensions": [ + { + "type": "vector" + } + ] } ] }, @@ -44,7 +49,12 @@ "description": "Returns a vector data cube with the sampled points.", "schema": { "type": "object", - "subtype": "vector-cube" + "subtype": "datacube", + "dimensions": [ + { + "type": "vector" + } + ] } } } diff --git a/reduce_dimension.json b/reduce_dimension.json index 27ed34de..7fb77ba5 100644 --- a/reduce_dimension.json +++ b/reduce_dimension.json @@ -1,7 +1,7 @@ { "id": "reduce_dimension", "summary": "Reduce dimensions", - "description": "Applies a reducer to a data cube dimension by collapsing all the pixel values along the specified dimension into an output value computed by the reducer.\n\nThe dimension is dropped. To avoid this, use ``apply_dimension()`` instead.", + "description": "Applies a reducer to a data cube dimension by collapsing all the values along the specified dimension into an output value computed by the reducer.\n\nThe dimension is dropped. To avoid this, use ``apply_dimension()`` instead.", "categories": [ "cubes", "reducer" @@ -12,7 +12,7 @@ "description": "A data cube.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube" } }, { @@ -72,7 +72,7 @@ "description": "A data cube with the newly computed values. It is missing the given dimension, the number of dimensions decreases by one. 
The dimension properties (name, type, labels, reference system and resolution) for all other dimensions remain unchanged.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube" } }, "exceptions": { @@ -87,4 +87,4 @@ "title": "Reducers explained in the openEO documentation" } ] -} \ No newline at end of file +} diff --git a/rename_dimension.json b/rename_dimension.json index 15c46410..ecfd1983 100644 --- a/rename_dimension.json +++ b/rename_dimension.json @@ -11,7 +11,7 @@ "description": "The data cube.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube" } }, { @@ -33,7 +33,7 @@ "description": "A data cube with the same dimensions, but the name of one of the dimensions changes. The old name can not be referred to any longer. The dimension properties (name, type, labels, reference system and resolution) remain unchanged.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube" } }, "exceptions": { @@ -44,4 +44,4 @@ "message": "A dimension with the specified name already exists." } } -} \ No newline at end of file +} diff --git a/rename_labels.json b/rename_labels.json index 41fe7d7d..2042737d 100644 --- a/rename_labels.json +++ b/rename_labels.json @@ -11,7 +11,7 @@ "description": "The data cube.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube" } }, { @@ -54,7 +54,7 @@ "description": "The data cube with the same dimensions. The dimension properties (name, type, labels, reference system and resolution) remain unchanged, except that for the given dimension the labels change. The old labels can not be referred to any longer. 
The number of labels remains the same.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube" } }, "exceptions": { diff --git a/resample_cube_spatial.json b/resample_cube_spatial.json index 54a5f801..3cbdfa49 100644 --- a/resample_cube_spatial.json +++ b/resample_cube_spatial.json @@ -9,18 +9,36 @@ "parameters": [ { "name": "data", - "description": "A data cube.", + "description": "A raster data cube.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube", + "dimensions": [ + { + "type": "spatial", + "axis": [ + "x", + "y" + ] + } + ] } }, { "name": "target", - "description": "A data cube that describes the spatial target resolution.", + "description": "A raster data cube that describes the spatial target resolution.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube", + "dimensions": [ + { + "type": "spatial", + "axis": [ + "x", + "y" + ] + } + ] } }, { @@ -50,10 +68,19 @@ } ], "returns": { - "description": "A data cube with the same dimensions. The dimension properties (name, type, labels, reference system and resolution) remain unchanged, except for the resolution and dimension labels of the spatial dimensions.", + "description": "A raster data cube with the same dimensions. 
The dimension properties (name, type, labels, reference system and resolution) remain unchanged, except for the resolution and dimension labels of the spatial dimensions.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube", + "dimensions": [ + { + "type": "spatial", + "axis": [ + "x", + "y" + ] + } + ] } }, "links": [ diff --git a/resample_spatial.json b/resample_spatial.json index 91d6bc5f..d97865f2 100644 --- a/resample_spatial.json +++ b/resample_spatial.json @@ -12,7 +12,16 @@ "description": "A raster data cube.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube", + "dimensions": [ + { + "type": "spatial", + "axis": [ + "x", + "y" + ] + } + ] } }, { @@ -115,7 +124,16 @@ "description": "A raster data cube with values warped onto the new projection. It has the same dimensions and the same dimension properties (name, type, labels, reference system and resolution) for all non-spatial or vertical spatial dimensions. For the horizontal spatial dimensions the name and type remain unchanged, but reference system, labels and resolution may change depending on the given parameters.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube", + "dimensions": [ + { + "type": "spatial", + "axis": [ + "x", + "y" + ] + } + ] } }, "links": [ @@ -145,4 +163,4 @@ "title": "gdalwarp resampling methods" } ] -} \ No newline at end of file +} diff --git a/save_result.json b/save_result.json index 0ad0a582..8fa67ebb 100644 --- a/save_result.json +++ b/save_result.json @@ -10,16 +10,10 @@ { "name": "data", "description": "The data to deliver in the given file format.", - "schema": [ - { - "type": "object", - "subtype": "raster-cube" - }, - { - "type": "object", - "subtype": "vector-cube" - } - ] + "schema": { + "type": "object", + "subtype": "datacube" + } }, { "name": "format", diff --git a/tests/package.json b/tests/package.json index be51806f..861bfe5f 100644 --- a/tests/package.json +++ 
b/tests/package.json @@ -1,6 +1,6 @@ { "name": "@openeo/processes-validator", - "version": "0.2.0", + "version": "0.3.0", "author": "openEO Consortium", "contributors": [ { diff --git a/tests/testHelpers.js b/tests/testHelpers.js index 418fd830..cc63232a 100644 --- a/tests/testHelpers.js +++ b/tests/testHelpers.js @@ -106,7 +106,67 @@ async function getAjv() { }, compile: function (subtype, schema) { if (schema.type != subtypes.definitions[subtype].type) { - throw "Subtype '"+subtype+"' not allowed for type '"+schema.type+"'." + throw "Subtype '"+subtype+"' not allowed for type '"+schema.type+"'."; + } + if (subtypes.definitions[subtype].deprecated) { + throw "Deprecated subtypes not allowed."; + } + return () => true; + }, + errors: false + }); + let dimensionSchema = { + type: "array", + minItems: 1, + items: { + type: "object", + required: ["type"], + oneOf: [ + { + properties: { + type: { + type: "string", + const: "spatial" + }, + axis: { + type: "array", + minItems: 1, + items: { + type: "string", + enum: ["x", "y", "z"] + } + } + } + }, + { + properties: { + type: { + type: "string", + enum: ["bands", "temporal", "vector", "other"] + } + } + } + ] + } + }; + jsv.addKeyword("dimensions", { + dependencies: [ + "type", + "subtype" + ], + metaSchema: { + oneOf: [ + dimensionSchema, + { + type: "array", + minItems: 2, + items: dimensionSchema + } + ] + }, + compile: function (_, schema) { + if (schema.subtype != 'datacube') { + throw "Dimensions only allowed for subtype 'datacube'." 
} return () => true; }, @@ -169,7 +229,7 @@ function checkSpelling(text, p = null) { if (p && p.id) { pre += " in " + p.id; } - console.warn(pre + ": " + JSON.stringify(errors)); + throw (pre + ": " + JSON.stringify(errors)); } } diff --git a/trim_cube.json b/trim_cube.json index 4329024e..c3c7891e 100644 --- a/trim_cube.json +++ b/trim_cube.json @@ -8,18 +8,18 @@ "parameters": [ { "name": "data", - "description": "A raster data cube to trim.", + "description": "A data cube to trim.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube" } } ], "returns": { - "description": "A trimmed raster data cube with the same dimensions. The dimension properties name, type, reference system and resolution remain unchanged. The number of dimension labels may decrease.", + "description": "A trimmed data cube with the same dimensions. The dimension properties name, type, reference system and resolution remain unchanged. The number of dimension labels may decrease.", "schema": { "type": "object", - "subtype": "raster-cube" + "subtype": "datacube" } } -} \ No newline at end of file +} From 188a43d1ad207db02d7671d29ce46ae9b4ef619c Mon Sep 17 00:00:00 2001 From: Matthias Mohr Date: Thu, 6 Oct 2022 16:48:37 +0200 Subject: [PATCH 02/14] Make dimensions schema more simple, update load_collection and spartial filters --- filter_bbox.json | 50 ++++++++++++++++++------------ filter_spatial.json | 50 +++++++++++------------------- load_collection.json | 6 ++-- tests/testHelpers.js | 74 +++++++++++++++++++------------------------- 4 files changed, 84 insertions(+), 96 deletions(-) diff --git a/filter_bbox.json b/filter_bbox.json index dae5141c..dc6575c9 100644 --- a/filter_bbox.json +++ b/filter_bbox.json @@ -1,7 +1,7 @@ { "id": "filter_bbox", "summary": "Spatial filter using a bounding box", - "description": "Limits the data cube to the specified bounding box.\n\nThe filter retains a pixel in the data cube if the point at the pixel center intersects with the 
bounding box (as defined in the Simple Features standard by the OGC).", + "description": "Limits the data cube to the specified bounding box.\n\n* For raster data cubes, the filter retains a pixel in the data cube if the point at the pixel center intersects with the bounding box (as defined in the Simple Features standard by the OGC).\n* For vector data cubes, the filter retains the geometry in the data cube if the geometry is fully within the bounding box (as defined in the Simple Features standard by the OGC).", "categories": [ "cubes", "filter" @@ -10,11 +10,12 @@ { "name": "data", "description": "A data cube.", - "schema": { - "type": "object", - "subtype": "datacube", - "dimensions": [ - [ + "schema": [ + { + "title": "Raster data cube", + "type": "object", + "subtype": "datacube", + "dimensions": [ { "type": "spatial", "axis": [ "x", "y" ] } - ], - [ + ] + }, + { + "title": "Vector data cube", + "type": "object", + "subtype": "datacube", + "dimensions": [ { "type": "vector" } ] - ] - } + } + ] }, { "name": "extent", @@ -108,11 +114,12 @@ ], "returns": { "description": "A data cube restricted to the bounding box.
The dimensions and dimension properties (name, type, labels, reference system and resolution) remain unchanged, except that the spatial dimensions have less (or the same) dimension labels.", - "schema": { - "type": "object", - "subtype": "datacube", - "dimensions": [ - [ + "schema": [ + { + "title": "Raster data cube", + "type": "object", + "subtype": "datacube", + "dimensions": [ { "type": "spatial", "axis": [ @@ -120,14 +127,19 @@ "y" ] } - ], - [ + ] + }, + { + "title": "Vector data cube", + "type": "object", + "subtype": "datacube", + "dimensions": [ { "type": "vector" } ] - ] - } + } + ] }, "links": [ { diff --git a/filter_spatial.json b/filter_spatial.json index 54e5b50a..8d9db74b 100644 --- a/filter_spatial.json +++ b/filter_spatial.json @@ -1,7 +1,7 @@ { "id": "filter_spatial", - "summary": "Spatial filter using geometries", - "description": "Limits the data cube over the spatial dimensions to the specified geometries.\n\n- For **polygons**, the filter retains a pixel in the data cube if the point at the pixel center intersects with at least one of the polygons (as defined in the Simple Features standard by the OGC).\n- For **points**, the process considers the closest pixel center.\n- For **lines** (line strings), the process considers all the pixels whose centers are closest to at least one point on the line.\n\nMore specifically, pixels outside of the bounding box of the given geometry will not be available after filtering. 
All pixels inside the bounding box that are not retained will be set to `null` (no data).", + "summary": "Spatial filter raster data cubes using geometries", + "description": "Limits the raster data cube over the spatial dimensions to the specified geometries.\n\n- For **polygons**, the filter retains a pixel in the data cube if the point at the pixel center intersects with at least one of the polygons (as defined in the Simple Features standard by the OGC).\n- For **points**, the process considers the closest pixel center.\n- For **lines** (line strings), the process considers all the pixels whose centers are closest to at least one point on the line.\n\nMore specifically, pixels outside of the bounding box of the given geometry will not be available after filtering. All pixels inside the bounding box that are not retained will be set to `null` (no data).", "categories": [ "cubes", "filter" @@ -9,25 +9,18 @@ "parameters": [ { "name": "data", - "description": "A data cube.", + "description": "A raster data cube.", "schema": { "type": "object", "subtype": "datacube", "dimensions": [ - [ - { - "type": "spatial", - "axis": [ - "x", - "y" - ] - } - ], - [ - { - "type": "vector" - } - ] + { + "type": "spatial", + "axis": [ + "x", + "y" + ] + } ] } }, @@ -52,25 +45,18 @@ } ], "returns": { - "description": "A data cube restricted to the specified geometries. The dimensions and dimension properties (name, type, labels, reference system and resolution) remain unchanged, except that the spatial dimensions have less (or the same) dimension labels.", + "description": "A raster data cube restricted to the specified geometries. 
The dimensions and dimension properties (name, type, labels, reference system and resolution) remain unchanged, except that the spatial dimensions have less (or the same) dimension labels.", "schema": { "type": "object", "subtype": "datacube", "dimensions": [ - [ - { - "type": "spatial", - "axis": [ - "x", - "y" - ] - } - ], - [ - { - "type": "vector" - } - ] + { + "type": "spatial", + "axis": [ + "x", + "y" + ] + } ] } }, diff --git a/load_collection.json b/load_collection.json index 9f69f494..dd7d424f 100644 --- a/load_collection.json +++ b/load_collection.json @@ -18,7 +18,7 @@ }, { "name": "spatial_extent", - "description": "Limits the data to load from the collection to the specified bounding box or polygons.\n\nThe process puts the corresponding value into the data cube if the point at the pixel center intersects with the bounding box or any of the polygons (as defined in the Simple Features standard by the OGC).\n\nThe GeoJSON can be one of the following feature types:\n\n* A `Polygon` or `MultiPolygon` geometry,\n* a `Feature` with a `Polygon` or `MultiPolygon` geometry,\n* a `FeatureCollection` containing at least one `Feature` with `Polygon` or `MultiPolygon` geometries, or\n* a `GeometryCollection` containing `Polygon` or `MultiPolygon` geometries. To maximize interoperability, `GeometryCollection` should be avoided in favour of one of the alternatives above.\n\nSet this parameter to `null` to set no limit for the spatial extent. Be careful with this when loading large datasets! 
It is recommended to use this parameter instead of using ``filter_bbox()`` or ``filter_spatial()`` directly after loading unbounded data.", + "description": "Limits the data to load from the collection to the specified bounding box or polygons.\n\n* For raster data, the process loads the pixel into the data cube if the point at the pixel center intersects with the bounding box or any of the polygons (as defined in the Simple Features standard by the OGC).\n* For vector data, the process loads the geometry into the data cube if the geometry is fully within the bounding box or any of the polygons (as defined in the Simple Features standard by the OGC).\n\nThe GeoJSON can be one of the following feature types:\n\n* A `Polygon` or `MultiPolygon` geometry,\n* a `Feature` with a `Polygon` or `MultiPolygon` geometry,\n* a `FeatureCollection` containing at least one `Feature` with `Polygon` or `MultiPolygon` geometries, or\n* a `GeometryCollection` containing `Polygon` or `MultiPolygon` geometries. To maximize interoperability, `GeometryCollection` should be avoided in favour of one of the alternatives above.\n\nSet this parameter to `null` to set no limit for the spatial extent. Be careful with this when loading large datasets! It is recommended to use this parameter instead of using ``filter_bbox()`` or ``filter_spatial()`` directly after loading unbounded data.", "schema": [ { "title": "Bounding Box", @@ -93,13 +93,13 @@ }, { "title": "GeoJSON", - "description": "Limits the data cube to the bounding box of the given geometries. All pixels inside the bounding box that do not intersect with any of the polygons will be set to no data (`null`).", + "description": "Limits the data cube to the bounding box of the given geometries.
For raster data, all pixels inside the bounding box that do not intersect with any of the polygons will be set to no data (`null`).", "type": "object", "subtype": "geojson" }, { "title": "Vector data cube", - "description": "Limits the data cube to the bounding box of the given geometries in the vector data cube. All pixels inside the bounding box that do not intersect with any of the polygons will be set to no data (`null`).", + "description": "Limits the data cube to the bounding box of the given geometries in the vector data cube. For raster data, all pixels inside the bounding box that do not intersect with any of the polygons will be set to no data (`null`).", "type": "object", "subtype": "datacube", "dimensions": [ diff --git a/tests/testHelpers.js b/tests/testHelpers.js index cc63232a..06c560a7 100644 --- a/tests/testHelpers.js +++ b/tests/testHelpers.js @@ -115,54 +115,44 @@ async function getAjv() { }, errors: false }); - let dimensionSchema = { - type: "array", - minItems: 1, - items: { - type: "object", - required: ["type"], - oneOf: [ - { - properties: { - type: { - type: "string", - const: "spatial" - }, - axis: { - type: "array", - minItems: 1, - items: { - type: "string", - enum: ["x", "y", "z"] - } - } - } - }, - { - properties: { - type: { - type: "string", - enum: ["bands", "temporal", "vector", "other"] - } - } - } - ] - } - }; jsv.addKeyword("dimensions", { dependencies: [ "type", "subtype" ], metaSchema: { - oneOf: [ - dimensionSchema, - { - type: "array", - minItems: 2, - items: dimensionSchema - } - ] + type: "array", + minItems: 1, + items: { + type: "object", + required: ["type"], + oneOf: [ + { + properties: { + type: { + type: "string", + const: "spatial" + }, + axis: { + type: "array", + minItems: 1, + items: { + type: "string", + enum: ["x", "y", "z"] + } + } + } + }, + { + properties: { + type: { + type: "string", + enum: ["bands", "temporal", "vector", "other"] + } + } + } + ] + } }, compile: function (_, schema) { if (schema.subtype 
!= 'datacube') { From bbdcaffd2052fd5cfe152f4823da45f74ccdd369 Mon Sep 17 00:00:00 2001 From: Matthias Mohr Date: Thu, 6 Oct 2022 16:54:08 +0200 Subject: [PATCH 03/14] Add `NoDataAvailable` to load_result --- CHANGELOG.md | 5 +++-- proposals/load_result.json | 9 +++++++-- 2 files changed, 10 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6cdb52fb..6b0b8dc1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -39,8 +39,9 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Renamed `text_merge` to `text_concat` for better alignment with `array_concat` and existing implementations. - `apply_neighborhood`: Allow `null` as default value for units. - `run_udf`: Allow all data types instead of just objects in the `context` parameter. [#376](https://github.com/Open-EO/openeo-processes/issues/376) -- `load_collection` and `load_result`: Require at least one band if not set to `null`. [#372](https://github.com/Open-EO/openeo-processes/issues/372) -- `load_collection`: Added a `NoDataAvailable` exception +- `load_collection` and `load_result`: + - Require at least one band if not set to `null`. [#372](https://github.com/Open-EO/openeo-processes/issues/372) + - Added a `NoDataAvailable` exception - `inspect`: The parameter `message` has been moved to be the second argument. [#369](https://github.com/Open-EO/openeo-processes/issues/369) - `save_result`: Added a more concrete `DataCubeEmpty` exception. diff --git a/proposals/load_result.json b/proposals/load_result.json index 5e305fa4..7906fd29 100644 --- a/proposals/load_result.json +++ b/proposals/load_result.json @@ -1,7 +1,7 @@ { "id": "load_result", "summary": "Load batch job results", - "description": "Loads batch job results and returns them as a processable data cube. A batch job result can be loaded by ID or URL:\n\n* **ID**: The identifier for a finished batch job. 
The job must have been submitted by the authenticated user on the back-end currently connected to.\n* **URL**: The URL to the STAC metadata for a batch job result. This is usually a signed URL that is provided by some back-ends since openEO API version 1.1.0 through the `canonical` link relation in the batch job result metadata.\n\nIf supported by the underlying metadata and file format, the data that is added to the data cube can be restricted with the parameters `spatial_extent`, `temporal_extent` and `bands`.\n\n**Remarks:**\n\n* The bands (and all dimensions that specify nominal dimension labels) are expected to be ordered as specified in the metadata if the `bands` parameter is set to `null`.\n* If no additional parameter is specified this would imply that the whole data set is expected to be loaded. Due to the large size of many data sets, this is not recommended and may be optimized by back-ends to only load the data that is actually required after evaluating subsequent processes such as filters. This means that the values should be processed only after the data has been limited to the required extent and as a consequence also to a manageable size.", + "description": "Loads batch job results and returns them as a processable data cube. A batch job result can be loaded by ID or URL:\n\n* **ID**: The identifier for a finished batch job. The job must have been submitted by the authenticated user on the back-end currently connected to.\n* **URL**: The URL to the STAC metadata for a batch job result. This is usually a signed URL that is provided by some back-ends since openEO API version 1.1.0 through the `canonical` link relation in the batch job result metadata.\n\nIf supported by the underlying metadata and file format, the data that is added to the data cube can be restricted with the parameters `spatial_extent`, `temporal_extent` and `bands`. 
If no data is available for the given extents, a `NoDataAvailable` exception is thrown.\n\n**Remarks:**\n\n* The bands (and all dimensions that specify nominal dimension labels) are expected to be ordered as specified in the metadata if the `bands` parameter is set to `null`.\n* If no additional parameter is specified this would imply that the whole data set is expected to be loaded. Due to the large size of many data sets, this is not recommended and may be optimized by back-ends to only load the data that is actually required after evaluating subsequent processes such as filters. This means that the values should be processed only after the data has been limited to the required extent and as a consequence also to a manageable size.", "categories": [ "cubes", "import" @@ -29,7 +29,7 @@ }, { "name": "spatial_extent", - "description": "Limits the data to load from the batch job result to the specified bounding box or polygons.\n\nThe process puts a pixel into the data cube if the point at the pixel center intersects with the bounding box or any of the polygons (as defined in the Simple Features standard by the OGC).\n\nThe GeoJSON can be one of the following feature types:\n\n* A `Polygon` or `MultiPolygon` geometry,\n* a `Feature` with a `Polygon` or `MultiPolygon` geometry,\n* a `FeatureCollection` containing at least one `Feature` with `Polygon` or `MultiPolygon` geometries, or\n* a `GeometryCollection` containing `Polygon` or `MultiPolygon` geometries. To maximize interoperability, `GeometryCollection` should be avoided in favour of one of the alternatives above.\n\nSet this parameter to `null` to set no limit for the spatial extent. Be careful with this when loading large datasets! 
It is recommended to use this parameter instead of using ``filter_bbox()`` or ``filter_spatial()`` directly after loading unbounded data.", + "description": "Limits the data to load from the batch job result to the specified bounding box or polygons.\n\n* For raster data, the process loads the pixel into the data cube if the point at the pixel center intersects with the bounding box or any of the polygons (as defined in the Simple Features standard by the OGC).\n* For vector data, the process loads the geometry into the data cube if the geometry is fully within the bounding box or any of the polygons (as defined in the Simple Features standard by the OGC).\n\nThe GeoJSON can be one of the following feature types:\n\n* A `Polygon` or `MultiPolygon` geometry,\n* a `Feature` with a `Polygon` or `MultiPolygon` geometry,\n* a `FeatureCollection` containing at least one `Feature` with `Polygon` or `MultiPolygon` geometries, or\n* a `GeometryCollection` containing `Polygon` or `MultiPolygon` geometries. To maximize interoperability, `GeometryCollection` should be avoided in favour of one of the alternatives above.\n\nSet this parameter to `null` to set no limit for the spatial extent. Be careful with this when loading large datasets! It is recommended to use this parameter instead of using ``filter_bbox()`` or ``filter_spatial()`` directly after loading unbounded data.", "schema": [ { "title": "Bounding Box", @@ -209,5 +209,10 @@ "type": "object", "subtype": "datacube" } + }, + "exceptions": { + "NoDataAvailable": { + "message": "There is no data available for the given extents."
+ } } } From f313d29a2f6c4bdca7c038bd74296bf3e0b3813e Mon Sep 17 00:00:00 2001 From: Matthias Mohr Date: Thu, 6 Oct 2022 17:13:23 +0200 Subject: [PATCH 04/14] Add `filter_vector` --- CHANGELOG.md | 1 + load_collection.json | 2 +- proposals/filter_vector.json | 92 ++++++++++++++++++++++++++++++++++++ 3 files changed, 94 insertions(+), 1 deletion(-) create mode 100644 proposals/filter_vector.json diff --git a/CHANGELOG.md b/CHANGELOG.md index 6b0b8dc1..fd93c5c7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -19,6 +19,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Added - New processes in proposal state: + - `filter_vector` - `fit_class_random_forest` - `fit_regr_random_forest` - `flatten_dimensions` diff --git a/load_collection.json b/load_collection.json index dd7d424f..c1350946 100644 --- a/load_collection.json +++ b/load_collection.json @@ -18,7 +18,7 @@ }, { "name": "spatial_extent", - "description": "Limits the data to load from the collection to the specified bounding box or polygons.\n\n* For raster data, the process loads the pixel into the data cube if the point at the pixel center intersects with the bounding box or any of the polygons (as defined in the Simple Features standard by the OGC).\n* For vector data, the process loads the geometry into the data cube of the geometry is fully within the bounding box or any of the polygons (as defined in the Simple Features standard by the OGC).\n\nThe GeoJSON can be one of the following feature types:\n\n* A `Polygon` or `MultiPolygon` geometry,\n* a `Feature` with a `Polygon` or `MultiPolygon` geometry,\n* a `FeatureCollection` containing at least one `Feature` with `Polygon` or `MultiPolygon` geometries, or\n* a `GeometryCollection` containing `Polygon` or `MultiPolygon` geometries. To maximize interoperability, `GeometryCollection` should be avoided in favour of one of the alternatives above.\n\nSet this parameter to `null` to set no limit for the spatial extent. 
Be careful with this when loading large datasets! It is recommended to use this parameter instead of using ``filter_bbox()`` or ``filter_spatial()`` directly after loading unbounded data.", + "description": "Limits the data to load from the collection to the specified bounding box or polygons.\n\n* For raster data, the process loads the pixel into the data cube if the point at the pixel center intersects with the bounding box or any of the polygons (as defined in the Simple Features standard by the OGC).\n* For vector data, the process loads the geometry into the data cube of the geometry is fully *within* the bounding box or any of the polygons (as defined in the Simple Features standard by the OGC).\n\nThe GeoJSON can be one of the following feature types:\n\n* A `Polygon` or `MultiPolygon` geometry,\n* a `Feature` with a `Polygon` or `MultiPolygon` geometry,\n* a `FeatureCollection` containing at least one `Feature` with `Polygon` or `MultiPolygon` geometries, or\n* a `GeometryCollection` containing `Polygon` or `MultiPolygon` geometries. To maximize interoperability, `GeometryCollection` should be avoided in favour of one of the alternatives above.\n\nSet this parameter to `null` to set no limit for the spatial extent. Be careful with this when loading large datasets! It is recommended to use this parameter instead of using ``filter_bbox()`` or ``filter_spatial()`` directly after loading unbounded data.", "schema": [ { "title": "Bounding Box", diff --git a/proposals/filter_vector.json b/proposals/filter_vector.json new file mode 100644 index 00000000..e04bf9e3 --- /dev/null +++ b/proposals/filter_vector.json @@ -0,0 +1,92 @@ +{ + "id": "filter_vector", + "summary": "Spatial vector filter using geometries", + "description": "Limits the vector data cube to the specified geometries. 
The process works on geometries as defined in the Simple Features standard by the OGC.", + "categories": [ + "cubes", + "filter", + "vector" + ], + "experimental": true, + "parameters": [ + { + "name": "data", + "description": "A vector data cube with the candidate geometries.", + "schema": { + "type": "object", + "subtype": "datacube", + "dimensions": [ + { + "type": "vector" + } + ] + } + }, + { + "name": "geometries", + "description": "One or more base geometries used for filtering, given as GeoJSON or vector data cube.\n\nTo maximize interoperability, `GeometryCollection` and multi geometries (e.g. `MultiPolygon`) should be avoided.", + "schema": [ + { + "type": "object", + "subtype": "geojson" + }, + { + "type": "object", + "subtype": "datacube", + "dimensions": [ + { + "type": "vector" + } + ] + } + ] + }, + { + "name": "relation", + "description": "The spatial filter predicate for comparing the geometries provided through (a) `geometries` (base geometries) and (b) `data` (candidate geometries).", + "schema": { + "type": "string", + "enum": [ + "intersects", + "disjoint", + "equals", + "touches", + "crosses", + "overlaps", + "contains", + "within" + ] + }, + "optional": true, + "default": "intersects" + } + ], + "returns": { + "description": "A vector data cube restricted to the specified geometries.
The dimensions and dimension properties (name, type, labels, reference system and resolution) remain unchanged, except that the vector dimension has less (or the same) dimension labels.", + "schema": { + "type": "object", + "subtype": "datacube", + "dimensions": [ + { + "type": "spatial", + "axis": [ + "x", + "y" + ] + } + ] + } + }, + "links": [ + { + "href": "https://openeo.org/documentation/1.0/datacubes.html#filter", + "rel": "about", + "title": "Filters explained in the openEO documentation" + }, + { + "href": "http://www.opengeospatial.org/standards/sfa", + "rel": "about", + "title": "Simple Features standard by the OGC" + } + ] +} From f918166cc1321d76a02aa4ce9116d1b5193b7741 Mon Sep 17 00:00:00 2001 From: Matthias Mohr Date: Sun, 9 Oct 2022 13:26:49 +0200 Subject: [PATCH 05/14] Add geometry_type for dimension constraints --- mask_polygon.json | 7 ++++++- proposals/vector_to_random_points.json | 6 +++++- proposals/vector_to_regular_points.json | 6 +++++- tests/testHelpers.js | 18 +++++++++++++++++- 4 files changed, 33 insertions(+), 4 deletions(-) diff --git a/mask_polygon.json b/mask_polygon.json index 4e358fef..c46d17af 100644 --- a/mask_polygon.json +++ b/mask_polygon.json @@ -37,7 +37,12 @@ "subtype": "datacube", "dimensions": [ { - "type": "vector" + "type": "vector", + "geometry_type": [ + "Polygon", + "MultiPolygon", + "GeometryCollection" + ] } ] } diff --git a/proposals/vector_to_random_points.json b/proposals/vector_to_random_points.json index 4aa6a3d9..600ed547 100644 --- a/proposals/vector_to_random_points.json +++ b/proposals/vector_to_random_points.json @@ -88,7 +88,11 @@ "subtype": "datacube", "dimensions": [ { - "type": "vector" + "type": "vector", + "geometry_type": [ + "Point", + "MultiPoint" + ] } ] } diff --git a/proposals/vector_to_regular_points.json b/proposals/vector_to_regular_points.json index ce281485..20b44d9a 100644 --- a/proposals/vector_to_regular_points.json +++ b/proposals/vector_to_regular_points.json @@ -52,7 +52,11 @@ 
"subtype": "datacube", "dimensions": [ { - "type": "vector" + "type": "vector", + "geometry_type": [ + "Point", + "MultiPoint" + ] } ] } diff --git a/tests/testHelpers.js b/tests/testHelpers.js index 06c560a7..4d7a224f 100644 --- a/tests/testHelpers.js +++ b/tests/testHelpers.js @@ -147,7 +147,23 @@ async function getAjv() { properties: { type: { type: "string", - enum: ["bands", "temporal", "vector", "other"] + const: "vector" + }, + geometry_type: { + type: "array", + minItems: 1, + items: { + type: "string", + enum: ["Point", "LineString", "Polygon", "MultiPoint", "MultiLineString", "MultiPolygon", "GeometryCollection"] + } + } + } + }, + { + properties: { + type: { + type: "string", + enum: ["bands", "temporal", "other"] } } } From 4061c9405d59092be245ac70a7411bbaf9fad9b7 Mon Sep 17 00:00:00 2001 From: Matthias Mohr Date: Tue, 8 Nov 2022 15:39:35 +0100 Subject: [PATCH 06/14] Apply suggestions from code review Co-authored-by: Lukas Weidenholzer <17790923+LukeWeidenwalker@users.noreply.github.com> --- aggregate_spatial.json | 2 +- load_collection.json | 2 +- meta/subtype-schemas.json | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/aggregate_spatial.json b/aggregate_spatial.json index 2b406295..731e6b31 100644 --- a/aggregate_spatial.json +++ b/aggregate_spatial.json @@ -1,7 +1,7 @@ { "id": "aggregate_spatial", "summary": "Zonal statistics for geometries", - "description": "Aggregates statistics for one or more geometries (e.g. zonal statistics for polygons) over the spatial dimensions. The given data cube can have multiple additional dimension and for all these dimensions results will be computed individually.\n\nAn 'unbounded' aggregation over the full extent of the horizontal spatial dimensions can be computed with the process ``reduce_spatial()``.\n\nThis process passes a list of values to the reducer. 
The list of values has an undefined order, therefore processes such as ``last()`` and ``first()`` that depend on the order of the values will lead to unpredictable results.", + "description": "Aggregates statistics for one or more geometries (e.g. zonal statistics for polygons) over the spatial dimensions. The given data cube can have multiple additional dimensions and for all these dimensions results will be computed individually.\n\nAn 'unbounded' aggregation over the full extent of the horizontal spatial dimensions can be computed with the process ``reduce_spatial()``.\n\nThis process passes a list of values to the reducer. The list of values has an undefined order, therefore processes such as ``last()`` and ``first()`` that depend on the order of the values will lead to unpredictable results.", "categories": [ "cubes", "aggregate & resample" diff --git a/load_collection.json b/load_collection.json index c1350946..3759ab56 100644 --- a/load_collection.json +++ b/load_collection.json @@ -18,7 +18,7 @@ }, { "name": "spatial_extent", - "description": "Limits the data to load from the collection to the specified bounding box or polygons.\n\n* For raster data, the process loads the pixel into the data cube if the point at the pixel center intersects with the bounding box or any of the polygons (as defined in the Simple Features standard by the OGC).\n* For vector data, the process loads the geometry into the data cube of the geometry is fully *within* the bounding box or any of the polygons (as defined in the Simple Features standard by the OGC).\n\nThe GeoJSON can be one of the following feature types:\n\n* A `Polygon` or `MultiPolygon` geometry,\n* a `Feature` with a `Polygon` or `MultiPolygon` geometry,\n* a `FeatureCollection` containing at least one `Feature` with `Polygon` or `MultiPolygon` geometries, or\n* a `GeometryCollection` containing `Polygon` or `MultiPolygon` geometries. 
To maximize interoperability, `GeometryCollection` should be avoided in favour of one of the alternatives above.\n\nSet this parameter to `null` to set no limit for the spatial extent. Be careful with this when loading large datasets! It is recommended to use this parameter instead of using ``filter_bbox()`` or ``filter_spatial()`` directly after loading unbounded data.", + "description": "Limits the data to load from the collection to the specified bounding box or polygons.\n\n* For raster data, the process loads the pixel into the data cube if the point at the pixel center intersects with the bounding box or any of the polygons (as defined in the Simple Features standard by the OGC).\n* For vector data, the process loads the geometry into the data cube if the geometry is fully *within* the bounding box or any of the polygons (as defined in the Simple Features standard by the OGC).\n\nThe GeoJSON can be one of the following feature types:\n\n* A `Polygon` or `MultiPolygon` geometry,\n* a `Feature` with a `Polygon` or `MultiPolygon` geometry,\n* a `FeatureCollection` containing at least one `Feature` with `Polygon` or `MultiPolygon` geometries, or\n* a `GeometryCollection` containing `Polygon` or `MultiPolygon` geometries. To maximize interoperability, `GeometryCollection` should be avoided in favour of one of the alternatives above.\n\nSet this parameter to `null` to set no limit for the spatial extent. Be careful with this when loading large datasets! 
It is recommended to use this parameter instead of using ``filter_bbox()`` or ``filter_spatial()`` directly after loading unbounded data.", "schema": [ { "title": "Bounding Box", diff --git a/meta/subtype-schemas.json b/meta/subtype-schemas.json index b21917b0..17cd2b72 100644 --- a/meta/subtype-schemas.json +++ b/meta/subtype-schemas.json @@ -296,7 +296,7 @@ "type": "object", "subtype": "raster-cube", "title": "Raster data cube", - "description": "A vector data cube, which is a data cube with two dimension of type spatial (x and y). This has been deprecated in favour of `datacube`.", + "description": "A raster data cube, which is a data cube with two dimensions of type spatial (x and y). This has been deprecated in favour of `datacube`.", "deprecated": true }, "temporal-interval": { From 408c491b7fe9807fab9024ff5d95c4f55d89ddff Mon Sep 17 00:00:00 2001 From: Matthias Mohr Date: Tue, 8 Nov 2022 15:55:57 +0100 Subject: [PATCH 07/14] Mention TargetDimensionExists in aggregate_spatial --- aggregate_spatial.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/aggregate_spatial.json b/aggregate_spatial.json index 731e6b31..1842649d 100644 --- a/aggregate_spatial.json +++ b/aggregate_spatial.json @@ -80,7 +80,7 @@ }, { "name": "target_dimension", - "description": "By default (which is `null`), the process only computes the results and doesn't add a new dimension. If this parameter contains a new dimension name, the computation also stores information about the total count of pixels (valid + invalid pixels) and the number of valid pixels (see ``is_valid()``) for each computed value. These values are added as a new dimension.
The new dimension of type `other` has the dimension labels `value`, `total_count` and `valid_count`.", + "description": "By default (which is `null`), the process only computes the results and doesn't add a new dimension.\n\nIf this parameter contains a new dimension name, the computation also stores information about the total count of pixels (valid + invalid pixels) and the number of valid pixels (see ``is_valid()``) for each computed value. These values are added as a new dimension. The new dimension of type `other` has the dimension labels `value`, `total_count` and `valid_count`.\n\nFails with a `TargetDimensionExists` exception if a dimension with the specified name exists.", "schema": { "type": [ "string", From 7c48a742150918351e3e228bd1d450e98331cae8 Mon Sep 17 00:00:00 2001 From: Matthias Mohr Date: Thu, 10 Nov 2022 14:00:21 +0100 Subject: [PATCH 08/14] Define how merge_cubes works for vector data cubes + minor improvements --- merge_cubes.json | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/merge_cubes.json b/merge_cubes.json index 79deb389..655609bb 100644 --- a/merge_cubes.json +++ b/merge_cubes.json @@ -1,7 +1,7 @@ { "id": "merge_cubes", "summary": "Merge two data cubes", - "description": "The data cubes have to be compatible. A merge operation without overlap should be reversible with (a set of) filter operations for each of the two cubes. The process performs the join on overlapping dimensions, with the same name and type.\n\nAn overlapping dimension has the same name, type, reference system and resolution in both dimensions, but can have different labels. One of the dimensions can have different labels, for all other dimensions the labels must be equal. If data overlaps, the parameter `overlap_resolver` must be specified to resolve the overlap.\n\n**Examples for merging two data cubes:**\n\n1. 
Data cubes with the dimensions (`x`, `y`, `t`, `bands`) have the same dimension labels in `x`, `y` and `t`, but the labels for the dimension `bands` are `B1` and `B2` for the first cube and `B3` and `B4`. An overlap resolver is *not needed*. The merged data cube has the dimensions `x`, `y`, `t` and `bands` and the dimension `bands` has four dimension labels: `B1`, `B2`, `B3`, `B4`.\n2. Data cubes with the dimensions (`x`, `y`, `t`, `bands`) have the same dimension labels in `x`, `y` and `t`, but the labels for the dimension `bands` are `B1` and `B2` for the first data cube and `B2` and `B3` for the second. An overlap resolver is *required* to resolve overlap in band `B2`. The merged data cube has the dimensions `x`, `y`, `t` and `bands` and the dimension `bands` has three dimension labels: `B1`, `B2`, `B3`.\n3. Data cubes with the dimensions (`x`, `y`, `t`) have the same dimension labels in `x`, `y` and `t`. There are two options:\n 1. Keep the overlapping values separately in the merged data cube: An overlap resolver is *not needed*, but for each data cube you need to add a new dimension using ``add_dimension()``. The new dimensions must be equal, except that the labels for the new dimensions must differ by name. The merged data cube has the same dimensions and labels as the original data cubes, plus the dimension added with ``add_dimension()``, which has the two dimension labels after the merge.\n 2. Combine the overlapping values into a single value: An overlap resolver is *required* to resolve the overlap for all values. The merged data cube has the same dimensions and labels as the original data cubes, but all values have been processed by the overlap resolver.\n4. A data cube with dimensions (`x`, `y`, `t` / `bands`) or (`x`, `y`, `t`, `bands`) and another data cube with dimensions (`x`, `y`) have the same dimension labels in `x` and `y`. 
Merging them will join dimensions `x` and `y`, so the lower dimension cube is merged with each time step and band available in the higher dimensional cube. This can for instance be used to apply a digital elevation model to a spatio-temporal data cube. An overlap resolver is *required* to resolve the overlap for all pixels.\n\nAfter the merge, the dimensions with a natural/inherent label order (with a reference system this is each spatial and temporal dimensions) still have all dimension labels sorted. For other dimensions where there is no inherent order, including bands, the dimension labels keep the order in which they are present in the original data cubes and the dimension labels of `cube2` are appended to the dimension labels of `cube1`.", + "description": "The process performs the join on overlapping dimensions. The data cubes have to be compatible. A merge operation without overlap should be reversible with (a set of) filter operations for each of the two cubes. As such it is not possible to merge a vector and a raster data cube.\n\nOverlapping dimensions have the same name, type, reference system and resolution, but can have different labels. One of the dimensions can have different labels, for all other dimensions the labels must be equal. Equality for vector labels follows the definition in the Simple Features standard by the OGC. If data overlaps, the parameter `overlap_resolver` must be specified to resolve the overlap.\n\n**Examples for merging two data cubes:**\n\n1. Data cubes with the dimensions (`x`, `y`, `t`, `bands`) have the same dimension labels in `x`, `y` and `t`, but the labels for the dimension `bands` are `B1` and `B2` for the first cube and `B3` and `B4`. An overlap resolver is *not needed*. The merged data cube has the dimensions `x`, `y`, `t` and `bands` and the dimension `bands` has four dimension labels: `B1`, `B2`, `B3`, `B4`.\n2. 
Data cubes with the dimensions (`x`, `y`, `t`, `bands`) have the same dimension labels in `x`, `y` and `t`, but the labels for the dimension `bands` are `B1` and `B2` for the first data cube and `B2` and `B3` for the second. An overlap resolver is *required* to resolve overlap in band `B2`. The merged data cube has the dimensions `x`, `y`, `t` and `bands` and the dimension `bands` has three dimension labels: `B1`, `B2`, `B3`.\n3. Data cubes with the dimensions (`x`, `y`, `t`) have the same dimension labels in `x`, `y` and `t`. There are two options:\n 1. Keep the overlapping values separately in the merged data cube: An overlap resolver is *not needed*, but for each data cube you need to add a new dimension using ``add_dimension()``. The new dimensions must be equal, except that the labels for the new dimensions must differ by name. The merged data cube has the same dimensions and labels as the original data cubes, plus the dimension added with ``add_dimension()``, which has the two dimension labels after the merge.\n 2. Combine the overlapping values into a single value: An overlap resolver is *required* to resolve the overlap for all values. The merged data cube has the same dimensions and labels as the original data cubes, but all values have been processed by the overlap resolver.\n4. A data cube with dimensions (`x`, `y`, `t` / `bands`) or (`x`, `y`, `t`, `bands`) and another data cube with dimensions (`x`, `y`) have the same dimension labels in `x` and `y`. Merging them will join dimensions `x` and `y`, so the lower dimension cube is merged with each time step and band available in the higher dimensional cube. This can for instance be used to apply a digital elevation model to a spatio-temporal data cube. An overlap resolver is *required* to resolve the overlap for all pixels.\n\nAfter the merge, the dimensions with a natural/inherent label order (with a reference system this is each spatial and temporal dimensions) still have all dimension labels sorted. 
For other dimensions where there is no inherent order, including bands, the dimension labels keep the order in which they are present in the original data cubes and the dimension labels of `cube2` are appended to the dimension labels of `cube1`.", "categories": [ "cubes" ], @@ -90,6 +90,11 @@ "rel": "about", "href": "https://en.wikipedia.org/wiki/Reduction_Operator", "title": "Background information on reduction operators (binary reducers) by Wikipedia" + }, + { + "href": "http://www.opengeospatial.org/standards/sfa", + "rel": "about", + "title": "Simple Features standard by the OGC" } ] } From a2367519789ffebc1574d8b1f7df1c7148987ec0 Mon Sep 17 00:00:00 2001 From: Matthias Mohr Date: Thu, 10 Nov 2022 15:44:04 +0100 Subject: [PATCH 09/14] raster data cube -> data cube --- proposals/reduce_spatial.json | 2 +- proposals/resample_cube_temporal.json | 2 +- proposals/run_udf_externally.json | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/proposals/reduce_spatial.json b/proposals/reduce_spatial.json index 77d64d71..d27bd9cf 100644 --- a/proposals/reduce_spatial.json +++ b/proposals/reduce_spatial.json @@ -72,7 +72,7 @@ } ], "returns": { - "description": "A raster data cube with the newly computed values. It is missing the horizontal spatial dimensions, the number of dimensions decreases by two. The dimension properties (name, type, labels, reference system and resolution) for all other dimensions remain unchanged.", + "description": "A data cube with the newly computed values. It is missing the horizontal spatial dimensions, the number of dimensions decreases by two. 
The dimension properties (name, type, labels, reference system and resolution) for all other dimensions remain unchanged.", "schema": { "type": "object", "subtype": "datacube" diff --git a/proposals/resample_cube_temporal.json b/proposals/resample_cube_temporal.json index 5ebb8f47..9c6aac09 100644 --- a/proposals/resample_cube_temporal.json +++ b/proposals/resample_cube_temporal.json @@ -60,7 +60,7 @@ } ], "returns": { - "description": "A raster data cube with the same dimensions and the same dimension properties (name, type, labels, reference system and resolution) for all non-temporal dimensions. For the temporal dimension, the name and type remain unchanged, but the dimension labels, resolution and reference system may change.", + "description": "A data cube with the same dimensions and the same dimension properties (name, type, labels, reference system and resolution) for all non-temporal dimensions. For the temporal dimension, the name and type remain unchanged, but the dimension labels, resolution and reference system may change.", "schema": { "type": "object", "subtype": "datacube", diff --git a/proposals/run_udf_externally.json b/proposals/run_udf_externally.json index 9672eb71..d4713933 100644 --- a/proposals/run_udf_externally.json +++ b/proposals/run_udf_externally.json @@ -1,7 +1,7 @@ { "id": "run_udf_externally", "summary": "Run an externally hosted UDF container", - "description": "Runs a compatible UDF container that is either externally hosted by a service provider or running on a local machine of the user. The UDF container must follow the [openEO UDF specification](https://openeo.org/documentation/1.0/udfs.html).\n\nThe referenced UDF service can be executed in several processes such as ``aggregate_spatial()``, ``apply()``, ``apply_dimension()`` and ``reduce_dimension()``. In this case, an array is passed instead of a raster data cube. 
The user must ensure that the data is given in a way that the UDF code can make sense of it.", + "description": "Runs a compatible UDF container that is either externally hosted by a service provider or running on a local machine of the user. The UDF container must follow the [openEO UDF specification](https://openeo.org/documentation/1.0/udfs.html).\n\nThe referenced UDF service can be executed in several processes such as ``aggregate_spatial()``, ``apply()``, ``apply_dimension()`` and ``reduce_dimension()``. In this case, an array is passed instead of a data cube. The user must ensure that the data is given in a way that the UDF code can make sense of it.", "categories": [ "cubes", "import", @@ -66,4 +66,4 @@ "title": "openEO UDF repository" } ] -} \ No newline at end of file +} From 02153e7eba89be69a3d578a7cd2013a7f1c5f2d4 Mon Sep 17 00:00:00 2001 From: Matthias Mohr Date: Tue, 17 Jan 2023 11:57:45 +0100 Subject: [PATCH 10/14] Apply suggestions from code review --- CHANGELOG.md | 6 +++--- apply_dimension.json | 2 +- filter_bbox.json | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index fd93c5c7..3ed60ee3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,12 +7,12 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## Changes for vector cubes - Update the processes based on `raster-cubes` to work with `datacube` instead -- Renamed `create_raster_cube` to `create_data_cube` +- Rename `create_raster_cube` to `create_data_cube` - `add_dimension`: Added new dimension type `vector` - New definition for `aggregate_spatial`: - Allows more than 3 input dimensions - - Allow to not export statistics by changing the parameter `target_dimenaion` - - Clarified how the resulting vector cube looks like + - Allow to not export statistics by changing the parameter `target_dimension` + - Clarify how the resulting vector cube looks like ## Unreleased / Draft diff --git a/apply_dimension.json 
b/apply_dimension.json index 5f9a984a..7f8a5616 100644 --- a/apply_dimension.json +++ b/apply_dimension.json @@ -1,7 +1,7 @@ { "id": "apply_dimension", "summary": "Apply a process to all values along a dimension", - "description": "Applies a process to all values along a dimension of a data cube. For example, if the temporal dimension is specified the process will work on the values of a time series.\n\nThe process ``reduce_dimension()`` also applies a process to values along a dimension, but drops the dimension afterwards. The process ``apply()`` applies a process to each value in the data cube.\n\nThe target dimension is the source dimension if not specified otherwise in the `target_dimension` parameter. The values in the target dimension get replaced by the computed values. The name, type and reference system are preserved.\n\nThe dimension labels are preserved when the target dimension is the source dimension and the number of values in the source dimension is equal to the number of values computed by the process. Otherwise, the dimension labels will be incrementing integers starting from zero, which can be changed using ``rename_labels()`` afterwards. The number of labels will equal to the number of values computed by the process.", + "description": "Applies a process to all values along a dimension of a data cube. For example, if the temporal dimension is specified the process will work on the values of a time series.\n\nThe process ``reduce_dimension()`` also applies a process to values along a dimension, but drops the dimension afterwards. The process ``apply()`` applies a process to each value in the data cube.\n\nThe target dimension is the source dimension if not specified otherwise in the `target_dimension` parameter. The values in the target dimension get replaced by the computed values. 
The name, type and reference system are preserved.\n\nThe dimension labels are preserved when the target dimension is the source dimension and the number of values in the source dimension is equal to the number of values computed by the process. Otherwise, the dimension labels will be incrementing integers starting from zero, which can be changed using ``rename_labels()`` afterwards. The number of labels will be equal to the number of values computed by the process.", "categories": [ "cubes" ], diff --git a/filter_bbox.json b/filter_bbox.json index dc6575c9..4ec73d95 100644 --- a/filter_bbox.json +++ b/filter_bbox.json @@ -1,7 +1,7 @@ { "id": "filter_bbox", "summary": "Spatial filter using a bounding box", - "description": "Limits the data cube to the specified bounding box.\n\n* For raster data cubes, the filter retains a pixel in the data cube if the point at the pixel center intersects with the bounding box (as defined in the Simple Features standard by the OGC).\n* For vector data cubes, the filter retains the geometry into the data cube of the geometry is fully within the bounding box (as defined in the Simple Features standard by the OGC).", + "description": "Limits the data cube to the specified bounding box.\n\n* For raster data cubes, the filter retains a pixel in the data cube if the point at the pixel center intersects with the bounding box (as defined in the Simple Features standard by the OGC).\n* For vector data cubes, the filter retains the geometry in the data cube if the geometry is fully within the bounding box (as defined in the Simple Features standard by the OGC).", "categories": [ "cubes", "filter" From cb737f4749b689efbf94a62394901d6e419f2bab Mon Sep 17 00:00:00 2001 From: Matthias Mohr Date: Wed, 18 Jan 2023 17:21:40 +0100 Subject: [PATCH 11/14] Apply suggestions from code review --- CHANGELOG.md | 2 +- filter_bbox.json | 2 +- proposals/filter_vector.json | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md 
b/CHANGELOG.md index 3ed60ee3..3b465fc2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,7 +6,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## Changes for vector cubes -- Update the processes based on `raster-cubes` to work with `datacube` instead +- Update the processes based on `raster-cube` or `vector-cube` to work with `datacube` instead - Rename `create_raster_cube` to `create_data_cube` - `add_dimension`: Added new dimension type `vector` - New definition for `aggregate_spatial`: diff --git a/filter_bbox.json b/filter_bbox.json index 4ec73d95..436833c7 100644 --- a/filter_bbox.json +++ b/filter_bbox.json @@ -1,7 +1,7 @@ { "id": "filter_bbox", "summary": "Spatial filter using a bounding box", - "description": "Limits the data cube to the specified bounding box.\n\n* For raster data cubes, the filter retains a pixel in the data cube if the point at the pixel center intersects with the bounding box (as defined in the Simple Features standard by the OGC).\n* For vector data cubes, the filter retains the geometry in the data cube if the geometry is fully within the bounding box (as defined in the Simple Features standard by the OGC).", + "description": "Limits the data cube to the specified bounding box.\n\n* For raster data cubes, the filter retains a pixel in the data cube if the point at the pixel center intersects with the bounding box (as defined in the Simple Features standard by the OGC). Alternatively, ``filter_spatial()`` can be used to filter by geometry.\n* For vector data cubes, the filter retains the geometry in the data cube if the geometry is fully within the bounding box (as defined in the Simple Features standard by the OGC). 
Alternatively, ``filter_vector()`` can be used to filter by geometry.", "categories": [ "cubes", "filter" diff --git a/proposals/filter_vector.json b/proposals/filter_vector.json index e04bf9e3..47cc2cc1 100644 --- a/proposals/filter_vector.json +++ b/proposals/filter_vector.json @@ -24,7 +24,7 @@ }, { "name": "geometries", - "description": "One or more base geometries used for filtering, given as GeoJSON or vector data cube.\n\nTo maximize interoperability, `GeometryCollection` and multi geometries (e.g. `MultiPolygon`) should be be avoided.", + "description": "One or more base geometries used for filtering, given as GeoJSON or vector data cube. If multiple base geometries are provided, the union of them is used.\n\nTo maximize interoperability, `GeometryCollection` should be avoided.", "schema": [ { "type": "object", From 849b6115de2f0d74471f8f27d150d771d6806111 Mon Sep 17 00:00:00 2001 From: Matthias Mohr Date: Wed, 18 Jan 2023 17:52:35 +0100 Subject: [PATCH 12/14] Update processes according to discussions --- CHANGELOG.md | 4 ++-- add_dimension.json | 2 +- aggregate_spatial.json | 8 ++++---- filter_bbox.json | 4 ++-- filter_spatial.json | 4 ++-- load_collection.json | 2 +- mask.json | 2 +- mask_polygon.json | 5 ++--- merge_cubes.json | 5 ++++- proposals/filter_labels.json | 2 +- proposals/filter_vector.json | 14 +++++--------- proposals/fit_class_random_forest.json | 4 ++-- proposals/fit_regr_random_forest.json | 4 ++-- proposals/load_result.json | 2 +- proposals/vector_buffer.json | 6 +++--- proposals/vector_to_random_points.json | 6 +++--- proposals/vector_to_regular_points.json | 6 +++--- tests/testHelpers.js | 4 ++-- 18 files changed, 41 insertions(+), 43 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 3b465fc2..a386952c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -8,7 +8,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## Changes for vector cubes - Update the processes based on `raster-cube` or `vector-cube` to work with
`datacube` instead - Rename `create_raster_cube` to `create_data_cube` -- `add_dimension`: Added new dimension type `vector` +- `add_dimension`: Added new dimension type `geometries` - New definition for `aggregate_spatial`: - Allows more than 3 input dimensions - Allow to not export statistics by changing the parameter `target_dimension` @@ -53,7 +53,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Fixed - `aggregate_spatial`: - - Clarified that vector properties are preserved for vector data cubes and all GeoJSON Features. [#270](https://github.com/Open-EO/openeo-processes/issues/270) + - Clarified that feature properties are preserved for vector data cubes and all GeoJSON Features. [#270](https://github.com/Open-EO/openeo-processes/issues/270) - Clarified that a `TargetDimensionExists` exception is thrown if the target dimension exists. - `apply` and `array_apply`: Fixed broken references to the `absolute` process - `apply_neighborhood`: Parameter `overlap` was optional but had no default value and no schena for the default value defined. diff --git a/add_dimension.json b/add_dimension.json index a7c76d13..b156846b 100644 --- a/add_dimension.json +++ b/add_dimension.json @@ -40,9 +40,9 @@ "type": "string", "enum": [ "bands", + "geometries", "spatial", "temporal", - "vector", "other" ] }, diff --git a/aggregate_spatial.json b/aggregate_spatial.json index 1842649d..bda108dc 100644 --- a/aggregate_spatial.json +++ b/aggregate_spatial.json @@ -26,7 +26,7 @@ }, { "name": "geometries", - "description": "Geometries for which the aggregation will be computed. Vector properties are preserved for vector data cubes and all GeoJSON Features.\n\nOne value will be computed per label in the dimension of type `vector`, GeoJSON `Feature`, `Geometry` or `GeometryCollection`. For a `FeatureCollection` multiple values will be computed, one value per contained `Feature`. 
For example, a single value will be computed for a `MultiPolygon`, but two values will be computed for a `FeatureCollection` containing two polygons.\n\n- For **polygons**, the process considers all pixels for which the point at the pixel center intersects with the corresponding polygon (as defined in the Simple Features standard by the OGC).\n- For **points**, the process considers the closest pixel center.\n- For **lines** (line strings), the process considers all the pixels whose centers are closest to at least one point on the line.\n\nThus, pixels may be part of multiple geometries and be part of multiple aggregations.\n\nTo maximize interoperability, a nested `GeometryCollection` should be avoided. Furthermore, a `GeometryCollection` composed of a single type of geometries should be avoided in favour of the corresponding multi-part type (e.g. `MultiPolygon`).", + "description": "Geometries for which the aggregation will be computed. Feature properties are preserved for vector data cubes and all GeoJSON Features.\n\nOne value will be computed per label in the dimension of type `geometries`, GeoJSON `Feature`, `Geometry` or `GeometryCollection`. For a `FeatureCollection` multiple values will be computed, one value per contained `Feature`. For example, a single value will be computed for a `MultiPolygon`, but two values will be computed for a `FeatureCollection` containing two polygons.\n\n- For **polygons**, the process considers all pixels for which the point at the pixel center intersects with the corresponding polygon (as defined in the Simple Features standard by the OGC).\n- For **points**, the process considers the closest pixel center.\n- For **lines** (line strings), the process considers all the pixels whose centers are closest to at least one point on the line.\n\nThus, pixels may be part of multiple geometries and be part of multiple aggregations. 
No operation is applied to geometries that are outside of the bounds of the data.\n\nTo maximize interoperability, a nested `GeometryCollection` should be avoided. Furthermore, a `GeometryCollection` composed of a single type of geometries should be avoided in favour of the corresponding type (e.g. `MultiPolygon`).", "schema": [ { "type": "object", @@ -37,7 +37,7 @@ "subtype": "datacube", "dimensions": [ { - "type": "vector" + "type": "geometries" } ] } @@ -101,13 +101,13 @@ } ], "returns": { - "description": "A vector data cube with the computed results and restricted to the bounds of the geometries. The spatial dimensions is replaced by a vector dimension and if `target_dimension` is not `null`, a new dimension is added.", + "description": "A vector data cube with the computed results and restricted to the bounds of the geometries. The spatial dimensions are replaced by a geometries dimension and if `target_dimension` is not `null`, a new dimension is added.", "schema": { "type": "object", "subtype": "datacube", "dimensions": [ { - "type": "vector" + "type": "geometries" } ] } diff --git a/filter_bbox.json b/filter_bbox.json index 436833c7..818bcaaa 100644 --- a/filter_bbox.json +++ b/filter_bbox.json @@ -31,7 +31,7 @@ "subtype": "datacube", "dimensions": [ { - "type": "vector" + "type": "geometries" } ] } @@ -135,7 +135,7 @@ "subtype": "datacube", "dimensions": [ { - "type": "vector" + "type": "geometries" } ] } diff --git a/filter_spatial.json b/filter_spatial.json index 8d9db74b..1e0a1e49 100644 --- a/filter_spatial.json +++ b/filter_spatial.json @@ -1,7 +1,7 @@ { "id": "filter_spatial", "summary": "Spatial filter raster data cubes using geometries", - "description": "Limits the raster data cube over the spatial dimensions to the specified geometries.\n\n- For **polygons**, the filter retains a pixel in the data cube if the point at the pixel center intersects with at least one of the polygons (as defined in the Simple Features standard by the OGC).\n- For
**points**, the process considers the closest pixel center.\n- For **lines** (line strings), the process considers all the pixels whose centers are closest to at least one point on the line.\n\nMore specifically, pixels outside of the bounding box of the given geometry will not be available after filtering. All pixels inside the bounding box that are not retained will be set to `null` (no data).", + "description": "Limits the raster data cube over the spatial dimensions to the specified geometries.\n\n- For **polygons**, the filter retains a pixel in the data cube if the point at the pixel center intersects with at least one of the polygons (as defined in the Simple Features standard by the OGC).\n- For **points**, the process considers the closest pixel center.\n- For **lines** (line strings), the process considers all the pixels whose centers are closest to at least one point on the line.\n\nMore specifically, pixels outside of the bounding box of the given geometry will not be available after filtering. All pixels inside the bounding box that are not retained will be set to `null` (no data).\n\nAlternatively, use ``filter_bbox()`` to filter by bounding box.", "categories": [ "cubes", "filter" @@ -37,7 +37,7 @@ "subtype": "datacube", "dimensions": [ { - "type": "vector" + "type": "geometries" } ] } diff --git a/load_collection.json b/load_collection.json index 3759ab56..d9fdc5d6 100644 --- a/load_collection.json +++ b/load_collection.json @@ -104,7 +104,7 @@ "subtype": "datacube", "dimensions": [ { - "type": "vector" + "type": "geometries" } ] }, diff --git a/mask.json b/mask.json index 06e43d67..d5940b25 100644 --- a/mask.json +++ b/mask.json @@ -1,7 +1,7 @@ { "id": "mask", "summary": "Apply a raster mask", - "description": "Applies a mask to a raster data cube.
To apply a vector mask use ``mask_polygon()``.\n\nA mask is a raster data cube for which corresponding pixels among `data` and `mask` are compared and those pixels in `data` are replaced whose pixels in `mask` are non-zero (for numbers) or `true` (for boolean values). The pixel values are replaced with the value specified for `replacement`, which defaults to `null` (no data).\n\nThe data cubes have to be compatible so that each dimension in the mask must also be available in the raster data cube with the same name, type, reference system, resolution and labels. Dimensions can be missing in the mask with the result that the mask is applied to each label of the dimension in `data` that is missing in the data cube of the mask. The process fails if there's an incompatibility found between the raster data cube and the mask.", + "description": "Applies a mask to a raster data cube. To apply a polygon as a mask, use ``mask_polygon()``.\n\nA mask is a raster data cube for which corresponding pixels among `data` and `mask` are compared and those pixels in `data` are replaced whose pixels in `mask` are non-zero (for numbers) or `true` (for boolean values). The pixel values are replaced with the value specified for `replacement`, which defaults to `null` (no data).\n\nThe data cubes have to be compatible so that each dimension in the mask must also be available in the raster data cube with the same name, type, reference system, resolution and labels. Dimensions can be missing in the mask with the result that the mask is applied to each label of the dimension in `data` that is missing in the data cube of the mask. 
The process fails if there's an incompatibility found between the raster data cube and the mask.", "categories": [ "cubes", "masks" diff --git a/mask_polygon.json b/mask_polygon.json index c46d17af..be545ae8 100644 --- a/mask_polygon.json +++ b/mask_polygon.json @@ -37,11 +37,10 @@ "subtype": "datacube", "dimensions": [ { - "type": "vector", + "type": "geometries", "geometry_type": [ "Polygon", - "MultiPolygon", - "GeometryCollection" + "MultiPolygon" ] } ] diff --git a/merge_cubes.json b/merge_cubes.json index 655609bb..e41d5f2e 100644 --- a/merge_cubes.json +++ b/merge_cubes.json @@ -1,7 +1,7 @@ { "id": "merge_cubes", "summary": "Merge two data cubes", - "description": "The process performs the join on overlapping dimensions. The data cubes have to be compatible. A merge operation without overlap should be reversible with (a set of) filter operations for each of the two cubes. As such it is not possible to merge a vector and a raster data cube.\n\nOverlapping dimensions have the same name, type, reference system and resolution, but can have different labels. One of the dimensions can have different labels, for all other dimensions the labels must be equal. Equality for vector labels follows the definition in the Simple Features standard by the OGC. If data overlaps, the parameter `overlap_resolver` must be specified to resolve the overlap.\n\n**Examples for merging two data cubes:**\n\n1. Data cubes with the dimensions (`x`, `y`, `t`, `bands`) have the same dimension labels in `x`, `y` and `t`, but the labels for the dimension `bands` are `B1` and `B2` for the first cube and `B3` and `B4`. An overlap resolver is *not needed*. The merged data cube has the dimensions `x`, `y`, `t` and `bands` and the dimension `bands` has four dimension labels: `B1`, `B2`, `B3`, `B4`.\n2. 
Data cubes with the dimensions (`x`, `y`, `t`, `bands`) have the same dimension labels in `x`, `y` and `t`, but the labels for the dimension `bands` are `B1` and `B2` for the first data cube and `B2` and `B3` for the second. An overlap resolver is *required* to resolve overlap in band `B2`. The merged data cube has the dimensions `x`, `y`, `t` and `bands` and the dimension `bands` has three dimension labels: `B1`, `B2`, `B3`.\n3. Data cubes with the dimensions (`x`, `y`, `t`) have the same dimension labels in `x`, `y` and `t`. There are two options:\n 1. Keep the overlapping values separately in the merged data cube: An overlap resolver is *not needed*, but for each data cube you need to add a new dimension using ``add_dimension()``. The new dimensions must be equal, except that the labels for the new dimensions must differ by name. The merged data cube has the same dimensions and labels as the original data cubes, plus the dimension added with ``add_dimension()``, which has the two dimension labels after the merge.\n 2. Combine the overlapping values into a single value: An overlap resolver is *required* to resolve the overlap for all values. The merged data cube has the same dimensions and labels as the original data cubes, but all values have been processed by the overlap resolver.\n4. A data cube with dimensions (`x`, `y`, `t` / `bands`) or (`x`, `y`, `t`, `bands`) and another data cube with dimensions (`x`, `y`) have the same dimension labels in `x` and `y`. Merging them will join dimensions `x` and `y`, so the lower dimension cube is merged with each time step and band available in the higher dimensional cube. This can for instance be used to apply a digital elevation model to a spatio-temporal data cube. An overlap resolver is *required* to resolve the overlap for all pixels.\n\nAfter the merge, the dimensions with a natural/inherent label order (with a reference system this is each spatial and temporal dimensions) still have all dimension labels sorted. 
For other dimensions where there is no inherent order, including bands, the dimension labels keep the order in which they are present in the original data cubes and the dimension labels of `cube2` are appended to the dimension labels of `cube1`.", + "description": "The process performs the join on overlapping dimensions. The data cubes have to be compatible. A merge operation without overlap should be reversible with (a set of) filter operations for each of the two cubes. As such it is not possible to merge a vector and a raster data cube. It is also not possible to merge vector data cubes that contain different base geometry types (points, lines/line strings, polygons). The base geometry types can be merged with their corresponding multi geometry types. In case of such a conflict, the `IncompatibleGeometryTypes` exception is thrown.\n\nOverlapping dimensions have the same name, type, reference system and resolution, but can have different labels. One of the dimensions can have different labels, for all other dimensions the labels must be equal. Equality for geometries follows the definition in the Simple Features standard by the OGC. If data overlaps, the parameter `overlap_resolver` must be specified to resolve the overlap.\n\n**Examples for merging two data cubes:**\n\n1. Data cubes with the dimensions (`x`, `y`, `t`, `bands`) have the same dimension labels in `x`, `y` and `t`, but the labels for the dimension `bands` are `B1` and `B2` for the first cube and `B3` and `B4`. An overlap resolver is *not needed*. The merged data cube has the dimensions `x`, `y`, `t` and `bands` and the dimension `bands` has four dimension labels: `B1`, `B2`, `B3`, `B4`.\n2. Data cubes with the dimensions (`x`, `y`, `t`, `bands`) have the same dimension labels in `x`, `y` and `t`, but the labels for the dimension `bands` are `B1` and `B2` for the first data cube and `B2` and `B3` for the second. An overlap resolver is *required* to resolve overlap in band `B2`. 
The merged data cube has the dimensions `x`, `y`, `t` and `bands` and the dimension `bands` has three dimension labels: `B1`, `B2`, `B3`.\n3. Data cubes with the dimensions (`x`, `y`, `t`) have the same dimension labels in `x`, `y` and `t`. There are two options:\n 1. Keep the overlapping values separately in the merged data cube: An overlap resolver is *not needed*, but for each data cube you need to add a new dimension using ``add_dimension()``. The new dimensions must be equal, except that the labels for the new dimensions must differ by name. The merged data cube has the same dimensions and labels as the original data cubes, plus the dimension added with ``add_dimension()``, which has the two dimension labels after the merge.\n 2. Combine the overlapping values into a single value: An overlap resolver is *required* to resolve the overlap for all values. The merged data cube has the same dimensions and labels as the original data cubes, but all values have been processed by the overlap resolver.\n4. A data cube with dimensions (`x`, `y`, `t` / `bands`) or (`x`, `y`, `t`, `bands`) and another data cube with dimensions (`x`, `y`) have the same dimension labels in `x` and `y`. Merging them will join dimensions `x` and `y`, so the lower dimension cube is merged with each time step and band available in the higher dimensional cube. This can for instance be used to apply a digital elevation model to a spatio-temporal data cube. An overlap resolver is *required* to resolve the overlap for all pixels.\n\nAfter the merge, the dimensions with a natural/inherent label order (with a reference system this is each spatial and temporal dimensions) still have all dimension labels sorted. 
For other dimensions where there is no inherent order, including bands, the dimension labels keep the order in which they are present in the original data cubes and the dimension labels of `cube2` are appended to the dimension labels of `cube1`.", "categories": [ "cubes" ], @@ -83,6 +83,9 @@ "exceptions": { "OverlapResolverMissing": { "message": "Overlapping data cubes, but no overlap resolver has been specified." + }, + "IncompatibleGeometryTypes": { + "message": "The geometry types are not compatible and can't be merged." } }, "links": [ diff --git a/proposals/filter_labels.json b/proposals/filter_labels.json index cd077306..4b26fb1d 100644 --- a/proposals/filter_labels.json +++ b/proposals/filter_labels.json @@ -25,7 +25,7 @@ "parameters": [ { "name": "value", - "description": "A single dimension label to compare against. The data type of the parameter depends on the dimension labels set for the dimension.", + "description": "A single dimension label to compare against. The data type of the parameter depends on the dimension labels set for the dimension. Please note that for some dimension types a representation is used, e.g.\n\n* dates and/or times are usually strings compliant to [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601),\n* geometries can be a WKT string or an identifier.", "schema": [ { "type": "number" diff --git a/proposals/filter_vector.json b/proposals/filter_vector.json index 47cc2cc1..701a5549 100644 --- a/proposals/filter_vector.json +++ b/proposals/filter_vector.json @@ -1,7 +1,7 @@ { "id": "filter_vector", "summary": "Spatial vector filter using geometries", - "description": "Limits the vector data cube to the specified geometries. The process works on geometries as defined in the Simple Features standard by the OGC.", + "description": "Limits the vector data cube to the specified geometries. The process works on geometries as defined in the Simple Features standard by the OGC. 
Alternatively, use ``filter_bbox()`` to filter by bounding box.", "categories": [ "cubes", "filter", @@ -17,7 +17,7 @@ "subtype": "datacube", "dimensions": [ { - "type": "vector" + "type": "geometries" } ] } @@ -35,7 +35,7 @@ "subtype": "datacube", "dimensions": [ { - "type": "vector" + "type": "geometries" } ] } @@ -62,17 +62,13 @@ } ], "returns": { - "description": "A vector data cube restricted to the specified geometries. The dimensions and dimension properties (name, type, labels, reference system and resolution) remain unchanged, except that the vector dimension has less (or the same) dimension labels.", + "description": "A vector data cube restricted to the specified geometries. The dimensions and dimension properties (name, type, labels, reference system and resolution) remain unchanged, except that the geometries dimension has less (or the same) dimension labels.", "schema": { "type": "object", "subtype": "datacube", "dimensions": [ { - "type": "spatial", - "axis": [ - "x", - "y" - ] + "type": "geometries" } ] } diff --git a/proposals/fit_class_random_forest.json b/proposals/fit_class_random_forest.json index 11f0c9b9..f5995330 100644 --- a/proposals/fit_class_random_forest.json +++ b/proposals/fit_class_random_forest.json @@ -15,7 +15,7 @@ "subtype": "datacube", "dimensions": [ { - "type": "vector" + "type": "geometries" } ] } @@ -28,7 +28,7 @@ "subtype": "datacube", "dimensions": [ { - "type": "vector" + "type": "geometries" } ] } diff --git a/proposals/fit_regr_random_forest.json b/proposals/fit_regr_random_forest.json index f2a97ca7..a185e6d9 100644 --- a/proposals/fit_regr_random_forest.json +++ b/proposals/fit_regr_random_forest.json @@ -15,7 +15,7 @@ "subtype": "datacube", "dimensions": [ { - "type": "vector" + "type": "geometries" } ] } @@ -28,7 +28,7 @@ "subtype": "datacube", "dimensions": [ { - "type": "vector" + "type": "geometries" } ] } diff --git a/proposals/load_result.json b/proposals/load_result.json index 7906fd29..b2951740 100644 --- 
a/proposals/load_result.json +++ b/proposals/load_result.json @@ -115,7 +115,7 @@ "subtype": "datacube", "dimensions": [ { - "type": "vector" + "type": "geometries" } ] }, diff --git a/proposals/vector_buffer.json b/proposals/vector_buffer.json index 9ee12fa6..a7163964 100644 --- a/proposals/vector_buffer.json +++ b/proposals/vector_buffer.json @@ -9,7 +9,7 @@ "parameters": [ { "name": "geometries", - "description": "Geometries to apply the buffer on. Vector properties are preserved for vector data cubes and all GeoJSON Features.\n\nTo maximize interoperability, a nested `GeometryCollection` should be avoided. Furthermore, a `GeometryCollection` composed of a single type of geometries should be avoided in favour of the corresponding multi-part type (e.g. `MultiPolygon`).", + "description": "Geometries to apply the buffer on. Feature properties are preserved for vector data cubes and all GeoJSON Features.\n\nTo maximize interoperability, a nested `GeometryCollection` should be avoided. Furthermore, a `GeometryCollection` composed of a single type of geometries should be avoided in favour of the corresponding multi-part type (e.g. `MultiPolygon`).", "schema": [ { "type": "object", @@ -20,7 +20,7 @@ "subtype": "datacube", "dimensions": [ { - "type": "vector" + "type": "geometries" } ] } @@ -44,7 +44,7 @@ "subtype": "datacube", "dimensions": [ { - "type": "vector" + "type": "geometries" } ] } diff --git a/proposals/vector_to_random_points.json b/proposals/vector_to_random_points.json index 600ed547..f060b3c9 100644 --- a/proposals/vector_to_random_points.json +++ b/proposals/vector_to_random_points.json @@ -1,7 +1,7 @@ { "id": "vector_to_random_points", "summary": "Sample random points from geometries", - "description": "Generate a vector data cube of points by sampling random points from input geometries. At least one point is sampled per input geometry. Vector properties are preserved.\n\nIf `geometry_count` and `total_count` are both unrestricted (i.e. 
set to `null`, which is the default), one sample per geometry is used.", + "description": "Generate a vector data cube of points by sampling random points from input geometries. At least one point is sampled per input geometry. Feature properties are preserved.\n\nIf `geometry_count` and `total_count` are both unrestricted (i.e. set to `null`, which is the default), one sample per geometry is used.", "categories": [ "cubes", "vector" @@ -21,7 +21,7 @@ "subtype": "datacube", "dimensions": [ { - "type": "vector" + "type": "geometries" } ] } @@ -88,7 +88,7 @@ "subtype": "datacube", "dimensions": [ { - "type": "vector", + "type": "geometries", "geometry_type": [ "Point", "MultiPoint" diff --git a/proposals/vector_to_regular_points.json b/proposals/vector_to_regular_points.json index 20b44d9a..992b7ef9 100644 --- a/proposals/vector_to_regular_points.json +++ b/proposals/vector_to_regular_points.json @@ -1,7 +1,7 @@ { "id": "vector_to_regular_points", "summary": "Sample regular points from geometries", - "description": "Generate a vector data cube of points by sampling regularly-spaced points from input geometries. Vector properties are preserved.", + "description": "Generate a vector data cube of points by sampling regularly-spaced points from input geometries. 
Feature properties are preserved.", "categories": [ "cubes", "vector" @@ -21,7 +21,7 @@ "subtype": "datacube", "dimensions": [ { - "type": "vector" + "type": "geometries" } ] } @@ -52,7 +52,7 @@ "subtype": "datacube", "dimensions": [ { - "type": "vector", + "type": "geometries", "geometry_type": [ "Point", "MultiPoint" diff --git a/tests/testHelpers.js b/tests/testHelpers.js index 4d7a224f..6305049b 100644 --- a/tests/testHelpers.js +++ b/tests/testHelpers.js @@ -147,14 +147,14 @@ async function getAjv() { properties: { type: { type: "string", - const: "vector" + const: "geometries" }, geometry_type: { type: "array", minItems: 1, items: { type: "string", - enum: ["Point", "LineString", "Polygon", "MultiPoint", "MultiLineString", "MultiPolygon", "GeometryCollection"] + enum: ["Point", "LineString", "Polygon", "MultiPoint", "MultiLineString", "MultiPolygon"] } } } From 9260dfdb03d6a1713b011bc428e1305a7f336b43 Mon Sep 17 00:00:00 2001 From: Matthias Mohr Date: Wed, 18 Jan 2023 18:01:11 +0100 Subject: [PATCH 13/14] Deprecated `GeometryCollections` are not supported any longer. #389 (#395) --- CHANGELOG.md | 1 + aggregate_spatial.json | 5 +++-- load_collection.json | 4 ++-- mask_polygon.json | 5 +++-- proposals/filter_vector.json | 5 +++-- proposals/load_result.json | 4 ++-- proposals/vector_buffer.json | 5 +++-- proposals/vector_to_random_points.json | 5 +++-- proposals/vector_to_regular_points.json | 5 +++-- 9 files changed, 23 insertions(+), 16 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index a386952c..177a2d92 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -49,6 +49,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Removed - The `examples` folder has been migrated to the [openEO Community Examples](https://github.com/Open-EO/openeo-community-examples/tree/main/processes) repository. +- Deprecated `GeometryCollections` are not supported any longer. 
[#389](https://github.com/Open-EO/openeo-processes/issues/389) ### Fixed diff --git a/aggregate_spatial.json b/aggregate_spatial.json index bda108dc..58c5a70b 100644 --- a/aggregate_spatial.json +++ b/aggregate_spatial.json @@ -26,11 +26,12 @@ }, { "name": "geometries", - "description": "Geometries for which the aggregation will be computed. Feature properties are preserved for vector data cubes and all GeoJSON Features.\n\nOne value will be computed per label in the dimension of type `geometries`, GeoJSON `Feature`, `Geometry` or `GeometryCollection`. For a `FeatureCollection` multiple values will be computed, one value per contained `Feature`. For example, a single value will be computed for a `MultiPolygon`, but two values will be computed for a `FeatureCollection` containing two polygons.\n\n- For **polygons**, the process considers all pixels for which the point at the pixel center intersects with the corresponding polygon (as defined in the Simple Features standard by the OGC).\n- For **points**, the process considers the closest pixel center.\n- For **lines** (line strings), the process considers all the pixels whose centers are closest to at least one point on the line.\n\nThus, pixels may be part of multiple geometries and be part of multiple aggregations. No operation is applied to geometries that are outside of the bounds of the data.\n\nTo maximize interoperability, a nested `GeometryCollection` should be avoided. Furthermore, a `GeometryCollection` composed of a single type of geometries should be avoided in favour of the corresponding type (e.g. `MultiPolygon`).", + "description": "Geometries for which the aggregation will be computed. Feature properties are preserved for vector data cubes and all GeoJSON Features.\n\nOne value will be computed per label in the dimension of type `geometries`, GeoJSON `Feature`, `Geometry` or `GeometryCollection`. For a `FeatureCollection` multiple values will be computed, one value per contained `Feature`. 
For example, a single value will be computed for a `MultiPolygon`, but two values will be computed for a `FeatureCollection` containing two polygons.\n\n- For **polygons**, the process considers all pixels for which the point at the pixel center intersects with the corresponding polygon (as defined in the Simple Features standard by the OGC).\n- For **points**, the process considers the closest pixel center.\n- For **lines** (line strings), the process considers all the pixels whose centers are closest to at least one point on the line.\n\nThus, pixels may be part of multiple geometries and be part of multiple aggregations. No operation is applied to geometries that are outside of the bounds of the data.", "schema": [ { "type": "object", - "subtype": "geojson" + "subtype": "geojson", + "description": "The GeoJSON type `GeometryCollection` is not supported." }, { "type": "object", diff --git a/load_collection.json b/load_collection.json index d9fdc5d6..1a5296e2 100644 --- a/load_collection.json +++ b/load_collection.json @@ -18,7 +18,7 @@ }, { "name": "spatial_extent", - "description": "Limits the data to load from the collection to the specified bounding box or polygons.\n\n* For raster data, the process loads the pixel into the data cube if the point at the pixel center intersects with the bounding box or any of the polygons (as defined in the Simple Features standard by the OGC).\n* For vector data, the process loads the geometry into the data cube if the geometry is fully *within* the bounding box or any of the polygons (as defined in the Simple Features standard by the OGC).\n\nThe GeoJSON can be one of the following feature types:\n\n* A `Polygon` or `MultiPolygon` geometry,\n* a `Feature` with a `Polygon` or `MultiPolygon` geometry,\n* a `FeatureCollection` containing at least one `Feature` with `Polygon` or `MultiPolygon` geometries, or\n* a `GeometryCollection` containing `Polygon` or `MultiPolygon` geometries. 
To maximize interoperability, `GeometryCollection` should be avoided in favour of one of the alternatives above.\n\nSet this parameter to `null` to set no limit for the spatial extent. Be careful with this when loading large datasets! It is recommended to use this parameter instead of using ``filter_bbox()`` or ``filter_spatial()`` directly after loading unbounded data.", + "description": "Limits the data to load from the collection to the specified bounding box or polygons.\n\n* For raster data, the process loads the pixel into the data cube if the point at the pixel center intersects with the bounding box or any of the polygons (as defined in the Simple Features standard by the OGC).\n* For vector data, the process loads the geometry into the data cube if the geometry is fully *within* the bounding box or any of the polygons (as defined in the Simple Features standard by the OGC).\n\nThe GeoJSON can be one of the following feature types:\n\n* A `Polygon` or `MultiPolygon` geometry,\n* a `Feature` with a `Polygon` or `MultiPolygon` geometry, or\n* a `FeatureCollection` containing at least one `Feature` with `Polygon` or `MultiPolygon` geometries.\n\nSet this parameter to `null` to set no limit for the spatial extent. Be careful with this when loading large datasets! It is recommended to use this parameter instead of using ``filter_bbox()`` or ``filter_spatial()`` directly after loading unbounded data.", "schema": [ { "title": "Bounding Box", @@ -93,7 +93,7 @@ }, { "title": "GeoJSON", - "description": "Limits the data cube to the bounding box of the given geometries. For raster data, all pixels inside the bounding box that do not intersect with any of the polygons will be set to no data (`null`).", + "description": "Limits the data cube to the bounding box of the given geometries. 
For raster data, all pixels inside the bounding box that do not intersect with any of the polygons will be set to no data (`null`).\n\nThe GeoJSON type `GeometryCollection` is not supported.", "type": "object", "subtype": "geojson" }, diff --git a/mask_polygon.json b/mask_polygon.json index be545ae8..f79db016 100644 --- a/mask_polygon.json +++ b/mask_polygon.json @@ -26,11 +26,12 @@ }, { "name": "mask", - "description": "A GeoJSON object or a vector data cube containing at least one polygon. The provided vector data can be one of the following:\n\n* A `Polygon` or `MultiPolygon` geometry,\n* a `Feature` with a `Polygon` or `MultiPolygon` geometry,\n* a `FeatureCollection` containing at least one `Feature` with `Polygon` or `MultiPolygon` geometries, or\n* a `GeometryCollection` containing `Polygon` or `MultiPolygon` geometries. To maximize interoperability, `GeometryCollection` should be avoided in favour of one of the alternatives above.", + "description": "A GeoJSON object or a vector data cube containing at least one polygon. The provided vector data can be one of the following:\n\n* A `Polygon` or `MultiPolygon` geometry,\n* a `Feature` with a `Polygon` or `MultiPolygon` geometry, or\n* a `FeatureCollection` containing at least one `Feature` with `Polygon` or `MultiPolygon` geometries.", "schema": [ { "type": "object", - "subtype": "geojson" + "subtype": "geojson", + "description": "The GeoJSON type `GeometryCollection` is not supported." }, { "type": "object", diff --git a/proposals/filter_vector.json b/proposals/filter_vector.json index 701a5549..46279fa7 100644 --- a/proposals/filter_vector.json +++ b/proposals/filter_vector.json @@ -24,11 +24,12 @@ }, { "name": "geometries", - "description": "One or more base geometries used for filtering, given as GeoJSON or vector data cube. 
If multiple base geometries are provided, the union of them is used.\n\nTo maximize interoperability, `GeometryCollection` should be be avoided.", + "description": "One or more base geometries used for filtering, given as GeoJSON or vector data cube. If multiple base geometries are provided, the union of them is used.", "schema": [ { "type": "object", - "subtype": "geojson" + "subtype": "geojson", + "description": "The GeoJSON type `GeometryCollection` is not supported." }, { "type": "object", diff --git a/proposals/load_result.json b/proposals/load_result.json index b2951740..6d67f4d8 100644 --- a/proposals/load_result.json +++ b/proposals/load_result.json @@ -29,7 +29,7 @@ }, { "name": "spatial_extent", - "description": "Limits the data to load from the batch job result to the specified bounding box or polygons.\n\n* For raster data, the process loads the pixel into the data cube if the point at the pixel center intersects with the bounding box or any of the polygons (as defined in the Simple Features standard by the OGC).\n* For vector data, the process loads the geometry into the data cube of the geometry is fully within the bounding box or any of the polygons (as defined in the Simple Features standard by the OGC).\n\nThe GeoJSON can be one of the following feature types:\n\n* A `Polygon` or `MultiPolygon` geometry,\n* a `Feature` with a `Polygon` or `MultiPolygon` geometry,\n* a `FeatureCollection` containing at least one `Feature` with `Polygon` or `MultiPolygon` geometries, or\n* a `GeometryCollection` containing `Polygon` or `MultiPolygon` geometries. To maximize interoperability, `GeometryCollection` should be avoided in favour of one of the alternatives above.\n\nSet this parameter to `null` to set no limit for the spatial extent. Be careful with this when loading large datasets! 
It is recommended to use this parameter instead of using ``filter_bbox()`` or ``filter_spatial()`` directly after loading unbounded data.", + "description": "Limits the data to load from the batch job result to the specified bounding box or polygons.\n\n* For raster data, the process loads the pixel into the data cube if the point at the pixel center intersects with the bounding box or any of the polygons (as defined in the Simple Features standard by the OGC).\n* For vector data, the process loads the geometry into the data cube if the geometry is fully within the bounding box or any of the polygons (as defined in the Simple Features standard by the OGC).\n\nThe GeoJSON can be one of the following feature types:\n\n* A `Polygon` or `MultiPolygon` geometry,\n* a `Feature` with a `Polygon` or `MultiPolygon` geometry, or\n* a `FeatureCollection` containing at least one `Feature` with `Polygon` or `MultiPolygon` geometries.\n\nSet this parameter to `null` to set no limit for the spatial extent. Be careful with this when loading large datasets! It is recommended to use this parameter instead of using ``filter_bbox()`` or ``filter_spatial()`` directly after loading unbounded data.", "schema": [ { "title": "Bounding Box", @@ -104,7 +104,7 @@ }, { "title": "GeoJSON", - "description": "Limits the data cube to the bounding box of the given geometry. All pixels inside the bounding box that do not intersect with any of the polygons will be set to no data (`null`).", + "description": "Limits the data cube to the bounding box of the given geometries. 
For raster data, all pixels inside the bounding box that do not intersect with any of the polygons will be set to no data (`null`).\n\nThe GeoJSON type `GeometryCollection` is not supported.", "type": "object", "subtype": "geojson" }, diff --git a/proposals/vector_buffer.json b/proposals/vector_buffer.json index a7163964..ad30030b 100644 --- a/proposals/vector_buffer.json +++ b/proposals/vector_buffer.json @@ -9,11 +9,12 @@ "parameters": [ { "name": "geometries", - "description": "Geometries to apply the buffer on. Feature properties are preserved for vector data cubes and all GeoJSON Features.\n\nTo maximize interoperability, a nested `GeometryCollection` should be avoided. Furthermore, a `GeometryCollection` composed of a single type of geometries should be avoided in favour of the corresponding multi-part type (e.g. `MultiPolygon`).", + "description": "Geometries to apply the buffer on. Feature properties are preserved for vector data cubes and all GeoJSON Features.", "schema": [ { "type": "object", - "subtype": "geojson" + "subtype": "geojson", + "description": "The GeoJSON type `GeometryCollection` is not supported." }, { "type": "object", diff --git a/proposals/vector_to_random_points.json b/proposals/vector_to_random_points.json index f060b3c9..9a018849 100644 --- a/proposals/vector_to_random_points.json +++ b/proposals/vector_to_random_points.json @@ -10,11 +10,12 @@ "parameters": [ { "name": "data", - "description": "Input geometries for sample extraction.\n\nTo maximize interoperability, a nested `GeometryCollection` should be avoided. Furthermore, a `GeometryCollection` composed of a single type of geometries should be avoided in favour of the corresponding multi-part type (e.g. `MultiPolygon`).", + "description": "Input geometries for sample extraction.", "schema": [ { "type": "object", - "subtype": "geojson" + "subtype": "geojson", + "description": "The GeoJSON type `GeometryCollection` is not supported." 
}, { "type": "object", diff --git a/proposals/vector_to_regular_points.json b/proposals/vector_to_regular_points.json index 992b7ef9..d49a333d 100644 --- a/proposals/vector_to_regular_points.json +++ b/proposals/vector_to_regular_points.json @@ -10,11 +10,12 @@ "parameters": [ { "name": "data", - "description": "Input geometries for sample extraction.\n\nTo maximize interoperability, a nested `GeometryCollection` should be avoided. Furthermore, a `GeometryCollection` composed of a single type of geometries should be avoided in favour of the corresponding multi-part type (e.g. `MultiPolygon`).", + "description": "Input geometries for sample extraction.", "schema": [ { "type": "object", - "subtype": "geojson" + "subtype": "geojson", + "description": "The GeoJSON type `GeometryCollection` is not supported." }, { "type": "object", From 28ee2ac26c203bf7900f8c4c47ec4c7942bd9c28 Mon Sep 17 00:00:00 2001 From: Matthias Mohr Date: Wed, 18 Jan 2023 18:09:03 +0100 Subject: [PATCH 14/14] Further clarifications --- CHANGELOG.md | 17 ++++++------- aggregate_spatial.json | 2 +- filter_spatial.json | 2 +- proposals/fit_class_random_forest.json | 35 +++++++++++++++++++------- proposals/fit_regr_random_forest.json | 35 +++++++++++++++++++------- 5 files changed, 61 insertions(+), 30 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 177a2d92..6f394d38 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,16 +4,6 @@ All notable changes to this project will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
-## Changes for vector cubes - -- Update the processes based on `raster-cube` or `vector-cube` to work with `datacube` instead -- Rename `create_raster_cube` to `create_data_cube` -- `add_dimension`: Added new dimension type `geometries` -- New definition for `aggregate_spatial`: - - Allows more than 3 input dimensions - - Allow to not export statistics by changing the parameter `target_dimension` - - Clarify how the resulting vector cube looks like - ## Unreleased / Draft ### Added @@ -30,6 +20,7 @@ - `vector_buffer` - `vector_to_random_points` - `vector_to_regular_points` +- `add_dimension`: Added new dimension type `geometries`. [#68](https://github.com/Open-EO/openeo-processes/issues/68) ### Changed @@ -45,6 +36,12 @@ - Added a `NoDataAvailable` exception - `inspect`: The parameter `message` has been moved to be the second argument. [#369](https://github.com/Open-EO/openeo-processes/issues/369) - `save_result`: Added a more concrete `DataCubeEmpty` exception. +- New definition for `aggregate_spatial`: + - Allows more than 3 input dimensions [#126](https://github.com/Open-EO/openeo-processes/issues/126) + - Allow to not export statistics by changing the parameter `target_dimension` [#366](https://github.com/Open-EO/openeo-processes/issues/366) + - Clarify what the resulting vector data cube looks like [#356](https://github.com/Open-EO/openeo-processes/issues/356) +- Renamed `create_raster_cube` to `create_data_cube`. [#68](https://github.com/Open-EO/openeo-processes/issues/68) +- Updated the processes based on the subtypes `raster-cube` or `vector-cube` to work with the subtype `datacube` instead. 
[#68](https://github.com/Open-EO/openeo-processes/issues/68) ### Removed diff --git a/aggregate_spatial.json b/aggregate_spatial.json index 58c5a70b..380e34c0 100644 --- a/aggregate_spatial.json +++ b/aggregate_spatial.json @@ -26,7 +26,7 @@ }, { "name": "geometries", - "description": "Geometries for which the aggregation will be computed. Feature properties are preserved for vector data cubes and all GeoJSON Features.\n\nOne value will be computed per label in the dimension of type `geometries`, GeoJSON `Feature`, `Geometry` or `GeometryCollection`. For a `FeatureCollection` multiple values will be computed, one value per contained `Feature`. For example, a single value will be computed for a `MultiPolygon`, but two values will be computed for a `FeatureCollection` containing two polygons.\n\n- For **polygons**, the process considers all pixels for which the point at the pixel center intersects with the corresponding polygon (as defined in the Simple Features standard by the OGC).\n- For **points**, the process considers the closest pixel center.\n- For **lines** (line strings), the process considers all the pixels whose centers are closest to at least one point on the line.\n\nThus, pixels may be part of multiple geometries and be part of multiple aggregations. No operation is applied to geometries that are outside of the bounds of the data.", + "description": "Geometries for which the aggregation will be computed. Feature properties are preserved for vector data cubes and all GeoJSON Features.\n\nOne value will be computed per label in the dimension of type `geometries`, GeoJSON `Feature` or `Geometry`. For a `FeatureCollection` multiple values will be computed, one value per contained `Feature`. 
For example, a single value will be computed for a `MultiPolygon`, but two values will be computed for a `FeatureCollection` containing two polygons.\n\n- For **polygons**, the process considers all pixels for which the point at the pixel center intersects with the corresponding polygon (as defined in the Simple Features standard by the OGC).\n- For **points**, the process considers the closest pixel center.\n- For **lines** (line strings), the process considers all the pixels whose centers are closest to at least one point on the line.\n\nThus, pixels may be part of multiple geometries and be part of multiple aggregations. No operation is applied to geometries that are outside of the bounds of the data.", "schema": [ { "type": "object", diff --git a/filter_spatial.json b/filter_spatial.json index 1e0a1e49..b6d7a7de 100644 --- a/filter_spatial.json +++ b/filter_spatial.json @@ -26,7 +26,7 @@ }, { "name": "geometries", - "description": "One or more geometries used for filtering, given as GeoJSON or vector data cube.\n\nLimits the data cube to the bounding box of the given geometries. No implicit masking gets applied. To mask the pixels of the data cube use ``mask_polygon()``.", + "description": "One or more geometries used for filtering, given as GeoJSON or vector data cube. If multiple geometries are provided, the union of them is used.\n\nLimits the data cube to the bounding box of the given geometries. No implicit masking gets applied. To mask the pixels of the data cube use ``mask_polygon()``.", "schema": [ { "type": "object", diff --git a/proposals/fit_class_random_forest.json b/proposals/fit_class_random_forest.json index f5995330..1b6f299f 100644 --- a/proposals/fit_class_random_forest.json +++ b/proposals/fit_class_random_forest.json @@ -10,15 +10,32 @@ { "name": "predictors", "description": "The predictors for the classification model as a vector data cube. 
Aggregated to the features (vectors) of the target input variable.", - "schema": { - "type": "object", - "subtype": "datacube", - "dimensions": [ - { - "type": "geometries" - } - ] - } + "schema": [ + { + "type": "object", + "subtype": "datacube", + "dimensions": [ + { + "type": "geometries" + }, + { + "type": "bands" + } + ] + }, + { + "type": "object", + "subtype": "datacube", + "dimensions": [ + { + "type": "geometries" + }, + { + "type": "other" + } + ] + } + ] }, { "name": "target", diff --git a/proposals/fit_regr_random_forest.json b/proposals/fit_regr_random_forest.json index a185e6d9..121af96d 100644 --- a/proposals/fit_regr_random_forest.json +++ b/proposals/fit_regr_random_forest.json @@ -10,15 +10,32 @@ { "name": "predictors", "description": "The predictors for the regression model as a vector data cube. Aggregated to the features (vectors) of the target input variable.", - "schema": { - "type": "object", - "subtype": "datacube", - "dimensions": [ - { - "type": "geometries" - } - ] - } + "schema": [ + { + "type": "object", + "subtype": "datacube", + "dimensions": [ + { + "type": "geometries" + }, + { + "type": "bands" + } + ] + }, + { + "type": "object", + "subtype": "datacube", + "dimensions": [ + { + "type": "geometries" + }, + { + "type": "other" + } + ] + } + ] }, { "name": "target",