Skip to content

Commit

Permalink
fixup! fixup! Issue #114/#141 convert inline GeoJSON in aggregate_spa…
Browse files Browse the repository at this point in the history
…tial to VectorCube
  • Loading branch information
soxofaan committed Oct 6, 2022
1 parent 4a6d5ea commit 2864f25
Show file tree
Hide file tree
Showing 6 changed files with 152 additions and 54 deletions.
40 changes: 40 additions & 0 deletions openeo_driver/datacube.py
Original file line number Diff line number Diff line change
Expand Up @@ -243,6 +243,11 @@ def write_assets(
format_info = IOFORMATS.get(format)
# TODO: check if format can be used for vector data?
path = directory / f"vectorcube.{format_info.extension}"

if format_info.format == "JSON":
# TODO: eliminate this legacy format?
return self._write_legacy_aggregate_polygon_result_json(directory=directory)

self._as_geopandas_df().to_file(path, driver=format_info.fiona_driver)

if not format_info.multi_file:
Expand Down Expand Up @@ -275,6 +280,41 @@ def write_assets(
def to_multipolygon(self) -> shapely.geometry.MultiPolygon:
    """Merge all geometries of this vector cube into one (multi)polygon via unary union."""
    geometry_series = self._geometries.geometry
    return shapely.ops.unary_union(geometry_series)

def _write_legacy_aggregate_polygon_result_json(
    self, directory: Path
) -> Dict[str, StacAsset]:
    """
    Export this vector cube to the legacy ``AggregatePolygonResult`` JSON format.

    :param directory: target directory for the generated asset file(s).
    :return: mapping of asset name to STAC asset metadata, as produced by the
        delegated ``write_assets`` call.
    :raises ValueError: when the cube's dimension layout is not one of the
        supported (geometries, t[, bands]) / (geometries, bands) configurations.
    """
    # TODO: eliminate this legacy, non-standard format?
    # Local import to avoid a circular dependency between datacube and save_result modules.
    from openeo_driver.save_result import AggregatePolygonResult, JSONResult

    def write_spatiotemporal(cube: xarray.DataArray) -> Dict[str, StacAsset]:
        """Export a (geometries, t, bands) cube through legacy AggregatePolygonResult."""
        # Normalize dimension order so each time slice is a (geometries, bands) array.
        cube = cube.transpose("t", self.DIM_GEOMETRIES, "bands")
        # Build {timestamp: nested-list-of-values} mapping; `t.item()` yields the
        # raw coordinate value (presumably an ISO-ish timestamp label — TODO confirm).
        timeseries = {
            t.item(): t_slice.values.tolist()
            for t, t_slice in zip(cube.coords["t"], cube)
        }
        result = AggregatePolygonResult(timeseries=timeseries, regions=self)
        # NOTE(review): the "ignored" path segment suggests the delegate derives its
        # own filenames from the parent directory — confirm against write_assets impl.
        return result.write_assets(directory=directory / "ignored")

    def write_spatial(cube: xarray.DataArray) -> Dict[str, StacAsset]:
        """Export a timeless (geometries, bands) cube through legacy JSONResult."""
        cube = cube.transpose(self.DIM_GEOMETRIES, "bands")
        result = JSONResult(data=cube.values.tolist())
        return result.write_assets(directory / "ignored")

    cube = self._cube
    # TODO: more flexible temporal/band dimension detection?
    # Dispatch on the exact dimension tuple of the internal cube.
    if cube.dims == (self.DIM_GEOMETRIES, "t"):
        # No band dimension: inject a singleton "bands" axis so the
        # spatiotemporal writer can treat all layouts uniformly.
        return write_spatiotemporal(cube.expand_dims({"bands": ["band"]}, axis=-1))
    elif cube.dims == (self.DIM_GEOMETRIES, "t", "bands"):
        return write_spatiotemporal(cube)
    elif cube.dims == (self.DIM_GEOMETRIES, "bands"):
        return write_spatial(cube)
    else:
        raise ValueError(
            f"Unsupported cube configuration {cube.dims} for _write_legacy_aggregate_polygon_result_json"
        )

def get_bounding_box(self) -> Tuple[float, float, float, float]:
    """Return the total bounds of all geometries as a (west, south, east, north) tuple."""
    west, south, east, north = self._geometries.total_bounds
    return (west, south, east, north)

Expand Down
11 changes: 9 additions & 2 deletions openeo_driver/dummy/dummy_backend.py
Original file line number Diff line number Diff line change
Expand Up @@ -227,8 +227,15 @@ def assert_polygon_sequence(geometries: Union[Sequence, BaseMultipartGeometry])
dims += (self.metadata.band_dimension.name,)
coords[self.metadata.band_dimension.name] = self.metadata.band_names
shape = [len(coords[d]) for d in dims]
data = numpy.arange(numpy.prod(shape)).reshape(shape)
cube = xarray.DataArray(data=data, dims=dims, coords=coords, name="aggregate_spatial")
data = numpy.arange(numpy.prod(shape), dtype="float")
data[0] = 2.345
data[1] = float("nan")
cube = xarray.DataArray(
data=data.reshape(shape),
dims=dims,
coords=coords,
name="aggregate_spatial",
)
return geometries.with_cube(cube=cube, flatten_prefix="agg")
elif isinstance(geometries, str):
geometries = [geometry for geometry in DelayedVector(geometries).geometries]
Expand Down
4 changes: 2 additions & 2 deletions openeo_driver/save_result.py
Original file line number Diff line number Diff line change
Expand Up @@ -161,7 +161,7 @@ def __init__(self, data, format: str = "json", options: dict = None):
super().__init__(format=format, options=options)
self.data = data

def write_assets(self, path:str) -> Dict[str, StacAsset]:
def write_assets(self, path: Union[str, Path]) -> Dict[str, StacAsset]:
"""
Save generated assets into a directory, return asset metadata.
TODO: can an asset also be a full STAC item? In principle, one openEO job can either generate a full STAC collection, or one STAC item with multiple assets...
Expand Down Expand Up @@ -220,7 +220,7 @@ def get_data(self):
# By default, keep original (proprietary) result format
return self.data

def write_assets(self, directory: str) -> Dict[str, StacAsset]:
def write_assets(self, directory: Union[str, Path]) -> Dict[str, StacAsset]:
"""
Save generated assets into a directory, return asset metadata.
TODO: can an asset also be a full STAC item? In principle, one openEO job can either generate a full STAC collection, or one STAC item with multiple assets...
Expand Down
2 changes: 1 addition & 1 deletion tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@ def udp_registry(backend_implementation) -> UserDefinedProcesses:
def flask_app(backend_implementation) -> flask.Flask:
app = build_app(
backend_implementation=backend_implementation,
# error_handling=False
# error_handling=False,
)
app.config.from_mapping(TEST_APP_CONFIG)
return app
Expand Down
2 changes: 1 addition & 1 deletion tests/data/pg/1.0/no_nested_json_result.json
Original file line number Diff line number Diff line change
Expand Up @@ -67,7 +67,7 @@
"result": true,
"process_id": "save_result",
"arguments": {
"format": "GTIFF",
"format": "GeoJSON",
"data": {
"from_node": "aggregatespatial1"
},
Expand Down
147 changes: 99 additions & 48 deletions tests/test_views_execute.py
Original file line number Diff line number Diff line change
Expand Up @@ -734,17 +734,33 @@ def test_aggregate_spatial(api):
"2015-07-06T00:00:00Z": [[2.345]],
"2015-08-22T00:00:00Z": [[None]]
}
params = dummy_backend.last_load_collection_call('S2_FAPAR_CLOUDCOVER')
assert params["spatial_extent"] == {"west": 7.02, "south": 51.29, "east": 7.65, "north": 51.75, "crs": 'EPSG:4326'}
assert params["aggregate_spatial_geometries"] == shapely.geometry.shape({
"type": "Polygon",
"coordinates": [[[7.02, 51.75], [7.65, 51.74], [7.65, 51.29], [7.04, 51.31], [7.02, 51.75]]]
})
params = dummy_backend.last_load_collection_call("S2_FAPAR_CLOUDCOVER")
assert params["spatial_extent"] == {
"west": 7.02,
"south": 51.29,
"east": 7.65,
"north": 51.75,
"crs": "EPSG:4326",
}
assert params["aggregate_spatial_geometries"] == DriverVectorCube.from_geojson(
{
"type": "Polygon",
"coordinates": [
[
[7.02, 51.75],
[7.65, 51.74],
[7.65, 51.29],
[7.04, 51.31],
[7.02, 51.75],
]
],
}
)


def test_execute_aggregate_spatial_spatial_cube(api100):
resp = api100.check_result("aggregate_spatial_spatial_cube.json")
assert resp.json == [[100.0, 100.1], [101.0, 101.1]]
assert resp.json == [[2.345, None], [2.0, 3.0]]


@pytest.mark.parametrize(["geometries", "expected"], [
Expand Down Expand Up @@ -789,37 +805,51 @@ def test_aggregate_spatial_vector_cube_basic(api100, feature_collection_test_pat
assert params["spatial_extent"] == {"west": 1, "south": 1, "east": 5, "north": 4, "crs": "EPSG:4326"}
assert isinstance(params["aggregate_spatial_geometries"], DriverVectorCube)

assert res.json == DictSubSet({
"type": "FeatureCollection",
"features": [
DictSubSet({
"type": "Feature",
"geometry": {"type": "Polygon", "coordinates": [[[1, 1], [3, 1], [2, 3], [1, 1]]]},
"properties": {
"id": "first", "pop": 1234,
"agg~2015-07-06T00:00:00Z~B02": 0,
"agg~2015-07-06T00:00:00Z~B03": 1,
"agg~2015-07-06T00:00:00Z~B04": 2,
"agg~2015-08-22T00:00:00Z~B02": 3,
"agg~2015-08-22T00:00:00Z~B03": 4,
"agg~2015-08-22T00:00:00Z~B04": 5,
},
}),
DictSubSet({
"type": "Feature",
"geometry": {"type": "Polygon", "coordinates": [[[4, 2], [5, 4], [3, 4], [4, 2]]]},
"properties": {
"id": "second", "pop": 5678,
"agg~2015-07-06T00:00:00Z~B02": 6,
"agg~2015-07-06T00:00:00Z~B03": 7,
"agg~2015-07-06T00:00:00Z~B04": 8,
"agg~2015-08-22T00:00:00Z~B02": 9,
"agg~2015-08-22T00:00:00Z~B03": 10,
"agg~2015-08-22T00:00:00Z~B04": 11,
},
}),
]
})
assert res.json == DictSubSet(
{
"type": "FeatureCollection",
"features": [
DictSubSet(
{
"type": "Feature",
"geometry": {
"type": "Polygon",
"coordinates": [[[1, 1], [3, 1], [2, 3], [1, 1]]],
},
"properties": {
"id": "first",
"pop": 1234,
"agg~2015-07-06T00:00:00Z~B02": 2.345,
"agg~2015-07-06T00:00:00Z~B03": None,
"agg~2015-07-06T00:00:00Z~B04": 2.0,
"agg~2015-08-22T00:00:00Z~B02": 3.0,
"agg~2015-08-22T00:00:00Z~B03": 4.0,
"agg~2015-08-22T00:00:00Z~B04": 5.0,
},
}
),
DictSubSet(
{
"type": "Feature",
"geometry": {
"type": "Polygon",
"coordinates": [[[4, 2], [5, 4], [3, 4], [4, 2]]],
},
"properties": {
"id": "second",
"pop": 5678,
"agg~2015-07-06T00:00:00Z~B02": 6.0,
"agg~2015-07-06T00:00:00Z~B03": 7.0,
"agg~2015-07-06T00:00:00Z~B04": 8.0,
"agg~2015-08-22T00:00:00Z~B02": 9.0,
"agg~2015-08-22T00:00:00Z~B03": 10.0,
"agg~2015-08-22T00:00:00Z~B04": 11.0,
},
}
),
],
}
)


@pytest.mark.parametrize(["info", "preprocess_pg", "aggregate_data", "p1_properties", "p2_properties"], [
Expand All @@ -828,9 +858,14 @@ def test_aggregate_spatial_vector_cube_basic(api100, feature_collection_test_pat
{},
"lc",
{
"id": "first", "pop": 1234,
"agg~2015-07-06T00:00:00Z~B02": 0, "agg~2015-07-06T00:00:00Z~B03": 1, "agg~2015-07-06T00:00:00Z~B04": 2,
"agg~2015-08-22T00:00:00Z~B02": 3, "agg~2015-08-22T00:00:00Z~B03": 4, "agg~2015-08-22T00:00:00Z~B04": 5,
"id": "first",
"pop": 1234,
"agg~2015-07-06T00:00:00Z~B02": 2.345,
"agg~2015-07-06T00:00:00Z~B03": None,
"agg~2015-07-06T00:00:00Z~B04": 2,
"agg~2015-08-22T00:00:00Z~B02": 3,
"agg~2015-08-22T00:00:00Z~B03": 4,
"agg~2015-08-22T00:00:00Z~B04": 5,
},
{
"id": "second", "pop": 5678,
Expand All @@ -850,7 +885,13 @@ def test_aggregate_spatial_vector_cube_basic(api100, feature_collection_test_pat
}},
},
"r",
{"id": "first", "pop": 1234, "agg~B02": 0, "agg~B03": 1, "agg~B04": 2},
{
"id": "first",
"pop": 1234,
"agg~B02": 2.345,
"agg~B03": None,
"agg~B04": 2,
},
{"id": "second", "pop": 5678, "agg~B02": 3, "agg~B03": 4, "agg~B04": 5},
),
(
Expand All @@ -865,10 +906,20 @@ def test_aggregate_spatial_vector_cube_basic(api100, feature_collection_test_pat
}}
},
"r",
{"id": "first", "pop": 1234, "agg~2015-07-06T00:00:00Z": 0, "agg~2015-08-22T00:00:00Z": 1},
{"id": "second", "pop": 5678, "agg~2015-07-06T00:00:00Z": 2, "agg~2015-08-22T00:00:00Z": 3},
),
(
{
"id": "first",
"pop": 1234,
"agg~2015-07-06T00:00:00Z": 2.345,
"agg~2015-08-22T00:00:00Z": None,
},
{
"id": "second",
"pop": 5678,
"agg~2015-07-06T00:00:00Z": 2,
"agg~2015-08-22T00:00:00Z": 3,
},
),
(
"no-time-nor-bands",
{
"r1": {"process_id": "reduce_dimension", "arguments": {
Expand All @@ -887,8 +938,8 @@ def test_aggregate_spatial_vector_cube_basic(api100, feature_collection_test_pat
}},
},
"r2",
{"id": "first", "pop": 1234, "agg": 0},
{"id": "second", "pop": 5678, "agg": 1},
{"id": "first", "pop": 1234, "agg": 2.345},
{"id": "second", "pop": 5678, "agg": None},
),
])
def test_aggregate_spatial_vector_cube_dimensions(
Expand Down

0 comments on commit 2864f25

Please sign in to comment.