Commit

Using getmetadata for EDR
ernstdevreedeKNMI committed Sep 16, 2024
1 parent 90d7b1b commit 61566d8
Showing 7 changed files with 309 additions and 434 deletions.
69 changes: 38 additions & 31 deletions python/python_fastapi_server/routers/edr.py
@@ -32,9 +32,9 @@
from .edr_utils import (
generate_max_age,
get_base_url,
get_collectioninfo_for_id,
get_edr_collections,
get_collectioninfo_from_md,
get_time_values_for_range,
get_metadata,
)

logger = logging.getLogger(__name__)
@@ -121,15 +121,19 @@ async def rest_get_edr_collections(request: Request, response: Response):
links.append(self_link)
collections: list[Collection] = []
ttl_set = set()
edr_collections = get_edr_collections()
for edr_coll in edr_collections:
coll, ttl = await get_collectioninfo_for_id(edr_coll)
if coll:
collections.append(coll)
if ttl is not None:
ttl_set.add(ttl)
else:
logger.warning("Unable to fetch WMS GetCapabilities for %s", edr_coll)
metadata = await get_metadata()
for coll_name in metadata.keys():
print("COLL:", coll_name)
try:
coll = get_collectioninfo_from_md(
{coll_name: metadata[coll_name]}, coll_name
)
if coll:
collections.append(coll)
else:
logger.warning("Unable to fetch WMS GetMetadata for %s", coll_name)
except Exception as exc:
print("ERR", exc)
collections_data = Collections(links=links, collections=collections)
if ttl_set:
response.headers["cache-control"] = generate_max_age(min(ttl_set))
@@ -145,7 +149,10 @@ async def rest_get_edr_collection_by_id(collection_name: str, response: Response
"""
GET Returns collection information for given collection id
"""
collection, ttl = await get_collectioninfo_for_id(collection_name)
metadata = await get_metadata(collection_name)
ttl = None

collection = get_collectioninfo_from_md(metadata, collection_name)
if ttl is not None:
response.headers["cache-control"] = generate_max_age(ttl)
if collection is None:
@@ -219,25 +226,25 @@ def get_fixed_api():
version=edrApiApp.version,
routes=edrApiApp.routes,
)
for pth in api["paths"].values():
if "parameters" in pth["get"]:
for param in pth["get"]["parameters"]:
if param["in"] == "query" and param["name"] == "datetime":
param["style"] = "form"
param["explode"] = False
param["schema"] = {
"type": "string",
}
if "schema" in param:
if "anyOf" in param["schema"]:
for itany in param["schema"]["anyOf"]:
if itany.get("type") == "null":
print("NULL found p")

if "CompactAxis" in api["components"]["schemas"]:
comp = api["components"]["schemas"]["CompactAxis"]
if "exclusiveMinimum" in comp["properties"]["num"]:
comp["properties"]["num"]["exclusiveMinimum"] = False
# for pth in api["paths"].values():
# if "parameters" in pth["get"]:
# for param in pth["get"]["parameters"]:
# if param["in"] == "query" and param["name"] == "datetime":
# param["style"] = "form"
# param["explode"] = False
# param["schema"] = {
# "type": "string",
# }
# if "schema" in param:
# if "anyOf" in param["schema"]:
# for itany in param["schema"]["anyOf"]:
# if itany.get("type") == "null":
# print("NULL found p")

# if "CompactAxis" in api["components"]["schemas"]:
# comp = api["components"]["schemas"]["CompactAxis"]
# if "exclusiveMinimum" in comp["properties"]["num"]:
# comp["properties"]["num"]["exclusiveMinimum"] = False

return api
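
An editor's aside on the new /collections handler earlier in this file: iterating over the get_metadata() result and wrapping each collection in its own try/except keeps one malformed collection from breaking the whole listing. A minimal, self-contained sketch of that pattern; the metadata shape and the build step are assumptions inferred from the calls above, not the real get_collectioninfo_from_md:

import logging

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


def build_collections(metadata: dict) -> list:
    """Build one collection entry per metadata key, skipping entries that fail."""
    collections = []
    for coll_name, layers in metadata.items():
        try:
            # Stand-in for get_collectioninfo_from_md({coll_name: layers}, coll_name)
            coll = {"id": coll_name, "parameter_names": sorted(layers)}
            collections.append(coll)
        except (KeyError, TypeError) as exc:
            # One broken collection should not take down the /collections response
            logger.warning("Skipping collection %s: %s", coll_name, exc)
    return collections


# Example with a metadata dict shaped the way get_metadata() appears to return it:
sample = {
    "harmonie": {"air_temperature__at_2m": {"dims": {}}, "precipitation_flux": {"dims": {}}},
}
print(build_collections(sample))
# [{'id': 'harmonie', 'parameter_names': ['air_temperature__at_2m', 'precipitation_flux']}]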

17 changes: 14 additions & 3 deletions python/python_fastapi_server/routers/edr_covjson.py
@@ -31,14 +31,24 @@
SYMBOL_TYPE_URL = "http://www.opengis.net/def/uom/UCUM"


def covjson_from_resp(dats, vertical_name, custom_name, collection_name):
def covjson_from_resp(dats, metadata):
"""
Returns a coverage json from a Adaguc WMS GetFeatureInfo request
"""
covjson_list = []
for dat in dats:
if len(dat["data"]):
(lon, lat) = dat["point"]["coords"].split(",")
custom_name = None
vertical_name = None
for param_dim in metadata[dat["name"]]["dims"].values():
print("Checking dim ", param_dim["name"])
if param_dim["name"] not in ["x", "y", "reference_time", "time"]:
if not param_dim["hidden"]:
if "isvertical" in param_dim:
vertical_name = param_dim["name"]
else:
custom_name = param_dim["name"]
lat = float(lat)
lon = float(lon)
dims = makedims(dat["dims"], dat["data"])
@@ -49,7 +59,8 @@ def covjson_from_resp(dats, vertical_name, custom_name, collection_name):
vertical_steps = getdimvals(dims, "elevation")

custom_dim_values = []
if custom_name is not None:
if custom_name is not None and len(custom_name) > 0:
print("CUSTOM_DIM:", custom_name, dat)
custom_dim_values = getdimvals(dims, custom_name)

valstack = []
@@ -72,7 +83,7 @@

parameters: dict[str, CovJsonParameter] = {}
ranges = {}
param_metadata = get_param_metadata(dat["name"], collection_name)
param_metadata = get_param_metadata(metadata[dat["name"]])
symbol = CovJsonSymbol(
value=param_metadata["parameter_unit"], type=SYMBOL_TYPE_URL
)
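
The per-parameter dimension scan added at the top of covjson_from_resp is the heart of this file's change: any dimension that is not x, y, time or reference_time and is not hidden becomes either the vertical dimension (when it carries an isvertical flag) or the custom dimension. A standalone sketch of that rule, with the shape of the dims entries assumed from the code above:

def classify_dims(dims: dict):
    """Return (vertical_name, custom_name) from a dims mapping.

    Assumes each entry looks like {"name": ..., "hidden": bool, "isvertical": ...},
    mirroring how covjson_from_resp reads metadata[dat["name"]]["dims"].
    """
    vertical_name = None
    custom_name = None
    for dim in dims.values():
        if dim["name"] in ("x", "y", "reference_time", "time") or dim["hidden"]:
            continue
        if "isvertical" in dim:
            vertical_name = dim["name"]
        else:
            custom_name = dim["name"]
    return vertical_name, custom_name


# Example:
dims = {
    "elevation": {"name": "elevation", "hidden": False, "isvertical": True},
    "member": {"name": "member", "hidden": False},
}
print(classify_dims(dims))  # ('elevation', 'member')
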
124 changes: 70 additions & 54 deletions python/python_fastapi_server/routers/edr_cube.py
@@ -18,10 +18,9 @@

from .covjsonresponse import CovJSONResponse
from .edr_utils import (
parse_config_file,
get_ref_times_for_coll,
get_edr_collections,
instance_to_iso,
get_metadata,
)
from .netcdf_to_covjson import netcdf_to_covjson
from .ogcapi_tools import call_adaguc
@@ -91,30 +90,34 @@ async def get_coll_inst_cube(
"res_y",
]

edr_collectioninfo = get_edr_collections()[collection_name]
metadata = await get_metadata(collection_name)

vertical_dim = ""
vertical_name = None
custom_name = None
first_layer_name = parameter_name.split(",")[0]
for param_dim in metadata[collection_name][first_layer_name]["dims"].values():
if not param_dim["hidden"]:
if "isvertical" in param_dim:
vertical_name = param_dim["name"]
else:
custom_name = param_dim["name"]
if z_par:
if edr_collectioninfo.get("vertical_name"):
vertical_name = edr_collectioninfo.get("vertical_name")
if vertical_name is not None:
if vertical_name.upper() == "ELEVATION":
vertical_dim = f"{edr_collectioninfo.get('vertical_name')}={z_par}"
vertical_dim = f"{vertical_name}={z_par}"
else:
vertical_dim = f"DIM_{edr_collectioninfo.get('vertical_name')}={z_par}"
vertical_dim = f"DIM_{vertical_name}={z_par}"

custom_dims = [k for k in request.query_params if k not in allowed_params]
custom_dim_parameter = ""
logger.info("custom dims: %s", custom_dims)
logger.info("custom dims: %s %s", custom_dims, custom_name)
if len(custom_dims) > 0:
for custom_dim in custom_dims:
custom_dim_parameter += (
f"&DIM_{custom_dim}={request.query_params[custom_dim]}"
)

dataset = edr_collectioninfo["dataset"]
ref_times = await get_ref_times_for_coll(
dataset, edr_collectioninfo["parameters"][0]["name"]
)
ref_times = get_ref_times_for_coll(metadata[collection_name])
if not instance and len(ref_times) > 0:
instance = ref_times[-1]

@@ -125,51 +128,64 @@
else:
res_queryterm = ""

collection_info = get_edr_collections().get(collection_name)
if "dataset" in collection_info:
logger.info("callADAGUC by dataset")
dataset = collection_info["dataset"]
translate_names, translate_dims = parse_config_file(dataset)
coveragejsons = []
parameters = {}
for parameter_name in parameter_names:
if instance is None:
urlrequest = (
f"dataset={dataset}&service=wcs&version=1.1.1&request=getcoverage&format=NetCDF4&crs=EPSG:4326&coverage={parameter_name}"
+ f"&bbox={bbox}&time={datetime_par}"
+ (
f"&{custom_dim_parameter}"
if len(custom_dim_parameter) > 0
else ""
)
+ (f"&{vertical_dim}" if len(vertical_dim) > 0 else "")
+ res_queryterm
)
else:
urlrequest = (
f"dataset={dataset}&service=wcs&request=getcoverage&format=NetCDF4&crs=EPSG:4326&coverage={parameter_name}"
+ f"&bbox={bbox}&time={datetime_par}&dim_reference_time={instance_to_iso(instance)}"
+ (
f"&{custom_dim_parameter}"
if len(custom_dim_parameter) > 0
else ""
)
+ (f"&{vertical_dim}" if len(vertical_dim) > 0 else "")
+ res_queryterm
)

status, response, _ = await call_adaguc(url=urlrequest.encode("UTF-8"))
logger.info("status: %d", status)
result_dataset = Dataset(f"{parameter_name}.nc", memory=response.getvalue())

coveragejson = netcdf_to_covjson(
result_dataset, translate_names, translate_dims
logger.info("callADAGUC by dataset")
dataset = collection_name

translate_names = get_translate_names(metadata[collection_name])
translate_dims = get_translate_dims(metadata[collection_name])

coveragejsons = []
parameters = {}
datetime_arg = datetime_par
if datetime_arg is None:
datetime_arg = "*"
for parameter_name in parameter_names:
if instance is None:
urlrequest = (
f"dataset={dataset}&service=wcs&version=1.1.1&request=getcoverage&format=NetCDF4&crs=EPSG:4326&coverage={parameter_name}"
+ f"&bbox={bbox}&time={datetime_arg}"
+ (f"&{custom_dim_parameter}" if len(custom_dim_parameter) > 0 else "")
+ (f"&{vertical_dim}" if len(vertical_dim) > 0 else "")
+ res_queryterm
)
if coveragejson is not None:
coveragejsons.append(coveragejson)
parameters = parameters | coveragejson.parameters
else:
urlrequest = (
f"dataset={dataset}&service=wcs&request=getcoverage&format=NetCDF4&crs=EPSG:4326&coverage={parameter_name}"
+ f"&bbox={bbox}&time={datetime_arg}&dim_reference_time={instance_to_iso(instance)}"
+ (f"&{custom_dim_parameter}" if len(custom_dim_parameter) > 0 else "")
+ (f"&{vertical_dim}" if len(vertical_dim) > 0 else "")
+ res_queryterm
)

status, response, _ = await call_adaguc(url=urlrequest.encode("UTF-8"))
logger.info("status: %d", status)
result_dataset = Dataset(f"{parameter_name}.nc", memory=response.getvalue())

coveragejson = netcdf_to_covjson(
result_dataset, translate_names, translate_dims
)
if coveragejson is not None:
coveragejsons.append(coveragejson)
parameters = parameters | coveragejson.parameters

if len(coveragejsons) == 1:
return coveragejsons[0]

return CoverageCollection(coverages=coveragejsons, parameters=parameters)
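
The coverage requests above are assembled as WCS GetCoverage query strings by string concatenation. A compact, self-contained sketch of the same assembly using a parameter dict; the keys mirror the strings used above, while the dataset, the values and the exact resolution keys are placeholders:

def build_getcoverage_query(dataset, coverage, bbox, time="*", reference_time=None,
                            custom_dims=None, vertical_dim=None, resolution=None):
    """Assemble a WCS GetCoverage query string like the ones concatenated above."""
    params = {
        "dataset": dataset,
        "service": "wcs",
        "request": "getcoverage",
        "format": "NetCDF4",
        "crs": "EPSG:4326",
        "coverage": coverage,
        "bbox": bbox,
        "time": time,
    }
    if reference_time is not None:
        params["dim_reference_time"] = reference_time
    if vertical_dim is not None:
        params.update(vertical_dim)  # e.g. {"elevation": "850"}; other verticals need a DIM_ prefix
    if custom_dims is not None:
        params.update({f"DIM_{name}": value for name, value in custom_dims.items()})
    if resolution is not None:
        params.update(resolution)  # exact resolution keys depend on the WCS dialect
    return "&".join(f"{key}={value}" for key, value in params.items())


# Example:
print(build_getcoverage_query(
    "harmonie", "air_temperature__at_2m", "0,45,10,55",
    time="2024-09-16T00:00:00Z", vertical_dim={"elevation": "850"},
))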


def get_translate_names(metadata: dict) -> dict:
    """Map netCDF variable names to their layer names."""
    translated_names = {}
    for layer_name in metadata:
        var_name = metadata[layer_name]["layer"]["variables"][0]["variableName"]
        translated_names[var_name] = layer_name
    return translated_names


def get_translate_dims(metadata: dict) -> dict:
    """Map netCDF dimension names to the layer's dimension names."""
    translated_dims = {}
    for layer_name in metadata:
        for dim_name in metadata[layer_name]["dims"]:
            dim_netcdf_name = metadata[layer_name]["dims"][dim_name]["name"]
            translated_dims[dim_netcdf_name] = dim_name
    return translated_dims
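
A quick usage example for the two helpers above. The metadata fragment is invented, but its shape (layer → variables, dims) follows the lookups in this commit, and the expected results assume the helpers return the dictionaries they build:

sample_metadata = {
    "air_temperature__at_2m": {
        "layer": {"variables": [{"variableName": "ta2m"}]},
        "dims": {"elevation": {"name": "height", "hidden": False, "isvertical": True}},
    },
}

print(get_translate_names(sample_metadata))  # {'ta2m': 'air_temperature__at_2m'}
print(get_translate_dims(sample_metadata))   # {'height': 'elevation'}
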
21 changes: 8 additions & 13 deletions python/python_fastapi_server/routers/edr_instances.py
@@ -15,8 +15,8 @@
from .edr_utils import (
generate_max_age,
get_base_url,
get_collectioninfo_for_id,
get_edr_collections,
get_metadata,
get_collectioninfo_from_md,
get_ref_times_for_coll,
)

@@ -39,12 +39,10 @@ async def rest_get_edr_inst_for_coll(
)

instances: list[Instance] = []
edr_collections = get_edr_collections()

ref_times = await get_ref_times_for_coll(
edr_collections[collection_name]["dataset"],
edr_collections[collection_name]["parameters"][0]["name"],
)
metadata = await get_metadata(collection_name)

ref_times = get_ref_times_for_coll(metadata[collection_name])
print("REF:", ref_times)
links: list[Link] = []
links.append(Link(href=instances_url, rel="collection"))
@@ -53,9 +51,7 @@
instance_links: list[Link] = []
instance_link = Link(href=f"{instances_url}/{instance}", rel="collection")
instance_links.append(instance_link)
instance_info, ttl = await get_collectioninfo_for_id(collection_name, instance)
if ttl is not None:
ttl_set.add(ttl)
instance_info = get_collectioninfo_from_md(metadata, collection_name, instance)
instances.append(instance_info)

instances_data = Instances(instances=instances, links=links)
@@ -73,7 +69,6 @@ async def rest_get_collection_info(collection_name: str, instance, response: Res
"""
GET "/collections/{collection_name}/instances/{instance}"
"""
coll, ttl = await get_collectioninfo_for_id(collection_name, instance)
if ttl is not None:
response.headers["cache-control"] = generate_max_age(ttl)
metadata = await get_metadata(collection_name)
coll = get_collectioninfo_from_md(metadata, collection_name, instance)
return coll
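
For context on the instances endpoints above: each WMS reference time becomes one EDR instance with its own links. A minimal sketch of that mapping using plain dicts; the real code builds the imported Instance and Link models, and the URL layout here is a simplified placeholder:

def build_instances(instances_url: str, ref_times: list) -> list:
    """One instance entry per reference time, each linking back to itself."""
    instances = []
    for ref_time in ref_times:
        instances.append({
            "id": ref_time,
            "links": [{"href": f"{instances_url}/{ref_time}", "rel": "collection"}],
        })
    return instances


# Example:
print(build_instances("/edr/collections/harmonie/instances", ["2024091600", "2024091606"]))
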
2 changes: 1 addition & 1 deletion python/python_fastapi_server/routers/edr_locations.py
@@ -18,7 +18,7 @@

@router.get("/collections/{_coll}/locations")
@router.get("/collections/{_coll}/instances/{instance}/locations")
async def get_locations(_coll: str):
async def get_locations(_coll: str, instance: str = None):
"""
Returns locations where you could query data.
"""