Skip to content

Commit

Permalink
Don't assume product._md exists (#171)
Browse files Browse the repository at this point in the history
* handle missing _md product attr; update use of DatasetType
* lower bound datacube version

---------

Co-authored-by: Ariana Barzinpour <ariana.barzinpour@ga.gov.au>
  • Loading branch information
Ariana-B and Ariana Barzinpour authored Jan 31, 2025
1 parent 696c171 commit 7b1441e
Show file tree
Hide file tree
Showing 4 changed files with 25 additions and 14 deletions.
2 changes: 1 addition & 1 deletion docs/stac-vs-odc.rst
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ similar concepts.
- ODC
- Description
* - :py:class:`~pystac.Collection`
- Product or :py:class:`~datacube.model.DatasetType`
- :py:class:`~datacube.model.Product`
- Collection of observations across space and time
* - :py:class:`~pystac.Item`
- :py:class:`~datacube.model.Dataset`
Expand Down
24 changes: 12 additions & 12 deletions odc/stac/eo3/_eo3converter.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@
except ImportError:
from datacube.index.abstract import default_metadata_type_docs # type: ignore

from datacube.model import Dataset, DatasetType, metadata_from_doc
from datacube.model import Dataset, Product, metadata_from_doc
from odc.geo import CRS
from odc.geo.geobox import GeoBox
from toolz import dicttoolz
Expand Down Expand Up @@ -60,7 +60,7 @@
)


def _to_product(md: RasterCollectionMetadata) -> DatasetType:
def _to_product(md: RasterCollectionMetadata) -> Product:
def make_band(
band_key: BandKey,
band: RasterBandMetadata,
Expand Down Expand Up @@ -95,11 +95,11 @@ def make_band(
for band_key, band in md.meta.bands.items()
],
}
return DatasetType(_eo3, doc)
return Product(_eo3, doc)


@singledispatch
def infer_dc_product(x: Any, cfg: Optional[ConversionConfig] = None) -> DatasetType:
def infer_dc_product(x: Any, cfg: Optional[ConversionConfig] = None) -> Product:
"""Overloaded function."""
raise TypeError(
"Invalid type, must be one of: pystac.item.Item, pystac.collection.Collection"
Expand All @@ -109,7 +109,7 @@ def infer_dc_product(x: Any, cfg: Optional[ConversionConfig] = None) -> DatasetT
@infer_dc_product.register(pystac.item.Item)
def infer_dc_product_from_item(
item: pystac.item.Item, cfg: Optional[ConversionConfig] = None
) -> DatasetType:
) -> Product:
"""
Infer Datacube product object from a STAC Item.
Expand Down Expand Up @@ -164,7 +164,7 @@ def _to_dataset(
item: ParsedItem,
properties: Dict[str, Any],
ds_uuid: uuid.UUID,
product: DatasetType,
product: Product,
) -> Dataset:
# pylint: disable=too-many-locals

Expand Down Expand Up @@ -228,7 +228,7 @@ def _to_dataset(


def _item_to_ds(
item: pystac.item.Item, product: DatasetType, cfg: Optional[ConversionConfig] = None
item: pystac.item.Item, product: Product, cfg: Optional[ConversionConfig] = None
) -> Dataset:
"""
Construct Dataset object from STAC Item and previously constructed Product.
Expand All @@ -239,7 +239,7 @@ def _item_to_ds(
if cfg is None:
cfg = {}

md: RasterCollectionMetadata = getattr(product, "_md")
md: Optional[RasterCollectionMetadata] = getattr(product, "_md", None)
uuid_cfg = cfg.get("uuid", {})
ds_uuid = _compute_uuid(
item, mode=uuid_cfg.get("mode", "auto"), extras=uuid_cfg.get("extras", [])
Expand All @@ -252,7 +252,7 @@ def _item_to_ds(
def stac2ds(
items: Iterable[pystac.item.Item],
cfg: Optional[ConversionConfig] = None,
product_cache: Optional[Dict[str, DatasetType]] = None,
product_cache: Optional[Dict[str, Product]] = None,
) -> Iterator[Dataset]:
"""
STAC :class:`~pystac.item.Item` to :class:`~datacube.model.Dataset` stream converter.
Expand All @@ -277,7 +277,7 @@ def stac2ds(
:param product_cache:
Input/Output parameter, contains mapping from collection name to deduced product definition,
i.e. :py:class:`datacube.model.DatasetType` object.
i.e. :py:class:`datacube.model.Product` object.
.. rubric: Sample Configuration
Expand Down Expand Up @@ -313,7 +313,7 @@ def stac2ds(
warnings: ignore
"""
products: Dict[str, DatasetType] = {} if product_cache is None else product_cache
products: Dict[str, Product] = {} if product_cache is None else product_cache
for item in items:
collection_id = _collection_id(item)
product = products.get(collection_id)
Expand All @@ -329,7 +329,7 @@ def stac2ds(
@infer_dc_product.register(pystac.collection.Collection)
def infer_dc_product_from_collection(
collection: pystac.collection.Collection, cfg: Optional[ConversionConfig] = None
) -> DatasetType:
) -> Product:
"""
Construct Datacube Product definition from STAC Collection.
Expand Down
2 changes: 1 addition & 1 deletion setup.cfg
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,7 @@ docs =
matplotlib-inline
pandas
distributed
datacube
datacube>=1.8.8
ipython
ipykernel

Expand Down
11 changes: 11 additions & 0 deletions tests/test_eo3converter.py
Original file line number Diff line number Diff line change
Expand Up @@ -234,3 +234,14 @@ def test_old_imports():

with pytest.raises(AttributeError):
_ = odc.stac.no_such_thing


def test_product_cache(sentinel_stac_ms: pystac.item.Item):
    """stac2ds must cope with cached products that lack the ``_md`` attribute."""
    # Build a product the normal way, then strip the private metadata
    # attribute to mimic a product that was NOT created via infer_dc_product.
    product = infer_dc_product(sentinel_stac_ms, STAC_CFG)
    delattr(product, "_md")

    # Supplying such a product through product_cache should not raise.
    cache = {product.name: product}
    datasets = list(stac2ds([sentinel_stac_ms], STAC_CFG, cache))
    assert len(datasets) == 1
    assert datasets[0].id

0 comments on commit 7b1441e

Please sign in to comment.