diff --git a/.gitignore b/.gitignore
index 2dc53ca..53052a6 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,5 @@
+# Repo Specific
+tests/test_files/tmp/
 # Byte-compiled / optimized / DLL files
 __pycache__/
 *.py[cod]
diff --git a/poetry.lock b/poetry.lock
index cc23883..03e49d7 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -7,10 +7,10 @@ optional = false
 python-versions = ">=3.5"
 
 [package.extras]
-dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"]
-docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"]
-tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"]
-tests-no-zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"]
+dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"]
+docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"]
+tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "zope.interface", "cloudpickle"]
+tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "cloudpickle"]
 
 [[package]]
 name = "certifi"
@@ -29,7 +29,7 @@ optional = false
 python-versions = ">=3.6.0"
 
 [package.extras]
-unicode-backport = ["unicodedata2"]
+unicode_backport = ["unicodedata2"]
 
 [[package]]
 name = "colorama"
@@ -48,7 +48,6 @@ optional = false
 python-versions = ">=3.6"
 
 [package.dependencies]
-importlib-metadata = {version = "<4.3", markers = "python_version < \"3.8\""}
 mccabe = ">=0.6.0,<0.7.0"
 pycodestyle = ">=2.8.0,<2.9.0"
 pyflakes = ">=2.4.0,<2.5.0"
@@ -69,22 +68,6 @@ category = "main"
 optional = false
 python-versions = ">=3.5"
 
-[[package]]
-name = "importlib-metadata"
-version = "4.2.0"
-description = "Read metadata from Python packages"
-category = "dev"
-optional = false
-python-versions = ">=3.6"
-
-[package.dependencies]
-typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""}
-zipp = ">=0.5"
-
-[package.extras]
-docs = ["jaraco.packaging (>=8.2)", "rst.linker (>=1.9)", "sphinx"]
-testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pep517", "pyfakefs", "pytest (>=4.6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-flake8", "pytest-mypy"]
-
 [[package]]
 name = "iniconfig"
 version = "1.1.1"
@@ -120,12 +103,9 @@ category = "dev"
 optional = false
 python-versions = ">=3.6"
 
-[package.dependencies]
-importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""}
-
 [package.extras]
-dev = ["pre-commit", "tox"]
-testing = ["pytest", "pytest-benchmark"]
+testing = ["pytest-benchmark", "pytest"]
+dev = ["tox", "pre-commit"]
 
 [[package]]
 name = "py"
@@ -160,7 +140,23 @@ optional = false
 python-versions = ">=3.6.8"
 
 [package.extras]
-diagrams = ["jinja2", "railroad-diagrams"]
+diagrams = ["railroad-diagrams", "jinja2"]
+
+[[package]]
+name = "pystac"
+version = "1.7.3"
+description = "Python library for working with Spatiotemporal Asset Catalog (STAC)."
+category = "main" +optional = false +python-versions = ">=3.8" + +[package.dependencies] +python-dateutil = ">=2.7.0" + +[package.extras] +orjson = ["orjson (>=3.5)"] +urllib3 = ["urllib3 (>=1.26)"] +validation = ["jsonschema (>=4.0.1)"] [[package]] name = "pytest" @@ -173,7 +169,6 @@ python-versions = ">=3.7" [package.dependencies] attrs = ">=19.2.0" colorama = {version = "*", markers = "sys_platform == \"win32\""} -importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} iniconfig = "*" packaging = "*" pluggy = ">=0.12,<2.0" @@ -195,7 +190,18 @@ python-versions = ">=3.7" pytest = ">=5.0" [package.extras] -dev = ["pre-commit", "pytest-asyncio", "tox"] +dev = ["pre-commit", "tox", "pytest-asyncio"] + +[[package]] +name = "python-dateutil" +version = "2.8.2" +description = "Extensions to the standard Python datetime module" +category = "main" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" + +[package.dependencies] +six = ">=1.5" [[package]] name = "requests" @@ -213,7 +219,15 @@ urllib3 = ">=1.21.1,<1.27" [package.extras] socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] +use_chardet_on_py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" [[package]] name = "tenacity" @@ -234,14 +248,6 @@ category = "dev" optional = false python-versions = ">=3.7" -[[package]] -name = "typing-extensions" -version = "4.3.0" -description = "Backported and Experimental Type Hints for Python 3.7+" -category = "dev" -optional = false -python-versions = ">=3.7" - [[package]] name = "urllib3" version = "1.26.12" @@ -251,26 +257,14 @@ optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4" [package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] -secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] +brotli = ["brotlicffi (>=0.8.0)", "brotli (>=1.0.9)", "brotlipy (>=0.6.0)"] +secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "urllib3-secure-extra", "ipaddress"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] -[[package]] -name = "zipp" -version = "3.8.1" -description = "Backport of pathlib-compatible object wrapper for zip files" -category = "dev" -optional = false -python-versions = ">=3.7" - -[package.extras] -docs = ["jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx"] -testing = ["func-timeout", "jaraco.itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] - [metadata] lock-version = "1.1" -python-versions = "^3.7" -content-hash = "35ffe329a2996643b6ddffac0d0ffb3d5a37e3bc7ecd329eb411a206b1a1949b" +python-versions = "^3.9" +content-hash = "7fd5eb2cd4b15e18f0cbd3df4c3497c4265abb820c67e5693007ec8c13592959" [metadata.files] attrs = [ @@ -301,10 +295,6 @@ idna = [ {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, ] -importlib-metadata = [ - {file = "importlib_metadata-4.2.0-py3-none-any.whl", hash = 
"sha256:057e92c15bc8d9e8109738a48db0ccb31b4d9d5cfbee5a8670879a30be66304b"}, - {file = "importlib_metadata-4.2.0.tar.gz", hash = "sha256:b7e52a1f8dec14a75ea73e0891f3060099ca1d8e6a462a4dff11c3e119ea1b31"}, -] iniconfig = [ {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, @@ -337,6 +327,10 @@ pyparsing = [ {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, ] +pystac = [ + {file = "pystac-1.7.3-py3-none-any.whl", hash = "sha256:2b1b5e11b995e443376ca1d195609d95723f690c8d192604bc00091fcdf52e4c"}, + {file = "pystac-1.7.3.tar.gz", hash = "sha256:6848074fad6665ac631abd62c692bb868de37379615db90f4d913dca37f844ce"}, +] pytest = [ {file = "pytest-7.1.3-py3-none-any.whl", hash = "sha256:1377bda3466d70b55e3f5cecfa55bb7cfcf219c7964629b967c37cf0bda818b7"}, {file = "pytest-7.1.3.tar.gz", hash = "sha256:4f365fec2dff9c1162f834d9f18af1ba13062db0c708bf7b946f8a5c76180c39"}, @@ -345,10 +339,18 @@ pytest-mock = [ {file = "pytest-mock-3.8.2.tar.gz", hash = "sha256:77f03f4554392558700295e05aed0b1096a20d4a60a4f3ddcde58b0c31c8fca2"}, {file = "pytest_mock-3.8.2-py3-none-any.whl", hash = "sha256:8a9e226d6c0ef09fcf20c94eb3405c388af438a90f3e39687f84166da82d5948"}, ] +python-dateutil = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, +] requests = [ {file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"}, {file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"}, ] +six = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] tenacity = [ {file = "tenacity-8.1.0-py3-none-any.whl", hash = "sha256:35525cd47f82830069f0d6b73f7eb83bc5b73ee2fff0437952cedf98b27653ac"}, {file = "tenacity-8.1.0.tar.gz", hash = "sha256:e48c437fdf9340f5666b92cd7990e96bc5fc955e1298baf4a907e3972067a445"}, @@ -357,15 +359,7 @@ tomli = [ {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] -typing-extensions = [ - {file = "typing_extensions-4.3.0-py3-none-any.whl", hash = "sha256:25642c956049920a5aa49edcdd6ab1e06d7e5d467fc00e0506c44ac86fbfca02"}, - {file = "typing_extensions-4.3.0.tar.gz", hash = "sha256:e6d2677a32f47fc7eb2795db1dd15c1f34eff616bcaf2cfb5e997f854fa1c4a6"}, -] urllib3 = [ {file = "urllib3-1.26.12-py2.py3-none-any.whl", hash = "sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997"}, {file = "urllib3-1.26.12.tar.gz", hash = "sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e"}, ] -zipp = [ - {file = "zipp-3.8.1-py3-none-any.whl", hash = "sha256:47c40d7fe183a6f21403a199b3e4192cca5774656965b0a4988ad2f8feb5f009"}, 
- {file = "zipp-3.8.1.tar.gz", hash = "sha256:05b45f1ee8f807d0cc928485ca40a07cb491cf092ff587c0df9cb1fd154848d2"}, -] diff --git a/pyproject.toml b/pyproject.toml index fb64b0b..8c6a3fd 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -12,10 +12,11 @@ packages = [ ] [tool.poetry.dependencies] -python = "^3.7" +python = "^3.9" requests = "^2.28.0" tenacity = "^8.0.1" giturlparse = "^0.10.0" +pystac = "^1.7.3" [tool.poetry.dev-dependencies] diff --git a/tests/test_files/SNDR.SS1330.CHIRP.20160829T2317.m06.g233.L1_AQ.std.v02_48.G.200425130422.json b/tests/test_files/SNDR.SS1330.CHIRP.20160829T2317.m06.g233.L1_AQ.std.v02_48.G.200425130422.json new file mode 100644 index 0000000..b431e39 --- /dev/null +++ b/tests/test_files/SNDR.SS1330.CHIRP.20160829T2317.m06.g233.L1_AQ.std.v02_48.G.200425130422.json @@ -0,0 +1,37 @@ +{ + "type": "Feature", + "stac_version": "1.0.0", + "id": "SNDR.SS1330.CHIRP.20160829T2317.m06.g233.L1_AQ.std.v02_48.G.200425130422", + "properties": { + "datetime": "2016-08-29T23:17:00Z", + "start_datetime": "2016-08-29T23:17:00Z", + "end_datetime": "2023-01-01T00:06:00.000000Z", + "created": "2023-05-24T16:01:04.801686Z", + "updated": "2023-05-24T16:01:04.801692Z" + }, + "geometry": "", + "links": [ + { + "rel": "root", + "href": "./catalog.json", + "type": "application/json" + }, + { + "rel": "parent", + "href": "./catalog.json", + "type": "application/json" + } + ], + "assets": { + "data": { + "href": "/unity/ads/sounder_sips/chirp_test_data/SNDR.SS1330.CHIRP.20160829T2317.m06.g233.L1_AQ.std.v02_48.G.200425130422.nc", + "title": "Main Data File" + }, + "metadata_stac": { + "href": "/unity/ads/sounder_sips/chirp_test_data/SNDR.SS1330.CHIRP.20160829T2317.m06.g233.L1_AQ.std.v02_48.G.200425130422.json", + "title": "Metadata STAC File" + } + }, + "bbox": "", + "stac_extensions": [] +} diff --git a/tests/test_files/catalog_01.json b/tests/test_files/catalog_01.json new file mode 100644 index 0000000..28f04e8 --- /dev/null +++ b/tests/test_files/catalog_01.json @@ -0,0 +1,18 @@ +{ + "type": "Catalog", + "id": "catalog_id", + "stac_version": "1.0.0", + "description": "Tutorial chirp catalog.", + "links": [ + { + "rel": "root", + "href": "./catalog_01.json", + "type": "application/json" + }, + { + "rel": "item", + "href": "./SNDR.SS1330.CHIRP.20160829T2317.m06.g233.L1_AQ.std.v02_48.G.200425130422.json", + "type": "application/json" + } + ] +} diff --git a/tests/test_files/catalog_corrupt.json b/tests/test_files/catalog_corrupt.json new file mode 100644 index 0000000..692caf7 --- /dev/null +++ b/tests/test_files/catalog_corrupt.json @@ -0,0 +1,18 @@ +{ + "type": "Catalog", + "id": "catalog_id", + "stac_version": "1.0.0", + "description": "Tutorial chirp catalog.", + "links": [ + { + "rel": "root", + "href": "./catalog.json", + "type": "application/json" + }, + { + "rel": "item", + "href": "./SNDR.SS1330.CHIRP.20160829T2317.m06.g233.L1_AQ.std.v02_48.G.200425130422.json", + "type": "application/json" + + ] +} diff --git a/tests/test_files/catalog_corrupt_02.json b/tests/test_files/catalog_corrupt_02.json new file mode 100644 index 0000000..7fb7e29 --- /dev/null +++ b/tests/test_files/catalog_corrupt_02.json @@ -0,0 +1,18 @@ +{ + "type": "Catalogsss", + "id": "catalog_id", + "stac_version": "1.0.0", + "description": "Tutorial chirp catalog.", + "links": [ + { + "rel": "root", + "href": "./catalog.json", + "type": "application/json" + }, + { + "rel": "item", + "href": "./SNDR.SS1330.CHIRP.20160829T2317.m06.g233.L1_AQ.std.v02_48.G.200425130422.json", + "type": "application/json" + } + ] 
+}
diff --git a/tests/test_files/cmr_granules.json b/tests/test_files/cmr_granules.json
new file mode 100644
index 0000000..830b395
--- /dev/null
+++ b/tests/test_files/cmr_granules.json
@@ -0,0 +1 @@
+{"type":"FeatureCollection","stac_version":"1.0.0","numberMatched":2,"numberReturned":2,"features":[{"properties":{"datetime":"2016-08-22T00:05:22.000Z","start_datetime":"2016-08-22T00:05:22.000Z","end_datetime":"2016-08-22T00:11:22.000Z"},"bbox":[-7.02,-60.32,26.31,-36.16],"assets":{"metadata":{"href":"https://cmr.earthdata.nasa.gov:443/search/concepts/G2040068613-GES_DISC.xml","type":"application/xml"},"opendap":{"title":"The OPENDAP location for the granule. (GET DATA : OPENDAP DATA)","href":"https://sounder.gesdisc.eosdis.nasa.gov/opendap/CHIRP/SNDR13CHRP1.2/2016/235/SNDR.SS1330.CHIRP.20160822T0005.m06.g001.L1_AQ.std.v02_48.G.200425095850.nc","type":"application/x-netcdf"},"data":{"title":"Download SNDR.SS1330.CHIRP.20160822T0005.m06.g001.L1_AQ.std.v02_48.G.200425095850.nc","href":"https://data.gesdisc.earthdata.nasa.gov/data/CHIRP/SNDR13CHRP1.2/2016/235/SNDR.SS1330.CHIRP.20160822T0005.m06.g001.L1_AQ.std.v02_48.G.200425095850.nc"}},"type":"Feature","geometry":{"type":"Polygon","coordinates":[[[6.18,-36.16],[-7.02,-56.04],[23.24,-60.32],[26.31,-38.94],[6.18,-36.16]]]},"stac_extensions":[],"id":"G2040068613-GES_DISC","stac_version":"1.0.0","collection":"C2011289787-GES_DISC","links":[{"rel":"self","href":"https://cmr.earthdata.nasa.gov:443/search/concepts/G2040068613-GES_DISC.stac"},{"rel":"parent","href":"https://cmr.earthdata.nasa.gov:443/search/concepts/C2011289787-GES_DISC.stac"},{"rel":"collection","href":"https://cmr.earthdata.nasa.gov:443/search/concepts/C2011289787-GES_DISC.stac"},{"rel":"root","href":"https://cmr.earthdata.nasa.gov:443/search/"},{"rel":"via","href":"https://cmr.earthdata.nasa.gov:443/search/concepts/G2040068613-GES_DISC.json"},{"rel":"via","href":"https://cmr.earthdata.nasa.gov:443/search/concepts/G2040068613-GES_DISC.umm_json"}]},{"properties":{"datetime":"2016-08-22T00:11:22.000Z","start_datetime":"2016-08-22T00:11:22.000Z","end_datetime":"2016-08-22T00:17:22.000Z"},"bbox":[-43.78,-81.77028018298317,23.22,-56.18],"assets":{"metadata":{"href":"https://cmr.earthdata.nasa.gov:443/search/concepts/G2040068619-GES_DISC.xml","type":"application/xml"},"opendap":{"title":"The OPENDAP location for the granule. (GET DATA : OPENDAP DATA)","href":"https://sounder.gesdisc.eosdis.nasa.gov/opendap/CHIRP/SNDR13CHRP1.2/2016/235/SNDR.SS1330.CHIRP.20160822T0011.m06.g002.L1_AQ.std.v02_48.G.200425095901.nc","type":"application/x-netcdf"},"data":{"title":"Download SNDR.SS1330.CHIRP.20160822T0011.m06.g002.L1_AQ.std.v02_48.G.200425095901.nc","href":"https://data.gesdisc.earthdata.nasa.gov/data/CHIRP/SNDR13CHRP1.2/2016/235/SNDR.SS1330.CHIRP.20160822T0011.m06.g002.L1_AQ.std.v02_48.G.200425095901.nc"}},"type":"Feature","geometry":{"type":"Polygon","coordinates":[[[-7.16,-56.18],[-43.78,-71.72],[20.73,-81.77],[23.22,-60.47],[-7.16,-56.18]]]},"stac_extensions":[],"id":"G2040068619-GES_DISC","stac_version":"1.0.0","collection":"C2011289787-GES_DISC","links":[{"rel":"self","href":"https://cmr.earthdata.nasa.gov:443/search/concepts/G2040068619-GES_DISC.stac"},{"rel":"parent","href":"https://cmr.earthdata.nasa.gov:443/search/concepts/C2011289787-GES_DISC.stac"},{"rel":"collection","href":"https://cmr.earthdata.nasa.gov:443/search/concepts/C2011289787-GES_DISC.stac"},{"rel":"root","href":"https://cmr.earthdata.nasa.gov:443/search/"},{"rel":"via","href":"https://cmr.earthdata.nasa.gov:443/search/concepts/G2040068619-GES_DISC.json"},{"rel":"via","href":"https://cmr.earthdata.nasa.gov:443/search/concepts/G2040068619-GES_DISC.umm_json"}]}],"links":[{"rel":"self","href":"https://cmr.earthdata.nasa.gov:443/search/granules.stac?collection_concept_id=C2011289787-GES_DISC&temporal%5B%5D=2016-08-22T00%3A10%3A00%2C2016-08-22T00%3A15%3A00&page_num=1"},{"rel":"root","href":"https://cmr.earthdata.nasa.gov:443/search/"}],"context":{"returned":2,"limit":1000000,"matched":2}}
\ No newline at end of file
diff --git a/tests/test_unity_stac.py b/tests/test_unity_stac.py
new file mode 100644
index 0000000..571e0d1
--- /dev/null
+++ b/tests/test_unity_stac.py
@@ -0,0 +1,54 @@
+from unity_py.unity_exception import UnityException
+from unity_py.resources.collection import Collection
+
+import pytest
+
+@pytest.fixture
+def cleanup_update_test():
+    yield None
+    print("Cleanup...")
+
+def test_read_corrupt_stac():
+    with pytest.raises(UnityException):
+        Collection.from_stac("tests/test_files/doesnt.exist")
+    with pytest.raises(UnityException):
+        collection = Collection.from_stac("tests/test_files/catalog_corrupt_02.json")
+    with pytest.raises(UnityException):
+        collection = Collection.from_stac("tests/test_files/catalog_corrupt.json")
+
+def test_read_stac():
+    collection = Collection.from_stac("tests/test_files/cmr_granules.json")
+    assert collection.collection_id == "C2011289787-GES_DISC"
+    datasets = collection._datasets
+    assert len(datasets) == 2
+
+    data_files = collection.data_locations()
+    assert len(data_files) == 6
+    data_files = collection.data_locations(["data", "opendap"])
+    assert len(data_files) == 4
+    data_files = collection.data_locations(["data", "opendap", "metadata"])
+    assert len(data_files) == 6
+    data_files = collection.data_locations(["data"])
+    assert len(data_files) == 2
+    for x in data_files:
+        assert x in ['https://data.gesdisc.earthdata.nasa.gov/data/CHIRP/SNDR13CHRP1.2/2016/235/SNDR.SS1330.CHIRP.20160822T0005.m06.g001.L1_AQ.std.v02_48.G.200425095850.nc', 'https://data.gesdisc.earthdata.nasa.gov/data/CHIRP/SNDR13CHRP1.2/2016/235/SNDR.SS1330.CHIRP.20160822T0011.m06.g002.L1_AQ.std.v02_48.G.200425095901.nc']
+
+    # Try a "classic" catalog + item files stac catalog
+    collection = Collection.from_stac("tests/test_files/catalog_01.json")
+    datasets = collection._datasets
+    assert len(datasets) == 1
+    data_files = collection.data_locations()
+    assert len(data_files) == 2
+    data_files = collection.data_locations(["data"])
+    assert len(data_files) == 1
+    data_files = collection.data_locations(["metadata_stac"])
+    assert len(data_files) == 1
+    assert data_files[0] == "/unity/ads/sounder_sips/chirp_test_data/SNDR.SS1330.CHIRP.20160829T2317.m06.g233.L1_AQ.std.v02_48.G.200425130422.json"
+
+
+def test_write_stac():
+    collection = Collection.from_stac("tests/test_files/cmr_granules.json")
+    Collection.to_stac(collection, "tests/test_files/tmp")
+
+    collection = Collection.from_stac("tests/test_files/catalog_01.json")
+    Collection.to_stac(collection, "tests/test_files/tmp")
diff --git a/unity_py/resources/collection.py b/unity_py/resources/collection.py
index 47accd5..f304f8a 100644
--- a/unity_py/resources/collection.py
+++ b/unity_py/resources/collection.py
@@ -1,4 +1,15 @@
+from unity_py.unity_exception import UnityException
 from unity_py.resources.dataset import Dataset
+from unity_py.resources.data_file import DataFile
+from pystac import Catalog, get_stac_version, ItemCollection, Item, Asset
+from pystac.errors import STACTypeError
+import json
+from datetime import datetime
+from datetime import timezone
+from pystac import CatalogType
+from dateutil import parser as date_parser
+
+#import pytz
 
 class Collection(object):
     """The Collection object contains metadata about a collection within the Unity system.
@@ -15,3 +26,128 @@ def __init__(self, id):
         self._datasets = []
         self._beginning_time = None
         self._ending_time = None
+
+    def data_locations(self, type=[]):
+        """
+        A method to list all asset locations (data, metadata, etc.)
+        Parameters
+        ----------
+        type : List of Strings
+            List of "stac asset keys" to filter on. Commonly ["data"] is of most interest.
+
+        Returns
+        -------
+        locations
+            List of returned asset locations
+        """
+        if len(type) == 0:
+            return [file.location for files in [x.datafiles for x in self._datasets] for file in files]
+        else:
+            return [file.location for files in [x.datafiles for x in self._datasets] for file in files if file.type in type]
+
+
+    def to_stac(collection, data_dir):
+        """
+        A method for writing a STAC catalog + item files from a unity Collection object. The caller is responsible for providing a collection with datasets and datafiles, along with the output location of the data.
+        Parameters
+        ----------
+        collection : Collection
+            The collection object to convert into a STAC catalog + STAC item files.
+        data_dir : String
+            The output directory where the STAC catalog and item files are written.
+ + """ + catalog = Catalog(id=collection.collection_id, description="STAC Catalog") + for dataset in collection._datasets: + updated = datetime.now(timezone.utc).isoformat().replace('+00:00', 'Z') + item = Item( + id=dataset.id, + geometry=dataset.geometry, + bbox=dataset.bbox, + datetime = date_parser.parse(dataset.data_begin_time), + properties={ + "datetime": dataset.data_begin_time, + "start_datetime": dataset.data_begin_time, + "end_datetime":dataset.data_end_time, + "created": dataset.data_create_time if dataset.data_create_time!= None else updated, + "updated": updated + }, + ) + catalog.add_item(item) + for df in dataset.datafiles: + item.add_asset( + # key="data", asset=pystac.Asset(href=f,title="Main Data File", media_type=pystac.MediaType.HDF5) + key=df.type, asset=Asset(href=df.location,title="{} file".format(df.type)) + ) + + from pystac.layout import TemplateLayoutStrategy + write_dir = data_dir + strategy = TemplateLayoutStrategy(item_template="") + catalog.normalize_hrefs(write_dir,strategy=strategy) + catalog.save(catalog_type=CatalogType.SELF_CONTAINED, dest_href=write_dir) + + + def from_stac(stac_file): + """ + A method for reading stac and converting it into a unity collection object. This is usually the result of a stage-in operation. stac formats supported are "GEOJSON Feature Collections" and STAC Catalogs with referenced item files. + Parameters + ---------- + stac_file : String + The location of the stac file to read. + + Returns + ------- + Collection + A collection object including defined datasets + + """ + data = [] + id = None + root_catalog = None + + try: + + try: + root_catalog = Catalog.from_file(stac_file) + id = root_catalog.id + items = root_catalog.get_all_items() + except STACTypeError as e: + pass + # attempt to read as a feature collection + + + # ItemCollection + if root_catalog is None: + with open(stac_file, 'r') as f: + data = json.load(f) + ic = ItemCollection.from_dict(data) + try: + id = data['features'][0]['collection'] + except: + pass + + items = ic.items + + collection = Collection(id) + # Catch file not found... ? 
+            for item in items:
+                ds = Dataset(item.id, item.properties.get("collection"), item.properties.get("start_datetime", None), item.properties.get("end_datetime", None), item.properties.get("created", None))
+                ds.bbox = item.bbox
+                ds.geometry = item.geometry
+                # Add other parameters/properties here
+                # TODO
+                # ds.add_property(key,value)
+
+                for asset_key in item.assets:
+                    asset = item.assets[asset_key]
+                    ds.add_data_file(DataFile(asset_key, asset.href))
+                collection._datasets.append(ds)
+            return collection
+        except FileNotFoundError as fnfe:
+            raise UnityException(str(fnfe))
+        except STACTypeError as ste:
+            raise UnityException(str(ste))
+        except json.decoder.JSONDecodeError as jsd:
+            raise UnityException(str(jsd))
+        except:
+            raise UnityException("An unknown error occurred creating collection from stac")
diff --git a/unity_py/resources/data_file.py b/unity_py/resources/data_file.py
index 947bb86..20f5403 100644
--- a/unity_py/resources/data_file.py
+++ b/unity_py/resources/data_file.py
@@ -3,7 +3,7 @@ class DataFile(object):
     """
 
     def __str__(self):
-        return f'unity_py.resources.DataFile(collection_id={self.collection_id})'
+        return f'unity_py.resources.DataFile(location={self.location})'
 
     def __repr__(self):
         return self.__str__()
diff --git a/unity_py/resources/dataset.py b/unity_py/resources/dataset.py
index fe42ec0..9d5eafe 100644
--- a/unity_py/resources/dataset.py
+++ b/unity_py/resources/dataset.py
@@ -40,6 +40,9 @@ def __init__(self, name, collection_id, start_time, end_time, creation_time ):
         self.data_begin_time = start_time
         self.data_end_time = end_time
         self.data_create_time = creation_time
+        self.properties = {}
+        self.geometry = None
+        self.bbox = None
 
     def add_data_file(self, datafile: type=DataFile):
         """adds a data file to a dataset
@@ -51,3 +54,15 @@ def add_data_file(self, datafile: type=DataFile):
         """
 
         self.datafiles.append(datafile)
+
+    def add_property(self, key, value):
+        """adds a custom metadata property to a dataset
+
+        Parameters
+        ----------
+        key : String
+            The property name to be set
+        value : Object
+            The property value to be set
+        """
+        self.properties[key] = value
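
Reviewer note (not part of the patch): a minimal usage sketch of the round-trip API this change introduces, using only calls visible in the diff (Collection.from_stac, Collection.data_locations, Collection.to_stac). The input catalog and output directory are placeholders borrowed from the test suite above; treat this as an illustration under those assumptions, not documented API.

from unity_py.unity_exception import UnityException
from unity_py.resources.collection import Collection

try:
    # Read a CMR-style GeoJSON feature collection (same fixture the tests use).
    collection = Collection.from_stac("tests/test_files/cmr_granules.json")

    # List only the "data" asset locations across all datasets in the collection.
    for href in collection.data_locations(["data"]):
        print(href)

    # Write the collection back out as a self-contained STAC catalog + item files.
    Collection.to_stac(collection, "tests/test_files/tmp")
except UnityException as err:
    # from_stac wraps missing files and malformed STAC input in UnityException.
    print(f"Could not load STAC: {err}")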