diff --git a/.github/workflows/build-and-test.yml b/.github/workflows/build-and-test.yml index e1548a0e93..53303e1123 100644 --- a/.github/workflows/build-and-test.yml +++ b/.github/workflows/build-and-test.yml @@ -21,7 +21,7 @@ env: # nomkl: make sure numpy w/out mkl # setuptools_scm: needed for versioning to work CONDA_DEFAULT_DEPENDENCIES: python-build nomkl setuptools_scm - LATEST_SUPPORTED_PYTHON_VERSION: "3.10" + LATEST_SUPPORTED_PYTHON_VERSION: "3.11" jobs: check-syntax-errors: @@ -72,7 +72,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: [3.8, 3.9, "3.10"] + python-version: [3.8, 3.9, "3.10", "3.11"] os: [windows-latest, macos-latest] steps: - uses: actions/checkout@v3 @@ -160,7 +160,7 @@ jobs: needs: check-syntax-errors strategy: matrix: - python-version: [3.8, 3.9, "3.10"] + python-version: [3.8, 3.9, "3.10", "3.11"] os: [windows-latest, macos-latest] steps: - uses: actions/checkout@v3 diff --git a/.readthedocs_environment.yml b/.readthedocs_environment.yml index 8148bd2470..1d1681aa5f 100644 --- a/.readthedocs_environment.yml +++ b/.readthedocs_environment.yml @@ -10,8 +10,8 @@ channels: # https://github.com/conda/conda/issues/5003 - nodefaults dependencies: -- python=3.9 -- gdal>=3.4.2 +- python=3.11 +- gdal>=3.4.2,<3.6.0 - pip - pip: - -r requirements.txt diff --git a/HISTORY.rst b/HISTORY.rst index 8d64b3981e..930daa436a 100644 --- a/HISTORY.rst +++ b/HISTORY.rst @@ -41,6 +41,7 @@ Unreleased Changes * In advance of the numpy 2.0 release, function calls to ``numpy.product`` have been replaced with ``numpy.prod``. 
https://github.com/natcap/invest/issues/1410 + * Add support for python 3.11 (`#1103 <https://github.com/natcap/invest/issues/1103>`_) * SDR * RKLS, USLE, avoided erosion, and avoided export rasters will now have nodata in streams (`#1415 <https://github.com/natcap/invest/issues/1415>`_) diff --git a/pyproject.toml b/pyproject.toml index 58ce1b4c62..c3f19787a6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -2,7 +2,7 @@ name = "natcap.invest" description = "InVEST Ecosystem Service models" readme = "README_PYTHON.rst" -requires-python = ">=3.8,<3.11" +requires-python = ">=3.8,<3.12" license = {file = "LICENSE.txt"} maintainers = [ {name = "Natural Capital Project Software Team"} ] @@ -20,6 +20,7 @@ classifiers = [ "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", "Programming Language :: Cython", "License :: OSI Approved :: BSD License", "Topic :: Scientific/Engineering :: GIS" diff --git a/requirements.txt b/requirements.txt index 68b1554c36..47dd9a47b8 100644 --- a/requirements.txt +++ b/requirements.txt @@ -10,12 +10,12 @@ # scripts/convert-requirements-to-conda-yml.py as though it can only be found # on pip. 
-GDAL>=3.4.2,<3.7.0 +GDAL>=3.4.2,<3.6.0 Pyro4==4.77 # pip-only pandas>=1.2.1 numpy>=1.11.0,!=1.16.0 Rtree>=0.8.2,!=0.9.1 -shapely>=1.7.1,<1.8.2 # https://github.com/shapely/shapely/issues/1385 +shapely>=2.0.0 scipy>=1.9.0 pygeoprocessing==2.4.0 # pip-only taskgraph[niced_processes]>=0.11.0 # pip-only diff --git a/src/natcap/invest/coastal_vulnerability.py b/src/natcap/invest/coastal_vulnerability.py index fa883b05c4..e9d3b6c109 100644 --- a/src/natcap/invest/coastal_vulnerability.py +++ b/src/natcap/invest/coastal_vulnerability.py @@ -1167,9 +1167,9 @@ def prepare_landmass_line_index_and_interpolate_shore_points( if aoi_shapely_prepped.intersects(landmass_line): intersected_shapely_geom = aoi_shapely.intersection( landmass_line) - if intersected_shapely_geom.type == 'LineString': + if intersected_shapely_geom.geom_type == 'LineString': lines_in_aoi_list.append(intersected_shapely_geom) - elif intersected_shapely_geom.type == 'MultiLineString': + elif intersected_shapely_geom.geom_type == 'MultiLineString': shapely_geom_explode = [ shapely.geometry.LineString(x) for x in intersected_shapely_geom] @@ -2516,7 +2516,7 @@ def search_for_vector_habitat( float(shore_id) / base_shore_point_layer.GetFeatureCount()) - if habitat_rtree._n_geoms == 0: + if len(habitat_rtree.geometries) == 0: result[shore_id] = point_habitat_rank continue @@ -2524,8 +2524,9 @@ def search_for_vector_habitat( point_shapely = shapely.wkb.loads( bytes(point_feature_geometry.ExportToWkb())) - found_habitat_geoms = [g for g in habitat_rtree.query( - point_shapely.buffer(search_radius))] + found_habitat_geoms = habitat_rtree.geometries.take( + habitat_rtree.query(point_shapely.buffer(search_radius)) + ).tolist() # the rtree query returns geometries in the envelope of the buffer, # so follow-up with a check for distance from actual point. 
for geom in found_habitat_geoms: @@ -2639,21 +2640,23 @@ def calculate_geomorphology_exposure( None """ LOGGER.info("Assigning geomorphology rank") - geomorph_line_shapely_list = [] geomorph_vector = gdal.OpenEx( geomorphology_vector_path, gdal.OF_VECTOR | gdal.GA_Update) geomorph_layer = geomorph_vector.GetLayer() LOGGER.info("Build spatial index of geomorphology segments") - geom_id_to_rank = {} + # build up a list of geometries to enter into the STRtree, and a + # corresponding list of those geometries' ranks, because STRtree queries + # return indices: + # https://shapely.readthedocs.io/en/stable/strtree.html#shapely.STRtree + geomorph_line_shapely_list = [] + geom_rank_list = [] for feature in geomorph_layer: geometry = feature.GetGeometryRef() shapely_geometry = shapely.wkb.loads( bytes(geometry.ExportToWkb())) if shapely_geometry.is_valid: geomorph_line_shapely_list.append(shapely_geometry) - # use id because geometries aren't hashable. see note: - # https://shapely.readthedocs.io/en/1.8.0/manual.html#strtree.STRtree.strtree.query - geom_id_to_rank[id(shapely_geometry)] = feature.GetField('RANK') + geom_rank_list.append(feature.GetField('RANK')) else: LOGGER.warning(f'geomorphology FID:{feature.GetFID()} is excluded ' 'due to invalid geometry') @@ -2692,11 +2695,16 @@ def calculate_geomorphology_exposure( point_geom = point_feature.GetGeometryRef() poly_geom = point_geom.Buffer(search_radius) poly_shapely = shapely.wkb.loads(bytes(poly_geom.ExportToWkb())) - found_geoms = [g for g in tree.query(poly_shapely)] + + # indices of geoms whose bbox intersects the buffered point's bbox + found_geom_indices = tree.query(poly_shapely) + # filter to only those geoms that actually intersect + intersecting_geom_indices = filter( + lambda idx: poly_shapely.intersects(tree.geometries[idx]), + found_geom_indices) # the tree.query returns geometries found in envelope of poly_shapely, - # so follow-up with a check for intersection. 
- found_ranks = set(geom_id_to_rank[id(g)] for g in found_geoms - if poly_shapely.intersects(g)) + # so follow-up with a check for intersection. + found_ranks = set(geom_rank_list[i] for i in intersecting_geom_indices) if found_ranks: mean_geomorph = sum(found_ranks) / float(len(found_ranks)) else: @@ -3234,9 +3242,9 @@ def _aggregate_raster_values_in_radius( def geometry_to_lines(geometry, include_interiors=True): """Convert a geometry object to a list of lines.""" - if geometry.type == 'Polygon': + if geometry.geom_type == 'Polygon': return polygon_to_lines(geometry, include_interiors=include_interiors) - elif geometry.type == 'MultiPolygon': + elif geometry.geom_type == 'MultiPolygon': line_list = [] for geom in geometry.geoms: line_list.extend(geometry_to_lines(geom)) @@ -3297,7 +3305,7 @@ def _ogr_to_geometry_list(vector_path): try: shapely_geometry = shapely.wkb.loads( bytes(feature_geometry.ExportToWkb())) - except shapely.errors.WKBReadingError: + except shapely.errors.ShapelyError: # In my experience a geometry that can't be loaded by shapely # is also a geometry that can't be fixed with the buffer(0) trick, # so just skip it.