Merge pull request #303 from ASFHyP3/develop
Release v0.21.2
jtherrmann authored Jan 10, 2025
2 parents 85d6221 + 72827c3 commit fdef793
Showing 8 changed files with 49 additions and 26 deletions.
3 changes: 3 additions & 0 deletions .github/workflows/static-analysis.yml
@@ -10,3 +10,6 @@ jobs:
call-ruff-workflow:
# Docs: https://github.com/ASFHyP3/actions
uses: ASFHyP3/actions/.github/workflows/reusable-ruff.yml@v0.12.0

call-mypy-workflow:
uses: ASFHyP3/actions/.github/workflows/reusable-mypy.yml@v0.14.0
4 changes: 4 additions & 0 deletions CHANGELOG.md
@@ -6,6 +6,10 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [PEP 440](https://www.python.org/dev/peps/pep-0440/)
and uses [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## [0.21.2]
### Added
- Add `mypy` to [`static-analysis`](.github/workflows/static-analysis.yml)

## [0.21.1]
### Changed
- The [`static-analysis`](.github/workflows/static-analysis.yml) Github Actions workflow now uses `ruff` rather than `flake8` for linting.
2 changes: 2 additions & 0 deletions environment.yml
@@ -11,6 +11,8 @@ dependencies:
# For packaging, and testing
- python-build
- ruff
- mypy
- types-requests
- pillow
- pytest
- pytest-console-scripts
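`types-requests` is the PEP 561 stub-only package for `requests`, which ships no inline type hints of its own; without the stubs, mypy treats the module as untyped and reports something like "Library stubs not installed for 'requests'". A minimal sketch of the difference (the function and URL are illustrative, not code from this repository):

```python
import requests


def fetch_status(url: str) -> int:
    # With types-requests installed, this call is checked against the requests
    # stubs; without them, mypy falls back to treating `requests` as Any.
    response = requests.get(url, timeout=30)
    return response.status_code
```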
13 changes: 13 additions & 0 deletions pyproject.toml
@@ -91,3 +91,16 @@ convention = "google"
[tool.ruff.lint.isort]
case-sensitive = true
lines-after-imports = 2

[tool.mypy]
python_version = "3.9"
warn_redundant_casts = true
warn_unused_ignores = true
warn_unreachable = true
strict_equality = true
check_untyped_defs = true
exclude = ["/vend/"]

[[tool.mypy.overrides]]
module = "hyp3_autorift.vend.*"
follow_imports = "skip"
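A few of these flags are worth spelling out: `check_untyped_defs` makes mypy check the bodies of functions that have no annotations (normally it skips them), `warn_unreachable` reports statements mypy can prove will never run, and `strict_equality` flags `==`/`!=` comparisons between types that can never be equal. The `exclude` pattern and the `hyp3_autorift.vend.*` override keep mypy out of the vendored code. A sketch of the kinds of errors these flags catch (illustrative functions, not code from this repository):

```python
def parse_count(raw):
    # Checked despite the missing annotations because check_untyped_defs = true:
    # error: Unsupported operand types for + ("int" and "str")
    return int(raw) + 'extra'


def classify(flag: bool) -> str:
    if flag:
        return 'on'
    else:
        return 'off'
    return 'never'  # warn_unreachable: Statement is unreachable


def same_granule(scene_id: str, orbit: int) -> bool:
    # strict_equality: Non-overlapping equality check ("str" vs "int")
    return scene_id == orbit
```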
3 changes: 2 additions & 1 deletion src/hyp3_autorift/process.py
@@ -42,7 +42,7 @@
}

DEFAULT_PARAMETER_FILE = (
'/vsicurl/http://its-live-data.s3.amazonaws.com/' 'autorift_parameters/v001/autorift_landice_0120m.shp'
'/vsicurl/http://its-live-data.s3.amazonaws.com/autorift_parameters/v001/autorift_landice_0120m.shp'
)

PLATFORM_SHORTNAME_LONGNAME_MAPPING = {
@@ -413,6 +413,7 @@ def process(

reference_path, secondary_path = utils.ensure_same_projection(reference_path, secondary_path)

assert reference_metadata is not None
bbox = reference_metadata['bbox']
lat_limits = (bbox[1], bbox[3])
lon_limits = (bbox[0], bbox[2])
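The added `assert reference_metadata is not None` is a standard mypy narrowing idiom: assuming `reference_metadata` is annotated as an `Optional` dict (its annotation is not shown in this diff), indexing it directly is rejected with something like 'Value of type "Optional[...]" is not indexable', and the assert narrows the type to `dict` for the lines that follow. A minimal, self-contained sketch of the pattern:

```python
from typing import Optional


def get_lat_lon_limits(reference_metadata: Optional[dict]) -> tuple:
    # Without this assert, mypy reports the subscription below as an error
    # because the value could still be None at this point.
    assert reference_metadata is not None
    bbox = reference_metadata['bbox']
    lat_limits = (bbox[1], bbox[3])
    lon_limits = (bbox[0], bbox[2])
    return lat_limits, lon_limits
```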
6 changes: 3 additions & 3 deletions src/hyp3_autorift/s1_isce2.py
@@ -5,7 +5,7 @@
import textwrap
from datetime import datetime, timedelta
from pathlib import Path
from typing import List
from typing import List, Tuple

import numpy as np
from autoRIFT import __version__ as version
@@ -188,7 +188,7 @@ def write_conversion_file(
var.setncattr('dr_to_vr_factor', dr_2_vr_factor)
var.setncattr(
'dr_to_vr_factor_description',
'multiplicative factor that converts slant range ' 'pixel displacement dr to slant range velocity vr',
'multiplicative factor that converts slant range pixel displacement dr to slant range velocity vr',
)

M11[noDataMask] = NoDataValue
@@ -297,7 +297,7 @@ def generate_correction_data(
scene: str,
buffer: int = 0,
parameter_file: str = DEFAULT_PARAMETER_FILE,
) -> (dict, Path):
) -> Tuple[dict, Path]:
from hyp3_autorift.vend.testGeogrid_ISCE import loadParsedata, runGeogrid

scene_path = Path(f'{scene}.zip')
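The return annotation fix is purely a typing correction: `-> (dict, Path)` evaluates fine at runtime (the annotation is just a tuple object), but it is not a valid type expression, so mypy rejects it and suggests the `Tuple[...]` form. A sketch of the before/after (the function body is illustrative, not the repository code):

```python
from pathlib import Path
from typing import Tuple


# Invalid as a type:   def generate(...) -> (dict, Path):
# Valid for mypy:      def generate(...) -> Tuple[dict, Path]:
def generate_stub(scene: str) -> Tuple[dict, Path]:
    scene_path = Path(f'{scene}.zip')
    return {'scene': scene}, scene_path
```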
38 changes: 18 additions & 20 deletions src/hyp3_autorift/utils.py
@@ -51,21 +51,21 @@ def find_jpl_parameter_info(polygon: ogr.Geometry, parameter_file: str) -> dict:
'name': f'{feature["name"]}',
'epsg': feature['epsg'],
'geogrid': {
'dem': f"/vsicurl/{feature['h']}",
'ssm': f"/vsicurl/{feature['StableSurfa']}",
'dhdx': f"/vsicurl/{feature['dhdx']}",
'dhdy': f"/vsicurl/{feature['dhdy']}",
'vx': f"/vsicurl/{feature['vx0']}",
'vy': f"/vsicurl/{feature['vy0']}",
'srx': f"/vsicurl/{feature['vxSearchRan']}",
'sry': f"/vsicurl/{feature['vySearchRan']}",
'csminx': f"/vsicurl/{feature['xMinChipSiz']}",
'csminy': f"/vsicurl/{feature['yMinChipSiz']}",
'csmaxx': f"/vsicurl/{feature['xMaxChipSiz']}",
'csmaxy': f"/vsicurl/{feature['yMaxChipSiz']}",
'sp': f"/vsicurl/{feature['sp']}",
'dhdxs': f"/vsicurl/{feature['dhdxs']}",
'dhdys': f"/vsicurl/{feature['dhdys']}",
'dem': f'/vsicurl/{feature["h"]}',
'ssm': f'/vsicurl/{feature["StableSurfa"]}',
'dhdx': f'/vsicurl/{feature["dhdx"]}',
'dhdy': f'/vsicurl/{feature["dhdy"]}',
'vx': f'/vsicurl/{feature["vx0"]}',
'vy': f'/vsicurl/{feature["vy0"]}',
'srx': f'/vsicurl/{feature["vxSearchRan"]}',
'sry': f'/vsicurl/{feature["vySearchRan"]}',
'csminx': f'/vsicurl/{feature["xMinChipSiz"]}',
'csminy': f'/vsicurl/{feature["yMinChipSiz"]}',
'csmaxx': f'/vsicurl/{feature["xMaxChipSiz"]}',
'csmaxy': f'/vsicurl/{feature["yMaxChipSiz"]}',
'sp': f'/vsicurl/{feature["sp"]}',
'dhdxs': f'/vsicurl/{feature["dhdxs"]}',
'dhdys': f'/vsicurl/{feature["dhdys"]}',
},
'autorift': {
'grid_location': 'window_location.tif',
@@ -83,9 +83,7 @@ def find_jpl_parameter_info(polygon: ogr.Geometry, parameter_file: str) -> dict:
break

if parameter_info is None:
raise DemError(
'Could not determine appropriate DEM for:\n' f' centroid: {centroid}' f' using: {parameter_file}'
)
raise DemError(f'Could not determine appropriate DEM for:\n centroid: {centroid} using: {parameter_file}')

dem_geotransform = gdal.Info(parameter_info['geogrid']['dem'], format='json')['geoTransform']
parameter_info['xsize'] = abs(dem_geotransform[1])
@@ -111,10 +109,10 @@ def load_geospatial(infile: str, band: int = 1):
def write_geospatial(
outfile: str, data, transform, projection, nodata, driver: str = 'GTiff', dtype: int = gdal.GDT_Float64
) -> str:
driver = gdal.GetDriverByName(driver)
driver_object = gdal.GetDriverByName(driver)

rows, cols = data.shape
ds = driver.Create(outfile, cols, rows, 1, dtype)
ds = driver_object.Create(outfile, cols, rows, 1, dtype)
ds.SetGeoTransform(transform)
ds.SetProjection(projection)

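Two separate fixes here: the f-string quoting is flipped so the literals use single outer quotes with double-quoted keys inside (before Python 3.12 an f-string cannot reuse its delimiting quote inside a replacement field, so flipping one requires flipping the other), and the `driver` name is no longer rebound from its `str` parameter to the GDAL driver object, which mypy reports as an incompatible assignment. A sketch of the rebinding problem with a stand-in for `gdal.GetDriverByName` (illustrative, not the repository code):

```python
class FakeDriver:
    """Stand-in for the object returned by gdal.GetDriverByName."""

    def create(self, outfile: str) -> str:
        return outfile


def get_driver_by_name(name: str) -> FakeDriver:
    return FakeDriver()


def write_geospatial_sketch(outfile: str, driver: str = 'GTiff') -> str:
    # Reusing the parameter name (driver = get_driver_by_name(driver)) is flagged:
    # Incompatible types in assignment (expression has type "FakeDriver", variable has type "str")
    driver_object = get_driver_by_name(driver)
    return driver_object.create(outfile)
```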
6 changes: 4 additions & 2 deletions tests/test_utils.py
@@ -1,3 +1,5 @@
from pathlib import Path

import pytest
from hyp3lib import DemError

@@ -11,14 +13,14 @@ def test_upload_file_to_s3_credentials_missing(tmp_path, monkeypatch):
m.setenv('PUBLISH_SECRET_ACCESS_KEY', 'publish_access_key_secret')
msg = 'Please provide.*'
with pytest.raises(ValueError, match=msg):
utils.upload_file_to_s3_with_publish_access_keys('file.zip', 'myBucket')
utils.upload_file_to_s3_with_publish_access_keys(Path('file.zip'), 'myBucket')

with monkeypatch.context() as m:
m.setenv('PUBLISH_ACCESS_KEY_ID', 'publish_access_key_id')
m.delenv('PUBLISH_SECRET_ACCESS_KEY', raising=False)
msg = 'Please provide.*'
with pytest.raises(ValueError, match=msg):
utils.upload_file_to_s3_with_publish_access_keys('file.zip', 'myBucket')
utils.upload_file_to_s3_with_publish_access_keys(Path('file.zip'), 'myBucket')


def test_find_jpl_parameter_info():
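The `Path('file.zip')` change suggests `upload_file_to_s3_with_publish_access_keys` annotates its file argument as a `Path` (an assumption; the signature is not part of this diff), so passing a bare string is now a mypy error even though it may work at runtime. A minimal sketch of the mismatch with a stand-in signature:

```python
from pathlib import Path


def upload_file_to_s3_with_publish_access_keys(path_to_file: Path, bucket: str) -> None:
    # Stand-in for the real helper; assumed here to take a Path.
    print(f'would upload {path_to_file} to {bucket}')


upload_file_to_s3_with_publish_access_keys('file.zip', 'myBucket')        # mypy: incompatible type "str"; expected "Path"
upload_file_to_s3_with_publish_access_keys(Path('file.zip'), 'myBucket')  # OK
```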
