Skip to content

Commit

Permalink
Merge pull request #647 from dbekaert/dev
Browse files Browse the repository at this point in the history
v0.5.1
  • Loading branch information
cmarshak committed May 6, 2024
2 parents c1ef3f1 + a7ad776 commit 223d862
Show file tree
Hide file tree
Showing 7 changed files with 57 additions and 36 deletions.
4 changes: 4 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,10 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [PEP 440](https://www.python.org/dev/peps/pep-0440/)
and uses [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## [0.5.1]
### Changed
* Use hyp3-lib v3* to download orbits to be able to distribute load across ESA and ASF. Can be easily swapped out for `sentineleof` in a future release.

## [0.5.0]
### Added
* A `--input-bucket-prefix` argument to `calcDelaysGUNW` which will allow RAiDER to process ARIA GUNW products under one prefix and upload the final products to another prefix provided by the `--bucket-prefix` argument.
Expand Down
1 change: 1 addition & 0 deletions environment.yml
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@ dependencies:
- ecmwf-api-client
- h5netcdf
- h5py
- hyp3lib>=3,<4
- herbie-data
- isce3>=0.15.0
- jsonschema==3.2.0 # this is for ASF DAAC ingest schema validation
Expand Down
16 changes: 8 additions & 8 deletions test/test_GUNW.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@
import unittest
from pathlib import Path

import eof.download
import jsonschema
import numpy as np
import pandas as pd
Expand All @@ -22,7 +21,7 @@
check_weather_model_availability,
)
from RAiDER.cli.raider import calcDelaysGUNW
from RAiDER.models.customExceptions import *
from RAiDER.models.customExceptions import NoWeatherModelData


def compute_transform(lats, lons):
Expand Down Expand Up @@ -282,7 +281,7 @@ def test_azimuth_timing_interp_against_center_time_interp(weather_model_name: st
])

mocker.patch(
'RAiDER.s1_azimuth_timing.get_orbits_from_slc_ids',
'RAiDER.s1_azimuth_timing.get_orbits_from_slc_ids_hyp3lib',
side_effect=[
# For azimuth time
[Path(orbit_dict_for_azimuth_time_test['reference'])],
Expand Down Expand Up @@ -313,11 +312,12 @@ def test_azimuth_timing_interp_against_center_time_interp(weather_model_name: st
# Calls 4 times for azimuth time and 4 times for center time
assert RAiDER.processWM.prepareWeatherModel.call_count == 8
# Only calls once each ref and sec list of slcs
assert RAiDER.s1_azimuth_timing.get_orbits_from_slc_ids.call_count == 2
assert RAiDER.s1_azimuth_timing.get_orbits_from_slc_ids_hyp3lib.call_count == 2
# Only calls for azimuth timing: once for ref and sec
assert RAiDER.s1_azimuth_timing.get_slc_id_from_point_and_time.call_count == 2
## When we return to sentineleof
# Once for center-time and azimuth-time each
assert eof.download.download_eofs.call_count == 2
# assert eof.download.download_eofs.call_count == 2

for ifg_type in ['reference', 'secondary']:
for var in ['troposphereHydrostatic', 'troposphereWet']:
Expand Down Expand Up @@ -430,7 +430,7 @@ def test_provenance_metadata_for_tropo_group(weather_model_name: str,
# azimuth-time
[Path(orbit_dict_for_azimuth_time_test['reference'])],
]
mocker.patch('eof.download.download_eofs',
mocker.patch('RAiDER.s1_azimuth_timing.get_orbits_from_slc_ids_hyp3lib',
side_effect=side_effect)

# These outputs are not needed since the orbits are specified above
Expand All @@ -444,7 +444,7 @@ def test_provenance_metadata_for_tropo_group(weather_model_name: str,
])

mocker.patch(
'RAiDER.s1_azimuth_timing.get_orbits_from_slc_ids',
'RAiDER.s1_azimuth_timing.get_orbits_from_slc_ids_hyp3lib',
side_effect=[
# For azimuth time
[Path(orbit_dict_for_azimuth_time_test['reference'])],
Expand Down Expand Up @@ -546,7 +546,7 @@ def test_GUNW_workflow_fails_if_a_download_fails(gunw_azimuth_test, orbit_dict_f
])

mocker.patch(
'RAiDER.s1_azimuth_timing.get_orbits_from_slc_ids',
'RAiDER.s1_azimuth_timing.get_orbits_from_slc_ids_hyp3lib',
side_effect=[
# For azimuth time
[Path(orbit_dict_for_azimuth_time_test['reference'])],
Expand Down
12 changes: 6 additions & 6 deletions test/test_s1_time_grid.py
Original file line number Diff line number Diff line change
Expand Up @@ -88,7 +88,7 @@ def test_s1_timing_array_wrt_slc_center_time(gunw_azimuth_test: Path,

# Azimuth time grid
mocker.patch(
'RAiDER.s1_azimuth_timing.get_orbits_from_slc_ids',
'RAiDER.s1_azimuth_timing.get_orbits_from_slc_ids_hyp3lib',
side_effect=[
[Path(orbit_dict_for_azimuth_time_test[ifg_type])],
]
Expand All @@ -105,7 +105,7 @@ def test_s1_timing_array_wrt_slc_center_time(gunw_azimuth_test: Path,
assert np.all(abs_diff < 40)

assert RAiDER.s1_azimuth_timing._asf_query.call_count == 1
assert RAiDER.s1_azimuth_timing.get_orbits_from_slc_ids.call_count == 1
assert RAiDER.s1_azimuth_timing.get_orbits_from_slc_ids_hyp3lib.call_count == 1


@pytest.mark.parametrize('ifg_type', ['reference', 'secondary'])
Expand Down Expand Up @@ -135,7 +135,7 @@ def test_s1_timing_array_wrt_variance(gunw_azimuth_test: Path,

# Azimuth time grid
mocker.patch(
'RAiDER.s1_azimuth_timing.get_orbits_from_slc_ids',
'RAiDER.s1_azimuth_timing.get_orbits_from_slc_ids_hyp3lib',
side_effect=[
[Path(orbit_dict_for_azimuth_time_test[ifg_type])],
]
Expand All @@ -151,7 +151,7 @@ def test_s1_timing_array_wrt_variance(gunw_azimuth_test: Path,
assert np.all(std_hgt < 2e-3)

assert RAiDER.s1_azimuth_timing._asf_query.call_count == 1
assert RAiDER.s1_azimuth_timing.get_orbits_from_slc_ids.call_count == 1
assert RAiDER.s1_azimuth_timing.get_orbits_from_slc_ids_hyp3lib.call_count == 1


def test_n_closest_dts():
Expand Down Expand Up @@ -341,7 +341,7 @@ def test_duplicate_orbits(mocker, orbit_paths_for_duplicate_orbit_xml_test):
side_effect=[['slc_id_0', 'slc_id_1', 'slc_id_2', 'slc_id_3']])

mocker.patch(
'RAiDER.s1_azimuth_timing.get_orbits_from_slc_ids',
'RAiDER.s1_azimuth_timing.get_orbits_from_slc_ids_hyp3lib',
side_effect=[
[Path(o_path) for o_path in orbit_paths_for_duplicate_orbit_xml_test],
]
Expand All @@ -352,7 +352,7 @@ def test_duplicate_orbits(mocker, orbit_paths_for_duplicate_orbit_xml_test):
assert time_grid.shape == (len(hgt), len(lat), len(lon))

assert RAiDER.s1_azimuth_timing.get_slc_id_from_point_and_time.call_count == 1
assert RAiDER.s1_azimuth_timing.get_orbits_from_slc_ids.call_count == 1
assert RAiDER.s1_azimuth_timing.get_orbits_from_slc_ids_hyp3lib.call_count == 1


def test_get_times_for_az():
Expand Down
23 changes: 6 additions & 17 deletions tools/RAiDER/aria/prepFromGUNW.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@
from RAiDER.models import credentials
from RAiDER.models.hrrr import HRRR_CONUS_COVERAGE_POLYGON, AK_GEO, check_hrrr_dataset_availability
from RAiDER.s1_azimuth_timing import get_times_for_azimuth_interpolation
from RAiDER.s1_orbits import download_eofs
from RAiDER.s1_orbits import get_orbits_from_slc_ids_hyp3lib

## cube spacing in degrees for each model
DCT_POSTING = {'HRRR': 0.05, 'HRES': 0.10, 'GMAO': 0.10, 'ERA5': 0.10, 'ERA5T': 0.10, 'MERRA2': 0.1}
Expand Down Expand Up @@ -262,20 +262,12 @@ def get_orbit_file(self):

ds = xr.open_dataset(self.path_gunw, group=f'{group}')
slcs = ds['L1InputGranules']
nslcs = slcs.count().item()
# Convert to list of strings
slcs_lst = [slc for slc in slcs.data.tolist() if slc]
# Remove .zip from the granule ids included in this field
slcs_lst = list(map(lambda slc: slc.replace('.zip', ''), slcs_lst))

if nslcs == 1:
slc = slcs.item()
else:
for j in range(nslcs):
slc = slcs.data[j]
if slc:
break

sat = slc.split('_')[0]
dt = datetime.strptime(f'{self.dates[0]}T{self.mid_time}', '%Y%m%dT%H:%M:%S')

path_orb = download_eofs([dt], [sat], str(orbit_dir))
path_orb = get_orbits_from_slc_ids_hyp3lib(slcs_lst)

return [str(o) for o in path_orb]

Expand Down Expand Up @@ -352,9 +344,6 @@ def make_cube(self):
lats = np.arange(lat_st, lat_en, DCT_POSTING[self.wmodel])
lons = np.arange(lon_st, lon_en, DCT_POSTING[self.wmodel])

S, N = lats.min(), lats.max()
W, E = lons.min(), lons.max()

ds = xr.Dataset(coords={'latitude': lats, 'longitude': lons, 'heights': self.heights})
dst_cube = os.path.join(self.out_dir, f'GeoCube_{self.name}.nc')
ds.to_netcdf(dst_cube)
Expand Down
6 changes: 3 additions & 3 deletions tools/RAiDER/s1_azimuth_timing.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
isce = None

from RAiDER.losreader import get_orbit as get_isce_orbit
from RAiDER.s1_orbits import get_orbits_from_slc_ids
from RAiDER.s1_orbits import get_orbits_from_slc_ids_hyp3lib


def _asf_query(point: Point,
Expand Down Expand Up @@ -89,7 +89,7 @@ def get_azimuth_time_grid(lon_mesh: np.ndarray,
Technically, this is "sensor neutral" since it uses an orb object.
'''
if isce is None:
raise ImportError(f'isce3 is required for this function. Use conda to install isce3`')
raise ImportError('isce3 is required for this function. Use conda to install isce3`')

num_iteration = 100
residual_threshold = 1.0e-7
Expand Down Expand Up @@ -183,7 +183,7 @@ def get_s1_azimuth_time_grid(lon: np.ndarray,
dtype='datetime64[ms]')
return az_arr

orb_files = get_orbits_from_slc_ids(slc_ids)
orb_files = get_orbits_from_slc_ids_hyp3lib(slc_ids)
orb_files = [str(of) for of in orb_files]

orb = get_isce_orbit(orb_files, dt, pad=600)
Expand Down
31 changes: 29 additions & 2 deletions tools/RAiDER/s1_orbits.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
from typing import List, Optional

import eof.download
from hyp3lib import get_orb
from RAiDER.logger import logger


Expand Down Expand Up @@ -53,7 +54,7 @@ def ensure_orbit_credentials() -> Optional[int]:
username = os.environ.get('EARTHDATA_USERNAME')
password = os.environ.get('EARTHDATA_PASSWORD')
if username is None or password is None:
raise ValueError(f'Credentials are required for fetching orbit data from s1qc.asf.alaska.edu!\n'
raise ValueError('Credentials are required for fetching orbit data from s1qc.asf.alaska.edu!\n'
'Either add your credentials to ~/.netrc or set the EARTHDATA_USERNAME and'
' EARTHDATA_PASSWORD environment variables.')

Expand All @@ -78,6 +79,32 @@ def get_orbits_from_slc_ids(slc_ids: List[str], directory=Path.cwd()) -> List[Pa
return orb_files


def get_orbits_from_slc_ids_hyp3lib(
    slc_ids: list, orbit_directory: Optional[str] = None
) -> list:
    """Download Sentinel-1 orbit files for the given SLC granule ids via hyp3lib.

    Reference: https://github.com/ACCESS-Cloud-Based-InSAR/DockerizedTopsApp/blob/dev/isce2_topsapp/localize_orbits.py#L23

    Parameters
    ----------
    slc_ids : list
        Sentinel-1 SLC granule ids; one orbit file is fetched per id.
    orbit_directory : str, optional
        Directory to download orbit files into. Defaults to './orbits'
        (created if it does not already exist).

    Returns
    -------
    list
        Sorted, de-duplicated orbit file paths as returned by
        hyp3lib's ``downloadSentinelOrbitFile``.
    """
    # Populates env variables to netrc as required for sentineleof
    _ = ensure_orbit_credentials()
    esa_username, _, esa_password = netrc.netrc().authenticators(ESA_CDSE_HOST)
    esa_credentials = esa_username, esa_password

    orbit_dir = Path(orbit_directory or 'orbits')
    orbit_dir.mkdir(exist_ok=True)

    orbit_fetcher = get_orb.downloadSentinelOrbitFile

    orbits = []
    for scene in slc_ids:
        # providers order means ASF is tried first, then ESA's CDSE as fallback.
        orbit_file, _ = orbit_fetcher(scene, str(orbit_dir), esa_credentials=esa_credentials, providers=('ASF', 'ESA'))
        orbits.append(orbit_file)

    # De-duplicate: scenes from the same pass share a single orbit file.
    return sorted(set(orbits))


def download_eofs(dts: list, missions: list, save_dir: str):
"""Wrapper around sentineleof to first try downloading from ASF and fall back to CDSE"""
_ = ensure_orbit_credentials()
Expand All @@ -90,7 +117,7 @@ def download_eofs(dts: list, missions: list, save_dir: str):
try:
orb_file = eof.download.download_eofs(dt, mission, save_dir=save_dir, force_asf=True)
except:
logger.error(f'Could not download orbit from ASF, trying ESA...')
logger.error('Could not download orbit from ASF, trying ESA...')
orb_file = eof.download.download_eofs(dt, mission, save_dir=save_dir, force_asf=False)

orb_file = orb_file[0] if isinstance(orb_file, list) else orb_file
Expand Down

0 comments on commit 223d862

Please sign in to comment.