diff --git a/python/packages/nisar/products/insar/GUNW_writer.py b/python/packages/nisar/products/insar/GUNW_writer.py index 50ba98485..7bba3c710 100644 --- a/python/packages/nisar/products/insar/GUNW_writer.py +++ b/python/packages/nisar/products/insar/GUNW_writer.py @@ -173,7 +173,7 @@ def add_grids_to_hdf5(self): #unwrapped dataset parameters as tuples in the following #order: dataset name, data type, description, and units unwrapped_ds_params = [ - ("coherenceMagnigtude", np.float32, + ("coherenceMagnitude", np.float32, f"Coherence magnitude between {pol} layers", "unitless"), ("connectedComponents", np.uint32, @@ -256,7 +256,7 @@ def add_grids_to_hdf5(self): ("alongTrackOffset", np.float32, "Along track offset", "meters"), - ("crossCorrelationPeak", np.float32, + ("correlationSurfacePeak", np.float32, "Normalized cross-correlation surface peak", "unitless"), ("slantRangeOffset", np.float32, diff --git a/python/packages/nisar/products/insar/InSAR_L2_writer.py b/python/packages/nisar/products/insar/InSAR_L2_writer.py index 64b8c523a..1b9d7058a 100644 --- a/python/packages/nisar/products/insar/InSAR_L2_writer.py +++ b/python/packages/nisar/products/insar/InSAR_L2_writer.py @@ -141,7 +141,7 @@ def add_geocoding_to_procinfo_params_group(self): proc_pcfg = self.cfg["processing"] iono = proc_pcfg["ionosphere_phase_correction"]["enabled"] wet_tropo = proc_pcfg["troposphere_delay"]["enable_wet_product"] - dry_tropo = proc_pcfg["troposphere_delay"]["enable_dry_product"] + dry_tropo = proc_pcfg["troposphere_delay"]["enable_hydrostatic_product"] # if the troposphere delay is not enabled if not proc_pcfg["troposphere_delay"]["enabled"]: diff --git a/python/packages/nisar/products/insar/InSAR_base_writer.py b/python/packages/nisar/products/insar/InSAR_base_writer.py index c5a928ee8..e87e33462 100644 --- a/python/packages/nisar/products/insar/InSAR_base_writer.py +++ b/python/packages/nisar/products/insar/InSAR_base_writer.py @@ -222,7 +222,7 @@ def 
add_RSLC_to_procinfo_params_group(self, rslc_name: str): np.bool_(rfi_mitigation_flag), ( "Flag to indicate if RFI correction has been applied" - " to reference RSLC" + f" to {rslc_name} RSLC" ), ), mixed_mode, @@ -234,7 +234,23 @@ def add_RSLC_to_procinfo_params_group(self, rslc_name: str): src_param_group = \ rslc_h5py_file_obj[f"{rslc.ProcessingInformationPath}/parameters"] - src_param_group.copy("referenceTerrainHeight", dst_param_group) + reference_terrain_height = "referenceTerrainHeight" + reference_terrain_height_description = \ + f"Reference Terrain Height as a function of time for {rslc_name} RSLC" + if reference_terrain_height in src_param_group: + src_param_group.copy(reference_terrain_height, dst_param_group) + dst_param_group[reference_terrain_height].attrs['description'] = \ + reference_terrain_height_description + else: + ds_param = DatasetParams( + "referenceTerrainHeight", + "None", + reference_terrain_height_description, + { + "units":"meters" + }, + ) + add_dataset_and_attrs(dst_param_group, ds_param) for ds_param in ds_params: add_dataset_and_attrs(dst_param_group, ds_param) diff --git a/python/packages/nisar/products/insar/InSAR_products_info.py b/python/packages/nisar/products/insar/InSAR_products_info.py index f55ee9b2e..1261cc320 100644 --- a/python/packages/nisar/products/insar/InSAR_products_info.py +++ b/python/packages/nisar/products/insar/InSAR_products_info.py @@ -3,7 +3,7 @@ import isce3 ISCE3_VERSION = isce3.__version__ - +PRODUCT_SPECIFICATION_VERSION = "0.9.0" @dataclass class InSARProductsInfo: @@ -33,24 +33,30 @@ class InSARProductsInfo: @classmethod def Base(cls): - return cls("1.0.0", "", "", "", False) + return cls(PRODUCT_SPECIFICATION_VERSION, + "", "", "", False) @classmethod def RIFG(cls): - return cls("1.0.0", "RIFG", "L1", "0.1", False) + return cls(PRODUCT_SPECIFICATION_VERSION, + "RIFG", "L1", "0.1", False) @classmethod def ROFF(cls): - return cls("1.0.0", "ROFF", "L1", "0.1", False) + return 
cls(PRODUCT_SPECIFICATION_VERSION, + "ROFF", "L1", "0.1", False) @classmethod def RUNW(cls): - return cls("1.0.0", "RUNW", "L1", "0.1", False) + return cls(PRODUCT_SPECIFICATION_VERSION, + "RUNW", "L1", "0.1", False) @classmethod def GOFF(cls): - return cls("1.0.0", "GOFF", "L2", "0.1", True) + return cls(PRODUCT_SPECIFICATION_VERSION, + "GOFF", "L2", "0.1", True) @classmethod def GUNW(cls): - return cls("1.0.0", "GUNW", "L2", "0.1", True) + return cls(PRODUCT_SPECIFICATION_VERSION, + "GUNW", "L2", "0.1", True) diff --git a/python/packages/nisar/products/insar/ROFF_writer.py b/python/packages/nisar/products/insar/ROFF_writer.py index fe7d35f59..03759eed5 100644 --- a/python/packages/nisar/products/insar/ROFF_writer.py +++ b/python/packages/nisar/products/insar/ROFF_writer.py @@ -351,3 +351,51 @@ def _add_datasets_to_pixel_offset_group(self): ds_description, units=ds_unit, ) + + def add_swaths_to_hdf5(self): + """ + Add swaths to the HDF5 + """ + super().add_swaths_to_hdf5() + + # pull the offset parameters + is_roff, margin, rg_start, az_start,\ + rg_skip, az_skip, rg_search, az_search,\ + rg_chip, az_chip, ovs_factor = self._pull_pixel_offsets_params() + + for freq, pol_list, _ in get_cfg_freq_pols(self.cfg): + # Create the swath group + swaths_freq_group_name = ( + f"{self.group_paths.SwathsPath}/frequency{freq}" + ) + swaths_freq_group = self.require_group(swaths_freq_group_name) + + rslc_freq_group = self.ref_h5py_file_obj[ + f"{self.ref_rslc.SwathPath}/frequency{freq}" + ] + + # add scene center parameters + scene_center_params = [ + DatasetParams( + "sceneCenterAlongTrackSpacing", + rslc_freq_group["sceneCenterAlongTrackSpacing"][()] + * az_skip, + ( + "Nominal along track spacing in meters between" + " consecutive lines near mid swath of the ROFF image" + ), + {"units": "meters"}, + ), + DatasetParams( + "sceneCenterGroundRangeSpacing", + rslc_freq_group["sceneCenterGroundRangeSpacing"][()] + * rg_skip, + ( + "Nominal ground range spacing in meters between" 
+ " consecutive pixels near mid swath of the ROFF image" + ), + {"units": "meters"}, + ), + ] + for ds_param in scene_center_params: + add_dataset_and_attrs(swaths_freq_group, ds_param) \ No newline at end of file diff --git a/python/packages/nisar/products/insar/__init__.py b/python/packages/nisar/products/insar/__init__.py index ef36c8d95..dd047d86b 100644 --- a/python/packages/nisar/products/insar/__init__.py +++ b/python/packages/nisar/products/insar/__init__.py @@ -4,4 +4,4 @@ from .InSAR_L1_writer import L1InSARWriter from .RIFG_writer import RIFGWriter from .ROFF_writer import ROFFWriter -from .RUNW_writer import RUNWWriter \ No newline at end of file +from .RUNW_writer import RUNWWriter diff --git a/python/packages/nisar/products/insar/product_paths.py b/python/packages/nisar/products/insar/product_paths.py index 6f7316d33..8088a87cb 100644 --- a/python/packages/nisar/products/insar/product_paths.py +++ b/python/packages/nisar/products/insar/product_paths.py @@ -116,7 +116,6 @@ class RIFGGroupsPaths(L1GroupsPaths): """ ProductName: str = "RIFG" - @dataclass class RUNWGroupsPaths(L1GroupsPaths): """ @@ -129,7 +128,6 @@ class RUNWGroupsPaths(L1GroupsPaths): """ ProductName: str = "RUNW" - @dataclass class ROFFGroupsPaths(L1GroupsPaths): """ @@ -142,7 +140,6 @@ class ROFFGroupsPaths(L1GroupsPaths): """ ProductName: str = "ROFF" - @dataclass class GUNWGroupsPaths(L2GroupsPaths): """ @@ -155,7 +152,6 @@ class GUNWGroupsPaths(L2GroupsPaths): """ ProductName: str = "GUNW" - @dataclass class GOFFGroupsPaths(L2GroupsPaths): """ diff --git a/python/packages/nisar/workflows/crossmul.py b/python/packages/nisar/workflows/crossmul.py index 29ed9e0af..43ad19335 100644 --- a/python/packages/nisar/workflows/crossmul.py +++ b/python/packages/nisar/workflows/crossmul.py @@ -10,14 +10,15 @@ import isce3 import journal from osgeo import gdal + gdal.UseExceptions() from nisar.products.readers import SLC -from nisar.workflows import h5_prep +from nisar.workflows import 
prepare_insar_hdf5 from nisar.workflows.compute_stats import compute_stats_real_data +from nisar.workflows.crossmul_runconfig import CrossmulRunConfig from nisar.workflows.helpers import (complex_raster_path_from_h5, get_cfg_freq_pols) -from nisar.workflows.crossmul_runconfig import CrossmulRunConfig from nisar.workflows.yaml_argparse import YamlArgparse @@ -43,7 +44,7 @@ def run(cfg: dict, output_hdf5: str = None, resample_type='coarse', flatten_path = crossmul_params['flatten_path'] if output_hdf5 is None: - output_hdf5 = cfg['product_path_group']['sas_output_file'] + output_hdf5 = str(scratch_path.joinpath('crossmul/product.h5')) # init parameters shared by frequency A and B ref_slc = SLC(hdf5file=ref_hdf5) @@ -214,6 +215,6 @@ def stats_offsets(h5_ds, freq, pol): # get a runconfig dict from command line args crossmul_runconfig = CrossmulRunConfig(args, resample_type) # prepare RIFG HDF5 - out_paths = h5_prep.run(crossmul_runconfig.cfg) + out_paths = prepare_insar_hdf5.run(crossmul_runconfig.cfg) # run crossmul run(crossmul_runconfig.cfg, out_paths['RIFG'], resample_type) diff --git a/python/packages/nisar/workflows/geocode_insar.py b/python/packages/nisar/workflows/geocode_insar.py index 7e69c0c03..3ed7eb687 100644 --- a/python/packages/nisar/workflows/geocode_insar.py +++ b/python/packages/nisar/workflows/geocode_insar.py @@ -3,28 +3,25 @@ """ collection of functions for NISAR geocode workflow """ - -from enum import Enum import pathlib import time +from enum import Enum import h5py -import journal import isce3 +import journal import numpy as np -from osgeo import gdal - from nisar.products.readers import SLC from nisar.products.readers.orbit import load_orbit_from_xml -from nisar.workflows import h5_prep +from nisar.workflows import prepare_insar_hdf5 +from nisar.workflows.compute_stats import (compute_layover_shadow_water_stats, + compute_stats_real_data) from nisar.workflows.geocode_corrections import get_az_srg_corrections +from 
nisar.workflows.geocode_insar_runconfig import GeocodeInsarRunConfig from nisar.workflows.h5_prep import add_radar_grid_cubes_to_hdf5 from nisar.workflows.helpers import get_cfg_freq_pols -from nisar.workflows.geocode_insar_runconfig import \ - GeocodeInsarRunConfig from nisar.workflows.yaml_argparse import YamlArgparse -from nisar.workflows.compute_stats import compute_stats_real_data, \ - compute_layover_shadow_water_stats +from osgeo import gdal class InputProduct(Enum): @@ -87,7 +84,7 @@ def get_shadow_input_output(scratch_path, freq, dst_freq_path): input_raster = isce3.io.Raster(str(raster_ref)) # access the HDF5 dataset for layover shadow mask - dataset_path = f"{dst_freq_path}/interferogram/unwrapped/mask" + dataset_path = f"{dst_freq_path}/unwrappedInterferogram/mask" return input_raster, dataset_path @@ -133,9 +130,9 @@ def get_ds_input_output(src_freq_path, dst_freq_path, pol, input_hdf5, # RUNW and RIFG product if input_product_type is InputProduct.RUNW: - dst_group_path = f'{dst_freq_path}/interferogram/unwrapped/{pol}' + dst_group_path = f'{dst_freq_path}/unwrappedInterferogram/{pol}' elif input_product_type is InputProduct.RIFG: - dst_group_path = f'{dst_freq_path}/interferogram/wrapped/{pol}' + dst_group_path = f'{dst_freq_path}/wrappedInterferogram/{pol}' if input_product_type is InputProduct.ROFF: src_group_path = f'{src_freq_path}/pixelOffsets/{pol}/{off_layer}' @@ -304,7 +301,7 @@ def add_water_to_mask(cfg, freq, geogrid, dst_h5): if water_mask_path is not None: freq_path = f'/science/LSAR/GUNW/grids/frequency{freq}' - mask_h5_path = f'{freq_path}/interferogram/unwrapped/mask' + mask_h5_path = f'{freq_path}/unwrappedInterferogram/mask' water_mask = _project_water_to_geogrid(water_mask_path, geogrid) mask_layer = dst_h5[mask_h5_path][()] @@ -493,7 +490,7 @@ def get_raster_lists(all_geocoded_dataset_flags, pol_out_ds_paths = [] # Append input raster object and output HDF5 paths based on product - if ds_name == "layover_shadow_mask": + if ds_name 
== "mask": raster, path = get_shadow_input_output( scratch_path, freq, dst_freq_path) @@ -606,7 +603,6 @@ def cpu_geocode_rasters(cpu_geo_obj, geo_datasets, desired, freq, pol_list, for raster, ds in zip(geocoded_rasters, geocoded_datasets): compute_stats_real_data(raster, ds) - def cpu_run(cfg, input_hdf5, output_hdf5, input_product_type=InputProduct.RUNW): """ Geocode RUNW products on CPU @@ -638,9 +634,10 @@ def cpu_run(cfg, input_hdf5, output_hdf5, input_product_type=InputProduct.RUNW): unwrap_rg_looks = cfg['processing']['phase_unwrap']['range_looks'] unwrap_az_looks = cfg['processing']['phase_unwrap']['azimuth_looks'] - if unwrap_rg_looks != 1 or unwrap_az_looks != 1: - rg_looks = unwrap_rg_looks - az_looks = unwrap_az_looks + if input_product_type is InputProduct.RUNW: + if unwrap_rg_looks != 1 or unwrap_az_looks != 1: + rg_looks = unwrap_rg_looks + az_looks = unwrap_az_looks if input_product_type is InputProduct.ROFF: geo_datasets = cfg["processing"]["geocode"]["goff_datasets"] @@ -800,7 +797,7 @@ def cpu_run(cfg, input_hdf5, output_hdf5, input_product_type=InputProduct.RUNW): block_size, az_correction=az_correction, srg_correction=srg_correction) - desired = ["layover_shadow_mask"] + desired = ["mask"] geocode_obj.data_interpolator = 'NEAREST' cpu_geocode_rasters(geocode_obj, geo_datasets, desired, freq, pol_list, input_hdf5, dst_h5, @@ -812,7 +809,7 @@ def cpu_run(cfg, input_hdf5, output_hdf5, input_product_type=InputProduct.RUNW): # add water mask to GUNW product add_water_to_mask(cfg, freq, geo_grid, dst_h5) - mask_path = f'/science/LSAR/GUNW/grids/frequency{freq}/interferogram/unwrapped/mask' + mask_path = f'/science/LSAR/GUNW/grids/frequency{freq}/unwrappedInterferogram/mask' mask_ds = dst_h5[mask_path] compute_layover_shadow_water_stats(mask_ds) @@ -983,7 +980,6 @@ def gpu_geocode_rasters(geocoded_dataset_flags, for raster, ds in zip(geocoded_rasters, geocoded_datasets): compute_stats_real_data(raster, ds) - def gpu_run(cfg, input_hdf5, output_hdf5, 
input_product_type=InputProduct.RUNW): """ Geocode RUNW products on GPU @@ -1014,9 +1010,11 @@ def gpu_run(cfg, input_hdf5, output_hdf5, input_product_type=InputProduct.RUNW): unwrap_rg_looks = cfg['processing']['phase_unwrap']['range_looks'] unwrap_az_looks = cfg['processing']['phase_unwrap']['azimuth_looks'] - if unwrap_rg_looks != 1 or unwrap_az_looks != 1: - rg_looks = unwrap_rg_looks - az_looks = unwrap_az_looks + # Only when the input product is RUNW, then we adjust the range and azimuth looks + if input_product_type is InputProduct.RUNW: + if unwrap_rg_looks != 1 or unwrap_az_looks != 1: + rg_looks = unwrap_rg_looks + az_looks = unwrap_az_looks scratch_path = pathlib.Path(cfg['product_path_group']['scratch_path']) @@ -1205,7 +1203,7 @@ def gpu_run(cfg, input_hdf5, output_hdf5, input_product_type=InputProduct.RUNW): lines_per_block, input_hdf5, dst_h5) # Geocode layover shadow mask - desired_geo_dataset_names = ["layover_shadow_mask"] + desired_geo_dataset_names = ["mask"] # Interpolation methods for dataset above interpolation_methods = [isce3.core.DataInterpMethod.NEAREST] @@ -1234,7 +1232,7 @@ def gpu_run(cfg, input_hdf5, output_hdf5, input_product_type=InputProduct.RUNW): # add water mask to GUNW product add_water_to_mask(cfg, freq, geogrid, dst_h5) - mask_path = f'/science/LSAR/GUNW/grids/frequency{freq}/interferogram/unwrapped/mask' + mask_path = f'/science/LSAR/GUNW/grids/frequency{freq}/unwrappedInterferogram/mask' mask_ds = dst_h5[mask_path] compute_layover_shadow_water_stats(mask_ds) @@ -1326,7 +1324,7 @@ def gpu_run(cfg, input_hdf5, output_hdf5, input_product_type=InputProduct.RUNW): # prepare the HDF5 geocode_insar_runconfig.cfg['primary_executable']['product_type'] = 'GUNW' - out_paths = h5_prep.run(geocode_insar_runconfig.cfg) + out_paths = prepare_insar_hdf5.run(geocode_insar_runconfig.cfg) runw_path = geocode_insar_runconfig.cfg['processing']['geocode'][ 'runw_path'] if runw_path is not None: @@ -1349,7 +1347,7 @@ def gpu_run(cfg, input_hdf5, 
output_hdf5, input_product_type=InputProduct.RUNW): # Prepare the GOFF product if enabled: geocode_insar_runconfig.cfg['primary_executable']['product_type'] = 'GOFF' - out_paths = h5_prep.run(geocode_insar_runconfig.cfg) + out_paths = prepare_insar_hdf5.run(geocode_insar_runconfig.cfg) roff_path = geocode_insar_runconfig.cfg['processing']['geocode'][ 'roff_path'] if roff_path is not None: diff --git a/python/packages/nisar/workflows/geocode_insar_runconfig.py b/python/packages/nisar/workflows/geocode_insar_runconfig.py index 98f1d8f23..b281c8487 100644 --- a/python/packages/nisar/workflows/geocode_insar_runconfig.py +++ b/python/packages/nisar/workflows/geocode_insar_runconfig.py @@ -33,7 +33,7 @@ def geocode_insar_cfg_check(cfg): 'ionosphere_phase_screen_uncertainty', 'unwrapped_phase', 'along_track_offset', 'slant_range_offset', 'correlation_surface_peak', - 'layover_shadow_mask'] + 'mask'] goff_datasets = ['along_track_offset', 'snr', 'along_track_offset_variance', 'correlation_surface_peak', 'cross_offset_variance', @@ -89,16 +89,3 @@ def yaml_check(self): # Check geocode_insar runconfig values geocode_insar_cfg_check(self.cfg) - - # multilooks valid? - az_looks = self.cfg['processing']['crossmul']['azimuth_looks'] - if az_looks > 1 and az_looks % 2 == 0: - err_str = f"azimuth looks = {az_looks} not an odd integer." - error_channel.log(err_str) - raise ValueError(err_str) - - rg_looks = self.cfg['processing']['crossmul']['range_looks'] - if rg_looks > 1 and rg_looks % 2 == 0: - err_str = f"range looks = {rg_looks} not an odd integer." 
- error_channel.log(err_str) - raise ValueError(err_str) diff --git a/python/packages/nisar/workflows/h5_prep.py b/python/packages/nisar/workflows/h5_prep.py index 30e39f94b..ec5a5b996 100644 --- a/python/packages/nisar/workflows/h5_prep.py +++ b/python/packages/nisar/workflows/h5_prep.py @@ -481,10 +481,8 @@ def copy_insar_meta(cfg, dst, src_h5, dst_h5, src_meta_path): def prep_ds(cfg, output_hdf5, dst): ''' - Prepare datasets for GSLC, GCOV, - INSAR (GUNW, RIFG, RUNW) workflows + Prepare datasets for GSLC and GCOV ''' - # unpack with h5py.File(output_hdf5, 'a', libver='latest', swmr=True) as dst_h5: # Fork the dataset preparation for GSLC/GCOV and GUNW @@ -538,625 +536,6 @@ def prep_ds_gslc_gcov(cfg, dst, dst_h5): _add_polarization_list(dst_h5, dst, common_parent_path, freq, pol_list) - -def prep_ds_insar(pcfg, dst, dst_h5): - # unpack info - cfg = pcfg['processing'] - common_path = 'science/LSAR' - freq_pols = cfg['input_subset']['list_of_frequencies'] - geogrids =cfg['geocode']['geogrids'] - wrapped_igram_geogrids = cfg['geocode']['wrapped_igram_geogrids'] - iono_args = cfg['ionosphere_phase_correction'] - iono_method = iono_args['spectral_diversity'] - freq_pols_iono = iono_args['list_of_frequencies'] - iono_method_sideband = ['main_side_band', 'main_diff_ms_band'] - is_iono_method_sideband = iono_method in iono_method_sideband - rg_looks = cfg['crossmul']['range_looks'] - az_looks = cfg['crossmul']['azimuth_looks'] - unwrap_rg_looks = cfg['phase_unwrap']['range_looks'] - unwrap_az_looks = cfg['phase_unwrap']['azimuth_looks'] - - # Create list of frequencies - id_group = dst_h5[f'{common_path}/identification'] - descr = "List of frequency layers available in the product" - dset = id_group.create_dataset('listOfFrequencies', - data=np.string_(list(freq_pols.keys()))) - dset.attrs["description"] = descr - - # Open reference SLC - ref_path = pcfg['input_file_group']['reference_rslc_file'] - ref_slc = SLC(hdf5file=ref_path) - with h5py.File(ref_path, 'r', 
libver='latest', swmr=True) as src_h5: - for freq, pol_list, offset_pol_list in get_cfg_freq_pols(pcfg): - # Extract some info from reference RSLC - slc_path = f'{ref_slc.SwathPath}/frequency{freq}' - slc_dset = src_h5[f'{slc_path}/{pol_list[0]}'] - slc_lines, slc_cols = slc_dset.shape - subswaths = src_h5[f'{slc_path}/numberOfSubSwaths'][()] - center_freq = src_h5[f'{slc_path}/processedCenterFrequency'][()] - rg_bw = src_h5[f'{slc_path}/processedRangeBandwidth'][()] - az_bw = src_h5[f'{slc_path}/processedAzimuthBandwidth'][()] - rg_vect = src_h5[f'{slc_path}/slantRange'][()] - az_vect = src_h5[f'{ref_slc.SwathPath}/zeroDopplerTime'][()] - rg_spac = src_h5[f'{slc_path}/slantRangeSpacing'][()] - az_spac = src_h5[f'{ref_slc.SwathPath}/zeroDopplerTimeSpacing'][()] - center_az_spac = src_h5[f'{slc_path}/sceneCenterAlongTrackSpacing'][()] - center_rg_spac = src_h5[f'{slc_path}/sceneCenterGroundRangeSpacing'][()] - - # Create grids or swath group depending on product - grid_swath = 'swaths' if dst in ['RIFG', 'ROFF', - 'RUNW'] else 'grids' - product_path = f'{common_path}/{dst}' - freq_path = f'{product_path}/{grid_swath}/frequency{freq}' - dst_h5[product_path].require_group(grid_swath) - dst_h5[f'{product_path}/{grid_swath}'].create_group( - f'frequency{freq}') - - # Create common parameters for this group - _add_polarization_list(dst_h5, dst, common_path, freq, pol_list) - - descr = "Center frequency of the processed image" - _create_datasets(dst_h5[freq_path], [0], np.float32, - "centerFrequency", descr=descr, units="Hz", - data=center_freq, long_name="center frequency") - descr = "Processed azimuth bandwidth in Hz" - _create_datasets(dst_h5[freq_path], [0], np.float32, - 'azimuthBandwidth', descr=descr, units="Hz", - data=az_bw, long_name="azimuth bandwidth") - _create_datasets(dst_h5[freq_path], [0], np.float32, - 'rangeBandwidth', - descr=descr.replace("azimuth", "range"), - units="Hz", data=rg_bw, - long_name="range bandwidth") - - if dst in ['RIFG', 'RUNW', 
'GUNW']: - descr = "Number of swaths of continuous imagery, due to gaps" - _create_datasets(dst_h5[freq_path], [0], np.uint8, - "numberOfSubSwaths", - descr=descr, units=" ", data=subswaths, - long_name="number of subswaths") - - # Add number of sub-swaths and spacing - if dst in ['RIFG', 'RUNW']: - if dst == 'RUNW' and (unwrap_az_looks > 1 or unwrap_rg_looks > 1): - rg_looks = unwrap_rg_looks - az_looks = unwrap_az_looks - - descr = "Nominal along track spacing in meters between consecutive lines" \ - "near mid-swath of the interferogram image" - _create_datasets(dst_h5[freq_path], [0], np.float32, - "sceneCenterAlongTrackSpacing", - descr=descr, units="meters", data=center_az_spac*az_looks, - long_name="scene center along track spacing") - descr = "Nominal ground range spacing in meters between consecutive pixels" \ - "near mid-swath of the interferogram image" - _create_datasets(dst_h5[freq_path], [0], np.float32, - "sceneCenterGroundRangeSpacing", - descr=descr, units="meters", data=center_rg_spac*rg_looks, - long_name="scene center ground range spacing") - for sub in range(subswaths): - subswath = sub + 1 - if f'validSamplesSubSwath{subswath}' in src_h5[slc_path].keys(): - samples = src_h5[f'{slc_path}/validSamplesSubSwath{subswath}'][()] - else: - samples = src_h5[f'{slc_path}/validSamples'][()] - valid_samples = samples // cfg['crossmul']['range_looks'] - descr = f"First and last valid sample in each line of subswath {subswath}" - _create_datasets(dst_h5[freq_path], valid_samples.shape, - np.uint8, f"validSubSamplesSubSwath{subswath}", - descr=descr, units=" ", data=valid_samples, - long_name=f"valid samples sub swath {subswath}") - elif dst == 'GUNW': - descr = "Nominal spacing in meters between consecutive pixels along X direction" - _create_datasets(dst_h5[freq_path], [0], np.float32, - "xCoordinateSpacing", descr=descr, - units="meters", data=geogrids[freq].spacing_x, - long_name="X coordinate spacing") - _create_datasets(dst_h5[freq_path], [0], 
np.float32, - "yCoordinateSpacing", descr=descr, - units="meters", data=geogrids[freq].spacing_y, - long_name="Y coordinate spacing") - - # All the products have a pixelOffsets group - offs_path = f'{freq_path}/pixelOffsets' - dst_h5[freq_path].create_group('pixelOffsets') - # Get some offsets parameters - is_roff = cfg['offsets_product']['enabled'] - margin = get_off_params(cfg, 'margin', is_roff) - rg_gross = get_off_params(cfg, 'gross_offset_range', is_roff) - az_gross = get_off_params(cfg, 'gross_offset_azimuth', is_roff) - rg_start = get_off_params(cfg, 'start_pixel_range', is_roff) - az_start = get_off_params(cfg, 'start_pixel_azimuth', is_roff) - rg_skip = get_off_params(cfg, 'skip_range', is_roff) - az_skip = get_off_params(cfg, 'skip_azimuth', is_roff) - rg_search = get_off_params(cfg, 'half_search_range', is_roff, - pattern='layer', get_min=True) - az_search = get_off_params(cfg, 'half_search_azimuth', is_roff, - pattern='layer', get_min=True) - rg_chip = get_off_params(cfg, 'window_range', is_roff, - pattern='layer', get_min=True) - az_chip = get_off_params(cfg, 'window_azimuth', is_roff, - pattern='layer', get_min=True) - # Adjust margin - margin = max(margin, np.abs(rg_gross), np.abs(az_gross)) - # Compute spacing of offset grid - if dst in ['RIFG', 'RUNW', 'ROFF']: - descr = 'Slant range spacing of the pixel offset grid' - _create_datasets(dst_h5[offs_path], [0], np.float32, - "slantRangeSpacing", descr=descr, units="meters", - data=rg_spac * rg_skip, - long_name="slant range spacing") - _create_datasets(dst_h5[offs_path], [0], np.float32, - "zeroDopplerTimeSpacing", - descr=descr.replace('Slant range', 'Along-track'), - units="seconds", data=az_spac * az_skip, - long_name="along-track spacing") - # Compute slant range/azimuth vectors of offset grids - if rg_start is None: - rg_start = margin + rg_search - if az_start is None: - az_start = margin + az_search - - descr = 'CF compliant dimension associated with slant range' - 
_create_datasets(dst_h5[offs_path], [0], np.float32, - 'slantRange', descr=descr, units='meters', - data=rg_vect[rg_start::rg_skip], - long_name="slant range") - _create_datasets(dst_h5[offs_path], [0], np.float32, - 'zeroDopplerTime', - descr=descr.replace('slant range', 'azimuth time'), - units='seconds', data=az_vect[az_start::az_skip], - long_name="zero doppler time") - else: - set_get_geo_info(dst_h5, offs_path, geogrids[freq]) - - # prepare offset products - for pol in offset_pol_list: - pol_path = f'{offs_path}/{pol}' - dst_h5[offs_path].create_group(f'{pol}') - off_length = get_off_params(cfg, 'offset_length', is_roff) - off_width = get_off_params(cfg, 'offset_width', is_roff) - if off_length is None: - margin_az = 2*margin + 2*az_search + az_chip - off_length = (slc_lines - margin_az) // az_skip - if off_width is None: - margin_rg = 2*margin + 2*rg_search + rg_chip - off_width = (slc_cols - margin_rg) // rg_skip - off_shape = (off_length, off_width) - if dst in ['GUNW', 'GOFF']: - off_shape = (geogrids[freq].length, geogrids[freq].width) - if dst in ['GUNW', 'RUNW', 'RIFG']: - descr = f"Along track offset for {pol} layer" - descr_rg = descr.replace("Along track", "Slant range"), - _create_datasets(dst_h5[pol_path], off_shape, - np.float32, 'alongTrackOffset', - descr=descr, units="meters", - long_name='along track offset') - _create_datasets(dst_h5[pol_path], off_shape, - np.float32,'slantRangeOffset', - descr=descr_rg, units="meters", - long_name='slant range offset') - descr = "Normalized cross-correlation surface peak" - _create_datasets(dst_h5[pol_path], off_shape, - np.float32, 'correlationSurfacePeak', - descr=descr, units=" ", - long_name='correlation surface peak') - else: - for key in cfg['offsets_product'].keys(): - if key.startswith('layer'): - lay_path = f'{pol_path}/{key}' - dst_h5[pol_path].create_group(f'{key}') - descr = f"Along track offset for {pol} layer" - descr_rg = descr.replace("Along track", "Slant range"), - 
_create_datasets(dst_h5[lay_path], off_shape, - np.float32, 'alongTrackOffset', - descr=descr, units="meters", - long_name='along track offset') - _create_datasets(dst_h5[lay_path], off_shape, - np.float32, 'slantRangeOffset', - descr=descr_rg, units="meters", - long_name='slant range offset') - descr='Along-track pixel offset variance' - _create_datasets(dst_h5[lay_path], off_shape, - np.float32, 'alongTrackOffsetVariance', - descr=descr, units=" ", - long_name='along-track offset variance') - _create_datasets(dst_h5[lay_path], off_shape, - np.float32, 'slantRangeOffsetVariance', - descr=descr.replace('Along-track', 'Slant range'), - units=" ", long_name='slant range offset variance') - descr = "Normalized surface correlation peak" - _create_datasets(dst_h5[lay_path], off_shape, - np.float32,'correlationSurfacePeak', - descr=descr, units=" ", - long_name='correlation surface peak') - descr = "Off-diagonal term of the pixel offsets covariance matrix" - _create_datasets(dst_h5[lay_path], off_shape, - np.float32, 'crossOffsetVariance', - descr=descr, units=" ", - long_name='cross offset variance') - descr = "Pixel offsets signal-to-noise ration" - _create_datasets(dst_h5[lay_path], off_shape, - np.float32, 'snr', - descr=descr, units=" ", - long_name='signal-to-noise ratio') - - # Form products inside interferogram - igram_shape = (slc_lines // az_looks, slc_cols // rg_looks) - grids_val = 'None' - if dst in ['GUNW']: - igram_shape = (geogrids[freq].length, geogrids[freq].width) - wrapped_igram_shape = (wrapped_igram_geogrids[freq].length, - wrapped_igram_geogrids[freq].width) - grids_val = 'projection' - - if dst in ['RIFG', 'RUNW', 'GUNW']: - igram_path = f'{freq_path}/interferogram' - dst_h5[freq_path].create_group('interferogram') - - # Create slant range/azimuth X/Y vectors - if dst in ['RUNW', 'RIFG']: - rg_idx = np.arange((len(rg_vect) // rg_looks) * rg_looks)[ - ::rg_looks] + int(rg_looks / 2) - az_idx = np.arange((len(az_vect) // az_looks) * az_looks)[ - 
::az_looks] + int(az_looks / 2) - descr = "CF compliant dimension associated with slant range" - _create_datasets(dst_h5[igram_path], [0], np.float32, - 'slantRange', descr=descr, units='meters', - data=rg_vect[rg_idx], - long_name="slant range") - _create_datasets(dst_h5[igram_path], [0], np.float32, - 'zeroDopplerTime', - descr=descr.replace('slant range', - 'azimuth time'), - units='seconds', data=az_vect[az_idx], - long_name="zero doppler time") - descr = "Slant range spacing of grid. Same as difference between \ - consecutive samples in slantRange array" - _create_datasets(dst_h5[igram_path], [0], np.float64, - 'slantRangeSpacing', - descr=descr, units="meters", - data=rg_looks * rg_spac, - long_name="slant range spacing") - descr = "Time interval in the along track direction for raster layers. " \ - "This is the same as the spacing between consecutive entries in " \ - "zeroDopplerTime array" - _create_datasets(dst_h5[igram_path], [0], np.float32, - 'zeroDopplerTimeSpacing', descr=descr, units="seconds", - data=az_looks * az_spac, - long_name="zero doppler time spacing") - elif dst in ['GUNW']: - - # Create the group for the wrapped and unwrapped interferogram - wrapped_igram_path = f'{igram_path}/wrapped' - unwrapped_igram_path = f'{igram_path}/unwrapped' - - dst_h5[igram_path].create_group('wrapped') - dst_h5[igram_path].create_group('unwrapped') - - # Set the geogrids - set_get_geo_info(dst_h5, wrapped_igram_path, wrapped_igram_geogrids[freq]) - set_get_geo_info(dst_h5, unwrapped_igram_path, geogrids[freq]) - - # The igram_path will be replaced by the unwrapped_igram_path variable - # to ensure the iono, water mask, and layover layers will be stored under the unwrapped - # interferogram group - igram_path = unwrapped_igram_path - - # Generate the layover/shadow and water masks - descr = f"Masks for frequency{freq} layer, 1 - Radar Shadow. 2 - Radar Layover. 3 - Shadow+Layover "\ - "4 - Water. 5 - Water+Shadow. 6 - Water+Layover. 
7 - Water+Layover+Shadow" - _create_datasets(dst_h5[igram_path], igram_shape, np.byte, - 'mask', - descr=descr, units=" ", grids=grids_val, - long_name='Byte layer with flags for various channels (e.g. layover/shadow, data quality)') - - # Create datasets inside the interferogram group - for pol in pol_list: - pol_path = f'{igram_path}/{pol}' - dst_h5[igram_path].create_group(f'{pol}') - - if dst in ['GUNW']: - wrapped_igram_pol_path = f'{wrapped_igram_path}/{pol}' - dst_h5[wrapped_igram_path].create_group(f'{pol}') - - # Wrapped Interferogram Coherence - descr = f"Coherence magnitude between {pol} layers" - _create_datasets(dst_h5[wrapped_igram_pol_path], wrapped_igram_shape, np.float32, - 'coherenceMagnitude', descr=descr, units=" ", - grids=grids_val, - long_name='coherence magnitude') - - descr = f"Interferogram between {pol} layers" - _create_datasets(dst_h5[wrapped_igram_pol_path], wrapped_igram_shape, - np.complex64, - "wrappedInterferogram", - chunks=(128, 128), - descr=descr, units="radians", - long_name='complex wrapped phase') - - if dst in ['RIFG']: - descr = f"Interferogram between {pol} layers" - _create_datasets(dst_h5[pol_path], igram_shape, - np.complex64, - "wrappedInterferogram", - descr=descr, units="radians", - long_name='wrapped phase') - _create_datasets(dst_h5[pol_path], igram_shape, np.float32, - 'coherenceMagnitude', descr=descr, units=" ", - grids=grids_val, - long_name='coherence magnitude') - elif dst in ['RUNW']: - # Check if we need to further multilook the wrapped interferogram - # in RIFG. 
If that is the case, igram_shape needs to be updated - if (unwrap_rg_looks > 1) or (unwrap_az_looks > 1): - rg_looks = unwrap_rg_looks - az_looks = unwrap_az_looks - - igram_shape = (slc_lines // az_looks, - slc_cols // rg_looks) - - descr = f"Unwrapped interferogram between {pol} layers" - _create_datasets(dst_h5[pol_path], igram_shape, np.float32, - 'unwrappedPhase', descr=descr, - units="radians", grids=grids_val, - long_name='unwrapped phase') - descr = f"Connected components for {pol} layer" - _create_datasets(dst_h5[pol_path], igram_shape, np.uint32, - 'connectedComponents', descr=descr, units=" ", - grids=grids_val, - long_name='connected components') - _create_datasets(dst_h5[pol_path], igram_shape, np.float32, - 'coherenceMagnitude', descr=descr, units=" ", - grids=grids_val, - long_name='coherence magnitude') - elif dst in ['GUNW']: - descr = f"Unwrapped interferogram between {pol} layers" - _create_datasets(dst_h5[pol_path], igram_shape, np.float32, - 'unwrappedPhase', descr=descr, - units="radians", grids=grids_val, - long_name='unwrapped phase') - descr = f"Connected components for {pol} layer" - _create_datasets(dst_h5[pol_path], igram_shape, np.uint32, - 'connectedComponents', descr=descr, units=" ", - grids=grids_val, - long_name='connected components') - _create_datasets(dst_h5[pol_path], igram_shape, np.float32, - 'coherenceMagnitude', descr=descr, units=" ", - grids=grids_val, - long_name='coherence magnitude') - - - if iono_args['enabled'] and dst in ['RUNW', 'GUNW']: - pol_list_iono = freq_pols_iono['A'] - # polarizations for ionosphere can be independent of insar pol - for pol_iono in pol_list_iono: - if pol_iono not in dst_h5[igram_path]: - dst_h5[igram_path].create_group(f'{pol_iono}') - pol_iono_path = f'{igram_path}/{pol_iono}' - - descr = f"Ionosphere phase screen" - _create_datasets(dst_h5[pol_iono_path], - igram_shape, np.float32, - 'ionospherePhaseScreen', - chunks=(128, 128), - descr=descr, units="radians", - grids=grids_val, - 
long_name='ionosphere \ - phase screen') - - descr = f"Uncertainty of ionosphere phase screen" - _create_datasets( - dst_h5[pol_iono_path], - igram_shape, np.float32, - 'ionospherePhaseScreenUncertainty', - chunks=(128, 128), - descr=descr, units="radians", - grids=grids_val, - long_name='ionosphere phase screen uncertainty') - - # Allocate datasets in metadata - cal_path = f'{product_path}/metadata/calibrationInformation' - proc_path = f'{product_path}/metadata/processingInformation/parameters' - grid_path = f'{product_path}/metadata/geolocationGrid' - if dst in ['GOFF', 'GUNW']: - grid_path = grid_path.replace('geolocation', 'radar') - - freq_path = f'{cal_path}/frequency{freq}' - - descr = "Bulk along track time offset used to align reference and secondary image" - _create_datasets(dst_h5[freq_path], [0], np.float32, - "bulkAlongTrackTimeOffset", - descr=descr, units="seconds", data=0, - long_name='bulk along track time offset') - _create_datasets(dst_h5[freq_path], [0], np.float32, - "bulkSlantRangeOffset", - descr=descr.replace('along track time', - 'slant range'), - units="meters", - data=0, long_name='bulk slant range offset') - - if dst in ['RIFG', 'RUNW', 'GUNW']: - for pol in pol_list: - pol_path = f'{freq_path}/{pol}' - descr = "Constant wrapped reference phase used to balance the interferogram" - _create_datasets(dst_h5[pol_path], [0], np.float32, - "referencePhase", - descr=descr, units="radians", data=0, - long_name='reference phase') - # Allocate metadata in processingInformation/parameters - freq_path = f'{proc_path}/common/frequency{freq}' - dst_h5[f'{proc_path}/common'].create_group(f'frequency{freq}') - descr = " Common Doppler bandwidth used for processing the interferogram" - _create_datasets(dst_h5[freq_path], [0], np.float64, - "dopplerBandwidth", - descr=descr, units="Hz", data=1, - long_name='doppler bandwidth') - descr = f" 2D LUT of Doppler Centroid for frequency {freq}" - _create_datasets(dst_h5[freq_path], igram_shape, np.float64, - 
"dopplerCentroid", - descr=descr, units="Hz", data=1, - long_name='doppler centroid') - if dst in ['ROFF', 'RIFG', 'RUNW', 'GUNW']: - descr = "Reference elevation above WGS84 Ellipsoid used for flattening" - _create_datasets(dst_h5[f'{freq_path}'], [0], np.float32, - "referenceFlatteningElevation", - descr=descr, units="meters", data=0, - long_name='reference flattening elevation') - - if dst in ['RIFG', 'RUNW', 'GUNW']: - descr = "Number of looks applied in along track direction" - _create_datasets(dst_h5[igram_path], [0], np.uint8, - "numberOfAzimuthLooks", - descr=descr, units=" ", data=int(az_looks), - long_name='number of azimuth looks') - _create_datasets(dst_h5[igram_path], [0], np.uint8, - "numberOfRangeLooks", - descr=descr.replace("along track", - "slant range"), - units=" ", data=int(rg_looks), - long_name='number of range looks') - descr = "Along track window size for cross-correlation" - _create_datasets(dst_h5[offs_path], [0], np.uint8, - 'alongTrackWindowSize', - descr=descr, units=" ", data=az_chip, - long_name='along track window size') - _create_datasets(dst_h5[offs_path], [0], np.uint8, - 'slantRangeWindowSize', - descr=descr.replace("Along track", - "Slant range"), - units=" ", data=rg_chip, - long_name="slant range window size") - descr = "Along track skip window size for cross-correlation" - _create_datasets(dst_h5[offs_path], [0], np.uint8, - 'alongTrackSkipWindowSize', - descr=descr, units=" ", data=az_skip, - long_name='along track skip window size') - _create_datasets(dst_h5[offs_path], [0], np.uint8, - 'slantRangeSkipWindowSize', - descr=descr.replace("Along track ", - "Slant range"), - units=" ", data=rg_skip, - long_name="slant range skip window size") - descr = "Along track search window size for cross-correlation" - _create_datasets(dst_h5[offs_path], [0], np.uint8, - 'alongTrackSearchWindowSize', - descr=descr, units=" ", data=az_search, - long_name="along track skip window size") - _create_datasets(dst_h5[offs_path], [0], np.uint8, 
- 'slantRangeSearchWindowSize', - descr=descr.replace("Along track ", - "Slant range"), - units=" ", data=rg_search, - long_name="slant range search window size") - descr = "Oversampling factor of the cross-correlation surface" - corr_ovs = get_off_params(cfg, 'correlation_surface_oversampling_factor', - is_roff) - _create_datasets(dst_h5[offs_path], [0], np.uint8, - 'correlationSurfaceOversampling', - descr=descr, units=" ", data=corr_ovs, - long_name='correlation surface oversampling') - descr = "Method used for generating pixel offsets" - method = get_off_params(cfg, 'cross_correlation_method', is_roff) - _create_datasets(dst_h5[offs_path], [9], np.string_, - 'crossCorrelationMethod', - descr=descr, units=None, data=method, - long_name='cross correlation method') - else: - offs_path = f'{proc_path}/pixelOffsets' - dst_h5[proc_path].require_group(f'pixelOffsets') - freq_path = f'{offs_path}/frequency{freq}' - dst_h5[offs_path].create_group(f'frequency{freq}') - descr='Reference RSLC starting pixel long along-track direction' - _create_datasets(dst_h5[freq_path], [0], np.uint8, - 'alongTrackStartPixel', - descr=descr, units=None, data=az_start, - long_name='along-track start pixel') - _create_datasets(dst_h5[freq_path], [0], np.uint8, - 'slantRangeStartPixel', - descr=descr.replace('along-track', 'slant range'), - units=None, data=az_start, - long_name='slant range start pixel') - corr_ovs = cfg['offsets_product'].get( - 'correlation_surface_oversampling_factor', - 64) - descr="Method used to generate pixel offsets" - _create_datasets(dst_h5[freq_path], [0], np.uint8, - 'correlationSurfaceOversampling', - descr=descr, units=" ", data=corr_ovs, - long_name='correlation surface oversampling') - off_cfg = cfg['offsets_product'] - for key in off_cfg.keys(): - if key.startswith('layer'): - lay_path = f'{freq_path}/{key}' - lay_cfg = off_cfg[key] - dst_h5[freq_path].create_group(f'{key}') - _create_datasets(dst_h5[lay_path], [0], np.uint8, - 'alongTrackWindowSize', - 
descr=descr, units=" ", data=lay_cfg.get('window_azimuth'), - long_name='along track window size') - _create_datasets(dst_h5[lay_path], [0], np.uint8, - 'slantRangeWindowSize', - descr=descr.replace("Along track", - "Slant range"), - units=" ", data=lay_cfg.get('window_range'), - long_name="slant range window size") - descr = "Along track skip window size for cross-correlation" - _create_datasets(dst_h5[lay_path], [0], np.uint8, - 'alongTrackSkipWindowSize', - descr=descr, units=" ", data=az_skip, - long_name='along track skip window size') - _create_datasets(dst_h5[lay_path], [0], np.uint8, - 'slantRangeSkipWindowSize', - descr=descr.replace("Along track ", - "Slant range"), - units=" ", data=rg_skip, - long_name="slant range skip window size") - descr = "Along track search window size for cross-correlation" - _create_datasets(dst_h5[lay_path], [0], np.uint8, - 'alongTrackSearchWindowSize', - descr=descr, units=" ", data=2*lay_cfg.get('half_search_azimuth'), - long_name="along track skip window size") - _create_datasets(dst_h5[lay_path], [0], np.uint8, - 'slantRangeSearchWindowSize', - descr=descr.replace("Along track ", - "Slant range"), - units=" ", data=2*lay_cfg.get('half_search_range'), - long_name="slant range search window size") - descr = "Method used to generate pixel offsets" - _create_datasets(dst_h5[lay_path], [9], np.string_, - 'crossCorrelationMethod', - descr=descr, units=None, data=lay_cfg.get('cross_correlation_method'), - long_name='cross correlation method') - # Add perpendicular and parallel baseline - # For radar/geogrid domain product, coordinateX/slantRange - # is chosen to determine the dimension of baseline - if dst in ['RIFG', 'ROFF', 'RUNW']: - cube_ref_dataset_name = 'coordinateX' - else: - cube_ref_dataset_name = 'slantRange' - - baseline_cubes_shape = None - cube_ref_dataset = f'{grid_path}/{cube_ref_dataset_name}' - if cube_ref_dataset in dst_h5: - cube_row = dst_h5[cube_ref_dataset].shape[1] - cube_col = 
dst_h5[cube_ref_dataset].shape[2] - baseline_cubes_shape = [2, cube_row, cube_col] - - # if input data does not have mandatory metadata, - # baseline cannot be estimated, so does not create the baselines - if baseline_cubes_shape is not None: - descr = "Perpendicular component of the InSAR baseline" - _create_datasets(dst_h5[grid_path], baseline_cubes_shape, np.float32, - "perpendicularBaseline", - descr=descr, units="meters", - long_name='perpendicular baseline') - _create_datasets(dst_h5[grid_path], baseline_cubes_shape, np.float32, - "parallelBaseline", - descr=descr.replace('Perpendicular', 'Parallel'), - units="meters", - long_name='parallel baseline') - def get_off_params(pcfg, param_name, is_roff=False, pattern=None, get_min=False): diff --git a/python/packages/nisar/workflows/insar.py b/python/packages/nisar/workflows/insar.py index cddac62cc..6e5cbf4db 100644 --- a/python/packages/nisar/workflows/insar.py +++ b/python/packages/nisar/workflows/insar.py @@ -4,7 +4,8 @@ import journal from nisar.workflows import (bandpass_insar, crossmul, dense_offsets, geo2rdr, geocode_insar, h5_prep, filter_interferogram, - offsets_product, rdr2geo, resample_slc, rubbersheet, + offsets_product, prepare_insar_hdf5, rdr2geo, + resample_slc, rubbersheet, split_spectrum, unwrap, ionosphere, baseline, troposphere, solid_earth_tides) @@ -26,8 +27,8 @@ def run(cfg: dict, out_paths: dict, run_steps: dict): if run_steps['bandpass_insar']: bandpass_insar.run(cfg) - if run_steps['h5_prep']: - h5_prep.run(cfg) + if run_steps['prepare_insar_hdf5']: + prepare_insar_hdf5.run(cfg) if run_steps['rdr2geo']: rdr2geo.run(cfg) @@ -89,7 +90,7 @@ def run(cfg: dict, out_paths: dict, run_steps: dict): if 'GUNW' in out_paths and run_steps['troposphere'] and \ cfg['processing']['troposphere_delay']['enabled']: troposphere.run(cfg, out_paths['GUNW']) - + if 'GUNW' in out_paths and run_steps['solid_earth_tides']: solid_earth_tides.run(cfg, out_paths['GUNW']) diff --git 
a/python/packages/nisar/workflows/insar_runconfig.py b/python/packages/nisar/workflows/insar_runconfig.py index ef75a74be..be69a313c 100644 --- a/python/packages/nisar/workflows/insar_runconfig.py +++ b/python/packages/nisar/workflows/insar_runconfig.py @@ -1,15 +1,15 @@ +import os import warnings import journal - +import nisar.workflows.helpers as helpers from nisar.products.readers import SLC from nisar.workflows.geo2rdr_runconfig import Geo2rdrRunConfig -import nisar.workflows.helpers as helpers - from nisar.workflows.geocode_insar_runconfig import geocode_insar_cfg_check from nisar.workflows.ionosphere_runconfig import ionosphere_cfg_check from nisar.workflows.troposphere_runconfig import troposphere_delay_check + class InsarRunConfig(Geo2rdrRunConfig): def __init__(self, args): super().__init__(args) diff --git a/python/packages/nisar/workflows/ionosphere.py b/python/packages/nisar/workflows/ionosphere.py index bec9c62b0..3a71a533d 100644 --- a/python/packages/nisar/workflows/ionosphere.py +++ b/python/packages/nisar/workflows/ionosphere.py @@ -21,7 +21,8 @@ from nisar.products.readers import SLC from nisar.workflows import (crossmul, dense_offsets, h5_prep, - filter_interferogram, resample_slc, + filter_interferogram, prepare_insar_hdf5, + resample_slc, rubbersheet, unwrap) from nisar.workflows.ionosphere_runconfig import InsarIonosphereRunConfig from nisar.workflows.yaml_argparse import YamlArgparse @@ -206,7 +207,7 @@ def copy_iono_datasets(iono_insar_cfg, oversample_flag=False, slant_main=None, slant_side=None): - """copy ionosphere layers (frequency B) to frequency A of RUNW product + """copy ionosphere layers (frequency B) to frequency A of RUNW product with oversampling Parameters @@ -530,7 +531,7 @@ def run_insar_workflow(iono_insar_cfg, original_dict, out_paths): ''' # run insar for ionosphere pairs - h5_prep.run(iono_insar_cfg) + prepare_insar_hdf5.run(iono_insar_cfg) iono_freq_pol = iono_insar_cfg['processing']['input_subset'][ 
'list_of_frequencies'] diff --git a/python/packages/nisar/workflows/offsets_product.py b/python/packages/nisar/workflows/offsets_product.py index 8a6e1b358..b9ea9f483 100644 --- a/python/packages/nisar/workflows/offsets_product.py +++ b/python/packages/nisar/workflows/offsets_product.py @@ -6,12 +6,12 @@ import journal import numpy as np from nisar.products.readers import SLC -from nisar.workflows import h5_prep +from nisar.workflows import prepare_insar_hdf5 +from nisar.workflows.compute_stats import compute_stats_real_data from nisar.workflows.dense_offsets import create_empty_dataset from nisar.workflows.helpers import copy_raster, get_cfg_freq_pols from nisar.workflows.offsets_product_runconfig import OffsetsProductRunConfig from nisar.workflows.yaml_argparse import YamlArgparse -from nisar.workflows.compute_stats import compute_stats_real_data from osgeo import gdal @@ -346,7 +346,7 @@ def write_data(infile, outfile, band, lines_per_block): lines_per_block: int Lines per block to read in batch ''' - # Get shape of input file (same as output created from h5_prep) + # Get shape of input file (same as output created from prepare_insar_hdf5) ds = gdal.Open(infile, gdal.GA_ReadOnly) length = ds.RasterYSize width = ds.RasterXSize @@ -384,7 +384,7 @@ def write_data(infile, outfile, band, lines_per_block): offsets_runconfig = OffsetsProductRunConfig(args) # Prepare ROFF HDF5 product - out_paths = h5_prep.run(offsets_runconfig.cfg) + out_paths = prepare_insar_hdf5.run(offsets_runconfig.cfg) # Run offsets product generation run(offsets_runconfig.cfg, out_paths['ROFF']) diff --git a/python/packages/nisar/workflows/persistence.py b/python/packages/nisar/workflows/persistence.py index 50b050235..732598b10 100644 --- a/python/packages/nisar/workflows/persistence.py +++ b/python/packages/nisar/workflows/persistence.py @@ -10,7 +10,7 @@ class Persistence(): insar_steps = ['baseline', 'solid_earth_tides', 'troposphere', 'geocode', 'ionosphere', 'unwrap', 
'filter_interferogram', 'crossmul', 'fine_resample', 'rubbersheet', 'offsets_product', 'dense_offsets', 'coarse_resample', 'geo2rdr', - 'rdr2geo', 'h5_prep', 'bandpass_insar'] + 'rdr2geo', 'h5_prep', 'prepare_insar_hdf5', 'bandpass_insar'] def __init__(self, logfile_path, restart=False): """ diff --git a/python/packages/nisar/workflows/prepare_insar_hdf5.py b/python/packages/nisar/workflows/prepare_insar_hdf5.py new file mode 100644 index 000000000..a14be7d63 --- /dev/null +++ b/python/packages/nisar/workflows/prepare_insar_hdf5.py @@ -0,0 +1,67 @@ +""" +Prepare InSAR HDF5 for +GUNW, GOFF, RIFG, ROFF, and RUNW +""" + +import journal +from nisar.products.insar import (GOFFWriter, GUNWWriter, RIFGWriter, + ROFFWriter, RUNWWriter) +from nisar.workflows.h5_prep import get_products_and_paths + + +def prepare_insar_hdf5(cfg, output_hdf5, dst): + """ + Prepare InSAR (GOFF, GUNW, RIFG, ROFF, RUNW) HDF5 products. + + Parameters + ---------- + cfg: dict + runconfig dictionary + output_hdf5: str + the output path of the InSAR product + dst : str + the name of the InSAR product + """ + if "RUNW" in dst: + with RUNWWriter(name = output_hdf5, mode = 'w', + runconfig_dict = cfg, + runconfig_path="None") as runw: + runw.save_to_hdf5() + if "ROFF" in dst: + with ROFFWriter(name = output_hdf5, mode = 'w', + runconfig_dict = cfg, + runconfig_path="None") as roff: + roff.save_to_hdf5() + if "RIFG" in dst: + with RIFGWriter(name = output_hdf5, mode = 'w', + runconfig_dict = cfg, + runconfig_path="None") as rifg: + rifg.save_to_hdf5() + if "GUNW" in dst: + with GUNWWriter(name = output_hdf5, mode = 'w', + runconfig_dict = cfg, + runconfig_path="None") as gunw: + gunw.save_to_hdf5() + if "GOFF" in dst: + with GOFFWriter(name = output_hdf5, mode = 'w', + runconfig_dict = cfg, + runconfig_path="None") as goff: + goff.save_to_hdf5() + + +def run(cfg: dict) -> dict: + """ + prepare datasets + Returns dict of output path(s); used for InSAR workflow + """ + info_channel = 
journal.info("prepare_insar_hdf5.run") + info_channel.log("preparing InSAR HDF5 products") + + product_dict, h5_paths = get_products_and_paths(cfg) + for sub_prod_type in product_dict: + out_path = h5_paths[sub_prod_type] + prepare_insar_hdf5(cfg, out_path, sub_prod_type) + + info_channel.log("successfully ran prepare_insar_hdf5") + + return h5_paths diff --git a/python/packages/nisar/workflows/rubbersheet.py b/python/packages/nisar/workflows/rubbersheet.py index 89814f767..c4af6d96e 100644 --- a/python/packages/nisar/workflows/rubbersheet.py +++ b/python/packages/nisar/workflows/rubbersheet.py @@ -3,20 +3,19 @@ ''' import pathlib -import journal import time + import h5py -import numpy as np import isce3 -from osgeo import gdal -from scipy import ndimage -from scipy import interpolate -from scipy import signal +import journal +import numpy as np from nisar.products.readers import SLC -from nisar.workflows import h5_prep +from nisar.workflows import prepare_insar_hdf5 from nisar.workflows.helpers import get_cfg_freq_pols -from nisar.workflows.yaml_argparse import YamlArgparse from nisar.workflows.rubbersheet_runconfig import RubbersheetRunConfig +from nisar.workflows.yaml_argparse import YamlArgparse +from osgeo import gdal +from scipy import interpolate, ndimage, signal def run(cfg: dict, output_hdf5: str = None): @@ -442,5 +441,5 @@ def write_vrt(file1, file2, out_vrt, width, length, data_type, # Prepare RIFG. 
Culled offsets will be # allocated in RIFG product - out_paths = h5_prep.run(rubbersheet_runconfig.cfg) + out_paths = prepare_insar_hdf5.run(rubbersheet_runconfig.cfg) run(rubbersheet_runconfig.cfg, out_paths['RIFG']) diff --git a/python/packages/nisar/workflows/unwrap.py b/python/packages/nisar/workflows/unwrap.py index 4d5c0941d..f05717c0b 100644 --- a/python/packages/nisar/workflows/unwrap.py +++ b/python/packages/nisar/workflows/unwrap.py @@ -1,33 +1,31 @@ #!/usr/bin/env python3 -''' +""" Wrapper for phase unwrapping -''' +""" import pathlib import time import h5py -import journal -import numpy as np -from osgeo import gdal import isce3 import isce3.unwrap.snaphu as snaphu - -from nisar.workflows import h5_prep -from nisar.products.readers import SLC +import journal +import numpy as np from isce3.unwrap.preprocess import preprocess_wrapped_igram as preprocess from isce3.unwrap.preprocess import project_map_to_radar +from nisar.products.readers import SLC from nisar.products.readers.orbit import load_orbit_from_xml -from nisar.workflows import crossmul -from nisar.workflows.unwrap_runconfig import UnwrapRunConfig -from nisar.workflows.yaml_argparse import YamlArgparse +from nisar.workflows import crossmul, prepare_insar_hdf5 from nisar.workflows.compute_stats import compute_stats_real_data from nisar.workflows.helpers import get_cfg_freq_pols +from nisar.workflows.unwrap_runconfig import UnwrapRunConfig +from nisar.workflows.yaml_argparse import YamlArgparse +from osgeo import gdal def run(cfg: dict, input_hdf5: str, output_hdf5: str): - ''' + """ run phase unwrapping Parameters @@ -38,7 +36,7 @@ def run(cfg: dict, input_hdf5: str, output_hdf5: str): File path to input HDF5 product (i.e., RIFG) output_hdf5: str File path to output HDF5 product (i.e., RUNW) - ''' + """ # pull parameters from dictionary ref_slc_hdf5 = cfg['input_file_group']['reference_rslc_file'] @@ -50,7 +48,7 @@ def run(cfg: dict, input_hdf5: str, output_hdf5: str): unwrap_az_looks = 
cfg['processing']['phase_unwrap']['azimuth_looks'] # Create error and info channels - error_channel = journal.error('unwrap.run') + error_channel = journal.error("unwrap.run") info_channel = journal.info("unwrap.run") info_channel.log("Starting phase unwrapping") @@ -64,32 +62,38 @@ def run(cfg: dict, input_hdf5: str, output_hdf5: str): # Start to track time t_all = time.time() - with h5py.File(output_hdf5, 'a', libver='latest', swmr=True) as dst_h5,\ - h5py.File(crossmul_path, 'r', libver='latest', swmr=True) as src_h5: + with h5py.File(output_hdf5, "a", libver="latest", swmr=True) as dst_h5,\ + h5py.File(crossmul_path, "r", libver="latest", swmr=True) as src_h5: for freq, pol_list, offset_pol_list in get_cfg_freq_pols(cfg): - src_freq_group_path = f'/science/LSAR/RIFG/swaths/frequency{freq}' - dst_freq_group_path = src_freq_group_path.replace('RIFG', 'RUNW') + src_freq_group_path = f"/science/LSAR/RIFG/swaths/frequency{freq}" + src_freq_bandwidth_group_path = ("/science/LSAR/RIFG/metadata/" + "processingInformation/parameters" + f"/reference/frequency{freq}") + dst_freq_group_path = src_freq_group_path.replace("RIFG", "RUNW") for pol in pol_list: - src_pol_group_path = f'{src_freq_group_path}/interferogram/{pol}' - dst_pol_group_path = f'{dst_freq_group_path}/interferogram/{pol}' + src_pol_group_path = \ + f"{src_freq_group_path}/interferogram/{pol}" + dst_pol_group_path = \ + f"{dst_freq_group_path}/interferogram/{pol}" # Fetch paths to input/output datasets - igram_path = f'HDF5:{crossmul_path}:/' \ - f'{src_pol_group_path}/wrappedInterferogram' - corr_path = f'HDF5:{crossmul_path}:/' \ - f'{src_pol_group_path}/coherenceMagnitude' + igram_path = \ + f"HDF5:{crossmul_path}:/{src_pol_group_path}/wrappedInterferogram" + corr_path = \ + f"HDF5:{crossmul_path}:/{src_pol_group_path}/coherenceMagnitude" # Create unwrapped interferogram output raster - unw_path = f'{dst_pol_group_path}/unwrappedPhase' + unw_path = f"{dst_pol_group_path}/unwrappedPhase" unw_dataset = 
dst_h5[unw_path] - unw_raster_path = f"IH5:::ID={unw_dataset.id.id}".encode("utf-8") + unw_raster_path = \ + f"IH5:::ID={unw_dataset.id.id}".encode("utf-8") # Create connected components output raster - conn_comp_path = f'{dst_pol_group_path}/connectedComponents' + conn_comp_path = f"{dst_pol_group_path}/connectedComponents" conn_comp_dataset = dst_h5[conn_comp_path] - conn_comp_raster_path = f"IH5:::ID=" \ - f"{conn_comp_dataset.id.id}".encode("utf-8") + conn_comp_raster_path = \ + f"IH5:::ID={conn_comp_dataset.id.id}".encode("utf-8") # Create unwrapping scratch directory to store temporary rasters crossmul_scratch = scratch_path / f'crossmul/freq{freq}/{pol}/' @@ -112,79 +116,93 @@ def run(cfg: dict, input_hdf5: str, output_hdf5: str): # If enabled, preprocess wrapped phase: remove invalid pixels # and fill their location with a filling algorithm - if unwrap_args['preprocess_wrapped_phase']['enabled']: + if unwrap_args["preprocess_wrapped_phase"]["enabled"]: # Extract preprocessing dictionary and open arrays - preproc_cfg = unwrap_args['preprocess_wrapped_phase'] - filling_enabled = preproc_cfg['filling_enabled'] - filling_method = preproc_cfg['filling_method'] + preproc_cfg = unwrap_args["preprocess_wrapped_phase"] + filling_enabled = preproc_cfg["filling_enabled"] + filling_method = preproc_cfg["filling_method"] igram = open_raster(igram_path) coherence = open_raster(corr_path) - mask = open_raster(preproc_cfg['mask']['mask_path']) if \ - preproc_cfg['mask']['mask_path'] is not None else None + mask = ( + open_raster(preproc_cfg["mask"]["mask_path"]) + if preproc_cfg["mask"]["mask_path"] is not None + else None) - if 'water' in preproc_cfg['mask']['mask_type']: + if "water" in preproc_cfg["mask"]["mask_type"]: # water_mask_file is expected to have distance from the boundary of the # water bodies. The values 0-100 represent the distance from the coastline # and values from 101-200 represent the distance from inland water boundaries. 
- water_mask_path = cfg['dynamic_ancillary_file_group']['water_mask_file'] - ocean_water_buffer = preproc_cfg['mask']['ocean_water_buffer'] - inland_water_buffer = preproc_cfg['mask']['inland_water_buffer'] - water_distance = project_map_to_radar(cfg, water_mask_path, freq) + water_mask_path = \ + cfg["dynamic_ancillary_file_group"]["water_mask_file"] + ocean_water_buffer = \ + preproc_cfg["mask"]["ocean_water_buffer"] + inland_water_buffer = \ + preproc_cfg["mask"]["inland_water_buffer"] + water_distance = project_map_to_radar( + cfg, water_mask_path, freq) # Since distance from inland water is defined from 101 to 200 in water mask file, # the value 100 needs to be added. inland_water_mask = water_distance > inland_water_buffer + 100 - ocean_water_mask = (water_distance > ocean_water_buffer) & \ - (water_distance <= 100) + ocean_water_mask = ( + water_distance > ocean_water_buffer + ) & (water_distance <= 100) if mask is not None: mask = mask | inland_water_mask | ocean_water_mask else: mask = inland_water_mask | ocean_water_mask - if filling_method == 'distance_interpolator': - distance = preproc_cfg['distance_interpolator']['distance'] - - igram_filt = preprocess(igram, coherence, - mask, - preproc_cfg['mask']['mask_type'], - preproc_cfg['mask']['outlier_threshold'], - preproc_cfg['mask']['median_filter_size'], - filling_enabled, - filling_method, distance) + if filling_method == "distance_interpolator": + distance = \ + preproc_cfg["distance_interpolator"]["distance"] + + igram_filt = preprocess( + igram, + coherence, + mask, + preproc_cfg["mask"]["mask_type"], + preproc_cfg["mask"]["outlier_threshold"], + preproc_cfg["mask"]["median_filter_size"], + filling_enabled, + filling_method, + distance) # Save filtered/filled wrapped interferogram igram_path = f'{unwrap_scratch}/wrapped_igram.filt' - write_raster(igram_path, igram_filt) + write_raster(igram_path, igram_filt) # Run unwrapping based on user-defined algorithm - algorithm = unwrap_args['algorithm'] + 
algorithm = unwrap_args["algorithm"] - if algorithm == 'icu': - info_channel.log('Unwrapping with ICU') - icu_cfg = unwrap_args['icu'] + if algorithm == "icu": + info_channel.log("Unwrapping with ICU") + icu_cfg = unwrap_args["icu"] icu_obj = set_icu_attributes(icu_cfg) # Allocate input/output rasters igram_raster = isce3.io.Raster(igram_path) corr_raster = isce3.io.Raster(corr_path) - unw_raster = isce3.io.Raster(unw_raster_path, - update=True) - conn_comp_raster = isce3.io.Raster(conn_comp_raster_path, - update=True) + unw_raster = isce3.io.Raster(unw_raster_path, update=True) + conn_comp_raster = isce3.io.Raster( + conn_comp_raster_path, update=True) # Run unwrapping - icu_obj.unwrap(unw_raster, conn_comp_raster, - igram_raster, corr_raster, seed=icu_cfg['seed']) + icu_obj.unwrap( + unw_raster, + conn_comp_raster, + igram_raster, + corr_raster, + seed=icu_cfg["seed"]) # Compute statistics compute_stats_real_data(unw_raster, unw_dataset) # Log attributes for ICU log_unwrap_attributes(icu_obj, info_channel, algorithm) - elif algorithm == 'phass': - info_channel.log('Unwrapping using PHASS') - phass_cfg = unwrap_args['phass'] + elif algorithm == "phass": + info_channel.log("Unwrapping using PHASS") + phass_cfg = unwrap_args["phass"] phass_obj = set_phass_attributes(phass_cfg) # Phass requires the phase of igram (not complex igram) # Generate InSAR phase using GDAL pixel functions - igram_phase_path = unwrap_scratch/'wrapped_phase.vrt' + igram_phase_path = unwrap_scratch / "wrapped_phase.vrt" igram_phase_to_vrt(igram_path, igram_phase_path) # Allocate input/output raster @@ -192,60 +210,70 @@ def run(cfg: dict, input_hdf5: str, output_hdf5: str): corr_raster = isce3.io.Raster(corr_path) # Check if it is required to unwrap with power raster - if phass_cfg.get('power') is not None: - power_raster = isce3.io.Raster(phass_cfg['power']) - phass_obj.unwrap(igram_phase_raster, power_raster, - corr_raster, unw_raster, - conn_comp_raster) + if phass_cfg.get("power") is not 
None: + power_raster = isce3.io.Raster(phass_cfg["power"]) + phass_obj.unwrap( + igram_phase_raster, + power_raster, + corr_raster, + unw_raster, + conn_comp_raster) else: - phass_obj.unwrap(igram_phase_raster, corr_raster, - unw_raster, conn_comp_raster) + phass_obj.unwrap( + igram_phase_raster, + corr_raster, + unw_raster, + conn_comp_raster) # Compute statistics compute_stats_real_data(unw_raster, unw_dataset) # Log attributes for phass log_unwrap_attributes(phass_obj, info_channel, algorithm) - elif algorithm == 'snaphu': - info_channel.log('Unwrapping with SNAPHU') + elif algorithm == "snaphu": + info_channel.log("Unwrapping with SNAPHU") # Get SNAPHU dictionary with user params - snaphu_cfg = unwrap_args['snaphu'] + snaphu_cfg = unwrap_args["snaphu"] # Allocate input/output rasters (for Snaphu isce3.io.gdal.Raster) igram_raster = isce3.io.gdal.Raster(igram_path) - out_file_corr = str(unwrap_scratch/'nan_clip_coherence.tiff') + out_file_corr = str(unwrap_scratch / "nan_clip_coherence.tiff") # SNAPHU isn't currently handling NaN values in the input rasters # (even if masked out). As a temporary fix, we create an intermediate raster # for the coherence data with NaN values replaced with zeros. 
- filter_nan(corr_path, out_file_corr, lines_per_block, - data_type=gdal.GDT_Float32) + filter_nan( + corr_path, + out_file_corr, + lines_per_block, + data_type=gdal.GDT_Float32) corr_raster = isce3.io.gdal.Raster(out_file_corr) - unw_raster = isce3.io.gdal.Raster(unw_raster_path, - access=isce3.io.gdal.GA_Update) - conn_comp_raster = isce3.io.gdal.Raster(conn_comp_raster_path, - access=isce3.io.gdal.GA_Update) + unw_raster = isce3.io.gdal.Raster( + unw_raster_path, access=isce3.io.gdal.GA_Update) + conn_comp_raster = isce3.io.gdal.Raster( + conn_comp_raster_path, access=isce3.io.gdal.GA_Update) # Allocate ancillary rasters necessary for unwrapping - mask_raster = get_optional_snaphu_raster_dataset(snaphu_cfg, 'mask') - unw_est_raster = get_optional_snaphu_raster_dataset(snaphu_cfg, - 'unwrapped_phase_estimate') + mask_raster = get_optional_snaphu_raster_dataset( + snaphu_cfg, "mask") + unw_est_raster = get_optional_snaphu_raster_dataset( + snaphu_cfg, "unwrapped_phase_estimate") # Get snaphu cost mode (default: smooth) and cost mode obj - cost_mode = snaphu_cfg['cost_mode'] + cost_mode = snaphu_cfg["cost_mode"] cost_obj = select_cost_options(snaphu_cfg, cost_mode) # Initialize solver, connected components objects solver_obj = set_solver_params(snaphu_cfg) conn_comp_obj = set_connected_components_params(snaphu_cfg) # Compute effective number of looks - if snaphu_cfg.get('nlooks') is not None: - nlooks = snaphu_cfg['nlooks'] + if snaphu_cfg.get("nlooks") is not None: + nlooks = snaphu_cfg["nlooks"] else: # Compute nlooks based on info in RIFG - rg_spac = src_h5[f'{src_freq_group_path}/interferogram' \ - f'/slantRangeSpacing'][()] - az_spac = src_h5[f'{src_freq_group_path}/' \ - f'sceneCenterAlongTrackSpacing'][()] - rg_bw = src_h5[f'{src_freq_group_path}/rangeBandwidth'][()] + rg_spac = \ + src_h5[f"{src_freq_group_path}/interferogram/slantRangeSpacing"][()] + az_spac = \ + src_h5[f"{src_freq_group_path}/""sceneCenterAlongTrackSpacing"][()] + rg_bw = \ + 
src_h5[f"{src_freq_bandwidth_group_path}/rangeBandwidth"][()] # Note, this is the single-look range resolution - rg_res = isce3.core.speed_of_light / ( - 2 * rg_bw) + rg_res = isce3.core.speed_of_light / (2 * rg_bw) # To compute azimuth resolution, get sensor speed at mid-scene # And use azimuth processed bandwidth (copied from RSLC) ref_orbit = cfg['dynamic_ancillary_file_group']['orbit_files']['reference_orbit_file'] @@ -259,29 +287,37 @@ def run(cfg: dict, input_hdf5: str, output_hdf5: str): _, v_mid = orbit.interpolate(radar_grid.sensing_mid) vs = np.linalg.norm(v_mid) # Note this is the single-look azimuth resolution - az_bw = src_h5[f'{src_freq_group_path}/azimuthBandwidth'][()] + az_bw = \ + src_h5[f"{src_freq_bandwidth_group_path}/azimuthBandwidth"][()] az_res = vs / az_bw nlooks = rg_spac * az_spac / (rg_res * az_res) - if snaphu_cfg['verbose']: + if snaphu_cfg["verbose"]: # Increase output detail level of "isce3.unwrap.snaphu" # info channels. channel = journal.info("isce3.unwrap.snaphu") channel.detail = 2 # Unwrap with snaphu (none for power mode and tiling_params) - snaphu.unwrap(unw_raster, conn_comp_raster, - igram_raster, corr_raster, nlooks, - cost=cost_mode, cost_params=cost_obj, - pwr=None, mask=mask_raster, - unwest=unw_est_raster, - init_method=snaphu_cfg['initialization_method'], - tiling_params=None, solver_params=solver_obj, - conncomp_params=conn_comp_obj, - corr_bias_model_params=None, - phase_stddev_model_params=None, - scratchdir=unwrap_scratch, - debug=snaphu_cfg['debug']) + snaphu.unwrap( + unw_raster, + conn_comp_raster, + igram_raster, + corr_raster, + nlooks, + cost=cost_mode, + cost_params=cost_obj, + pwr=None, + mask=mask_raster, + unwest=unw_est_raster, + init_method=snaphu_cfg["initialization_method"], + tiling_params=None, + solver_params=solver_obj, + conncomp_params=conn_comp_obj, + corr_bias_model_params=None, + phase_stddev_model_params=None, + scratchdir=unwrap_scratch, + debug=snaphu_cfg["debug"]) # Compute statistics 
(stats module supports isce3.io.Raster) del unw_raster unw_raster = isce3.io.Raster(unw_raster_path) @@ -307,7 +343,8 @@ def run(cfg: dict, input_hdf5: str, output_hdf5: str): src_path = f'{src_freq_group_path}/{group}/{pol}/{dataset}' if (dataset == 'coherenceMagnitude') and ((unwrap_rg_looks > 1) or (unwrap_az_looks > 1)): - corr_path = str(f'{crossmul_scratch}/coherence_rg{unwrap_rg_looks}_az{unwrap_az_looks}') + corr_path = \ + str(f'{crossmul_scratch}/coherence_rg{unwrap_rg_looks}_az{unwrap_az_looks}') corr = open_raster(corr_path) dst_h5[dst_path][:, :] = corr else: @@ -324,11 +361,13 @@ def run(cfg: dict, input_hdf5: str, output_hdf5: str): del conn_comp_raster t_all_elapsed = time.time() - t_all - info_channel.log(f"Successfully ran phase unwrapping in {t_all_elapsed:.3f} seconds") + info_channel.log( + f"Successfully ran phase unwrapping in {t_all_elapsed:.3f} seconds" + ) def log_unwrap_attributes(unwrap, info, algorithm): - ''' + """ Write unwrap attributes to info channel Parameters @@ -339,9 +378,9 @@ def log_unwrap_attributes(unwrap, info, algorithm): Info channel where to log attributes values algorithm: str String identifying unwrapping algorithm being used - ''' + """ info.log(f"Unwrapping algorithm:{algorithm}") - if algorithm == 'icu': + if algorithm == "icu": info.log(f"Correlation threshold increments: {unwrap.corr_incr_thr}") info.log(f"Number of buffer lines: {unwrap.buffer_lines}") info.log(f"Number of overlap lines: {unwrap.overlap_lines}") @@ -350,8 +389,10 @@ def log_unwrap_attributes(unwrap, info, algorithm): info.log(f"Phase gradient window size: {unwrap.phase_grad_win_size}") info.log(f"Phase gradient threshold: {unwrap.neut_phase_grad_thr}") info.log(f"Neutron intensity threshold: {unwrap.neut_intensity_thr}") - info.log(f"Maximum intensity correlation " - f"threshold: {unwrap.neut_correlation_thr}") + info.log( + "Maximum intensity correlation " + f"threshold: {unwrap.neut_correlation_thr}" + ) info.log(f"Number of trees: 
{unwrap.trees_number}") info.log(f"Maximum branch length: {unwrap.max_branch_length}") info.log(f"Pixel spacing ratio: {unwrap.ratio_dxdy}") @@ -362,8 +403,9 @@ def log_unwrap_attributes(unwrap, info, algorithm): info.log(f"Number of bootstrapping lines: {unwrap.num_bs_lines}") info.log(f"Minimum overlapping area: {unwrap.min_overlap_area}") info.log(f"Phase variance threshold: {unwrap.phase_var_thr}") - elif algorithm == 'phass': - info.log(f"Correlation threshold increments: {unwrap.correlation_threshold}") + elif algorithm == "phass": + info.log( + f"Correlation threshold increments: {unwrap.correlation_threshold}") info.log(f"Good correlation: {unwrap.good_correlation}") info.log( f"Minimum size of an unwrapped region: {unwrap.min_pixels_region}") @@ -372,7 +414,7 @@ def log_unwrap_attributes(unwrap, info, algorithm): def set_icu_attributes(cfg: dict): - ''' + """ Return ICU object with user-defined attribute values Parameters @@ -384,32 +426,32 @@ def set_icu_attributes(cfg: dict): ------- unwrap: isce3.unwrap.ICU ICU object with user-defined attribute values - ''' + """ unwrap = isce3.unwrap.ICU() - unwrap.corr_incr_thr = cfg['correlation_threshold_increments'] - unwrap.buffer_lines = cfg['buffer_lines'] - unwrap.overlap_lines = cfg['overlap_lines'] - unwrap.use_phase_grad_neut = cfg['use_phase_gradient_neutron'] - unwrap.use_intensity_neut = cfg['use_intensity_neutron'] - unwrap.phase_grad_win_size = cfg['phase_gradient_window_size'] - unwrap.neut_phase_grad_thr = cfg['neutron_phase_gradient_threshold'] - unwrap.neut_intensity_thr = cfg['neutron_intensity_threshold'] - unwrap.neut_correlation_thr = cfg['max_intensity_correlation_threshold'] - unwrap.trees_number = cfg['trees_number'] - unwrap.max_branch_length = cfg['max_branch_length'] - unwrap.ratio_dxdy = cfg['pixel_spacing_ratio'] - unwrap.init_corr_thr = cfg['initial_correlation_threshold'] - unwrap.max_corr_thr = cfg['max_correlation_threshold'] - unwrap.min_cc_area = cfg['min_tile_area'] - 
unwrap.num_bs_lines = cfg['bootstrap_lines'] - unwrap.min_overlap_area = cfg['min_overlap_area'] - unwrap.phase_var_thr = cfg['phase_variance_threshold'] + unwrap.corr_incr_thr = cfg["correlation_threshold_increments"] + unwrap.buffer_lines = cfg["buffer_lines"] + unwrap.overlap_lines = cfg["overlap_lines"] + unwrap.use_phase_grad_neut = cfg["use_phase_gradient_neutron"] + unwrap.use_intensity_neut = cfg["use_intensity_neutron"] + unwrap.phase_grad_win_size = cfg["phase_gradient_window_size"] + unwrap.neut_phase_grad_thr = cfg["neutron_phase_gradient_threshold"] + unwrap.neut_intensity_thr = cfg["neutron_intensity_threshold"] + unwrap.neut_correlation_thr = cfg["max_intensity_correlation_threshold"] + unwrap.trees_number = cfg["trees_number"] + unwrap.max_branch_length = cfg["max_branch_length"] + unwrap.ratio_dxdy = cfg["pixel_spacing_ratio"] + unwrap.init_corr_thr = cfg["initial_correlation_threshold"] + unwrap.max_corr_thr = cfg["max_correlation_threshold"] + unwrap.min_cc_area = cfg["min_tile_area"] + unwrap.num_bs_lines = cfg["bootstrap_lines"] + unwrap.min_overlap_area = cfg["min_overlap_area"] + unwrap.phase_var_thr = cfg["phase_variance_threshold"] return unwrap def set_phass_attributes(cfg: dict): - ''' + """ Return Phass object with user-defined attribute values Parameters @@ -421,18 +463,18 @@ def set_phass_attributes(cfg: dict): ------- unwrap: isce3.unwrap.Phass Phass object with user-defined attribute values - ''' + """ unwrap = isce3.unwrap.Phass() - unwrap.corr_incr_thr = cfg['correlation_threshold_increments'] - unwrap.good_correlation = cfg['good_correlation'] - unwrap.min_pixels_region = cfg['min_unwrap_area'] + unwrap.corr_incr_thr = cfg["correlation_threshold_increments"] + unwrap.good_correlation = cfg["good_correlation"] + unwrap.min_pixels_region = cfg["min_unwrap_area"] return unwrap def igram_phase_to_vrt(raster_path, output_path): - ''' + """ Save the phase of complex raster in 'raster_path' in a GDAL VRT format @@ -442,10 +484,10 @@ def 
igram_phase_to_vrt(raster_path, output_path): File path of complex raster to save in VRT format output_path: str File path of output phase VRT raster - ''' + """ ds = gdal.Open(raster_path, gdal.GA_ReadOnly) - vrttmpl = f''' + vrttmpl = f""" Phase @@ -454,14 +496,14 @@ def igram_phase_to_vrt(raster_path, output_path): {raster_path} - ''' + """ ds = None - with open(output_path, 'w') as fid: + with open(output_path, "w") as fid: fid.write(vrttmpl) def get_optional_snaphu_raster_dataset(cfg, dataset_name): - ''' + """ Create ISCE3 raster from cfg snaphu runconfig. Returns None if `dataset_name` is not contained in `cfg`. @@ -476,7 +518,7 @@ def get_optional_snaphu_raster_dataset(cfg, dataset_name): ------- raster: isce3.io.Raster Raster containing the dataset at 'cfg[dataset_name]' - ''' + """ if cfg.get(dataset_name) is not None: raster = isce3.io.gdal.Raster(cfg[dataset_name]) else: @@ -484,7 +526,7 @@ def get_optional_snaphu_raster_dataset(cfg, dataset_name): return raster -def select_cost_options(cfg, cost_mode='smooth'): +def select_cost_options(cfg, cost_mode="smooth"): """ Select and set cost parameter object based on user-defined cost mode options @@ -503,18 +545,18 @@ def select_cost_options(cfg, cost_mode='smooth'): E.g. 
if cost_mode is "defo", cost_params_obj is an instance of isce3.unwrap.snaphu.DefoCostParams() """ - error_channel = journal.error('unwrap.run.select_cost_options') + error_channel = journal.error("unwrap.run.select_cost_options") # If 'cost_mode_parameters' does not exist, create empty dictionary - if 'cost_mode_parameters' not in cfg: - cfg['cost_mode_parameters'] = {} - - if cost_mode == 'defo': - cost_params_obj = set_defo_params(cfg['cost_mode_parameters']) - elif cost_mode == 'smooth': - cost_params_obj = set_smooth_params(cfg['cost_mode_parameters']) - elif cost_mode == 'p-norm': - cost_params_obj = set_pnorm_params(cfg['cost_mode_parameters']) + if "cost_mode_parameters" not in cfg: + cfg["cost_mode_parameters"] = {} + + if cost_mode == "defo": + cost_params_obj = set_defo_params(cfg["cost_mode_parameters"]) + elif cost_mode == "smooth": + cost_params_obj = set_smooth_params(cfg["cost_mode_parameters"]) + elif cost_mode == "p-norm": + cost_params_obj = set_pnorm_params(cfg["cost_mode_parameters"]) else: err_str = f"{cost_mode} is not a valid cost mode option" error_channel.log(err_str) @@ -538,8 +580,8 @@ def set_defo_params(cost_cfg): Deformation cost parameter object with user-defined parameters """ # If deformation parameter is not empty, extract it - if 'deformation_parameters' in cost_cfg: - cfg = cost_cfg['deformation_parameters'] + if "deformation_parameters" in cost_cfg: + cfg = cost_cfg["deformation_parameters"] defo = snaphu.DefoCostParams(**cfg) else: defo = snaphu.DefoCostParams() @@ -561,8 +603,8 @@ def set_smooth_params(cost_cfg): Smooth cost parameter object with user-defined parameters """ # If smooth parameters are present, extract smooth dict - if 'smooth_parameters' in cost_cfg: - cfg = cost_cfg['smooth_parameters'] + if "smooth_parameters" in cost_cfg: + cfg = cost_cfg["smooth_parameters"] smooth = snaphu.SmoothCostParams(**cfg) else: # use all defaults values @@ -587,8 +629,8 @@ def set_pnorm_params(cost_cfg): # If pnorm section of 
runconfig is not empty, # proceed to set user-defined pnorm parameters - if 'pnorm_parameters' in cost_cfg: - cfg = cost_cfg['pnorm_parameters'] + if "pnorm_parameters" in cost_cfg: + cfg = cost_cfg["pnorm_parameters"] pnorm = snaphu.PNormCostParams(**cfg) else: pnorm = snaphu.PNormCostParams() @@ -611,8 +653,8 @@ def set_solver_params(snaphu_cfg): """ # If 'solver_parameters' is in snaphu_cfg, inspect setted # options. If None found, assign default - if 'solver_parameters' in snaphu_cfg: - cfg = snaphu_cfg['solver_parameters'] + if "solver_parameters" in snaphu_cfg: + cfg = snaphu_cfg["solver_parameters"] solver = snaphu.SolverParams(**cfg) else: solver = None @@ -636,17 +678,16 @@ def set_connected_components_params(snaphu_cfg): # If 'connected_components_parameters' is in snaphu_cfg, # inspect setted options. If None found, assign default - if 'connected_components_parameters' in snaphu_cfg: - cfg = snaphu_cfg['connected_components_parameters'] + if "connected_components_parameters" in snaphu_cfg: + cfg = snaphu_cfg["connected_components_parameters"] conn = snaphu.ConnCompParams(**cfg) else: conn = None return conn -def filter_nan(file_path, out_file, lines_per_block, - data_type, value=0.0): - ''' +def filter_nan(file_path, out_file, lines_per_block, data_type, value=0.0): + """ Converts NaNs to 'value' in raster at 'out_file' Parameters @@ -661,7 +702,7 @@ def filter_nan(file_path, out_file, lines_per_block, Data type of output raster value: float Value to use to replace NaN - ''' + """ ds_in = gdal.Open(file_path, gdal.GA_ReadOnly) width = ds_in.RasterXSize @@ -669,7 +710,7 @@ def filter_nan(file_path, out_file, lines_per_block, bands = ds_in.RasterCount # Create output raster - driver = gdal.GetDriverByName('GTiff') + driver = gdal.GetDriverByName("GTiff") out_ds = driver.Create(out_file, width, length, bands, data_type) lines_per_block = min(length, lines_per_block) @@ -683,17 +724,16 @@ def filter_nan(file_path, out_file, lines_per_block, else: 
block_length = lines_per_block # Extract a block from correlation data - data_block = ds_in.GetRasterBand(band + 1).ReadAsArray(0, - line_start, - width, block_length) + data_block = ds_in.GetRasterBand(band + 1).ReadAsArray( + 0, line_start, width, block_length) data_block[np.isnan(data_block)] = value - out_ds.GetRasterBand(band + 1).WriteArray(data_block, - xoff=0, yoff=line_start) + out_ds.GetRasterBand(band + 1).WriteArray( + data_block, xoff=0, yoff=line_start) out_ds.FlushCache() def open_raster(filename, band=1): - ''' + """ Open GDAL-friendly raster and allocate 'band' in numpy array @@ -708,7 +748,7 @@ def open_raster(filename, band=1): ------- raster: np.ndarray Raster band allocated in numpy array - ''' + """ ds = gdal.Open(filename, gdal.GA_ReadOnly) raster = ds.GetRasterBand(band).ReadAsArray() return raster @@ -740,9 +780,9 @@ def write_raster(filename, array, data_type=gdal.GDT_CFloat32, if __name__ == "__main__": - ''' + """ Run phase unwrapping from command line - ''' + """ # Load command line args unwrap_parser = YamlArgparse() @@ -752,11 +792,13 @@ def write_raster(filename, array, data_type=gdal.GDT_CFloat32, unwrap_runconfig = UnwrapRunConfig(args) # Prepare RUNW HDF5 - unwrap_runconfig.cfg['primary_executable']['product_type'] = 'RUNW_STANDALONE' - out_paths = h5_prep.run(unwrap_runconfig.cfg) + unwrap_runconfig.cfg["primary_executable"]["product_type"] =\ + "RUNW_STANDALONE" + out_paths = prepare_insar_hdf5.run(unwrap_runconfig.cfg) # Use RIFG from crossmul_path - rifg_h5 = unwrap_runconfig.cfg['processing']['phase_unwrap']['crossmul_path'] + rifg_h5 = \ + unwrap_runconfig.cfg["processing"]["phase_unwrap"]["crossmul_path"] # Run phase unwrapping - run(unwrap_runconfig.cfg, rifg_h5, out_paths['RUNW']) + run(unwrap_runconfig.cfg, rifg_h5, out_paths["RUNW"]) diff --git a/tests/data/SanAnd_129.h5 b/tests/data/SanAnd_129.h5 new file mode 100644 index 000000000..19bdbf237 Binary files /dev/null and b/tests/data/SanAnd_129.h5 differ diff --git 
a/tests/data/SanAnd_138.h5 b/tests/data/SanAnd_138.h5 new file mode 100644 index 000000000..f8764b2ae Binary files /dev/null and b/tests/data/SanAnd_138.h5 differ diff --git a/tests/data/SanAnd_dem.tif b/tests/data/SanAnd_dem.tif new file mode 100644 index 000000000..1acbd9a04 Binary files /dev/null and b/tests/data/SanAnd_dem.tif differ diff --git a/tests/data/Sdelta_129_02.h5 b/tests/data/Sdelta_129_02.h5 deleted file mode 100644 index 2872ffe84..000000000 Binary files a/tests/data/Sdelta_129_02.h5 and /dev/null differ diff --git a/tests/data/Sdelta_138_02.h5 b/tests/data/Sdelta_138_02.h5 deleted file mode 100644 index 89ea26a52..000000000 Binary files a/tests/data/Sdelta_138_02.h5 and /dev/null differ diff --git a/tests/data/Sdelta_dem.tif b/tests/data/Sdelta_dem.tif deleted file mode 100644 index dc9056ce0..000000000 Binary files a/tests/data/Sdelta_dem.tif and /dev/null differ diff --git a/tests/data/bandpass_test.yaml b/tests/data/bandpass_test.yaml index f3008979c..cb4d16627 100644 --- a/tests/data/bandpass_test.yaml +++ b/tests/data/bandpass_test.yaml @@ -7,12 +7,12 @@ runconfig: input_file_group: # REQUIRED - Two NISAR L1 RSLC formatted HDF5 files - reference_rslc_file: @ISCETEST@/Sdelta_129_02.h5 - secondary_rslc_file: @ISCETEST@/Sdelta_138_02.h5 + reference_rslc_file: @ISCETEST@/SanAnd_129.h5 + secondary_rslc_file: @ISCETEST@/SanAnd_138.h5 dynamic_ancillary_file_group: # REQUIRED - Use the provided DEM as input - dem_file: @ISCETEST@/Sdelta_dem.tif + dem_file: @ISCETEST@/SanAnd_dem.tif orbit_files: reference_orbit_file: secondary_orbit_file: diff --git a/tests/data/ionosphere_main_side_test.yaml b/tests/data/ionosphere_main_side_test.yaml index 5d3bd6399..160b87805 100644 --- a/tests/data/ionosphere_main_side_test.yaml +++ b/tests/data/ionosphere_main_side_test.yaml @@ -7,12 +7,12 @@ runconfig: input_file_group: # REQUIRED - Two NISAR L1B RSLC formatted HDF5 files - reference_rslc_file: @ISCETEST@/Sdelta_129_02.h5 - secondary_rslc_file: 
@ISCETEST@/Sdelta_129_02.h5 + reference_rslc_file: @ISCETEST@/SanAnd_129.h5 + secondary_rslc_file: @ISCETEST@/SanAnd_129.h5 dynamic_ancillary_file_group: # REQUIRED - Use the provided DEM as input - dem_file: @ISCETEST@/Sdelta_dem.tif + dem_file: @ISCETEST@/SanAnd_dem.tif product_path_group: # REQUIRED - Directory where PGE will place results. Irrelevant to SAS. diff --git a/tests/data/ionosphere_test.yaml b/tests/data/ionosphere_test.yaml index 19ca7826c..f17a76c6f 100644 --- a/tests/data/ionosphere_test.yaml +++ b/tests/data/ionosphere_test.yaml @@ -7,12 +7,13 @@ runconfig: input_file_group: # REQUIRED - Two NISAR L1B RSLC formatted HDF5 files - reference_rslc_file: @ISCETEST@/Sdelta_129_02.h5 - secondary_rslc_file: @ISCETEST@/Sdelta_129_02.h5 + reference_rslc_file: @ISCETEST@/SanAnd_129.h5 + secondary_rslc_file: @ISCETEST@/SanAnd_129.h5 dynamic_ancillary_file_group: # REQUIRED - Use the provided DEM as input - dem_file: @ISCETEST@/Sdelta_dem.tif + dem_file: @ISCETEST@/SanAnd_dem.tif + orbit_files: reference_orbit_file: secondary_orbit_file: diff --git a/tests/python/packages/nisar/workflows/bandpass_insar.py b/tests/python/packages/nisar/workflows/bandpass_insar.py index 87ec03529..35bd4cbdb 100644 --- a/tests/python/packages/nisar/workflows/bandpass_insar.py +++ b/tests/python/packages/nisar/workflows/bandpass_insar.py @@ -2,14 +2,13 @@ import os import h5py +import iscetest import numpy as np import numpy.testing as npt - -from nisar.workflows import bandpass_insar, h5_prep, insar +from nisar.workflows import bandpass_insar, insar, prepare_insar_hdf5 from nisar.workflows.insar_runconfig import InsarRunConfig from nisar.workflows.persistence import Persistence -import iscetest def test_bandpass_run(): ''' @@ -29,17 +28,14 @@ def test_bandpass_run(): runconfig = InsarRunConfig(args) runconfig.geocode_common_arg_load() - h5_prep.run(runconfig.cfg) - - # insar.run(runconfig.cfg) - out_paths = h5_prep.run(runconfig.cfg) + out_paths = 
prepare_insar_hdf5.run(runconfig.cfg) persist = Persistence('insar.log', restart=True) persist.run_steps['dense_offsets'] = False - # input test data is missing mandatory metadata that - # causes the test fail + # input test data is missing mandatory metadata that + # causes the test fail persist.run_steps['baseline'] = False - + persist.run_steps['solid_earth_tides'] = False insar.run(runconfig.cfg, out_paths, persist.run_steps) @@ -48,13 +44,15 @@ def test_bandpass_validate(): Validate products generated by bandpass workflow. ''' scratch_path = '.' - + group_path = '/science/LSAR/RIFG/swaths/frequencyA/interferogram/HH' with h5py.File(os.path.join(scratch_path, 'rifg_bandpass.h5'), 'r') as h_rifg: - # check if phase of generated interferogram is sufficiently close to 0. + # check if phase of generated interferogram is sufficiently close to 0. igram = h_rifg[f'{group_path}/wrappedInterferogram'][()] - npt.assert_allclose(np.nanmean(np.angle(igram)), 0, atol=1e-2) + coh = h_rifg[f'{group_path}/coherenceMagnitude'][()] + + npt.assert_allclose(np.nanmean(coh[3:-2, 3:-2]), 1, atol=2 * 1e-2) if __name__ == "__main__": diff --git a/tests/python/packages/nisar/workflows/crossmul.py b/tests/python/packages/nisar/workflows/crossmul.py index ebbc048a5..c726fa20e 100644 --- a/tests/python/packages/nisar/workflows/crossmul.py +++ b/tests/python/packages/nisar/workflows/crossmul.py @@ -2,14 +2,12 @@ import os import h5py +import iscetest import numpy as np import numpy.testing as npt - -from nisar.workflows import crossmul, h5_prep +from nisar.workflows import crossmul, prepare_insar_hdf5 from nisar.workflows.crossmul_runconfig import CrossmulRunConfig -import iscetest - def test_crossmul_run(): ''' @@ -30,9 +28,11 @@ def test_crossmul_run(): runconfig = CrossmulRunConfig(args) runconfig.geocode_common_arg_load() - h5_prep.run(runconfig.cfg) + prepare_insar_hdf5.run(runconfig.cfg) - crossmul.run(runconfig.cfg) + crossmul.run(runconfig.cfg, + output_hdf5 = \ + 
runconfig.cfg['product_path_group']['sas_output_file']) def test_crossmul_validate(): diff --git a/tests/python/packages/nisar/workflows/cuda_insar.py b/tests/python/packages/nisar/workflows/cuda_insar.py index 9fd20d21c..5f6266d12 100644 --- a/tests/python/packages/nisar/workflows/cuda_insar.py +++ b/tests/python/packages/nisar/workflows/cuda_insar.py @@ -2,12 +2,11 @@ import os import isce3.ext.isce3 as isce3 -from nisar.workflows import h5_prep, insar +import iscetest +from nisar.workflows import insar, prepare_insar_hdf5 from nisar.workflows.insar_runconfig import InsarRunConfig from nisar.workflows.persistence import Persistence -import iscetest - def test_insar_run(): ''' @@ -37,7 +36,7 @@ def test_insar_run(): insar_runcfg.geocode_common_arg_load() insar_runcfg.yaml_check() - out_paths = h5_prep.run(insar_runcfg.cfg) + out_paths = prepare_insar_hdf5.run(insar_runcfg.cfg) persist = Persistence('insar.log', restart=True) # the baseline step is disabled because the winnipeg test dataset diff --git a/tests/python/packages/nisar/workflows/filter_interferogram.py b/tests/python/packages/nisar/workflows/filter_interferogram.py index cc4e85589..ebe554e17 100644 --- a/tests/python/packages/nisar/workflows/filter_interferogram.py +++ b/tests/python/packages/nisar/workflows/filter_interferogram.py @@ -2,14 +2,13 @@ import os import h5py -from isce3.signal.filter_data import filter_data import iscetest import numpy as np -from osgeo import gdal - -from nisar.workflows import filter_interferogram, h5_prep +from isce3.signal.filter_data import filter_data +from nisar.workflows import filter_interferogram, prepare_insar_hdf5 from nisar.workflows.filter_interferogram_runconfig import \ FilterInterferogramRunConfig +from osgeo import gdal from scipy.signal import convolve2d @@ -34,7 +33,7 @@ def test_filter_interferogram_run(): runconfig = FilterInterferogramRunConfig(args) runconfig.geocode_common_arg_load() - out_paths = h5_prep.run(runconfig.cfg) + out_paths = 
prepare_insar_hdf5.run(runconfig.cfg) # Modify the interferogram data to have something meaningful product_path = '/science/LSAR/RIFG/swaths/frequencyA/interferogram/HH' diff --git a/tests/python/packages/nisar/workflows/geocode_insar.py b/tests/python/packages/nisar/workflows/geocode_insar.py index 5f58562c2..c0c2d9a45 100644 --- a/tests/python/packages/nisar/workflows/geocode_insar.py +++ b/tests/python/packages/nisar/workflows/geocode_insar.py @@ -1,15 +1,14 @@ import argparse import os -from osgeo import gdal import h5py -import numpy as np - import isce3 -from nisar.workflows import geocode_insar, h5_prep +import iscetest +import numpy as np +from nisar.workflows import geocode_insar, prepare_insar_hdf5 from nisar.workflows.geocode_insar_runconfig import GeocodeInsarRunConfig +from osgeo import gdal -import iscetest def test_geocode_run(): ''' @@ -42,7 +41,7 @@ def test_geocode_run(): geo_tx.tofile('gunw_geogrid.txt', sep=',') # prepare HDF5 outputs - out_paths = h5_prep.run(runconfig.cfg) + out_paths = prepare_insar_hdf5.run(runconfig.cfg) # insert rdr2geo outputs into RUNW HDF5 rdr2geo_dict = {'x': 'unwrappedPhase', 'y': 'coherenceMagnitude'} @@ -85,7 +84,7 @@ def test_geocode_validate(): continue path_gunw = os.path.join(scratch_path, f'{pu}_gunw.h5') - product_path = 'science/LSAR/GUNW/grids/frequencyA/interferogram/unwrapped/HH' + product_path = 'science/LSAR/GUNW/grids/frequencyA/unwrappedInterferogram/HH' with h5py.File(path_gunw, 'r', libver='latest', swmr=True) as h: # iterate over axis rdr2geo_dict = {'x': 'unwrappedPhase', 'y': 'coherenceMagnitude'} diff --git a/tests/python/packages/nisar/workflows/insar.py b/tests/python/packages/nisar/workflows/insar.py index ea4f8b64b..4f0b93842 100644 --- a/tests/python/packages/nisar/workflows/insar.py +++ b/tests/python/packages/nisar/workflows/insar.py @@ -2,7 +2,7 @@ import os import isce3.ext.isce3 as isce3 -from nisar.workflows import h5_prep, insar +from nisar.workflows import prepare_insar_hdf5, insar 
from nisar.workflows.insar_runconfig import InsarRunConfig from nisar.workflows.persistence import Persistence @@ -36,7 +36,7 @@ def test_insar_run(): insar_runcfg.geocode_common_arg_load() insar_runcfg.yaml_check() - out_paths = h5_prep.run(insar_runcfg.cfg) + out_paths = prepare_insar_hdf5.run(insar_runcfg.cfg) persist = Persistence(restart=True, logfile_path='insar.log') # No CPU dense offsets. Turn off dense_offsets, diff --git a/tests/python/packages/nisar/workflows/ionosphere.py b/tests/python/packages/nisar/workflows/ionosphere.py index b6760bcd2..037176b46 100644 --- a/tests/python/packages/nisar/workflows/ionosphere.py +++ b/tests/python/packages/nisar/workflows/ionosphere.py @@ -1,15 +1,15 @@ import argparse import os -from osgeo import gdal import h5py +import isce3 import iscetest import numpy as np -import isce3 from isce3.atmosphere import ionosphere_estimation -from nisar.workflows import h5_prep, insar +from nisar.workflows import insar, prepare_insar_hdf5 from nisar.workflows.insar_runconfig import InsarRunConfig from nisar.workflows.persistence import Persistence +from osgeo import gdal def test_split_main_band_run(): @@ -34,7 +34,7 @@ def test_split_main_band_run(): insar_runcfg.geocode_common_arg_load() insar_runcfg.yaml_check() - out_paths = h5_prep.run(insar_runcfg.cfg) + out_paths = prepare_insar_hdf5.run(insar_runcfg.cfg) persist = Persistence(restart=True, logfile_path='ionosphere.log') # No CPU dense offsets. Turn off dense_offsets, @@ -70,7 +70,7 @@ def test_main_side_band_run(): insar_runcfg.geocode_common_arg_load() insar_runcfg.yaml_check() - out_paths = h5_prep.run(insar_runcfg.cfg) + out_paths = prepare_insar_hdf5.run(insar_runcfg.cfg) persist = Persistence(restart=True, logfile_path='ionosphere.log') # No CPU dense offsets. 
Turn off dense_offsets, diff --git a/tests/python/packages/nisar/workflows/rubbersheet.py b/tests/python/packages/nisar/workflows/rubbersheet.py index 769414037..abb8e3620 100644 --- a/tests/python/packages/nisar/workflows/rubbersheet.py +++ b/tests/python/packages/nisar/workflows/rubbersheet.py @@ -2,14 +2,12 @@ import os import h5py +import iscetest import numpy as np import numpy.testing as npt - -from nisar.workflows import rubbersheet, h5_prep +from nisar.workflows import prepare_insar_hdf5, rubbersheet from nisar.workflows.rubbersheet_runconfig import RubbersheetRunConfig -import iscetest - def test_run_rubbersheet(): ''' @@ -31,10 +29,12 @@ def test_run_rubbersheet(): runconfig.geocode_common_arg_load() # Prepare output HDF5 products - h5_prep.run(runconfig.cfg) + prepare_insar_hdf5.run(runconfig.cfg) # Run rubbersheet - rubbersheet.run(runconfig.cfg) + rubbersheet.run(runconfig.cfg, + output_hdf5=\ + runconfig.cfg['product_path_group']['sas_output_file']) def test_validate_rubbersheet(): diff --git a/tests/python/packages/nisar/workflows/unwrap.py b/tests/python/packages/nisar/workflows/unwrap.py index b73737131..6193b017b 100644 --- a/tests/python/packages/nisar/workflows/unwrap.py +++ b/tests/python/packages/nisar/workflows/unwrap.py @@ -2,14 +2,12 @@ import os import h5py +import iscetest import numpy as np import numpy.testing as npt - -from nisar.workflows import unwrap, h5_prep +from nisar.workflows import prepare_insar_hdf5, unwrap from nisar.workflows.unwrap_runconfig import UnwrapRunConfig -import iscetest - def test_unwrap_run(): ''' @@ -30,7 +28,7 @@ def test_unwrap_run(): runconfig = UnwrapRunConfig(args) runconfig.geocode_common_arg_load() - out_paths = h5_prep.run(runconfig.cfg) + out_paths = prepare_insar_hdf5.run(runconfig.cfg) product_path = 'science/LSAR/RIFG/swaths/frequencyA/interferogram/HH'