-
Notifications
You must be signed in to change notification settings - Fork 76
Add IDW interpolation to rubbersheeting algorithm #239
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Changes from 39 commits
da95af1
3a8e2ec
b6cfbbb
f764f2f
a920ae6
1d28fae
84f3874
2bb897a
da76f88
6313217
ce2b301
8394511
72645bd
6f93fd3
eebd988
9b108ce
b1c78f7
e46f53f
54d6ab4
73b6b9b
2eb1387
42f2514
39c9883
f6cfd55
6176caa
ed3ced2
e50145f
43f4d1f
ea3acd5
dd786c7
2b6b9b6
ac487fb
5318933
44c2902
f163154
f301f71
62fc456
3e9552a
9723310
f350df3
6a895d5
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change | ||||||||
|---|---|---|---|---|---|---|---|---|---|---|
|
|
@@ -12,9 +12,10 @@ | |||||||||
| import numpy as np | ||||||||||
| from isce3.io import HDF5OptimizedReader | ||||||||||
| from isce3.math import offsets_polyfit | ||||||||||
| from isce3.unwrap.preprocess import interpret_subswath_mask | ||||||||||
| from nisar.products.insar.product_paths import RIFGGroupsPaths | ||||||||||
| from nisar.products.readers import SLC | ||||||||||
| from nisar.workflows import prepare_insar_hdf5 | ||||||||||
| from nisar.workflows import h5_prep | ||||||||||
| from nisar.workflows.compute_stats import compute_stats_real_hdf5_dataset | ||||||||||
| from nisar.workflows.helpers import (get_cfg_freq_pols, | ||||||||||
| get_ground_track_velocity_product, | ||||||||||
|
|
@@ -23,6 +24,7 @@ | |||||||||
| from nisar.workflows.yaml_argparse import YamlArgparse | ||||||||||
| from osgeo import gdal | ||||||||||
| from scipy import interpolate, ndimage, signal | ||||||||||
| from scipy.spatial import cKDTree | ||||||||||
|
|
||||||||||
|
|
||||||||||
| def run_rubbersheet_with_polyfit(cfg: dict, output_hdf5: str = None): | ||||||||||
|
|
@@ -291,6 +293,14 @@ def run_rubbersheet_with_interpolation(cfg: dict, output_hdf5: str = None): | |||||||||
| zero_doppler_time, | ||||||||||
| dem_file, | ||||||||||
| rubbersheet_dir) | ||||||||||
| valid_mask = None | ||||||||||
| # Apply the subswath mask to the outlier detection | ||||||||||
| if rubbersheet_params['subswath_mask_apply_enabled']: | ||||||||||
| pixel_offsets_mask_path = f'{pixel_offsets_path}/mask' | ||||||||||
| subswath_mask = dst_h5[pixel_offsets_mask_path][()] | ||||||||||
| ref_valid, sec_valid, _ = interpret_subswath_mask(subswath_mask) | ||||||||||
| valid_mask = ref_valid & sec_valid | ||||||||||
|
|
||||||||||
| for pol in pol_list: | ||||||||||
| # Create input and output directories for pol under processing | ||||||||||
| pol_group_path = f'{pixel_offsets_path}/{pol}' | ||||||||||
|
|
@@ -305,7 +315,7 @@ def run_rubbersheet_with_interpolation(cfg: dict, output_hdf5: str = None): | |||||||||
| # Identify outliers | ||||||||||
| offset_az_culled, offset_rg_culled = identify_outliers( | ||||||||||
| str(dense_offsets_dir), | ||||||||||
| rubbersheet_params) | ||||||||||
| rubbersheet_params, valid_mask) | ||||||||||
| # Fill outliers holes | ||||||||||
| offset_az = fill_outliers_holes(offset_az_culled, | ||||||||||
| rubbersheet_params) | ||||||||||
|
|
@@ -325,7 +335,9 @@ def run_rubbersheet_with_interpolation(cfg: dict, output_hdf5: str = None): | |||||||||
| key.startswith('layer')] | ||||||||||
| # Apply offset blending | ||||||||||
| offset_az, offset_rg = _offset_blending(off_product_dir, | ||||||||||
| rubbersheet_params, layer_keys) | ||||||||||
| rubbersheet_params, | ||||||||||
| layer_keys, | ||||||||||
| valid_mask) | ||||||||||
|
|
||||||||||
| # Get correlation peak path for the first offset layer | ||||||||||
| corr_peak_path = str(f'{off_prod_dir}/{layer_keys[0]}/correlation_peak') | ||||||||||
|
|
@@ -336,8 +348,16 @@ def run_rubbersheet_with_interpolation(cfg: dict, output_hdf5: str = None): | |||||||||
| # If there are residual NaNs, use interpolation to fill residual holes | ||||||||||
| nan_count = np.count_nonzero(np.isnan(offset)) | ||||||||||
| if nan_count > 0: | ||||||||||
| offsets[k] = _interpolate_offsets(offset, | ||||||||||
| rubbersheet_params['interpolation_method']) | ||||||||||
| if rubbersheet_params['interpolation_method'] == 'idw': | ||||||||||
| # Get the IDW parameters | ||||||||||
| power, number, radius = ( rubbersheet_params['idw_interpolation'].get(k) | ||||||||||
| for k in ('power', 'number', 'radius')) | ||||||||||
| offsets[k] = _interpolate_offsets_by_idw(offset, power, number, radius) | ||||||||||
| # Additional linear interpolation is required to fill the extra outliers beyond the radius | ||||||||||
| offsets[k] = _interpolate_offsets(offsets[k],'linear') | ||||||||||
| else: | ||||||||||
| offsets[k] = _interpolate_offsets(offset, | ||||||||||
| rubbersheet_params['interpolation_method']) | ||||||||||
| # If required, filter offsets | ||||||||||
| offsets[k] = _filter_offsets(offsets[k], rubbersheet_params) | ||||||||||
| # Save offsets on disk for resampling | ||||||||||
|
|
@@ -434,7 +454,7 @@ def _write_to_disk(outpath, array, format='ENVI', | |||||||||
| ds.GetRasterBand(1).WriteArray(array) | ||||||||||
| ds.FlushCache() | ||||||||||
|
|
||||||||||
| def identify_outliers(offsets_dir, rubbersheet_params): | ||||||||||
| def identify_outliers(offsets_dir, rubbersheet_params, mask = None): | ||||||||||
| ''' | ||||||||||
| Identify outliers in the offset fields. | ||||||||||
| Outliers are identified by a thresholding | ||||||||||
|
|
@@ -448,6 +468,8 @@ def identify_outliers(offsets_dir, rubbersheet_params): | |||||||||
| where pixel offsets are located | ||||||||||
| rubbersheet_params: cfg | ||||||||||
| Dictionary containing rubbersheet parameters | ||||||||||
| mask: np.ndarray | ||||||||||
| A mask indicating the region of interest (default: None) | ||||||||||
|
|
||||||||||
| Returns | ||||||||||
| ------- | ||||||||||
|
|
@@ -468,6 +490,9 @@ def identify_outliers(offsets_dir, rubbersheet_params): | |||||||||
| elif metric == 'median_filter': | ||||||||||
| offset_az = _open_raster(f'{offsets_dir}/dense_offsets', 1) | ||||||||||
| offset_rg = _open_raster(f'{offsets_dir}/dense_offsets', 2) | ||||||||||
| if mask is not None: | ||||||||||
| offset_az[~mask] = np.nan | ||||||||||
| offset_rg[~mask] = np.nan | ||||||||||
| mask_data = compute_mad_mask(offset_az, window_az, window_rg, threshold) | \ | ||||||||||
| compute_mad_mask(offset_rg, window_az, window_rg, threshold) | ||||||||||
| elif metric == 'covariance': | ||||||||||
|
|
@@ -671,7 +696,7 @@ def _fill_nan_with_mean(arr_in, arr_out, neighborhood_size): | |||||||||
| return filled_arr | ||||||||||
|
|
||||||||||
|
|
||||||||||
| def _offset_blending(off_product_dir, rubbersheet_params, layer_keys): | ||||||||||
| def _offset_blending(off_product_dir, rubbersheet_params, layer_keys, mask = None): | ||||||||||
| ''' | ||||||||||
| Blends offsets layers at different resolution. Implements a | ||||||||||
| pyramidal filling algorithm using the offset layer at higher | ||||||||||
|
|
@@ -689,6 +714,8 @@ def _offset_blending(off_product_dir, rubbersheet_params, layer_keys): | |||||||||
| Dictionary containing the user-defined rubbersheet options | ||||||||||
| layer_keys: list | ||||||||||
| List of layers within the offset product | ||||||||||
| mask: np.ndarray | ||||||||||
| A mask indicating the region of interest (default: None) | ||||||||||
|
|
||||||||||
| Returns | ||||||||||
| ------- | ||||||||||
|
|
@@ -700,7 +727,7 @@ def _offset_blending(off_product_dir, rubbersheet_params, layer_keys): | |||||||||
|
|
||||||||||
| # Filter outliers from layer one | ||||||||||
| offset_az, offset_rg = identify_outliers(str(off_product_dir / layer_keys[0]), | ||||||||||
| rubbersheet_params) | ||||||||||
| rubbersheet_params, mask) | ||||||||||
|
|
||||||||||
| # Replace the NaN locations in layer1 with the mean of pixels in layers | ||||||||||
| # at lower resolution computed in a neighborhood centered at the NaN location | ||||||||||
|
|
@@ -711,12 +738,12 @@ def _offset_blending(off_product_dir, rubbersheet_params, layer_keys): | |||||||||
|
|
||||||||||
| if nan_count_az > 0: | ||||||||||
| offset_az_culled, _ = identify_outliers(str(off_product_dir / layer_key), | ||||||||||
| rubbersheet_params) | ||||||||||
| rubbersheet_params, mask) | ||||||||||
| offset_az = _fill_nan_with_mean(offset_az, offset_az_culled, filter_size) | ||||||||||
|
|
||||||||||
| if nan_count_rg > 0: | ||||||||||
| _, offset_rg_culled = identify_outliers(str(off_product_dir / layer_key), | ||||||||||
| rubbersheet_params) | ||||||||||
| rubbersheet_params, mask) | ||||||||||
| offset_rg = _fill_nan_with_mean(offset_rg, offset_rg_culled, filter_size) | ||||||||||
|
|
||||||||||
| # Fill remaining holes by iteratively filling the output offset layer | ||||||||||
|
|
@@ -727,11 +754,107 @@ def _offset_blending(off_product_dir, rubbersheet_params, layer_keys): | |||||||||
|
|
||||||||||
| return offset_az, offset_rg | ||||||||||
|
|
||||||||||
| def _interpolate_offsets_by_idw( | ||||||||||
| Z: np.ndarray, | ||||||||||
| power: float = 2.0, | ||||||||||
| number: int | None = 100, | ||||||||||
| radius: float | None = 200.0, | ||||||||||
| eps: float = 1e-12): | ||||||||||
| ''' | ||||||||||
| Performs IDW interpolation on a 2D grid containing NaN values. | ||||||||||
|
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more.
Suggested change
|
||||||||||
| Missing pixels are filled using inverse distance weighting (IDW) | ||||||||||
|
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more.
Suggested change
|
||||||||||
| based on nearby valid samples. The interpolation can optionally | ||||||||||
| limit the number of nearest neighbors and the maximum search | ||||||||||
| radius to improve efficiency and control smoothing. | ||||||||||
|
|
||||||||||
| import journal | ||||||||||
| import numpy as np | ||||||||||
| from scipy import interpolate | ||||||||||
| Parameters | ||||||||||
| ---------- | ||||||||||
| Z: np.ndarray | ||||||||||
| 2D array of shape (H, W) containing NaN values to be filled | ||||||||||
| power: float | ||||||||||
| Power parameter p controlling the distance weighting | ||||||||||
| number: int or None | ||||||||||
| Number of nearest neighbors used for interpolation. | ||||||||||
| If None, all valid points are used (default: 100) | ||||||||||
| radius: float or None | ||||||||||
| Maximum search radius in pixels. If None, no radius | ||||||||||
| limitation is applied (default: 200) | ||||||||||
| eps: float | ||||||||||
| Small value added to distance to avoid division by zero | ||||||||||
|
|
||||||||||
| Returns | ||||||||||
| ------- | ||||||||||
| filled: np.ndarray | ||||||||||
| 2D array with the same shape as Z where NaN values | ||||||||||
| have been filled using IDW interpolation | ||||||||||
| ''' | ||||||||||
|
|
||||||||||
| valid = np.isfinite(Z) | ||||||||||
| yy, xx = np.nonzero(valid) | ||||||||||
| if yy.size == 0: | ||||||||||
| raise ValueError("No valid points found to interpolate from.") | ||||||||||
|
|
||||||||||
| points = np.column_stack([xx, yy]).astype(float) # (x, y) | ||||||||||
| values = Z[yy, xx].astype(float) | ||||||||||
|
|
||||||||||
| # Query points (NaNs) | ||||||||||
| qmask = ~valid | ||||||||||
| yq, xq = np.nonzero(qmask) | ||||||||||
| if yq.size == 0: | ||||||||||
| return Z.copy() | ||||||||||
|
|
||||||||||
| qpoints = np.column_stack([xq, yq]).astype(float) | ||||||||||
|
|
||||||||||
| # KDTree for neighbors | ||||||||||
| tree = cKDTree(points) | ||||||||||
|
|
||||||||||
| # If number is None, use all valid points | ||||||||||
| k = points.shape[0] if number is None else number | ||||||||||
|
|
||||||||||
| # Query neighbors | ||||||||||
| if radius is None: | ||||||||||
| dists, idxs = tree.query(qpoints, k=min(k, points.shape[0])) | ||||||||||
| # dists: (M, k) | ||||||||||
| # idxs : (M, k) | ||||||||||
| use_mask = np.isfinite(dists) | ||||||||||
| else: | ||||||||||
| dists, idxs = tree.query(qpoints, k=min(k, points.shape[0]), distance_upper_bound=radius) | ||||||||||
| # For neighbors not found within radius, idxs == points.shape[0], dists == inf | ||||||||||
|
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. need to combine together |
||||||||||
| use_mask = np.isfinite(dists) & (idxs < points.shape[0]) | ||||||||||
|
|
||||||||||
| # Filled values | ||||||||||
| filled_vals = np.full(qpoints.shape[0], np.nan, dtype=float) | ||||||||||
|
|
||||||||||
| # Fill the value with nearest neighbor if distance is too small. | ||||||||||
| zero_hit = np.any((dists <= eps) & use_mask, axis=1) | ||||||||||
| if np.any(zero_hit): | ||||||||||
| row = np.where(zero_hit)[0] | ||||||||||
| col = np.argmax(((dists <= eps) & use_mask)[row], axis=1) | ||||||||||
| filled_vals[row] = values[idxs[row, col]] | ||||||||||
|
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. might not be neccessary here since the distance in 2D grid is usually > 1 |
||||||||||
|
|
||||||||||
| # For the rest, compute IDW | ||||||||||
| rest = ~zero_hit | ||||||||||
| if np.any(rest): | ||||||||||
| d = dists[rest] | ||||||||||
| ii = idxs[rest] | ||||||||||
| m = use_mask[rest] | ||||||||||
|
|
||||||||||
| # weights = 1 / d^p, ignore invalid neighbors | ||||||||||
| w = np.zeros_like(d, dtype=float) | ||||||||||
| w[m] = 1.0 / np.maximum(d[m], eps) ** power | ||||||||||
|
|
||||||||||
|
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. same here |
||||||||||
| v = np.zeros_like(d, dtype=float) | ||||||||||
| v[m] = values[ii[m]] | ||||||||||
|
|
||||||||||
| ws = w.sum(axis=1) | ||||||||||
| # If no neighbors (ws==0), leave as NaN (no extrapolation) | ||||||||||
| ok = ws > 0 | ||||||||||
| filled_vals[np.where(rest)[0][ok]] = (w[ok] * v[ok]).sum(axis=1) / ws[ok] | ||||||||||
|
|
||||||||||
| out = Z.copy() | ||||||||||
| out[yq, xq] = filled_vals | ||||||||||
|
|
||||||||||
| return out | ||||||||||
|
|
||||||||||
| def _interpolate_offsets(offset, interp_method): | ||||||||||
| ''' | ||||||||||
|
|
@@ -851,5 +974,5 @@ def run(cfg: dict, output_hdf5: str = None): | |||||||||
|
|
||||||||||
| # Prepare RIFG. Culled offsets will be | ||||||||||
| # allocated in RIFG product | ||||||||||
| out_paths = prepare_insar_hdf5.run(rubbersheet_runconfig.cfg) | ||||||||||
| _, out_paths = h5_prep.get_products_and_paths(rubbersheet_runconfig.cfg) | ||||||||||
| run(rubbersheet_runconfig.cfg, out_paths['RIFG']) | ||||||||||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
|
|
@@ -306,7 +306,7 @@ runconfig: | |
| # When `min_cluster_pixels` is 0, this pre-removal step is | ||
| # skipped entirely. | ||
| min_cluster_pixels: 2 | ||
|
|
||
| bridge_algorithm_enabled: True | ||
| # Maximum radius used when bridging disconnected | ||
| # regions. | ||
|
|
@@ -544,7 +544,20 @@ runconfig: | |
| kernel_size: 3 | ||
| # Interpolation method. Interpolation is used to fill residual outlier | ||
| # holes if present | ||
| interpolation_method: linear | ||
| interpolation_method: idw | ||
|
|
||
| # Flag to enable the use of subswath mask for the outlier detection | ||
| subswath_mask_apply_enabled: True | ||
|
|
||
| # IDW interpolation method parameters | ||
| idw_interpolation: | ||
| # Power parameter controlling the distance weighting | ||
| power: 2 | ||
| # Number of nearest neighbors used for interpolation | ||
| number: 100 | ||
| # Maximum search radius in pixels | ||
| radius: 200 | ||
|
|
||
|
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. keep the default to be linear |
||
| # It is good practice to filter the offsets prior to interferogram | ||
| # formation to reduce noise. We expose: no_filter: do not filter the offsets/ | ||
| # degrade offsets resolution. median_filter, boxcar_filter (moving average), | ||
|
|
@@ -780,7 +793,7 @@ runconfig: | |
| # During deramping, uniform sampling is applied | ||
| # when number of samples is larger than maximum pixel | ||
| bridge_ramp_maximum_pixel: 1e6 | ||
|
|
||
| correction_luts: | ||
| # Boolean flag to activate/deactivate model-based solid earth tide corrections | ||
| solid_earth_tides_enabled: True | ||
|
|
||
| Original file line number | Diff line number | Diff line change | ||||||||||||||||
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
|
|
@@ -548,7 +548,13 @@ rubbersheet_options: | |||||||||||||||||
|
|
||||||||||||||||||
| # Interpolation method. Interpolation is used to fill residual | ||||||||||||||||||
| # outlier holes (if present) | ||||||||||||||||||
| interpolation_method: enum('no_interpolation', 'nearest', 'linear', 'cubic', required=False) | ||||||||||||||||||
| interpolation_method: enum('no_interpolation', 'nearest', 'linear', 'cubic', 'idw', required=False) | ||||||||||||||||||
|
|
||||||||||||||||||
| # Inverse distance weighting (IDW) interpolation method | ||||||||||||||||||
| idw_interpolation: include('idw_interpolation_options', required=False) | ||||||||||||||||||
|
|
||||||||||||||||||
| # Flag to enable the use of the subswath mask for the outlier detection | ||||||||||||||||||
| subswath_mask_apply_enabled: bool(required=False) | ||||||||||||||||||
|
|
||||||||||||||||||
| # It is good practice to filter the offsets prior to interferogram | ||||||||||||||||||
| # formation to reduce noise. We expose: no_filter: do not filter the offsets/ | ||||||||||||||||||
|
|
@@ -595,6 +601,14 @@ polyfitting_options: | |||||||||||||||||
| # Number of samples along the azimuth in pixels for polyfitting | ||||||||||||||||||
| samples_along_azimuth: num(min=0, required=False) | ||||||||||||||||||
|
|
||||||||||||||||||
| idw_interpolation_options: | ||||||||||||||||||
| # Power parameter controlling the distance weighting | ||||||||||||||||||
| power: int(min=0, required=False) | ||||||||||||||||||
| # Number of nearest neighbors used for interpolation | ||||||||||||||||||
| number: int(min=0, required=False) | ||||||||||||||||||
| # Maximum search radius in pixels | ||||||||||||||||||
| radius: num(min=0, required=False) | ||||||||||||||||||
|
Contributor
Author
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more.
Suggested change
|
||||||||||||||||||
|
|
||||||||||||||||||
|
xhuang-jpl marked this conversation as resolved.
|
||||||||||||||||||
| crossmul_options: | ||||||||||||||||||
| # Path to HDF5 file or directory with coregistered SLCs | ||||||||||||||||||
| # If directory then the following directory tree is required: | ||||||||||||||||||
|
|
||||||||||||||||||
Uh oh!
There was an error while loading. Please reload this page.