Skip to content
Merged
Show file tree
Hide file tree
Changes from 39 commits
Commits
Show all changes
41 commits
Select commit Hold shift + click to select a range
da95af1
sm r3.4
Sep 19, 2023
3a8e2ec
merge R4.0.2beta-1
Mar 11, 2024
b6cfbbb
change the SM commit id for R4.0.2
May 23, 2024
f764f2f
merge upstream develop branch
Jun 4, 2024
a920ae6
Merge remote-tracking branch 'upstream/develop' into develop
Jun 5, 2024
1d28fae
Merge remote-tracking branch 'upstream/develop' into develop
Jun 13, 2024
84f3874
Merge remote-tracking branch 'upstream/develop' into develop
Aug 12, 2024
2bb897a
Merge remote-tracking branch 'upstream/develop' into develop
Sep 14, 2024
da76f88
Merge branch 'develop' of https://github-fn.jpl.nasa.gov/isce-3/isce …
Oct 2, 2024
6313217
Merge branch 'develop' of https://github-fn.jpl.nasa.gov/isce-3/isce …
Oct 9, 2024
ce2b301
Merge branch 'develop' of https://github-fn.jpl.nasa.gov/isce-3/isce …
Oct 15, 2024
8394511
Merge branch 'develop' of https://github-fn.jpl.nasa.gov/isce-3/isce …
Oct 24, 2024
72645bd
Merge branch 'develop' of https://github-fn.jpl.nasa.gov/isce-3/isce …
Oct 29, 2024
6f93fd3
Merge branch 'develop' of https://github-fn.jpl.nasa.gov/isce-3/isce …
Dec 9, 2024
eebd988
Merge branch 'develop' of https://github-fn.jpl.nasa.gov/isce-3/isce …
Jan 21, 2025
9b108ce
Merge branch 'develop' of https://github-fn.jpl.nasa.gov/isce-3/isce …
Jan 29, 2025
b1c78f7
Merge branch 'develop' of https://github-fn.jpl.nasa.gov/isce-3/isce …
Mar 18, 2025
e46f53f
Merge branch 'develop' of https://github-fn.jpl.nasa.gov/isce-3/isce …
Apr 10, 2025
54d6ab4
Merge branch 'develop' of https://github-fn.jpl.nasa.gov/isce-3/isce …
May 12, 2025
73b6b9b
Merge branch 'develop' of https://github-fn.jpl.nasa.gov/isce-3/isce …
Jun 10, 2025
2eb1387
Merge branch 'develop' of https://github.com/isce-framework/isce3 int…
Sep 30, 2025
42f2514
Merge branch 'develop' of https://github.com/isce-framework/isce3 int…
Nov 14, 2025
39c9883
Merge branch 'develop' of https://github.com/isce-framework/isce3 int…
Dec 1, 2025
f6cfd55
Merge branch 'develop' of https://github.com/isce-framework/isce3 int…
Jan 14, 2026
6176caa
Merge branch 'develop' of https://github.com/isce-framework/isce3 int…
Jan 16, 2026
ed3ced2
Merge branch 'develop' of https://github.com/isce-framework/isce3 int…
Mar 18, 2026
e50145f
remove redundant imports
Mar 18, 2026
43f4d1f
add the idw
Mar 18, 2026
ea3acd5
update the schema
Mar 18, 2026
dd786c7
add the connected components function
Mar 18, 2026
2b6b9b6
update the default value
Mar 18, 2026
ac487fb
run isort
Mar 18, 2026
5318933
fix bugs
Mar 18, 2026
44c2902
update the crossmul main
Mar 18, 2026
f163154
add subswath mask
Mar 18, 2026
f301f71
add the subswath mask to the runconfig
Mar 19, 2026
62fc456
Merge branch 'develop' of https://github.com/isce-framework/isce3 int…
Mar 23, 2026
3e9552a
Merge branch 'develop' of https://github.com/isce-framework/isce3 int…
Mar 25, 2026
9723310
remove unused functions
Apr 7, 2026
f350df3
Apply suggestions from code review
xhuang-jpl Apr 9, 2026
6a895d5
update all comments
Apr 9, 2026
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 3 additions & 3 deletions python/packages/nisar/workflows/crossmul.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@

from isce3.io import HDF5OptimizedReader
from nisar.products.readers import SLC
from nisar.workflows import prepare_insar_hdf5
from nisar.workflows import h5_prep
from nisar.workflows.compute_stats import compute_stats_real_data
from nisar.workflows.crossmul_runconfig import CrossmulRunConfig
from nisar.workflows.helpers import (copy_raster,
Expand Down Expand Up @@ -220,6 +220,6 @@ def stats_offsets(h5_ds, freq, pol):
# get a runconfig dict from command line args
crossmul_runconfig = CrossmulRunConfig(args, resample_type)
# prepare RIFG HDF5
out_paths = prepare_insar_hdf5.run(crossmul_runconfig.cfg)
_, out_paths = h5_prep.get_products_and_paths(crossmul_runconfig.cfg)
# run crossmul
run(crossmul_runconfig.cfg, out_paths['RIFG'], resample_type)
run(crossmul_runconfig.cfg, out_paths['RIFG'], resample_type, dump_on_disk=True)
151 changes: 137 additions & 14 deletions python/packages/nisar/workflows/rubbersheet.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,9 +12,10 @@
import numpy as np
from isce3.io import HDF5OptimizedReader
from isce3.math import offsets_polyfit
from isce3.unwrap.preprocess import interpret_subswath_mask
from nisar.products.insar.product_paths import RIFGGroupsPaths
from nisar.products.readers import SLC
from nisar.workflows import prepare_insar_hdf5
from nisar.workflows import h5_prep
from nisar.workflows.compute_stats import compute_stats_real_hdf5_dataset
from nisar.workflows.helpers import (get_cfg_freq_pols,
get_ground_track_velocity_product,
Expand All @@ -23,6 +24,7 @@
from nisar.workflows.yaml_argparse import YamlArgparse
from osgeo import gdal
from scipy import interpolate, ndimage, signal
from scipy.spatial import cKDTree


def run_rubbersheet_with_polyfit(cfg: dict, output_hdf5: str = None):
Expand Down Expand Up @@ -291,6 +293,14 @@ def run_rubbersheet_with_interpolation(cfg: dict, output_hdf5: str = None):
zero_doppler_time,
dem_file,
rubbersheet_dir)
valid_mask = None
# Apply the subswath mask to the outlier detection
if rubbersheet_params['subswath_mask_apply_enabled']:
pixel_offsets_mask_path = f'{pixel_offsets_path}/mask'
subswath_mask = dst_h5[pixel_offsets_mask_path][()]
ref_valid, sec_valid, _ = interpret_subswath_mask(subswath_mask)
valid_mask = ref_valid & sec_valid

for pol in pol_list:
# Create input and output directories for pol under processing
pol_group_path = f'{pixel_offsets_path}/{pol}'
Expand All @@ -305,7 +315,7 @@ def run_rubbersheet_with_interpolation(cfg: dict, output_hdf5: str = None):
# Identify outliers
offset_az_culled, offset_rg_culled = identify_outliers(
str(dense_offsets_dir),
rubbersheet_params)
rubbersheet_params, valid_mask)
# Fill outliers holes
offset_az = fill_outliers_holes(offset_az_culled,
rubbersheet_params)
Expand All @@ -325,7 +335,9 @@ def run_rubbersheet_with_interpolation(cfg: dict, output_hdf5: str = None):
key.startswith('layer')]
# Apply offset blending
offset_az, offset_rg = _offset_blending(off_product_dir,
rubbersheet_params, layer_keys)
rubbersheet_params,
layer_keys,
valid_mask)

# Get correlation peak path for the first offset layer
corr_peak_path = str(f'{off_prod_dir}/{layer_keys[0]}/correlation_peak')
Expand All @@ -336,8 +348,16 @@ def run_rubbersheet_with_interpolation(cfg: dict, output_hdf5: str = None):
# If there are residual NaNs, use interpolation to fill residual holes
nan_count = np.count_nonzero(np.isnan(offset))
if nan_count > 0:
offsets[k] = _interpolate_offsets(offset,
rubbersheet_params['interpolation_method'])
if rubbersheet_params['interpolation_method'] == 'idw':
# Get the IDW parameters
power, number, radius = ( rubbersheet_params['idw_interpolation'].get(k)
for k in ('power', 'number', 'radius'))
Comment thread
xhuang-jpl marked this conversation as resolved.
Outdated
offsets[k] = _interpolate_offsets_by_idw(offset, power, number, radius)
# Additional linear interpolation is required to fill the extra outliers beyond the radius
offsets[k] = _interpolate_offsets(offsets[k],'linear')
else:
offsets[k] = _interpolate_offsets(offset,
rubbersheet_params['interpolation_method'])
# If required, filter offsets
offsets[k] = _filter_offsets(offsets[k], rubbersheet_params)
# Save offsets on disk for resampling
Expand Down Expand Up @@ -434,7 +454,7 @@ def _write_to_disk(outpath, array, format='ENVI',
ds.GetRasterBand(1).WriteArray(array)
ds.FlushCache()

def identify_outliers(offsets_dir, rubbersheet_params):
def identify_outliers(offsets_dir, rubbersheet_params, mask = None):
'''
Identify outliers in the offset fields.
Outliers are identified by a thresholding
Expand All @@ -448,6 +468,8 @@ def identify_outliers(offsets_dir, rubbersheet_params):
where pixel offsets are located
rubbersheet_params: cfg
Dictionary containing rubbersheet parameters
mask: np.ndarray
A mask indicating the interested region (default: None)

Returns
-------
Expand All @@ -468,6 +490,9 @@ def identify_outliers(offsets_dir, rubbersheet_params):
elif metric == 'median_filter':
offset_az = _open_raster(f'{offsets_dir}/dense_offsets', 1)
offset_rg = _open_raster(f'{offsets_dir}/dense_offsets', 2)
if mask is not None:
offset_az[~mask] = np.nan
offset_rg[~mask] = np.nan
mask_data = compute_mad_mask(offset_az, window_az, window_rg, threshold) | \
compute_mad_mask(offset_rg, window_az, window_rg, threshold)
elif metric == 'covariance':
Expand Down Expand Up @@ -671,7 +696,7 @@ def _fill_nan_with_mean(arr_in, arr_out, neighborhood_size):
return filled_arr


def _offset_blending(off_product_dir, rubbersheet_params, layer_keys):
def _offset_blending(off_product_dir, rubbersheet_params, layer_keys, mask = None):
'''
Blends offsets layers at different resolution. Implements a
pyramidal filling algorithm using the offset layer at higher
Expand All @@ -689,6 +714,8 @@ def _offset_blending(off_product_dir, rubbersheet_params, layer_keys):
Dictionary containing the user-defined rubbersheet options
layer_keys: list
List of layers within the offset product
mask: np.ndarray
A mask indicting the interested region, default: None

Returns
-------
Expand All @@ -700,7 +727,7 @@ def _offset_blending(off_product_dir, rubbersheet_params, layer_keys):

# Filter outliers from layer one
offset_az, offset_rg = identify_outliers(str(off_product_dir / layer_keys[0]),
rubbersheet_params)
rubbersheet_params, mask)

# Replace the NaN locations in layer1 with the mean of pixels in layers
# at lower resolution computed in a neighborhood centered at the NaN location
Expand All @@ -711,12 +738,12 @@ def _offset_blending(off_product_dir, rubbersheet_params, layer_keys):

if nan_count_az > 0:
offset_az_culled, _ = identify_outliers(str(off_product_dir / layer_key),
rubbersheet_params)
rubbersheet_params, mask)
offset_az = _fill_nan_with_mean(offset_az, offset_az_culled, filter_size)

if nan_count_rg > 0:
_, offset_rg_culled = identify_outliers(str(off_product_dir / layer_key),
rubbersheet_params)
rubbersheet_params, mask)
offset_rg = _fill_nan_with_mean(offset_rg, offset_rg_culled, filter_size)

# Fill remaining holes by iteratively filling the output offset layer
Expand All @@ -727,11 +754,107 @@ def _offset_blending(off_product_dir, rubbersheet_params, layer_keys):

return offset_az, offset_rg

def _interpolate_offsets_by_idw(
Z: np.ndarray,
power: float = 2.0,
number: int | None = 100,
radius: float | None = 200.0,
eps: float = 1e-12):
'''
Performs IDW interpolation on a 2D grid containing NaN values.
Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Suggested change
'''
Performs IDW interpolation on a 2D grid containing NaN values.
'''
Performs Inverse Distance Weighting (IDW) interpolation on a 2D grid containing NaN values.

Missing pixels are filled using inverse distance weighting (IDW)
Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Suggested change
Missing pixels are filled using inverse distance weighting (IDW)
Missing pixels are filled using IDW

based on nearby valid samples. The interpolation can optionally
limit the number of nearest neighbors and the maximum search
radius to improve efficiency and control smoothing.

import journal
import numpy as np
from scipy import interpolate
Parameters
---------
Z: np.ndarray
2D array of shape (H, W) containing NaN values to be filled
power: float
Power parameter p controlling the distance weighting
number: int or None
Number of nearest neighbors used for interpolation.
If None, all valid points are used (default: 100)
radius: float or None
Maximum search radius in pixels. If None, no radius
limitation is applied (default: 200)
eps: float
Small value added to distance to avoid division by zero

Returns
-------
filled: np.ndarray
2D array with the same shape as Z where NaN values
have been filled using IDW interpolation
'''

valid = np.isfinite(Z)
yy, xx = np.nonzero(valid)
if yy.size == 0:
raise ValueError("No valid points found to interpolate from.")

points = np.column_stack([xx, yy]).astype(float) # (x, y)
values = Z[yy, xx].astype(float)

# Query points (NaNs)
qmask = ~valid
yq, xq = np.nonzero(qmask)
if yq.size == 0:
return Z.copy()

qpoints = np.column_stack([xq, yq]).astype(float)

# KDTree for neighbors
tree = cKDTree(points)

# If number is None, use all valid points
k = points.shape[0] if number is None else number

# Query neighbors
if radius is None:
dists, idxs = tree.query(qpoints, k=min(k, points.shape[0]))
# dists: (M, k)
# idxs : (M, k)
use_mask = np.isfinite(dists)
else:
dists, idxs = tree.query(qpoints, k=min(k, points.shape[0]), distance_upper_bound=radius)
# For neighbors not found within radius, idxs == points.shape[0], dists == inf
Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

need to combine together

use_mask = np.isfinite(dists) & (idxs < points.shape[0])

# Filled values
filled_vals = np.full(qpoints.shape[0], np.nan, dtype=float)

# Fill the value with nearest neighbor if distance is too small.
zero_hit = np.any((dists <= eps) & use_mask, axis=1)
if np.any(zero_hit):
row = np.where(zero_hit)[0]
col = np.argmax(((dists <= eps) & use_mask)[row], axis=1)
filled_vals[row] = values[idxs[row, col]]
Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

might not be neccessary here since the distance in 2D grid is usually > 1


# For the rest, compute IDW
rest = ~zero_hit
if np.any(rest):
d = dists[rest]
ii = idxs[rest]
m = use_mask[rest]

# weights = 1 / d^p, ignore invalid neighbors
w = np.zeros_like(d, dtype=float)
w[m] = 1.0 / np.maximum(d[m], eps) ** power

Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

same here

v = np.zeros_like(d, dtype=float)
v[m] = values[ii[m]]

ws = w.sum(axis=1)
# If no neighbors (ws==0), leave as NaN (no extrapolation)
ok = ws > 0
filled_vals[np.where(rest)[0][ok]] = (w[ok] * v[ok]).sum(axis=1) / ws[ok]

out = Z.copy()
out[yq, xq] = filled_vals

return out

def _interpolate_offsets(offset, interp_method):
'''
Expand Down Expand Up @@ -851,5 +974,5 @@ def run(cfg: dict, output_hdf5: str = None):

# Prepare RIFG. Culled offsets will be
# allocated in RIFG product
out_paths = prepare_insar_hdf5.run(rubbersheet_runconfig.cfg)
_, out_paths = h5_prep.get_products_and_paths(rubbersheet_runconfig.cfg)
run(rubbersheet_runconfig.cfg, out_paths['RIFG'])
19 changes: 16 additions & 3 deletions share/nisar/defaults/insar.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -306,7 +306,7 @@ runconfig:
# When `min_cluster_pixels` is 0, this pre-removal step is
# skipped entirely.
min_cluster_pixels: 2

bridge_algorithm_enabled: True
# Maximum radius used when bridging disconnected
# regions.
Expand Down Expand Up @@ -544,7 +544,20 @@ runconfig:
kernel_size: 3
# Interpolation method. Interpolation is used to fill residual outlier
# holes if present
interpolation_method: linear
interpolation_method: linear

# Flag to enable the use of subswath mask for the outlier detection
subswath_mask_apply_enabled: True

# IDW interpolation method parameters
idw_interpolation:
# Power parameter controlling the distance weighting
power: 2
# Number of nearest neighbors used for interpolation
number: 100
# Maximum search radius in pixels
radius: 200

Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

keep the default to be linear

# It is good practice to filter the offsets prior to interferogram
# formation to reduce noise. We expose: no_filter: do not filter the offsets/
# degrade offsets resolution. median_filter, boxcar_filter (moving average),
Expand Down Expand Up @@ -780,7 +793,7 @@ runconfig:
# During deramping, uniform sampling is applied
# when number of samples is larger than maximum pixel
bridge_ramp_maximum_pixel: 1e6

correction_luts:
# Boolean flag to activate/deactivate model-based solid earth tide corrections
solid_earth_tides_enabled: True
Expand Down
16 changes: 15 additions & 1 deletion share/nisar/schemas/insar.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -548,7 +548,13 @@ rubbersheet_options:

# Interpolation method. Interpolation is used to fill residual
# outlier holes (if present)
interpolation_method: enum('no_interpolation', 'nearest', 'linear', 'cubic', required=False)
interpolation_method: enum('no_interpolation', 'nearest', 'linear', 'cubic', 'idw', required=False)

# Inverse distance weighting (IDW) interpolation method
idw_interpolation: include('idw_interpolation_options', required=False)

# Flag to enable the use of the subswath mask for the outlier detection
subswath_mask_apply_enabled: bool(required=False)

# It is good practice to filter the offsets prior to interferogram
# formation to reduce noise. We expose: no_filter: do not filter the offsets/
Expand Down Expand Up @@ -595,6 +601,14 @@ polyfitting_options:
# Number of samples along the azimuth in pixels for polyfitting
samples_along_azimuth: num(min=0, required=False)

idw_interpolation_options:
# Power parameter controlling the distance weighting
power: int(min=0, required=False)
# Number of nearest neighbors used for interpolation
number: int(min=1, required=False)
# Maximum search radius in pixels, if not specified, it will use all of the valid pixels in the image
radius: num(min=1, required=False)
Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Suggested change
# Number of nearest neighbors used for interpolation
number: int(min=0, required=False)
# Maximum search radius in pixels
radius: num(min=0, required=False)
# Number of nearest neighbors used for interpolation
number: int(min=1, required=False)
# Maximum search radius in pixels, if not specified, it will use all of the valid pixels in the image
radius: num(min=1, required=False)


Comment thread
xhuang-jpl marked this conversation as resolved.
crossmul_options:
# Path to HDF5 file or directory with coregistered SLCs
# If directory then the following directory tree is required:
Expand Down
Loading