
Commit f85e929 (parent: 2864f25)

fixup! fixup! fixup! Issue #114/#141 convert inline GeoJSON in aggregate_spatial to VectorCube

4 files changed: +41 -27 lines

openeo_driver/ProcessGraphDeserializer.py (+8 -1)

@@ -1321,6 +1321,10 @@ def run_udf(args: dict, env: EvalEnv):
     # TODO #114 add support for DriverVectorCube
     if isinstance(data, AggregatePolygonResult):
         pass
+    if isinstance(data, DriverVectorCube):
+        # TODO: this is temporary adaption to old style save results. Better have proper DriverVectorCube support in run_udf?
+        data = data.to_legacy_save_result()
+
     if isinstance(data, (DelayedVector, dict)):
         if isinstance(data, dict):
             data = DelayedVector.from_json_dict(data)
@@ -1338,7 +1342,10 @@ def run_udf(args: dict, env: EvalEnv):
         )
     else:
         raise ProcessParameterInvalidException(
-            parameter='data', process='run_udf', reason=f"Invalid data type {type(data)!r} expected raster-cube.")
+            parameter="data",
+            process="run_udf",
+            reason=f"Unsupported data type {type(data)}.",
+        )
 
     _log.info(f"[run_udf] Running UDF {str_truncate(udf, width=256)!r} on {data!r}")
     result_data = openeo.udf.run_udf_code(udf, data)
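
For context, a rough standalone sketch of what the new branch does. The helper function and its framing here are hypothetical, not part of the commit; only the isinstance check and the to_legacy_save_result() call come from the diff above.

    # Hypothetical rendering of the adaption step, outside of run_udf:
    from openeo_driver.datacube import DriverVectorCube

    def _adapt_udf_data(data):
        # Assumption: `data` is whatever run_udf resolved from the process graph.
        if isinstance(data, DriverVectorCube):
            # Temporary adaption: no native DriverVectorCube support in run_udf yet,
            # so downgrade to a legacy AggregatePolygonResult/JSONResult object.
            data = data.to_legacy_save_result()
        return data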

openeo_driver/datacube.py (+24 -25)

@@ -280,40 +280,39 @@ def write_assets(
     def to_multipolygon(self) -> shapely.geometry.MultiPolygon:
         return shapely.ops.unary_union(self._geometries.geometry)
 
-    def _write_legacy_aggregate_polygon_result_json(
-        self, directory: Path
-    ) -> Dict[str, StacAsset]:
-        """Export to legacy AggregatePolygonResult JSON format"""
-        # TODO: eliminate this legacy, non-standard format?
+    def to_legacy_save_result(self) -> Union["AggregatePolygonResult", "JSONResult"]:
+        """
+        Export to legacy AggregatePolygonResult/JSONResult objects.
+        Provided as temporary adaption layer while migrating to real vector cubes.
+        """
+        # TODO: eliminate these legacy, non-standard format?
         from openeo_driver.save_result import AggregatePolygonResult, JSONResult
 
-        def write_spatiotemporal(cube: xarray.DataArray) -> Dict[str, StacAsset]:
-            """Export to legacy AggregatePolygonResult JSON format"""
+        cube = self._cube
+        # TODO: more flexible temporal/band dimension detection?
+        if cube.dims == (self.DIM_GEOMETRIES, "t"):
+            # Add single band dimension
+            cube = cube.expand_dims({"bands": ["band"]}, axis=-1)
+        if cube.dims == (self.DIM_GEOMETRIES, "t", "bands"):
             cube = cube.transpose("t", self.DIM_GEOMETRIES, "bands")
             timeseries = {
                 t.item(): t_slice.values.tolist()
                 for t, t_slice in zip(cube.coords["t"], cube)
             }
-            result = AggregatePolygonResult(timeseries=timeseries, regions=self)
-            return result.write_assets(directory=directory / "ignored")
-
-        def write_spatial(cube: xarray.DataArray) -> Dict[str, StacAsset]:
+            return AggregatePolygonResult(timeseries=timeseries, regions=self)
+        elif cube.dims == (self.DIM_GEOMETRIES, "bands"):
             cube = cube.transpose(self.DIM_GEOMETRIES, "bands")
-            result = JSONResult(data=cube.values.tolist())
-            return result.write_assets(directory / "ignored")
+            return JSONResult(data=cube.values.tolist())
+        raise ValueError(
+            f"Unsupported cube configuration {cube.dims} for _write_legacy_aggregate_polygon_result_json"
+        )
 
-        cube = self._cube
-        # TODO: more flexible temporal/band dimension detection?
-        if cube.dims == (self.DIM_GEOMETRIES, "t"):
-            return write_spatiotemporal(cube.expand_dims({"bands": ["band"]}, axis=-1))
-        elif cube.dims == (self.DIM_GEOMETRIES, "t", "bands"):
-            return write_spatiotemporal(cube)
-        elif cube.dims == (self.DIM_GEOMETRIES, "bands"):
-            return write_spatial(cube)
-        else:
-            raise ValueError(
-                f"Unsupported cube configuration {cube.dims} for _write_legacy_aggregate_polygon_result_json"
-            )
+    def _write_legacy_aggregate_polygon_result_json(
+        self, directory: Path
+    ) -> Dict[str, StacAsset]:
+        """Export to legacy AggregatePolygonResult JSON format"""
+        # TODO: eliminate this legacy, non-standard format?
+        return self.to_legacy_save_result().write_assets(directory=directory)
 
     def get_bounding_box(self) -> Tuple[float, float, float, float]:
         return tuple(self._geometries.total_bounds)
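
To make the dimension-based dispatch in to_legacy_save_result concrete, a small sketch with made-up data. It assumes the dimension names "geometries"/"t"/"bands" used in the diff; the arrays themselves are arbitrary.

    # Sketch of the cube layouts handled above (example data is made up):
    import numpy as np
    import xarray

    # (geometries, bands): spatial-only aggregation -> JSONResult path
    spatial = xarray.DataArray(np.zeros((2, 3)), dims=("geometries", "bands"))
    assert spatial.dims == ("geometries", "bands")

    # (geometries, t): no band dimension yet -> expand_dims adds a single "band",
    # after which the (geometries, t, bands) branch applies and builds the
    # timestamp-keyed timeseries dict for AggregatePolygonResult.
    temporal = xarray.DataArray(np.zeros((2, 4)), dims=("geometries", "t"))
    temporal = temporal.expand_dims({"bands": ["band"]}, axis=-1)
    assert temporal.dims == ("geometries", "t", "bands")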

tests/data/pg/1.0/run_udf_on_timeseries.json (+4)

@@ -12,6 +12,10 @@
         "temporal_extent": [
           "2017-11-21",
           "2017-11-21"
+        ],
+        "bands": [
+          "B02",
+          "B03"
         ]
       }
     },
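
Note: the two bands added to this fixture line up with the updated expectation in tests/test_views_execute.py below, where each timestep's value list per geometry now holds two band values instead of one.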

tests/test_views_execute.py (+5 -1)

@@ -1115,7 +1115,11 @@ def test_run_udf_on_json(api100, udf_code):
         preprocess=lambda s: s.replace('"PLACEHOLDER_UDF"', repr(udf_code))
     )
     resp = api100.check_result(process_graph)
-    assert resp.json == {'len': 2, 'keys': ['2015-07-06T00:00:00Z', '2015-08-22T00:00:00Z'], 'values': [[[2.345]], [[None]]]}
+    assert resp.json == {
+        "len": 2,
+        "keys": ["2015-07-06T00:00:00Z", "2015-08-22T00:00:00Z"],
+        "values": [[[2.345, None]], [[2.0, 3.0]]],
+    }
 
 
 @pytest.mark.parametrize("udf_code", [
