@@ -677,13 +677,13 @@ def apply_neighborhood(args: ProcessArgs, env: EvalEnv) -> DriverDataCube:
 
 @process
 def apply_dimension(args: ProcessArgs, env: EvalEnv) -> DriverDataCube:
-    data_cube = args.get_required("data", expected_type=DriverDataCube)
+    data_cube = args.get_required("data", expected_type=(DriverDataCube, DriverVectorCube))
     process = args.get_deep("process", "process_graph", expected_type=dict)
-    dimension = args.get_required("dimension", expected_type=str)
+    dimension = args.get_required(
+        "dimension", expected_type=str, validator=ProcessArgs.validator_one_of(data_cube.get_dimension_names())
+    )
     target_dimension = args.get_optional("target_dimension", default=None, expected_type=str)
     context = args.get_optional("context", default=None)
-    # do check_dimension here for error handling
-    dimension, band_dim, temporal_dim = _check_dimension(cube=data_cube, dim=dimension, process="apply_dimension")
 
     cube = data_cube.apply_dimension(
         process=process, dimension=dimension, target_dimension=target_dimension, context=context, env=env
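Note: `ProcessArgs.validator_one_of` (used above) is presumably a factory returning a callable that rejects values outside the given options. A minimal sketch of that idea, with assumed error semantics (the actual openeo_driver implementation may raise a different exception type):

    # Sketch only: membership-checking validator factory, assumed semantics.
    def validator_one_of(options):
        def validate(value):
            if value not in options:
                raise ValueError(f"Must be one of {options!r} but got {value!r}.")
        return validate

    validate_dim = validator_one_of(["x", "y", "t", "bands"])
    validate_dim("bands")  # passes
    # validate_dim("time") would raise: not a known dimension name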
@@ -747,10 +747,10 @@ def apply(args: ProcessArgs, env: EvalEnv) -> DriverDataCube:
 def reduce_dimension(args: ProcessArgs, env: EvalEnv) -> DriverDataCube:
     data_cube: DriverDataCube = args.get_required("data", expected_type=DriverDataCube)
     reduce_pg = args.get_deep("reducer", "process_graph", expected_type=dict)
-    dimension = args.get_required("dimension", expected_type=str)
+    dimension = args.get_required(
+        "dimension", expected_type=str, validator=ProcessArgs.validator_one_of(data_cube.get_dimension_names())
+    )
     context = args.get_optional("context", default=None)
-    # do check_dimension here for error handling
-    dimension, band_dim, temporal_dim = _check_dimension(cube=data_cube, dim=dimension, process="reduce_dimension")
     return data_cube.reduce_dimension(reducer=reduce_pg, dimension=dimension, context=context, env=env)
 
 
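For context, the `get_required(..., expected_type=..., validator=...)` pattern both hunks rely on plausibly works as follows: presence check, then type check, then the pluggable validator. This is assumed semantics, not the actual ProcessArgs code:

    # Assumed lookup semantics for a required process argument.
    def get_required(args: dict, name: str, expected_type=None, validator=None):
        if name not in args:
            raise KeyError(f"Missing required argument {name!r}")
        value = args[name]
        if expected_type is not None and not isinstance(value, expected_type):
            raise TypeError(f"{name!r} should be {expected_type}, got {type(value)}")
        if validator is not None:
            validator(value)  # e.g. a validator_one_of(...) callable
        return value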
@@ -915,60 +915,35 @@ def rename_labels(args: dict, env: EvalEnv) -> DriverDataCube:
     )
 
 
-def _check_dimension(cube: DriverDataCube, dim: str, process: str):
-    """
-    Helper to check/validate the requested and available dimensions of a cube.
-
-    :return: tuple (requested dimension, name of band dimension, name of temporal dimension)
-    """
-    # Note: large part of this is support/adapting for old client
-    # (pre https://github.com/Open-EO/openeo-python-client/issues/93)
-    # TODO remove this legacy support when not necessary anymore
-    metadata = cube.metadata
-    try:
-        band_dim = metadata.band_dimension.name
-    except MetadataException:
-        band_dim = None
-    try:
-        temporal_dim = metadata.temporal_dimension.name
-    except MetadataException:
-        temporal_dim = None
-
-    if dim not in metadata.dimension_names():
-        if dim in ["spectral_bands", "bands"] and band_dim:
-            _log.warning("Probably old client requesting band dimension {d!r},"
-                         " but actual band dimension name is {n!r}".format(d=dim, n=band_dim))
-            dim = band_dim
-        elif dim == "temporal" and temporal_dim:
-            _log.warning("Probably old client requesting temporal dimension {d!r},"
-                         " but actual temporal dimension name is {n!r}".format(d=dim, n=temporal_dim))
-            dim = temporal_dim
-        else:
-            raise ProcessParameterInvalidException(
-                parameter="dimension", process=process,
-                reason="got {d!r}, but should be one of {n!r}".format(d=dim, n=metadata.dimension_names()))
-
-    return dim, band_dim, temporal_dim
-
-
 @process
 def aggregate_temporal(args: ProcessArgs, env: EvalEnv) -> DriverDataCube:
     data_cube = args.get_required("data", expected_type=DriverDataCube)
-    reduce_pg = args.get_deep("reducer", "process_graph", expected_type=dict)
-    context = args.get_optional("context", default=None)
     intervals = args.get_required("intervals")
+    reduce_pg = args.get_deep("reducer", "process_graph", expected_type=dict)
     labels = args.get_optional("labels", default=None)
-    dimension = _get_time_dim_or_default(args, data_cube)
-    return data_cube.aggregate_temporal(intervals=intervals,labels=labels,reducer=reduce_pg, dimension=dimension, context=context)
+    dimension = args.get_optional(
+        "dimension",
+        default=lambda: data_cube.metadata.temporal_dimension.name,
+        validator=ProcessArgs.validator_one_of(data_cube.get_dimension_names()),
+    )
+    context = args.get_optional("context", default=None)
+
+    return data_cube.aggregate_temporal(
+        intervals=intervals, labels=labels, reducer=reduce_pg, dimension=dimension, context=context
+    )
 
 
 @process_registry_100.add_function
 def aggregate_temporal_period(args: ProcessArgs, env: EvalEnv) -> DriverDataCube:
     data_cube = args.get_required("data", expected_type=DriverDataCube)
+    period = args.get_required("period")
     reduce_pg = args.get_deep("reducer", "process_graph", expected_type=dict)
+    dimension = args.get_optional(
+        "dimension",
+        default=lambda: data_cube.metadata.temporal_dimension.name,
+        validator=ProcessArgs.validator_one_of(data_cube.get_dimension_names()),
+    )
     context = args.get_optional("context", default=None)
-    period = args.get_required("period")
-    dimension = _get_time_dim_or_default(args, data_cube, "aggregate_temporal_period")
 
     dry_run_tracer: DryRunDataTracer = env.get(ENV_DRY_RUN_TRACER)
     if dry_run_tracer:
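The `default=lambda: data_cube.metadata.temporal_dimension.name` pattern in both aggregate hunks suggests `get_optional` evaluates callable defaults lazily, so a cube without a temporal dimension only fails when "dimension" is actually omitted. A sketch of that idea (assumed, not the actual implementation):

    # Assumed: callable defaults are invoked only when the argument is absent,
    # deferring any MetadataException until the fallback is actually needed.
    def get_optional(args: dict, name: str, default=None, validator=None):
        if name in args:
            value = args[name]
        else:
            value = default() if callable(default) else default
        if validator is not None and value is not None:
            validator(value)
        return value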
@@ -1045,24 +1020,6 @@ def _period_to_intervals(start, end, period) -> List[Tuple[pd.Timestamp, pd.Time
     return intervals
 
 
-def _get_time_dim_or_default(args: ProcessArgs, data_cube, process_id="aggregate_temporal"):
-    dimension = args.get_optional("dimension", None)
-    if dimension is not None:
-        dimension, _, _ = _check_dimension(cube=data_cube, dim=dimension, process=process_id)
-    else:
-        # default: there is a single temporal dimension
-        try:
-            dimension = data_cube.metadata.temporal_dimension.name
-        except MetadataException:
-            raise ProcessParameterInvalidException(
-                parameter="dimension", process=process_id,
-                reason="No dimension was set, and no temporal dimension could be found. Available dimensions: {n!r}".format(
-                    n=data_cube.metadata.dimension_names()))
-        # do check_dimension here for error handling
-        dimension, band_dim, temporal_dim = _check_dimension(cube=data_cube, dim=dimension, process=process_id)
-    return dimension
-
-
 @process_registry_100.add_function
 def aggregate_spatial(args: ProcessArgs, env: EvalEnv) -> DriverDataCube:
     cube = args.get_required("data", expected_type=DriverDataCube)
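With `_get_time_dim_or_default` removed, its default-plus-validation behavior now lives inline at each call site (minus the legacy "bands"/"temporal" aliasing that `_check_dimension` used to provide), as in the aggregate_* hunks above:

    # Inline pattern replacing the deleted helper: fall back to the temporal
    # dimension and validate against the cube's actual dimension names.
    dimension = args.get_optional(
        "dimension",
        default=lambda: data_cube.metadata.temporal_dimension.name,
        validator=ProcessArgs.validator_one_of(data_cube.get_dimension_names()),
    )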
@@ -1624,14 +1581,28 @@ def load_uploaded_files(args: dict, env: EvalEnv) -> Union[DriverVectorCube,Driv
     .returns("vector-cube", schema={"type": "object", "subtype": "vector-cube"})
 )
 def to_vector_cube(args: Dict, env: EvalEnv):
-    # TODO: standardization of something like this? https://github.com/Open-EO/openeo-processes/issues/346
+    _log.warning("Experimental process `to_vector_cube` is deprecated, use `load_geojson` instead")
+    # TODO: remove this experimental/deprecated process
     data = extract_arg(args, "data", process_id="to_vector_cube")
     if isinstance(data, dict) and data.get("type") in {"Polygon", "MultiPolygon", "Feature", "FeatureCollection"}:
         return env.backend_implementation.vector_cube_cls.from_geojson(data)
-    # TODO: support more inputs: string with geojson, string with WKT, list of WKT, string with URL to GeoJSON, ...
     raise FeatureUnsupportedException(f"Converting {type(data)} to vector cube is not supported")
 
 
+@process_registry_100.add_function(spec=read_spec("openeo-processes/2.x/proposals/load_geojson.json"))
+def load_geojson(args: ProcessArgs, env: EvalEnv) -> DriverVectorCube:
+    data = args.get_required(
+        "data",
+        validator=ProcessArgs.validator_geojson_dict(
+            # TODO: also allow LineString and MultiLineString?
+            allowed_types=["Point", "MultiPoint", "Polygon", "MultiPolygon", "Feature", "FeatureCollection"]
+        ),
+    )
+    properties = args.get_optional("properties", default=[], expected_type=(list, tuple))
+    vector_cube = env.backend_implementation.vector_cube_cls.from_geojson(data, columns_for_cube=properties)
+    return vector_cube
+
+
 @non_standard_process(
     ProcessSpec("get_geometries", description="Reads vector data from a file or a URL or get geometries from a FeatureCollection")
     .param('filename', description="filename or http url of a vector file", schema={"type": "string"}, required=False)
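For illustration, a payload that would pass the `validator_geojson_dict` check in the new `load_geojson` process (standard GeoJSON; coordinates and property values invented):

    # Illustrative input; with properties=["pop"], from_geojson is called
    # with columns_for_cube=["pop"] per the hunk above.
    feature_collection = {
        "type": "FeatureCollection",
        "features": [{
            "type": "Feature",
            "geometry": {"type": "Point", "coordinates": [5.0, 51.0]},
            "properties": {"pop": 1234},
        }],
    }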