Skip to content

Commit 354a79e

Browse files
committed
Issue #704 Add support for log_level in create_job()/execute_job()
refs #424, Open-EO/openeo-api#329
1 parent c1589a8 commit 354a79e

File tree

8 files changed

+111
-10
lines changed

8 files changed

+111
-10
lines changed

CHANGELOG.md

+1
Original file line numberDiff line numberDiff line change
@@ -11,6 +11,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
1111

1212
- Added `show_error_logs` argument to `cube.execute_batch()`/`job.start_and_wait()`/... to toggle the automatic printing of error logs on failure ([#505](https://github.com/Open-EO/openeo-python-client/issues/505))
1313
- Added `Connection.web_editor()` to build link to the openEO backend in the openEO Web Editor
14+
- Add support for `log_level` in `create_job()` and `execute_batch()` ([#704](https://github.com/Open-EO/openeo-python-client/issues/704))
1415

1516
### Changed
1617

openeo/rest/connection.py

+9-3
Original file line numberDiff line numberDiff line change
@@ -893,7 +893,7 @@ def list_collections(self) -> List[dict]:
893893
894894
:return: list of dictionaries with basic collection metadata.
895895
"""
896-
# TODO: add caching #383
896+
# TODO: add caching #383, but reset cache on auth change #254
897897
data = self.get('/collections', expected_status=200).json()["collections"]
898898
return VisualList("collections", data=data)
899899

@@ -1816,6 +1816,7 @@ def create_job(
18161816
additional: Optional[dict] = None,
18171817
job_options: Optional[dict] = None,
18181818
validate: Optional[bool] = None,
1819+
log_level: Optional[str] = None,
18191820
) -> BatchJob:
18201821
"""
18211822
Create a new job from given process graph on the back-end.
@@ -1836,21 +1837,26 @@ def create_job(
18361837
(under top-level property "job_options")
18371838
:param validate: Optional toggle to enable/prevent validation of the process graphs before execution
18381839
(overruling the connection's ``auto_validate`` setting).
1840+
:param log_level: Optional minimum severity level for log entries that the back-end should keep track of.
1841+
One of "error" (highest severity), "warning", "info", and "debug" (lowest severity).
18391842
:return: Created job
18401843
18411844
.. versionchanged:: 0.35.0
18421845
Add :ref:`multi-result support <multi-result-process-graphs>`.
18431846
1844-
.. versionadded:: 0.36.0
1847+
.. versionchanged:: 0.36.0
18451848
Added argument ``job_options``.
1849+
1850+
.. versionchanged:: 0.37.0
1851+
Added argument ``log_level``.
18461852
"""
18471853
# TODO move all this (BatchJob factory) logic to BatchJob?
18481854

18491855
pg_with_metadata = self._build_request_with_process_graph(
18501856
process_graph=process_graph,
18511857
additional=additional,
18521858
job_options=job_options,
1853-
**dict_no_none(title=title, description=description, plan=plan, budget=budget)
1859+
**dict_no_none(title=title, description=description, plan=plan, budget=budget, log_level=log_level),
18541860
)
18551861

18561862
self._preflight_validation(pg_with_metadata=pg_with_metadata, validate=validate)

openeo/rest/datacube.py

+18-4
Original file line numberDiff line numberDiff line change
@@ -2353,7 +2353,7 @@ def download(
23532353
.. versionchanged:: 0.32.0
23542354
Added ``auto_add_save_result`` option
23552355
2356-
.. versionadded:: 0.36.0
2356+
.. versionchanged:: 0.36.0
23572357
Added arguments ``additional`` and ``job_options``.
23582358
"""
23592359
# TODO #278 centralize download/create_job/execute_job logic in DataCube, VectorCube, MlModel, ...
@@ -2478,6 +2478,7 @@ def execute_batch(
24782478
validate: Optional[bool] = None,
24792479
auto_add_save_result: bool = True,
24802480
show_error_logs: bool = True,
2481+
log_level: Optional[str] = None,
24812482
# TODO: deprecate `format_options` as keyword arguments
24822483
**format_options,
24832484
) -> BatchJob:
@@ -2496,15 +2497,20 @@ def execute_batch(
24962497
(overruling the connection's ``auto_validate`` setting).
24972498
:param auto_add_save_result: Automatically add a ``save_result`` node to the process graph if there is none yet.
24982499
:param show_error_logs: whether to automatically print error logs when the batch job failed.
2500+
:param log_level: Optional minimum severity level for log entries that the back-end should keep track of.
2501+
One of "error" (highest severity), "warning", "info", and "debug" (lowest severity).
24992502
25002503
.. versionchanged:: 0.32.0
25012504
Added ``auto_add_save_result`` option
25022505
2503-
.. versionadded:: 0.36.0
2506+
.. versionchanged:: 0.36.0
25042507
Added argument ``additional``.
25052508
25062509
.. versionchanged:: 0.37.0
25072510
Added argument ``show_error_logs``.
2511+
2512+
.. versionchanged:: 0.37.0
2513+
Added argument ``log_level``.
25082514
"""
25092515
# TODO: start showing deprecation warnings about these inconsistent argument names
25102516
if "format" in format_options and not out_format:
@@ -2531,6 +2537,7 @@ def execute_batch(
25312537
job_options=job_options,
25322538
validate=validate,
25332539
auto_add_save_result=False,
2540+
log_level=log_level,
25342541
)
25352542
return job.run_synchronous(
25362543
outputfile=outputfile,
@@ -2552,6 +2559,7 @@ def create_job(
25522559
job_options: Optional[dict] = None,
25532560
validate: Optional[bool] = None,
25542561
auto_add_save_result: bool = True,
2562+
log_level: Optional[str] = None,
25552563
# TODO: avoid `format_options` as keyword arguments
25562564
**format_options,
25572565
) -> BatchJob:
@@ -2575,14 +2583,19 @@ def create_job(
25752583
:param validate: Optional toggle to enable/prevent validation of the process graphs before execution
25762584
(overruling the connection's ``auto_validate`` setting).
25772585
:param auto_add_save_result: Automatically add a ``save_result`` node to the process graph if there is none yet.
2586+
:param log_level: Optional minimum severity level for log entries that the back-end should keep track of.
2587+
One of "error" (highest severity), "warning", "info", and "debug" (lowest severity).
25782588
25792589
:return: Created job.
25802590
2581-
.. versionadded:: 0.32.0
2591+
.. versionchanged:: 0.32.0
25822592
Added ``auto_add_save_result`` option
25832593
2584-
.. versionadded:: 0.36.0
2594+
.. versionchanged:: 0.36.0
25852595
Added ``additional`` argument.
2596+
2597+
.. versionchanged:: 0.37.0
2598+
Added argument ``log_level``.
25862599
"""
25872600
# TODO: add option to also automatically start the job?
25882601
# TODO: avoid using all kwargs as format_options
@@ -2605,6 +2618,7 @@ def create_job(
26052618
validate=validate,
26062619
additional=additional,
26072620
job_options=job_options,
2621+
log_level=log_level,
26082622
)
26092623

26102624
send_job = legacy_alias(create_job, name="send_job", since="0.10.0")

openeo/rest/mlmodel.py

+16-2
Original file line numberDiff line numberDiff line change
@@ -72,6 +72,7 @@ def execute_batch(
7272
additional: Optional[dict] = None,
7373
job_options: Optional[dict] = None,
7474
show_error_logs: bool = True,
75+
log_level: Optional[str] = None,
7576
) -> BatchJob:
7677
"""
7778
Evaluate the process graph by creating a batch job, and retrieving the results when it is finished.
@@ -87,12 +88,17 @@ def execute_batch(
8788
:param job_options: dictionary of job options to pass to the backend
8889
(under top-level property "job_options")
8990
:param show_error_logs: whether to automatically print error logs when the batch job failed.
91+
:param log_level: Optional minimum severity level for log entries that the back-end should keep track of.
92+
One of "error" (highest severity), "warning", "info", and "debug" (lowest severity).
9093
91-
.. versionadded:: 0.36.0
94+
.. versionchanged:: 0.36.0
9295
Added argument ``additional``.
9396
9497
.. versionchanged:: 0.37.0
9598
Added argument ``show_error_logs``.
99+
100+
.. versionchanged:: 0.37.0
101+
Added argument ``log_level``.
96102
"""
97103
job = self.create_job(
98104
title=title,
@@ -101,6 +107,7 @@ def execute_batch(
101107
budget=budget,
102108
additional=additional,
103109
job_options=job_options,
110+
log_level=log_level,
104111
)
105112
return job.run_synchronous(
106113
# TODO #135 support multi file result sets too
@@ -120,6 +127,7 @@ def create_job(
120127
budget: Optional[float] = None,
121128
additional: Optional[dict] = None,
122129
job_options: Optional[dict] = None,
130+
log_level: Optional[str] = None,
123131
) -> BatchJob:
124132
"""
125133
Sends a job to the backend and returns a ClientJob instance.
@@ -133,10 +141,15 @@ def create_job(
133141
:param job_options: dictionary of job options to pass to the backend
134142
(under top-level property "job_options")
135143
:param format_options: String Parameters for the job result format
144+
:param log_level: Optional minimum severity level for log entries that the back-end should keep track of.
145+
One of "error" (highest severity), "warning", "info", and "debug" (lowest severity).
136146
:return: Created job.
137147
138-
.. versionadded:: 0.36.0
148+
.. versionchanged:: 0.36.0
139149
Added argument ``additional``.
150+
151+
.. versionchanged:: 0.37.0
152+
Added argument ``log_level``.
140153
"""
141154
# TODO: centralize `create_job` for `DataCube`, `VectorCube`, `MlModel`, ...
142155
pg = self
@@ -151,4 +164,5 @@ def create_job(
151164
budget=budget,
152165
additional=additional,
153166
job_options=job_options,
167+
log_level=log_level,
154168
)

openeo/rest/multiresult.py

+4
Original file line numberDiff line numberDiff line change
@@ -82,6 +82,7 @@ def create_job(
8282
additional: Optional[dict] = None,
8383
job_options: Optional[dict] = None,
8484
validate: Optional[bool] = None,
85+
log_level: Optional[str] = None,
8586
) -> BatchJob:
8687
return self._connection.create_job(
8788
process_graph=self._multi_leaf_graph,
@@ -90,6 +91,7 @@ def create_job(
9091
additional=additional,
9192
job_options=job_options,
9293
validate=validate,
94+
log_level=log_level,
9395
)
9496

9597
def execute_batch(
@@ -100,12 +102,14 @@ def execute_batch(
100102
additional: Optional[dict] = None,
101103
job_options: Optional[dict] = None,
102104
validate: Optional[bool] = None,
105+
log_level: Optional[str] = None,
103106
) -> BatchJob:
104107
job = self.create_job(
105108
title=title,
106109
description=description,
107110
additional=additional,
108111
job_options=job_options,
109112
validate=validate,
113+
log_level=log_level,
110114
)
111115
return job.run_synchronous()

openeo/rest/vectorcube.py

+15-1
Original file line numberDiff line numberDiff line change
@@ -260,6 +260,7 @@ def execute_batch(
260260
validate: Optional[bool] = None,
261261
auto_add_save_result: bool = True,
262262
show_error_logs: bool = True,
263+
log_level: Optional[str] = None,
263264
# TODO: avoid using kwargs as format options
264265
**format_options,
265266
) -> BatchJob:
@@ -279,18 +280,23 @@ def execute_batch(
279280
(overruling the connection's ``auto_validate`` setting).
280281
:param auto_add_save_result: Automatically add a ``save_result`` node to the process graph if there is none yet.
281282
:param show_error_logs: whether to automatically print error logs when the batch job failed.
283+
:param log_level: Optional minimum severity level for log entries that the back-end should keep track of.
284+
One of "error" (highest severity), "warning", "info", and "debug" (lowest severity).
282285
283286
.. versionchanged:: 0.21.0
284287
When not specified explicitly, output format is guessed from output file extension.
285288
286289
.. versionchanged:: 0.32.0
287290
Added ``auto_add_save_result`` option
288291
289-
.. versionadded:: 0.36.0
292+
.. versionchanged:: 0.36.0
290293
Added argument ``additional``.
291294
292295
.. versionchanged:: 0.37.0
293296
Added argument ``show_error_logs``.
297+
298+
.. versionchanged:: 0.37.0
299+
Added argument ``log_level``.
294300
"""
295301
cube = self
296302
if auto_add_save_result:
@@ -311,6 +317,7 @@ def execute_batch(
311317
job_options=job_options,
312318
validate=validate,
313319
auto_add_save_result=False,
320+
log_level=log_level,
314321
)
315322
return job.run_synchronous(
316323
# TODO #135 support multi file result sets too
@@ -333,6 +340,7 @@ def create_job(
333340
job_options: Optional[dict] = None,
334341
validate: Optional[bool] = None,
335342
auto_add_save_result: bool = True,
343+
log_level: Optional[str] = None,
336344
**format_options,
337345
) -> BatchJob:
338346
"""
@@ -351,11 +359,16 @@ def create_job(
351359
:param validate: Optional toggle to enable/prevent validation of the process graphs before execution
352360
(overruling the connection's ``auto_validate`` setting).
353361
:param auto_add_save_result: Automatically add a ``save_result`` node to the process graph if there is none yet.
362+
:param log_level: Optional minimum severity level for log entries that the back-end should keep track of.
363+
One of "error" (highest severity), "warning", "info", and "debug" (lowest severity).
354364
355365
:return: Created job.
356366
357367
.. versionchanged:: 0.32.0
358368
Added ``auto_add_save_result`` option
369+
370+
.. versionchanged:: 0.37.0
371+
Added argument ``log_level``.
359372
"""
360373
# TODO: avoid using all kwargs as format_options
361374
# TODO #278 centralize download/create_job/execute_job logic in DataCube, VectorCube, MlModel, ...
@@ -377,6 +390,7 @@ def create_job(
377390
additional=additional,
378391
job_options=job_options,
379392
validate=validate,
393+
log_level=log_level,
380394
)
381395

382396
send_job = legacy_alias(create_job, name="send_job", since="0.10.0")

tests/rest/datacube/test_datacube.py

+16
Original file line numberDiff line numberDiff line change
@@ -11,6 +11,7 @@
1111
from datetime import date, datetime
1212
from unittest import mock
1313

14+
import dirty_equals
1415
import numpy as np
1516
import pytest
1617
import requests
@@ -871,6 +872,21 @@ def test_create_job_auto_add_save_result(self, s2cube, dummy_backend, auto_add_s
871872
s2cube.create_job(auto_add_save_result=auto_add_save_result)
872873
assert set(n["process_id"] for n in dummy_backend.get_pg().values()) == process_ids
873874

875+
@pytest.mark.parametrize(
876+
["create_kwargs", "expected"],
877+
[
878+
({}, {}),
879+
({"log_level": None}, {}),
880+
({"log_level": "error"}, {"log_level": "error"}),
881+
],
882+
)
883+
def test_create_job_log_level(self, s2cube, dummy_backend, create_kwargs, expected):
884+
s2cube.create_job(**create_kwargs)
885+
assert dummy_backend.get_batch_post_data() == {
886+
"process": {"process_graph": dirty_equals.IsPartialDict()},
887+
**expected,
888+
}
889+
874890
def test_execute_batch_defaults(self, s2cube, get_create_job_pg, recwarn, caplog):
875891
s2cube.execute_batch()
876892
pg = get_create_job_pg()

tests/rest/test_connection.py

+32
Original file line numberDiff line numberDiff line change
@@ -2955,6 +2955,38 @@ def test_create_job_with_additional_and_job_options(dummy_backend):
29552955
}
29562956

29572957

2958+
def test_create_job_log_level_basic(dummy_backend):
2959+
job = dummy_backend.connection.create_job(
2960+
{"foo1": {"process_id": "foo"}},
2961+
log_level="warning",
2962+
)
2963+
assert isinstance(job, BatchJob)
2964+
assert dummy_backend.get_batch_post_data() == {
2965+
"process": {"process_graph": {"foo1": {"process_id": "foo"}}},
2966+
"log_level": "warning",
2967+
}
2968+
2969+
2970+
@pytest.mark.parametrize(
2971+
["create_kwargs", "expected"],
2972+
[
2973+
({}, {}),
2974+
({"log_level": None}, {}),
2975+
({"log_level": "error"}, {"log_level": "error"}),
2976+
],
2977+
)
2978+
def test_create_job_log_level(dummy_backend, create_kwargs, expected):
2979+
job = dummy_backend.connection.create_job(
2980+
{"foo1": {"process_id": "foo"}},
2981+
**create_kwargs,
2982+
)
2983+
assert isinstance(job, BatchJob)
2984+
assert dummy_backend.get_batch_post_data() == {
2985+
"process": {"process_graph": {"foo1": {"process_id": "foo"}}},
2986+
**expected,
2987+
}
2988+
2989+
29582990
@pytest.mark.parametrize(
29592991
"pg",
29602992
[

0 commit comments

Comments
 (0)