Issue #704 Add support for log_level in create_job()/execute_job()

soxofaan committed Jan 15, 2025
1 parent c1589a8 commit 354a79e
Showing 8 changed files with 111 additions and 10 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.md
@@ -11,6 +11,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

- Added `show_error_logs` argument to `cube.execute_batch()`/`job.start_and_wait()`/... to toggle the automatic printing of error logs on failure ([#505](https://github.com/Open-EO/openeo-python-client/issues/505))
- Added `Connection.web_editor()` to build link to the openEO backend in the openEO Web Editor
- Add support for `log_level` in `create_job()` and `execute_job()` ([#704](https://github.com/Open-EO/openeo-python-client/issues/704))

### Changed

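From a user's perspective, the new argument works as follows (a minimal sketch; the back-end URL and collection id are hypothetical):

```python
import openeo

# Hypothetical back-end URL and collection id, for illustration only.
connection = openeo.connect("https://openeo.example").authenticate_oidc()
cube = connection.load_collection("SENTINEL2_L2A")

# Ask the back-end to only keep track of log entries with
# severity "warning" or higher for this batch job.
job = cube.create_job(title="My job", log_level="warning")
```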
12 changes: 9 additions & 3 deletions openeo/rest/connection.py
@@ -893,7 +893,7 @@ def list_collections(self) -> List[dict]:
:return: list of dictionaries with basic collection metadata.
"""
# TODO: add caching #383
# TODO: add caching #383, but reset cache on auth change #254
data = self.get('/collections', expected_status=200).json()["collections"]
return VisualList("collections", data=data)

@@ -1816,6 +1816,7 @@ def create_job(
additional: Optional[dict] = None,
job_options: Optional[dict] = None,
validate: Optional[bool] = None,
log_level: Optional[str] = None,
) -> BatchJob:
"""
Create a new job from given process graph on the back-end.
@@ -1836,21 +1837,26 @@ def create_job(
(under top-level property "job_options")
:param validate: Optional toggle to enable/prevent validation of the process graphs before execution
(overruling the connection's ``auto_validate`` setting).
:param log_level: Optional minimum severity level for log entries that the back-end should keep track of.
One of "error" (highest severity), "warning", "info", and "debug" (lowest severity).
:return: Created job
.. versionchanged:: 0.35.0
Add :ref:`multi-result support <multi-result-process-graphs>`.
.. versionadded:: 0.36.0
.. versionchanged:: 0.36.0
Added argument ``job_options``.
.. versionchanged:: 0.37.0
Added argument ``log_level``.
"""
# TODO move all this (BatchJob factory) logic to BatchJob?

pg_with_metadata = self._build_request_with_process_graph(
process_graph=process_graph,
additional=additional,
job_options=job_options,
**dict_no_none(title=title, description=description, plan=plan, budget=budget)
**dict_no_none(title=title, description=description, plan=plan, budget=budget, log_level=log_level),
)

self._preflight_validation(pg_with_metadata=pg_with_metadata, validate=validate)
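Worth noting in the `create_job()` change above: `log_level` is passed through `dict_no_none()` (a small helper from `openeo.util`), so the default `None` is dropped and the field only appears in the `POST /jobs` request body when explicitly set. A quick illustration of that behavior, assuming the helper's usual keyword-argument form:

```python
from openeo.util import dict_no_none

# Entries with value None are dropped, so an unset log_level
# never ends up in the POST /jobs request body.
assert dict_no_none(title="My job", log_level=None) == {"title": "My job"}
assert dict_no_none(title="My job", log_level="warning") == {
    "title": "My job",
    "log_level": "warning",
}
```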
22 changes: 18 additions & 4 deletions openeo/rest/datacube.py
@@ -2353,7 +2353,7 @@ def download(
.. versionchanged:: 0.32.0
Added ``auto_add_save_result`` option
.. versionadded:: 0.36.0
.. versionchanged:: 0.36.0
Added arguments ``additional`` and ``job_options``.
"""
# TODO #278 centralize download/create_job/execute_job logic in DataCube, VectorCube, MlModel, ...
@@ -2478,6 +2478,7 @@ def execute_batch(
validate: Optional[bool] = None,
auto_add_save_result: bool = True,
show_error_logs: bool = True,
log_level: Optional[str] = None,
# TODO: deprecate `format_options` as keyword arguments
**format_options,
) -> BatchJob:
@@ -2496,15 +2497,20 @@ def execute_batch(
(overruling the connection's ``auto_validate`` setting).
:param auto_add_save_result: Automatically add a ``save_result`` node to the process graph if there is none yet.
:param show_error_logs: whether to automatically print error logs when the batch job failed.
:param log_level: Optional minimum severity level for log entries that the back-end should keep track of.
One of "error" (highest severity), "warning", "info", and "debug" (lowest severity).
.. versionchanged:: 0.32.0
Added ``auto_add_save_result`` option
.. versionadded:: 0.36.0
.. versionchanged:: 0.36.0
Added argument ``additional``.
.. versionchanged:: 0.37.0
Added argument ``show_error_logs``.
.. versionchanged:: 0.37.0
Added argument ``log_level``.
"""
# TODO: start showing deprecation warnings about these inconsistent argument names
if "format" in format_options and not out_format:
@@ -2531,6 +2537,7 @@
job_options=job_options,
validate=validate,
auto_add_save_result=False,
log_level=log_level,
)
return job.run_synchronous(
outputfile=outputfile,
@@ -2552,6 +2559,7 @@ def create_job(
job_options: Optional[dict] = None,
validate: Optional[bool] = None,
auto_add_save_result: bool = True,
log_level: Optional[str] = None,
# TODO: avoid `format_options` as keyword arguments
**format_options,
) -> BatchJob:
@@ -2575,14 +2583,19 @@
:param validate: Optional toggle to enable/prevent validation of the process graphs before execution
(overruling the connection's ``auto_validate`` setting).
:param auto_add_save_result: Automatically add a ``save_result`` node to the process graph if there is none yet.
:param log_level: Optional minimum severity level for log entries that the back-end should keep track of.
One of "error" (highest severity), "warning", "info", and "debug" (lowest severity).
:return: Created job.
.. versionadded:: 0.32.0
.. versionchanged:: 0.32.0
Added ``auto_add_save_result`` option
.. versionadded:: 0.36.0
.. versionchanged:: 0.36.0
Added ``additional`` argument.
.. versionchanged:: 0.37.0
Added argument ``log_level``.
"""
# TODO: add option to also automatically start the job?
# TODO: avoid using all kwargs as format_options
@@ -2605,6 +2618,7 @@
validate=validate,
additional=additional,
job_options=job_options,
log_level=log_level,
)

send_job = legacy_alias(create_job, name="send_job", since="0.10.0")
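With the `DataCube` changes above, `execute_batch()` forwards `log_level` to `create_job()`, so the severity threshold can also be set in the one-shot workflow. A minimal sketch (the output filename is arbitrary, and the `level` filter of `BatchJob.logs()` is assumed here for illustration):

```python
# Run the batch job while asking the back-end to record only
# log entries with severity "warning" or higher.
job = cube.execute_batch(outputfile="result.nc", log_level="warning")

# Afterwards, inspect the recorded logs; BatchJob.logs() also
# accepts a `level` argument to filter on severity client-side.
for entry in job.logs(level="warning"):
    print(entry.get("level"), entry.get("message"))
```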
18 changes: 16 additions & 2 deletions openeo/rest/mlmodel.py
@@ -72,6 +72,7 @@ def execute_batch(
additional: Optional[dict] = None,
job_options: Optional[dict] = None,
show_error_logs: bool = True,
log_level: Optional[str] = None,
) -> BatchJob:
"""
Evaluate the process graph by creating a batch job, and retrieving the results when it is finished.
@@ -87,12 +88,17 @@
:param job_options: dictionary of job options to pass to the backend
(under top-level property "job_options")
:param show_error_logs: whether to automatically print error logs when the batch job failed.
:param log_level: Optional minimum severity level for log entries that the back-end should keep track of.
One of "error" (highest severity), "warning", "info", and "debug" (lowest severity).
.. versionadded:: 0.36.0
.. versionchanged:: 0.36.0
Added argument ``additional``.
.. versionchanged:: 0.37.0
Added argument ``show_error_logs``.
.. versionchanged:: 0.37.0
Added argument ``log_level``.
"""
job = self.create_job(
title=title,
@@ -101,6 +107,7 @@
budget=budget,
additional=additional,
job_options=job_options,
log_level=log_level,
)
return job.run_synchronous(
# TODO #135 support multi file result sets too
@@ -120,6 +127,7 @@ def create_job(
budget: Optional[float] = None,
additional: Optional[dict] = None,
job_options: Optional[dict] = None,
log_level: Optional[str] = None,
) -> BatchJob:
"""
Sends a job to the backend and returns a ClientJob instance.
@@ -133,10 +141,15 @@
:param job_options: dictionary of job options to pass to the backend
(under top-level property "job_options")
:param format_options: String Parameters for the job result format
:param log_level: Optional minimum severity level for log entries that the back-end should keep track of.
One of "error" (highest severity), "warning", "info", and "debug" (lowest severity).
:return: Created job.
.. versionadded:: 0.36.0
.. versionchanged:: 0.36.0
Added argument ``additional``.
.. versionchanged:: 0.37.0
Added argument ``log_level``.
"""
# TODO: centralize `create_job` for `DataCube`, `VectorCube`, `MlModel`, ...
pg = self
@@ -151,4 +164,5 @@
budget=budget,
additional=additional,
job_options=job_options,
log_level=log_level,
)
4 changes: 4 additions & 0 deletions openeo/rest/multiresult.py
@@ -82,6 +82,7 @@ def create_job(
additional: Optional[dict] = None,
job_options: Optional[dict] = None,
validate: Optional[bool] = None,
log_level: Optional[str] = None,
) -> BatchJob:
return self._connection.create_job(
process_graph=self._multi_leaf_graph,
@@ -90,6 +91,7 @@
additional=additional,
job_options=job_options,
validate=validate,
log_level=log_level,
)

def execute_batch(
@@ -100,12 +102,14 @@
additional: Optional[dict] = None,
job_options: Optional[dict] = None,
validate: Optional[bool] = None,
log_level: Optional[str] = None,
) -> BatchJob:
job = self.create_job(
title=title,
description=description,
additional=additional,
job_options=job_options,
validate=validate,
log_level=log_level,
)
return job.run_synchronous()
16 changes: 15 additions & 1 deletion openeo/rest/vectorcube.py
@@ -260,6 +260,7 @@ def execute_batch(
validate: Optional[bool] = None,
auto_add_save_result: bool = True,
show_error_logs: bool = True,
log_level: Optional[str] = None,
# TODO: avoid using kwargs as format options
**format_options,
) -> BatchJob:
@@ -279,18 +280,23 @@
(overruling the connection's ``auto_validate`` setting).
:param auto_add_save_result: Automatically add a ``save_result`` node to the process graph if there is none yet.
:param show_error_logs: whether to automatically print error logs when the batch job failed.
:param log_level: Optional minimum severity level for log entries that the back-end should keep track of.
One of "error" (highest severity), "warning", "info", and "debug" (lowest severity).
.. versionchanged:: 0.21.0
When not specified explicitly, output format is guessed from output file extension.
.. versionchanged:: 0.32.0
Added ``auto_add_save_result`` option
.. versionadded:: 0.36.0
.. versionchanged:: 0.36.0
Added argument ``additional``.
.. versionchanged:: 0.37.0
Added argument ``show_error_logs``.
.. versionchanged:: 0.37.0
Added argument ``log_level``.
"""
cube = self
if auto_add_save_result:
@@ -311,6 +317,7 @@
job_options=job_options,
validate=validate,
auto_add_save_result=False,
log_level=log_level,
)
return job.run_synchronous(
# TODO #135 support multi file result sets too
@@ -333,6 +340,7 @@ def create_job(
job_options: Optional[dict] = None,
validate: Optional[bool] = None,
auto_add_save_result: bool = True,
log_level: Optional[str] = None,
**format_options,
) -> BatchJob:
"""
Expand All @@ -351,11 +359,16 @@ def create_job(
:param validate: Optional toggle to enable/prevent validation of the process graphs before execution
(overruling the connection's ``auto_validate`` setting).
:param auto_add_save_result: Automatically add a ``save_result`` node to the process graph if there is none yet.
:param log_level: Optional minimum severity level for log entries that the back-end should keep track of.
One of "error" (highest severity), "warning", "info", and "debug" (lowest severity).
:return: Created job.
.. versionchanged:: 0.32.0
Added ``auto_add_save_result`` option
.. versionchanged:: 0.37.0
Added argument ``log_level``.
"""
# TODO: avoid using all kwargs as format_options
# TODO #278 centralize download/create_job/execute_job logic in DataCube, VectorCube, MlModel, ...
@@ -377,6 +390,7 @@
additional=additional,
job_options=job_options,
validate=validate,
log_level=log_level,
)

send_job = legacy_alias(create_job, name="send_job", since="0.10.0")
16 changes: 16 additions & 0 deletions tests/rest/datacube/test_datacube.py
@@ -11,6 +11,7 @@
from datetime import date, datetime
from unittest import mock

import dirty_equals
import numpy as np
import pytest
import requests
@@ -871,6 +872,21 @@ def test_create_job_auto_add_save_result(self, s2cube, dummy_backend, auto_add_s
s2cube.create_job(auto_add_save_result=auto_add_save_result)
assert set(n["process_id"] for n in dummy_backend.get_pg().values()) == process_ids

@pytest.mark.parametrize(
["create_kwargs", "expected"],
[
({}, {}),
({"log_level": None}, {}),
({"log_level": "error"}, {"log_level": "error"}),
],
)
def test_create_job_log_level(self, s2cube, dummy_backend, create_kwargs, expected):
s2cube.create_job(**create_kwargs)
assert dummy_backend.get_batch_post_data() == {
"process": {"process_graph": dirty_equals.IsPartialDict()},
**expected,
}

def test_execute_batch_defaults(self, s2cube, get_create_job_pg, recwarn, caplog):
s2cube.execute_batch()
pg = get_create_job_pg()
32 changes: 32 additions & 0 deletions tests/rest/test_connection.py
@@ -2955,6 +2955,38 @@ def test_create_job_with_additional_and_job_options(dummy_backend):
}


def test_create_job_log_level_basic(dummy_backend):
job = dummy_backend.connection.create_job(
{"foo1": {"process_id": "foo"}},
log_level="warning",
)
assert isinstance(job, BatchJob)
assert dummy_backend.get_batch_post_data() == {
"process": {"process_graph": {"foo1": {"process_id": "foo"}}},
"log_level": "warning",
}


@pytest.mark.parametrize(
["create_kwargs", "expected"],
[
({}, {}),
({"log_level": None}, {}),
({"log_level": "error"}, {"log_level": "error"}),
],
)
def test_create_job_log_level(dummy_backend, create_kwargs, expected):
job = dummy_backend.connection.create_job(
{"foo1": {"process_id": "foo"}},
**create_kwargs,
)
assert isinstance(job, BatchJob)
assert dummy_backend.get_batch_post_data() == {
"process": {"process_graph": {"foo1": {"process_id": "foo"}}},
**expected,
}


@pytest.mark.parametrize(
"pg",
[