Skip to content

Commit

Permalink
Issue #412 synchronize create_job in DataCube/VectorCube/MlModel
Browse files Browse the repository at this point in the history
  • Loading branch information
soxofaan committed Apr 6, 2023
1 parent 9b87888 commit f216477
Show file tree
Hide file tree
Showing 5 changed files with 87 additions and 20 deletions.
5 changes: 5 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
moved accordingly to the `VectorCube` class.
- Improved documentation on `openeo.processes` and `ProcessBuilder`
([#390](https://github.com/Open-EO/openeo-python-client/issues/390)).
- `DataCube.create_job()` now requires keyword arguments for all but the `out_format` argument.
([#412](https://github.com/Open-EO/openeo-python-client/issues/412)).

### Removed

Expand All @@ -44,6 +46,9 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
### Fixed

- Reinstated old behavior of authentication related user files (e.g. refresh token store) on Windows: when `PrivateJsonFile` may be readable by others, just log a message instead of raising `PermissionError` ([#387](https://github.com/Open-EO/openeo-python-client/issues/387))
- `VectorCube.create_job()` and `MlModel.create_job()` are properly aligned with `DataCube.create_job()`
regarding setting job title, description, etc.
([#412](https://github.com/Open-EO/openeo-python-client/issues/412)).


## [0.15.0] - 2023-03-03
Expand Down
4 changes: 2 additions & 2 deletions openeo/rest/connection.py
Original file line number Diff line number Diff line change
Expand Up @@ -1174,8 +1174,8 @@ def create_job(
:param process_graph: (flat) dict representing a process graph, or process graph as raw JSON string,
or as local file path or URL
:param title: String title of the job
:param description: String description of the job
:param title: job title
:param description: job description
:param plan: billing plan
:param budget: maximum cost the request is allowed to produce
:param additional: additional job options to pass to the backend
Expand Down
31 changes: 24 additions & 7 deletions openeo/rest/datacube.py
Original file line number Diff line number Diff line change
Expand Up @@ -1896,8 +1896,15 @@ def execute_batch(
)

def create_job(
self, out_format=None, title: str = None, description: str = None, plan: str = None, budget=None,
job_options=None, **format_options
self,
out_format: Optional[str] = None,
*,
title: Optional[str] = None,
description: Optional[str] = None,
plan: Optional[str] = None,
budget: Optional[float] = None,
job_options: Optional[dict] = None,
**format_options,
) -> BatchJob:
"""
Sends the datacube's process graph as a batch job to the back-end
Expand All @@ -1908,18 +1915,28 @@ def create_job(
Use :py:meth:`execute_batch` instead to have the openEO Python client take care of that job management.
:param out_format: String Format of the job result.
:param title: job title
:param description: job description
:param plan: billing plan
:param budget: maximum cost the request is allowed to produce
:param job_options: A dictionary containing (custom) job options
:param format_options: String Parameters for the job result format
:return: status: Job resulting job.
:return: Created job.
"""
# TODO: add option to also automatically start the job?
img = self
# TODO: avoid using all kwargs as format_options
# TODO: centralize `create_job` for `DataCube`, `VectorCube`, `MlModel`, ...
cube = self
if out_format:
# add `save_result` node
img = img.save_result(format=out_format, options=format_options)
cube = cube.save_result(format=out_format, options=format_options)
return self._connection.create_job(
process_graph=img.flat_graph(),
title=title, description=description, plan=plan, budget=budget, additional=job_options
process_graph=cube.flat_graph(),
title=title,
description=description,
plan=plan,
budget=budget,
additional=job_options,
)

# Deprecated alias: `send_job` was renamed to `create_job` in version 0.10.0.
send_job = legacy_alias(create_job, name="send_job", since="0.10.0")
Expand Down
32 changes: 27 additions & 5 deletions openeo/rest/mlmodel.py
Original file line number Diff line number Diff line change
Expand Up @@ -79,16 +79,38 @@ def execute_batch(
print=print, max_poll_interval=max_poll_interval, connection_retry_interval=connection_retry_interval
)

def create_job(
    self,
    out_format: Optional[str] = None,
    *,
    title: Optional[str] = None,
    description: Optional[str] = None,
    plan: Optional[str] = None,
    budget: Optional[float] = None,
    job_options: Optional[dict] = None,
) -> BatchJob:
    """
    Send the ML model's process graph as a batch job to the backend
    and return a :py:class:`BatchJob` instance.

    :param out_format: output format of the job result.
        NOTE(review): this argument is currently not used by this method
        (it is only kept for signature alignment with ``DataCube.create_job``);
        a warning is logged when it is given.
    :param title: job title
    :param description: job description
    :param plan: billing plan
    :param budget: maximum cost the request is allowed to produce
    :param job_options: A dictionary containing (custom) job options
    :return: Created job.
    """
    # TODO: centralize `create_job` for `DataCube`, `VectorCube`, `MlModel`, ...
    if out_format is not None:
        # Don't silently swallow the argument: make the no-op explicit to the user.
        _log.warning("`out_format` is ignored in `MlModel.create_job`.")
    pg = self
    if pg.result_node().process_id not in {"save_ml_model"}:
        # Ensure the graph ends with `save_ml_model`, otherwise the batch job
        # would not store the trained model as a result.
        _log.warning("Process graph has no final `save_ml_model`. Adding it automatically.")
        pg = pg.save_ml_model()
    return self._connection.create_job(
        process_graph=pg.flat_graph(),
        title=title,
        description=description,
        plan=plan,
        budget=budget,
        additional=job_options,
    )
35 changes: 29 additions & 6 deletions openeo/rest/vectorcube.py
Original file line number Diff line number Diff line change
Expand Up @@ -132,20 +132,43 @@ def execute_batch(
print=print, max_poll_interval=max_poll_interval, connection_retry_interval=connection_retry_interval
)

def create_job(
    self,
    out_format: Optional[str] = None,
    *,
    title: Optional[str] = None,
    description: Optional[str] = None,
    plan: Optional[str] = None,
    budget: Optional[float] = None,
    job_options: Optional[dict] = None,
    **format_options,
) -> BatchJob:
    """
    Send the vector cube's process graph as a batch job to the backend
    and return a :py:class:`BatchJob` instance.

    :param out_format: output format of the job result.
    :param title: job title
    :param description: job description
    :param plan: billing plan
    :param budget: maximum cost the request is allowed to produce
    :param job_options: A dictionary containing (custom) job options
    :param format_options: String Parameters for the job result format
    :return: Created job.
    """
    # TODO: avoid using all kwargs as format_options
    # TODO: centralize `create_job` for `DataCube`, `VectorCube`, `MlModel`, ...
    cube = self
    if out_format:
        # Append a `save_result` node so the backend knows the desired output format.
        cube = cube.save_result(format=out_format, options=format_options)
    return self._connection.create_job(
        process_graph=cube.flat_graph(),
        title=title,
        description=description,
        plan=plan,
        budget=budget,
        additional=job_options,
    )

# Deprecated alias: `send_job` was renamed to `create_job` in version 0.10.0.
send_job = legacy_alias(create_job, name="send_job", since="0.10.0")

Expand Down

0 comments on commit f216477

Please sign in to comment.