diff --git a/CHANGELOG.md b/CHANGELOG.md
index d860fd18e..6df95dd76 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -29,6 +29,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
   moved accordingly to the `VectorCube` class.
 - Improved documentation on `openeo.processes` and `ProcessBuilder`
   ([#390](https://github.com/Open-EO/openeo-python-client/issues/390)).
+- `DataCube.create_job()` now requires keyword arguments for all but the `out_format` argument.
+  ([#412](https://github.com/Open-EO/openeo-python-client/issues/412)).
 
 ### Removed
 
@@ -44,6 +46,9 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 ### Fixed
 
 - Reinstated old behavior of authentication related user files (e.g. refresh token store) on Windows: when `PrivateJsonFile` may be readable by others, just log a message instead of raising `PermissionError` ([387](https://github.com/Open-EO/openeo-python-client/issues/387))
+- `VectorCube.create_job()` and `MlModel.create_job()` are properly aligned with `DataCube.create_job()`
+  regarding setting job title, description, etc.
+  ([#412](https://github.com/Open-EO/openeo-python-client/issues/412)).
 
 ## [0.15.0] - 2023-03-03
 
diff --git a/openeo/rest/connection.py b/openeo/rest/connection.py
index 6ef25f3f9..b85fc4b2a 100644
--- a/openeo/rest/connection.py
+++ b/openeo/rest/connection.py
@@ -1174,8 +1174,8 @@ def create_job(
 
         :param process_graph: (flat) dict representing a process graph, or process graph as raw JSON string,
             or as local file path or URL
-        :param title: String title of the job
-        :param description: String description of the job
+        :param title: job title
+        :param description: job description
         :param plan: billing plan
         :param budget: maximum cost the request is allowed to produce
         :param additional: additional job options to pass to the backend
diff --git a/openeo/rest/datacube.py b/openeo/rest/datacube.py
index 213791acc..c724c6518 100644
--- a/openeo/rest/datacube.py
+++ b/openeo/rest/datacube.py
@@ -1896,8 +1896,15 @@ def execute_batch(
         )
 
     def create_job(
-        self, out_format=None, title: str = None, description: str = None, plan: str = None, budget=None,
-        job_options=None, **format_options
+        self,
+        out_format: Optional[str] = None,
+        *,
+        title: Optional[str] = None,
+        description: Optional[str] = None,
+        plan: Optional[str] = None,
+        budget: Optional[float] = None,
+        job_options: Optional[dict] = None,
+        **format_options,
     ) -> BatchJob:
         """
         Sends the datacube's process graph as a batch job to the back-end
@@ -1908,18 +1915,28 @@ def create_job(
         Use :py:meth:`execute_batch` instead to have the openEO Python client take care of that job management.
 
         :param out_format: String Format of the job result.
+        :param title: job title
+        :param description: job description
+        :param plan: billing plan
+        :param budget: maximum cost the request is allowed to produce
         :param job_options: A dictionary containing (custom) job options
        :param format_options: String Parameters for the job result format
-        :return: status: Job resulting job.
+        :return: Created job.
         """
         # TODO: add option to also automatically start the job?
-        img = self
+        # TODO: avoid using all kwargs as format_options
+        # TODO: centralize `create_job` for `DataCube`, `VectorCube`, `MlModel`, ...
+        cube = self
         if out_format:
             # add `save_result` node
-            img = img.save_result(format=out_format, options=format_options)
+            cube = cube.save_result(format=out_format, options=format_options)
         return self._connection.create_job(
-            process_graph=img.flat_graph(),
-            title=title, description=description, plan=plan, budget=budget, additional=job_options
+            process_graph=cube.flat_graph(),
+            title=title,
+            description=description,
+            plan=plan,
+            budget=budget,
+            additional=job_options,
         )
 
     send_job = legacy_alias(create_job, name="send_job", since="0.10.0")
diff --git a/openeo/rest/mlmodel.py b/openeo/rest/mlmodel.py
index 47fb9fc6a..6a9effdfd 100644
--- a/openeo/rest/mlmodel.py
+++ b/openeo/rest/mlmodel.py
@@ -79,16 +79,37 @@ def execute_batch(
             print=print, max_poll_interval=max_poll_interval, connection_retry_interval=connection_retry_interval
         )
 
-    def create_job(self, **kwargs) -> BatchJob:
+    def create_job(
+        self,
+        out_format=None,
+        *,
+        title: Optional[str] = None,
+        description: Optional[str] = None,
+        plan: Optional[str] = None,
+        budget: Optional[float] = None,
+        job_options: Optional[dict] = None,
+    ) -> BatchJob:
         """
         Sends a job to the backend and returns a ClientJob instance.
 
-        See :py:meth:`Connection.create_job` for additional arguments (e.g. to set job title, description, ...)
-
-        :return: resulting job.
+        :param out_format: String Format of the job result.
+        :param title: job title
+        :param description: job description
+        :param plan: billing plan
+        :param budget: maximum cost the request is allowed to produce
+        :param job_options: A dictionary containing (custom) job options
+        :return: Created job.
         """
+        # TODO: centralize `create_job` for `DataCube`, `VectorCube`, `MlModel`, ...
         pg = self
         if pg.result_node().process_id not in {"save_ml_model"}:
             _log.warning("Process graph has no final `save_ml_model`. Adding it automatically.")
             pg = pg.save_ml_model()
-        return self._connection.create_job(process_graph=pg.flat_graph(), **kwargs)
+        return self._connection.create_job(
+            process_graph=pg.flat_graph(),
+            title=title,
+            description=description,
+            plan=plan,
+            budget=budget,
+            additional=job_options,
+        )
diff --git a/openeo/rest/vectorcube.py b/openeo/rest/vectorcube.py
index cddd7f1cd..30ba7c9e1 100644
--- a/openeo/rest/vectorcube.py
+++ b/openeo/rest/vectorcube.py
@@ -132,20 +132,43 @@ def execute_batch(
             print=print, max_poll_interval=max_poll_interval, connection_retry_interval=connection_retry_interval
         )
 
-    def create_job(self, out_format=None, job_options=None, **format_options) -> BatchJob:
+    def create_job(
+        self,
+        out_format=None,
+        *,
+        title: Optional[str] = None,
+        description: Optional[str] = None,
+        plan: Optional[str] = None,
+        budget: Optional[float] = None,
+        job_options: Optional[dict] = None,
+        **format_options,
+    ) -> BatchJob:
         """
         Sends a job to the backend and returns a ClientJob instance.
 
         :param out_format: String Format of the job result.
-        :param job_options:
+        :param title: job title
+        :param description: job description
+        :param plan: billing plan
+        :param budget: maximum cost the request is allowed to produce
+        :param job_options: A dictionary containing (custom) job options
         :param format_options: String Parameters for the job result format
-        :return: status: ClientJob resulting job.
+        :return: Created job.
         """
-        shp = self
+        # TODO: avoid using all kwargs as format_options
+        # TODO: centralize `create_job` for `DataCube`, `VectorCube`, `MlModel`, ...
+        cube = self
         if out_format:
             # add `save_result` node
-            shp = shp.save_result(format=out_format, options=format_options)
-        return self._connection.create_job(process_graph=shp.flat_graph(), additional=job_options)
+            cube = cube.save_result(format=out_format, options=format_options)
+        return self._connection.create_job(
+            process_graph=cube.flat_graph(),
+            title=title,
+            description=description,
+            plan=plan,
+            budget=budget,
+            additional=job_options,
+        )
 
     send_job = legacy_alias(create_job, name="send_job", since="0.10.0")
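
For context, a minimal usage sketch of the keyword-only `create_job()` API that this patch aligns across `DataCube`, `VectorCube` and `MlModel`. The backend URL, collection ID, billing plan and job options below are placeholders for illustration, not values implied by this change.

```python
import openeo

# Placeholder backend and collection, for illustration only.
connection = openeo.connect("https://openeo.example.com").authenticate_oidc()
cube = connection.load_collection("SENTINEL2_L2A")

# `out_format` may still be passed positionally; all job metadata is keyword-only now.
job = cube.create_job(
    "GTiff",
    title="Example job",
    description="Job created via the keyword-only create_job() signature",
    plan="free",                          # billing plan (backend-specific placeholder)
    budget=10,                            # maximum allowed cost
    job_options={"driver-memory": "2G"},  # custom job options (backend-specific placeholder)
)
job.start_and_wait()

# Positional metadata such as cube.create_job("GTiff", "Example job") now raises a TypeError,
# because `title` and the other arguments after `out_format` must be given by keyword.
# The same keyword-only style applies to VectorCube.create_job() and MlModel.create_job().
```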