
Commit 4ce7d78

Fix #652: preserve job title in execute_batch

1 parent 7c321be · commit 4ce7d78

8 files changed: +131 −7 lines changed

CHANGELOG.md

Lines changed: 2 additions & 0 deletions

```diff
@@ -15,6 +15,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 
 ### Fixed
 
+- Fixed support for `title` and `description` in `execute_batch()` ([#652](https://github.com/Open-EO/openeo-python-client/issues/652))
+
 
 ## [0.33.0] - 2024-10-18
 
```
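
For reference, a minimal usage sketch of the behavior this entry describes (backend URL, collection id and output filename are placeholders, not part of the commit):

```python
import openeo

# Hypothetical connection and cube, for illustration only.
connection = openeo.connect("https://openeo.example").authenticate_oidc()
cube = connection.load_collection("SENTINEL2_L2A")

# Before this fix, title/description passed here were silently dropped;
# with this change they are forwarded to the created batch job.
job = cube.execute_batch(
    outputfile="composite.tiff",
    title="S2 composite",
    description="Demo run for issue #652",
)
print(job.job_id)
```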

openeo/rest/_testing.py

Lines changed: 3 additions & 0 deletions

```diff
@@ -87,6 +87,9 @@ def _handle_post_jobs(self, request, context):
         pg = post_data["process"]["process_graph"]
         job_id = f"job-{len(self.batch_jobs):03d}"
         job_data = {"job_id": job_id, "pg": pg, "status": "created"}
+        for field in ["title", "description"]:
+            if field in post_data:
+                job_data[field] = post_data[field]
         for field in self.extra_job_metadata_fields:
             job_data[field] = post_data.get(field)
         self.batch_jobs[job_id] = job_data
```
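
With this, the dummy backend used in the client's test suite records `title` and `description` on the jobs it creates, so tests can assert on them. A hypothetical minimal test built on that pattern (fixture names and process graph are illustrative, mirroring the real tests added further down in this commit):

```python
def test_title_preserved_by_dummy_backend(connection, dummy_backend):
    # Create a job with metadata through the regular client API ...
    pg = {"add1": {"process_id": "add", "arguments": {"x": 3, "y": 5}, "result": True}}
    job = connection.create_job(pg, title="My job", description="Adding 3 and 5")
    # ... and check that the dummy backend's job record preserves it.
    assert dummy_backend.batch_jobs[job.job_id]["title"] == "My job"
    assert dummy_backend.batch_jobs[job.job_id]["description"] == "Adding 3 and 5"
```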

openeo/rest/datacube.py

Lines changed: 13 additions & 1 deletion

```diff
@@ -2361,6 +2361,10 @@ def execute_batch(
         outputfile: Optional[Union[str, pathlib.Path]] = None,
         out_format: Optional[str] = None,
         *,
+        title: Optional[str] = None,
+        description: Optional[str] = None,
+        plan: Optional[str] = None,
+        budget: Optional[float] = None,
         print: typing.Callable[[str], None] = print,
         max_poll_interval: float = 60,
         connection_retry_interval: float = 30,
@@ -2402,7 +2406,15 @@ def execute_batch(
                 method="DataCube.execute_batch()",
             )
 
-        job = cube.create_job(job_options=job_options, validate=validate, auto_add_save_result=False)
+        job = cube.create_job(
+            title=title,
+            description=description,
+            plan=plan,
+            budget=budget,
+            job_options=job_options,
+            validate=validate,
+            auto_add_save_result=False,
+        )
         return job.run_synchronous(
             outputfile=outputfile,
             print=print, max_poll_interval=max_poll_interval, connection_retry_interval=connection_retry_interval
```
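
The net effect: `DataCube.execute_batch()` now forwards job metadata to `create_job()`, so the one-shot call and the explicit create/start/poll path end up with the same job metadata. A rough sketch of the two (assuming `cube` is a `DataCube`, e.g. from `connection.load_collection(...)`; filenames are placeholders):

```python
# One-shot: create the batch job, start it, wait for it, download the result.
cube.execute_batch(outputfile="out.tiff", title="S2 job", description="demo")

# Roughly equivalent explicit path (modulo output-format guessing / save_result handling):
job = cube.create_job(title="S2 job", description="demo")
job.start_and_wait()
job.get_results().download_file("out.tiff")
```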

openeo/rest/mlmodel.py

Lines changed: 12 additions & 5 deletions

```diff
@@ -59,10 +59,17 @@ def load_ml_model(connection: Connection, id: Union[str, BatchJob]) -> MlModel:
         return MlModel(graph=PGNode(process_id="load_ml_model", id=id), connection=connection)
 
     def execute_batch(
-            self,
-            outputfile: Union[str, pathlib.Path],
-            print=print, max_poll_interval=60, connection_retry_interval=30,
-            job_options=None,
+        self,
+        outputfile: Union[str, pathlib.Path],
+        *,
+        title: Optional[str] = None,
+        description: Optional[str] = None,
+        plan: Optional[str] = None,
+        budget: Optional[float] = None,
+        print=print,
+        max_poll_interval=60,
+        connection_retry_interval=30,
+        job_options=None,
     ) -> BatchJob:
         """
         Evaluate the process graph by creating a batch job, and retrieving the results when it is finished.
@@ -75,7 +82,7 @@ def execute_batch(
         :param out_format: (optional) Format of the job result.
         :param format_options: String Parameters for the job result format
         """
-        job = self.create_job(job_options=job_options)
+        job = self.create_job(title=title, description=description, plan=plan, budget=budget, job_options=job_options)
         return job.run_synchronous(
             # TODO #135 support multi file result sets too
             outputfile=outputfile,
```
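
`MlModel.execute_batch()` gets the same keyword-only arguments. A short hedged sketch, assuming `model` is an `MlModel` built earlier in a workflow (e.g. the result of a random-forest training step); the output filename is a placeholder:

```python
# Run the ML training graph as a batch job, now with job metadata attached.
job = model.execute_batch(
    outputfile="random_forest.model",
    title="RF training",
    description="Random forest trained on 2024 reference samples",
)
print(job.job_id)
```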

openeo/rest/vectorcube.py

Lines changed: 13 additions & 1 deletion

```diff
@@ -248,6 +248,10 @@ def execute_batch(
         outputfile: Optional[Union[str, pathlib.Path]] = None,
         out_format: Optional[str] = None,
         *,
+        title: Optional[str] = None,
+        description: Optional[str] = None,
+        plan: Optional[str] = None,
+        budget: Optional[float] = None,
         print=print,
         max_poll_interval: float = 60,
         connection_retry_interval: float = 30,
@@ -287,7 +291,15 @@ def execute_batch(
                 default_format=self._DEFAULT_VECTOR_FORMAT,
                 method="VectorCube.execute_batch()",
             )
-        job = cube.create_job(job_options=job_options, validate=validate, auto_add_save_result=False)
+        job = cube.create_job(
+            title=title,
+            description=description,
+            plan=plan,
+            budget=budget,
+            job_options=job_options,
+            validate=validate,
+            auto_add_save_result=False,
+        )
         return job.run_synchronous(
             # TODO #135 support multi file result sets too
             outputfile=outputfile,
```

tests/extra/test_job_management.py

Lines changed: 34 additions & 0 deletions

```diff
@@ -1089,6 +1089,8 @@ def test_minimal(self, con, dummy_backend, remote_process_definitions):
                     }
                 },
                 "status": "created",
+                "title": "Process '3plus5' with {}",
+                "description": "Process '3plus5' (namespace https://remote.test/3plus5.json) with {}",
             }
         }
 
@@ -1150,6 +1152,8 @@ def test_basic_parameterization(self, con, dummy_backend, parameter_defaults, ro
                     }
                 },
                 "status": "created",
+                "title": dirty_equals.IsStr(regex="Process 'increment' with .*"),
+                "description": dirty_equals.IsStr(regex="Process 'increment' .*"),
             }
         }
 
@@ -1199,6 +1203,8 @@ def test_process_references_in_constructor(
                 "job_id": "job-000",
                 "pg": {"3plus51": {**expected, "arguments": {}, "result": True}},
                 "status": "created",
+                "title": "Process '3plus5' with {}",
+                "description": f"Process '3plus5' (namespace {namespace}) with {{}}",
             }
         }
 
@@ -1251,6 +1257,8 @@ def test_with_job_manager_remote_basic(
                     }
                 },
                 "status": "finished",
+                "title": "Process 'increment' with {'data': 1, 'increment': 5}",
+                "description": "Process 'increment' (namespace https://remote.test/increment.json) with {'data': 1, 'increment': 5}",
             },
             "job-001": {
                 "job_id": "job-001",
@@ -1263,6 +1271,8 @@ def test_with_job_manager_remote_basic(
                     }
                 },
                 "status": "finished",
+                "title": "Process 'increment' with {'data': 2, 'increment': 5}",
+                "description": "Process 'increment' (namespace https://remote.test/increment.json) with {'data': 2, 'increment': 5}",
             },
             "job-002": {
                 "job_id": "job-002",
@@ -1275,6 +1285,8 @@ def test_with_job_manager_remote_basic(
                     }
                 },
                 "status": "finished",
+                "title": "Process 'increment' with {'data': 3, 'increment': 5}",
+                "description": "Process 'increment' (namespace https://remote.test/increment.json) with {'data': 3, 'increment': 5}",
             },
         }
 
@@ -1353,6 +1365,8 @@ def test_with_job_manager_remote_parameter_handling(
                     }
                 },
                 "status": "finished",
+                "title": dirty_equals.IsStr(regex="Process 'increment' with .*"),
+                "description": dirty_equals.IsStr(regex="Process 'increment'.*"),
            },
            "job-001": {
                "job_id": "job-001",
@@ -1365,6 +1379,8 @@ def test_with_job_manager_remote_parameter_handling(
                    }
                },
                "status": "finished",
+                "title": dirty_equals.IsStr(regex="Process 'increment' with .*"),
+                "description": dirty_equals.IsStr(regex="Process 'increment'.*"),
            },
            "job-002": {
                "job_id": "job-002",
@@ -1377,6 +1393,8 @@ def test_with_job_manager_remote_parameter_handling(
                    }
                },
                "status": "finished",
+                "title": dirty_equals.IsStr(regex="Process 'increment' with .*"),
+                "description": dirty_equals.IsStr(regex="Process 'increment'.*"),
            },
        }
 
@@ -1436,6 +1454,8 @@ def test_with_job_manager_remote_geometry(self, tmp_path, requests_mock, dummy_b
                    }
                },
                "status": "finished",
+                "title": "Process 'offset_polygon' with {'data': 123, 'polygons': {'type': 'Point', 'coordinates': (1...",
+                "description": "Process 'offset_polygon' (namespace https://remote.test/offset_polygon.json) with {'data': 123, 'polygons': {'type': 'Point', 'coordinates': (1.0, 2.0)}, 'offset': 11}",
            },
            "job-001": {
                "job_id": "job-001",
@@ -1452,6 +1472,8 @@ def test_with_job_manager_remote_geometry(self, tmp_path, requests_mock, dummy_b
                    }
                },
                "status": "finished",
+                "title": "Process 'offset_polygon' with {'data': 123, 'polygons': {'type': 'Point', 'coordinates': (3...",
+                "description": "Process 'offset_polygon' (namespace https://remote.test/offset_polygon.json) with {'data': 123, 'polygons': {'type': 'Point', 'coordinates': (3.0, 4.0)}, 'offset': 22}",
            },
        }
 
@@ -1527,6 +1549,8 @@ def test_with_job_manager_remote_geometry_after_resume(
                    }
                },
                "status": "finished",
+                "title": dirty_equals.IsStr(regex="Process 'offset_polygon' with.*"),
+                "description": dirty_equals.IsStr(regex="Process 'offset_polygon' .*"),
            },
            "job-001": {
                "job_id": "job-001",
@@ -1543,6 +1567,8 @@ def test_with_job_manager_remote_geometry_after_resume(
                    }
                },
                "status": "finished",
+                "title": dirty_equals.IsStr(regex="Process 'offset_polygon' with.*"),
+                "description": dirty_equals.IsStr(regex="Process 'offset_polygon' .*"),
            },
        }
 
@@ -1586,6 +1612,8 @@ def test_with_job_manager_udp_basic(
                    }
                },
                "status": "finished",
+                "title": "Process 'increment' with {'data': 3, 'increment': 5}",
+                "description": "Process 'increment' (namespace None) with {'data': 3, 'increment': 5}",
            },
            "job-001": {
                "job_id": "job-001",
@@ -1597,6 +1625,8 @@ def test_with_job_manager_udp_basic(
                    }
                },
                "status": "finished",
+                "title": "Process 'increment' with {'data': 5, 'increment': 5}",
+                "description": "Process 'increment' (namespace None) with {'data': 5, 'increment': 5}",
            },
        }
 
@@ -1638,6 +1668,8 @@ def test_with_job_manager_parameter_column_map(
                    }
                },
                "status": "finished",
+                "title": "Process 'increment' with {'data': 3, 'increment': 100}",
+                "description": "Process 'increment' (namespace https://remote.test/increment.json) with {'data': 3, 'increment': 100}",
            },
            "job-001": {
                "job_id": "job-001",
@@ -1650,5 +1682,7 @@ def test_with_job_manager_parameter_column_map(
                    }
                },
                "status": "finished",
+                "title": "Process 'increment' with {'data': 5, 'increment': 200}",
+                "description": "Process 'increment' (namespace https://remote.test/increment.json) with {'data': 5, 'increment': 200}",
            },
        }
```

tests/rest/datacube/test_datacube.py

Lines changed: 27 additions & 0 deletions

```diff
@@ -1236,3 +1236,30 @@ def test_cube_execute_batch_validation(self, dummy_backend, connection, validate
         else:
             assert dummy_backend.validation_requests == []
             assert caplog.messages == []
+
+
+def test_execute_batch_with_title(s2cube, dummy_backend):
+    """
+    Support title/description in execute_batch
+    https://github.com/Open-EO/openeo-python-client/issues/652
+    """
+    s2cube.execute_batch(title="S2 job", description="Lorem ipsum dolor S2 amet")
+    assert dummy_backend.batch_jobs == {
+        "job-000": {
+            "job_id": "job-000",
+            "pg": {
+                "loadcollection1": {
+                    "process_id": "load_collection",
+                    "arguments": {"id": "S2", "spatial_extent": None, "temporal_extent": None},
+                },
+                "saveresult1": {
+                    "process_id": "save_result",
+                    "arguments": {"data": {"from_node": "loadcollection1"}, "format": "GTiff", "options": {}},
+                    "result": True,
+                },
+            },
+            "status": "finished",
+            "title": "S2 job",
+            "description": "Lorem ipsum dolor S2 amet",
+        }
+    }
```

tests/rest/datacube/test_vectorcube.py

Lines changed: 27 additions & 0 deletions

```diff
@@ -829,3 +829,30 @@ def test_vector_to_raster(s2cube, vector_cube, requests_mock, target_parameter,
             "result": True,
         },
     }
+
+
+def test_execute_batch_with_title(vector_cube, dummy_backend):
+    """
+    Support title/description in execute_batch
+    https://github.com/Open-EO/openeo-python-client/issues/652
+    """
+    vector_cube.execute_batch(title="S2 job", description="Lorem ipsum dolor S2 amet")
+    assert dummy_backend.batch_jobs == {
+        "job-000": {
+            "job_id": "job-000",
+            "pg": {
+                "loadgeojson1": {
+                    "process_id": "load_geojson",
+                    "arguments": {"data": {"coordinates": [1, 2], "type": "Point"}, "properties": []},
+                },
+                "saveresult1": {
+                    "process_id": "save_result",
+                    "arguments": {"data": {"from_node": "loadgeojson1"}, "format": "GeoJSON", "options": {}},
+                    "result": True,
+                },
+            },
+            "status": "finished",
+            "title": "S2 job",
+            "description": "Lorem ipsum dolor S2 amet",
+        }
+    }
```
