Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

ensure run_udf returns a DryRunDataCube if required #323 #324

Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion openeo_driver/ProcessGraphDeserializer.py
Original file line number Diff line number Diff line change
Expand Up @@ -1516,7 +1516,7 @@ def run_udf(args: dict, env: EvalEnv):
# Note: Other data types do execute the UDF during the dry-run.
# E.g. A DelayedVector (when the user directly provides geometries as input).
# This way a weak_spatial_extent can be calculated from the UDF's output.
return JSONResult({})
return data.run_udf()

if isinstance(data, SupportsRunUdf) and data.supports_udf(udf=udf, runtime=runtime):
_log.info(f"run_udf: data of type {type(data)} has direct run_udf support")
Expand Down
2 changes: 2 additions & 0 deletions openeo_driver/dry_run.py
Original file line number Diff line number Diff line change
Expand Up @@ -627,6 +627,8 @@ def raster_to_vector(self):

return self._process(operation="raster_to_vector", arguments={},metadata=CollectionMetadata(metadata={}, dimensions=dimensions))

def run_udf(self):
    """Record a ``run_udf`` operation in the dry-run trace.

    The UDF itself is not executed here; this only registers the operation
    via ``self._process`` and returns whatever cube that produces
    (presumably a ``DryRunDataCube`` — confirm against ``_process``).
    """
    return self._process(operation="run_udf", arguments={})

def resample_cube_spatial(self, target: 'DryRunDataCube', method: str = 'near') -> 'DryRunDataCube':
cube = self._process("process_type", [ProcessType.FOCAL_SPACE])
Expand Down
48 changes: 47 additions & 1 deletion tests/test_views_execute.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@
from openeo_driver.datacube import DriverDataCube, DriverVectorCube
from openeo_driver.datastructs import ResolutionMergeArgs, SarBackscatterArgs
from openeo_driver.delayed_vector import DelayedVector
from openeo_driver.dry_run import ProcessType
from openeo_driver.dry_run import ProcessType, DryRunDataCube, DryRunDataTracer
from openeo_driver.dummy import dummy_backend
from openeo_driver.dummy.dummy_backend import DummyVisitor
from openeo_driver.errors import (
Expand Down Expand Up @@ -1564,6 +1564,52 @@ def test_run_udf_on_list(api, udf_code):
assert resp.json == [1, 4, 9, 25, 64]


@pytest.mark.parametrize(
    "udf_code",
    [
        """
        from openeo.udf import UdfData, StructuredData
        def transform(data: UdfData) -> UdfData:
            return data
        """,
    ],
)
def test_run_udf_on_aggregate_spatial(api, udf_code):
    """run_udf applied to aggregate_spatial output should survive the dry run
    and produce per-date results (see #323/#324: the dry-run cube must keep
    flowing through run_udf instead of collapsing to an empty JSON result).
    """
    udf_code = textwrap.dedent(udf_code)
    path = get_path("geojson/FeatureCollection02.json")
    process_graph = {
        "lc": {"process_id": "load_collection", "arguments": {"id": "S2_FOOBAR", "bands": ["B02", "B03", "B04"]}},
        "lf": {
            "process_id": "load_uploaded_files",
            "arguments": {"paths": [str(path)], "format": "GeoJSON", "options": {"columns_for_cube": []}},
        },
        "ag": {
            "process_id": "aggregate_spatial",
            "arguments": {
                "data": {"from_node": "lc"},
                "geometries": {"from_node": "lf"},
                "reducer": {
                    "process_graph": {
                        "mean": {
                            "process_id": "mean",
                            "arguments": {"data": {"from_parameter": "data"}},
                            "result": True,
                        }
                    }
                },
            },
        },
        "udf": {
            "process_id": "run_udf",
            "arguments": {"data": {"from_node": "ag"}, "udf": udf_code, "runtime": "Python"},
            "result": True,
        },
    }
    resp = api.check_result(process_graph)
    # Expect timeseries keyed by observation date (dummy backend fixture dates).
    assert "2015-07-06T00:00:00Z" in resp.json
    assert "2015-08-22T00:00:00Z" in resp.json


@pytest.mark.parametrize(["runtime", "version", "failure"], [
("Python", None, None),
("pYthOn", None, None),
Expand Down