
Commit

fixup! Issue #442 make collection metadata based normalization optional
soxofaan committed Sep 14, 2023
1 parent e5d5b40 commit 1242e2e
Showing 4 changed files with 181 additions and 31 deletions.
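
The new tests below exercise the behaviour this fixup targets: when a collection is loaded without fetching its metadata, the client-side metadata checks are skipped and band or dimension names are forwarded to the process graph as given. A minimal sketch of that usage pattern follows; the backend URL is hypothetical, while the collection id, the fetch_metadata argument and the band name are taken from the fixtures and tests in this commit:

import openeo

# Hypothetical backend URL; "S2", fetch_metadata and band("banana") mirror the tests below.
connection = openeo.connect("https://openeo.example")

# Default: collection metadata is fetched, so an invalid band name fails client-side.
cube = connection.load_collection("S2")
# cube.band("banana")  # raises ValueError: Invalid band name/index 'banana'

# With metadata fetching disabled there is no client-side validation/normalization;
# the unchecked name ends up in the process graph unchanged.
cube = connection.load_collection("S2", fetch_metadata=None)
assert cube.metadata is None
cube = cube.band("banana")
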
7 changes: 7 additions & 0 deletions tests/rest/datacube/conftest.py
@@ -88,3 +88,10 @@ def con100(requests_mock, support_udp) -> Connection:
@pytest.fixture
def s2cube(connection, api_version) -> DataCube:
return connection.load_collection("S2")


@pytest.fixture
def s2cube_without_metadata(connection, api_version) -> DataCube:
cube = connection.load_collection("S2", fetch_metadata=None)
assert cube.metadata is None
return cube
25 changes: 25 additions & 0 deletions tests/rest/datacube/test_bandmath.py
@@ -366,3 +366,28 @@ def test_log3(con100):
'data/1.0.0/bm_log.json',
preprocess=lambda s: s.replace('"base": 10', '"base": 3')
)


def test_band_invalid_band_with_metadata(s2cube):
with pytest.raises(ValueError, match="Invalid band name/index 'banana'"):
_ = s2cube.band("banana")


def test_band_invalid_band_no_metadata(s2cube_without_metadata):
cube = s2cube_without_metadata.band("banana")
assert get_download_graph(cube)["reducedimension1"] == {
"process_id": "reduce_dimension",
"arguments": {
"data": {"from_node": "loadcollection1"},
"dimension": "bands",
"reducer": {
"process_graph": {
"arrayelement1": {
"arguments": {"data": {"from_parameter": "data"}, "index": "banana"},
"process_id": "array_element",
"result": True,
}
}
},
},
}
42 changes: 40 additions & 2 deletions tests/rest/datacube/test_datacube.py
@@ -29,9 +29,29 @@ def test_apply_dimension_temporal_cumsum(s2cube, api_version):
assert actual_graph == expected_graph


-def test_apply_dimension_invalid_dimension(s2cube):
+def test_apply_dimension_invalid_dimension_with_metadata(s2cube):
    with pytest.raises(ValueError, match="Invalid dimension"):
-        s2cube.apply_dimension('cumsum', dimension="olapola")
+        s2cube.apply_dimension("cumsum", dimension="olapola")


def test_apply_dimension_invalid_dimension_no_metadata(s2cube_without_metadata):
cube = s2cube_without_metadata.apply_dimension("cumsum", dimension="olapola")
assert get_download_graph(cube)["applydimension1"] == {
"process_id": "apply_dimension",
"arguments": {
"data": {"from_node": "loadcollection1"},
"dimension": "olapola",
"process": {
"process_graph": {
"cumsum1": {
"arguments": {"data": {"from_parameter": "data"}},
"process_id": "cumsum",
"result": True,
}
}
},
},
}


def test_min_time(s2cube, api_version):
Expand Down Expand Up @@ -166,6 +186,24 @@ def test_filter_bands_index(s2cube, api_version):
assert im.flat_graph() == expected


def test_filter_bands_invalid_bands_with_metadata(s2cube):
with pytest.raises(ValueError, match="Invalid band name/index 'apple'"):
_ = s2cube.filter_bands(["apple", "banana"])


def test_filter_bands_invalid_bands_without_metadata(s2cube_without_metadata):
cube = s2cube_without_metadata.filter_bands(["apple", "banana"])
assert get_download_graph(cube)["filterbands1"] == {
"process_id": "filter_bands",
"arguments": {"data": {"from_node": "loadcollection1"}, "bands": ["apple", "banana"]},
}
cube = cube.filter_bands(["banana"])
assert get_download_graph(cube)["filterbands2"] == {
"process_id": "filter_bands",
"arguments": {"data": {"from_node": "filterbands1"}, "bands": ["banana"]},
}


def test_filter_bbox_minimal(s2cube):
im = s2cube.filter_bbox(west=3.0, east=3.1, north=51.1, south=51.0)
graph = _get_leaf_node(im)
138 changes: 109 additions & 29 deletions tests/rest/datacube/test_datacube100.py
@@ -32,6 +32,7 @@
from openeo.rest.datacube import THIS, UDF, DataCube
from openeo.rest.vectorcube import VectorCube

from .. import get_download_graph
from ... import load_json_resource
from .conftest import API_URL, DEFAULT_S2_METADATA, setup_collection_metadata

Expand Down Expand Up @@ -980,10 +981,9 @@ def test_ndvi_args(con100: Connection):
assert ndvi.metadata.band_dimension.band_names == ["B02", "B03", "B04", "B08", "ndvii"]


-def test_rename_dimension(con100):
-    s2 = con100.load_collection("S2")
-    x = s2.rename_dimension(source="bands", target="ThisIsNotTheBandsDimension")
-    assert x.flat_graph() == {
+def test_rename_dimension(s2cube):
+    cube = s2cube.rename_dimension(source="bands", target="ThisIsNotTheBandsDimension")
+    assert cube.flat_graph() == {
"loadcollection1": {
"process_id": "load_collection",
"arguments": {"id": "S2", "spatial_extent": None, "temporal_extent": None},
@@ -1000,6 +1000,30 @@ def test_rename_dimension(con100):
}


def test_rename_dimension_invalid_dimension_with_metadata(s2cube):
with pytest.raises(ValueError, match="Invalid dimension 'applepie'."):
_ = s2cube.rename_dimension(source="applepie", target="icecream")


def test_rename_dimension_invalid_dimension_no_metadata(s2cube_without_metadata):
cube = s2cube_without_metadata.rename_dimension(source="applepie", target="icecream")
assert cube.flat_graph() == {
"loadcollection1": {
"process_id": "load_collection",
"arguments": {"id": "S2", "spatial_extent": None, "temporal_extent": None},
},
"renamedimension1": {
"process_id": "rename_dimension",
"arguments": {
"data": {"from_node": "loadcollection1"},
"source": "applepie",
"target": "icecream",
},
"result": True,
},
}


def test_add_dimension(con100):
s2 = con100.load_collection("S2")
x = s2.add_dimension(name="james_band", label="alpha")
Expand Down Expand Up @@ -1151,29 +1175,70 @@ def test_reduce_dimension_context(con100):
}}


-def test_reduce_bands(con100):
-    s2 = con100.load_collection("S2")
-    x = s2.reduce_bands(reducer="mean")
-    assert x.flat_graph() == {
-        'loadcollection1': {
-            'process_id': 'load_collection',
-            'arguments': {'id': 'S2', 'spatial_extent': None, 'temporal_extent': None},
-        },
-        'reducedimension1': {
-            'process_id': 'reduce_dimension',
-            'arguments': {
-                'data': {'from_node': 'loadcollection1'},
-                'dimension': 'bands',
-                'reducer': {'process_graph': {
-                    'mean1': {
-                        'process_id': 'mean',
-                        'arguments': {'data': {'from_parameter': 'data'}},
-                        'result': True
-                    }
-                }}
-            },
-            'result': True
-        }}
+def test_reduce_dimension_invalid_dimension_with_metadata(s2cube):
+    with pytest.raises(ValueError, match="ola"):
+        s2cube.reduce_dimension(dimension="olapola", reducer="mean")
+
+
+def test_reduce_dimension_invalid_dimension_no_metadata(s2cube_without_metadata):
+    cube = s2cube_without_metadata.reduce_dimension(dimension="olapola", reducer="mean")
+    assert get_download_graph(cube)["reducedimension1"] == {
+        "process_id": "reduce_dimension",
+        "arguments": {
+            "data": {"from_node": "loadcollection1"},
+            "dimension": "olapola",
+            "reducer": {
+                "process_graph": {
+                    "mean1": {"process_id": "mean", "arguments": {"data": {"from_parameter": "data"}}, "result": True}
+                }
+            },
+        },
+    }
+    cube = cube.reduce_dimension(dimension="jamanee", reducer="max")
+    assert get_download_graph(cube)["reducedimension2"] == {
+        "process_id": "reduce_dimension",
+        "arguments": {
+            "data": {"from_node": "reducedimension1"},
+            "dimension": "jamanee",
+            "reducer": {
+                "process_graph": {
+                    "max1": {"process_id": "max", "arguments": {"data": {"from_parameter": "data"}}, "result": True}
+                }
+            },
+        },
+    }


def test_reduce_bands(s2cube):
cube = s2cube.reduce_bands(reducer="mean")
assert get_download_graph(cube)["reducedimension1"] == {
"process_id": "reduce_dimension",
"arguments": {
"data": {"from_node": "loadcollection1"},
"dimension": "bands",
"reducer": {
"process_graph": {
"mean1": {"process_id": "mean", "arguments": {"data": {"from_parameter": "data"}}, "result": True}
}
},
},
}


def test_reduce_bands_no_metadata(s2cube_without_metadata):
cube = s2cube_without_metadata.reduce_bands(reducer="mean")
assert get_download_graph(cube)["reducedimension1"] == {
"process_id": "reduce_dimension",
"arguments": {
"data": {"from_node": "loadcollection1"},
"dimension": "bands",
"reducer": {
"process_graph": {
"mean1": {"process_id": "mean", "arguments": {"data": {"from_parameter": "data"}}, "result": True}
}
},
},
}


def test_reduce_bands_udf(con100):
Expand Down Expand Up @@ -1205,10 +1270,9 @@ def test_reduce_bands_udf(con100):
}}


-def test_reduce_temporal(con100):
-    s2 = con100.load_collection("S2")
-    x = s2.reduce_temporal(reducer="mean")
-    assert x.flat_graph() == {
+def test_reduce_temporal(s2cube):
+    cube = s2cube.reduce_temporal(reducer="mean")
+    assert cube.flat_graph() == {
'loadcollection1': {
'process_id': 'load_collection',
'arguments': {'id': 'S2', 'spatial_extent': None, 'temporal_extent': None},
@@ -1260,6 +1324,22 @@ def test_reduce_temporal_udf(con100):
}}


def test_reduce_temporal_without_metadata(s2cube_without_metadata):
cube = s2cube_without_metadata.reduce_temporal(reducer="mean")
assert get_download_graph(cube)["reducedimension1"] == {
"process_id": "reduce_dimension",
"arguments": {
"data": {"from_node": "loadcollection1"},
"dimension": "t",
"reducer": {
"process_graph": {
"mean1": {"process_id": "mean", "arguments": {"data": {"from_parameter": "data"}}, "result": True}
}
},
},
}


def test_chunk_polygon_basic(con100: Connection):
img = con100.load_collection("S2")
polygon: shapely.geometry.Polygon = shapely.geometry.box(0, 0, 1, 1)
