diff --git a/openeo/metadata.py b/openeo/metadata.py
index 43e39d206..c14050894 100644
--- a/openeo/metadata.py
+++ b/openeo/metadata.py
@@ -231,14 +231,6 @@ def __init__(self, metadata: dict, dimensions: List[Dimension] = None):
             if dim.type == "temporal":
                 self._temporal_dimension = dim
 
-    @classmethod
-    def get_or_create(cls, metadata: Union[dict, "CollectionMetadata", None]) -> CollectionMetadata:
-        """Get or create CollectionMetadata from given argument."""
-        if isinstance(metadata, cls):
-            return metadata
-        else:
-            return cls(metadata=metadata or {})
-
     def __eq__(self, o: Any) -> bool:
         return isinstance(o, CollectionMetadata) and self._dimensions == o._dimensions
 
@@ -348,15 +340,11 @@ def extent(self) -> dict:
     def dimension_names(self) -> List[str]:
         return list(d.name for d in self._dimensions)
 
-    def assert_valid_dimension(self, dimension: str, just_warn: bool = False) -> str:
+    def assert_valid_dimension(self, dimension: str) -> str:
         """Make sure given dimension name is valid."""
         names = self.dimension_names()
         if dimension not in names:
-            msg = f"Invalid dimension {dimension!r}. Should be one of {names}"
-            if just_warn:
-                _log.warning(msg)
-            else:
-                raise ValueError(msg)
+            raise ValueError(f"Invalid dimension {dimension!r}. Should be one of {names}")
         return dimension
 
     def has_band_dimension(self) -> bool:
@@ -397,6 +385,7 @@ def band_common_names(self) -> List[str]:
         return self.band_dimension.common_names
 
     def get_band_index(self, band: Union[int, str]) -> int:
+        # TODO: eliminate this shortcut for smaller API surface
         return self.band_dimension.band_index(band)
 
     def filter_bands(self, band_names: List[Union[int, str]]) -> CollectionMetadata:
@@ -452,6 +441,8 @@ def rename_dimension(self, source: str, target: str) -> CollectionMetadata:
     def reduce_dimension(self, dimension_name: str) -> CollectionMetadata:
         """Create new metadata object by collapsing/reducing a dimension."""
         # TODO: option to keep reduced dimension (with a single value)?
+        # TODO: rename argument to `name` for more internal consistency
+        # TODO: merge with drop_dimension (which does the same).
         self.assert_valid_dimension(dimension_name)
         loc = self.dimension_names().index(dimension_name)
         dimensions = self._dimensions[:loc] + self._dimensions[loc + 1:]
diff --git a/openeo/rest/connection.py b/openeo/rest/connection.py
index b765a5309..b7d50d482 100644
--- a/openeo/rest/connection.py
+++ b/openeo/rest/connection.py
@@ -1183,15 +1183,6 @@ def load_result(
         :return: a :py:class:`DataCube`
         """
         # TODO: add check that back-end supports `load_result` process?
-        metadata = CollectionMetadata(
-            {},
-            dimensions=[
-                SpatialDimension(name="x", extent=[]),
-                SpatialDimension(name="y", extent=[]),
-                TemporalDimension(name="t", extent=[]),
-                BandDimension(name="bands", bands=[Band(name="unknown")]),
-            ],
-        )
         cube = self.datacube_from_process(
             process_id="load_result",
             id=id,
@@ -1201,7 +1192,6 @@ def load_result(
                 bands=bands,
             ),
         )
-        cube.metadata = metadata
         return cube
 
     @openeo_process
@@ -1309,15 +1299,6 @@ def load_stac(
         """
         # TODO #425 move this implementation to `DataCube` and just forward here (like with `load_collection`)
         # TODO #425 detect actual metadata from URL
-        metadata = CollectionMetadata(
-            {},
-            dimensions=[
-                SpatialDimension(name="x", extent=[]),
-                SpatialDimension(name="y", extent=[]),
-                TemporalDimension(name="t", extent=[]),
-                BandDimension(name="bands", bands=[Band(name="unknown")]),
-            ],
-        )
         arguments = {"url": url}
         # TODO #425 more normalization/validation of extent/band parameters
         if spatial_extent:
@@ -1331,7 +1312,6 @@ def load_stac(
                 prop: build_child_callback(pred, parent_parameters=["value"]) for prop, pred in properties.items()
             }
         cube = self.datacube_from_process(process_id="load_stac", **arguments)
-        cube.metadata = metadata
         return cube
 
     def load_ml_model(self, id: Union[str, BatchJob]) -> MlModel:
diff --git a/openeo/rest/datacube.py b/openeo/rest/datacube.py
index c105b1e1e..74df68996 100644
--- a/openeo/rest/datacube.py
+++ b/openeo/rest/datacube.py
@@ -30,10 +30,7 @@
 from openeo.internal.warnings import UserDeprecationWarning, deprecated, legacy_alias
 from openeo.metadata import (
     Band,
-    BandDimension,
     CollectionMetadata,
-    SpatialDimension,
-    TemporalDimension,
 )
 from openeo.rest import BandMathException, OpenEoClientException, OperatorException
 from openeo.rest._datacube import (
@@ -66,8 +63,6 @@
 log = logging.getLogger(__name__)
 
 
-
-
 class DataCube(_ProcessGraphAbstraction):
     """
     Class representing a openEO (raster) data cube.
@@ -79,9 +74,9 @@ class DataCube(_ProcessGraphAbstraction):
     # TODO: set this based on back-end or user preference?
     _DEFAULT_RASTER_FORMAT = "GTiff"
 
-    def __init__(self, graph: PGNode, connection: Connection, metadata: CollectionMetadata = None):
+    def __init__(self, graph: PGNode, connection: Connection, metadata: Optional[CollectionMetadata] = None):
         super().__init__(pgnode=graph, connection=connection)
-        self.metadata = CollectionMetadata.get_or_create(metadata)
+        self.metadata: Optional[CollectionMetadata] = metadata
 
     def process(
         self,
@@ -118,6 +113,15 @@ def process_with_node(self, pg: PGNode, metadata: Optional[CollectionMetadata] =
         # TODO: deprecate `process_with_node``: little added value over just calling DataCube() directly
         return DataCube(graph=pg, connection=self._connection, metadata=metadata or self.metadata)
 
+    def _do_metadata_normalization(self) -> bool:
+        """Do metadata-based normalization/validation of dimension names, band names, ..."""
+        return isinstance(self.metadata, CollectionMetadata)
+
+    def _assert_valid_dimension_name(self, name: str) -> str:
+        if self._do_metadata_normalization():
+            self.metadata.assert_valid_dimension(name)
+        return name
+
     @classmethod
     @openeo_process
     def load_collection(
@@ -157,17 +161,15 @@ def load_collection(
         }
         if isinstance(collection_id, Parameter):
             fetch_metadata = False
-        metadata = connection.collection_metadata(collection_id) if fetch_metadata else None
+        metadata: Optional[CollectionMetadata] = (
+            connection.collection_metadata(collection_id) if fetch_metadata else None
+        )
         if bands:
             if isinstance(bands, str):
                 bands = [bands]
             if metadata:
                 bands = [b if isinstance(b, str) else metadata.band_dimension.band_name(b) for b in bands]
                 metadata = metadata.filter_bands(bands)
-            else:
-                # Ensure minimal metadata with best effort band dimension guess (based on `bands` argument).
-                band_dimension = BandDimension("bands", bands=[Band(name=b) for b in bands])
-                metadata = CollectionMetadata({}, dimensions=[band_dimension])
             arguments['bands'] = bands
         if max_cloud_cover:
             properties = properties or {}
@@ -216,17 +218,7 @@ def load_disk_collection(cls, connection: Connection, file_format: str, glob_pat
                 'options': options
             }
         )
-
-        metadata = CollectionMetadata(
-            {},
-            dimensions=[
-                SpatialDimension(name="x", extent=[]),
-                SpatialDimension(name="y", extent=[]),
-                TemporalDimension(name="t", extent=[]),
-                BandDimension(name="bands", bands=[Band(name="unknown")]),
-            ],
-        )
-        return cls(graph=pg, connection=connection, metadata=metadata)
+        return cls(graph=pg, connection=connection)
 
     @classmethod
     def _get_temporal_extent(
@@ -437,13 +429,13 @@ def filter_bands(self, bands: Union[List[Union[str, int]], str]) -> DataCube:
         """
         if isinstance(bands, str):
             bands = [bands]
-        bands = [self.metadata.band_dimension.band_name(b) for b in bands]
+        if self._do_metadata_normalization():
+            bands = [self.metadata.band_dimension.band_name(b) for b in bands]
         cube = self.process(
             process_id="filter_bands",
             arguments={"data": THIS, "bands": bands},
+            metadata=self.metadata.filter_bands(bands) if self.metadata else None,
         )
-        if cube.metadata:
-            cube.metadata = cube.metadata.filter_bands(bands)
         return cube
 
     band_filter = legacy_alias(filter_bands, "band_filter", since="0.1.0")
@@ -455,14 +447,14 @@ def band(self, band: Union[str, int]) -> DataCube:
         :param band: band name, band common name or band index.
         :return: a DataCube instance
         """
-        band_index = self.metadata.get_band_index(band)
-        return self.reduce_bands(reducer=PGNode(
-            process_id='array_element',
-            arguments={
-                'data': {'from_parameter': 'data'},
-                'index': band_index
-            },
-        ))
+        if self._do_metadata_normalization():
+            band = self.metadata.band_dimension.band_index(band)
+        return self.reduce_bands(
+            reducer=PGNode(
+                process_id="array_element",
+                arguments={"data": {"from_parameter": "data"}, "index": band},
+            )
+        )
 
     @openeo_process
     def resample_spatial(
@@ -1084,7 +1076,7 @@ def apply_dimension(
         arguments = {
             "data": THIS,
             "process": process,
-            "dimension": self.metadata.assert_valid_dimension(dimension),
+            "dimension": self._assert_valid_dimension_name(dimension),
         }
         if target_dimension is not None:
             arguments["target_dimension"] = target_dimension
@@ -1129,15 +1121,18 @@ def reduce_dimension(
             process=reducer, parent_parameters=["data", "context"], connection=self.connection
         )
 
-        return self.process_with_node(ReduceNode(
-            process_id=process_id,
-            data=self,
-            reducer=reducer,
-            dimension=self.metadata.assert_valid_dimension(dimension),
-            context=context,
-            # TODO #123 is it (still) necessary to make "band" math a special case?
-            band_math_mode=band_math_mode
-        ), metadata=self.metadata.reduce_dimension(dimension_name=dimension))
+        return self.process_with_node(
+            ReduceNode(
+                process_id=process_id,
+                data=self,
+                reducer=reducer,
+                dimension=self._assert_valid_dimension_name(dimension),
+                context=context,
+                # TODO #123 is it (still) necessary to make "band" math a special case?
+                band_math_mode=band_math_mode,
+            ),
+            metadata=self.metadata.reduce_dimension(dimension_name=dimension) if self.metadata else None,
+        )
 
     # @openeo_process
     def chunk_polygon(
@@ -1189,7 +1184,11 @@ def reduce_bands(self, reducer: Union[str, PGNode, typing.Callable, UDF]) -> Dat
 
         :param reducer: "child callback" function, see :ref:`callbackfunctions`
         """
-        return self.reduce_dimension(dimension=self.metadata.band_dimension.name, reducer=reducer, band_math_mode=True)
+        return self.reduce_dimension(
+            dimension=self.metadata.band_dimension.name if self.metadata else "bands",
+            reducer=reducer,
+            band_math_mode=True,
+        )
 
     def reduce_temporal(self, reducer: Union[str, PGNode, typing.Callable, UDF]) -> DataCube:
         """
@@ -1197,7 +1196,10 @@ def reduce_temporal(self, reducer: Union[str, PGNode, typing.Callable, UDF]) ->
 
         :param reducer: "child callback" function, see :ref:`callbackfunctions`
         """
-        return self.reduce_dimension(dimension=self.metadata.temporal_dimension.name, reducer=reducer)
+        return self.reduce_dimension(
+            dimension=self.metadata.temporal_dimension.name if self.metadata else "t",
+            reducer=reducer,
+        )
 
     @deprecated(
         "Use :py:meth:`reduce_bands` with :py:class:`UDF <openeo.UDF>` as reducer.",
@@ -1227,7 +1229,7 @@ def add_dimension(self, name: str, label: str, type: Optional[str] = None):
         return self.process(
             process_id="add_dimension",
             arguments=dict_no_none({"data": self, "name": name, "label": label, "type": type}),
-            metadata=self.metadata.add_dimension(name=name, label=label, type=type)
+            metadata=self.metadata.add_dimension(name=name, label=label, type=type) if self.metadata else None,
         )
 
     @openeo_process
@@ -1245,7 +1247,7 @@ def drop_dimension(self, name: str):
         return self.process(
             process_id="drop_dimension",
             arguments={"data": self, "name": name},
-            metadata=self.metadata.drop_dimension(name=name),
+            metadata=self.metadata.drop_dimension(name=name) if self.metadata else None,
         )
 
     @deprecated(
@@ -1500,9 +1502,12 @@ def ndvi(self, nir: str = None, red: str = None, target_band: str = None) -> Dat
 
         :return: a DataCube instance
         """
-        if target_band is None:
+        if self.metadata is None:
+            metadata = None
+        elif target_band is None:
             metadata = self.metadata.reduce_dimension(self.metadata.band_dimension.name)
         else:
+            # TODO: first drop "bands" dim and re-add it with single "ndvi" band
             metadata = self.metadata.append_band(Band(name=target_band, common_name="ndvi"))
         return self.process(
             process_id="ndvi",
@@ -1522,16 +1527,16 @@ def rename_dimension(self, source: str, target: str):
 
         :return: A new datacube with the dimension renamed.
         """
-        if target in self.metadata.dimension_names():
+        if self._do_metadata_normalization() and target in self.metadata.dimension_names():
             raise ValueError('Target dimension name conflicts with existing dimension: %s.' % target)
         return self.process(
             process_id="rename_dimension",
             arguments=dict_no_none(
                 data=THIS,
-                source=self.metadata.assert_valid_dimension(source),
+                source=self._assert_valid_dimension_name(source),
                 target=target,
             ),
-            metadata=self.metadata.rename_dimension(source, target),
+            metadata=self.metadata.rename_dimension(source, target) if self.metadata else None,
         )
 
     @openeo_process
@@ -1549,11 +1554,11 @@ def rename_labels(self, dimension: str, target: list, source: list = None) -> Da
             process_id="rename_labels",
             arguments=dict_no_none(
                 data=THIS,
-                dimension=self.metadata.assert_valid_dimension(dimension),
+                dimension=self._assert_valid_dimension_name(dimension),
                 target=target,
                 source=source,
             ),
-            metadata=self.metadata.rename_labels(dimension, target, source),
+            metadata=self.metadata.rename_labels(dimension, target, source) if self.metadata else None,
         )
 
     @openeo_process(mode="apply")
@@ -1670,12 +1675,20 @@ def merge_cubes(
         arguments = {"cube1": self, "cube2": other}
         if overlap_resolver:
             arguments["overlap_resolver"] = build_child_callback(overlap_resolver, parent_parameters=["x", "y"])
-        # Minimal client side metadata merging
-        merged_metadata = self.metadata
-        if self.metadata.has_band_dimension() and isinstance(other, DataCube) and other.metadata.has_band_dimension():
+        if (
+            self.metadata
+            and self.metadata.has_band_dimension()
+            and isinstance(other, DataCube)
+            and other.metadata
+            and other.metadata.has_band_dimension()
+        ):
+            # Minimal client side metadata merging
+            merged_metadata = self.metadata
             for b in other.metadata.band_dimension.bands:
                 if b not in merged_metadata.bands:
                     merged_metadata = merged_metadata.append_band(b)
+        else:
+            merged_metadata = None
         # Overlapping bands without overlap resolver will give an error in the backend
         if context:
             arguments["context"] = context
@@ -1755,8 +1768,7 @@ def raster_to_vector(self) -> VectorCube:
 
         :return: a :py:class:`~openeo.rest.vectorcube.VectorCube`
         """
         pg_node = PGNode(process_id="raster_to_vector", arguments={"data": self})
-        # TODO: properly update metadata (e.g. "geometry" dimension) related to #457
-        return VectorCube(pg_node, connection=self._connection, metadata=self.metadata)
+        return VectorCube(pg_node, connection=self._connection)
 
     ####VIEW methods #######
@@ -2330,9 +2342,10 @@ def dimension_labels(self, dimension: str) -> DataCube:
 
         :param dimension: The name of the dimension to get the labels for.
         """
-        dimension_names = self.metadata.dimension_names()
-        if dimension_names and dimension not in dimension_names:
-            raise ValueError(f"Invalid dimension name {dimension!r}, should be one of {dimension_names}")
+        if self._do_metadata_normalization():
+            dimension_names = self.metadata.dimension_names()
+            if dimension_names and dimension not in dimension_names:
+                raise ValueError(f"Invalid dimension name {dimension!r}, should be one of {dimension_names}")
         return self.process(process_id="dimension_labels", arguments={"data": THIS, "dimension": dimension})
 
     @openeo_process
diff --git a/openeo/rest/vectorcube.py b/openeo/rest/vectorcube.py
index 74c02cbe4..0f3cc7eac 100644
--- a/openeo/rest/vectorcube.py
+++ b/openeo/rest/vectorcube.py
@@ -7,7 +7,6 @@
 
 import shapely.geometry.base
 
-import openeo
 from openeo.api.process import Parameter
 from openeo.internal.documentation import openeo_process
 from openeo.internal.graph_building import PGNode
@@ -37,9 +36,9 @@ class VectorCube(_ProcessGraphAbstraction):
     A geometry is specified in a 'coordinate reference system'. https://www.w3.org/TR/sdw-bp/#dfn-coordinate-reference-system-(crs)
     """
 
-    def __init__(self, graph: PGNode, connection: Connection, metadata: CollectionMetadata = None):
+    def __init__(self, graph: PGNode, connection: Connection, metadata: Optional[CollectionMetadata] = None):
         super().__init__(pgnode=graph, connection=connection)
-        self.metadata = metadata or self._build_metadata()
+        self.metadata = metadata
 
     @classmethod
     def _build_metadata(cls, add_properties: bool = False) -> CollectionMetadata:
@@ -48,7 +47,7 @@ def _build_metadata(cls, add_properties: bool = False) -> CollectionMetadata:
         dimensions = [Dimension(name="geometry", type="geometry")]
         if add_properties:
             dimensions.append(Dimension(name="properties", type="other"))
-        # TODO: use a more generic metadata container than "collection" metadata
+        # TODO #464: use a more generic metadata container than "collection" metadata
         return CollectionMetadata(metadata={}, dimensions=dimensions)
 
     def process(
@@ -533,8 +532,7 @@ def apply_dimension(
             {
                 "data": THIS,
                 "process": process,
-                # TODO: drop `just_warn`?
-                "dimension": self.metadata.assert_valid_dimension(dimension, just_warn=True),
+                "dimension": dimension,
                 "target_dimension": target_dimension,
                 "context": context,
             }
diff --git a/tests/rest/conftest.py b/tests/rest/conftest.py
index 7e3df403f..3914168ff 100644
--- a/tests/rest/conftest.py
+++ b/tests/rest/conftest.py
@@ -1,8 +1,6 @@
 import contextlib
 import re
-import time
 import typing
-from typing import List
 from unittest import mock
 
 import pytest
diff --git a/tests/rest/datacube/conftest.py b/tests/rest/datacube/conftest.py
index b8bebf0e0..158807d75 100644
--- a/tests/rest/datacube/conftest.py
+++ b/tests/rest/datacube/conftest.py
@@ -88,3 +88,10 @@ def con100(requests_mock, support_udp) -> Connection:
 @pytest.fixture
 def s2cube(connection, api_version) -> DataCube:
     return connection.load_collection("S2")
+
+
+@pytest.fixture
+def s2cube_without_metadata(connection, api_version) -> DataCube:
+    cube = connection.load_collection("S2", fetch_metadata=None)
+    assert cube.metadata is None
+    return cube
diff --git a/tests/rest/datacube/test_bandmath.py b/tests/rest/datacube/test_bandmath.py
index e2fea5a5b..6571e900c 100644
--- a/tests/rest/datacube/test_bandmath.py
+++ b/tests/rest/datacube/test_bandmath.py
@@ -366,3 +366,28 @@ def test_log3(con100):
         'data/1.0.0/bm_log.json',
         preprocess=lambda s: s.replace('"base": 10', '"base": 3')
     )
+
+
+def test_band_invalid_band_with_metadata(s2cube):
+    with pytest.raises(ValueError, match="Invalid band name/index 'banana'"):
+        _ = s2cube.band("banana")
+
+
+def test_band_invalid_band_no_metadata(s2cube_without_metadata):
+    cube = s2cube_without_metadata.band("banana")
+    assert get_download_graph(cube)["reducedimension1"] == {
+        "process_id": "reduce_dimension",
+        "arguments": {
+            "data": {"from_node": "loadcollection1"},
+            "dimension": "bands",
+            "reducer": {
+                "process_graph": {
+                    "arrayelement1": {
+                        "arguments": {"data": {"from_parameter": "data"}, "index": "banana"},
+                        "process_id": "array_element",
+                        "result": True,
+                    }
+                }
+            },
+        },
+    }
diff --git a/tests/rest/datacube/test_datacube.py b/tests/rest/datacube/test_datacube.py
index 1198076e2..1cfb51495 100644
--- a/tests/rest/datacube/test_datacube.py
+++ b/tests/rest/datacube/test_datacube.py
@@ -13,7 +13,6 @@
 import shapely
 import shapely.geometry
 
-from openeo.capabilities import ComparableVersion
 from openeo.rest import BandMathException
 from openeo.rest.datacube import DataCube
 
@@ -29,9 +28,29 @@ def test_apply_dimension_temporal_cumsum(s2cube, api_version):
     assert actual_graph == expected_graph
 
 
-def test_apply_dimension_invalid_dimension(s2cube):
+def test_apply_dimension_invalid_dimension_with_metadata(s2cube):
     with pytest.raises(ValueError, match="Invalid dimension"):
-        s2cube.apply_dimension('cumsum', dimension="olapola")
+        s2cube.apply_dimension("cumsum", dimension="olapola")
+
+
+def test_apply_dimension_invalid_dimension_no_metadata(s2cube_without_metadata):
+    cube = s2cube_without_metadata.apply_dimension("cumsum", dimension="olapola")
+    assert get_download_graph(cube)["applydimension1"] == {
+        "process_id": "apply_dimension",
+        "arguments": {
+            "data": {"from_node": "loadcollection1"},
+            "dimension": "olapola",
+            "process": {
+                "process_graph": {
+                    "cumsum1": {
+                        "arguments": {"data": {"from_parameter": "data"}},
+                        "process_id": "cumsum",
+                        "result": True,
+                    }
+                }
+            },
+        },
+    }
 
 
 def test_min_time(s2cube, api_version):
@@ -166,6 +185,24 @@ def test_filter_bands_index(s2cube, api_version):
     assert im.flat_graph() == expected
 
 
+def test_filter_bands_invalid_bands_with_metadata(s2cube):
+    with pytest.raises(ValueError, match="Invalid band name/index 'apple'"):
+        _ = s2cube.filter_bands(["apple", "banana"])
+
+
+def test_filter_bands_invalid_bands_without_metadata(s2cube_without_metadata):
+    cube = s2cube_without_metadata.filter_bands(["apple", "banana"])
+    assert get_download_graph(cube)["filterbands1"] == {
+        "process_id": "filter_bands",
+        "arguments": {"data": {"from_node": "loadcollection1"}, "bands": ["apple", "banana"]},
+    }
+    cube = cube.filter_bands(["banana"])
+    assert get_download_graph(cube)["filterbands2"] == {
+        "process_id": "filter_bands",
+        "arguments": {"data": {"from_node": "filterbands1"}, "bands": ["banana"]},
+    }
+
+
 def test_filter_bbox_minimal(s2cube):
     im = s2cube.filter_bbox(west=3.0, east=3.1, north=51.1, south=51.0)
     graph = _get_leaf_node(im)
diff --git a/tests/rest/datacube/test_datacube100.py b/tests/rest/datacube/test_datacube100.py
index 34892c2ee..451091c97 100644
--- a/tests/rest/datacube/test_datacube100.py
+++ b/tests/rest/datacube/test_datacube100.py
@@ -9,7 +9,6 @@
 import json
 import pathlib
 import re
-import sys
 import textwrap
 from typing import Optional
 
@@ -30,9 +29,9 @@
 from openeo.rest._testing import build_capabilities
 from openeo.rest.connection import Connection
 from openeo.rest.datacube import THIS, UDF, DataCube
-from openeo.rest.vectorcube import VectorCube
 
 from ... import load_json_resource
+from .. import get_download_graph
 from .conftest import API_URL, DEFAULT_S2_METADATA, setup_collection_metadata
 
 basic_geometry_types = [
@@ -980,10 +979,9 @@ def test_ndvi_args(con100: Connection):
     assert ndvi.metadata.band_dimension.band_names == ["B02", "B03", "B04", "B08", "ndvii"]
 
 
-def test_rename_dimension(con100):
-    s2 = con100.load_collection("S2")
-    x = s2.rename_dimension(source="bands", target="ThisIsNotTheBandsDimension")
-    assert x.flat_graph() == {
+def test_rename_dimension(s2cube):
+    cube = s2cube.rename_dimension(source="bands", target="ThisIsNotTheBandsDimension")
+    assert cube.flat_graph() == {
         "loadcollection1": {
             "process_id": "load_collection",
             "arguments": {"id": "S2", "spatial_extent": None, "temporal_extent": None},
@@ -1000,6 +998,30 @@
     }
 
 
+def test_rename_dimension_invalid_dimension_with_metadata(s2cube):
+    with pytest.raises(ValueError, match="Invalid dimension 'applepie'."):
+        _ = s2cube.rename_dimension(source="applepie", target="icecream")
+
+
+def test_rename_dimension_invalid_dimension_no_metadata(s2cube_without_metadata):
+    cube = s2cube_without_metadata.rename_dimension(source="applepie", target="icecream")
+    assert cube.flat_graph() == {
+        "loadcollection1": {
+            "process_id": "load_collection",
+            "arguments": {"id": "S2", "spatial_extent": None, "temporal_extent": None},
+        },
+        "renamedimension1": {
+            "process_id": "rename_dimension",
+            "arguments": {
+                "data": {"from_node": "loadcollection1"},
+                "source": "applepie",
+                "target": "icecream",
+            },
+            "result": True,
+        },
+    }
+
+
 def test_add_dimension(con100):
     s2 = con100.load_collection("S2")
     x = s2.add_dimension(name="james_band", label="alpha")
@@ -1151,29 +1173,70 @@ def test_reduce_dimension_context(con100):
         }}
 
 
-def test_reduce_bands(con100):
-    s2 = con100.load_collection("S2")
-    x = s2.reduce_bands(reducer="mean")
-    assert x.flat_graph() == {
-        'loadcollection1': {
-            'process_id': 'load_collection',
-            'arguments': {'id': 'S2', 'spatial_extent': None, 'temporal_extent': None},
+def test_reduce_dimension_invalid_dimension_with_metadata(s2cube):
+    with pytest.raises(ValueError, match="ola"):
+        s2cube.reduce_dimension(dimension="olapola", reducer="mean")
+
+
+def test_reduce_dimension_invalid_dimension_no_metadata(s2cube_without_metadata):
+    cube = s2cube_without_metadata.reduce_dimension(dimension="olapola", reducer="mean")
+    assert get_download_graph(cube)["reducedimension1"] == {
+        "process_id": "reduce_dimension",
+        "arguments": {
+            "data": {"from_node": "loadcollection1"},
+            "dimension": "olapola",
+            "reducer": {
+                "process_graph": {
+                    "mean1": {"process_id": "mean", "arguments": {"data": {"from_parameter": "data"}}, "result": True}
+                }
+            },
         },
-        'reducedimension1': {
-            'process_id': 'reduce_dimension',
-            'arguments': {
-                'data': {'from_node': 'loadcollection1'},
-                'dimension': 'bands',
-                'reducer': {'process_graph': {
-                    'mean1': {
-                        'process_id': 'mean',
-                        'arguments': {'data': {'from_parameter': 'data'}},
-                        'result': True
-                    }
-                }}
+    }
+    cube = cube.reduce_dimension(dimension="jamanee", reducer="max")
+    assert get_download_graph(cube)["reducedimension2"] == {
+        "process_id": "reduce_dimension",
+        "arguments": {
+            "data": {"from_node": "reducedimension1"},
+            "dimension": "jamanee",
+            "reducer": {
+                "process_graph": {
+                    "max1": {"process_id": "max", "arguments": {"data": {"from_parameter": "data"}}, "result": True}
+                }
             },
-            'result': True
-        }}
+        },
+    }
+
+
+def test_reduce_bands(s2cube):
+    cube = s2cube.reduce_bands(reducer="mean")
+    assert get_download_graph(cube)["reducedimension1"] == {
+        "process_id": "reduce_dimension",
+        "arguments": {
+            "data": {"from_node": "loadcollection1"},
+            "dimension": "bands",
+            "reducer": {
+                "process_graph": {
+                    "mean1": {"process_id": "mean", "arguments": {"data": {"from_parameter": "data"}}, "result": True}
+                }
+            },
+        },
+    }
+
+
+def test_reduce_bands_no_metadata(s2cube_without_metadata):
+    cube = s2cube_without_metadata.reduce_bands(reducer="mean")
+    assert get_download_graph(cube)["reducedimension1"] == {
+        "process_id": "reduce_dimension",
+        "arguments": {
+            "data": {"from_node": "loadcollection1"},
+            "dimension": "bands",
+            "reducer": {
+                "process_graph": {
+                    "mean1": {"process_id": "mean", "arguments": {"data": {"from_parameter": "data"}}, "result": True}
+                }
+            },
+        },
+    }
 
 
 def test_reduce_bands_udf(con100):
@@ -1205,10 +1268,9 @@ def test_reduce_bands_udf(con100):
         }}
 
 
-def test_reduce_temporal(con100):
-    s2 = con100.load_collection("S2")
-    x = s2.reduce_temporal(reducer="mean")
-    assert x.flat_graph() == {
+def test_reduce_temporal(s2cube):
+    cube = s2cube.reduce_temporal(reducer="mean")
+    assert cube.flat_graph() == {
         'loadcollection1': {
             'process_id': 'load_collection',
             'arguments': {'id': 'S2', 'spatial_extent': None, 'temporal_extent': None},
@@ -1260,6 +1322,22 @@ def test_reduce_temporal_udf(con100):
         }}
 
 
+def test_reduce_temporal_without_metadata(s2cube_without_metadata):
+    cube = s2cube_without_metadata.reduce_temporal(reducer="mean")
+    assert get_download_graph(cube)["reducedimension1"] == {
+        "process_id": "reduce_dimension",
+        "arguments": {
+            "data": {"from_node": "loadcollection1"},
+            "dimension": "t",
+            "reducer": {
+                "process_graph": {
+                    "mean1": {"process_id": "mean", "arguments": {"data": {"from_parameter": "data"}}, "result": True}
+                }
+            },
+        },
+    }
+
+
 def test_chunk_polygon_basic(con100: Connection):
     img = con100.load_collection("S2")
     polygon: shapely.geometry.Polygon = shapely.geometry.box(0, 0, 1, 1)
diff --git a/tests/rest/datacube/test_vectorcube.py b/tests/rest/datacube/test_vectorcube.py
index 3f2ea69d3..10d490fd3 100644
--- a/tests/rest/datacube/test_vectorcube.py
+++ b/tests/rest/datacube/test_vectorcube.py
@@ -6,7 +6,6 @@
 
 import openeo.processes
 from openeo.api.process import Parameter
-from openeo.internal.graph_building import PGNode
 from openeo.rest._testing import DummyBackend
 from openeo.rest.vectorcube import VectorCube
 from openeo.util import InvalidBBoxException
@@ -272,14 +271,13 @@ def test_load_url(con100, dummy_backend):
 
 
 @pytest.mark.parametrize(
-    ["dimension", "expect_warning"],
+    ["dimension"],
     [
-        ("geometry", False),
-        ("geometries", True),
-        ("wibbles", True),
+        ("geometry",),
+        ("geometries",),
     ],
 )
-def test_apply_dimension(vector_cube, dummy_backend, dimension, expect_warning, caplog):
+def test_apply_dimension(vector_cube, dummy_backend, dimension, caplog):
     vc = vector_cube.apply_dimension("sort", dimension=dimension)
     assert dummy_backend.execute(vc, process_id="apply_dimension") == {
         "process_id": "apply_dimension",
@@ -299,9 +297,6 @@ def test_apply_dimension(vector_cube, dummy_backend, dimension, expect_warning,
         "result": True,
     }
 
-    assert (
-        f"Invalid dimension {dimension!r}. Should be one of ['geometry', 'properties']" in caplog.messages
-    ) == expect_warning
 
 
 def test_filter_bands(vector_cube, dummy_backend):
diff --git a/tests/rest/test_connection.py b/tests/rest/test_connection.py
index b2d88fa78..09a97ac25 100644
--- a/tests/rest/test_connection.py
+++ b/tests/rest/test_connection.py
@@ -15,7 +15,7 @@
 import shapely.geometry
 
 import openeo
-from openeo.capabilities import ApiVersionException, ComparableVersion
+from openeo.capabilities import ApiVersionException
 from openeo.internal.compat import nullcontext
 from openeo.internal.graph_building import FlatGraphableMixin, PGNode
 from openeo.rest import CapabilitiesException, OpenEoApiError, OpenEoClientException, OpenEoRestError
@@ -2299,9 +2299,6 @@ def test_load_result(requests_mock):
     requests_mock.get(API_URL, json={"api_version": "1.0.0"})
     con = Connection(API_URL)
     cube = con.load_result("j0bi6")
-    assert cube.metadata.has_band_dimension()
-    assert cube.metadata.has_temporal_dimension()
-    assert len(cube.metadata.spatial_dimensions)==2
    assert cube.flat_graph() == {
         "loadresult1": {"process_id": "load_result", "arguments": {"id": "j0bi6"}, "result": True}
     }