From 97da6d27ce2c69f4a335f0ad756102fe8259df73 Mon Sep 17 00:00:00 2001
From: Stefaan Lippens
Date: Tue, 7 Mar 2023 10:31:28 +0100
Subject: [PATCH] Issue #134/#100 Drop 0.4.0-style ImageCollectionClient

---
 CHANGELOG.md                               |    4 +
 examples/mundialis_extended.py             |    3 +-
 examples/mundialis_mini.py                 |    6 +-
 examples/py3_process_wrapper-wcps_eurac.py |    2 +-
 openeo/_version.py                         |    2 +-
 openeo/internal/graphbuilder_040.py        |  153 ---
 openeo/rest/connection.py                  |   23 +-
 openeo/rest/imagecollectionclient.py       | 1046 --------------------
 tests/internal/test_graphbuilder_040.py    |  115 ---
 tests/rest/__init__.py                     |    6 +-
 tests/rest/conftest.py                     |   15 +-
 tests/rest/datacube/conftest.py            |    7 -
 tests/rest/datacube/test_bandmath.py       |   21 -
 tests/rest/datacube/test_datacube.py       |   70 +-
 tests/rest/test_connection.py              |   21 -
 tests/rest/test_imagecollectionclient.py   |   87 --
 tests/rest/test_job.py                     |    1 +
 tests/rest/test_job_results.py             |    1 +
 18 files changed, 24 insertions(+), 1559 deletions(-)
 delete mode 100644 openeo/internal/graphbuilder_040.py
 delete mode 100644 openeo/rest/imagecollectionclient.py
 delete mode 100644 tests/internal/test_graphbuilder_040.py
 delete mode 100644 tests/rest/test_imagecollectionclient.py

diff --git a/CHANGELOG.md b/CHANGELOG.md
index eceb2ee5e..18ad98d5b 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -20,6 +20,10 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 
 ### Removed
 
+- Remove `ImageCollectionClient` (and related helpers),
+  a now unused leftover from the pre-1.0.0 versions of the openEO API
+  ([#134](https://github.com/Open-EO/openeo-python-client/issues/134), [#100](https://github.com/Open-EO/openeo-python-client/issues/100))
+
 ### Fixed
 
diff --git a/examples/mundialis_extended.py b/examples/mundialis_extended.py
index 4a3be2c59..e22f73009 100644
--- a/examples/mundialis_extended.py
+++ b/examples/mundialis_extended.py
@@ -80,8 +80,7 @@ def execute(
         image_collection,
         json.dumps(connection.describe_collection(image_collection), indent=2),
     )
-    # TODO: change to con.load_collection()
-    cube = ImageCollectionClient.load_collection(session = connection, collection_id = image_collection, bands = all_bands)
+    cube = connection.load_collection(collection_id=image_collection, bands=all_bands)
 
     cube = cube.filter_bbox( **bbox)
     cube = cube.filter_temporal(extent=temporal_extent)
diff --git a/examples/mundialis_mini.py b/examples/mundialis_mini.py
index d3336e640..952c1596d 100644
--- a/examples/mundialis_mini.py
+++ b/examples/mundialis_mini.py
@@ -20,11 +20,9 @@
 spectral_extent = ["S2_8", "S2_4", "S2_2"]
 
 # connect to mundialis backend
-session = openeo.connect(backend_url).authenticate_basic()
+connection = openeo.connect(backend_url).authenticate_basic()
 
-# TODO change to s2_radiometry = session.load_collection( ...)
-s2_radiometry = ImageCollectionClient.load_collection( - session=session, +s2_radiometry = connection.load_collection( collection_id=collection_id, temporal_extent=temporal_extent, spatial_extent=spatial_extent diff --git a/examples/py3_process_wrapper-wcps_eurac.py b/examples/py3_process_wrapper-wcps_eurac.py index 763dea36d..f18df6f52 100644 --- a/examples/py3_process_wrapper-wcps_eurac.py +++ b/examples/py3_process_wrapper-wcps_eurac.py @@ -78,7 +78,7 @@ def execute( image_collection, json.dumps(connection.describe_collection(image_collection), indent=2), ) - cube = ImageCollectionClient.load_collection(session = connection, collection_id = image_collection, bands = all_bands) + cube = connection.load_collection(collection_id=image_collection, bands=all_bands) cube = cube.filter_bbox( **bbox) cube = cube.filter_temporal(extent=temporal_extent) diff --git a/openeo/_version.py b/openeo/_version.py index 97c2dfd1c..a46e27564 100644 --- a/openeo/_version.py +++ b/openeo/_version.py @@ -1 +1 @@ -__version__ = "0.15.1a1" +__version__ = "0.16.0a1" diff --git a/openeo/internal/graphbuilder_040.py b/openeo/internal/graphbuilder_040.py deleted file mode 100644 index d01941018..000000000 --- a/openeo/internal/graphbuilder_040.py +++ /dev/null @@ -1,153 +0,0 @@ -import copy -from typing import Dict, Union - - -class GraphBuilder(): - - #id_counter is a class level field, this way we ensure that id's are unique, and don't have to make them unique when merging graphs - id_counter = {} - - def __init__(self, graph = None): - """ - Create a process graph builder. - If a graph is provided, its nodes will be added to this builder, this does not necessarily preserve id's of the nodes. - - :param graph: Dict : Optional, existing process graph - """ - self.processes = {} - - if graph is not None: - self._merge_processes(graph) - - def copy(self,return_key_map=False): - the_copy = GraphBuilder() - return the_copy._merge_processes(self.processes,return_key_map=return_key_map) - - def shallow_copy(self): - """ - Copy, but don't update keys - :return: - """ - the_copy = GraphBuilder() - the_copy.processes = copy.deepcopy(self.processes) - return the_copy - - @classmethod - def from_process_graph(cls,graph:Dict): - builder = GraphBuilder() - builder.processes = copy.deepcopy(graph) - return builder - - - def add_process(self,process_id,result=None, **args): - process_id = self.process(process_id, args) - if result is not None: - self.processes[process_id]["result"] = result - return process_id - - def process(self,process_id, args): - """ - Add a process and return the id. Do not add a new process if it already exists in the graph. 
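# A minimal sketch of the id scheme described in the docstring above, assuming
# a fresh GraphBuilder.id_counter: _generate_id() strips underscores from the
# process name and appends a per-name counter, so node ids stay unique even
# when graphs are merged.
#
#     builder = GraphBuilder()
#     builder.process("load_collection", {"id": "S2"})   # -> "loadcollection1"
#     builder.process("load_collection", {"id": "S1"})   # -> "loadcollection2"
#
# The counter lives on the class, not the instance, which is why the test
# suite had to reset GraphBuilder.id_counter between test cases.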
- - :param process_id: - :param args: - :return: - """ - new_process = { - 'process_id': process_id, - 'arguments': args, - 'result': False - } - #try: - # existing_id = list(self.processes.keys())[list(self.processes.values()).index(new_process)] - # return existing_id - #except ValueError as e: - # pass - id = self._generate_id(process_id) - self.processes[id] = new_process - return id - - def _generate_id(self,name:str): - name = name.replace("_","") - if( not GraphBuilder.id_counter.get(name)): - GraphBuilder.id_counter[name] = 1 - else: - GraphBuilder.id_counter[name] += 1 - return name + str(GraphBuilder.id_counter[name]) - - def merge(self, other: 'GraphBuilder'): - return GraphBuilder.from_process_graph(self.processes)._merge_processes(other.processes) - - def _merge_processes(self, processes: Dict, return_key_map=False): - # Maps original node key to new key in merged result - key_map = {} - node_refs = [] - for key,process in sorted(processes.items()): - process_id = process['process_id'] - args = process['arguments'] - result = process.get('result', None) - args_copy = copy.deepcopy(args) - id = self.process(process_id, args_copy) - if id != key: - key_map[key] = id - node_refs += self._extract_node_references(args_copy) - - if result is not None: - self.processes[id]['result'] = result - - for node_ref in node_refs: - old_node_id = node_ref['from_node'] - if old_node_id in key_map: - node_ref['from_node'] = key_map[old_node_id] - - if return_key_map: - return self, key_map - else: - return self - - def _extract_node_references(self, arguments): - node_ref_list = [] - for argument in arguments.values(): - if isinstance(argument, dict): - if 'from_node' in argument: - node_ref_list.append(argument) - if isinstance(argument,list): - for element in argument: - if isinstance(element, dict): - if 'from_node' in element: - node_ref_list.append(element) - return node_ref_list - - def find_result_node_id(self): - result_node_ids = [k for k,v in self.processes.items() if v.get('result',False)] - if len(result_node_ids) == 1: - return result_node_ids[0] - else: - raise RuntimeError("Invalid list of result node id's: " + str(result_node_ids)) - - @classmethod - def combine(cls, operator: str, first: Union['GraphBuilder', dict], second: Union['GraphBuilder', dict], arg_name='data'): - """Combine two GraphBuilders to a new merged one using the given operator""" - merged = cls() - - def insert_builder(builder: GraphBuilder): - nonlocal merged - result_node = builder.find_result_node_id() - _, key_map = merged._merge_processes(builder.processes, return_key_map=True) - key = key_map.get(result_node, result_node) - merged.processes[key]['result'] = False - return {'from_node': key} - - if isinstance(first, GraphBuilder): - first = insert_builder(first) - assert isinstance(first, dict) - if isinstance(second, GraphBuilder): - second = insert_builder(second) - assert isinstance(second, dict) - - args = { - arg_name:[first, second] - } - - merged.add_process(operator, result=True, **args) - return merged diff --git a/openeo/rest/connection.py b/openeo/rest/connection.py index 06f3d7fc8..3e5e1f6bf 100644 --- a/openeo/rest/connection.py +++ b/openeo/rest/connection.py @@ -31,7 +31,6 @@ OidcClientInfo, OidcAuthenticator, OidcRefreshTokenAuthenticator, OidcResourceOwnerPasswordAuthenticator, \ OidcDeviceAuthenticator, OidcProviderInfo, OidcException, DefaultOidcClientGrant, GrantsChecker from openeo.rest.datacube import DataCube -from openeo.rest.imagecollectionclient import ImageCollectionClient from 
openeo.rest.mlmodel import MlModel from openeo.rest.userfile import UserFile from openeo.rest.job import BatchJob, RESTJob @@ -1001,18 +1000,13 @@ def load_collection( .. versionadded:: 0.13.0 added the ``max_cloud_cover`` argument. """ - if self._api_version.at_least("1.0.0"): - return DataCube.load_collection( + assert self._api_version.at_least("1.0.0") + return DataCube.load_collection( collection_id=collection_id, connection=self, spatial_extent=spatial_extent, temporal_extent=temporal_extent, bands=bands, properties=properties, max_cloud_cover=max_cloud_cover, fetch_metadata=fetch_metadata, ) - else: - return ImageCollectionClient.load_collection( - collection_id=collection_id, session=self, - spatial_extent=spatial_extent, temporal_extent=temporal_extent, bands=bands - ) imagecollection = legacy_alias(load_collection, name="imagecollection") @@ -1245,7 +1239,9 @@ def service(self, service_id: str) -> Service: """ return Service(service_id, connection=self) - def load_disk_collection(self, format: str, glob_pattern: str, options: dict = {}) -> ImageCollectionClient: + def load_disk_collection( + self, format: str, glob_pattern: str, options: Optional[dict] = None + ) -> DataCube: """ Loads image data from disk as an ImageCollection. @@ -1254,11 +1250,10 @@ def load_disk_collection(self, format: str, glob_pattern: str, options: dict = { :param options: options specific to the file format :return: the data as an ImageCollection """ - - if self._api_version.at_least("1.0.0"): - return DataCube.load_disk_collection(self, format, glob_pattern, **options) - else: - return ImageCollectionClient.load_disk_collection(self, format, glob_pattern, **options) + assert self._api_version.at_least("1.0.0") + return DataCube.load_disk_collection( + self, format, glob_pattern, **(options or {}) + ) def as_curl(self, data: Union[dict, DataCube], path="/result", method="POST") -> str: """ diff --git a/openeo/rest/imagecollectionclient.py b/openeo/rest/imagecollectionclient.py deleted file mode 100644 index 5a09b3bcd..000000000 --- a/openeo/rest/imagecollectionclient.py +++ /dev/null @@ -1,1046 +0,0 @@ -import copy -import datetime -import logging -import pathlib -import typing -from typing import List, Dict, Union, Tuple - -from shapely.geometry import Polygon, MultiPolygon, mapping - -from openeo.imagecollection import ImageCollection -from openeo.internal.graphbuilder_040 import GraphBuilder -from openeo.internal.warnings import legacy_alias -from openeo.metadata import CollectionMetadata -from openeo.rest import BandMathException -from openeo.rest.job import BatchJob, RESTJob -from openeo.rest.service import Service -from openeo.util import get_temporal_extent, dict_no_none, guess_format - -if typing.TYPE_CHECKING: - # Imports for type checking only (circular import issue at runtime). - from openeo.rest.connection import Connection - - -_log = logging.getLogger(__name__) - - -class ImageCollectionClient(ImageCollection): - """Class representing an Image Collection. (In the API as 'imagery') - Supports 0.4. 
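# A note on the load_disk_collection() change in connection.py above: the old
# default `options: dict = {}` is the classic shared-mutable-default pitfall
# in Python (one call mutating the dict leaks into every later call). The
# patch switches to the usual safe pattern, sketched here:
#
#     def f(options: Optional[dict] = None):
#         options = options or {}   # fresh dict on every call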
- """ - - def __init__(self, node_id: str, builder: GraphBuilder, session: 'Connection', metadata: CollectionMetadata = None): - self.node_id = node_id - self.builder= builder - self.session = session - self.graph = builder.processes - self.metadata = CollectionMetadata.get_or_create(metadata) - - def __str__(self): - return "ImageCollection: %s" % self.node_id - - @property - def _api_version(self): - return self.session.capabilities().api_version_check - - @property - def connection(self): - return self.session - - def flat_graph(self): - return self.graph - - @classmethod - def load_collection( - cls, collection_id: str, session: 'Connection' = None, - spatial_extent: Union[Dict[str, float], None] = None, - temporal_extent: Union[List[Union[str,datetime.datetime,datetime.date]], None] = None, - bands: Union[List[str], None] = None, - fetch_metadata=True - ): - """ - Create a new Image Collection/Raster Data cube. - - :param collection_id: A collection id, should exist in the backend. - :param session: The session to use to connect with the backend. - :param spatial_extent: limit data to specified bounding box or polygons - :param temporal_extent: limit data to specified temporal interval - :param bands: only add the specified bands - :return: - """ - # TODO: rename function to load_collection for better similarity with corresponding process id? - builder = GraphBuilder() - process_id = 'load_collection' - normalized_temporal_extent = list(get_temporal_extent(extent=temporal_extent)) if temporal_extent is not None else None - arguments = { - 'id': collection_id, - 'spatial_extent': spatial_extent, - 'temporal_extent': normalized_temporal_extent, - } - metadata = session.collection_metadata(collection_id) if fetch_metadata else None - if bands: - if isinstance(bands, str): - bands = [bands] - if metadata: - bands = [metadata.band_dimension.band_name(b, allow_common=False) for b in bands] - arguments['bands'] = bands - node_id = builder.process(process_id, arguments) - if bands: - metadata = metadata.filter_bands(bands) - return cls(node_id, builder, session, metadata=metadata) - - create_collection = legacy_alias(load_collection, "create_collection") - - @classmethod - def load_disk_collection(cls, session: 'Connection', file_format: str, glob_pattern: str, **options) -> 'ImageCollection': - """ - Loads image data from disk as an ImageCollection. - - :param session: The session to use to connect with the backend. - :param file_format: the file format, e.g. 
'GTiff' - :param glob_pattern: a glob pattern that matches the files to load from disk - :param options: options specific to the file format - :return: the data as an ImageCollection - """ - builder = GraphBuilder() - - process_id = 'load_disk_data' - arguments = { - 'format': file_format, - 'glob_pattern': glob_pattern, - 'options': options - } - - node_id = builder.process(process_id, arguments) - - return cls(node_id, builder, session, metadata={}) - - def _filter_temporal(self, start: str, end: str) -> 'ImageCollection': - return self.graph_add_process( - process_id='filter_temporal', - args={ - 'data': {'from_node': self.node_id}, - 'extent': [start, end] - } - ) - - def filter_bbox(self, west, east, north, south, crs=None, base=None, height=None) -> 'ImageCollection': - extent = {'west': west, 'east': east, 'north': north, 'south': south} - extent.update(dict_no_none(crs=crs, base=base, height=height)) - return self.graph_add_process( - process_id='filter_bbox', - args={ - 'data': {'from_node': self.node_id}, - 'extent': extent - } - ) - - def filter_bands(self, bands: Union[List[Union[str, int]], str]) -> 'ImageCollection': - """ - Filter the imagery by the given bands - :param bands: list of band names, common names or band indices. Single band name can also be given as string. - :return a DataCube instance - """ - if isinstance(bands, str): - bands = [bands] - bands = [self.metadata.band_dimension.band_name(b) for b in bands] - im = self.graph_add_process( - process_id='filter_bands', - args={ - 'data': {'from_node': self.node_id}, - 'bands': [b for b in bands if b in self.metadata.band_names], - 'common_names': [b for b in bands if b in self.metadata.band_common_names] - }) - if im.metadata: - im.metadata = im.metadata.filter_bands(bands) - return im - - band_filter = legacy_alias(filter_bands, "band_filter") - - def band(self, band: Union[str, int]) -> 'ImageCollection': - """Filter the imagery by the given bands - :param band: band name, band common name or band index. - :return An ImageCollection instance - """ - - process_id = 'reduce' - band_index = self.metadata.get_band_index(band) - - args = { - 'data': {'from_node': self.node_id}, - 'dimension': self.metadata.band_dimension.name, - 'reducer': { - 'callback': { - 'r1': { - 'arguments': { - 'data': { - 'from_argument': 'data' - }, - 'index': band_index - }, - 'process_id': 'array_element', - 'result': True - } - } - } - } - - return self.graph_add_process(process_id, args) - - def resample_spatial(self, resolution: Union[float, Tuple[float, float]], - projection: Union[int, str] = None, method: str = 'near', align: str = 'upper-left'): - return self.graph_add_process('resample_spatial', { - 'data': {'from_node': self.node_id}, - 'resolution': resolution, - 'projection': projection, - 'method': method, - 'align': align - }) - - def subtract(self, other:Union[ImageCollection,Union[int,float]]): - """ - Subtract other from this datacube, so the result is: this - other - The number of bands in both data cubes has to be the same. 
- - :param other: - :return ImageCollection: this - other - """ - operator = "subtract" - if isinstance(other, int) or isinstance(other, float): - return self._reduce_bands_binary_const(operator, other) - elif isinstance(other, ImageCollection): - return self._reduce_bands_binary(operator, other) - else: - raise ValueError("Unsupported right-hand operand: " + str(other)) - - def divide(self, other:Union[ImageCollection,Union[int,float]]): - """ - Subtraction other from this datacube, so the result is: this - other - The number of bands in both data cubes has to be the same. - - :param other: - :return ImageCollection: this - other - """ - operator = "divide" - if isinstance(other, int) or isinstance(other, float): - return self._reduce_bands_binary_const(operator, other) - elif isinstance(other, ImageCollection): - return self._reduce_bands_binary(operator, other) - else: - raise ValueError("Unsupported right-hand operand: " + str(other)) - - def product(self, other:Union[ImageCollection,Union[int,float]]): - """ - Multiply other with this datacube, so the result is: this * other - The number of bands in both data cubes has to be the same. - - :param other: - :return ImageCollection: this - other - """ - operator = "product" - if isinstance(other, int) or isinstance(other, float): - return self._reduce_bands_binary_const(operator, other) - elif isinstance(other, ImageCollection): - return self._reduce_bands_binary(operator, other) - else: - raise ValueError("Unsupported right-hand operand: " + str(other)) - - def logical_or(self, other: ImageCollection): - """ - Apply element-wise logical `or` operation - :param other: - :return ImageCollection: logical_or(this, other) - """ - return self._reduce_bands_binary(operator='or', other=other,arg_name='expressions') - - def logical_and(self, other: ImageCollection): - """ - Apply element-wise logical `and` operation - :param other: - :return ImageCollection: logical_and(this, other) - """ - return self._reduce_bands_binary(operator='and', other=other,arg_name='expressions') - - def __invert__(self): - """ - - :return: - """ - operator = 'not' - my_builder = self._get_band_graph_builder() - new_builder = None - extend_previous_callback_graph = my_builder is not None - # TODO: why does these `add_process` calls use "expression" instead of "data" like the other cases? - if not extend_previous_callback_graph: - new_builder = GraphBuilder() - # TODO merge both process graphs? - new_builder.add_process(operator, expression={'from_argument': 'data'}, result=True) - else: - new_builder = my_builder.copy() - current_result = new_builder.find_result_node_id() - new_builder.processes[current_result]['result'] = False - new_builder.add_process(operator, expression={'from_node': current_result}, result=True) - - return self._create_reduced_collection(new_builder, extend_previous_callback_graph) - - def __ne__(self, other: Union[ImageCollection, Union[int, float]]): - return self._reduce_bands_binary_xy('neq', other) - - def __eq__(self, other:Union[ImageCollection,Union[int,float]]): - """ - Pixelwise comparison of this data cube with another cube or constant. - - :param other: Another data cube, or a constant - :return: - """ - return self._reduce_bands_binary_xy('eq', other) - - def __gt__(self, other:Union[ImageCollection,Union[int,float]]): - """ - Pairwise comparison of the bands in this data cube with the bands in the 'other' data cube. - The number of bands in both data cubes has to be the same. 
- - :param other: - :return ImageCollection: this + other - """ - return self._reduce_bands_binary_xy('gt', other) - - def __ge__(self, other:Union[ImageCollection,Union[int,float]]): - return self._reduce_bands_binary_xy('gte', other) - - def __lt__(self, other:Union[ImageCollection,Union[int,float]]): - """ - Pairwise comparison of the bands in this data cube with the bands in the 'other' data cube. - The number of bands in both data cubes has to be the same. - - :param other: - :return ImageCollection: this + other - """ - return self._reduce_bands_binary_xy('lt', other) - - def __le__(self, other:Union[ImageCollection,Union[int,float]]): - return self._reduce_bands_binary_xy('lte',other) - - def _create_reduced_collection(self, callback_graph_builder, extend_previous_callback_graph): - if not extend_previous_callback_graph: - # there was no previous reduce step - args = { - 'data': {'from_node': self.node_id}, - 'dimension': self.metadata.band_dimension.name, - 'reducer': { - 'callback': callback_graph_builder.processes - } - } - return self.graph_add_process("reduce", args) - else: - process_graph_copy = self.builder.shallow_copy() - process_graph_copy.processes[self.node_id]['arguments']['reducer']['callback'] = callback_graph_builder.processes - - # now current_node should be a reduce node, let's modify it - # TODO: properly update metadata of reduced cube? #metadatareducedimension - return ImageCollectionClient(self.node_id, process_graph_copy, self.session, metadata=self.metadata) - - def __truediv__(self, other): - return self.divide(other) - - def __sub__(self, other): - return self.subtract(other) - - def __radd__(self, other): - return self.add(other) - - def __add__(self, other): - return self.add(other) - - def __neg__(self): - return self.product(-1) - - def __mul__(self, other): - return self.product(other) - - def __rmul__(self, other): - return self.product(other) - - def __or__(self, other): - return self.logical_or(other) - - def __and__(self, other): - return self.logical_and(other) - - def add(self, other:Union[ImageCollection,Union[int,float]]): - """ - Pairwise addition of the bands in this data cube with the bands in the 'other' data cube. - The number of bands in both data cubes has to be the same. - - :param other: - :return ImageCollection: this + other - """ - operator = "sum" - if isinstance(other, int) or isinstance(other, float): - return self._reduce_bands_binary_const(operator, other) - elif isinstance(other, ImageCollection): - return self._reduce_bands_binary(operator, other) - else: - raise ValueError("Unsupported right-hand operand: " + str(other)) - - def _reduce_bands_binary(self, operator, other: 'ImageCollectionClient',arg_name='data'): - # first we create the callback - my_builder = self._get_band_graph_builder() - other_builder = other._get_band_graph_builder() - merged = GraphBuilder.combine( - operator=operator, - first=my_builder or {'from_argument': 'data'}, - second=other_builder or {'from_argument': 'data'}, - arg_name=arg_name) - # callback is ready, now we need to properly set up the reduce process that will invoke it - if my_builder is None and other_builder is None: - # there was no previous reduce step, perhaps this is a cube merge? 
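# The branch that follows builds a 0.4.0-style merge_cubes node and moves the
# combined callback into its overlap_resolver. Roughly, the resulting node
# looked like this (node ids illustrative):
#
#     "mergecubes1": {
#         "process_id": "merge_cubes",
#         "arguments": {
#             "cube1": {"from_node": "loadcollection1"},
#             "cube2": {"from_node": "loadcollection2"},
#             "overlap_resolver": {"callback": {...}},  # resolves x/y per pixel
#             "binary": True,
#         },
#     }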
- # cube merge is happening when node id's differ, otherwise we can use regular reduce - if (self.node_id != other.node_id): - # we're combining data from two different datacubes: http://api.openeo.org/v/0.4.0/processreference/#merge_cubes - - # set result node id's first, to keep track - my_builder = self.builder - my_builder.processes[self.node_id]['result'] = True - other_builder = other.builder - other_builder.processes[other.node_id]['result'] = True - - cubes_merged = GraphBuilder.combine(operator="merge_cubes", - first=my_builder, - second=other_builder, arg_name="cubes") - node_id = cubes_merged.find_result_node_id() - the_node = cubes_merged.processes[node_id] - the_node["result"] = False - cubes = the_node["arguments"]["cubes"] - the_node["arguments"]["cube1"] = cubes[0] - the_node["arguments"]["cube2"] = cubes[1] - del the_node["arguments"]["cubes"] - - #there can be only one process for now - cube_list = list(merged.processes.values())[0]["arguments"][arg_name] - assert len(cube_list) == 2 - # it is really not clear if this is the agreed way to go - cube_list[0]["from_argument"] = "x" - cube_list[1]["from_argument"] = "y" - the_node["arguments"]["overlap_resolver"] = { - 'callback': merged.processes - } - the_node["arguments"]["binary"] = True - return ImageCollectionClient(node_id, cubes_merged, self.session, metadata=self.metadata) - else: - args = { - 'data': {'from_node': self.node_id}, - 'reducer': { - 'callback': merged.processes - } - } - return self.graph_add_process("reduce", args) - else: - left_data_arg = self.builder.processes[self.node_id]["arguments"]["data"] - right_data_arg = other.builder.processes[other.node_id]["arguments"]["data"] - if left_data_arg != right_data_arg: - raise BandMathException("'Band math' between bands of different image collections is not supported yet.") - node_id = self.node_id - reducing_graph = self - if reducing_graph.graph[node_id]["process_id"] != "reduce": - node_id = other.node_id - reducing_graph = other - new_builder = reducing_graph.builder.shallow_copy() - new_builder.processes[node_id]['arguments']['reducer']['callback'] = merged.processes - # now current_node should be a reduce node, let's modify it - # TODO: properly update metadata of reduced cube? #metadatareducedimension - return ImageCollectionClient(node_id, new_builder, reducing_graph.session, metadata=self.metadata) - - def _reduce_bands_binary_xy(self,operator,other:Union[ImageCollection,Union[int,float]]): - """ - Pixelwise comparison of this data cube with another cube or constant. - - :param other: Another data cube, or a constant - :return: - """ - if isinstance(other, int) or isinstance(other, float): - my_builder = self._get_band_graph_builder() - new_builder = None - extend_previous_callback_graph = my_builder is not None - if not extend_previous_callback_graph: - new_builder = GraphBuilder() - # TODO merge both process graphs? 
- new_builder.add_process(operator, x={'from_argument': 'data'}, y = other, result=True) - else: - new_builder = my_builder.shallow_copy() - current_result = new_builder.find_result_node_id() - new_builder.processes[current_result]['result'] = False - new_builder.add_process(operator, x={'from_node': current_result}, y = other, result=True) - - return self._create_reduced_collection(new_builder, extend_previous_callback_graph) - elif isinstance(other, ImageCollection): - return self._reduce_bands_binary(operator, other) - else: - raise ValueError("Unsupported right-hand operand: " + str(other)) - - def _reduce_bands_binary_const(self, operator, other:Union[int,float]): - my_builder = self._get_band_graph_builder() - new_builder = None - extend_previous_callback_graph = my_builder is not None - if not extend_previous_callback_graph: - new_builder = GraphBuilder() - # TODO merge both process graphs? - new_builder.add_process(operator, data=[{'from_argument': 'data'}, other], result=True) - else: - current_result = my_builder.find_result_node_id() - new_builder = my_builder.shallow_copy() - new_builder.processes[current_result]['result'] = False - new_builder.add_process(operator, data=[{'from_node': current_result}, other], result=True) - - return self._create_reduced_collection(new_builder,extend_previous_callback_graph) - - def _get_band_graph_builder(self): - current_node = self.graph[self.node_id] - if current_node["process_id"] == "reduce": - # TODO: check "dimension" of "reduce" in some way? - callback_graph = current_node["arguments"]["reducer"]["callback"] - return GraphBuilder.from_process_graph(callback_graph) - return None - - def add_dimension(self, name: str, label: Union[str, int, float], type: str = "other"): - if type == "bands" and self.metadata.has_band_dimension(): - # TODO: remove old "bands" dimension in appropriate places (see #metadatareducedimension) - _log.warning('Adding new "bands" dimension on top of existing one.') - return self.graph_add_process( - process_id='add_dimension', - args={ - 'data': {'from_node': self.node_id}, - 'name': name, 'value': label, 'type': type, - }, - metadata=self.metadata.add_dimension(name, label, type) - ) - - def apply_dimension(self, code: str, runtime=None, version="latest", dimension='t', target_dimension=None) -> 'ImageCollection': - """ - Applies an n-ary process (i.e. takes an array of pixel values instead of a single pixel value) to a raster data cube. - In contrast, the process apply applies an unary process to all pixel values. - - By default, apply_dimension applies the the process on all pixel values in the data cube as apply does, but the parameter dimension can be specified to work only on a particular dimension only. For example, if the temporal dimension is specified the process will work on a time series of pixel values. - - The n-ary process must return as many elements in the returned array as there are in the input array. Otherwise a CardinalityChanged error must be returned. 
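# A hypothetical usage sketch for the apply_dimension method documented here
# (the `cube` variable and the UDF body are illustrative, not from this repo):
#
#     udf_code = "def apply_timeseries(series): return series - series.mean()"
#     detrended = cube.apply_dimension(udf_code, runtime="Python", dimension="t")
#
# With `runtime` set, the code was wrapped in a run_udf callback; without it,
# `code` was interpreted as a plain process id (e.g. "cumsum").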
- - - :param code: UDF code or process identifier - :param runtime: - :param version: - :param dimension: - :return: - :raises: CardinalityChangedError - """ - process_id = 'apply_dimension' - if runtime: - callback = { - 'udf': self._create_run_udf(code, runtime, version) - } - else: - callback = { - 'process': { - "arguments": { - "data": { - "from_argument": "data" - } - }, - "process_id": code, - "result": True - } - } - args = { - 'data': { - 'from_node': self.node_id - }, - 'dimension': self.metadata.assert_valid_dimension(dimension), - 'process': { - 'callback': callback - } - } - return self.graph_add_process(process_id, args) - - def reduce_bands_udf(self, code: str, runtime="Python", version="latest") -> 'ImageCollection': - """ - Reduce "band" dimension with a UDF - """ - process_id = 'reduce' - args = { - 'data': { - 'from_node': self.node_id - }, - 'dimension': self.metadata.band_dimension.name, - 'binary': False, - 'reducer': { - 'callback': { - 'udf': self._create_run_udf(code, runtime, version) - } - } - } - return self.graph_add_process(process_id, args) - - def _create_run_udf(self, code, runtime, version): - return { - "arguments": { - "data": { - "from_argument": "data" - }, - "runtime": runtime, - "version": version, - "udf": code - - }, - "process_id": "run_udf", - "result": True - } - - def reduce_temporal_udf(self, code: str, runtime="Python", version="latest"): - """ - Apply reduce (`reduce_dimension`) process with given UDF along temporal dimension. - - :param code: The UDF code, compatible with the given runtime and version - :param runtime: The UDF runtime - :param version: The UDF runtime version - """ - process_id = 'reduce' - args = { - 'data': { - 'from_node': self.node_id - }, - 'dimension': self.metadata.temporal_dimension.name, - 'binary': False, - 'reducer': { - 'callback': { - 'udf': self._create_run_udf(code, runtime, version) - } - } - } - return self.graph_add_process(process_id, args) - - reduce_tiles_over_time = legacy_alias(reduce_temporal_udf, "reduce_tiles_over_time") - - def apply(self, process: str, data_argument='data',arguments={}) -> 'ImageCollection': - process_id = 'apply' - arguments[data_argument] = \ - { - "from_argument": data_argument - } - args = { - 'data': {'from_node': self.node_id}, - 'process':{ - 'callback':{ - "unary":{ - "arguments":arguments, - "process_id":process, - "result":True - } - } - } - } - - return self.graph_add_process(process_id, args) - - def _reduce_time(self, reduce_function = "max"): - process_id = 'reduce' - - args = { - 'data': {'from_node': self.node_id}, - 'dimension': self.metadata.temporal_dimension.name, - 'reducer': { - 'callback': { - 'r1': { - 'arguments': { - 'data': { - 'from_argument': 'data' - } - }, - 'process_id': reduce_function, - 'result': True - } - } - } - } - - return self.graph_add_process(process_id, args) - - def min_time(self) -> 'ImageCollection': - """Finds the minimum value of a time series for all bands of the input dataset. - - :return: An ImageCollection instance - """ - - return self._reduce_time(reduce_function="min") - - def max_time(self) -> 'ImageCollection': - """ - Finds the maximum value of a time series for all bands of the input dataset. - - :return: An ImageCollection instance - """ - return self._reduce_time(reduce_function="max") - - def mean_time(self) -> 'ImageCollection': - """Finds the mean value of a time series for all bands of the input dataset. 
- - :return: An ImageCollection instance - """ - return self._reduce_time(reduce_function="mean") - - def median_time(self) -> 'ImageCollection': - """Finds the median value of a time series for all bands of the input dataset. - - :return: An ImageCollection instance - """ - - return self._reduce_time(reduce_function="median") - - def count_time(self) -> 'ImageCollection': - """Counts the number of images with a valid mask in a time series for all bands of the input dataset. - - :return: An ImageCollection instance - """ - return self._reduce_time(reduce_function="count") - - def ndvi(self, name="ndvi") -> 'ImageCollection': - """ Normalized Difference Vegetation Index (NDVI) - - :param name: Name of the newly created band - - :return: An ImageCollection instance - """ - process_id = 'ndvi' - args = { - 'data': {'from_node': self.node_id}, - 'name': name - } - return self.graph_add_process(process_id, args) - - def normalized_difference(self, other: ImageCollection) -> 'ImageCollection': - return self._reduce_bands_binary("normalized_difference", other) - - def linear_scale_range(self, input_min, input_max, output_min, output_max) -> 'ImageCollection': - """ Color stretching - :param input_min: Minimum input value - :param input_max: Maximum input value - :param output_min: Minimum output value - :param output_max: Maximum output value - :return An ImageCollection instance - """ - process_id = 'linear_scale_range' - args = { - 'x': {'from_node': self.node_id}, - 'inputMin': input_min, - 'inputMax': input_max, - 'outputMin': output_min, - 'outputMax': output_max - } - return self.graph_add_process(process_id, args) - - def mask(self, polygon: Union[Polygon, MultiPolygon,str]=None, srs=None, rastermask: 'ImageCollection'=None, - replacement=None) -> 'ImageCollection': - """ - Mask the image collection using either a polygon or a raster mask. - - All pixels outside the polygon should be set to the nodata value. - All pixels inside, or intersecting the polygon should retain their original value. - - All pixels are replaced for which the corresponding pixels in the mask are non-zero (for numbers) or True - (for boolean values). - - The pixel values are replaced with the value specified for replacement, which defaults to None (no data). - No data values will be left untouched by the masking operation. - - # TODO: just provide a single `mask` argument and detect the type: polygon or process graph - # TODO: also see `mask` vs `mask_polygon` processes in https://github.com/Open-EO/openeo-processes/pull/110 - - :param polygon: A polygon, provided as a :class:`shapely.geometry.Polygon` or :class:`shapely.geometry.MultiPolygon`, or a filename pointing to a valid vector file - :param srs: The reference system of the provided polygon, by default this is Lat Lon (EPSG:4326). - :param rastermask: the raster mask - :param replacement: the value to replace the masked pixels with - :raise: :class:`ValueError` if a polygon is supplied and its area is 0. - :return: A new ImageCollection, with the mask applied. - """ - mask = None - new_collection = None - if polygon is not None: - if isinstance(polygon, (str, pathlib.Path)): - # TODO: default to loading file client side? 
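# Sketch of the two mask flavours this method supported (values illustrative):
#
#     from shapely.geometry import box
#     masked = cube.mask(polygon=box(3.0, 51.0, 3.1, 51.1))      # vector mask
#     masked = cube.mask(rastermask=cloud_cube, replacement=0)   # raster mask
#
# A str/Path polygon is handled right here: it is sent to a server-side
# read_vector node instead of being inlined as GeoJSON.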
- # TODO: change read_vector to load_uploaded_files https://github.com/Open-EO/openeo-processes/pull/106 - new_collection = self.graph_add_process('read_vector', args={ - 'filename': str(polygon) - }) - - mask = { - 'from_node': new_collection.node_id - } - else: - if polygon.area == 0: - raise ValueError("Mask {m!s} has an area of {a!r}".format(m=polygon, a=polygon.area)) - - geojson = mapping(polygon) - if srs: - geojson['crs'] = {'type': 'name', 'properties': {'name': srs}} - mask = geojson - new_collection = self - elif rastermask is not None: - mask_node = rastermask.graph[rastermask.node_id] - mask_node['result']=True - new_collection = self._graph_merge(rastermask.graph) - #mask node id may have changed! - mask_id = new_collection.builder.find_result_node_id() - mask_node = new_collection.graph[mask_id] - mask_node['result']=False - mask = { - 'from_node': mask_id - } - - else: - raise AttributeError("mask process: either a polygon or a rastermask should be provided.") - - process_id = 'mask' - - args = { - 'data': {'from_node': self.node_id}, - 'mask': mask - } - if replacement is not None: - args['replacement'] = replacement - - return new_collection.graph_add_process(process_id, args) - - def merge(self, other: 'ImageCollection', overlap_resolver: str = None) -> 'ImageCollection': - other_node = other.graph[other.node_id] - other_node['result'] = True - new_collection = self._graph_merge(other.graph) - # mask node id may have changed! - mask_id = new_collection.builder.find_result_node_id() - other_node = new_collection.graph[mask_id] - other_node['result'] = False - cube2 = { - 'from_node': mask_id - } - args = { - 'cube1': {'from_node': self.node_id}, - 'cube2': cube2 - } - if overlap_resolver: - # Assume simple math operation - # TODO support general overlap resolvers. - assert isinstance(overlap_resolver, str) - args["overlap_resolver"] = {"callback": {"r1": { - "process_id": overlap_resolver, - "arguments": {"data": [{"from_argument": "x"}, {"from_argument": "y"}]}, - "result": True, - }}} - args["binary"] = True - return new_collection.graph_add_process('merge_cubes', args) - - - - def apply_kernel(self, kernel, factor=1.0, border = 0, replace_invalid=0) -> 'ImageCollection': - """ - Applies a focal operation based on a weighted kernel to each value of the specified dimensions in the data cube. - - :param kernel: The kernel to be applied on the data cube. It should be a 2D numpy array. - :param factor: A factor that is multiplied to each value computed by the focal operation. This is basically a shortcut for explicitly multiplying each value by a factor afterwards, which is often required for some kernel-based algorithms such as the Gaussian blur. - :return: A data cube with the newly computed values. The resolution, cardinality and the number of dimensions are the same as for the original data cube. - """ - return self.graph_add_process('apply_kernel', { - 'data': {'from_node': self.node_id}, - 'kernel':kernel.tolist(), - 'factor':factor, - 'border': border, - 'replace_invalid': replace_invalid - }) - - ####VIEW methods ####### - - def polygonal_mean_timeseries(self, polygon: Union[Polygon, MultiPolygon, str]) -> 'ImageCollection': - """ - Extract a mean time series for the given (multi)polygon. Its points are - expected to be in the EPSG:4326 coordinate - reference system. 
- - :param polygon: The (multi)polygon; or a file path or HTTP URL to a GeoJSON file or shape file - :return: ImageCollection - """ - - return self._polygonal_timeseries(polygon, "mean") - - def polygonal_histogram_timeseries(self, polygon: Union[Polygon, MultiPolygon, str]) -> 'ImageCollection': - """ - Extract a histogram time series for the given (multi)polygon. Its points are - expected to be in the EPSG:4326 coordinate - reference system. - - :param polygon: The (multi)polygon; or a file path or HTTP URL to a GeoJSON file or shape file - :return: ImageCollection - """ - - return self._polygonal_timeseries(polygon, "histogram") - - def polygonal_median_timeseries(self, polygon: Union[Polygon, MultiPolygon, str]) -> 'ImageCollection': - """ - Extract a median time series for the given (multi)polygon. Its points are - expected to be in the EPSG:4326 coordinate - reference system. - - :param polygon: The (multi)polygon; or a file path or HTTP URL to a GeoJSON file or shape file - :return: ImageCollection - """ - - return self._polygonal_timeseries(polygon, "median") - - def polygonal_standarddeviation_timeseries(self, polygon: Union[Polygon, MultiPolygon, str]) -> 'ImageCollection': - """ - Extract a time series of standard deviations for the given (multi)polygon. Its points are - expected to be in the EPSG:4326 coordinate - reference system. - - :param polygon: The (multi)polygon; or a file path or HTTP URL to a GeoJSON file or shape file - :return: ImageCollection - """ - - return self._polygonal_timeseries(polygon, "sd") - - def _polygonal_timeseries(self, polygon: Union[Polygon, MultiPolygon, str], func: str) -> 'ImageCollection': - def graph_add_aggregate_process(graph) -> 'ImageCollection': - process_id = 'aggregate_polygon' - args = { - 'data': {'from_node': self.node_id}, - 'polygons': polygons, - 'reducer': { - 'callback': { - "unary": { - "arguments": { - "data": { - "from_argument": "data" - } - }, - "process_id": func, - "result": True - } - } - } - } - return graph.graph_add_process(process_id, args) - - if isinstance(polygon, str): - with_read_vector = self.graph_add_process('read_vector', args={ - 'filename': polygon - }) - polygons = { - 'from_node': with_read_vector.node_id - } - return graph_add_aggregate_process(with_read_vector) - else: - polygons = mapping(polygon) - return graph_add_aggregate_process(self) - - def save_result(self, format: str = "GTIFF", options: dict = None): - return self.graph_add_process( - process_id="save_result", - args={ - "data": {"from_node": self.node_id}, - "format": format, - "options": options or {} - } - ) - - def download(self, outputfile: str = None, format: str = None, options: dict = None): - """Download image collection, e.g. as GeoTIFF.""" - if not format: - format = guess_format(outputfile) if outputfile else "GTiff" - - newcollection = self.save_result(format=format, options=options) - newcollection.graph[newcollection.node_id]["result"] = True - return self.session.download(newcollection.graph, outputfile) - - def tiled_viewing_service(self, type: str, **kwargs) -> Service: - self.graph[self.node_id]['result'] = True - return self.session.create_service(self.graph, type=type, **kwargs) - - def execute_batch( - self, - outputfile: Union[str, pathlib.Path], out_format: str = None, - print=print, max_poll_interval=60, connection_retry_interval=30, - job_options=None, **format_options): - """ - Evaluate the process graph by creating a batch job, and retrieving the results when it is finished. 
- This method is mostly recommended if the batch job is expected to run in a reasonable amount of time. - - For very long running jobs, you probably do not want to keep the client running. - - :param job_options: - :param outputfile: The path of a file to which a result can be written - :param out_format: String Format of the job result. - :param format_options: String Parameters for the job result format - - """ - job = self.create_job(out_format, job_options=job_options, **format_options) - return job.run_synchronous( - # TODO #135 support multi file result sets too - outputfile=outputfile, - print=print, max_poll_interval=max_poll_interval, connection_retry_interval=connection_retry_interval - ) - - def create_job( - self, out_format=None, title: str = None, description: str = None, plan: str = None, budget=None, - job_options=None, **format_options - ) -> BatchJob: - """ - Sends a job to the backend and returns a Job instance. The job will still need to be started and managed explicitly. - The :func:`~openeo.imagecollection.ImageCollection.execute_batch` method allows you to run batch jobs without managing it. - - :param out_format: String Format of the job result. - :param job_options: A dictionary containing (custom) job options - :param format_options: String Parameters for the job result format - :return: status: Job resulting job. - """ - img = self - if out_format: - # add `save_result` node - img = img.save_result(format=out_format, options=format_options) - img.graph[img.node_id]["result"] = True - return self.session.create_job( - process_graph=img.graph, - title=title, description=description, plan=plan, budget=budget, additional=job_options - ) - - send_job = legacy_alias(create_job, name="send_job") - - def execute(self) -> Dict: - """Executes the process graph of the imagery. """ - newbuilder = self.builder.shallow_copy() - newbuilder.processes[self.node_id]['result'] = True - return self.session.execute(newbuilder.processes) - - ####### HELPER methods ####### - - def _graph_merge(self, other_graph:Dict): - newbuilder = self.builder.shallow_copy() - merged = newbuilder.merge(GraphBuilder.from_process_graph(other_graph)) - # TODO: properly update metadata as well? - newCollection = ImageCollectionClient(self.node_id, merged, self.session, metadata=self.metadata) - return newCollection - - def graph_add_process(self, process_id: str, args: dict, - metadata: CollectionMetadata = None) -> 'ImageCollectionClient': - """ - Returns a new imagecollection with an added process with the given process - id and a dictionary of arguments - - :param process_id: String, Process Id of the added process. - :param args: Dict, Arguments of the process. - - :return: new ImageCollectionClient instance - """ - #don't modify in place, return new builder - newbuilder = self.builder.shallow_copy() - id = newbuilder.process(process_id,args) - - # TODO: properly update metadata as well? 
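# The copy-then-extend pattern in this helper is what kept these cubes
# immutable from the user's point of view: every operation returned a new
# client object over a copied builder. Illustration (names assume the class
# defined in this deleted file):
#
#     cube2 = cube.graph_add_process("ndvi", {"data": {"from_node": cube.node_id}})
#     assert cube2.node_id != cube.node_id   # `cube` itself is unchanged
#
# The 1.0.0 DataCube keeps the same immutable style (e.g. DataCube.process),
# which is part of why this class could be dropped without losing behaviour.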
- newCollection = ImageCollectionClient( - node_id=id, builder=newbuilder, session=self.session, metadata=metadata or copy.copy(self.metadata) - ) - return newCollection diff --git a/tests/internal/test_graphbuilder_040.py b/tests/internal/test_graphbuilder_040.py deleted file mode 100644 index 18f10361a..000000000 --- a/tests/internal/test_graphbuilder_040.py +++ /dev/null @@ -1,115 +0,0 @@ -from unittest import TestCase -from openeo.internal.graphbuilder_040 import GraphBuilder - - -class GraphBuilderTest(TestCase): - - def setUp(self) -> None: - GraphBuilder.id_counter = {} - - def test_create_empty(self): - builder = GraphBuilder() - builder.process("sum",{}) - self.assertEqual(1,len(builder.processes)) - - def test_create_from_existing(self): - graph = { - "sum_01": { - "arguments": { - "data1": { - "from_node": "node1" - }, - "data2": { - "from_node": "node3" - } - }, - "process_id":"sum", - "result": True - }, - "sum_02": { - "arguments": { - "data": { - "from_node": "node4" - } - }, - "process_id": "sum", - } - } - - builder = GraphBuilder(graph) - - print(builder.processes) - self.assertEqual(2,builder.id_counter["sum"]) - - def test_merge(self): - graph1 = { - "sum1": { - "arguments": { - "data1": { - "from_node": "node1" - }, - "data2": { - "from_node": "node3" - } - }, - "process_id": "sum", - "result": True - } - - } - - graph2 = { - "sum1": { - "arguments": { - "data": { - "from_node": "node4" - }, - "data2": [ - { - "from_node": "node4" - } - ] - }, - "process_id": "sum", - }, - "sum2": { - "arguments": { - "data": { - "from_node": "sum1" - }, - "data2": [ - { - "from_node": "sum1" - } - ] - }, - "process_id": "sum", - } - } - - builder1 = GraphBuilder(graph1) - builder2 = GraphBuilder(graph2) - - merged = builder1.merge(builder2).processes - - import json - print(json.dumps(merged, indent=2)) - self.assertIn("sum1", merged) - self.assertIn("sum4",merged) - self.assertIn("sum5", merged) - self.assertEqual("sum4",merged["sum5"]["arguments"]["data"]["from_node"]) - self.assertEqual("sum4", merged["sum5"]["arguments"]["data2"][0]["from_node"]) - - def test_merge_issue50(self): - """https://github.com/Open-EO/openeo-python-client/issues/50""" - graph = { - 'op3': {'process_id': 'op', 'arguments': {'data': {'from_node': 'op1', 'ref': 'A'}}}, - 'op2': {'process_id': 'op', 'arguments': {'data': {'from_node': 'src', 'ref': 'B'}}}, - 'op1': {'process_id': 'op', 'arguments': {'data': {'from_node': 'op2', 'ref': 'C'}}}, - 'op4': {'process_id': 'op', 'arguments': {'data': {'from_node': 'op3', 'ref': 'D'}}}, - } - builder = GraphBuilder(graph) - assert builder.processes['op1']['arguments']['data'] == {'from_node': 'op2', 'ref': 'C'} - assert builder.processes['op2']['arguments']['data'] == {'from_node': 'src', 'ref': 'B'} - assert builder.processes['op3']['arguments']['data'] == {'from_node': 'op1', 'ref': 'A'} - assert builder.processes['op4']['arguments']['data'] == {'from_node': 'op3', 'ref': 'D'} diff --git a/tests/rest/__init__.py b/tests/rest/__init__.py index 358da6fd4..0472aa51d 100644 --- a/tests/rest/__init__.py +++ b/tests/rest/__init__.py @@ -1,13 +1,11 @@ import json -from typing import Union import mock from openeo.rest.datacube import DataCube -from openeo.rest.imagecollectionclient import ImageCollectionClient -def get_download_graph(cube: Union[DataCube, ImageCollectionClient]) -> dict: +def get_download_graph(cube: DataCube) -> dict: """ Do fake download of a cube and intercept the process graph :param cube: cube to download @@ -22,7 +20,7 @@ def 
get_download_graph(cube: Union[DataCube, ImageCollectionClient]) -> dict: return actual_graph -def get_execute_graph(cube: Union[DataCube, ImageCollectionClient]) -> dict: +def get_execute_graph(cube: DataCube) -> dict: """ Do fake execute of a cube and intercept the process graph :param cube: cube to download diff --git a/tests/rest/conftest.py b/tests/rest/conftest.py index 8a36033c5..b2f631840 100644 --- a/tests/rest/conftest.py +++ b/tests/rest/conftest.py @@ -1,19 +1,6 @@ import pytest -import openeo.internal.graphbuilder_040 - -@pytest.fixture(params=["0.4.0", "1.0.0"]) +@pytest.fixture(params=["1.0.0"]) def api_version(request): return request.param - - -def reset_graphbuilder(): - # Reset 0.4.0 style graph builder - openeo.internal.graphbuilder_040.GraphBuilder.id_counter = {} - - -@pytest.fixture(autouse=True) -def auto_reset(): - """Fixture to automatically reset builders, counters, ...""" - reset_graphbuilder() diff --git a/tests/rest/datacube/conftest.py b/tests/rest/datacube/conftest.py index f67c9606d..26b6eee4e 100644 --- a/tests/rest/datacube/conftest.py +++ b/tests/rest/datacube/conftest.py @@ -3,7 +3,6 @@ import pytest import openeo -import openeo.internal.graphbuilder_040 from openeo.rest.connection import Connection from openeo.rest.datacube import DataCube @@ -71,12 +70,6 @@ def connection(api_version, requests_mock) -> Connection: return _setup_connection(api_version, requests_mock) -@pytest.fixture -def con040(requests_mock) -> Connection: - """Connection fixture to a 0.4.0 backend with some image collections.""" - return _setup_connection("0.4.0", requests_mock) - - @pytest.fixture def con100(requests_mock) -> Connection: """Connection fixture to a 1.0.0 backend with some image collections.""" diff --git a/tests/rest/datacube/test_bandmath.py b/tests/rest/datacube/test_bandmath.py index dd27ea577..dfcf05ece 100644 --- a/tests/rest/datacube/test_bandmath.py +++ b/tests/rest/datacube/test_bandmath.py @@ -12,7 +12,6 @@ import openeo from openeo.rest import BandMathException from .. import get_download_graph -from ..conftest import reset_graphbuilder from ... 
import load_json_resource from .test_datacube import _get_leaf_node @@ -21,29 +20,9 @@ def test_band_basic(connection, api_version): cube = connection.load_collection("SENTINEL2_RADIOMETRY_10M") expected_graph = load_json_resource('data/%s/band0.json' % api_version) assert cube.band(0).flat_graph() == expected_graph - reset_graphbuilder() assert cube.band("B02").flat_graph() == expected_graph -def test_indexing_040(con040): - cube = con040.load_collection("SENTINEL2_RADIOMETRY_10M") - expected_graph = load_json_resource('data/0.4.0/band_red.json') - reset_graphbuilder() - assert cube.band("B04").flat_graph() == expected_graph - reset_graphbuilder() - assert cube.band("red").flat_graph() == expected_graph - reset_graphbuilder() - assert cube.band(2).flat_graph() == expected_graph - - cube2 = cube.filter_bands(['B04', 'B03']) - expected_graph = load_json_resource('data/0.4.0/band_red_filtered.json') - reset_graphbuilder() - assert cube2.band("B04").flat_graph() == expected_graph - reset_graphbuilder() - assert cube2.band("red").flat_graph() == expected_graph - reset_graphbuilder() - assert cube2.band(0).flat_graph() == expected_graph - def test_indexing_100(con100): cube = con100.load_collection("SENTINEL2_RADIOMETRY_10M") diff --git a/tests/rest/datacube/test_datacube.py b/tests/rest/datacube/test_datacube.py index 0b1d155a4..580ef4c19 100644 --- a/tests/rest/datacube/test_datacube.py +++ b/tests/rest/datacube/test_datacube.py @@ -17,10 +17,8 @@ from openeo.capabilities import ComparableVersion from openeo.rest import BandMathException from openeo.rest.datacube import DataCube -from openeo.rest.imagecollectionclient import ImageCollectionClient from .conftest import API_URL from .. import get_download_graph -from ..conftest import reset_graphbuilder from ... 
import load_json_resource @@ -45,9 +43,7 @@ def test_min_time(s2cube, api_version): def _get_leaf_node(cube, force_flat=True) -> dict: """Get leaf node (node with result=True), supporting old and new style of graph building.""" - if isinstance(cube, ImageCollectionClient): - return cube.flat_graph()[cube.node_id] - elif isinstance(cube, DataCube): + if isinstance(cube, DataCube): if force_flat: flat_graph = cube.flat_graph() node, = [n for n in flat_graph.values() if n.get("result")] @@ -58,21 +54,6 @@ def _get_leaf_node(cube, force_flat=True) -> dict: raise ValueError(repr(cube)) -def test_date_range_filter(con040): - s2cube = con040.load_collection("S2") - im = s2cube.date_range_filter("2016-01-01", "2016-03-10") - graph = _get_leaf_node(im) - assert graph['process_id'] == 'filter_temporal' - assert graph['arguments']['extent'] == ["2016-01-01", "2016-03-10"] - - -def test_filter_daterange(con040): - s2cube = con040.load_collection("S2") - im = s2cube.filter_daterange(extent=("2016-01-01", "2016-03-10")) - graph = _get_leaf_node(im) - assert graph['process_id'] == 'filter_temporal' - assert graph['arguments']['extent'] == ["2016-01-01", "2016-03-10"] - def test_filter_temporal(s2cube): im = s2cube.filter_temporal("2016-01-01", "2016-03-10") @@ -192,15 +173,6 @@ def test_filter_bands_index(s2cube, api_version): assert im.flat_graph() == expected -def test_pipe(con040): - def ndvi_percent(cube): - return cube.ndvi().linear_scale_range(0, 1, 0, 100) - - s2cube = con040.load_collection("S2") - im = s2cube.pipe(ndvi_percent) - assert im.flat_graph() == load_json_resource('data/0.4.0/pipe.json') - - def test_filter_bbox_minimal(s2cube): im = s2cube.filter_bbox(west=3.0, east=3.1, north=51.1, south=51.0) graph = _get_leaf_node(im) @@ -258,45 +230,6 @@ def test_filter_bbox_default_handling(s2cube, kwargs, expected): assert graph["arguments"]["extent"] == dict(west=3, east=4, south=8, north=9, **expected) -def test_bbox_filter_nsew(con040): - # TODO: remove this test for deprecated `bbox_filter` - s2cube = con040.load_collection("S2") - im = s2cube.bbox_filter( - west=652000, east=672000, north=5161000, south=5181000, crs=32632 - ) - graph = _get_leaf_node(im) - assert graph["process_id"] == "filter_bbox" - assert graph["arguments"]["extent"] == { - "west": 652000, "east": 672000, "north": 5161000, "south": 5181000, "crs": 32632 - } - - -def test_bbox_filter_tblr(con040): - # TODO: remove this test for deprecated `bbox_filter` - s2cube = con040.load_collection("S2") - im = s2cube.bbox_filter( - left=652000, right=672000, top=5161000, bottom=5181000, srs=32632 - ) - graph = _get_leaf_node(im) - assert graph["process_id"] == "filter_bbox" - assert graph["arguments"]["extent"] == { - "west": 652000, "east": 672000, "north": 5161000, "south": 5181000, "crs": 32632 - } - - -def test_bbox_filter_nsew_zero(con040): - # TODO: remove this test for deprecated `bbox_filter` - s2cube = con040.load_collection("S2") - im = s2cube.bbox_filter( - west=0, east=0, north=0, south=0, crs=32632 - ) - graph = _get_leaf_node(im) - assert graph["process_id"] == "filter_bbox" - assert graph["arguments"]["extent"] == { - "west": 0, "east": 0, "north": 0, "south": 0, "crs": 32632 - } - - def test_max_time(s2cube, api_version): im = s2cube.max_time() graph = _get_leaf_node(im, force_flat=True) @@ -447,7 +380,6 @@ def test_apply_dimension(connection, requests_mock): s22 = connection.load_collection("S22") for dim in ["color", "alpha", "date"]: - reset_graphbuilder() cube = s22.apply_dimension(dimension=dim, 
code="subtract_mean") assert cube.flat_graph()["applydimension1"]["process_id"] == "apply_dimension" assert cube.flat_graph()["applydimension1"]["arguments"]["dimension"] == dim diff --git a/tests/rest/test_connection.py b/tests/rest/test_connection.py index 5bfa3ddbe..c32a1664c 100644 --- a/tests/rest/test_connection.py +++ b/tests/rest/test_connection.py @@ -1716,27 +1716,6 @@ def get_me(request: requests.Request, context): assert "re-auth" not in caplog.text -def test_load_collection_arguments_040(requests_mock): - requests_mock.get(API_URL, json={"api_version": "0.4.0"}) - conn = Connection(API_URL) - requests_mock.get(API_URL + "collections/FOO", json={ - "properties": {"eo:bands": [{"name": "red"}, {"name": "green"}, {"name": "blue"}]} - }) - spatial_extent = {"west": 1, "south": 2, "east": 3, "north": 4} - temporal_extent = ["2019-01-01", "2019-01-22"] - im = conn.load_collection( - "FOO", spatial_extent=spatial_extent, temporal_extent=temporal_extent, bands=["red", "green"] - ) - node = im.flat_graph()[im.node_id] - assert node["process_id"] == "load_collection" - assert node["arguments"] == { - "id": "FOO", - "spatial_extent": spatial_extent, - "temporal_extent": temporal_extent, - "bands": ["red", "green"] - } - - def test_load_collection_arguments_100(requests_mock): requests_mock.get(API_URL, json={"api_version": "1.0.0"}) conn = Connection(API_URL) diff --git a/tests/rest/test_imagecollectionclient.py b/tests/rest/test_imagecollectionclient.py deleted file mode 100644 index 0a5753e89..000000000 --- a/tests/rest/test_imagecollectionclient.py +++ /dev/null @@ -1,87 +0,0 @@ -import pathlib - -import pytest - -import openeo -from openeo.internal.graphbuilder_040 import GraphBuilder -import openeo.metadata -from openeo.rest.imagecollectionclient import ImageCollectionClient - -API_URL = "https://oeo.test" - - -@pytest.fixture -def session040(requests_mock): - requests_mock.get(API_URL + "/", json={ - "api_version": "0.4.0", - "endpoints": [{"path": "/credentials/basic", "methods": ["GET"]}] - }) - session = openeo.connect(API_URL) - return session - - -def test_metadata_from_api(session040, requests_mock): - requests_mock.get(API_URL + "/collections/SENTINEL2", json={"foo": "bar"}) - metadata = session040.collection_metadata("SENTINEL2") - assert metadata.get("foo") == "bar" - - -def test_metadata_load_collection_040(session040, requests_mock): - requests_mock.get(API_URL + "/collections/SENTINEL2", json={ - "properties": { - "cube:dimensions": { - "bands": {"type": "bands", "values": ["B2", "B3"]} - }, - "eo:bands": [ - {"name": "B2", "common_name": "blue"}, - {"name": "B3", "common_name": "green"}, - ] - } - }) - im = ImageCollectionClient.load_collection('SENTINEL2', session=session040) - assert im.metadata.bands == [ - openeo.metadata.Band("B2", "blue", None), - openeo.metadata.Band("B3", "green", None) - ] - - -def test_empty_mask(): - from shapely import geometry - polygon = geometry.Polygon([[1.0, 1.0], [2.0, 1.0], [2.0, 1.0], [1.0, 1.0]]) - - client = ImageCollectionClient(node_id=None, builder=GraphBuilder(), session=None) - - with pytest.raises(ValueError, match=r"Mask .+ has an area of 0.0"): - client.mask(polygon) - - -def test_download(session040, requests_mock, tmpdir): - requests_mock.get(API_URL + "/collections/SENTINEL2", json={"foo": "bar"}) - requests_mock.post(API_URL + '/result', text="tiffdata") - path = tmpdir.join("tmp.tiff") - session040.load_collection("SENTINEL2").download(str(path), format="GTIFF") - assert path.read() == "tiffdata" - - -def 
test_download_pathlib(session040, requests_mock, tmpdir): - requests_mock.get(API_URL + "/collections/SENTINEL2", json={"foo": "bar"}) - requests_mock.post(API_URL + '/result', text="tiffdata") - path = tmpdir.join("tmp.tiff") - session040.load_collection("SENTINEL2").download(pathlib.Path(str(path)), format="GTIFF") - assert path.read() == "tiffdata" - - -def test_download_with_bearer_token(session040, requests_mock, tmpdir): - """https://github.com/Open-EO/openeo-python-client/issues/95""" - requests_mock.get(API_URL + "/collections/SENTINEL2", json={"foo": "bar"}) - requests_mock.get(API_URL + '/credentials/basic', json={"access_token": "w3lc0m3"}) - session040.authenticate_basic("test", "test123") - - def result_callback(request, context): - assert request.headers["Authorization"] == "Bearer w3lc0m3" - return "tiffdata" - - requests_mock.post(API_URL + '/result', text=result_callback) - path = tmpdir.join("tmp.tiff") - session040.load_collection("SENTINEL2").download(str(path), format="GTIFF") - assert path.read() == "tiffdata" diff --git a/tests/rest/test_job.py b/tests/rest/test_job.py index d65f42f77..a57cc7920 100644 --- a/tests/rest/test_job.py +++ b/tests/rest/test_job.py @@ -19,6 +19,7 @@ @pytest.fixture def session040(requests_mock): + # TODO #134 eliminate 0.4.0 support requests_mock.get(API_URL + "/", json={ "api_version": "0.4.0", "endpoints": [{"path": "/credentials/basic", "methods": ["GET"]}] diff --git a/tests/rest/test_job_results.py b/tests/rest/test_job_results.py index 9f6c45b83..acf0dc860 100644 --- a/tests/rest/test_job_results.py +++ b/tests/rest/test_job_results.py @@ -6,6 +6,7 @@ @pytest.fixture def session040(requests_mock): + # TODO #134 eliminate 0.4.0 support requests_mock.get(API_URL + "/", json={"api_version": "0.4.0"}) session = openeo.connect(API_URL) return session
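For reference, a minimal before/after sketch of the client code this patch retires (the backend URL, collection id and band names are illustrative):

    import openeo

    connection = openeo.connect("https://openeo.example").authenticate_basic()

    # 0.4.0 style, removed by this patch:
    #   cube = ImageCollectionClient.load_collection(
    #       session=connection, collection_id="SENTINEL2_L2A", bands=["B04", "B08"])

    # 1.0.0 style, the only supported path from now on:
    cube = connection.load_collection("SENTINEL2_L2A", bands=["B04", "B08"])
    ndvi = (cube.band("B08") - cube.band("B04")) / (cube.band("B08") + cube.band("B04"))
    ndvi.download("ndvi.tiff")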