From 4467f4d516733833da80981e5fc2c864d66227c0 Mon Sep 17 00:00:00 2001 From: Stefaan Lippens Date: Tue, 15 Oct 2019 17:13:53 +0200 Subject: [PATCH] Issue #72: drop 0.3-style RestImagery - cross-check check and streamline test_rastercube unit tests in the process --- openeo/rest/imagecollectionclient.py | 1 + openeo/rest/imagery.py | 363 --------------------------- tests/test_imagery.py | 119 --------- tests/test_rastercube.py | 134 +++++----- 4 files changed, 64 insertions(+), 553 deletions(-) delete mode 100644 openeo/rest/imagery.py delete mode 100644 tests/test_imagery.py diff --git a/openeo/rest/imagecollectionclient.py b/openeo/rest/imagecollectionclient.py index 641e10e2a..db591e99d 100644 --- a/openeo/rest/imagecollectionclient.py +++ b/openeo/rest/imagecollectionclient.py @@ -691,6 +691,7 @@ def ndvi(self, name="ndvi") -> 'ImageCollection': } return self.graph_add_process(process_id, args) + @deprecated("use 'linear_scale_range' instead") def stretch_colors(self, min, max) -> 'ImageCollection': """ Color stretching deprecated, use 'linear_scale_range' instead diff --git a/openeo/rest/imagery.py b/openeo/rest/imagery.py deleted file mode 100644 index 39f6dca89..000000000 --- a/openeo/rest/imagery.py +++ /dev/null @@ -1,363 +0,0 @@ -import base64 -from typing import List, Dict, Union - -import cloudpickle -from pandas import Series -from shapely.geometry import Polygon, MultiPolygon, mapping - -from openeo.imagecollection import ImageCollection -from openeo.job import Job -from openeo.rest.connection import Connection -from openeo.rest.job import RESTJob - - -class RestImagery(ImageCollection): - """Class representing an Image Collection. (In the API as 'imagery') - DEPRECATED support 0.3 only. 
- """ - - def __init__(self, parentgraph:Dict,session:Connection): - self.graph = parentgraph - self.session = session - - def _filter_temporal(self, start_date: str, end_date: str) -> 'ImageCollection': - """Drops observations from a collection that have been captured before - a start or after a given end date. - :param start_date: starting date of the filter - :param end_date: ending date of the filter - :return An ImageCollection instance - """ - process_id = 'filter_daterange' - args = { - 'imagery': self.graph, - 'extent': [start_date, end_date] - } - - return self.graph_add_process(process_id, args) - - def filter_bbox(self, west, east, north, south, crs=None, base=None, height=None) -> 'ImageCollection': - """Drops observations from a collection that are located outside - of a given bounding box. - :param left: left boundary (longitude / easting) - :param right: right boundary (longitude / easting) - :param top: top boundary (latitude / northing) - :param bottom: top boundary (latitude / northing) - :param srs: spatial reference system of boundaries as - proj4 or EPSG:12345 like string - :return An ImageCollection instance - """ - process_id = 'filter_bbox' - args = { - 'imagery': self.graph, - 'extent': - { - 'west': west, 'east': east, 'north': north, 'south': south, - 'crs': crs, - } - } - return self.graph_add_process(process_id, args) - - def band_filter(self, bands) -> 'ImageCollection': - """Filter the imagery by the given bands - :param bands: List of band names or single band name as a string. - :return An ImageCollection instance - """ - - process_id = 'filter_bands' - args = { - 'imagery': self.graph, - 'bands': bands - } - return self.graph_add_process(process_id, args) - - def zonal_statistics(self, regions, func, scale=1000, interval="day") -> 'ImageCollection': - """Calculates statistics for each zone specified in a file. - :param regions: GeoJSON or a path to a GeoJSON file containing the - regions. 
For paths you must specify the path to a - user-uploaded file without the user id in the path. - :param func: Statistical function to calculate for the specified - zones. example values: min, max, mean, median, mode - :param scale: A nominal scale in meters of the projection to work - in. Defaults to 1000. - :param interval: Interval to group the time series. Allowed values: - day, wee, month, year. Defaults to day. - :return An ImageCollection instance - """ - regions_geojson = regions - if isinstance(regions,Polygon) or isinstance(regions,MultiPolygon): - regions_geojson = mapping(regions) - process_id = 'zonal_statistics' - args = { - 'imagery': self.graph, - 'regions': regions_geojson, - 'func': func, - 'scale': scale, - 'interval': interval - } - - return self.graph_add_process(process_id, args) - - def apply_pixel(self, bands:List, bandfunction) -> 'ImageCollection': - """Apply a function to the given set of bands in this image collection.""" - pickled_lambda = cloudpickle.dumps(bandfunction) - - process_id = 'apply_pixel' - args = { - 'imagery':self.graph, - 'bands':bands, - 'function': str(base64.b64encode(pickled_lambda), "UTF-8") - } - - return self.graph_add_process(process_id, args) - - def apply_tiles(self, code: str) -> 'ImageCollection': - """Apply a function to the given set of tiles in this image collection. - Code should follow the OpenEO UDF conventions. - :param code: String representing Python code to be executed in the backend. - """ - - process_id = 'apply_tiles' - args = { - 'imagery':self.graph, - 'code':{ - 'language':'python', - 'source':code - } - } - - return self.graph_add_process(process_id, args) - - def aggregate_time(self, temporal_window, aggregationfunction) -> Series : - """ Applies a windowed reduction to a timeseries by applying a user - defined function. - :param temporal_window: The time window to group by - :param aggregationfunction: The function to apply to each time window. - Takes a pandas Timeseries as input. 
- :return A pandas Timeseries object - """ - pickled_lambda = cloudpickle.dumps(aggregationfunction) - - process_id = 'reduce_by_time' - args = { - 'imagery':self.graph, - 'temporal_window': temporal_window, - 'function': str(base64.b64encode(pickled_lambda), "UTF-8") - } - - return self.graph_add_process(process_id, args) - - def min_time(self) -> 'ImageCollection': - """Finds the minimum value of a time series for all bands of the input dataset. - :return An ImageCollection instance - """ - - process_id = 'min_time' - args = { - 'imagery': self.graph - } - - return self.graph_add_process(process_id, args) - - def max_time(self) -> 'ImageCollection': - """Finds the maximum value of a time series for all bands of the input dataset. - :return An ImageCollection instance - """ - - process_id = 'max_time' - - args = { - 'imagery': self.graph - } - - return self.graph_add_process(process_id, args) - - def mean_time(self) -> 'ImageCollection': - """Finds the mean value of a time series for all bands of the input dataset. - :return An ImageCollection instance - """ - - process_id = 'mean_time' - - args = { - 'imagery': self.graph - } - - return self.graph_add_process(process_id, args) - - def median_time(self) -> 'ImageCollection': - """Finds the median value of a time series for all bands of the input dataset. - :return An ImageCollection instance - """ - - process_id = 'median_time' - - args = { - 'imagery': self.graph - } - - return self.graph_add_process(process_id, args) - - def count_time(self) -> 'ImageCollection': - """Counts the number of images with a valid mask in a time series for all bands of the input dataset. 
- :return An ImageCollection instance - """ - - process_id = 'count_time' - - args = { - 'imagery': self.graph - } - - return self.graph_add_process(process_id, args) - - def ndvi(self, red, nir) -> 'ImageCollection': - """ NDVI - :param red: Reference to the red band - :param nir: Reference to the nir band - :return An ImageCollection instance - """ - process_id = 'NDVI' - - args = { - 'imagery': self.graph, - 'red': red, - 'nir': nir - } - - return self.graph_add_process(process_id, args) - - def stretch_colors(self, min, max) -> 'ImageCollection': - """ Color stretching - :param min: Minimum value - :param max: Maximum value - :return An ImageCollection instance - """ - process_id = 'stretch_colors' - args = { - 'imagery': self.graph, - 'min': min, - 'max': max - } - - return self.graph_add_process(process_id, args) - - def mask(self, polygon: Union[Polygon, MultiPolygon], srs="EPSG:4326") -> 'ImageCollection': - """ - Mask the image collection using a polygon. All pixels outside the polygon should be set to the nodata value. - All pixels inside, or intersecting the polygon should retain their original value. - - :param polygon: A polygon, provided as a Shapely Polygon or MultiPolygon - :param srs: The reference system of the provided polygon, by default this is Lat Lon (EPSG:4326). - :return: A new ImageCollection, with the mask applied. - """ - geojson = mapping(polygon) - geojson['crs'] = { - 'type': 'name', - 'properties': { - 'name': srs - } - } - - process_id = 'mask' - - args = { - 'imagery': self.graph, - 'mask_shape': geojson - } - - return self.graph_add_process(process_id, args) - - ####VIEW methods ####### - def timeseries(self, x, y, srs="EPSG:4326") -> Dict: - """ - Extract a time series for the given point location. - - :param x: The x coordinate of the point - :param y: The y coordinate of the point - :param srs: The spatial reference system of the coordinates, by default - this is 'EPSG:4326', where x=longitude and y=latitude. 
- :return: Dict: A timeseries - """ - return self.session.point_timeseries({"process_graph":self.graph}, x, y, srs) - - def polygonal_mean_timeseries(self, polygon: Union[Polygon, MultiPolygon]) -> 'ImageCollection': - """ - Extract a mean time series for the given (multi)polygon. Its points are - expected to be in the EPSG:4326 coordinate - reference system. - - :param polygon: The (multi)polygon - :param srs: The spatial reference system of the coordinates, by default - this is 'EPSG:4326' - :return: ImageCollection - """ - - geojson = mapping(polygon) - geojson['crs'] = { - 'type': 'name', - 'properties': { - 'name': 'EPSG:4326' - } - } - - process_id = 'zonal_statistics' - - args = { - 'imagery': self.graph, - 'regions': geojson, - 'func': 'avg' - } - - return self.graph_add_process(process_id, args) - - def download(self, outputfile:str, **format_options) -> str: - """Extraxts a geotiff from this image collection.""" - return self.session.download(self.graph, outputfile, format_options) - - def tiled_viewing_service(self,**kwargs) -> Dict: - return self.session.create_service(self.graph,**kwargs) - - def send_job(self, out_format=None, **format_options) -> Job: - """ - Sends a job to the backend and returns a ClientJob instance. - :param out_format: String Format of the job result. - :param format_options: String Parameters for the job result format - :return: status: ClientJob resulting job. - """ - if out_format: - return RESTJob(self.session.create_job({"process_graph": self.graph, - 'output': { - 'format': out_format, - 'parameters': format_options - }}).job_id, self.session) - else: - return RESTJob(self.session.create_job({"process_graph": self.graph}).job_id, self.session) - - def execute(self) -> Dict: - """Executes the process graph of the imagery. 
""" - return self.session.execute({"process_graph": self.graph}) - - ####### HELPER methods ####### - - def graph_add_process(self, process_id, args) -> 'ImageCollection': - """ - Returns a new restimagery with an added process with the given process - id and a dictionary of arguments - :param process_id: String, Process Id of the added process. - :param args: Dict, Arguments of the process. - :return: imagery: Instance of the RestImagery class - """ - graph = { - 'process_id': process_id, - - } - - for key, value in args.items(): - graph[key] = value - - #graph = { - # 'process_id': process_id, - # 'args': args - #} - - return RestImagery(graph, self.session) diff --git a/tests/test_imagery.py b/tests/test_imagery.py deleted file mode 100644 index dd584e203..000000000 --- a/tests/test_imagery.py +++ /dev/null @@ -1,119 +0,0 @@ -from unittest import TestCase - -from openeo.rest.imagery import RestImagery - - -class TestImagery(TestCase): - - def setUp(self): - self.processes = RestImagery({},None) - - def test_date_range_filter(self): - new_imagery = self.processes.date_range_filter("2016-01-01", "2016-03-10") - graph = new_imagery.graph - assert graph == {'process_id': 'filter_daterange', 'imagery': {}, 'extent': ["2016-01-01", "2016-03-10"]} - - def test_filter_temporal(self): - new_imagery = self.processes.filter_temporal("2016-01-01", "2016-03-10") - graph = new_imagery.graph - assert graph == {'process_id': 'filter_daterange', 'imagery': {}, 'extent': ["2016-01-01", "2016-03-10"]} - - def test_filter_bbox(self): - im = self.processes.filter_bbox( - west=652000, east=672000, north=5161000, south=5181000, crs="EPSG:32632" - ) - assert im.graph == { - "process_id": "filter_bbox", - "imagery": {}, - "extent": {"west": 652000, "east": 672000, "north": 5161000, "south": 5181000, "crs": "EPSG:32632"} - } - - def test_bbox_filter_nsew(self): - new_imagery = self.processes.bbox_filter( - west=652000, east=672000, north=5161000, south=5181000, crs="EPSG:32632" - ) - 
assert new_imagery.graph == { - "process_id": "filter_bbox", - "imagery": {}, - "extent": {"west": 652000, "east": 672000, "north": 5161000, "south": 5181000, "crs": "EPSG:32632"} - } - - def test_bbox_filter_tblr(self): - new_imagery = self.processes.bbox_filter( - left=652000, right=672000, top=5161000, bottom=5181000, srs="EPSG:32632" - ) - assert new_imagery.graph == { - "process_id": "filter_bbox", - "imagery": {}, - "extent": {"west": 652000, "east": 672000, "north": 5161000, "south": 5181000, "crs": "EPSG:32632"} - } - - def test_bbox_filter_nsew_zero(self): - new_imagery = self.processes.bbox_filter( - north=0, south=0, east=0, west=0, srs="EPSG:32632" - ) - assert new_imagery.graph == { - "process_id": "filter_bbox", - "imagery": {}, - "extent": {"west": 0, "east": 0, "north": 0, "south": 0, "crs": "EPSG:32632"} - } - - def test_apply_pixel(self): - bandFunction = lambda cells,nodata: (cells[3]-cells[2])/(cells[3]+cells[2]) - new_imagery = self.processes.apply_pixel([], bandFunction) - - graph = new_imagery.graph - - self.assertEqual(graph["process_id"],"apply_pixel") - self.assertEqual(graph["imagery"], {}) - self.assertEqual(graph["bands"], []) - self.assertIsNotNone(graph["function"]) - - def test_min_time(self): - new_imagery = self.processes.min_time() - - graph = new_imagery.graph - - self.assertEqual(graph["process_id"], "min_time") - self.assertEqual(graph["imagery"], {}) - - def test_max_time(self): - new_imagery = self.processes.max_time() - - graph = new_imagery.graph - - self.assertEqual(graph["process_id"], "max_time") - self.assertEqual(graph["imagery"], {}) - - def test_ndvi(self): - new_imagery = self.processes.ndvi("B04", "B8A") - - graph = new_imagery.graph - - self.assertEqual(graph["process_id"], "NDVI") - self.assertEqual(graph["imagery"], {}) - self.assertEqual(graph["red"], "B04") - self.assertEqual(graph["nir"], "B8A") - - def test_mask(self): - from shapely import geometry - polygon = geometry.Polygon([[0, 0], [1.9, 0], [1.9, 1.9], 
[0, 1.9]]) - new_imagery = self.processes.mask(polygon) - - graph = new_imagery.graph - - self.assertEqual(graph["process_id"], "mask") - self.assertEqual(graph["mask_shape"], - {'coordinates': (((0.0, 0.0), (1.9, 0.0), (1.9, 1.9), (0.0, 1.9), (0.0, 0.0)),), - 'crs': {'properties': {'name': 'EPSG:4326'}, 'type': 'name'}, - 'type': 'Polygon'}) - - def test_strech_colors(self): - new_imagery = self.processes.stretch_colors(-1, 1) - - graph = new_imagery.graph - - self.assertEqual(graph["process_id"], "stretch_colors") - self.assertEqual(graph["imagery"], {}) - self.assertEqual(graph["min"], -1) - self.assertEqual(graph["max"], 1) diff --git a/tests/test_rastercube.py b/tests/test_rastercube.py index 26061ab8b..78c0a2293 100644 --- a/tests/test_rastercube.py +++ b/tests/test_rastercube.py @@ -3,11 +3,12 @@ import numpy as np import pytest -from mock import MagicMock, patch +import shapely +from mock import MagicMock from openeo.capabilities import Capabilities -from openeo.rest.connection import Connection from openeo.graphbuilder import GraphBuilder +from openeo.rest.connection import Connection from openeo.rest.imagecollectionclient import ImageCollectionClient @@ -97,14 +98,14 @@ def setUp(self): connection.capabilities.return_value = capabilities capabilities.version.return_value = "0.4.0" - self.imagery = ImageCollectionClient(id, builder, connection) + self.img = ImageCollectionClient(id, builder, connection) builder = GraphBuilder() mask_id = builder.process("get_collection", {'name': 'S1_Mask'}) self.mask = ImageCollectionClient(mask_id, builder, connection) def test_filter_bbox(self): - im = self.imagery.filter_bbox( + im = self.img.filter_bbox( west=652000, east=672000, north=5161000, south=5181000, crs="EPSG:32632" ) graph = im.graph[im.node_id] @@ -114,7 +115,7 @@ def test_filter_bbox(self): } def test_filter_bbox_base_height(self): - im = self.imagery.filter_bbox( + im = self.img.filter_bbox( west=652000, east=672000, north=5161000, south=5181000, 
crs="EPSG:32632", base=100, height=200, ) @@ -126,7 +127,7 @@ def test_filter_bbox_base_height(self): } def test_bbox_filter_nsew(self): - im = self.imagery.bbox_filter( + im = self.img.bbox_filter( west=652000, east=672000, north=5161000, south=5181000, crs="EPSG:32632" ) graph = im.graph[im.node_id] @@ -136,7 +137,7 @@ def test_bbox_filter_nsew(self): } def test_bbox_filter_tblr(self): - im = self.imagery.bbox_filter( + im = self.img.bbox_filter( left=652000, right=672000, top=5161000, bottom=5181000, srs="EPSG:32632" ) graph = im.graph[im.node_id] @@ -146,7 +147,7 @@ def test_bbox_filter_tblr(self): } def test_bbox_filter_nsew_zero(self): - im = self.imagery.bbox_filter( + im = self.img.bbox_filter( west=0, east=0, north=0, south=0, crs="EPSG:32632" ) graph = im.graph[im.node_id] @@ -156,61 +157,55 @@ def test_bbox_filter_nsew_zero(self): } def test_min_time(self): - new_imagery = self.imagery.min_time() - - graph = new_imagery.graph[new_imagery.node_id] - - self.assertEqual(graph["process_id"], "reduce") - self.assertIn("data", graph['arguments']) + img = self.img.min_time() + graph = img.graph[img.node_id] + assert graph["process_id"] == "reduce" + assert graph["arguments"]["data"] == {'from_node': 'getcollection1'} + assert graph["arguments"]["dimension"] == "temporal" + callback, = graph["arguments"]["reducer"]["callback"].values() + assert callback == {'arguments': {'data': {'from_argument': 'data'}}, 'process_id': 'min', 'result': True} def test_max_time(self): - new_imagery = self.imagery.max_time() - - graph = new_imagery.graph[new_imagery.node_id] - - self.assertEqual(graph["process_id"], "reduce") - self.assertIn("data", graph['arguments']) + img = self.img.max_time() + graph = img.graph[img.node_id] + assert graph["process_id"] == "reduce" + assert graph["arguments"]["data"] == {'from_node': 'getcollection1'} + assert graph["arguments"]["dimension"] == "temporal" + callback, = graph["arguments"]["reducer"]["callback"].values() + assert callback == 
{'arguments': {'data': {'from_argument': 'data'}}, 'process_id': 'max', 'result': True} def test_reduce_time_udf(self): - new_imagery = self.imagery.reduce_tiles_over_time("my custom code") - - graph = new_imagery.graph[new_imagery.node_id] - - import json - print(json.dumps(graph,indent=2)) - + img = self.img.reduce_tiles_over_time("my custom code") + graph = img.graph[img.node_id] self.assertEqual(graph["process_id"], "reduce") self.assertIn("data", graph['arguments']) def test_ndvi(self): - new_imagery = self.imagery.ndvi() - - graph = new_imagery.graph[new_imagery.node_id] - - self.assertEqual(graph["process_id"], "ndvi") - self.assertIn("data", graph['arguments']) + img = self.img.ndvi() + graph = img.graph[img.node_id] + assert graph["process_id"] == "ndvi" + assert graph["arguments"] == { + 'data': {'from_node': 'getcollection1'}, 'name': 'ndvi' + } def test_mask(self): - from shapely import geometry - polygon = geometry.Polygon([[0, 0], [1.9, 0], [1.9, 1.9], [0, 1.9]]) - new_imagery = self.imagery.mask(polygon) - - graph = new_imagery.graph[new_imagery.node_id] - - self.assertEqual(graph["process_id"], "mask") - self.assertEqual(graph["arguments"]["mask"], - {'coordinates': (((0.0, 0.0), (1.9, 0.0), (1.9, 1.9), (0.0, 1.9), (0.0, 0.0)),), - 'crs': {'properties': {'name': 'EPSG:4326'}, 'type': 'name'}, - 'type': 'Polygon'}) + polygon = shapely.geometry.Polygon([[0, 0], [1.9, 0], [1.9, 1.9], [0, 1.9]]) + img = self.img.mask(polygon) + graph = img.graph[img.node_id] + assert graph["process_id"] == "mask" + assert graph["arguments"] == { + "data": {'from_node': 'getcollection1'}, + "mask": { + 'coordinates': (((0.0, 0.0), (1.9, 0.0), (1.9, 1.9), (0.0, 1.9), (0.0, 0.0)),), + 'crs': {'properties': {'name': 'EPSG:4326'}, 'type': 'name'}, + 'type': 'Polygon' + } + } def test_mask_raster(self): - new_imagery = self.imagery.mask(rastermask=self.mask,replacement=102) - - graph = new_imagery.graph[new_imagery.node_id] - import json - 
print(json.dumps(new_imagery.graph,indent=4))
-
-        expected_mask_node = {
+        img = self.img.mask(rastermask=self.mask, replacement=102)
+        graph = img.graph[img.node_id]
+        assert graph == {
             "process_id": "mask",
             "arguments": {
                 "data": {
@@ -224,26 +219,24 @@
             "result": False
         }
 
-        self.assertDictEqual(expected_mask_node,graph)
-
-    def test_strech_colors(self):
-        new_imagery = self.imagery.stretch_colors(-1, 1)
-
-        graph = new_imagery.graph[new_imagery.node_id]
-
-        self.assertEqual(graph["process_id"], "stretch_colors")
-        self.assertIn("data", graph['arguments'])
-        self.assertEqual(graph["arguments"]["min"], -1)
-        self.assertEqual(graph["arguments"]["max"], 1)
+    def test_stretch_colors(self):
+        img = self.img.stretch_colors(-1, 1)
+        graph = img.graph[img.node_id]
+        assert graph["process_id"] == "stretch_colors"
+        assert graph["arguments"] == {
+            'data': {'from_node': 'getcollection1'},
+            'max': 1,
+            'min': -1,
+        }
 
     def test_apply_kernel(self):
-        kernel = [[0, 1, 0], [1, 1, 1], [0, 1, 0]]
-        new_imagery = self.imagery.apply_kernel(np.asarray(kernel), 3)
-
-        graph = new_imagery.graph[new_imagery.node_id]
-
-        self.assertEqual(graph["process_id"], "apply_kernel")
-        self.assertIn("data", graph["arguments"])
-        self.assertEqual(graph["arguments"]["factor"], 3)
-        self.assertEqual(graph["arguments"]["kernel"], kernel)
+        kernel = [[0, 1, 0], [1, 1, 1], [0, 1, 0]]
+        img = self.img.apply_kernel(np.asarray(kernel), 3)
+        graph = img.graph[img.node_id]
+        assert graph["process_id"] == "apply_kernel"
+        assert graph["arguments"] == {
+            'data': {'from_node': 'getcollection1'},
+            'factor': 3,
+            'kernel': [[0, 1, 0], [1, 1, 1], [0, 1, 0]]
+        }