Merge pull request #370 from Open-EO/cleanup-logging
For testing on dev. Revert this if it raises any issues.
Showing 5 changed files with 188 additions and 25 deletions.
@@ -0,0 +1,56 @@
from geopyspark import TiledRasterLayer, Extent
from openeo_driver.utils import EvalEnv
from shapely.geometry import MultiPolygon

from openeogeotrellis.backend import GeoPySparkBackendImplementation
from openeogeotrellis.geopysparkdatacube import GeopysparkDataCube


# Note: Ensure that the python environment has all the required modules installed.
# Numpy should be installed before Jep for off-heap memory tiles to work!
#
# Note: In order to run these tests you need to set several environment variables.
# If you use the virtual environment venv (with JEP and Numpy installed):
# 1. LD_LIBRARY_PATH = .../venv/lib/python3.6/site-packages/jep
#    This will look for the shared library 'jep.so'. This is the compiled C code that binds Java and Python objects.
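#    For example, set it in the shell before running pytest (illustrative only;
#    the actual venv location depends on your local setup and is not part of this commit):
#      export LD_LIBRARY_PATH=/path/to/venv/lib/python3.6/site-packages/jep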


def test_chunk_polygon_exception(imagecollection_with_two_bands_and_three_dates):
    # UDF that fails inside a nested helper function; the raised message should
    # surface in the backend's error summary.
    udf_code = """
import xarray
from openeo.udf import XarrayDataCube
def function_in_root():
    raise Exception("This error message should be visible to user")
def apply_datacube(cube: XarrayDataCube, context: dict) -> XarrayDataCube:
    function_in_root()
    array = cube.get_array()
    return XarrayDataCube(array)
"""
    udf_add_to_bands = {
        "udf_process": {
            "arguments": {
                "data": {
                    "from_argument": "dimension_data"
                },
                "udf": udf_code
            },
            "process_id": "run_udf",
            "result": True
        },
    }
    env = EvalEnv()

    polygon1 = Extent(0.0, 0.0, 4.0, 4.0).to_polygon
    chunks = MultiPolygon([polygon1])
    cube: GeopysparkDataCube = imagecollection_with_two_bands_and_three_dates
    try:
        result_cube: GeopysparkDataCube = cube.chunk_polygon(udf_add_to_bands, chunks=chunks, mask_value=None, env=env)
        result_layer: TiledRasterLayer = result_cube.pyramid.levels[0]
        result_layer.to_numpy_rdd().collect()
    except Exception as e:
        error_summary = GeoPySparkBackendImplementation.summarize_exception_static(e)
        print(error_summary.summary)
        assert "This error message should be visible to user" in error_summary.summary
    else:
        raise Exception("There should have been an exception raised in the try clause.")