Merge branch 'issue197-vector-cube-udf'
soxofaan committed Aug 3, 2023
2 parents 29af246 + 5864c8b commit f83d1cf
Showing 7 changed files with 509 additions and 73 deletions.
2 changes: 1 addition & 1 deletion openeo_driver/_version.py
@@ -1 +1 @@
__version__ = "0.59.0a1"
__version__ = "0.60.0a1"
174 changes: 150 additions & 24 deletions openeo_driver/datacube.py
@@ -7,7 +7,7 @@
import io

import geopandas as gpd
import numpy as np
import numpy
import pyproj
import shapely.geometry
import shapely.geometry.base
@@ -17,11 +17,13 @@
import requests

from openeo.metadata import CollectionMetadata
from openeo.util import ensure_dir
from openeo.util import ensure_dir, str_truncate
import openeo.udf
from openeo_driver.datastructs import SarBackscatterArgs, ResolutionMergeArgs, StacAsset
from openeo_driver.errors import FeatureUnsupportedException, InternalException
from openeo_driver.util.geometry import GeometryBufferer, validate_geojson_coordinates
from openeo_driver.util.ioformats import IOFORMATS
from openeo_driver.util.pgparsing import SingleRunUDFProcessGraph
from openeo_driver.util.utm import area_in_square_meters
from openeo_driver.utils import EvalEnv

@@ -214,13 +216,15 @@ class DriverVectorCube:
These components are "joined" on the GeoPandas dataframe's index and the DataArray's first dimension
"""
DIM_GEOMETRIES = "geometries"
FLATTEN_PREFIX = "vc"
DIM_BANDS = "bands"
DIM_PROPERTIES = "properties"
COLUMN_SELECTION_ALL = "all"
COLUMN_SELECTION_NUMERICAL = "numerical"

def __init__(
self,
geometries: gpd.GeoDataFrame,
cube: Optional[xarray.DataArray] = None,
flatten_prefix: str = FLATTEN_PREFIX,
):
"""
@@ -234,18 +238,77 @@ def __init__(
log.error(f"First cube dim should be {self.DIM_GEOMETRIES!r} but got dims {cube.dims!r}")
raise VectorCubeError("Cube's first dimension is invalid.")
if not geometries.index.equals(cube.indexes[cube.dims[0]]):
log.error(f"Invalid VectorCube components {geometries.index!r} != {cube.indexes[cube.dims[0]]!r}")
log.error(f"Invalid VectorCube components {geometries.index=} != {cube.indexes[cube.dims[0]]=}")
raise VectorCubeError("Incompatible vector cube components")
self._geometries: gpd.GeoDataFrame = geometries
self._cube = cube
self._flatten_prefix = flatten_prefix

def with_cube(self, cube: xarray.DataArray, flatten_prefix: str = FLATTEN_PREFIX) -> "DriverVectorCube":
def with_cube(self, cube: xarray.DataArray) -> "DriverVectorCube":
"""Create new vector cube with same geometries but new cube"""
log.info(f"Creating vector cube with new cube {cube.name!r}")
return type(self)(
geometries=self._geometries, cube=cube, flatten_prefix=flatten_prefix
)
return type(self)(geometries=self._geometries, cube=cube)

@classmethod
def from_geodataframe(
cls,
data: gpd.GeoDataFrame,
*,
columns_for_cube: Union[List[str], str] = COLUMN_SELECTION_NUMERICAL,
dimension_name: str = DIM_PROPERTIES,
) -> "DriverVectorCube":
"""
Build a DriverVectorCube from a given GeoPandas data frame,
using the data frame geometries as vector cube geometries
and other columns (as specified) as cube values along a "bands" dimension.
:param data: geopandas data frame
:param columns_for_cube: which data frame columns to use as cube values.
One of:
- "numerical": automatically pick numerical columns
- "all": use all columns as cube values
- list of column names
:param dimension_name: name of the "bands" dimension
:return: vector cube
"""
available_columns = [c for c in data.columns if c != "geometry"]

if columns_for_cube is None:
# TODO #114: what should default selection be?
columns_for_cube = cls.COLUMN_SELECTION_NUMERICAL

if columns_for_cube == cls.COLUMN_SELECTION_NUMERICAL:
columns_for_cube = [c for c in available_columns if numpy.issubdtype(data[c].dtype, numpy.number)]
elif columns_for_cube == cls.COLUMN_SELECTION_ALL:
columns_for_cube = available_columns
elif isinstance(columns_for_cube, list):
# TODO #114 limit to subset with available columns (and automatically fill in missing columns with nodata)?
columns_for_cube = columns_for_cube
else:
raise ValueError(columns_for_cube)
assert isinstance(columns_for_cube, list)

if columns_for_cube:
cube_df = data[columns_for_cube]
# TODO: remove `columns_for_cube` from geopandas data frame?
# Enabling that triggers failure of some existing tests that use `aggregate_spatial`
# to "enrich" a vector cube with pre-existing properties
# Also see https://github.com/Open-EO/openeo-api/issues/504
# geometries_df = data.drop(columns=columns_for_cube)
geometries_df = data

# TODO: leverage pandas `to_xarray` and xarray `to_array` instead of this manual building?
cube: xarray.DataArray = xarray.DataArray(
data=cube_df.values,
dims=[cls.DIM_GEOMETRIES, dimension_name],
coords={
cls.DIM_GEOMETRIES: data.geometry.index.to_list(),
dimension_name: cube_df.columns,
},
)
return cls(geometries=geometries_df, cube=cube)

else:
return cls(geometries=data)
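
A minimal usage sketch of this new factory (the data frame and column names below are hypothetical, not part of the commit): with the default "numerical" selection, numeric columns become cube values along the "properties" dimension.

import geopandas as gpd
import shapely.geometry
from openeo_driver.datacube import DriverVectorCube

# Hypothetical input: two point features with one numeric and one textual column.
gdf = gpd.GeoDataFrame(
    {
        "pop": [120, 340],  # numeric column: picked up as cube values by default
        "name": ["a", "b"],  # non-numeric column: not part of the cube
        "geometry": [shapely.geometry.Point(1, 2), shapely.geometry.Point(3, 4)],
    }
)
vc = DriverVectorCube.from_geodataframe(gdf)
# vc.get_cube() is a DataArray with dims ("geometries", "properties")
# and a single "properties" coordinate: "pop".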

@classmethod
def from_fiona(
@@ -258,15 +321,21 @@ def from_fiona(
if len(paths) != 1:
# TODO #114 EP-3981: support multiple paths
raise FeatureUnsupportedException(message="Loading a vector cube from multiple files is not supported")
columns_for_cube = (options or {}).get("columns_for_cube", cls.COLUMN_SELECTION_NUMERICAL)
# TODO #114 EP-3981: lazy loading like/with DelayedVector
# note for GeoJSON: will consider Feature.id as well as Feature.properties.id
if "parquet" == driver:
return cls.from_parquet(paths=paths)
return cls.from_parquet(paths=paths, columns_for_cube=columns_for_cube)
else:
return cls(geometries=gpd.read_file(paths[0], driver=driver))
gdf = gpd.read_file(paths[0], driver=driver)
return cls.from_geodataframe(gdf, columns_for_cube=columns_for_cube)

@classmethod
def from_parquet(cls, paths: List[Union[str, Path]]):
def from_parquet(
cls,
paths: List[Union[str, Path]],
columns_for_cube: Union[List[str], str] = COLUMN_SELECTION_NUMERICAL,
):
if len(paths) != 1:
# TODO #114 EP-3981: support multiple paths
raise FeatureUnsupportedException(
@@ -284,10 +353,14 @@ def from_parquet(cls, paths: List[Union[str, Path]]):
if "OGC:CRS84" in str(df.crs) or "WGS 84 (CRS84)" in str(df.crs):
# workaround for not being able to decode ogc:crs84
df.crs = CRS.from_epsg(4326)
return cls(geometries=df)
return cls.from_geodataframe(df, columns_for_cube=columns_for_cube)

@classmethod
def from_geojson(cls, geojson: dict) -> "DriverVectorCube":
def from_geojson(
cls,
geojson: dict,
columns_for_cube: Union[List[str], str] = COLUMN_SELECTION_NUMERICAL,
) -> "DriverVectorCube":
"""Construct vector cube from GeoJson dict structure"""
validate_geojson_coordinates(geojson)
# TODO support more geojson types?
@@ -305,7 +378,8 @@ def from_geojson(cls, geojson: dict) -> "DriverVectorCube":
raise FeatureUnsupportedException(
f"Can not construct DriverVectorCube from {geojson.get('type', type(geojson))!r}"
)
return cls(geometries=gpd.GeoDataFrame.from_features(features))
gdf = gpd.GeoDataFrame.from_features(features)
return cls.from_geodataframe(gdf, columns_for_cube=columns_for_cube)
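
A hedged illustration (not part of the commit) of what this enables: numerical feature properties now flow into the cube through from_geodataframe.

geojson = {
    "type": "FeatureCollection",
    "features": [
        {
            "type": "Feature",
            "geometry": {"type": "Point", "coordinates": [5.1, 51.2]},
            "properties": {"score": 0.7},
        },
    ],
}
vc = DriverVectorCube.from_geojson(geojson)
# vc.get_cube() has dims ("geometries", "properties") with coordinate "score".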

@classmethod
def from_geometry(
@@ -320,7 +394,9 @@ def from_geometry(
geometry = [geometry]
return cls(geometries=gpd.GeoDataFrame(geometry=geometry))

def _as_geopandas_df(self) -> gpd.GeoDataFrame:
def _as_geopandas_df(
self, flatten_prefix: Optional[str] = None, flatten_name_joiner: str = "~"
) -> gpd.GeoDataFrame:
"""Join geometries and cube as a geopandas dataframe"""
# TODO: avoid copy?
df = self._geometries.copy(deep=True)
@@ -331,18 +407,20 @@ def _as_geopandas_df(self) -> gpd.GeoDataFrame:
if self._cube.dims[1:]:
stacked = self._cube.stack(prop=self._cube.dims[1:])
log.info(f"Flattened cube component of vector cube to {stacked.shape[1]} properties")
name_prefix = [flatten_prefix] if flatten_prefix else []
for p in stacked.indexes["prop"]:
name = "~".join(str(x) for x in [self._flatten_prefix] + list(p))
name = flatten_name_joiner.join(str(x) for x in name_prefix + list(p))
# TODO: avoid column collisions?
df[name] = stacked.sel(prop=p)
else:
df[self._flatten_prefix] = self._cube
# TODO: better fallback column/property name in this case?
df[flatten_prefix or "_vc"] = self._cube

return df
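
A quick sketch of the flattening behaviour (illustrative, assuming `vc` is a vector cube whose cube has dims ("geometries", "bands") with band coordinates ["B02", "B03"]):

df = vc._as_geopandas_df(flatten_prefix="agg")
# -> extra columns "agg~B02" and "agg~B03" next to the geometry column
df = vc._as_geopandas_df()
# -> without a prefix, the columns are plain "B02" and "B03"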

def to_geojson(self) -> dict:
def to_geojson(self, flatten_prefix: Optional[str] = None) -> dict:
"""Export as GeoJSON FeatureCollection."""
return shapely.geometry.mapping(self._as_geopandas_df())
return shapely.geometry.mapping(self._as_geopandas_df(flatten_prefix=flatten_prefix))

def to_wkt(self) -> List[str]:
wkts = [str(g) for g in self._geometries.geometry]
@@ -366,7 +444,8 @@ def write_assets(
)
return self.to_legacy_save_result().write_assets(directory)

self._as_geopandas_df().to_file(path, driver=format_info.fiona_driver)
gdf = self._as_geopandas_df(flatten_prefix=options.get("flatten_prefix"))
gdf.to_file(path, driver=format_info.fiona_driver)

if not format_info.multi_file:
# single file format
@@ -461,6 +540,9 @@ def geometry_count(self) -> int:
def get_geometries(self) -> Sequence[shapely.geometry.base.BaseGeometry]:
return self._geometries.geometry

def get_cube(self) -> Optional[xarray.DataArray]:
return self._cube

def get_ids(self) -> Optional[Sequence]:
return self._geometries.get("id")

@@ -471,8 +553,9 @@ def get_xarray_cube_basics(self) -> Tuple[tuple, dict]:
return dims, coords

def __eq__(self, other):
return (isinstance(other, DriverVectorCube)
and np.array_equal(self._as_geopandas_df().values, other._as_geopandas_df().values))
return isinstance(other, DriverVectorCube) and numpy.array_equal(
self._as_geopandas_df().values, other._as_geopandas_df().values
)

def fit_class_random_forest(
self,
@@ -504,6 +587,49 @@ def buffer_points(self, distance: float = 10) -> "DriverVectorCube":
]
)

def apply_dimension(
self,
process: dict,
*,
dimension: str,
target_dimension: Optional[str] = None,
context: Optional[dict] = None,
env: EvalEnv,
) -> "DriverVectorCube":
single_run_udf = SingleRunUDFProcessGraph.parse_or_none(process)

if single_run_udf:
# Process with single "run_udf" node
# TODO: check provided dimension with actual dimension of the cube
if dimension in (self.DIM_BANDS, self.DIM_PROPERTIES) and target_dimension is None:
log.warning(
f"Using experimental feature: DriverVectorCube.apply_dimension along dim {dimension} and empty cube"
)
# TODO: this is a non-standard special case: vector cube with only geometries, but no "cube" data
gdf = self._as_geopandas_df()
feature_collection = openeo.udf.FeatureCollection(id="_", data=gdf)
udf_data = openeo.udf.UdfData(
proj={"EPSG": self._geometries.crs.to_epsg()},
feature_collection_list=[feature_collection],
user_context=context,
)
log.info(f"[run_udf] Running UDF {str_truncate(single_run_udf.udf, width=256)!r} on {udf_data!r}")
result_data = env.backend_implementation.processing.run_udf(udf=single_run_udf.udf, data=udf_data)
log.info(f"[run_udf] UDF resulted in {result_data!r}")

if not isinstance(result_data, openeo.udf.UdfData):
raise ValueError(f"UDF should return UdfData, but got {type(result_data)}")
result_features = result_data.get_feature_collection_list()
if not (result_features and len(result_features) == 1):
raise ValueError(
f"UDF should return single feature collection but got {result_features and len(result_features)}"
)
return DriverVectorCube(geometries=result_features[0].data)

raise FeatureUnsupportedException(
message=f"DriverVectorCube.apply_dimension with {dimension=} and {bool(single_run_udf)=}"
)
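
A hedged sketch of a callback this method accepts (node name and UDF body are illustrative; `vector_cube` and `env` are assumed to exist): the process graph must consist of exactly one `run_udf` node flagged as the result.

process = {
    "runudf1": {
        "process_id": "run_udf",
        "arguments": {
            "data": {"from_parameter": "data"},
            "udf": "# UDF source code that transforms the feature collection",
            "runtime": "Python",
        },
        "result": True,
    }
}
result = vector_cube.apply_dimension(process, dimension="properties", env=env)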


class DriverMlModel:
"""Base class for driver-side 'ml-model' data structures"""
2 changes: 1 addition & 1 deletion openeo_driver/dummy/dummy_backend.py
@@ -265,7 +265,7 @@ def assert_polygon_sequence(geometries: Union[Sequence, BaseMultipartGeometry])
coords=coords,
name="aggregate_spatial",
)
return geometries.with_cube(cube=cube, flatten_prefix="agg")
return geometries.with_cube(cube=cube)
elif isinstance(geometries, str):
geometries = [geometry for geometry in DelayedVector(geometries).geometries]
n_geometries = assert_polygon_sequence(geometries)
45 changes: 45 additions & 0 deletions openeo_driver/util/pgparsing.py
@@ -0,0 +1,45 @@
import dataclasses
from typing import Optional


class NotASingleRunUDFProcessGraph(ValueError):
pass


@dataclasses.dataclass(frozen=True)
class SingleRunUDFProcessGraph:
"""
Container (and parser) for a callback process graph containing only a single `run_udf` node.
"""

data: dict
udf: str
runtime: str
version: Optional[str] = None
context: Optional[dict] = None

@classmethod
def parse(cls, process_graph: dict) -> "SingleRunUDFProcessGraph":
try:
(node,) = process_graph.values()
assert node["process_id"] == "run_udf"
assert node["result"] is True
arguments = node["arguments"]
assert {"data", "udf", "runtime"}.issubset(arguments.keys())

return cls(
data=arguments["data"],
udf=arguments["udf"],
runtime=arguments["runtime"],
version=arguments.get("version"),
context=arguments.get("context") or {},
)
except Exception as e:
raise NotASingleRunUDFProcessGraph(str(e)) from e

@classmethod
def parse_or_none(cls, process_graph: dict) -> Optional["SingleNodeRunUDFProcessGraph"]:
try:
return cls.parse(process_graph=process_graph)
except NotASingleRunUDFProcessGraph:
return None
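
A small usage sketch (not part of the commit) of the parsing behaviour:

from openeo_driver.util.pgparsing import SingleRunUDFProcessGraph

pg = {
    "udf1": {
        "process_id": "run_udf",
        "arguments": {"data": {"from_parameter": "data"}, "udf": "...", "runtime": "Python"},
        "result": True,
    }
}
parsed = SingleRunUDFProcessGraph.parse_or_none(pg)
assert parsed is not None and parsed.runtime == "Python"

# Anything that is not a single run_udf node (e.g. a multi-node graph)
# parses to None instead of raising:
assert SingleRunUDFProcessGraph.parse_or_none({"a": {}, "b": {}}) is None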