diff --git a/openeo_processes_dask/process_implementations/cubes/aggregate.py b/openeo_processes_dask/process_implementations/cubes/aggregate.py
index 5bc28b4f..3af1d0d0 100644
--- a/openeo_processes_dask/process_implementations/cubes/aggregate.py
+++ b/openeo_processes_dask/process_implementations/cubes/aggregate.py
@@ -136,9 +136,16 @@ def aggregate_spatial(
             feature["properties"] = {}
         elif feature["properties"] is None:
             feature["properties"] = {}
-    DEFAULT_CRS = (
-        geometries.get("crs", {}).get("properties", {}).get("name", DEFAULT_CRS)
-    )
+    if isinstance(geometries.get("crs", {}), dict):
+        DEFAULT_CRS = (
+            geometries.get("crs", {})
+            .get("properties", {})
+            .get("name", DEFAULT_CRS)
+        )
+    else:
+        DEFAULT_CRS = int(geometries.get("crs", {}))
+    logger.info(f"CRS in geometries: {DEFAULT_CRS}.")
+
     if "type" in geometries and geometries["type"] == "FeatureCollection":
         gdf = gpd.GeoDataFrame.from_features(geometries, crs=DEFAULT_CRS)
     elif "type" in geometries and geometries["type"] in ["Polygon"]:
@@ -146,6 +153,14 @@ def aggregate_spatial(
         gdf = gpd.GeoDataFrame(geometry=[polygon])
         gdf.crs = DEFAULT_CRS
 
+    if isinstance(geometries, xr.Dataset):
+        if hasattr(geometries, "xvec"):
+            gdf = geometries.xvec.to_geodataframe()
+
+    if isinstance(geometries, gpd.GeoDataFrame):
+        gdf = geometries
+
+    gdf = gdf.to_crs(data.rio.crs)
     geometries = gdf.geometry.values
 
     positional_parameters = {"data": 0}
diff --git a/tests/test_aggregate.py b/tests/test_aggregate.py
index b1d7c8d2..5ed18bb4 100644
--- a/tests/test_aggregate.py
+++ b/tests/test_aggregate.py
@@ -3,6 +3,8 @@ import geopandas as gpd
 import numpy as np
 import pytest
+import xarray as xr
+import xvec
 from openeo_pg_parser_networkx.pg_schema import ParameterReference, TemporalInterval
 
 from openeo_processes_dask.process_implementations.cubes.aggregate import (
@@ -148,6 +150,34 @@ def test_aggregate_spatial(
 
     assert (output_cube.values == expected_values).all()
 
+    gdf = gpd.GeoDataFrame.from_features(polygon_geometry_small, crs="EPSG:4326")
+    gdf_equi7 = gdf.to_crs(
+        "+proj=aeqd +lat_0=53 +lon_0=24 +x_0=5837287.81977 +y_0=2121415.69617 +datum=WGS84 +units=m +no_defs"
+    )
+    output_cube_transform = aggregate_spatial(
+        data=reduced_cube, geometries=gdf_equi7, reducer=reducer
+    )
+    assert len(output_cube_transform.dims) == len(output_cube.dims)
+    assert output_cube_transform.shape == output_cube.shape
+
+    geometry_cube = xr.Dataset(
+        data_vars={"variable": (["geometry"], np.arange(len(gdf)))},
+        coords={"geometry": gdf["geometry"].values},
+    ).xvec.set_geom_indexes("geometry", crs=gdf.crs)
+    output_cube_transform = aggregate_spatial(
+        data=reduced_cube, geometries=geometry_cube, reducer=reducer
+    )
+    assert len(output_cube_transform.dims) == len(output_cube.dims)
+    assert output_cube_transform.shape == output_cube.shape
+
+    polygon_geometry_small["crs"] = 4326
+
+    output_cube = aggregate_spatial(
+        data=reduced_cube, geometries=polygon_geometry_small, reducer=reducer
+    )
+
+    assert len(output_cube.dims) < len(reduced_cube.dims)
+
     geometry_url = "https://raw.githubusercontent.com/ValentinaHutter/polygons/master/polygons_small.json"
     output_cube = aggregate_spatial(
         data=reduced_cube, geometries=geometry_url, reducer=reducer