update dependencies (#251)
* update dependencies

* update zonal stats

* Update pyproject.toml

* Update pyproject.toml

* Update pyproject.toml

* Update main.yml

* tests update

* Update pyproject.toml

* Update main.yml

* Update pyproject.toml
ValentinaHutter authored May 21, 2024
1 parent 5e51431 commit f793602
Showing 4 changed files with 7 additions and 103 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/main.yml
@@ -23,7 +23,7 @@ jobs:
 strategy:
   matrix:
     os: [Ubuntu]
-    python-version: ["3.9", "3.10", "3.11"]
+    python-version: ["3.10", "3.11"]
     include:
       - os: Ubuntu
         image: ubuntu-22.04
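
Note (illustration, not part of the commit): the workflow change drops Python 3.9 from the CI test matrix, in line with the new python constraint in pyproject.toml below. GitHub Actions expands the matrix as the cross product of its axes and merges matching include entries into each combination; a minimal Python sketch of that resolution:

# Sketch of how the trimmed matrix above expands: cross product of the
# axes, with each `include` entry merged into the combinations it matches.
from itertools import product

matrix = {"os": ["Ubuntu"], "python-version": ["3.10", "3.11"]}
include = [{"os": "Ubuntu", "image": "ubuntu-22.04"}]

legs = [dict(zip(matrix, combo)) for combo in product(*matrix.values())]
for leg in legs:
    for extra in include:
        # merge when every overlapping key agrees with the leg
        if all(leg.get(k, v) == v for k, v in extra.items()):
            leg.update(extra)
    print(leg)
# {'os': 'Ubuntu', 'python-version': '3.10', 'image': 'ubuntu-22.04'}
# {'os': 'Ubuntu', 'python-version': '3.11', 'image': 'ubuntu-22.04'}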
2 changes: 1 addition & 1 deletion openeo_processes_dask/specs/openeo-processes
Submodule openeo-processes updated 4 files
+78 −0 cab.json
+78 −0 fapar.json
+78 −0 fcover.json
+78 −0 lai.json
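
Note (sketch, not part of the commit): the submodule bump adds four new process specifications, presumably the vegetation/biophysical-parameter processes Cab, FAPAR, FCOVER and LAI. Assuming they follow the usual openEO process schema, with top-level "id", "parameters" and "returns" fields, a spec can be inspected directly:

# Sketch: inspect one of the new process specs pulled in by the submodule.
# Assumes the standard openEO process-schema layout.
import json

with open("openeo_processes_dask/specs/openeo-processes/fapar.json") as f:
    spec = json.load(f)

print(spec["id"])                               # expected: "fapar"
print([p["name"] for p in spec["parameters"]])  # declared parameters
print(spec["returns"]["schema"])                # return type schema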
10 changes: 5 additions & 5 deletions pyproject.toml
@@ -1,6 +1,6 @@
[tool.poetry]
name = "openeo-processes-dask"
version = "2024.5.0"
version = "2024.5.1"
description = "Python implementations of many OpenEO processes, dask-friendly by default."
authors = ["Lukas Weidenholzer <lukas.weidenholzer@eodc.eu>", "Sean Hoyal <sean.hoyal@eodc.eu>", "Valentina Hutter <valentina.hutter@eodc.eu>"]
maintainers = ["EODC Staff <support@eodc.eu>"]
@@ -23,22 +23,22 @@ packages = [
]

[tool.poetry.dependencies]
python = ">=3.9,<3.12"
geopandas = { version = ">=0.11.1,<1", optional = true }
python = ">=3.10,<3.12"
geopandas = { version = "^0.13.0", optional = true }
xarray = { version = ">=2022.11.0", optional = true }
dask = {extras = ["array", "dataframe"], version = ">=2023.4.0", optional = true}
rasterio = { version = "^1.3.4", optional = true }
dask-geopandas = { version = ">=0.2.0,<1", optional = true }
xgboost = { version = ">=1.5.1", optional = true }
rioxarray = { version = ">=0.12.0,<1", optional = true }
openeo-pg-parser-networkx = { version = ">=2023.5.1", optional = true }
odc-geo = { version = ">=0.4.1,<1", optional = true }
odc-geo = { version = "^0.4.3", optional = true }
stac_validator = { version = ">=3.3.1", optional = true }
stackstac = { version = ">=0.4.3", optional = true }
pystac_client = { version = ">=0.6.1", optional = true }
planetary_computer = { version = ">=0.5.1", optional = true }
scipy = "^1.11.3"
xvec = { version = ">=0.1.0", optional = true }
xvec = { version = "0.2.0", optional = true }
joblib = { version = ">=1.3.2", optional = true }
geoparquet = "^0.0.3"
pyarrow = "^15.0.2"
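
Note (sketch, not part of the commit): a quick sanity check that an installed environment satisfies the tightened pins, assuming the optional extras were installed, e.g. with `poetry install --all-extras`:

# Sketch: verify the resolved environment against the constraints above.
import sys
from importlib.metadata import version

assert sys.version_info >= (3, 10), "Python 3.9 support was dropped"
for pkg in ("geopandas", "odc-geo", "xvec"):
    print(pkg, version(pkg))
# Expected: geopandas 0.13.x, odc-geo >= 0.4.3, xvec exactly 0.2.0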
96 changes: 0 additions & 96 deletions tests/test_aggregate.py
@@ -95,99 +95,3 @@ def test_aggregate_temporal_period_numpy_equals_dask(
     assert_numpy_equals_dask_numpy(
         numpy_cube=numpy_cube, dask_cube=dask_cube, func=func
     )
-
-
-@pytest.mark.parametrize("size", [(30, 30, 30, 3)])
-@pytest.mark.parametrize("dtype", [np.int8])
-def test_aggregate_spatial(
-    random_raster_data,
-    bounding_box,
-    temporal_interval,
-    polygon_geometry_small,
-    process_registry,
-):
-    input_cube = create_fake_rastercube(
-        data=random_raster_data,
-        spatial_extent=bounding_box,
-        temporal_extent=temporal_interval,
-        bands=["B02", "B03", "B04"],
-        backend="dask",
-    )
-
-    reducer = partial(
-        process_registry["mean"].implementation,
-        data=ParameterReference(from_parameter="data"),
-    )
-
-    output_cube = aggregate_spatial(
-        data=input_cube, geometries=polygon_geometry_small, reducer=reducer
-    )
-
-    assert len(output_cube.dims) < len(input_cube.dims)
-
-    _process = partial(
-        process_registry["median"].implementation,
-        ignore_nodata=True,
-        data=ParameterReference(from_parameter="data"),
-    )
-
-    reduced_cube = reduce_dimension(data=input_cube, reducer=_process, dimension="t")
-
-    output_cube = aggregate_spatial(
-        data=reduced_cube, geometries=polygon_geometry_small, reducer=reducer
-    )
-
-    assert len(output_cube.dims) < len(reduced_cube.dims)
-
-    gdf = gpd.GeoDataFrame.from_features(polygon_geometry_small, crs="EPSG:4326")
-    xmin, ymin, xmax, ymax = gdf.total_bounds
-
-    expected_values = (
-        reduced_cube.sel(x=slice(xmin, xmax), y=slice(ymin, ymax))
-        .mean(["x", "y"])
-        .values
-    )
-
-    assert (output_cube.values == expected_values).all()
-
-    gdf = gpd.GeoDataFrame.from_features(polygon_geometry_small, crs="EPSG:4326")
-    gdf_equi7 = gdf.to_crs(
-        "+proj=aeqd +lat_0=53 +lon_0=24 +x_0=5837287.81977 +y_0=2121415.69617 +datum=WGS84 +units=m +no_defs"
-    )
-    output_cube_transform = aggregate_spatial(
-        data=reduced_cube, geometries=gdf_equi7, reducer=reducer
-    )
-    assert len(output_cube_transform.dims) == len(output_cube.dims)
-    assert output_cube_transform.shape == output_cube.shape
-
-    geometry_cube = xr.Dataset(
-        data_vars={"variable": (["geometry"], np.arange(len(gdf)))},
-        coords={"geometry": gdf["geometry"].values},
-    ).xvec.set_geom_indexes("geometry", crs=gdf.crs)
-    output_cube_transform = aggregate_spatial(
-        data=reduced_cube, geometries=geometry_cube, reducer=reducer
-    )
-    assert len(output_cube_transform.dims) == len(output_cube.dims)
-    assert output_cube_transform.shape == output_cube.shape
-
-    polygon_geometry_small["crs"] = 4326
-
-    output_cube = aggregate_spatial(
-        data=reduced_cube, geometries=polygon_geometry_small, reducer=reducer
-    )
-
-    assert len(output_cube.dims) < len(reduced_cube.dims)
-
-    geometry_url = "https://raw.githubusercontent.com/ValentinaHutter/polygons/master/polygons_small.json"
-    output_cube = aggregate_spatial(
-        data=reduced_cube, geometries=geometry_url, reducer=reducer
-    )
-
-    assert len(output_cube.geometry) == 38
-
-    geometry = {"type": "Polygon", "coordinates": [[[0, 0], [0, 1], [1, 1], [1, 0]]]}
-    output_cube = aggregate_spatial(
-        data=reduced_cube, geometries=geometry, reducer=reducer
-    )
-
-    assert np.isnan(output_cube.values).all()
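
Note (sketch on toy data, not from the diff): the deleted test validated aggregate_spatial by comparing its zonal mean against a bounding-box mean computed directly with xarray, along these lines:

# Self-contained sketch of the reference computation the deleted test used:
# a zonal mean over a polygon's bounding box, taken directly on the raster.
import numpy as np
import xarray as xr

data = xr.DataArray(
    np.arange(16.0).reshape(4, 4),
    coords={"x": [0.0, 1.0, 2.0, 3.0], "y": [0.0, 1.0, 2.0, 3.0]},
    dims=("x", "y"),
)
xmin, ymin, xmax, ymax = 1.0, 1.0, 2.0, 2.0  # stand-in for gdf.total_bounds
expected = data.sel(x=slice(xmin, xmax), y=slice(ymin, ymax)).mean(["x", "y"])
print(float(expected))  # 7.5 -- the value aggregate_spatial's mean is checked against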
