Fix/pydantic 2 pytest (#259)
* fix resample_spatial dims

* fix pydantic 2 style

* retry tests aggregate_spatial

* limit xarray version

* set min pandas version to 2.0.0
clausmichele authored Jun 17, 2024
1 parent 9267e4c commit d275ad3
Showing 3 changed files with 100 additions and 3 deletions.
pyproject.toml (3 changes: 2 additions, 1 deletion)
@@ -25,7 +25,8 @@ packages = [
 [tool.poetry.dependencies]
 python = ">=3.10,<3.12"
 geopandas = { version = ">=0.11.1,<1", optional = true }
-xarray = { version = ">=2022.11.0", optional = true }
+pandas = { version = ">=2.0.0", optional = true }
+xarray = { version = ">=2022.11.0,<=2024.3.0", optional = true }
 dask = {extras = ["array", "dataframe"], version = ">=2023.4.0", optional = true}
 rasterio = { version = "^1.3.4", optional = true }
 dask-geopandas = { version = ">=0.2.0,<1", optional = true }
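A quick, illustrative way to confirm that an installed environment actually satisfies the new bounds declared above (not part of this commit; it assumes the packaging library is available):

# Illustrative sanity check for the new dependency bounds (not from this commit).
from packaging.version import Version

import pandas as pd
import xarray as xr

assert Version(pd.__version__) >= Version("2.0.0"), pd.__version__
assert Version("2022.11.0") <= Version(xr.__version__) <= Version("2024.3.0"), xr.__version__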
tests/mockdata.py (4 changes: 2 additions, 2 deletions)
@@ -42,8 +42,8 @@ def create_fake_rastercube(
     with warnings.catch_warnings():
         warnings.filterwarnings("ignore", category=DeprecationWarning)
         t_coords = pd.date_range(
-            start=np.datetime64(temporal_extent.__root__[0].__root__),
-            end=np.datetime64(temporal_extent.__root__[1].__root__),
+            start=np.datetime64(temporal_extent.root[0].root),
+            end=np.datetime64(temporal_extent.root[1].root),
             periods=data.shape[2],
         ).values

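The __root__ to root rename in this hunk reflects pydantic's v1 to v2 migration, where custom-root models became RootModel subclasses that expose the wrapped value as the .root attribute. A minimal sketch of the pattern, using made-up stand-in classes rather than the project's real temporal-extent schema:

# Minimal pydantic 2 RootModel sketch; DateString and Interval are hypothetical stand-ins.
from pydantic import RootModel

class DateString(RootModel[str]):
    pass

class Interval(RootModel[list[DateString]]):
    pass

interval = Interval(["2020-01-01", "2020-02-01"])
print(interval.root[0].root)  # pydantic 2 exposes the wrapped value as .root
# The pydantic 1 equivalent was interval.__root__[0].__root__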
tests/test_aggregate.py (96 changes: 96 additions, 0 deletions)
@@ -95,3 +95,99 @@ def test_aggregate_temporal_period_numpy_equals_dask(
     assert_numpy_equals_dask_numpy(
         numpy_cube=numpy_cube, dask_cube=dask_cube, func=func
     )
+
+
+@pytest.mark.parametrize("size", [(30, 30, 30, 3)])
+@pytest.mark.parametrize("dtype", [np.int8])
+def test_aggregate_spatial(
+    random_raster_data,
+    bounding_box,
+    temporal_interval,
+    polygon_geometry_small,
+    process_registry,
+):
+    input_cube = create_fake_rastercube(
+        data=random_raster_data,
+        spatial_extent=bounding_box,
+        temporal_extent=temporal_interval,
+        bands=["B02", "B03", "B04"],
+        backend="dask",
+    )
+
+    reducer = partial(
+        process_registry["mean"].implementation,
+        data=ParameterReference(from_parameter="data"),
+    )
+
+    output_cube = aggregate_spatial(
+        data=input_cube, geometries=polygon_geometry_small, reducer=reducer
+    )
+
+    assert len(output_cube.dims) < len(input_cube.dims)
+
+    _process = partial(
+        process_registry["median"].implementation,
+        ignore_nodata=True,
+        data=ParameterReference(from_parameter="data"),
+    )
+
+    reduced_cube = reduce_dimension(data=input_cube, reducer=_process, dimension="t")
+
+    output_cube = aggregate_spatial(
+        data=reduced_cube, geometries=polygon_geometry_small, reducer=reducer
+    )
+
+    assert len(output_cube.dims) < len(reduced_cube.dims)
+
+    gdf = gpd.GeoDataFrame.from_features(polygon_geometry_small, crs="EPSG:4326")
+    xmin, ymin, xmax, ymax = gdf.total_bounds
+
+    expected_values = (
+        reduced_cube.sel(x=slice(xmin, xmax), y=slice(ymin, ymax))
+        .mean(["x", "y"])
+        .values
+    )
+
+    assert (output_cube.values == expected_values).all()
+
+    gdf = gpd.GeoDataFrame.from_features(polygon_geometry_small, crs="EPSG:4326")
+    gdf_equi7 = gdf.to_crs(
+        "+proj=aeqd +lat_0=53 +lon_0=24 +x_0=5837287.81977 +y_0=2121415.69617 +datum=WGS84 +units=m +no_defs"
+    )
+    output_cube_transform = aggregate_spatial(
+        data=reduced_cube, geometries=gdf_equi7, reducer=reducer
+    )
+    assert len(output_cube_transform.dims) == len(output_cube.dims)
+    assert output_cube_transform.shape == output_cube.shape
+
+    geometry_cube = xr.Dataset(
+        data_vars={"variable": (["geometry"], np.arange(len(gdf)))},
+        coords={"geometry": gdf["geometry"].values},
+    ).xvec.set_geom_indexes("geometry", crs=gdf.crs)
+    output_cube_transform = aggregate_spatial(
+        data=reduced_cube, geometries=geometry_cube, reducer=reducer
+    )
+    assert len(output_cube_transform.dims) == len(output_cube.dims)
+    assert output_cube_transform.shape == output_cube.shape
+
+    polygon_geometry_small["crs"] = 4326
+
+    output_cube = aggregate_spatial(
+        data=reduced_cube, geometries=polygon_geometry_small, reducer=reducer
+    )
+
+    assert len(output_cube.dims) < len(reduced_cube.dims)
+
+    geometry_url = "https://raw.githubusercontent.com/ValentinaHutter/polygons/master/polygons_small.json"
+    output_cube = aggregate_spatial(
+        data=reduced_cube, geometries=geometry_url, reducer=reducer
+    )
+
+    assert len(output_cube.geometry) == 38
+
+    geometry = {"type": "Polygon", "coordinates": [[[0, 0], [0, 1], [1, 1], [1, 0]]]}
+    output_cube = aggregate_spatial(
+        data=reduced_cube, geometries=geometry, reducer=reducer
+    )
+
+    assert np.isnan(output_cube.values).all()
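Assuming a development install with the optional dependencies used above (geopandas, dask, xvec, and friends), the new test can be run on its own, for example with a small runner like this:

# Run only the new aggregate_spatial test; the path is relative to the repository root.
import sys

import pytest

sys.exit(pytest.main(["-q", "tests/test_aggregate.py::test_aggregate_spatial"]))

The equivalent command line is pytest -q tests/test_aggregate.py::test_aggregate_spatial.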
