Skip to content

Commit

Permalink
fix: handle incompatible coords in merge_cubes (#148)
Browse files Browse the repository at this point in the history
* Fix case caused by load_stac

* use compat="override"

* add comment

---------

Co-authored-by: Lukas Weidenholzer <lukas.weidenholzer@eodc.eu>
  • Loading branch information
clausmichele and Lukas Weidenholzer authored Aug 11, 2023
1 parent ede97a8 commit f861d49
Show file tree
Hide file tree
Showing 2 changed files with 32 additions and 1 deletion.
4 changes: 3 additions & 1 deletion openeo_processes_dask/process_implementations/cubes/merge.py
Original file line number Diff line number Diff line change
Expand Up @@ -113,8 +113,10 @@ def merge_cubes(
cube1 = cube1.to_dataset(cube1.openeo.band_dims[0])
cube2 = cube2.to_dataset(cube2.openeo.band_dims[0])

# compat="override" to deal with potentially conflicting coords
# see https://github.com/Open-EO/openeo-processes-dask/pull/148 for context
merged_cube = xr.combine_by_coords(
[cube1, cube2], combine_attrs="drop_conflicts"
[cube1, cube2], combine_attrs="drop_conflicts", compat="override"
)
if isinstance(merged_cube, xr.Dataset):
merged_cube = merged_cube.to_array(dim="bands")
Expand Down
29 changes: 29 additions & 0 deletions tests/test_merge.py
Original file line number Diff line number Diff line change
Expand Up @@ -145,3 +145,32 @@ def test_merge_cubes_type_4(

assert isinstance(merged_cube_2.data, dask.array.Array)
xr.testing.assert_equal(merged_cube_2, cube_1 + 1)


@pytest.mark.parametrize("size", [(6, 5, 4, 1)])
@pytest.mark.parametrize("dtype", [np.float64])
def test_conflicting_coords(
    temporal_interval, bounding_box, random_raster_data, process_registry
):
    """Regression test: merging cubes whose non-dimension coords conflict must not raise.

    Each fake cube gets a different value for the scalar coordinate
    ``s2:processing_baseline`` (as produced by load_stac); merge_cubes must
    tolerate the conflict via ``compat="override"``.
    """
    # See https://github.com/Open-EO/openeo-processes-dask/pull/148 for why this is necessary
    # This is basically broadcasting the smaller datacube and then applying the overlap resolver.
    cube_1 = create_fake_rastercube(
        data=random_raster_data,
        spatial_extent=bounding_box,
        temporal_extent=temporal_interval,
        bands=["B01"],
        backend="dask",
    )
    # Scalar coord that will conflict with cube_2's value on merge.
    cube_1["s2:processing_baseline"] = "05.8"
    cube_2 = create_fake_rastercube(
        data=random_raster_data,
        spatial_extent=bounding_box,
        temporal_extent=temporal_interval,
        bands=["B02"],
        backend="dask",
    )
    # Deliberately different value — this is the conflicting coordinate.
    cube_2["s2:processing_baseline"] = "05.9"

    # Must succeed despite the conflicting coord (previously raised).
    merged_cube_1 = merge_cubes(cube_1, cube_2)

    # Result should still be lazily evaluated (dask-backed).
    assert isinstance(merged_cube_1.data, dask.array.Array)

0 comments on commit f861d49

Please sign in to comment.