Fix multiindex level serialization after reset_index #8672

Merged · 4 commits · Jan 31, 2024
3 changes: 3 additions & 0 deletions doc/whats-new.rst
@@ -40,6 +40,9 @@ Deprecations
 Bug fixes
 ~~~~~~~~~
 
+- Fixed a regression that prevented multi-index level coordinates being
+  serialized after resetting or dropping the multi-index (:issue:`8628`, :pull:`8672`).
+  By `Benoit Bovy <https://github.com/benbovy>`_.
 - Fix bug with broadcasting when wrapping array API-compliant classes. (:issue:`8665`, :pull:`8669`)
   By `Tom Nicholas <https://github.com/TomNicholas>`_.
 - Ensure :py:meth:`DataArray.unstack` works when wrapping array API-compliant classes. (:issue:`8666`, :pull:`8668`)
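To illustrate the changelog entry above, a minimal sketch of the GH8628 scenario (variable and file names here are hypothetical, not from the PR; xr.Coordinates.from_pandas_multiindex is the public constructor for multi-index coordinates):

import pandas as pd
import xarray as xr

midx = pd.MultiIndex.from_product([[1, 2], ["a", "b"]], names=("lvl1", "lvl2"))
ds = xr.Dataset(
    {"foo": ("x", [1.0, 2.0, 3.0, 4.0])},
    coords=xr.Coordinates.from_pandas_multiindex(midx, "x"),
)

# Serializing the MultiIndex dimension coordinate itself is still
# unsupported (its values are tuples) ...
# ds.to_netcdf("out.nc")  # raises NotImplementedError

# ... but after resetting the index, the former level coordinates
# "lvl1" and "lvl2" are plain 1-D coordinates and serialize again.
ds.reset_index("x").to_netcdf("out.nc")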
18 changes: 11 additions & 7 deletions xarray/conventions.py
@@ -16,7 +16,7 @@
 )
 from xarray.core.pycompat import is_duck_dask_array
 from xarray.core.utils import emit_user_level_warning
-from xarray.core.variable import Variable
+from xarray.core.variable import IndexVariable, Variable
 
 CF_RELATED_DATA = (
     "bounds",
@@ -84,13 +84,17 @@ def _infer_dtype(array, name=None):
 
 
 def ensure_not_multiindex(var: Variable, name: T_Name = None) -> None:
+    # only the pandas multi-index dimension coordinate cannot be serialized (tuple values)
     if isinstance(var._data, indexing.PandasMultiIndexingAdapter):
-        raise NotImplementedError(
-            f"variable {name!r} is a MultiIndex, which cannot yet be "
-            "serialized. Instead, either use reset_index() "
-            "to convert MultiIndex levels into coordinate variables instead "
-            "or use https://cf-xarray.readthedocs.io/en/latest/coding.html."
-        )
+        if name is None and isinstance(var, IndexVariable):
+            name = var.name
+        if var.dims == (name,):
+            raise NotImplementedError(
+                f"variable {name!r} is a MultiIndex, which cannot yet be "
+                "serialized. Instead, either use reset_index() "
+                "to convert MultiIndex levels into coordinate variables instead "
+                "or use https://cf-xarray.readthedocs.io/en/latest/coding.html."
+            )
 
 
 def _copy_with_dtype(data, dtype: np.typing.DTypeLike):
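For context on the guard itself: after reset_index the former level variables still wrap a PandasMultiIndexingAdapter, but only the dimension coordinate satisfies var.dims == (name,). A sketch with illustrative names (not from the PR) of what each branch sees:

import pandas as pd
import xarray as xr

midx = pd.MultiIndex.from_product([[1, 2], ["a", "b"]], names=("lvl1", "lvl2"))
ds = xr.Dataset(coords=xr.Coordinates.from_pandas_multiindex(midx, "x"))

# Before reset_index: "x" is the multi-index dimension coordinate, so
# var.dims == (name,) holds and serialization is still rejected.
print(ds.variables["x"].dims == ("x",))  # True -> NotImplementedError path

# After reset_index: only the level coordinates remain; their single
# dimension "x" differs from their names, so they now pass the check.
ds_reset = ds.reset_index("x")
for name, var in ds_reset.variables.items():
    print(name, var.dims == (name,))  # lvl1 False, lvl2 False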
5 changes: 5 additions & 0 deletions xarray/tests/test_backends.py
@@ -1245,6 +1245,11 @@
         with self.roundtrip(ds):
             pass
 
+        # regression GH8628 (can serialize reset multi-index level coordinates)
+        ds_reset = ds.reset_index("x")
+        with self.roundtrip(ds_reset) as actual:
+            assert_identical(actual, ds_reset)
+
 
 class NetCDFBase(CFEncodedBase):
     """Tests for all netCDF3 and netCDF4 backends."""
@@ -4467,14 +4472,14 @@
             with open_dataset(tmp, chunks=chunks) as dask_ds:
                 assert_identical(data, dask_ds)
                 with create_tmp_file() as tmp2:
                     dask_ds.to_netcdf(tmp2)
[Check failure, line 4475, GitHub Actions / ubuntu-latest py3.12: TestDask.test_dask_roundtrip Failed: Timeout >180.0s]
                     with open_dataset(tmp2) as on_disk:
                         assert_identical(data, on_disk)
 
     def test_deterministic_names(self) -> None:
         with create_tmp_file() as tmp:
             data = create_test_data()
             data.to_netcdf(tmp)
[Check failure, line 4482, GitHub Actions / ubuntu-latest py3.12: TestDask.test_deterministic_names Failed: Timeout >180.0s]
             with open_mfdataset(tmp, combine="by_coords") as ds:
                 original_names = {k: v.data.name for k, v in ds.data_vars.items()}
             with open_mfdataset(tmp, combine="by_coords") as ds:
@@ -4492,7 +4497,7 @@
         computed = actual.compute()
         assert not actual._in_memory
         assert computed._in_memory
         assert_allclose(actual, computed, decode_bytes=False)
[Check failure, line 4500, GitHub Actions / ubuntu-latest py3.12: TestDask.test_dataarray_compute Failed: Timeout >180.0s]
 
     def test_save_mfdataset_compute_false_roundtrip(self) -> None:
         from dask.delayed import Delayed
@@ -4505,7 +4510,7 @@
                 datasets, [tmp1, tmp2], engine=self.engine, compute=False
             )
             assert isinstance(delayed_obj, Delayed)
             delayed_obj.compute()
[Check failure, line 4513, GitHub Actions / ubuntu-latest py3.12: TestDask.test_save_mfdataset_compute_false_roundtrip Failed: Timeout >180.0s]
             with open_mfdataset(
                 [tmp1, tmp2], combine="nested", concat_dim="x"
             ) as actual:
@@ -4514,7 +4519,7 @@
     def test_load_dataset(self) -> None:
         with create_tmp_file() as tmp:
             original = Dataset({"foo": ("x", np.random.randn(10))})
             original.to_netcdf(tmp)
[Check failure, line 4522, GitHub Actions / ubuntu-latest py3.12: TestDask.test_load_dataset Failed: Timeout >180.0s]
             ds = load_dataset(tmp)
             # this would fail if we used open_dataset instead of load_dataset
             ds.to_netcdf(tmp)
@@ -4522,7 +4527,7 @@
     def test_load_dataarray(self) -> None:
         with create_tmp_file() as tmp:
             original = Dataset({"foo": ("x", np.random.randn(10))})
             original.to_netcdf(tmp)
[Check failure, line 4530, GitHub Actions / ubuntu-latest py3.12: TestDask.test_load_dataarray Failed: Timeout >180.0s]
             ds = load_dataarray(tmp)
             # this would fail if we used open_dataarray instead of
             # load_dataarray
@@ -4535,7 +4540,7 @@
     def test_inline_array(self) -> None:
         with create_tmp_file() as tmp:
             original = Dataset({"foo": ("x", np.random.randn(10))})
             original.to_netcdf(tmp)
[Check failure, line 4543, GitHub Actions / ubuntu-latest py3.12: TestDask.test_inline_array Failed: Timeout >180.0s]
             chunks = {"time": 10}
 
             def num_graph_nodes(obj):
@@ -4588,7 +4593,7 @@
             yield actual, expected
 
     def test_cmp_local_file(self) -> None:
         with self.create_datasets() as (actual, expected):
[Check failure, line 4596, GitHub Actions / ubuntu-latest py3.12: TestPydap.test_cmp_local_file Failed: Timeout >180.0s]
             assert_equal(actual, expected)
 
             # global attributes should be global attributes on the dataset
@@ -4622,7 +4627,7 @@
 
     def test_compatible_to_netcdf(self) -> None:
         # make sure it can be saved as a netcdf
         with self.create_datasets() as (actual, expected):
[Check failure, line 4630, GitHub Actions / ubuntu-latest py3.12: TestPydap.test_compatible_to_netcdf Failed: Timeout >180.0s]
             with create_tmp_file() as tmp_file:
                 actual.to_netcdf(tmp_file)
                 with open_dataset(tmp_file) as actual2:
@@ -4631,7 +4636,7 @@
 
     @requires_dask
     def test_dask(self) -> None:
         with self.create_datasets(chunks={"j": 2}) as (actual, expected):
[Check failure, line 4639, GitHub Actions / ubuntu-latest py3.12: TestPydap.test_dask Failed: Timeout >180.0s]
             assert_equal(actual, expected)