diff --git a/xarray/namedarray/daskmanager.py b/xarray/namedarray/daskmanager.py
index df8681ec290..c2b0ed6f72b 100644
--- a/xarray/namedarray/daskmanager.py
+++ b/xarray/namedarray/daskmanager.py
@@ -41,25 +41,6 @@ def __init__(self) -> None:
     def is_chunked_array(self, data: duckarray[Any, Any]) -> bool:
         return is_duck_dask_array(data)
 
-    def normalize_chunks(
-        self,
-        chunks: T_Chunks | _NormalizedChunks,
-        shape: tuple[int, ...] | None = None,
-        limit: int | None = None,
-        dtype: _DType_co | None = None,
-        previous_chunks: _NormalizedChunks | None = None,
-    ) -> Any:
-        """Called by open_dataset"""
-        from dask.array.core import normalize_chunks
-
-        return normalize_chunks(
-            chunks,
-            shape=shape,
-            limit=limit,
-            dtype=dtype,
-            previous_chunks=previous_chunks,
-        )  # type: ignore[no-untyped-call]
-
     def from_array(
         self, data: Any, chunks: T_Chunks | _NormalizedChunks, **kwargs: Any
     ) -> DaskArray | Any:
diff --git a/xarray/namedarray/parallelcompat.py b/xarray/namedarray/parallelcompat.py
index c84813e2001..1451ddf7c5c 100644
--- a/xarray/namedarray/parallelcompat.py
+++ b/xarray/namedarray/parallelcompat.py
@@ -21,10 +21,8 @@
 if TYPE_CHECKING:
     from xarray.namedarray._typing import (
         _Chunks,
-        _DType,
         _DType_co,
         _NormalizedChunks,
-        _ShapeType,
         duckarray,
     )
 
@@ -218,43 +216,6 @@ def is_chunked_array(self, data: duckarray[Any, Any]) -> bool:
         """
         return isinstance(data, self.array_cls)
 
-    @abstractmethod
-    def normalize_chunks(
-        self,
-        chunks: _Chunks | _NormalizedChunks,
-        shape: _ShapeType | None = None,
-        limit: int | None = None,
-        dtype: _DType | None = None,
-        previous_chunks: _NormalizedChunks | None = None,
-    ) -> _NormalizedChunks:
-        """
-        Normalize given chunking pattern into an explicit tuple of tuples representation.
-
-        Exposed primarily because different chunking backends may want to make different decisions about how to
-        automatically chunk along dimensions not given explicitly in the input chunks.
-
-        Called internally by xarray.open_dataset.
-
-        Parameters
-        ----------
-        chunks : tuple, int, dict, or string
-            The chunks to be normalized.
-        shape : Tuple[int]
-            The shape of the array
-        limit : int (optional)
-            The maximum block size to target in bytes,
-            if freedom is given to choose
-        dtype : np.dtype
-        previous_chunks : Tuple[Tuple[int]], optional
-            Chunks from a previous array that we should use for inspiration when
-            rechunking dimensions automatically.
-
-        See Also
-        --------
-        dask.array.core.normalize_chunks
-        """
-        raise NotImplementedError()
-
     @abstractmethod
     def from_array(
         self, data: duckarray[Any, Any], chunks: _Chunks, **kwargs: Any
diff --git a/xarray/tests/test_parallelcompat.py b/xarray/tests/test_parallelcompat.py
index a9d85bee862..57a9eae8730 100644
--- a/xarray/tests/test_parallelcompat.py
+++ b/xarray/tests/test_parallelcompat.py
@@ -6,7 +6,7 @@
 import numpy as np
 import pytest
 
-from xarray.core.types import T_Chunks, T_DuckArray, T_NormalizedChunks
+from xarray.core.types import T_DuckArray, T_NormalizedChunks
 from xarray.namedarray._typing import _Chunks
 from xarray.namedarray.core import NamedArray
 from xarray.namedarray.daskmanager import DaskManager
@@ -79,18 +79,6 @@ def __init__(self):
     def is_chunked_array(self, data: Any) -> bool:
         return isinstance(data, DummyChunkedArray)
 
-    def normalize_chunks(
-        self,
-        chunks: T_Chunks | T_NormalizedChunks,
-        shape: tuple[int, ...] | None = None,
-        limit: int | None = None,
-        dtype: np.dtype | None = None,
-        previous_chunks: T_NormalizedChunks | None = None,
-    ) -> T_NormalizedChunks:
-        from dask.array.core import normalize_chunks
-
-        return normalize_chunks(chunks, shape, limit, dtype, previous_chunks)
-
     def from_array(
         self, data: T_DuckArray | np.typing.ArrayLike, chunks: _Chunks, **kwargs
     ) -> DummyChunkedArray:
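
Note on downstream impact: with normalize_chunks removed from the ChunkManagerEntrypoint ABC, third-party chunk managers no longer need to provide it; any chunk-size normalization a backend requires can instead happen inside its own from_array. Below is a minimal sketch of what a custom entrypoint might look like after this change. MyChunkedArray and MyChunkManager are hypothetical names used only for illustration, and the other required methods of ChunkManagerEntrypoint (e.g. compute, apply_gufunc) are omitted for brevity.

from typing import Any

from xarray.namedarray.parallelcompat import ChunkManagerEntrypoint


class MyChunkedArray:
    """Hypothetical chunked duck array exposing a .chunks attribute."""


class MyChunkManager(ChunkManagerEntrypoint):
    """Sketch of a custom chunk manager; note there is no normalize_chunks hook."""

    def __init__(self) -> None:
        self.array_cls = MyChunkedArray

    def is_chunked_array(self, data: Any) -> bool:
        return isinstance(data, MyChunkedArray)

    def from_array(self, data: Any, chunks: Any, **kwargs: Any) -> MyChunkedArray:
        # Any chunk normalization the backend needs now happens here rather
        # than in a separate normalize_chunks method.
        raise NotImplementedError("backend-specific construction goes here")

    # Other abstract methods of ChunkManagerEntrypoint (e.g. compute,
    # apply_gufunc) are omitted from this sketch.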