Commit: Deprecated partial read/writes in v2

dstansby committed Feb 17, 2025
1 parent 2bf7e45 commit 3b387af

Showing 4 changed files with 39 additions and 21 deletions.
19 changes: 13 additions & 6 deletions docs/release.rst
@@ -14,9 +14,19 @@ Release notes
# re-indented so that it does not show up in the notes.
.. note::
Zarr-Python 2.18.* is expected to be the final release in the 2.* series. Work on Zarr-Python 3.0 is underway.
See `GH1777 <https://github.com/zarr-developers/zarr-python/issues/1777>`_ for more details on the upcoming
3.0 release.
Zarr-Python 2.* is in support mode now, and no new features will be added.


Unreleased
----------

Deprecations
~~~~~~~~~~~~

* Deprecated support for ``partial_decompress`` when creating an array.
This functionality is no longer supported in ``numcodecs``, and will be removed
in ``zarr-python`` 2.19.0.
By :user:`David Stansby <dstansby>`

.. _release_2.18.4:

@@ -40,9 +50,6 @@ Maintenance
the Delta filter (see https://github.com/zarr-developers/numcodecs/issues/653 for more information).
By :user:`David Stansby <dstansby>` (:issue:`2544`).

Deprecations
~~~~~~~~~~~~

.. _release_2.18.3:

2.18.3
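For context on the release note above, here is a minimal sketch (not part of this commit) of the user-visible effect, assuming zarr-python 2.x with this change installed; the store path and array shape are arbitrary illustration values.

import warnings

import zarr

# Create a small on-disk array first (no deprecated argument, so no warning here).
z = zarr.create(shape=(100,), chunks=(10,), store="example.zarr", overwrite=True)

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    # Re-opening the array with partial_decompress set now emits a DeprecationWarning;
    # leaving the argument out (the new default is None) stays silent.
    zarr.Array(z.store, partial_decompress=True)

assert any(issubclass(w.category, DeprecationWarning) for w in caught)
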
20 changes: 11 additions & 9 deletions zarr/core.py
@@ -1,11 +1,13 @@
import binascii
import hashlib
from inspect import stack
import itertools
import math
import operator
import re
from functools import reduce
from functools import partial, reduce
from typing import Any
import warnings

import numpy as np
from numcodecs.compat import ensure_bytes
@@ -90,13 +92,6 @@ class Array:
If True (default), user attributes will be cached for attribute read
operations. If False, user attributes are reloaded from the store prior
to all attribute read operations.
partial_decompress : bool, optional
If True and while the chunk_store is a FSStore and the compression used
is Blosc, when getting data from the array chunks will be partially
read and decompressed when possible.
.. versionadded:: 2.7
write_empty_chunks : bool, optional
If True, all chunks will be stored regardless of their contents. If
False (default), each chunk is compared to the array's fill value prior
@@ -124,7 +119,7 @@ def __init__(
synchronizer=None,
cache_metadata=True,
cache_attrs=True,
partial_decompress=False,
partial_decompress=None,
write_empty_chunks=True,
zarr_version=None,
meta_array=None,
@@ -154,6 +149,13 @@ def __init__(
self._synchronizer = synchronizer
self._cache_metadata = cache_metadata
self._is_view = False
if partial_decompress is not None:
warnings.warn(
"Support for partial decompression is no longer supported in numcodecs. "
"Support for partial decompression will be removed in a future version of zarr-python v2.",
DeprecationWarning,
stacklevel=1,
)
self._partial_decompress = partial_decompress
self._write_empty_chunks = write_empty_chunks
if meta_array is not None:
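The change above follows a common None-sentinel deprecation pattern: the default moves from False to None, so the warning only fires when a caller passes the argument explicitly. A stripped-down, hypothetical sketch of that pattern (ExampleArray is illustrative, not part of zarr-python):

import warnings


class ExampleArray:
    def __init__(self, partial_decompress=None):
        if partial_decompress is not None:
            # Warn only when the caller opted in explicitly; stacklevel=2
            # attributes the warning to the caller's line in this sketch.
            warnings.warn(
                "partial_decompress is deprecated and will be removed in a future release.",
                DeprecationWarning,
                stacklevel=2,
            )
        # Preserve the old default behaviour when nothing was passed.
        self._partial_decompress = bool(partial_decompress)


ExampleArray()                         # no warning
ExampleArray(partial_decompress=True)  # DeprecationWarning
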
6 changes: 1 addition & 5 deletions zarr/creation.py
@@ -466,7 +466,7 @@ def open_array(
object_codec=None,
chunk_store=None,
storage_options=None,
partial_decompress=False,
partial_decompress=None,
write_empty_chunks=True,
*,
zarr_version=None,
@@ -522,10 +522,6 @@ def open_array(
storage_options : dict
If using an fsspec URL to create the store, these will be passed to
the backend implementation. Ignored otherwise.
partial_decompress : bool, optional
If True and while the chunk_store is a FSStore and the compression used
is Blosc, when getting data from the array chunks will be partially
read and decompressed when possible.
write_empty_chunks : bool, optional
If True (default), all chunks will be stored regardless of their
contents. If False, each chunk is compared to the array's fill value
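A hedged sketch of how downstream code that still passes the deprecated argument might silence just this warning until it can be removed; the message regex is an assumption based on the warning text added in zarr/core.py above:

import warnings

warnings.filterwarnings(
    "ignore",
    message=".*partial decompression.*",
    category=DeprecationWarning,
)
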
15 changes: 14 additions & 1 deletion zarr/tests/test_core.py
@@ -2,6 +2,8 @@
import sys
import pickle
import shutil
import re

from typing import Any, Literal, Optional, Tuple, Union, Sequence
import unittest
from itertools import zip_longest
@@ -84,6 +86,11 @@

# noinspection PyMethodMayBeStatic

pytestmark = [
pytest.mark.filterwarnings("ignore:Call to deprecated function .* \_cbuffer\_sizes.*"),
pytest.mark.filterwarnings("ignore:Call to deprecated function .* \_cbuffer\_metainfo.*"),
]


class TestArray:
version = 2
@@ -94,7 +101,7 @@ class TestArray:
dimension_separator: Optional[DIMENSION_SEPARATOR] = None
cache_metadata = True
cache_attrs = True
partial_decompress: bool = False
partial_decompress: Optional[bool] = None
write_empty_chunks = True
read_only = False
storage_transformers: Tuple[Any, ...] = ()
@@ -2481,6 +2488,9 @@ def expected(self):


@pytest.mark.skipif(have_fsspec is False, reason="needs fsspec")
@pytest.mark.filterwarnings(
"ignore:.*Support for partial decompression will be removed in a future version.*"
)
class TestArrayWithFSStorePartialRead(TestArray):
compressor = Blosc(blocksize=256)
partial_decompress = True
@@ -2547,6 +2557,9 @@ def expected(self):


@pytest.mark.skipif(have_fsspec is False, reason="needs fsspec")
@pytest.mark.filterwarnings(
"ignore:.*Support for partial decompression will be removed in a future version.*"
)
class TestArrayWithFSStoreNestedPartialRead(TestArrayWithFSStore):
compressor = Blosc()
dimension_separator = "/"
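For downstream test suites, the same warning can be handled with pytest, mirroring the filterwarnings marks added above. A sketch under the assumption that zarr-python 2.x with this change is installed; the test names and store paths are illustrative only:

import pytest

import zarr


@pytest.mark.filterwarnings(
    "ignore:.*Support for partial decompression will be removed in a future version.*"
)
def test_partial_decompress_still_accepted(tmp_path):
    # The deprecation warning is silenced for this test only.
    z = zarr.create(shape=(10,), chunks=(5,), store=str(tmp_path / "a.zarr"))
    zarr.Array(z.store, partial_decompress=True)


def test_partial_decompress_warns(tmp_path):
    # Or assert that the deprecation is actually emitted.
    z = zarr.create(shape=(10,), chunks=(5,), store=str(tmp_path / "b.zarr"))
    with pytest.warns(DeprecationWarning, match="partial decompression"):
        zarr.Array(z.store, partial_decompress=True)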
