Skip to content

Commit

Permalink
Enforce ruff/refurb rules (FURB) (#2373)
Browse files Browse the repository at this point in the history
* Apply ruff/refurb rule FURB110

FURB110 Replace ternary `if` expression with `or` operator

* Apply ruff/refurb rule FURB118

FURB118 Use `operator.itemgetter(0)` instead of defining a lambda

* Apply ruff/refurb rule FURB140

FURB140 Use `itertools.starmap` instead of the generator

* Apply ruff/refurb rule FURB188

FURB188 Prefer `removesuffix` over conditionally replacing with slice.
(Note: the diffs below actually substitute `rstrip("/")`, which removes *all*
trailing slashes rather than a single `"/"` suffix — behavior differs from
`removesuffix("/")` when a prefix ends in multiple slashes; verify this is
intended.)

* Apply ruff/refurb rules (FURB)

---------

Co-authored-by: Joe Hamman <joe@earthmover.io>
  • Loading branch information
DimitriPapadopoulos and jhamman authored Oct 19, 2024
1 parent afdbb75 commit 9dd9ac6
Show file tree
Hide file tree
Showing 9 changed files with 25 additions and 18 deletions.
1 change: 1 addition & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -214,6 +214,7 @@ extend-select = [
"B", # flake8-bugbear
"C4", # flake8-comprehensions
"FLY", # flynt
"FURB", # refurb
"G", # flake8-logging-format
"I", # isort
"ISC", # flake8-implicit-str-concat
Expand Down
3 changes: 2 additions & 1 deletion src/zarr/abc/store.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@

from abc import ABC, abstractmethod
from asyncio import gather
from itertools import starmap
from typing import TYPE_CHECKING, NamedTuple, Protocol, runtime_checkable

if TYPE_CHECKING:
Expand Down Expand Up @@ -282,7 +283,7 @@ async def _set_many(self, values: Iterable[tuple[str, Buffer]]) -> None:
"""
Insert multiple (key, value) pairs into storage.
"""
await gather(*(self.set(key, value) for key, value in values))
await gather(*starmap(self.set, values))
return

@property
Expand Down
5 changes: 3 additions & 2 deletions src/zarr/core/array.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
import json
from asyncio import gather
from dataclasses import dataclass, field, replace
from itertools import starmap
from logging import getLogger
from typing import TYPE_CHECKING, Any, Generic, Literal, cast, overload

Expand Down Expand Up @@ -816,7 +817,7 @@ def cdata_shape(self) -> ChunkCoords:
Tuple[int]
The shape of the chunk grid for this array.
"""
return tuple(ceildiv(s, c) for s, c in zip(self.shape, self.chunks, strict=False))
return tuple(starmap(ceildiv, zip(self.shape, self.chunks, strict=False)))

@property
def nchunks(self) -> int:
Expand Down Expand Up @@ -1385,7 +1386,7 @@ def cdata_shape(self) -> ChunkCoords:
"""
The shape of the chunk grid for this array.
"""
return tuple(ceildiv(s, c) for s, c in zip(self.shape, self.chunks, strict=False))
return tuple(starmap(ceildiv, zip(self.shape, self.chunks, strict=False)))

@property
def nchunks(self) -> int:
Expand Down
2 changes: 1 addition & 1 deletion src/zarr/core/chunk_grids.py
Original file line number Diff line number Diff line change
Expand Up @@ -188,6 +188,6 @@ def all_chunk_coords(self, array_shape: ChunkCoords) -> Iterator[ChunkCoords]:
def get_nchunks(self, array_shape: ChunkCoords) -> int:
return reduce(
operator.mul,
(ceildiv(s, c) for s, c in zip(array_shape, self.chunk_shape, strict=True)),
itertools.starmap(ceildiv, zip(array_shape, self.chunk_shape, strict=True)),
1,
)
3 changes: 2 additions & 1 deletion src/zarr/core/common.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
import operator
from collections.abc import Iterable, Mapping
from enum import Enum
from itertools import starmap
from typing import (
TYPE_CHECKING,
Any,
Expand Down Expand Up @@ -52,7 +53,7 @@ async def concurrent_map(
items: Iterable[T], func: Callable[..., Awaitable[V]], limit: int | None = None
) -> list[V]:
if limit is None:
return await asyncio.gather(*[func(*item) for item in items])
return await asyncio.gather(*list(starmap(func, items)))

else:
sem = asyncio.Semaphore(limit)
Expand Down
4 changes: 2 additions & 2 deletions src/zarr/core/indexing.py
Original file line number Diff line number Diff line change
Expand Up @@ -1129,7 +1129,7 @@ def __init__(
chunks_multi_index_broadcast = np.broadcast_arrays(*chunks_multi_index)

# remember shape of selection, because we will flatten indices for processing
sel_shape = selection_broadcast[0].shape if selection_broadcast[0].shape else (1,)
sel_shape = selection_broadcast[0].shape or (1,)

# flatten selection
selection_broadcast = tuple(dim_sel.reshape(-1) for dim_sel in selection_broadcast)
Expand All @@ -1150,7 +1150,7 @@ def __init__(
else:
sel_sort = None

shape = selection_broadcast[0].shape if selection_broadcast[0].shape else (1,)
shape = selection_broadcast[0].shape or (1,)

# precompute number of selected items for each chunk
chunk_nitems = np.bincount(chunks_raveled_indices, minlength=nchunks)
Expand Down
3 changes: 1 addition & 2 deletions src/zarr/storage/memory.py
Original file line number Diff line number Diff line change
Expand Up @@ -156,8 +156,7 @@ async def list_prefix(self, prefix: str) -> AsyncGenerator[str, None]:

async def list_dir(self, prefix: str) -> AsyncGenerator[str, None]:
# docstring inherited
if prefix.endswith("/"):
prefix = prefix[:-1]
prefix = prefix.rstrip("/")

if prefix == "":
keys_unique = {k.split("/")[0] for k in self._store_dict}
Expand Down
3 changes: 1 addition & 2 deletions src/zarr/storage/zip.py
Original file line number Diff line number Diff line change
Expand Up @@ -245,8 +245,7 @@ async def list_prefix(self, prefix: str) -> AsyncGenerator[str, None]:

async def list_dir(self, prefix: str) -> AsyncGenerator[str, None]:
# docstring inherited
if prefix.endswith("/"):
prefix = prefix[:-1]
prefix = prefix.rstrip("/")

keys = self._zf.namelist()
seen = set()
Expand Down
19 changes: 12 additions & 7 deletions tests/test_group.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
from __future__ import annotations

import contextlib
import operator
import pickle
import warnings
from typing import TYPE_CHECKING, Any, Literal
Expand Down Expand Up @@ -533,14 +534,14 @@ def test_group_child_iterators(store: Store, zarr_format: ZarrFormat, consolidat
ConsolidatedMetadata(metadata={}),
)

result = sorted(group.groups(), key=lambda x: x[0])
result = sorted(group.groups(), key=operator.itemgetter(0))
assert result == expected_groups

assert sorted(group.groups(), key=lambda x: x[0]) == expected_groups
assert sorted(group.groups(), key=operator.itemgetter(0)) == expected_groups
assert sorted(group.group_keys()) == expected_group_keys
assert sorted(group.group_values(), key=lambda x: x.name) == expected_group_values

assert sorted(group.arrays(), key=lambda x: x[0]) == expected_arrays
assert sorted(group.arrays(), key=operator.itemgetter(0)) == expected_arrays
assert sorted(group.array_keys()) == expected_array_keys
assert sorted(group.array_values(), key=lambda x: x.name) == expected_array_values

Expand Down Expand Up @@ -1000,7 +1001,7 @@ async def test_group_members_async(store: Store, consolidated_metadata: bool) ->
g2 = await g1.create_group("g2")

# immediate children
children = sorted([x async for x in group.members()], key=lambda x: x[0])
children = sorted([x async for x in group.members()], key=operator.itemgetter(0))
assert children == [
("a0", a0),
("g0", g0),
Expand All @@ -1010,7 +1011,7 @@ async def test_group_members_async(store: Store, consolidated_metadata: bool) ->
assert nmembers == 2

# partial
children = sorted([x async for x in group.members(max_depth=1)], key=lambda x: x[0])
children = sorted([x async for x in group.members(max_depth=1)], key=operator.itemgetter(0))
expected = [
("a0", a0),
("g0", g0),
Expand All @@ -1022,7 +1023,9 @@ async def test_group_members_async(store: Store, consolidated_metadata: bool) ->
assert nmembers == 4

# all children
all_children = sorted([x async for x in group.members(max_depth=None)], key=lambda x: x[0])
all_children = sorted(
[x async for x in group.members(max_depth=None)], key=operator.itemgetter(0)
)
expected = [
("a0", a0),
("g0", g0),
Expand Down Expand Up @@ -1053,7 +1056,9 @@ async def test_group_members_async(store: Store, consolidated_metadata: bool) ->
"consolidated_metadata",
None,
)
all_children = sorted([x async for x in group.members(max_depth=None)], key=lambda x: x[0])
all_children = sorted(
[x async for x in group.members(max_depth=None)], key=operator.itemgetter(0)
)
assert len(all_children) == 4
nmembers = await group.nmembers(max_depth=None)
assert nmembers == 4
Expand Down

0 comments on commit 9dd9ac6

Please sign in to comment.