Merge branch 'main' into pydap-server
kmuehlbauer authored Nov 7, 2024
2 parents f536b1f + 5515aec commit c279321
Showing 15 changed files with 59 additions and 61 deletions.
pyproject.toml (3 changes: 2 additions & 1 deletion)

@@ -245,11 +245,11 @@ ignore = [
     "E501",
     "E731",
     "UP007",
+    "PERF203",
     "RUF001",
     "RUF002",
     "RUF003",
     "RUF005",
-    "RUF007",
     "RUF012",
 ]
@@ -259,6 +259,7 @@ extend-select = [
     "W",
     "TID", # flake8-tidy-imports (absolute imports)
     "I", # isort
+    "PERF", # Perflint
     "PGH", # pygrep-hooks
     "RUF",
     "UP", # Pyupgrade
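
Context for the configuration change above: adding "PERF" to extend-select turns on ruff's Perflint rule family, "PERF203" (try/except inside a loop) stays suppressed, and removing "RUF007" from the ignore list starts enforcing itertools.pairwise over manual zip() pairing. A minimal sketch of the dict-iteration pattern (PERF102) that most of the changes below fix; the names are illustrative, not taken from this commit:

    counts = {"a": 1, "b": 2}  # toy dict, purely illustrative

    # PERF102: .items() with the value discarded; the fix iterates .keys().
    for key, _value in counts.items():
        print(key)
    for key in counts.keys():  # equivalent, no tuple unpacking per step
        print(key)

    # The mirror case, key discarded, is fixed with .values().
    for _key, value in counts.items():
        print(value)
    for value in counts.values():
        print(value)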
xarray/backends/memory.py (6 changes: 1 addition & 5 deletions)

@@ -27,11 +27,7 @@ def get_variables(self):
         return self._variables

     def get_dimensions(self):
-        dims = {}
-        for v in self._variables.values():
-            for d, s in v.dims.items():
-                dims[d] = s
-        return dims
+        return {d: s for v in self._variables.values() for d, s in v.dims.items()}

     def prepare_variable(self, k, v, *args, **kwargs):
         new_var = Variable(v.dims, np.empty_like(v), v.attrs)
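
The get_dimensions rewrite above is the manual dict-build pattern (Perflint's PERF403) collapsed into a comprehension. A self-contained sketch of the equivalence, using plain dicts as stand-ins for the store's variables:

    # Stand-in for self._variables: maps a name to {dimension: size}.
    variables = {"t": {"time": 10}, "xy": {"x": 4, "y": 5}}

    # Before: nested loops accumulating into a dict.
    dims = {}
    for v in variables.values():
        for d, s in v.items():
            dims[d] = s

    # After: a single dict comprehension with the same result
    # (later entries still win on duplicate keys).
    assert dims == {d: s for v in variables.values() for d, s in v.items()}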
xarray/backends/plugins.py (2 changes: 1 addition & 1 deletion)

@@ -82,7 +82,7 @@ def backends_dict_from_pkg(
 def set_missing_parameters(
     backend_entrypoints: dict[str, type[BackendEntrypoint]],
 ) -> None:
-    for _, backend in backend_entrypoints.items():
+    for backend in backend_entrypoints.values():
         if backend.open_dataset_parameters is None:
             open_dataset = backend.open_dataset
             backend.open_dataset_parameters = detect_parameters(open_dataset)
xarray/core/coordinates.py (2 changes: 1 addition & 1 deletion)

@@ -752,7 +752,7 @@ def _update_coords(
         # check for inconsistent state *before* modifying anything in-place
         dims = calculate_dimensions(variables)
         new_coord_names = set(coords)
-        for dim, _size in dims.items():
+        for dim in dims.keys():
             if dim in variables:
                 new_coord_names.add(dim)

xarray/core/dataset.py (13 changes: 7 additions & 6 deletions)

@@ -5606,7 +5606,7 @@ def _unstack_once(
         new_indexes, clean_index = index.unstack()
         indexes.update(new_indexes)

-        for _name, idx in new_indexes.items():
+        for idx in new_indexes.values():
             variables.update(idx.create_variables(index_vars))

         for name, var in self.variables.items():
@@ -5647,7 +5647,7 @@ def _unstack_full_reindex(
         indexes.update(new_indexes)

         new_index_variables = {}
-        for _name, idx in new_indexes.items():
+        for idx in new_indexes.values():
             new_index_variables.update(idx.create_variables(index_vars))

         new_dim_sizes = {k: v.size for k, v in new_index_variables.items()}
@@ -9364,10 +9364,11 @@ def pad(
         # keep indexes that won't be affected by pad and drop all other indexes
         xindexes = self.xindexes
         pad_dims = set(pad_width)
-        indexes = {}
-        for k, idx in xindexes.items():
-            if not pad_dims.intersection(xindexes.get_all_dims(k)):
-                indexes[k] = idx
+        indexes = {
+            k: idx
+            for k, idx in xindexes.items()
+            if not pad_dims.intersection(xindexes.get_all_dims(k))
+        }

         for name, var in self.variables.items():
             var_pad_width = {k: v for k, v in pad_width.items() if k in var.dims}
xarray/core/merge.py (2 changes: 1 addition & 1 deletion)

@@ -710,7 +710,7 @@ def merge_core(
     coord_names.intersection_update(variables)
     if explicit_coords is not None:
         coord_names.update(explicit_coords)
-    for dim, _size in dims.items():
+    for dim in dims.keys():
         if dim in variables:
             coord_names.add(dim)
     ambiguous_coords = coord_names.intersection(noncoord_names)
xarray/groupers.py (4 changes: 2 additions & 2 deletions)

@@ -9,6 +9,7 @@
 import datetime
 from abc import ABC, abstractmethod
 from dataclasses import dataclass, field
+from itertools import pairwise
 from typing import TYPE_CHECKING, Any, Literal, cast

 import numpy as np
@@ -496,8 +497,7 @@ def factorize(self, group: T_Group) -> EncodedGroups:
         full_index, first_items, codes_ = self._get_index_and_items()
         sbins = first_items.values.astype(np.int64)
         group_indices: GroupIndices = tuple(
-            [slice(i, j) for i, j in zip(sbins[:-1], sbins[1:], strict=True)]
-            + [slice(sbins[-1], None)]
+            [slice(i, j) for i, j in pairwise(sbins)] + [slice(sbins[-1], None)]
         )

         unique_coord = Variable(
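
itertools.pairwise (Python 3.10+) yields consecutive overlapping pairs, exactly what the replaced zip(sbins[:-1], sbins[1:], strict=True) spelled out by hand; this is the RUF007 rule un-ignored in pyproject.toml above. A quick check of the equivalence on toy bin edges:

    from itertools import pairwise

    sbins = [0, 3, 7, 12]  # toy stand-in for the real int64 bin offsets
    assert list(pairwise(sbins)) == list(zip(sbins[:-1], sbins[1:], strict=True))
    # Both yield (0, 3), (3, 7), (7, 12); the trailing open-ended slice
    # still has to be appended separately, as in the code above.
    slices = [slice(i, j) for i, j in pairwise(sbins)] + [slice(sbins[-1], None)]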
xarray/tests/test_backends.py (2 changes: 1 addition & 1 deletion)

@@ -847,7 +847,7 @@ def find_and_validate_array(obj):
             else:
                 raise TypeError(f"{type(obj.array)} is wrapped by {type(obj)}")

-        for _k, v in ds.variables.items():
+        for v in ds.variables.values():
             find_and_validate_array(v._data)

     def test_array_type_after_indexing(self) -> None:
xarray/tests/test_combine.py (2 changes: 1 addition & 1 deletion)

@@ -29,7 +29,7 @@

 def assert_combined_tile_ids_equal(dict1, dict2):
     assert len(dict1) == len(dict2)
-    for k, _v in dict1.items():
+    for k in dict1.keys():
         assert k in dict2.keys()
         assert_equal(dict1[k], dict2[k])

xarray/tests/test_concat.py (58 changes: 28 additions & 30 deletions)

@@ -74,40 +74,38 @@ def create_typed_datasets(
     num_datasets: int = 2, seed: int | None = None
 ) -> list[Dataset]:
     var_strings = ["a", "b", "c", "d", "e", "f", "g", "h"]
-    result = []
     rng = np.random.default_rng(seed)
     lat = rng.standard_normal(size=(1, 4))
     lon = rng.standard_normal(size=(1, 4))
-    for i in range(num_datasets):
-        result.append(
-            Dataset(
-                data_vars={
-                    "float": (["x", "y", "day"], rng.standard_normal(size=(1, 4, 2))),
-                    "float2": (["x", "y", "day"], rng.standard_normal(size=(1, 4, 2))),
-                    "string": (
-                        ["x", "y", "day"],
-                        rng.choice(var_strings, size=(1, 4, 2)),
-                    ),
-                    "int": (["x", "y", "day"], rng.integers(0, 10, size=(1, 4, 2))),
-                    "datetime64": (
-                        ["x", "y", "day"],
-                        np.arange(
-                            np.datetime64("2017-01-01"), np.datetime64("2017-01-09")
-                        ).reshape(1, 4, 2),
-                    ),
-                    "timedelta64": (
-                        ["x", "y", "day"],
-                        np.reshape([pd.Timedelta(days=i) for i in range(8)], [1, 4, 2]),
-                    ),
-                },
-                coords={
-                    "lat": (["x", "y"], lat),
-                    "lon": (["x", "y"], lon),
-                    "day": ["day" + str(i * 2 + 1), "day" + str(i * 2 + 2)],
-                },
-            )
+    return [
+        Dataset(
+            data_vars={
+                "float": (["x", "y", "day"], rng.standard_normal(size=(1, 4, 2))),
+                "float2": (["x", "y", "day"], rng.standard_normal(size=(1, 4, 2))),
+                "string": (
+                    ["x", "y", "day"],
+                    rng.choice(var_strings, size=(1, 4, 2)),
+                ),
+                "int": (["x", "y", "day"], rng.integers(0, 10, size=(1, 4, 2))),
+                "datetime64": (
+                    ["x", "y", "day"],
+                    np.arange(
+                        np.datetime64("2017-01-01"), np.datetime64("2017-01-09")
+                    ).reshape(1, 4, 2),
+                ),
+                "timedelta64": (
+                    ["x", "y", "day"],
+                    np.reshape([pd.Timedelta(days=i) for i in range(8)], [1, 4, 2]),
+                ),
+            },
+            coords={
+                "lat": (["x", "y"], lat),
+                "lon": (["x", "y"], lon),
+                "day": ["day" + str(i * 2 + 1), "day" + str(i * 2 + 2)],
+            },
         )
-    return result
+        for i in range(num_datasets)
+    ]


 def test_concat_compat() -> None:
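
The create_typed_datasets change above is Perflint's PERF401 pattern: a loop whose only job is result.append(...) becomes one comprehension, dropping the result = [] / return result scaffolding. The same transformation in miniature, with toy values rather than the test's Datasets:

    num_datasets = 3

    # Before: accumulator plus append in a loop.
    result = []
    for i in range(num_datasets):
        result.append(["day" + str(i * 2 + 1), "day" + str(i * 2 + 2)])

    # After: a single comprehension producing the same list.
    assert result == [
        ["day" + str(i * 2 + 1), "day" + str(i * 2 + 2)]
        for i in range(num_datasets)
    ]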
xarray/tests/test_dataset.py (4 changes: 2 additions & 2 deletions)

@@ -3036,12 +3036,12 @@ def test_drop_encoding(self) -> None:
         vencoding = {"scale_factor": 10}
         orig.encoding = {"foo": "bar"}

-        for k, _v in orig.variables.items():
+        for k in orig.variables.keys():
             orig[k].encoding = vencoding

         actual = orig.drop_encoding()
         assert actual.encoding == {}
-        for _k, v in actual.variables.items():
+        for v in actual.variables.values():
             assert v.encoding == {}

         assert_equal(actual, orig)
xarray/tests/test_duck_array_ops.py (7 changes: 4 additions & 3 deletions)

@@ -390,12 +390,13 @@ def series_reduce(da, func, dim, **kwargs):
         se = da.to_series()
         return from_series_or_scalar(getattr(se, func)(**kwargs))
     else:
-        da1 = []
         dims = list(da.dims)
         dims.remove(dim)
         d = dims[0]
-        for i in range(len(da[d])):
-            da1.append(series_reduce(da.isel(**{d: i}), func, dim, **kwargs))
+        da1 = [
+            series_reduce(da.isel(**{d: i}), func, dim, **kwargs)
+            for i in range(len(da[d]))
+        ]

         if d in da.coords:
             return concat(da1, dim=da[d])
xarray/tests/test_groupby.py (3 changes: 2 additions & 1 deletion)

@@ -3,6 +3,7 @@
 import datetime
 import operator
 import warnings
+from itertools import pairwise
 from unittest import mock

 import numpy as np
@@ -1732,7 +1733,7 @@ def test_groupby_bins_multidim(self) -> None:
         bincoord = np.array(
             [
                 pd.Interval(left, right, closed="right")
-                for left, right in zip(bins[:-1], bins[1:], strict=True)
+                for left, right in pairwise(bins)
             ],
             dtype=object,
         )
xarray/tests/test_plot.py (10 changes: 5 additions & 5 deletions)

@@ -122,11 +122,11 @@ def property_in_axes_text(
     has the property assigned to property_str
     """
     alltxt: list[mpl.text.Text] = ax.findobj(mpl.text.Text)  # type: ignore[assignment]
-    check = []
-    for t in alltxt:
-        if t.get_text() == target_txt:
-            check.append(plt.getp(t, property) == property_str)
-    return all(check)
+    return all(
+        plt.getp(t, property) == property_str
+        for t in alltxt
+        if t.get_text() == target_txt
+    )


 def easy_array(shape: tuple[int, ...], start: float = 0, stop: float = 1) -> np.ndarray:
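
The property_in_axes_text rewrite above folds a filter-then-collect loop into all(...) over a generator expression; besides dropping the intermediate check list, all() now short-circuits on the first mismatch. The shape of the change on plain tuples:

    texts = [("title", "red"), ("title", "red"), ("label", "blue")]
    target_txt = "title"

    # Only entries matching target_txt are tested; non-matches are skipped.
    assert all(color == "red" for text, color in texts if text == target_txt)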
xarray/tests/test_strategies.py (2 changes: 1 addition & 1 deletion)

@@ -73,7 +73,7 @@ def test_restrict_names(self, data):

 def check_dict_values(dictionary: dict, allowed_attrs_values_types) -> bool:
     """Helper function to assert that all values in recursive dict match one of a set of types."""
-    for _key, value in dictionary.items():
+    for value in dictionary.values():
         if isinstance(value, allowed_attrs_values_types) or value is None:
             continue
         elif isinstance(value, dict):
