From c1965c215d01f3d7d0545b2954c7bab9eaf94f8f Mon Sep 17 00:00:00 2001
From: Justus Magin
Date: Thu, 11 Jul 2024 14:00:12 +0200
Subject: [PATCH] switch the documentation to run with `numpy>=2` (#9177)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* `NaN` → `nan`
* new numpy scalar repr
* unpin `numpy` in the docs [skip-ci]
* try extracting the scalars to pass to `linspace`
* use `numpy` instead of `numpy.array_api` [skip-ci]

  and mention that we're falling back to `numpy.array_api` in `numpy<2.0`

* Update ci/requirements/doc.yml
* Remove 2D cross product doctests
* One more fix

---------

Co-authored-by: Jessica Scheick
Co-authored-by: Deepak Cherian
Co-authored-by: Deepak Cherian
---
 ci/requirements/doc.yml           |  2 +-
 doc/getting-started-guide/faq.rst |  4 +--
 doc/user-guide/computation.rst    |  4 +--
 doc/user-guide/interpolation.rst  |  4 +--
 doc/user-guide/testing.rst        |  5 ++--
 xarray/core/computation.py        | 47 +------------------------------
 xarray/plot/facetgrid.py          |  4 +--
 xarray/plot/utils.py              |  6 ++--
 8 files changed, 16 insertions(+), 60 deletions(-)

diff --git a/ci/requirements/doc.yml b/ci/requirements/doc.yml
index 116eee7f702..cbbbaa16d7a 100644
--- a/ci/requirements/doc.yml
+++ b/ci/requirements/doc.yml
@@ -21,7 +21,7 @@ dependencies:
   - nbsphinx
   - netcdf4>=1.5
   - numba
-  - numpy>=1.21,<2
+  - numpy>=2
   - packaging>=21.3
   - pandas>=1.4,!=2.1.0
   - pooch
diff --git a/doc/getting-started-guide/faq.rst b/doc/getting-started-guide/faq.rst
index 7f99fa77e3a..58d5448cdf5 100644
--- a/doc/getting-started-guide/faq.rst
+++ b/doc/getting-started-guide/faq.rst
@@ -352,9 +352,9 @@ Some packages may have additional functionality beyond what is shown here. You c
 How does xarray handle missing values?
 --------------------------------------

-**xarray can handle missing values using ``np.NaN``**
+**xarray can handle missing values using ``np.nan``**

-- ``np.NaN`` is used to represent missing values in labeled arrays and datasets. It is a commonly used standard for representing missing or undefined numerical data in scientific computing. ``np.NaN`` is a constant value in NumPy that represents "Not a Number" or missing values.
+- ``np.nan`` is used to represent missing values in labeled arrays and datasets. It is a commonly used standard for representing missing or undefined numerical data in scientific computing. ``np.nan`` is a constant value in NumPy that represents "Not a Number" or missing values.

 - Most of xarray's computation methods are designed to automatically handle missing values appropriately.

diff --git a/doc/user-guide/computation.rst b/doc/user-guide/computation.rst
index f99d41be538..23ecbedf61e 100644
--- a/doc/user-guide/computation.rst
+++ b/doc/user-guide/computation.rst
@@ -426,7 +426,7 @@ However, the functions also take missing values in the data into account:

 .. ipython:: python

-    data = xr.DataArray([np.NaN, 2, 4])
+    data = xr.DataArray([np.nan, 2, 4])
     weights = xr.DataArray([8, 1, 1])

     data.weighted(weights).mean()
@@ -444,7 +444,7 @@ If the weights add up to to 0, ``sum`` returns 0:

     data.weighted(weights).sum()

-and ``mean``, ``std`` and ``var`` return ``NaN``:
+and ``mean``, ``std`` and ``var`` return ``nan``:

 .. ipython:: python

diff --git a/doc/user-guide/interpolation.rst b/doc/user-guide/interpolation.rst
index 311e1bf0129..53d8a52b342 100644
--- a/doc/user-guide/interpolation.rst
+++ b/doc/user-guide/interpolation.rst
@@ -292,8 +292,8 @@ Let's see how :py:meth:`~xarray.DataArray.interp` works on real data.
     axes[0].set_title("Raw data")

     # Interpolated data
-    new_lon = np.linspace(ds.lon[0], ds.lon[-1], ds.sizes["lon"] * 4)
-    new_lat = np.linspace(ds.lat[0], ds.lat[-1], ds.sizes["lat"] * 4)
+    new_lon = np.linspace(ds.lon[0].item(), ds.lon[-1].item(), ds.sizes["lon"] * 4)
+    new_lat = np.linspace(ds.lat[0].item(), ds.lat[-1].item(), ds.sizes["lat"] * 4)

     dsi = ds.interp(lat=new_lat, lon=new_lon)
     dsi.air.plot(ax=axes[1])
     @savefig interpolation_sample3.png width=8in
diff --git a/doc/user-guide/testing.rst b/doc/user-guide/testing.rst
index 13279eccb0b..d82d9d7d7d9 100644
--- a/doc/user-guide/testing.rst
+++ b/doc/user-guide/testing.rst
@@ -239,9 +239,10 @@ If the array type you want to generate has an array API-compliant top-level
 name you can use this neat trick:

 .. ipython:: python
-    :okwarning:

-    from numpy import array_api as xp  # available in numpy 1.26.0
+    import numpy as xp  # compatible in numpy 2.0
+
+    # use `import numpy.array_api as xp` in numpy>=1.23,<2.0

     from hypothesis.extra.array_api import make_strategies_namespace

diff --git a/xarray/core/computation.py b/xarray/core/computation.py
index f418d3821c2..5d21d0836b9 100644
--- a/xarray/core/computation.py
+++ b/xarray/core/computation.py
@@ -1064,7 +1064,7 @@ def apply_ufunc(
     supported:

     >>> magnitude(3, 4)
-    5.0
+    np.float64(5.0)
     >>> magnitude(3, np.array([0, 4]))
     array([3., 5.])
     >>> magnitude(array, 0)
@@ -1587,15 +1587,6 @@ def cross(
     array([-3,  6, -3])
     Dimensions without coordinates: dim_0

-    Vector cross-product with 2 dimensions, returns in the perpendicular
-    direction:
-
-    >>> a = xr.DataArray([1, 2])
-    >>> b = xr.DataArray([4, 5])
-    >>> xr.cross(a, b, dim="dim_0")
-    <xarray.DataArray ()> Size: 8B
-    array(-3)
-    Dimensions without coordinates: dim_0

     Vector cross-product with 3 dimensions but zeros at the last axis
     yields the same results as with 2 dimensions:
@@ -1606,42 +1597,6 @@
     array([ 0,  0, -3])
     Dimensions without coordinates: dim_0

-    One vector with dimension 2:
-
-    >>> a = xr.DataArray(
-    ...     [1, 2],
-    ...     dims=["cartesian"],
-    ...     coords=dict(cartesian=(["cartesian"], ["x", "y"])),
-    ... )
-    >>> b = xr.DataArray(
-    ...     [4, 5, 6],
-    ...     dims=["cartesian"],
-    ...     coords=dict(cartesian=(["cartesian"], ["x", "y", "z"])),
-    ... )
-    >>> xr.cross(a, b, dim="cartesian")
-    <xarray.DataArray (cartesian: 3)> Size: 24B
-    array([12, -6, -3])
-    Coordinates:
-      * cartesian  (cartesian) <U1 12B 'x' 'y' 'z'
-
-    >>> a = xr.DataArray(
-    ...     [1, 2],
-    ...     dims=["cartesian"],
-    ...     coords=dict(cartesian=(["cartesian"], ["x", "z"])),
-    ... )
-    >>> b = xr.DataArray(
-    ...     [4, 5, 6],
-    ...     dims=["cartesian"],
-    ...     coords=dict(cartesian=(["cartesian"], ["x", "y", "z"])),
-    ... )
-    >>> xr.cross(a, b, dim="cartesian")
-    <xarray.DataArray (cartesian: 3)> Size: 24B
-    array([-10,   2,   5])
-    Coordinates:
-      * cartesian  (cartesian) <U1 12B 'x' 'y' 'z'
diff --git a/xarray/plot/facetgrid.py b/xarray/plot/facetgrid.py
--- a/xarray/plot/facetgrid.py
+++ b/xarray/plot/facetgrid.py
@@ ... @@ def _get_largest_lims(self) -> dict[str, tuple[float, float]]:
         >>> ds = xr.tutorial.scatter_example_dataset(seed=42)
         >>> fg = ds.plot.scatter(x="A", y="B", hue="y", row="x", col="w")
         >>> round(fg._get_largest_lims()["x"][0], 3)
-        -0.334
+        np.float64(-0.334)
         """
         lims_largest: dict[str, tuple[float, float]] = dict(
             x=(np.inf, -np.inf), y=(np.inf, -np.inf), z=(np.inf, -np.inf)
         )
@@ -817,7 +817,7 @@ def _set_lims(
         >>> fg = ds.plot.scatter(x="A", y="B", hue="y", row="x", col="w")
         >>> fg._set_lims(x=(-0.3, 0.3), y=(0, 2), z=(0, 4))
         >>> fg.axs[0, 0].get_xlim(), fg.axs[0, 0].get_ylim()
-        ((-0.3, 0.3), (0.0, 2.0))
+        ((np.float64(-0.3), np.float64(0.3)), (np.float64(0.0), np.float64(2.0)))
         """
         lims_largest = self._get_largest_lims()
diff --git a/xarray/plot/utils.py b/xarray/plot/utils.py
index 8789bc2f9c2..a0abe0c8cfd 100644
--- a/xarray/plot/utils.py
+++ b/xarray/plot/utils.py
@@ -811,11 +811,11 @@ def _update_axes(
 def _is_monotonic(coord, axis=0):
     """
     >>> _is_monotonic(np.array([0, 1, 2]))
-    True
+    np.True_
     >>> _is_monotonic(np.array([2, 1, 0]))
-    True
+    np.True_
     >>> _is_monotonic(np.array([0, 2, 1]))
-    False
+    np.False_
     """
     if coord.shape[axis] < 3:
         return True
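
For context on the doctest churn above: NumPy 2 adopted NEP 51, so scalars now repr with their type spelled out, and the `np.NaN` alias was removed in favour of `np.nan`. A minimal sketch of that behaviour, outside the patch and assuming only that `numpy>=2` is installed (everything below is plain NumPy API):

    import numpy as np

    # NEP 51: scalar reprs now include the dtype, which is why doctest
    # expectations change from "5.0" to "np.float64(5.0)" and from
    # "True"/"False" to "np.True_"/"np.False_".
    print(repr(np.float64(5.0)))   # np.float64(5.0)   (numpy < 2 printed: 5.0)
    print(repr(np.bool_(True)))    # np.True_          (numpy < 2 printed: True)

    # The np.NaN alias is gone in numpy 2.0; np.nan works on both major versions.
    data = np.array([np.nan, 2.0, 4.0])
    print(repr(np.isnan(data).any()))  # np.True_

    # The interpolation example now passes plain Python scalars to np.linspace
    # via .item(), which returns a Python float for 0-d arrays and DataArrays alike.
    start, stop = data[1].item(), data[2].item()
    print(np.linspace(start, stop, num=5))  # [2.  2.5 3.  3.5 4. ]

Running the same snippet under `numpy<2` prints the bare scalar values, which is exactly the difference the updated doctest outputs capture.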