Skip to content

Sort-of-merge-back: v3.12.3 into v3.13.x #6669

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 3 commits into from
Aug 26, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
28 changes: 27 additions & 1 deletion docs/src/whatsnew/3.12.rst
Original file line number Diff line number Diff line change
Expand Up @@ -69,7 +69,6 @@ v3.12.2 (09 May 2025)
:color: primary
:icon: alert
:animate: fade-in
:open:

The patches in this release of Iris include:

Expand All @@ -78,6 +77,23 @@ v3.12.2 (09 May 2025)
operations for many users.


v3.12.3 (22 Aug 2025)
=====================

.. dropdown:: v3.12.3 Patches
:color: primary
:icon: alert
:animate: fade-in
:open:

The patches in this release of Iris include:

#. Improved compatibility with NumPy >= v1.25 for array comparisons, in
response to deprecations around un-broadcastable arrays. Delivered in a
patch release to meet a time-critical need for a specific user.
:ref:`See the full entry for more<3_12_3_array_comparison>`.


📢 Announcements
================

Expand Down Expand Up @@ -217,6 +233,14 @@ v3.12.2 (09 May 2025)
#. `@trexfeathers`_ refactored Iris loading and saving to make it compatible
with Dask version ``2025.4.0`` and above. (:pull:`6451`)

.. _3_12_3_array_comparison:

#. `@trexfeathers`_ and `@ukmo-ccbunney`_ adapted array comparison in response
to NumPy v1.25 deprecating comparison of un-broadcastable arrays. It is
hoped that users will see no difference in behaviour, but please get in touch
if you notice anything. See `NumPy v1.25 expired deprecations`_ and
`numpy#22707`_ for more. (:pull:`6665`)


📚 Documentation
================
Expand Down Expand Up @@ -273,3 +297,5 @@ v3.12.2 (09 May 2025)

.. _SPEC 0: https://scientific-python.org/specs/spec-0000/
.. _Running setuptools commands: https://setuptools.pypa.io/en/latest/deprecated/commands.html
.. _NumPy v1.25 expired deprecations: https://numpy.org/doc/stable/release/1.25.0-notes.html#expired-deprecations
.. _numpy#22707: https://github.com/numpy/numpy/pull/22707
3 changes: 2 additions & 1 deletion lib/iris/_concatenate.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@
from xxhash import xxh3_64

from iris._lazy_data import concatenate as concatenate_arrays
from iris.common.metadata import hexdigest
import iris.coords
from iris.coords import AncillaryVariable, AuxCoord, CellMeasure, DimCoord
import iris.cube
Expand Down Expand Up @@ -792,7 +793,7 @@ def _coordinate_differences(self, other, attr, reason="metadata"):
diff_names = []
for self_key, self_value in self_dict.items():
other_value = other_dict[self_key]
if self_value != other_value:
if hexdigest(self_value) != hexdigest(other_value):
diff_names.append(self_key)
result = (
" " + reason,
Expand Down
4 changes: 4 additions & 0 deletions lib/iris/_constraints.py
Original file line number Diff line number Diff line change
Expand Up @@ -531,6 +531,8 @@ def __init__(self, **attributes):
super().__init__(cube_func=self._cube_func)

def __eq__(self, other):
# Note: equality means that NumPy arrays are not supported for
# AttributeConstraints (get the truth ambiguity error).
eq = (
isinstance(other, AttributeConstraint)
and self._attributes == other._attributes
Expand All @@ -553,6 +555,8 @@ def _cube_func(self, cube):
match = False
break
else:
# Note: equality means that NumPy arrays are not supported
# for AttributeConstraints (get the truth ambiguity error).
if cube_attr != value:
match = False
break
Expand Down
3 changes: 3 additions & 0 deletions lib/iris/common/mixin.py
Original file line number Diff line number Diff line change
Expand Up @@ -115,6 +115,9 @@ def __eq__(self, other):
match = set(self.keys()) == set(other.keys())
if match:
for key, value in self.items():
# TODO: should this use the iris.common.metadata approach of
# using hexdigest? Might be a breaking change for some corner
# cases, so would need a major release.
match = np.array_equal(
np.array(value, ndmin=1), np.array(other[key], ndmin=1)
)
Expand Down
4 changes: 3 additions & 1 deletion lib/iris/coords.py
Original file line number Diff line number Diff line change
Expand Up @@ -772,7 +772,9 @@ def is_compatible(self, other, ignore=None):
ignore = (ignore,)
common_keys = common_keys.difference(ignore)
for key in common_keys:
if np.any(self.attributes[key] != other.attributes[key]):
if not iris.util._attribute_equal(
self.attributes[key], other.attributes[key]
):
compatible = False
break

Expand Down
4 changes: 3 additions & 1 deletion lib/iris/cube.py
Original file line number Diff line number Diff line change
Expand Up @@ -1446,7 +1446,9 @@ def is_compatible(
ignore = (ignore,)
common_keys = common_keys.difference(ignore)
for key in common_keys:
if np.any(self.attributes[key] != other.attributes[key]):
if not iris.util._attribute_equal(
self.attributes[key], other.attributes[key]
):
compatible = False
break

Expand Down
6 changes: 4 additions & 2 deletions lib/iris/fileformats/_structured_array_identification.py
Original file line number Diff line number Diff line change
Expand Up @@ -111,7 +111,9 @@ def __eq__(self, other):

result = NotImplemented
if stride is not None or arr is not None:
result = stride == self.stride and np.all(self.unique_ordered_values == arr)
result = stride == self.stride and np.array_equal(
self.unique_ordered_values, arr
)
return result

def __ne__(self, other):
Expand Down Expand Up @@ -284,7 +286,7 @@ def from_array(cls, arr):
# Do one last sanity check - does the array we've just described
# actually compute the correct array?
constructed_array = structure.construct_array(arr.size)
if not np.all(constructed_array == arr):
if not np.array_equal(constructed_array, arr):
structure = None

return structure
Expand Down
12 changes: 4 additions & 8 deletions lib/iris/fileformats/netcdf/saver.py
Original file line number Diff line number Diff line change
Expand Up @@ -2785,13 +2785,7 @@ def save(
# Find any global attributes which are not the same on *all* cubes.
def attr_values_equal(val1, val2):
# An equality test which also works when some values are numpy arrays (!)
# As done in :meth:`iris.common.mixin.LimitedAttributeDict.__eq__`.
match = val1 == val2
try:
match = bool(match)
except ValueError:
match = match.all()
return match
return iris.util._attribute_equal(val1, val2)

cube0 = cubes[0]
invalid_globals = set(
Expand Down Expand Up @@ -2878,7 +2872,9 @@ def attr_values_equal(val1, val2):
common_keys.intersection_update(keys)
different_value_keys = []
for key in common_keys:
if np.any(attributes[key] != cube.attributes[key]):
if not iris.util._attribute_equal(
attributes[key], cube.attributes[key]
):
different_value_keys.append(key)
common_keys.difference_update(different_value_keys)
local_keys.update(different_value_keys)
Expand Down
19 changes: 13 additions & 6 deletions lib/iris/fileformats/pp_load_rules.py
Original file line number Diff line number Diff line change
Expand Up @@ -139,8 +139,15 @@ def _convert_vertical_coords(
)
coords_and_dims.append((coord, dim))

# Common calc for Depth
try:
svd_lev_eq = brsvd1 == brlev
except ValueError:
# In case of broadcasting errors.
svd_lev_eq = False

# Depth - unbound.
if (len(lbcode) != 5) and (lbvc == 2) and np.all(brsvd1 == brlev):
if (len(lbcode) != 5) and (lbvc == 2) and np.all(svd_lev_eq):
coord = _dim_or_aux(
blev,
standard_name="depth",
Expand All @@ -150,7 +157,7 @@ def _convert_vertical_coords(
coords_and_dims.append((coord, dim))

# Depth - bound.
if (len(lbcode) != 5) and (lbvc == 2) and np.all(brsvd1 != brlev):
if (len(lbcode) != 5) and (lbvc == 2) and np.all(~svd_lev_eq):
coord = _dim_or_aux(
blev,
standard_name="depth",
Expand All @@ -164,10 +171,10 @@ def _convert_vertical_coords(
if (
(len(lbcode) != 5)
and (lbvc == 2)
and (np.any(brsvd1 == brlev) and np.any(brsvd1 != brlev))
and (np.any(svd_lev_eq) and np.any(~svd_lev_eq))
):
lower = np.where(brsvd1 == brlev, blev, brsvd1)
upper = np.where(brsvd1 == brlev, blev, brlev)
lower = np.where(svd_lev_eq, blev, brsvd1)
upper = np.where(svd_lev_eq, blev, brlev)
coord = _dim_or_aux(
blev,
standard_name="depth",
Expand All @@ -189,7 +196,7 @@ def _convert_vertical_coords(
units="1",
)
coords_and_dims.append((coord, dim))
elif np.any(brsvd1 != brlev):
elif np.any(~svd_lev_eq):
# UM populates metadata CORRECTLY,
# so treat it as the expected (bounded) soil depth.
coord = _dim_or_aux(
Expand Down
4 changes: 3 additions & 1 deletion lib/iris/tests/test_coding_standards.py
Original file line number Diff line number Diff line change
Expand Up @@ -223,8 +223,10 @@ def last_change_by_fname():

# Call "git whatchanged" to get the details of all the files and when
# they were last changed.
# TODO: whatchanged is deprecated, find an alternative Git command.
output = subprocess.check_output(
["git", "whatchanged", "--pretty=TIME:%ct"], cwd=IRIS_REPO_DIRPATH
["git", "whatchanged", "--pretty=TIME:%ct", "--i-still-use-this"],
cwd=IRIS_REPO_DIRPATH,
)

output = output.decode().split("\n")
Expand Down
6 changes: 6 additions & 0 deletions lib/iris/tests/unit/coords/test_Coord.py
Original file line number Diff line number Diff line change
Expand Up @@ -832,6 +832,12 @@ def test_different_array_attrs_incompatible(self):
self.other_coord.attributes["array_test"] = np.array([1.0, 2, 777.7])
self.assertFalse(self.test_coord.is_compatible(self.other_coord))

def test_misshaped_array_attrs_incompatible(self):
    # Attribute arrays whose shapes cannot broadcast must simply be
    # reported as incompatible, not raise a broadcast error.
    self.other_coord.attributes["array_test"] = np.array([1.0, 2])
    self.test_coord.attributes["array_test"] = np.array([1.0, 2, 3])
    result = self.test_coord.is_compatible(self.other_coord)
    self.assertFalse(result)


class Test_contiguous_bounds(tests.IrisTest):
def test_1d_coord_no_bounds_warning(self):
Expand Down
6 changes: 6 additions & 0 deletions lib/iris/tests/unit/cube/test_Cube.py
Original file line number Diff line number Diff line change
Expand Up @@ -876,6 +876,12 @@ def test_different_array_attrs_incompatible(self):
self.other_cube.attributes["array_test"] = np.array([1.0, 2, 777.7])
assert not self.test_cube.is_compatible(self.other_cube)

def test_misshaped_array_attrs_incompatible(self):
    # Attribute arrays whose shapes cannot broadcast must simply be
    # reported as incompatible, not raise a broadcast error.
    self.other_cube.attributes["array_test"] = np.array([1.0, 2])
    self.test_cube.attributes["array_test"] = np.array([1.0, 2, 3])
    compatible = self.test_cube.is_compatible(self.other_cube)
    assert not compatible


class Test_rolling_window:
@pytest.fixture(autouse=True)
Expand Down
15 changes: 15 additions & 0 deletions lib/iris/tests/unit/fileformats/netcdf/saver/test_save.py
Original file line number Diff line number Diff line change
Expand Up @@ -79,6 +79,21 @@ def test_attributes_arrays(self):
ds.close()
self.assertArrayEqual(res, np.arange(2))

def test_attributes_arrays_incompatible_shapes(self):
    # Saving cubes whose "bar" attribute arrays have different shapes must
    # compare the attributes without raising a broadcast error.
    cube_a = Cube([1], attributes={"bar": np.arange(2)})
    cube_b = Cube([2], attributes={"bar": np.arange(3)})

    with self.temp_filename("foo.nc") as nc_out:
        save([cube_a, cube_b], nc_out)
        dataset = _thread_safe_nc.DatasetWrapper(nc_out)
        # The clashing attribute must NOT appear as a global attribute.
        with pytest.raises(AttributeError):
            _ = dataset.getncattr("bar")
        # Instead, each variable carries its own array-valued copy.
        for variable in dataset.variables.values():
            attr_value = variable.getncattr("bar")
            self.assertIsInstance(attr_value, np.ndarray)
        dataset.close()

def test_no_special_attribute_clash(self):
# Ensure that saving multiple cubes with netCDF4 protected attributes
# works as expected.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@
"""

import numpy as np
import pytest

from iris.aux_factory import HybridHeightFactory, HybridPressureFactory
from iris.coords import AuxCoord, DimCoord
Expand Down Expand Up @@ -280,6 +281,29 @@ def test_unbounded__vector_no_depth(self):
dim=0,
)

def test_unbounded_incompatible_vectors(self):
    # Confirm this is not vulnerable to the non-broadcastable error.
    lblev = [1, 2, 3]
    blev = [10, 20, 30]
    # brsvd1 is deliberately one element longer than brlev, so the two
    # arrays cannot be broadcast against each other.
    brsvd1 = [5, 15, 25, 35]
    brlev = [5, 15, 25]
    avoided_error = "operands could not be broadcast together"
    try:
        self._check_depth(
            _lbcode(1),
            lblev=lblev,
            blev=blev,
            brsvd1=brsvd1,
            brlev=brlev,
            expect_bounds=False,
            dim=1,
        )
    except ValueError as err:
        if avoided_error in str(err):
            message = f'Test failed to avoid specified error: "{err}"'
            pytest.fail(message)
        # Any other ValueError is unexpected: re-raise so it fails the
        # test loudly instead of being silently swallowed (the original
        # dead `pass` here masked unrelated errors).
        raise

def test_bounded(self):
self._check_depth(
_lbcode(1), lblev=23.0, brlev=22.5, brsvd1=23.5, expect_bounds=True
Expand All @@ -300,6 +324,29 @@ def test_bounded__vector(self):
dim=1,
)

def test_bounded_incompatible_vectors(self):
    # Confirm this is not vulnerable to the non-broadcastable error.
    lblev = [1, 2, 3]
    blev = [10, 20, 30]
    # brsvd1 is deliberately one element longer than brlev, so the two
    # arrays cannot be broadcast against each other.
    brsvd1 = [5, 15, 25, 35]
    brlev = [15, 25, 35]
    avoided_error = "operands could not be broadcast together"
    try:
        self._check_depth(
            _lbcode(1),
            lblev=lblev,
            blev=blev,
            brsvd1=brsvd1,
            brlev=brlev,
            expect_bounds=True,
            dim=1,
        )
    except ValueError as err:
        if avoided_error in str(err):
            message = f'Test failed to avoid specified error: "{err}"'
            pytest.fail(message)
        # Any other ValueError is unexpected: re-raise so it fails the
        # test loudly instead of being silently swallowed (the original
        # dead `pass` here masked unrelated errors).
        raise

def test_cross_section(self):
self._check_depth(_lbcode(ix=1, iy=2), lblev=23.0, expect_match=False)

Expand Down Expand Up @@ -360,6 +407,37 @@ def test_normal__vector(self):
lblev = np.arange(10)
self._check_soil_level(_lbcode(0), lblev=lblev, dim=0)

def test_normal_incompatible_vectors(self):
    # Confirm this is not vulnerable to the non-broadcastable error.
    lbvc = 6
    stash = STASH(1, 1, 1)
    lbcode = _lbcode(0)
    lblev = np.arange(10)
    brsvd1 = [1] * len(lblev)
    # List concatenation: brlev is one element longer than brsvd1, so
    # the two cannot be broadcast against each other.
    brlev = brsvd1 + [1]
    blev, bhlev, bhrlev, brsvd2 = None, None, None, None

    avoided_error = "operands could not be broadcast together"
    try:
        _ = _convert_vertical_coords(
            lbcode=lbcode,
            lbvc=lbvc,
            blev=blev,
            lblev=lblev,
            stash=stash,
            bhlev=bhlev,
            bhrlev=bhrlev,
            brsvd1=brsvd1,
            brsvd2=brsvd2,
            brlev=brlev,
            dim=0,
        )
    except ValueError as err:
        if avoided_error in str(err):
            message = f'Test failed to avoid specified error: "{err}"'
            pytest.fail(message)
        # Any other ValueError is unexpected: re-raise so it fails the
        # test loudly instead of being silently swallowed (the original
        # dead `pass` here masked unrelated errors).
        raise

def test_cross_section(self):
self._check_soil_level(_lbcode(ix=1, iy=2), expect_match=False)

Expand Down
Loading
Loading