From 89e72b786994d67daa88e15985720bae925741b6 Mon Sep 17 00:00:00 2001 From: Elias Sadek Date: Tue, 17 Feb 2026 15:18:00 +0000 Subject: [PATCH 01/22] init commit --- lib/iris/tests/__init__.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/lib/iris/tests/__init__.py b/lib/iris/tests/__init__.py index 9ac5dc5322..0b5cc51985 100644 --- a/lib/iris/tests/__init__.py +++ b/lib/iris/tests/__init__.py @@ -222,8 +222,18 @@ def assert_cml(cubes, reference_filename=None, checksum=True): class IrisTest(unittest.TestCase): - """A subclass of unittest.TestCase which provides Iris specific testing functionality.""" + """A subclass of unittest.TestCase which provides Iris specific testing functionality. + .. deprecated:: v3.15.0 in favour of the private module `_shared_utils`, which contains + the majority of these methods converted to snake_case. + + """ + + iris._deprecation.warn_deprecated( + "IrisTest class is now deprecated. " + "The majority of these methods can be found (converted " + "to snake_case) in `_shared_utils`." + ) _assertion_counts: collections.defaultdict[str, int] = collections.defaultdict(int) def _assert_str_same( From 39ac116b0a8a2a4b4cbea3a8d1553ed3df606b8f Mon Sep 17 00:00:00 2001 From: Elias Sadek Date: Wed, 18 Feb 2026 12:12:05 +0000 Subject: [PATCH 02/22] A bunch more deprecation warnings and a whatsnew --- docs/src/whatsnew/latest.rst | 4 +- lib/iris/tests/__init__.py | 68 +++++++++++++++++++++++++++-- lib/iris/tests/graphics/__init__.py | 11 ++++- 3 files changed, 77 insertions(+), 6 deletions(-) diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index 730880b368..eb801cd6f8 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -69,7 +69,9 @@ This document explains the changes made to Iris for this release 🔥 Deprecations =============== -#. N/A +#. 
`@ESadek-MO`_ has deprecated the :class:`~iris.tests.IrisTest` class, and other unittest-based + testing conveniences in favour of the conveniences found in `:mod:`iris/tests/_shared_utils.py``. + (pull:`6950`) 🔗 Dependencies diff --git a/lib/iris/tests/__init__.py b/lib/iris/tests/__init__.py index 0b5cc51985..376c80abed 100644 --- a/lib/iris/tests/__init__.py +++ b/lib/iris/tests/__init__.py @@ -168,7 +168,14 @@ def assert_masked_array_equal(a, b, strict=False): If False (default), the data array equality considers only unmasked elements. + .. deprecated:: v3.15.0 in favour of `_shared_utils.assert_masked_array_equal()` + """ + iris._deprecation.warn_deprecated( + "assert_masked_array_equal()` is now deprecated as part of the efforts " + "to convert from unittest to pytest." + "Please use `_shared_utils.assert_masked_array_equal()` instead." + ) _assert_masked_array(np.testing.assert_array_equal, a, b, strict) @@ -190,7 +197,14 @@ def assert_masked_array_almost_equal(a, b, decimal=6, strict=False): :meth:`numpy.testing.assert_array_almost_equal`, with the meaning 'abs(desired-actual) < 0.5 * 10**(-decimal)' + .. deprecated:: v3.15.0 in favour of `_shared_utils.assert_masked_array_almost_equal()` + """ + iris._deprecation.warn_deprecated( + "assert_masked_array_almost_equal()` is now deprecated as part of the efforts " + "to convert from unittest to pytest." + "Please use `_shared_utils.assert_masked_array_almost_equal()` instead." + ) _assert_masked_array( np.testing.assert_array_almost_equal, a, b, strict, decimal=decimal ) @@ -216,7 +230,13 @@ def assert_cml(cubes, reference_filename=None, checksum=True): When True, causes the CML to include a checksum for each Cube's data. Defaults to True. + .. deprecated:: v3.15.0 in favour of `_shared_utils.assert_CML()` + """ + iris._deprecation.warn_deprecated( + "`assert_cml` is now deprecated as part of the efforts " + "to convert from unittest to pytest. Please use `_shared_utils.assert_CML()` instead." 
+ ) test = IrisTest() test.assertCML(cubes, reference_filename, checksum) @@ -225,13 +245,13 @@ class IrisTest(unittest.TestCase): """A subclass of unittest.TestCase which provides Iris specific testing functionality. .. deprecated:: v3.15.0 in favour of the private module `_shared_utils`, which contains - the majority of these methods converted to snake_case. + the majority of these methods converted to pytest-compliant functions. """ iris._deprecation.warn_deprecated( - "IrisTest class is now deprecated. " - "The majority of these methods can be found (converted " + "IrisTest class is now deprecated as part of the efforts to migrate from unittest to pytest. " + "The majority of these methods can be found as functions (converted " "to snake_case) in `_shared_utils`." ) _assertion_counts: collections.defaultdict[str, int] = collections.defaultdict(int) @@ -916,11 +936,25 @@ def assertEqualAndKind(self, value, expected): class GraphicsTest(graphics.GraphicsTestMixin, IrisTest): + """.. deprecated:: v3.15.0 in favour of `_shared_utils.GraphicsTest.""" + + iris._deprecation.warn_deprecated( + "`GraphicsTest` has been moved to `_shared_utils` as part of the efforts to convert " + "from unittest to pytest." + ) pass class PPTest: - """A mixin class to provide PP-specific utilities to subclasses of tests.IrisTest.""" + """A mixin class to provide PP-specific utilities to subclasses of tests.IrisTest. + + .. deprecated:: v3.15.0 in favour of `_shared_utils.pp_cube_save_test() + """ + + iris._deprecation.warn_deprecated( + "PPTest class is now deprecated as part of the efforts to migrate from unittest to pytest. " + "`cube_save_test()` has been moved to `_shared_utils` as `pp_cube_save_test()`" + ) @contextlib.contextmanager def cube_save_test( @@ -1015,7 +1049,14 @@ def skip_data(fn): class MyDataTests(tests.IrisTest): ... + .. 
deprecated:: v3.15.0 in favour of `_shared_utils.skip_data` + """ + iris._deprecation.warn_deprecated( + "`skip_data` has been moved to `_shared_utils` as part of the efforts " + "to convert from unittest to pytest." + ) + no_data = ( not iris.config.TEST_DATA_DIR or not os.path.isdir(iris.config.TEST_DATA_DIR) @@ -1036,7 +1077,13 @@ def skip_gdal(fn): class MyGeoTiffTests(test.IrisTest): ... + .. deprecated:: v3.15.0 in favour of `_shared_utils.skip_gdal` + """ + iris._deprecation.warn_deprecated( + "`skip_gdal` has been moved to `_shared_utils` as part of the efforts " + "to convert from unittest to pytest." + ) skip = unittest.skipIf(condition=not GDAL_AVAILABLE, reason="Test requires 'gdal'.") return skip(fn) @@ -1072,7 +1119,13 @@ def no_warnings(func): """Provides a decorator to ensure that there are no warnings raised within the test, otherwise the test will fail. + .. deprecated:: v3.15.0 in favour of `_shared_utils.no_warnings` + """ + iris._deprecation.warn_deprecated( + "`no_warnings` has been moved to `_shared_utils` as part of the efforts " + "to convert from unittest to pytest." + ) @functools.wraps(func) def wrapped(self, *args, **kwargs): @@ -1106,7 +1159,14 @@ def env_bin_path(exe_name: str | None = None) -> Path | None: For use in tests which spawn commands which should call executables within the Python environment, since many IDEs (Eclipse, PyCharm) don't automatically include this location in $PATH (as opposed to $PYTHONPATH). + + .. deprecated:: v3.15.0 in favour of `_shared_utils.env_bin_path` + """ + iris._deprecation.warn_deprecated( + "`env_bin_path` has been moved to `_shared_utils` as part of the efforts " + "to convert from unittest to pytest." 
+    )
     exe_path = Path(os.__file__)
     exe_path = (exe_path / "../../../bin").resolve()
     if exe_name is not None:
diff --git a/lib/iris/tests/graphics/__init__.py b/lib/iris/tests/graphics/__init__.py
index 7fb2074ca0..2c9fc0b345 100644
--- a/lib/iris/tests/graphics/__init__.py
+++ b/lib/iris/tests/graphics/__init__.py
@@ -23,6 +23,8 @@
 import filelock
 import pytest
 
+from iris._deprecation import warn_deprecated
+
 # Test for availability of matplotlib.
 # (And remove matplotlib as an iris.tests dependency.)
 try:
@@ -241,7 +243,14 @@ def _create_missing(phash: str) -> None:
 
 
 class GraphicsTestMixin:
-    # TODO: deprecate this in favour of check_graphic_caller.
+    """.. deprecated:: v3.15.0 in favour of `_check_graphic_caller()`."""
+
+    warn_deprecated(
+        "GraphicsTestMixin class is now deprecated as part of the efforts "
+        "to convert from unittest to pytest. "
+        "Please use `_check_graphic_caller()` instead."
+    )
+
     def setUp(self) -> None:
         # Acquire threading non re-entrant blocking lock to ensure
         # thread-safe plotting.

From 36e3444df4556559670948e670d7f17b219695bf Mon Sep 17 00:00:00 2001
From: Elias Sadek
Date: Wed, 18 Feb 2026 12:12:26 +0000
Subject: [PATCH 03/22] A bunch more deprecation warnings and a whatsnew

---
 lib/iris/tests/__init__.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/lib/iris/tests/__init__.py b/lib/iris/tests/__init__.py
index 376c80abed..77b78701eb 100644
--- a/lib/iris/tests/__init__.py
+++ b/lib/iris/tests/__init__.py
@@ -936,7 +936,7 @@ def assertEqualAndKind(self, value, expected):
 
 
 class GraphicsTest(graphics.GraphicsTestMixin, IrisTest):
-    """..
deprecated:: v3.15.0 in favour of `_shared_utils.GraphicsTest`."""
 
     iris._deprecation.warn_deprecated(
         "`GraphicsTest` has been moved to `_shared_utils` as part of the efforts to convert "

From cfb2c3214a56f63a962732dc7b812ccad663cbdb Mon Sep 17 00:00:00 2001
From: Elias Sadek
Date: Wed, 18 Feb 2026 12:22:07 +0000
Subject: [PATCH 04/22] Missed whatsnews from previous pytest work

---
 docs/src/whatsnew/latest.rst | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst
index eb801cd6f8..d13727686c 100644
--- a/docs/src/whatsnew/latest.rst
+++ b/docs/src/whatsnew/latest.rst
@@ -109,6 +109,11 @@ This document explains the changes made to Iris for this release
    `:module:~iris.experimental.geovista` is currently only available for Python \<3.14.
    (:pull:`6816`, :issue:`6775`)
 
+#. `@ESadek-MO`_, `@trexfeathers`_, `@bjlittle`_, `@HGWright`_, `@pp-mo`_,
+   `@stephenworsley`_ and `@ukmo-ccbunney`_ converted the entirety of the tests
+   from unittest to pytest. Iris is now also ruff-PT compliant, save for PT019.
+   (:issue:`6212`, :pull:`6939`)
+
 .. comment
     Whatsnew author names (@github name) in alphabetical order. Note that,
     core dev names are automatically included by the common_links.inc:

From 5afb7487ccf70ad96d26e92c6f3c0d2faa070c28 Mon Sep 17 00:00:00 2001
From: Elias Sadek
Date: Wed, 18 Feb 2026 12:23:44 +0000
Subject: [PATCH 05/22] corrected whatsnew formatting

---
 docs/src/whatsnew/latest.rst | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst
index d13727686c..04e51fa372 100644
--- a/docs/src/whatsnew/latest.rst
+++ b/docs/src/whatsnew/latest.rst
@@ -70,8 +70,8 @@ This document explains the changes made to Iris for this release
 ===============
 
 #. `@ESadek-MO`_ has deprecated the :class:`~iris.tests.IrisTest` class, and other unittest-based
-  testing conveniences in favour of the conveniences found in `:mod:`iris/tests/_shared_utils.py``.
-  (pull:`6950`)
+  testing conveniences in favour of the conveniences found in :mod:`iris.tests._shared_utils`.
+  (:pull:`6950`)
 
 
 🔗 Dependencies

From 6afa3c9c04522c17a2fdfe1ea2fa666147daa3da Mon Sep 17 00:00:00 2001
From: Elias Sadek
Date: Thu, 26 Feb 2026 15:34:19 +0000
Subject: [PATCH 06/22] stratify

---
 .../experimental/stratify/__init__.py         |   5 +
 .../experimental/stratify/test_relevel.py     | 116 ++++++++++++++++++
 .../experimental/stratify/test_relevel.py     |  69 +++--------
 3 files changed, 136 insertions(+), 54 deletions(-)
 create mode 100644 lib/iris/tests/integration/experimental/stratify/__init__.py
 create mode 100644 lib/iris/tests/integration/experimental/stratify/test_relevel.py

diff --git a/lib/iris/tests/integration/experimental/stratify/__init__.py b/lib/iris/tests/integration/experimental/stratify/__init__.py
new file mode 100644
index 0000000000..e31d61ba10
--- /dev/null
+++ b/lib/iris/tests/integration/experimental/stratify/__init__.py
@@ -0,0 +1,5 @@
+# Copyright Iris contributors
+#
+# This file is part of Iris and is released under the BSD license.
+# See LICENSE in the root of the repository for full licensing details.
+"""Integration tests for the :mod:`iris.experimental.stratify` package."""
diff --git a/lib/iris/tests/integration/experimental/stratify/test_relevel.py b/lib/iris/tests/integration/experimental/stratify/test_relevel.py
new file mode 100644
index 0000000000..e9761858d7
--- /dev/null
+++ b/lib/iris/tests/integration/experimental/stratify/test_relevel.py
@@ -0,0 +1,116 @@
+# Copyright Iris contributors
+#
+# This file is part of Iris and is released under the BSD license.
+# See LICENSE in the root of the repository for full licensing details.
+"""Integration tests for the :func:`iris.experimental.stratify.relevel` function.""" + +from functools import partial + +import numpy as np +import pytest + +from iris.coords import AuxCoord, DimCoord +from iris.tests import _shared_utils +import iris.tests.stock as stock + +try: + import stratify + + from iris.experimental.stratify import relevel +except ImportError: + stratify = None + + +@_shared_utils.skip_stratify +class Test: + @pytest.fixture(autouse=True) + def _setup(self): + cube = stock.simple_3d()[:, :1, :1] + #: The data from which to get the levels. + self.src_levels = cube.copy() + #: The data to interpolate. + self.cube = cube.copy() + self.cube.rename("foobar") + self.cube *= 10 + self.coord = self.src_levels.coord("wibble") + self.axes = (self.coord, self.coord.name(), None, 0) + + def test_broadcast_fail_src_levels(self): + emsg = "Cannot broadcast the cube and src_levels" + data = np.arange(60).reshape(3, 4, 5) + with pytest.raises(ValueError, match=emsg): + relevel(self.cube, AuxCoord(data), [1, 2, 3]) + + def test_broadcast_fail_tgt_levels(self): + emsg = "Cannot broadcast the cube and tgt_levels" + data = np.arange(60).reshape(3, 4, 5) + with pytest.raises(ValueError, match=emsg): + relevel(self.cube, self.coord, data) + + def test_standard_input(self): + for axis in self.axes: + result = relevel(self.cube, self.src_levels, [-1, 0, 5.5], axis=axis) + _shared_utils.assert_array_equal( + result.data.flatten(), np.array([np.nan, 0, 55]) + ) + expected = DimCoord([-1, 0, 5.5], units=1, long_name="thingness") + assert expected == result.coord("thingness") + + def test_non_monotonic(self): + for axis in self.axes: + result = relevel(self.cube, self.src_levels, [2, 3, 2], axis=axis) + _shared_utils.assert_array_equal( + result.data.flatten(), np.array([20, 30, np.nan]) + ) + expected = AuxCoord([2, 3, 2], units=1, long_name="thingness") + assert result.coord("thingness") == expected + + def test_static_level(self): + for axis in self.axes: + result 
= relevel(self.cube, self.src_levels, [2, 2], axis=axis) + _shared_utils.assert_array_equal(result.data.flatten(), np.array([20, 20])) + + def test_coord_input(self): + source = AuxCoord(self.src_levels.data) + metadata = self.src_levels.metadata._asdict() + metadata["coord_system"] = None + metadata["climatological"] = None + source.metadata = metadata + + for axis in self.axes: + result = relevel(self.cube, source, [0, 12, 13], axis=axis) + assert result.shape == (3, 1, 1) + _shared_utils.assert_array_equal(result.data.flatten(), [0, 120, np.nan]) + + def test_custom_interpolator(self): + interpolator = partial(stratify.interpolate, interpolation="nearest") + + for axis in self.axes: + result = relevel( + self.cube, + self.src_levels, + [-1, 0, 6.5], + axis=axis, + interpolator=interpolator, + ) + _shared_utils.assert_array_equal( + result.data.flatten(), np.array([np.nan, 0, 120]) + ) + + def test_multi_dim_target_levels(self, request): + interpolator = partial( + stratify.interpolate, + interpolation="linear", + extrapolation="linear", + ) + + for axis in self.axes: + result = relevel( + self.cube, + self.src_levels, + self.src_levels.data, + axis=axis, + interpolator=interpolator, + ) + _shared_utils.assert_array_equal(result.data.flatten(), np.array([0, 120])) + _shared_utils.assert_CML(request, result) diff --git a/lib/iris/tests/unit/experimental/stratify/test_relevel.py b/lib/iris/tests/unit/experimental/stratify/test_relevel.py index 5a9c192d73..a430647891 100644 --- a/lib/iris/tests/unit/experimental/stratify/test_relevel.py +++ b/lib/iris/tests/unit/experimental/stratify/test_relevel.py @@ -4,27 +4,17 @@ # See LICENSE in the root of the repository for full licensing details. 
"""Unit tests for the :func:`iris.experimental.stratify.relevel` function.""" -from functools import partial - import numpy as np import pytest from iris.coords import AuxCoord, DimCoord -from iris.tests import _shared_utils +from iris.experimental.stratify import relevel import iris.tests.stock as stock -try: - import stratify - - from iris.experimental.stratify import relevel -except ImportError: - stratify = None - -@_shared_utils.skip_stratify class Test: @pytest.fixture(autouse=True) - def _setup(self): + def _setup(self, mocker): cube = stock.simple_3d()[:, :1, :1] #: The data from which to get the levels. self.src_levels = cube.copy() @@ -34,6 +24,10 @@ def _setup(self): self.cube *= 10 self.coord = self.src_levels.coord("wibble") self.axes = (self.coord, self.coord.name(), None, 0) + self.patch_interpolate = mocker.patch( + "iris.experimental.stratify.stratify.interpolate" + ) + self.patch_interpolate.return_value = np.ones((3, 1, 1)) def test_broadcast_fail_src_levels(self): emsg = "Cannot broadcast the cube and src_levels" @@ -50,25 +44,9 @@ def test_broadcast_fail_tgt_levels(self): def test_standard_input(self): for axis in self.axes: result = relevel(self.cube, self.src_levels, [-1, 0, 5.5], axis=axis) - _shared_utils.assert_array_equal( - result.data.flatten(), np.array([np.nan, 0, 55]) - ) expected = DimCoord([-1, 0, 5.5], units=1, long_name="thingness") assert expected == result.coord("thingness") - - def test_non_monotonic(self): - for axis in self.axes: - result = relevel(self.cube, self.src_levels, [2, 3, 2], axis=axis) - _shared_utils.assert_array_equal( - result.data.flatten(), np.array([20, 30, np.nan]) - ) - expected = AuxCoord([2, 3, 2], units=1, long_name="thingness") - assert result.coord("thingness") == expected - - def test_static_level(self): - for axis in self.axes: - result = relevel(self.cube, self.src_levels, [2, 2], axis=axis) - _shared_utils.assert_array_equal(result.data.flatten(), np.array([20, 20])) + 
self.patch_interpolate.assert_called() def test_coord_input(self): source = AuxCoord(self.src_levels.data) @@ -80,37 +58,20 @@ def test_coord_input(self): for axis in self.axes: result = relevel(self.cube, source, [0, 12, 13], axis=axis) assert result.shape == (3, 1, 1) - _shared_utils.assert_array_equal(result.data.flatten(), [0, 120, np.nan]) + self.patch_interpolate.assert_called() - def test_custom_interpolator(self): - interpolator = partial(stratify.interpolate, interpolation="nearest") + def test_custom_interpolator(self, mocker): + mock_interpolate = mocker.Mock() + mock_interpolate.return_value = np.ones((3, 1, 1)) - for axis in self.axes: - result = relevel( - self.cube, - self.src_levels, - [-1, 0, 6.5], - axis=axis, - interpolator=interpolator, - ) - _shared_utils.assert_array_equal( - result.data.flatten(), np.array([np.nan, 0, 120]) - ) - - def test_multi_dim_target_levels(self, request): - interpolator = partial( - stratify.interpolate, - interpolation="linear", - extrapolation="linear", - ) + interpolator = mock_interpolate for axis in self.axes: - result = relevel( + _ = relevel( self.cube, self.src_levels, - self.src_levels.data, + [-1, 0, 6.5], axis=axis, interpolator=interpolator, ) - _shared_utils.assert_array_equal(result.data.flatten(), np.array([0, 120])) - _shared_utils.assert_CML(request, result) + mock_interpolate.assert_called() From 4ffcb55f12a73b5f038d49320fd09fe96a32c150 Mon Sep 17 00:00:00 2001 From: Elias Sadek Date: Thu, 26 Feb 2026 15:55:04 +0000 Subject: [PATCH 07/22] stratify --- .../experimental/stratify/test_relevel.py | 12 ------------ 1 file changed, 12 deletions(-) diff --git a/lib/iris/tests/integration/experimental/stratify/test_relevel.py b/lib/iris/tests/integration/experimental/stratify/test_relevel.py index e9761858d7..ee2d4505db 100644 --- a/lib/iris/tests/integration/experimental/stratify/test_relevel.py +++ b/lib/iris/tests/integration/experimental/stratify/test_relevel.py @@ -35,18 +35,6 @@ def _setup(self): 
self.coord = self.src_levels.coord("wibble") self.axes = (self.coord, self.coord.name(), None, 0) - def test_broadcast_fail_src_levels(self): - emsg = "Cannot broadcast the cube and src_levels" - data = np.arange(60).reshape(3, 4, 5) - with pytest.raises(ValueError, match=emsg): - relevel(self.cube, AuxCoord(data), [1, 2, 3]) - - def test_broadcast_fail_tgt_levels(self): - emsg = "Cannot broadcast the cube and tgt_levels" - data = np.arange(60).reshape(3, 4, 5) - with pytest.raises(ValueError, match=emsg): - relevel(self.cube, self.coord, data) - def test_standard_input(self): for axis in self.axes: result = relevel(self.cube, self.src_levels, [-1, 0, 5.5], axis=axis) From 5f6deef400c748528e6d90c42d1cd264bc20df9f Mon Sep 17 00:00:00 2001 From: Elias Sadek Date: Thu, 26 Feb 2026 16:06:51 +0000 Subject: [PATCH 08/22] geovista --- .../tests/unit/experimental/geovista/test_cube_to_polydata.py | 4 ++-- .../experimental/geovista/test_extract_unstructured_region.py | 4 +++- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/lib/iris/tests/unit/experimental/geovista/test_cube_to_polydata.py b/lib/iris/tests/unit/experimental/geovista/test_cube_to_polydata.py index cecbba373b..93358d96f1 100644 --- a/lib/iris/tests/unit/experimental/geovista/test_cube_to_polydata.py +++ b/lib/iris/tests/unit/experimental/geovista/test_cube_to_polydata.py @@ -67,8 +67,8 @@ def cube_with_crs(self, default_cs, cube): @pytest.fixture def mocked_operation(self, mocker): - mocking = mocker.Mock() - setattr(Transform, self.MOCKED_OPERATION, mocking) + target = f"geovista.Transform.{self.MOCKED_OPERATION}" + mocking = mocker.patch(target, mocker.Mock()) return mocking @staticmethod diff --git a/lib/iris/tests/unit/experimental/geovista/test_extract_unstructured_region.py b/lib/iris/tests/unit/experimental/geovista/test_extract_unstructured_region.py index b7acd2412f..51688f9f6d 100644 --- a/lib/iris/tests/unit/experimental/geovista/test_extract_unstructured_region.py +++ 
b/lib/iris/tests/unit/experimental/geovista/test_extract_unstructured_region.py @@ -4,7 +4,6 @@ # See LICENSE in the root of the repository for full licensing details. """Unit tests for the `iris.experimental.geovista.extract_unstructured_region` function.""" -from geovista.common import VTK_CELL_IDS, VTK_POINT_IDS import numpy as np import pytest @@ -13,6 +12,9 @@ from iris.tests.stock import sample_2d_latlons from iris.tests.stock.mesh import sample_mesh_cube +VTK_CELL_IDS = "vtkOriginalCellIds" +VTK_POINT_IDS = "vtkOriginalPointIds" + class TestRegionExtraction: @pytest.fixture From c55410b7e0b491a8069e403db9026479cc06038e Mon Sep 17 00:00:00 2001 From: Elias Sadek Date: Thu, 26 Feb 2026 16:07:03 +0000 Subject: [PATCH 09/22] geovista --- .../tests/unit/experimental/geovista/test_cube_to_polydata.py | 1 - 1 file changed, 1 deletion(-) diff --git a/lib/iris/tests/unit/experimental/geovista/test_cube_to_polydata.py b/lib/iris/tests/unit/experimental/geovista/test_cube_to_polydata.py index 93358d96f1..effb55ecd7 100644 --- a/lib/iris/tests/unit/experimental/geovista/test_cube_to_polydata.py +++ b/lib/iris/tests/unit/experimental/geovista/test_cube_to_polydata.py @@ -6,7 +6,6 @@ from typing import ClassVar -from geovista import Transform import numpy as np import pytest From f61de54ad279aef08c6676e48a1f8b3c0c3691fa Mon Sep 17 00:00:00 2001 From: Elias Sadek Date: Wed, 4 Mar 2026 13:19:50 +0000 Subject: [PATCH 10/22] regridding --- .../experimental/regrid/__init__.py | 5 + .../test_regrid_conservative_via_esmpy.py | 818 ++++++++++++++++++ 2 files changed, 823 insertions(+) create mode 100644 lib/iris/tests/integration/experimental/regrid/__init__.py create mode 100644 lib/iris/tests/integration/experimental/regrid/test_regrid_conservative_via_esmpy.py diff --git a/lib/iris/tests/integration/experimental/regrid/__init__.py b/lib/iris/tests/integration/experimental/regrid/__init__.py new file mode 100644 index 0000000000..e3983bc695 --- /dev/null +++ 
b/lib/iris/tests/integration/experimental/regrid/__init__.py @@ -0,0 +1,5 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. +"""Regridding code is tested in this package.""" diff --git a/lib/iris/tests/integration/experimental/regrid/test_regrid_conservative_via_esmpy.py b/lib/iris/tests/integration/experimental/regrid/test_regrid_conservative_via_esmpy.py new file mode 100644 index 0000000000..e6d209c691 --- /dev/null +++ b/lib/iris/tests/integration/experimental/regrid/test_regrid_conservative_via_esmpy.py @@ -0,0 +1,818 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. +"""Tests for :func:`iris.experimental.regrid.regrid_conservative_via_esmpy`.""" + +import contextlib + +import cf_units +import numpy as np +import pytest + +from iris.tests import _shared_utils + +# Import ESMF if installed, else fail quietly + disable all the tests. +try: + import esmpy as ESMF +except ImportError: + ESMF = None +skip_esmf = pytest.mark.skipif( + condition=ESMF is None, reason="Requires ESMF, which is not available." +) + +import iris +import iris.analysis +import iris.analysis.cartography as i_cartog +from iris.experimental.regrid_conservative import regrid_conservative_via_esmpy +import iris.tests.stock as istk + +_PLAIN_GEODETIC_CS = iris.coord_systems.GeogCS(i_cartog.DEFAULT_SPHERICAL_EARTH_RADIUS) + + +def _make_test_cube(shape, xlims, ylims, pole_latlon=None): + """Create latlon cube (optionally rotated) with given xy dimensions and bounds + limit values. + + Produces a regular grid in source coordinates. + Does not work for 1xN or Nx1 grids, because guess_bounds fails. 
+ + """ + nx, ny = shape + cube = iris.cube.Cube(np.zeros((ny, nx))) + xvals = np.linspace(xlims[0], xlims[1], nx) + yvals = np.linspace(ylims[0], ylims[1], ny) + coordname_prefix = "" + cs = _PLAIN_GEODETIC_CS + if pole_latlon is not None: + coordname_prefix = "grid_" + pole_lat, pole_lon = pole_latlon + cs = iris.coord_systems.RotatedGeogCS( + grid_north_pole_latitude=pole_lat, + grid_north_pole_longitude=pole_lon, + ellipsoid=cs, + ) + + co_x = iris.coords.DimCoord( + xvals, + standard_name=coordname_prefix + "longitude", + units=cf_units.Unit("degrees"), + coord_system=cs, + ) + co_x.guess_bounds() + cube.add_dim_coord(co_x, 1) + co_y = iris.coords.DimCoord( + yvals, + standard_name=coordname_prefix + "latitude", + units=cf_units.Unit("degrees"), + coord_system=cs, + ) + co_y.guess_bounds() + cube.add_dim_coord(co_y, 0) + return cube + + +def _cube_area_sum(cube): + """Calculate total area-sum - Iris can't do this in one operation.""" + area_sums = cube * i_cartog.area_weights(cube, normalize=False) + area_sum = area_sums.collapsed(area_sums.coords(dim_coords=True), iris.analysis.SUM) + return area_sum.data.flatten()[0] + + +def _reldiff(a, b): + """Compute a relative-difference measure between real numbers. + + Result is: + if a == b == 0: + 0.0 + otherwise: + |a - b| / mean(|a|, |b|) + + """ + if a == 0.0 and b == 0.0: + return 0.0 + return abs(a - b) * 2.0 / (abs(a) + abs(b)) + + +def _minmax(v): + """Calculate [min, max] of input.""" + return [f(v) for f in (np.min, np.max)] + + +@contextlib.contextmanager +def _donothing_context_manager(): + yield + + +@skip_esmf +class TestConservativeRegrid: + @pytest.fixture(autouse=True) + def _setup(self): + # Compute basic test data cubes. 
+ shape1 = (5, 5) + xlims1, ylims1 = ((-2, 2), (-2, 2)) + c1 = _make_test_cube(shape1, xlims1, ylims1) + c1.data[:] = 0.0 + c1.data[2, 2] = 1.0 + + shape2 = (4, 4) + xlims2, ylims2 = ((-1.5, 1.5), (-1.5, 1.5)) + c2 = _make_test_cube(shape2, xlims2, ylims2) + c2.data[:] = 0.0 + + # Save timesaving pre-computed bits + self.stock_c1_c2 = (c1, c2) + self.stock_regrid_c1toc2 = regrid_conservative_via_esmpy(c1, c2) + self.stock_c1_areasum = _cube_area_sum(c1) + + def test_simple_areas(self): + """Test area-conserving regrid between simple "near-square" grids. + + Grids have overlapping areas in the same (lat-lon) coordinate system. + Grids are "nearly flat" lat-lon spaces (small ranges near the equator). + + """ + c1, c2 = self.stock_c1_c2 + c1_areasum = self.stock_c1_areasum + + # main regrid + c1to2 = regrid_conservative_via_esmpy(c1, c2) + + c1to2_areasum = _cube_area_sum(c1to2) + + # Check expected result (Cartesian equivalent, so not exact). + d_expect = np.array( + [ + [0.00, 0.00, 0.00, 0.00], + [0.00, 0.25, 0.25, 0.00], + [0.00, 0.25, 0.25, 0.00], + [0.00, 0.00, 0.00, 0.00], + ] + ) + # Numbers are slightly off (~0.25000952). This is expected. + _shared_utils.assert_array_all_close(c1to2.data, d_expect, rtol=5.0e-5) + + # check that the area sums are equivalent, simple total is a bit off + _shared_utils.assert_array_all_close(c1to2_areasum, c1_areasum) + + # + # regrid back onto original grid again ... 
+ # + c1to2to1 = regrid_conservative_via_esmpy(c1to2, c1) + + c1to2to1_areasum = _cube_area_sum(c1to2to1) + + # Check expected result (Cartesian/exact difference now greater) + d_expect = np.array( + [ + [0.0, 0.0000, 0.0000, 0.0000, 0.0], + [0.0, 0.0625, 0.1250, 0.0625, 0.0], + [0.0, 0.1250, 0.2500, 0.1250, 0.0], + [0.0, 0.0625, 0.1250, 0.0625, 0.0], + [0.0, 0.0000, 0.0000, 0.0000, 0.0], + ] + ) + _shared_utils.assert_array_all_close(c1to2to1.data, d_expect, atol=0.00002) + + # check area sums again + _shared_utils.assert_array_all_close(c1to2to1_areasum, c1_areasum) + + def test_simple_missing_data(self): + """Check for missing data handling. + + Should mask cells that either .. + (a) go partly outside the source grid + (b) partially overlap masked source data + + """ + c1, c2 = self.stock_c1_c2 + + # regrid from c2 to c1 -- should mask all the edges... + c2_to_c1 = regrid_conservative_via_esmpy(c2, c1) + _shared_utils.assert_array_equal( + c2_to_c1.data.mask, + [ + [True, True, True, True, True], + [True, False, False, False, True], + [True, False, False, False, True], + [True, False, False, False, True], + [True, True, True, True, True], + ], + ) + + # do same with a particular point masked + c2m = c2.copy() + c2m.data = np.ma.array(c2m.data) + c2m.data[1, 1] = np.ma.masked + c2m_to_c1 = regrid_conservative_via_esmpy(c2m, c1) + _shared_utils.assert_array_equal( + c2m_to_c1.data.mask, + [ + [True, True, True, True, True], + [True, True, True, False, True], + [True, True, True, False, True], + [True, False, False, False, True], + [True, True, True, True, True], + ], + ) + + @_shared_utils.skip_data + def test_multidimensional(self): + """Check valid operation on a multidimensional cube. + + Calculation should repeat across multiple dimensions. + Any attached orography is interpolated. + + NOTE: in future, extra dimensions may be passed through to ESMF: At + present, it repeats the calculation on 2d slices. 
So we check that + at least the results are equivalent (as it's quite easy to do). + + """ + # Get some higher-dimensional test data + c1 = istk.realistic_4d() + # Chop down to small size, and mask some data + c1 = c1[:3, :4, :16, :12] + c1.data[:, 2, :, :] = np.ma.masked + c1.data[1, 1, 3:9, 4:7] = np.ma.masked + # Give it a slightly more challenging indexing order: tzyx --> xzty + c1.transpose((3, 1, 0, 2)) + + # Construct a (coarser) target grid of about the same extent + c1_cs = c1.coord(axis="x").coord_system + xlims = _minmax(c1.coord(axis="x").contiguous_bounds()) + ylims = _minmax(c1.coord(axis="y").contiguous_bounds()) + # Reduce the dimensions slightly to avoid NaNs in regridded orography + delta = 0.05 + # || NOTE: this is *not* a small amount. Think there is a bug. + # || NOTE: See https://github.com/SciTools/iris/issues/458 + xlims = np.interp([delta, 1.0 - delta], [0, 1], xlims) + ylims = np.interp([delta, 1.0 - delta], [0, 1], ylims) + pole_latlon = ( + c1_cs.grid_north_pole_latitude, + c1_cs.grid_north_pole_longitude, + ) + c2 = _make_test_cube((7, 8), xlims, ylims, pole_latlon=pole_latlon) + + # regrid onto new grid + c1_to_c2 = regrid_conservative_via_esmpy(c1, c2) + + # check that all the original coords exist in the new cube + # NOTE: this also effectively confirms we haven't lost the orography + def list_coord_names(cube): + return sorted([coord.name() for coord in cube.coords()]) + + assert list_coord_names(c1_to_c2) == list_coord_names(c1) + + # check that each xy 'slice' has same values as if done on its own. + for i_p, i_t in np.ndindex(c1.shape[1:3]): + c1_slice = c1[:, i_p, i_t] + c2_slice = regrid_conservative_via_esmpy(c1_slice, c2) + subcube = c1_to_c2[:, i_p, i_t] + assert subcube == c2_slice + + # check all other metadata + assert c1_to_c2.metadata == c1.metadata + + def test_xy_transposed(self): + # Test effects of transposing X and Y in src/dst data. 
+ c1, c2 = self.stock_c1_c2 + testcube_xy = self.stock_regrid_c1toc2 + + # Check that transposed data produces transposed results + # - i.e. regrid(data^T)^T == regrid(data) + c1_yx = c1.copy() + c1_yx.transpose() + testcube_yx = regrid_conservative_via_esmpy(c1_yx, c2) + testcube_yx.transpose() + assert testcube_yx == testcube_xy + + # Check that transposing destination does nothing + c2_yx = c2.copy() + c2_yx.transpose() + testcube_dst_transpose = regrid_conservative_via_esmpy(c1, c2_yx) + assert testcube_dst_transpose == testcube_xy + + def test_same_grid(self): + # Test regridding onto the identical grid. + # Use regrid with self as target. + c1, _ = self.stock_c1_c2 + testcube = regrid_conservative_via_esmpy(c1, c1) + assert testcube == c1 + + def test_global(self): + # Test global regridding. + # Compute basic test data cubes. + shape1 = (8, 6) + xlim1 = 180.0 * (shape1[0] - 1) / shape1[0] + ylim1 = 90.0 * (shape1[1] - 1) / shape1[1] + c1 = _make_test_cube(shape1, (-xlim1, xlim1), (-ylim1, ylim1)) + # Create a small, plausible global array: + # - top + bottom rows all the same + # - left + right columns "mostly close" for checking across the seam + basedata = np.array( + [ + [1, 1, 1, 1, 1, 1, 1, 1], + [1, 1, 4, 4, 4, 2, 2, 1], + [2, 1, 4, 4, 4, 2, 2, 2], + [2, 5, 5, 1, 1, 1, 5, 5], + [5, 5, 5, 1, 1, 1, 5, 5], + [5, 5, 5, 5, 5, 5, 5, 5], + ] + ) + c1.data[:] = basedata + + # Create a rotated grid to regrid this onto. 
+ shape2 = (14, 11) + xlim2 = 180.0 * (shape2[0] - 1) / shape2[0] + ylim2 = 90.0 * (shape2[1] - 1) / shape2[1] + c2 = _make_test_cube( + shape2, (-xlim2, xlim2), (-ylim2, ylim2), pole_latlon=(47.4, 25.7) + ) + + # Perform regridding + c1toc2 = regrid_conservative_via_esmpy(c1, c2) + + # Check that before+after area-sums match fairly well + c1_areasum = _cube_area_sum(c1) + c1toc2_areasum = _cube_area_sum(c1toc2) + _shared_utils.assert_array_all_close(c1toc2_areasum, c1_areasum, rtol=0.006) + + def test_global_collapse(self): + # Test regridding global data to a single cell. + # Fetch 'standard' testcube data + c1, _ = self.stock_c1_c2 + c1_areasum = self.stock_c1_areasum + + # Condense entire globe onto a single cell + x_coord_2 = iris.coords.DimCoord( + [0.0], + bounds=[-180.0, 180.0], + standard_name="longitude", + units="degrees", + coord_system=_PLAIN_GEODETIC_CS, + ) + y_coord_2 = iris.coords.DimCoord( + [0.0], + bounds=[-90.0, 90.0], + standard_name="latitude", + units="degrees", + coord_system=_PLAIN_GEODETIC_CS, + ) + c2 = iris.cube.Cube([[0.0]]) + c2.add_dim_coord(y_coord_2, 0) + c2.add_dim_coord(x_coord_2, 1) + + # NOTE: at present, this causes an error inside ESMF ... + emsg = "ESMC_FieldRegridStore failed with rc = 506." + context = pytest.raises(ValueError, match=emsg) + global_cell_supported = False + if global_cell_supported: + context = _donothing_context_manager() + with context: + c1_to_global = regrid_conservative_via_esmpy(c1, c2) + # Check the total area sum is still the same + _shared_utils.assert_array_all_close(c1_to_global.data[0, 0], c1_areasum) + + def test_single_cells(self): + # Test handling of single-cell grids. + # Fetch 'standard' testcube data + c1, c2 = self.stock_c1_c2 + c1_areasum = self.stock_c1_areasum + + # + # At present NxN -> 1x1 "in-place" doesn't seem to work properly + # - result cell has missing-data ? 
+ # + # Condense entire region into a single cell in the c1 grid + xlims1 = _minmax(c1.coord(axis="x").bounds) + ylims1 = _minmax(c1.coord(axis="y").bounds) + x_c1x1 = iris.coords.DimCoord( + xlims1[0], + bounds=xlims1, + standard_name="longitude", + units="degrees", + coord_system=_PLAIN_GEODETIC_CS, + ) + y_c1x1 = iris.coords.DimCoord( + ylims1[0], + bounds=ylims1, + standard_name="latitude", + units="degrees", + coord_system=_PLAIN_GEODETIC_CS, + ) + c1x1_gridcube = iris.cube.Cube([[0.0]]) + c1x1_gridcube.add_dim_coord(y_c1x1, 0) + c1x1_gridcube.add_dim_coord(x_c1x1, 1) + c1x1 = regrid_conservative_via_esmpy(c1, c1x1_gridcube) + c1x1_areasum = _cube_area_sum(c1x1) + # Check the total area sum is still the same + condense_to_1x1_supported = False + # NOTE: currently disabled (ESMF gets this wrong) + # NOTE ALSO: call hits numpy 1.7 bug in testing.assert_array_compare. + if condense_to_1x1_supported: + _shared_utils.assert_array_all_close(c1x1_areasum, c1_areasum) + + # Condense entire region onto a single cell covering the area of 'c2' + xlims2 = _minmax(c2.coord(axis="x").bounds) + ylims2 = _minmax(c2.coord(axis="y").bounds) + x_c2x1 = iris.coords.DimCoord( + xlims2[0], + bounds=xlims2, + standard_name="longitude", + units=cf_units.Unit("degrees"), + coord_system=_PLAIN_GEODETIC_CS, + ) + y_c2x1 = iris.coords.DimCoord( + ylims2[0], + bounds=ylims2, + standard_name="latitude", + units=cf_units.Unit("degrees"), + coord_system=_PLAIN_GEODETIC_CS, + ) + c2x1_gridcube = iris.cube.Cube([[0.0]]) + c2x1_gridcube.add_dim_coord(y_c2x1, 0) + c2x1_gridcube.add_dim_coord(x_c2x1, 1) + c1_to_c2x1 = regrid_conservative_via_esmpy(c1, c2x1_gridcube) + + # Check the total area sum is still the same + c1_to_c2x1_areasum = _cube_area_sum(c1_to_c2x1) + _shared_utils.assert_array_all_close(c1_to_c2x1_areasum, c1_areasum, 0.0004) + + # 1x1 -> NxN : regrid single cell to NxN grid + # construct a single-cell approximation to 'c1' with the same area sum. 
+ # NOTE: can't use _make_cube (see docstring) + c1x1 = c1.copy()[0:1, 0:1] + xlims1 = _minmax(c1.coord(axis="x").bounds) + ylims1 = _minmax(c1.coord(axis="y").bounds) + c1x1.coord(axis="x").bounds = xlims1 + c1x1.coord(axis="y").bounds = ylims1 + # Assign data mean as single cell value : Maybe not exact, but "close" + c1x1.data[0, 0] = np.mean(c1.data) + + # Regrid this back onto the original NxN grid + c1x1_to_c1 = regrid_conservative_via_esmpy(c1x1, c1) + c1x1_to_c1_areasum = _cube_area_sum(c1x1_to_c1) + + # Check that area sum is ~unchanged, as expected + _shared_utils.assert_array_all_close(c1x1_to_c1_areasum, c1_areasum, 0.0004) + + # Check 1x1 -> 1x1 + # NOTE: can *only* get any result with a fully overlapping cell, so + # just regrid onto self + c1x1toself = regrid_conservative_via_esmpy(c1x1, c1x1) + c1x1toself_areasum = _cube_area_sum(c1x1toself) + _shared_utils.assert_array_all_close(c1x1toself_areasum, c1_areasum, 0.0004) + # NOTE: perhaps surprisingly, this has a similar level of error. + + def test_longitude_wraps(self): + """Check results are independent of where the grid 'seams' are.""" + # First repeat global regrid calculation from 'test_global'. + shape1 = (8, 6) + xlim1 = 180.0 * (shape1[0] - 1) / shape1[0] + ylim1 = 90.0 * (shape1[1] - 1) / shape1[1] + xlims1 = (-xlim1, xlim1) + ylims1 = (-ylim1, ylim1) + c1 = _make_test_cube(shape1, xlims1, ylims1) + + # Create a small, plausible global array (see test_global). 
+ basedata = np.array( + [ + [1, 1, 1, 1, 1, 1, 1, 1], + [1, 1, 4, 4, 4, 2, 2, 1], + [2, 1, 4, 4, 4, 2, 2, 2], + [2, 5, 5, 1, 1, 1, 5, 5], + [5, 5, 5, 1, 1, 1, 5, 5], + [5, 5, 5, 5, 5, 5, 5, 5], + ] + ) + c1.data[:] = basedata + + shape2 = (14, 11) + xlim2 = 180.0 * (shape2[0] - 1) / shape2[0] + ylim2 = 90.0 * (shape2[1] - 1) / shape2[1] + xlims_2 = (-xlim2, xlim2) + ylims_2 = (-ylim2, ylim2) + c2 = _make_test_cube(shape2, xlims_2, ylims_2, pole_latlon=(47.4, 25.7)) + + # Perform regridding + c1toc2 = regrid_conservative_via_esmpy(c1, c2) + + # Now redo with dst longitudes rotated, so 'seam' is somewhere else. + x2_shift_steps = shape2[0] // 3 + xlims2_shifted = np.array(xlims_2) + 360.0 * x2_shift_steps / shape2[0] + c2_shifted = _make_test_cube( + shape2, xlims2_shifted, ylims_2, pole_latlon=(47.4, 25.7) + ) + c1toc2_shifted = regrid_conservative_via_esmpy(c1, c2_shifted) + + # Show that results are the same, when output rolled by same amount + rolled_data = np.roll(c1toc2_shifted.data, x2_shift_steps, axis=1) + _shared_utils.assert_array_all_close(rolled_data, c1toc2.data) + + # Repeat with rolled *source* data : result should be identical + x1_shift_steps = shape1[0] // 3 + x_shift_degrees = 360.0 * x1_shift_steps / shape1[0] + xlims1_shifted = [x - x_shift_degrees for x in xlims1] + c1_shifted = _make_test_cube(shape1, xlims1_shifted, ylims1) + c1_shifted.data[:] = np.roll(basedata, x1_shift_steps, axis=1) + c1shifted_toc2 = regrid_conservative_via_esmpy(c1_shifted, c2) + assert c1shifted_toc2 == c1toc2 + + def test_polar_areas(self): + """Test area-conserving regrid between different grids. + + Grids have overlapping areas in the same (lat-lon) coordinate system. + Cells are highly non-square (near the pole). + + """ + # Like test_basic_area, but not symmetrical + bigger overall errors. 
+ shape1 = (5, 5) + xlims1, ylims1 = ((-2, 2), (84, 88)) + c1 = _make_test_cube(shape1, xlims1, ylims1) + c1.data[:] = 0.0 + c1.data[2, 2] = 1.0 + c1_areasum = _cube_area_sum(c1) + + shape2 = (4, 4) + xlims2, ylims2 = ((-1.5, 1.5), (84.5, 87.5)) + c2 = _make_test_cube(shape2, xlims2, ylims2) + c2.data[:] = 0.0 + + c1to2 = regrid_conservative_via_esmpy(c1, c2) + + # check for expected pattern + d_expect = np.array( + [ + [0.0, 0.0, 0.0, 0.0], + [0.0, 0.23614, 0.23614, 0.0], + [0.0, 0.26784, 0.26784, 0.0], + [0.0, 0.0, 0.0, 0.0], + ] + ) + _shared_utils.assert_array_all_close(c1to2.data, d_expect, rtol=5.0e-5) + + # check sums + c1to2_areasum = _cube_area_sum(c1to2) + _shared_utils.assert_array_all_close(c1to2_areasum, c1_areasum) + + # + # transform back again ... + # + c1to2to1 = regrid_conservative_via_esmpy(c1to2, c1) + + # check values + d_expect = np.array( + [ + [0.0, 0.0, 0.0, 0.0, 0.0], + [0.0, 0.056091, 0.112181, 0.056091, 0.0], + [0.0, 0.125499, 0.250998, 0.125499, 0.0], + [0.0, 0.072534, 0.145067, 0.072534, 0.0], + [0.0, 0.0, 0.0, 0.0, 0.0], + ] + ) + _shared_utils.assert_array_all_close(c1to2to1.data, d_expect, atol=0.0005) + + # check sums + c1to2to1_areasum = _cube_area_sum(c1to2to1) + _shared_utils.assert_array_all_close(c1to2to1_areasum, c1_areasum) + + def test_fail_no_cs(self): + # Test error when one coordinate has no coord_system. + shape1 = (5, 5) + xlims1, ylims1 = ((-2, 2), (-2, 2)) + c1 = _make_test_cube(shape1, xlims1, ylims1) + c1.data[:] = 0.0 + c1.data[2, 2] = 1.0 + + shape2 = (4, 4) + xlims2, ylims2 = ((-1.5, 1.5), (-1.5, 1.5)) + c2 = _make_test_cube(shape2, xlims2, ylims2) + c2.data[:] = 0.0 + c2.coord("latitude").coord_system = None + + emsg = ( + r"The cube's x \('longitude'\) and y \('latitude'\) " + "coordinates must have the same coordinate system." 
+ ) + with pytest.raises(ValueError, match=emsg): + regrid_conservative_via_esmpy(c1, c2) + + def test_fail_different_cs(self): + # Test error when either src or dst coords have different + # coord_systems. + shape1 = (5, 5) + xlims1, ylims1 = ((-2, 2), (-2, 2)) + shape2 = (4, 4) + xlims2, ylims2 = ((-1.5, 1.5), (-1.5, 1.5)) + + # Check basic regrid between these is ok. + c1 = _make_test_cube(shape1, xlims1, ylims1, pole_latlon=(45.0, 35.0)) + c2 = _make_test_cube(shape2, xlims2, ylims2) + regrid_conservative_via_esmpy(c1, c2) + + emsg = ( + r"The cube's x \('grid_longitude'\) and y \('grid_latitude'\) coordinates " + "must have the same coordinate system." + ) + # Replace the coord_system one of the source coords + check this fails. + c1.coord("grid_longitude").coord_system = c2.coord("longitude").coord_system + with pytest.raises(ValueError, match=emsg): + regrid_conservative_via_esmpy(c1, c2) + + emsg = ( + r"The cube's x \('longitude'\) and y \('latitude'\) coordinates " + "must have the same coordinate system." + ) + # Repeat with target coordinate fiddled. + c1 = _make_test_cube(shape1, xlims1, ylims1, pole_latlon=(45.0, 35.0)) + c2 = _make_test_cube(shape2, xlims2, ylims2) + c2.coord("latitude").coord_system = c1.coord("grid_latitude").coord_system + with pytest.raises(ValueError, match=emsg): + regrid_conservative_via_esmpy(c1, c2) + + def test_rotated(self): + """Test area-weighted regrid on more complex area. + + Use two mutually rotated grids, of similar area + same dims. + Only a small central region in each is non-zero, which maps entirely + inside the other region. + So the area-sum totals should match exactly. 
+ + """ + # create source test cube on rotated form + pole_lat = 53.4 + pole_lon = -173.2 + deg_swing = 35.3 + pole_lon += deg_swing + c1_nx = 9 + 6 + c1_ny = 7 + 6 + c1_xlims = -60.0, 60.0 + c1_ylims = -45.0, 20.0 + c1_xlims = [x - deg_swing for x in c1_xlims] + c1 = _make_test_cube( + (c1_nx, c1_ny), + c1_xlims, + c1_ylims, + pole_latlon=(pole_lat, pole_lon), + ) + c1.data[3:-3, 3:-3] = np.array( + [ + [100, 100, 100, 100, 100, 100, 100, 100, 100], + [100, 100, 100, 100, 100, 100, 100, 100, 100], + [100, 100, 199, 199, 199, 199, 100, 100, 100], + [100, 100, 100, 100, 199, 199, 100, 100, 100], + [100, 100, 100, 100, 199, 199, 199, 100, 100], + [100, 100, 100, 100, 100, 100, 100, 100, 100], + [100, 100, 100, 100, 100, 100, 100, 100, 100], + ], + dtype=np.float64, + ) + + c1_areasum = _cube_area_sum(c1) + + # construct target cube to receive + nx2 = 9 + 6 + ny2 = 7 + 6 + c2_xlims = -100.0, 120.0 + c2_ylims = -20.0, 50.0 + c2 = _make_test_cube((nx2, ny2), c2_xlims, c2_ylims) + c2.data = np.ma.array(c2.data, mask=True) + + # perform regrid + c1to2 = regrid_conservative_via_esmpy(c1, c2) + + # check we have zeros (or nearly) all around the edge.. + c1toc2_zeros = np.ma.array(c1to2.data) + c1toc2_zeros[c1toc2_zeros.mask] = 0.0 + c1toc2_zeros = np.abs(c1toc2_zeros.mask) < 1.0e-6 + _shared_utils.assert_array_equal(c1toc2_zeros[0, :], True) + _shared_utils.assert_array_equal(c1toc2_zeros[-1, :], True) + _shared_utils.assert_array_equal(c1toc2_zeros[:, 0], True) + _shared_utils.assert_array_equal(c1toc2_zeros[:, -1], True) + + # check the area-sum operation + c1to2_areasum = _cube_area_sum(c1to2) + _shared_utils.assert_array_all_close(c1to2_areasum, c1_areasum, rtol=0.004) + + # + # Now repeat, transforming backwards ... 
+ # + c1.data = np.ma.array(c1.data, mask=True) + c2.data[:] = 0.0 + c2.data[5:-5, 5:-5] = np.array( + [ + [199, 199, 199, 199, 100], + [100, 100, 199, 199, 100], + [100, 100, 199, 199, 199], + ], + dtype=np.float64, + ) + c2_areasum = _cube_area_sum(c2) + + c2toc1 = regrid_conservative_via_esmpy(c2, c1) + + # check we have zeros (or nearly) all around the edge.. + c2toc1_zeros = np.ma.array(c2toc1.data) + c2toc1_zeros[c2toc1_zeros.mask] = 0.0 + c2toc1_zeros = np.abs(c2toc1_zeros.mask) < 1.0e-6 + _shared_utils.assert_array_equal(c2toc1_zeros[0, :], True) + _shared_utils.assert_array_equal(c2toc1_zeros[-1, :], True) + _shared_utils.assert_array_equal(c2toc1_zeros[:, 0], True) + _shared_utils.assert_array_equal(c2toc1_zeros[:, -1], True) + + # check the area-sum operation + c2toc1_areasum = _cube_area_sum(c2toc1) + _shared_utils.assert_array_all_close(c2toc1_areasum, c2_areasum, rtol=0.004) + + def test_missing_data_rotated(self): + """Check missing-data handling between different coordinate systems. + + Regrid between mutually rotated lat/lon systems, and check results for + missing data due to grid edge overlap, and source-data masking. 
+ + """ + for do_add_missing in (False, True): + # create source test cube on rotated form + pole_lat = 53.4 + pole_lon = -173.2 + deg_swing = 35.3 + pole_lon += deg_swing + c1_nx = 9 + 6 + c1_ny = 7 + 6 + c1_xlims = -60.0, 60.0 + c1_ylims = -45.0, 20.0 + c1_xlims = [x - deg_swing for x in c1_xlims] + c1 = _make_test_cube( + (c1_nx, c1_ny), + c1_xlims, + c1_ylims, + pole_latlon=(pole_lat, pole_lon), + ) + c1.data = np.ma.array(c1.data, mask=False) + c1.data[3:-3, 3:-3] = np.ma.array( + [ + [100, 100, 100, 100, 100, 100, 100, 100, 100], + [100, 100, 100, 100, 100, 100, 100, 100, 100], + [100, 100, 199, 199, 199, 199, 100, 100, 100], + [100, 100, 100, 100, 199, 199, 100, 100, 100], + [100, 100, 100, 100, 199, 199, 199, 100, 100], + [100, 100, 100, 100, 100, 100, 100, 100, 100], + [100, 100, 100, 100, 100, 100, 100, 100, 100], + ], + dtype=np.float64, + ) + + if do_add_missing: + c1.data = np.ma.array(c1.data) + c1.data[7, 7] = np.ma.masked + c1.data[3:5, 10:12] = np.ma.masked + + # construct target cube to receive + nx2 = 9 + 6 + ny2 = 7 + 6 + c2_xlims = -80.0, 80.0 + c2_ylims = -20.0, 50.0 + c2 = _make_test_cube((nx2, ny2), c2_xlims, c2_ylims) + c2.data = np.ma.array(c2.data, mask=True) + + # perform regrid + snapshot test results + c1toc2 = regrid_conservative_via_esmpy(c1, c2) + + # check masking of result is as expected + # (generated by inspecting plot of how src+dst grids overlap) + expected_mask_valuemap = np.array( + # KEY: 0=masked, 7=present, 5=masked with masked datapoints + [ + [0, 0, 0, 0, 7, 7, 7, 0, 0, 0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 7, 7, 7, 7, 0, 0, 0, 0, 0, 0, 0], + [0, 0, 0, 7, 7, 7, 7, 7, 7, 7, 0, 0, 0, 0, 0], + [0, 0, 0, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 0, 0], + [0, 0, 0, 7, 7, 7, 7, 7, 7, 7, 5, 5, 7, 0, 0], + [0, 0, 7, 7, 7, 7, 7, 7, 7, 7, 5, 5, 7, 0, 0], + [0, 0, 0, 7, 7, 7, 7, 7, 7, 7, 5, 5, 7, 0, 0], + [0, 0, 0, 7, 7, 7, 7, 5, 5, 7, 7, 7, 7, 0, 0], + [0, 0, 0, 0, 7, 7, 7, 5, 5, 7, 7, 7, 7, 0, 0], + [0, 0, 0, 0, 0, 7, 7, 7, 7, 7, 7, 7, 7, 7, 
0], + [0, 0, 0, 0, 0, 7, 7, 7, 7, 7, 7, 7, 7, 7, 0], + [0, 0, 0, 0, 0, 0, 7, 7, 7, 7, 7, 7, 7, 7, 0], + [0, 0, 0, 0, 0, 0, 0, 0, 7, 7, 7, 7, 7, 7, 0], + ] + ) + + if do_add_missing: + expected_mask = expected_mask_valuemap < 7 + else: + expected_mask = expected_mask_valuemap == 0 + + actual_mask = c1toc2.data.mask + _shared_utils.assert_array_equal(actual_mask, expected_mask) + + if not do_add_missing: + # check preservation of area-sums + # NOTE: does *not* work with missing data, even theoretically, + # as the 'missing areas' are not the same. + c1_areasum = _cube_area_sum(c1) + c1to2_areasum = _cube_area_sum(c1toc2) + _shared_utils.assert_array_all_close( + c1_areasum, c1to2_areasum, rtol=0.003 + ) From b2722790a5e9ea0013932a3d6e704adbaa024752 Mon Sep 17 00:00:00 2001 From: Elias Sadek Date: Tue, 10 Mar 2026 15:07:30 +0000 Subject: [PATCH 11/22] stratify and extras --- .../tests/integration/_shapefiles/__init__.py | 0 .../_shapefiles/test_create_shape_mask.py | 2 +- .../_shapefiles/test_get_weighted_mask.py | 2 +- .../_shapefiles/test_is_geometry_valid.py | 2 +- .../test_make_raster_cube_transform.py | 2 +- .../_shapefiles/test_transform_geometry.py | 2 +- lib/iris/tests/integration/util/__init__.py | 5 + .../util/test_mask_cube_from_shape.py | 205 ++++++++++++++++++ .../util/test_mask_cube_from_shapefile.py | 128 +++++++++++ .../experimental/stratify/test_relevel.py | 28 ++- 10 files changed, 370 insertions(+), 6 deletions(-) create mode 100644 lib/iris/tests/integration/_shapefiles/__init__.py rename lib/iris/tests/{unit => integration}/_shapefiles/test_create_shape_mask.py (99%) rename lib/iris/tests/{unit => integration}/_shapefiles/test_get_weighted_mask.py (97%) rename lib/iris/tests/{unit => integration}/_shapefiles/test_is_geometry_valid.py (98%) rename lib/iris/tests/{unit => integration}/_shapefiles/test_make_raster_cube_transform.py (96%) rename lib/iris/tests/{unit => integration}/_shapefiles/test_transform_geometry.py (98%) create mode 100644 
lib/iris/tests/integration/util/__init__.py create mode 100644 lib/iris/tests/integration/util/test_mask_cube_from_shape.py create mode 100644 lib/iris/tests/integration/util/test_mask_cube_from_shapefile.py diff --git a/lib/iris/tests/integration/_shapefiles/__init__.py b/lib/iris/tests/integration/_shapefiles/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/lib/iris/tests/unit/_shapefiles/test_create_shape_mask.py b/lib/iris/tests/integration/_shapefiles/test_create_shape_mask.py similarity index 99% rename from lib/iris/tests/unit/_shapefiles/test_create_shape_mask.py rename to lib/iris/tests/integration/_shapefiles/test_create_shape_mask.py index 32ca99d48d..fb7415da1a 100644 --- a/lib/iris/tests/unit/_shapefiles/test_create_shape_mask.py +++ b/lib/iris/tests/integration/_shapefiles/test_create_shape_mask.py @@ -2,7 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Unit tests for :func:`iris._shapefiles.create_shape_mask`.""" +"""Integration tests for :func:`iris._shapefiles.create_shape_mask`.""" import numpy as np from pyproj import CRS diff --git a/lib/iris/tests/unit/_shapefiles/test_get_weighted_mask.py b/lib/iris/tests/integration/_shapefiles/test_get_weighted_mask.py similarity index 97% rename from lib/iris/tests/unit/_shapefiles/test_get_weighted_mask.py rename to lib/iris/tests/integration/_shapefiles/test_get_weighted_mask.py index 6863fb1847..963d321204 100644 --- a/lib/iris/tests/unit/_shapefiles/test_get_weighted_mask.py +++ b/lib/iris/tests/integration/_shapefiles/test_get_weighted_mask.py @@ -2,7 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. 
-"""Unit tests for :func:`iris._shapefiles._get_weighted_mask`.""" +"""Integration tests for :func:`iris._shapefiles._get_weighted_mask`.""" import numpy as np import pytest diff --git a/lib/iris/tests/unit/_shapefiles/test_is_geometry_valid.py b/lib/iris/tests/integration/_shapefiles/test_is_geometry_valid.py similarity index 98% rename from lib/iris/tests/unit/_shapefiles/test_is_geometry_valid.py rename to lib/iris/tests/integration/_shapefiles/test_is_geometry_valid.py index 8605d72d8b..6902ad22e5 100644 --- a/lib/iris/tests/unit/_shapefiles/test_is_geometry_valid.py +++ b/lib/iris/tests/integration/_shapefiles/test_is_geometry_valid.py @@ -2,7 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Unit tests for :func:`iris._shapefiles.is_geometry_valid`.""" +"""Integration tests for :func:`iris._shapefiles.is_geometry_valid`.""" from pyproj import CRS import pytest diff --git a/lib/iris/tests/unit/_shapefiles/test_make_raster_cube_transform.py b/lib/iris/tests/integration/_shapefiles/test_make_raster_cube_transform.py similarity index 96% rename from lib/iris/tests/unit/_shapefiles/test_make_raster_cube_transform.py rename to lib/iris/tests/integration/_shapefiles/test_make_raster_cube_transform.py index 239cce15b2..253da96796 100644 --- a/lib/iris/tests/unit/_shapefiles/test_make_raster_cube_transform.py +++ b/lib/iris/tests/integration/_shapefiles/test_make_raster_cube_transform.py @@ -2,7 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. 
-"""Unit tests for :func:`iris._shapefiles._make_raster_cube_transform`.""" +"""Integration tests for :func:`iris._shapefiles._make_raster_cube_transform`.""" from affine import Affine import numpy as np diff --git a/lib/iris/tests/unit/_shapefiles/test_transform_geometry.py b/lib/iris/tests/integration/_shapefiles/test_transform_geometry.py similarity index 98% rename from lib/iris/tests/unit/_shapefiles/test_transform_geometry.py rename to lib/iris/tests/integration/_shapefiles/test_transform_geometry.py index 6aff0931c5..d56a010d67 100644 --- a/lib/iris/tests/unit/_shapefiles/test_transform_geometry.py +++ b/lib/iris/tests/integration/_shapefiles/test_transform_geometry.py @@ -2,7 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. -"""Unit tests for :func:`iris._shapefiles._transform_geometry`.""" +"""Integration tests for :func:`iris._shapefiles._transform_geometry`.""" import numpy as np import pyproj diff --git a/lib/iris/tests/integration/util/__init__.py b/lib/iris/tests/integration/util/__init__.py new file mode 100644 index 0000000000..687cf3e006 --- /dev/null +++ b/lib/iris/tests/integration/util/__init__.py @@ -0,0 +1,5 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. +"""Integration tests for the :mod:`iris.util` module.""" diff --git a/lib/iris/tests/integration/util/test_mask_cube_from_shape.py b/lib/iris/tests/integration/util/test_mask_cube_from_shape.py new file mode 100644 index 0000000000..ef7b658589 --- /dev/null +++ b/lib/iris/tests/integration/util/test_mask_cube_from_shape.py @@ -0,0 +1,205 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
+"""Unit tests for :func:`iris.util.mask_cube_from_shapefile`.""" + +import numpy as np +from pyproj import CRS +import pytest +from shapely.geometry import box + +from iris.coord_systems import GeogCS +from iris.coords import DimCoord +from iris.cube import Cube +from iris.util import array_equal, is_masked, mask_cube_from_shape + + +@pytest.fixture +def square_polygon(): + # Create a roughly 3x3 square polygon + return box(2.4, 2.4, 6.4, 6.4) + + +@pytest.fixture +def mock_cube(): + """Create a mock 9x9 Iris cube for testing.""" + x_points = np.linspace(1, 9, 9) - 0.5 # Specify cube cell midpoints + y_points = np.linspace(1, 9, 9) - 0.5 + x_coord = DimCoord( + x_points, + standard_name="longitude", + units="degrees", + coord_system=GeogCS(6371229), + ) + y_coord = DimCoord( + y_points, + standard_name="latitude", + units="degrees", + coord_system=GeogCS(6371229), + ) + data = np.ones((len(y_points), len(x_points))) + cube = Cube(data, dim_coords_and_dims=[(y_coord, 0), (x_coord, 1)]) + return cube + + +def test_mask_cube_from_shape_inplace(mock_cube, square_polygon): + masked_cube = mask_cube_from_shape( + cube=mock_cube, + shape=square_polygon, + shape_crs=CRS.from_epsg(4326), + in_place=True, + ) + assert masked_cube is None + assert is_masked(mock_cube.data) + + +def test_mask_cube_from_shape_not_inplace(mock_cube, square_polygon): + masked_cube = mask_cube_from_shape( + cube=mock_cube, + shape=square_polygon, + shape_crs=CRS.from_epsg(4326), + in_place=False, + ) + assert masked_cube is not None + assert is_masked(masked_cube.data) + # Original cube should remain unmasked + assert not is_masked(mock_cube.data) + + +@pytest.mark.parametrize( + ("minimum_weight", "expected_output"), + [ + ( + 0.0, + np.array( + [ + [0, 0, 0, 0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0, 0, 0, 0], + [0, 0, 1, 1, 1, 1, 1, 0, 0], + [0, 0, 1, 1, 1, 1, 1, 0, 0], + [0, 0, 1, 1, 1, 1, 1, 0, 0], + [0, 0, 1, 1, 1, 1, 1, 0, 0], + [0, 0, 1, 1, 1, 1, 1, 0, 0], + [0, 0, 0, 0, 0, 0, 0, 0, 0], + 
[0, 0, 0, 0, 0, 0, 0, 0, 0], + ], + ), + ), + ( + 0.5, + np.array( + [ + [0, 0, 0, 0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0, 0, 0, 0], + [0, 0, 0, 1, 1, 1, 0, 0, 0], + [0, 0, 1, 1, 1, 1, 0, 0, 0], + [0, 0, 1, 1, 1, 1, 0, 0, 0], + [0, 0, 1, 1, 1, 1, 0, 0, 0], + [0, 0, 0, 0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0, 0, 0, 0], + ], + ), + ), + ( + 1.0, + np.array( + [ + [0, 0, 0, 0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0, 0, 0, 0], + [0, 0, 0, 1, 1, 1, 0, 0, 0], + [0, 0, 0, 1, 1, 1, 0, 0, 0], + [0, 0, 0, 1, 1, 1, 0, 0, 0], + [0, 0, 0, 0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0, 0, 0, 0], + ], + ), + ), + ], +) +def test_basic_mask_cube_from_shape( + mock_cube, square_polygon, minimum_weight, expected_output +): + """Test the create_shape_mask function with different minimum weights.""" + expected_cube = mock_cube.copy( + data=np.ma.array( + expected_output, dtype=float, mask=np.logical_not(expected_output) + ) + ) + # Create a mask using the square polygon + mask = mask_cube_from_shape( + cube=mock_cube, + shape=square_polygon, + shape_crs=None, + minimum_weight=minimum_weight, + ) + + assert array_equal(mask.data, expected_cube.data) + + +def test_mask_cube_from_shape_invert(mock_cube, square_polygon): + """Test the create_shape_mask function with different minimum weights.""" + expected_output = np.array( + [ + [0, 0, 0, 0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0, 0, 0, 0], + [0, 0, 1, 1, 1, 1, 1, 0, 0], + [0, 0, 1, 1, 1, 1, 1, 0, 0], + [0, 0, 1, 1, 1, 1, 1, 0, 0], + [0, 0, 1, 1, 1, 1, 1, 0, 0], + [0, 0, 1, 1, 1, 1, 1, 0, 0], + [0, 0, 0, 0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0, 0, 0, 0], + ], + ) + + expected_cube = mock_cube.copy( + data=np.ma.array( + np.logical_not(expected_output), dtype=float, mask=expected_output + ) + ) + # Create a mask using the square polygon + mask = mask_cube_from_shape( + cube=mock_cube, + shape=square_polygon, + shape_crs=None, + minimum_weight=0, + invert=True, 
+ ) + + assert array_equal(mask.data, expected_cube.data) + + +def test_mask_cube_from_shape_all_touched(mock_cube, square_polygon): + """Test the create_shape_mask function with different minimum weights.""" + expected_output = np.array( + [ + [0, 0, 0, 0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0, 0, 0, 0], + [0, 0, 1, 1, 1, 1, 1, 0, 0], + [0, 0, 1, 1, 1, 1, 1, 0, 0], + [0, 0, 1, 1, 1, 1, 1, 0, 0], + [0, 0, 1, 1, 1, 1, 1, 0, 0], + [0, 0, 1, 1, 1, 1, 1, 0, 0], + [0, 0, 0, 0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0, 0, 0, 0], + ], + ) + + expected_cube = mock_cube.copy( + data=np.ma.array( + expected_output, dtype=float, mask=np.logical_not(expected_output) + ) + ) + # Create a mask using the square polygon + mask = mask_cube_from_shape( + cube=mock_cube, + shape=square_polygon, + shape_crs=None, + all_touched=True, + ) + + assert array_equal(mask.data, expected_cube.data) diff --git a/lib/iris/tests/integration/util/test_mask_cube_from_shapefile.py b/lib/iris/tests/integration/util/test_mask_cube_from_shapefile.py new file mode 100644 index 0000000000..845867ebae --- /dev/null +++ b/lib/iris/tests/integration/util/test_mask_cube_from_shapefile.py @@ -0,0 +1,128 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
+"""Unit tests for :func:`iris.util.mask_cube_from_shapefile`.""" + +import numpy as np +import pytest +from shapely.geometry import box + +from iris.coord_systems import GeogCS +from iris.coords import DimCoord +from iris.cube import Cube +from iris.util import array_equal, mask_cube_from_shapefile + + +@pytest.fixture +def square_polygon(): + # Create a roughly 3x3 square polygon + return box(2.4, 2.4, 6.4, 6.4) + + +@pytest.fixture +def mock_cube(): + """Create a mock 9x9 Iris cube for testing.""" + x_points = np.linspace(1, 9, 9) - 0.5 # Specify cube cell midpoints + y_points = np.linspace(1, 9, 9) - 0.5 + x_coord = DimCoord( + x_points, + standard_name="longitude", + units="degrees", + coord_system=GeogCS(6371229), + ) + y_coord = DimCoord( + y_points, + standard_name="latitude", + units="degrees", + coord_system=GeogCS(6371229), + ) + data = np.ones((len(y_points), len(x_points))) + cube = Cube(data, dim_coords_and_dims=[(y_coord, 0), (x_coord, 1)]) + return cube + + +def test_mask_cube_from_shapefile_inplace( + mock_cube, +): + shape = box(0, 0, 10, 10) + masked_cube = mask_cube_from_shapefile(mock_cube, shape, in_place=True) + assert masked_cube is None + + +def test_mask_cube_from_shapefile_not_inplace(mock_cube): + shape = box(0, 0, 10, 10) + masked_cube = mask_cube_from_shapefile(mock_cube, shape, in_place=False) + assert masked_cube is not None + + +@pytest.mark.parametrize( + ("minimum_weight", "expected_output"), + [ + ( + 0.0, + np.array( + [ + [0, 0, 0, 0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0, 0, 0, 0], + [0, 0, 1, 1, 1, 1, 1, 0, 0], + [0, 0, 1, 1, 1, 1, 1, 0, 0], + [0, 0, 1, 1, 1, 1, 1, 0, 0], + [0, 0, 1, 1, 1, 1, 1, 0, 0], + [0, 0, 1, 1, 1, 1, 1, 0, 0], + [0, 0, 0, 0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0, 0, 0, 0], + ], + ), + ), + ( + 0.5, + np.array( + [ + [0, 0, 0, 0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0, 0, 0, 0], + [0, 0, 0, 1, 1, 1, 0, 0, 0], + [0, 0, 1, 1, 1, 1, 0, 0, 0], + [0, 0, 1, 1, 1, 1, 0, 0, 0], + [0, 0, 1, 1, 1, 1, 0, 0, 0], + [0, 0, 
0, 0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0, 0, 0, 0], + ], + ), + ), + ( + 1.0, + np.array( + [ + [0, 0, 0, 0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0, 0, 0, 0], + [0, 0, 0, 1, 1, 1, 0, 0, 0], + [0, 0, 0, 1, 1, 1, 0, 0, 0], + [0, 0, 0, 1, 1, 1, 0, 0, 0], + [0, 0, 0, 0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0, 0, 0, 0], + [0, 0, 0, 0, 0, 0, 0, 0, 0], + ], + ), + ), + ], +) +def test_basic_mask_cube_from_shape( + mock_cube, square_polygon, minimum_weight, expected_output +): + """Test the create_shape_mask function with different minimum weights.""" + expected_cube = mock_cube.copy( + data=np.ma.array( + expected_output, dtype=float, mask=np.logical_not(expected_output) + ) + ) + # Create a mask using the square polygon + mask = mask_cube_from_shapefile( + cube=mock_cube, + shape=square_polygon, + minimum_weight=minimum_weight, + ) + + assert array_equal(mask.data, expected_cube.data) diff --git a/lib/iris/tests/unit/experimental/stratify/test_relevel.py b/lib/iris/tests/unit/experimental/stratify/test_relevel.py index a430647891..c202d9fc01 100644 --- a/lib/iris/tests/unit/experimental/stratify/test_relevel.py +++ b/lib/iris/tests/unit/experimental/stratify/test_relevel.py @@ -4,14 +4,30 @@ # See LICENSE in the root of the repository for full licensing details. 
"""Unit tests for the :func:`iris.experimental.stratify.relevel` function.""" +import sys +import types + import numpy as np import pytest from iris.coords import AuxCoord, DimCoord -from iris.experimental.stratify import relevel import iris.tests.stock as stock +@pytest.fixture(autouse=True, scope="module") +def fake_stratify(): + try: + import stratify + except: + fake = types.ModuleType("stratify") + fake.interpolate = lambda *a, **k: None + sys.modules["stratify"] = fake + yield + finally: + # Remove fake after tests in this module complete + sys.modules.pop("stratify", None) + + class Test: @pytest.fixture(autouse=True) def _setup(self, mocker): @@ -30,18 +46,24 @@ def _setup(self, mocker): self.patch_interpolate.return_value = np.ones((3, 1, 1)) def test_broadcast_fail_src_levels(self): + from iris.experimental.stratify import relevel + emsg = "Cannot broadcast the cube and src_levels" data = np.arange(60).reshape(3, 4, 5) with pytest.raises(ValueError, match=emsg): relevel(self.cube, AuxCoord(data), [1, 2, 3]) def test_broadcast_fail_tgt_levels(self): + from iris.experimental.stratify import relevel + emsg = "Cannot broadcast the cube and tgt_levels" data = np.arange(60).reshape(3, 4, 5) with pytest.raises(ValueError, match=emsg): relevel(self.cube, self.coord, data) def test_standard_input(self): + from iris.experimental.stratify import relevel + for axis in self.axes: result = relevel(self.cube, self.src_levels, [-1, 0, 5.5], axis=axis) expected = DimCoord([-1, 0, 5.5], units=1, long_name="thingness") @@ -49,6 +71,8 @@ def test_standard_input(self): self.patch_interpolate.assert_called() def test_coord_input(self): + from iris.experimental.stratify import relevel + source = AuxCoord(self.src_levels.data) metadata = self.src_levels.metadata._asdict() metadata["coord_system"] = None @@ -61,6 +85,8 @@ def test_coord_input(self): self.patch_interpolate.assert_called() def test_custom_interpolator(self, mocker): + from iris.experimental.stratify import relevel 
+ mock_interpolate = mocker.Mock() mock_interpolate.return_value = np.ones((3, 1, 1)) From 052c98d1c6c4b843afcea9a104ac51cbb122ed92 Mon Sep 17 00:00:00 2001 From: Elias Sadek Date: Tue, 10 Mar 2026 15:09:38 +0000 Subject: [PATCH 12/22] regridding --- .../tests/unit/experimental/stratify/conf.py | 18 +++++ lib/iris/tests/unit/pandas/test_pandas.py | 66 ++++++------------- 2 files changed, 38 insertions(+), 46 deletions(-) create mode 100644 lib/iris/tests/unit/experimental/stratify/conf.py diff --git a/lib/iris/tests/unit/experimental/stratify/conf.py b/lib/iris/tests/unit/experimental/stratify/conf.py new file mode 100644 index 0000000000..114f350441 --- /dev/null +++ b/lib/iris/tests/unit/experimental/stratify/conf.py @@ -0,0 +1,18 @@ +import sys +import types + +import pytest + + +@pytest.fixture(autouse=True, scope="module") +def fake_stratify(): + try: + import stratify + except: + fake = types.ModuleType("iris.experimental.stratify") + fake.interpolate = lambda *a, **k: None + sys.modules["iris.experimental.stratify"] = fake + yield + finally: + # Remove fake after tests in this module complete + sys.modules.pop("iris.experimental.stratify", None) diff --git a/lib/iris/tests/unit/pandas/test_pandas.py b/lib/iris/tests/unit/pandas/test_pandas.py index f83004a31b..83e02ac5fc 100644 --- a/lib/iris/tests/unit/pandas/test_pandas.py +++ b/lib/iris/tests/unit/pandas/test_pandas.py @@ -54,20 +54,34 @@ def activate_pandas_ndim(): class TestAsSeries: """Test conversion of 1D cubes to Pandas using as_series().""" + @pytest.fixture(autouse=True) + def _setup(self, mocker): + self._series = mocker.patch("iris.pandas.pd.Series") + def test_no_dim_coord(self): cube = Cube(np.array([0, 1, 2, 3, 4]), long_name="foo") - series = iris.pandas.as_series(cube) - expected_index = np.array([0, 1, 2, 3, 4]) - _shared_utils.assert_array_equal(series, cube.data) - _shared_utils.assert_array_equal(series.index, expected_index) + expected_data = cube.data + + iris.pandas.as_series(cube) + 
args, _ = self._series.call_args + + self._series.assert_called_once() + _shared_utils.assert_array_equal(args[0], expected_data) + # index is only assigned with dim_coords present + assert args[1] is None def test_simple(self): cube = Cube(np.array([0, 1, 2, 3, 4.4]), long_name="foo") dim_coord = DimCoord([5, 6, 7, 8, 9], long_name="bar") cube.add_dim_coord(dim_coord, 0) - expected_index = np.array([5, 6, 7, 8, 9]) + expected_index = dim_coord.points[0] + expected_data = cube.data + series = iris.pandas.as_series(cube) - _shared_utils.assert_array_equal(series, cube.data) + args, _ = self._series.call_args + + self._series.assert_called_once() + _shared_utils.assert_array_equal(args[0], expected_data) _shared_utils.assert_array_equal(series.index, expected_index) def test_masked(self): @@ -76,46 +90,6 @@ def test_masked(self): series = iris.pandas.as_series(cube) _shared_utils.assert_array_equal(series, cube.data.astype("f").filled(np.nan)) - def test_time_standard(self): - cube = Cube(np.array([0, 1, 2, 3, 4]), long_name="ts") - time_coord = DimCoord( - [0, 100.1, 200.2, 300.3, 400.4], - long_name="time", - units="days since 2000-01-01 00:00", - ) - cube.add_dim_coord(time_coord, 0) - expected_index = [ - datetime.datetime(2000, 1, 1, 0, 0), - datetime.datetime(2000, 4, 10, 2, 24), - datetime.datetime(2000, 7, 19, 4, 48), - datetime.datetime(2000, 10, 27, 7, 12), - datetime.datetime(2001, 2, 4, 9, 36), - ] - series = iris.pandas.as_series(cube) - _shared_utils.assert_array_equal(series, cube.data) - assert list(series.index) == expected_index - - def test_time_360(self): - cube = Cube(np.array([0, 1, 2, 3, 4]), long_name="ts") - time_unit = cf_units.Unit( - "days since 2000-01-01 00:00", calendar=cf_units.CALENDAR_360_DAY - ) - time_coord = DimCoord( - [0, 100.1, 200.2, 300.3, 400.4], long_name="time", units=time_unit - ) - cube.add_dim_coord(time_coord, 0) - expected_index = [ - cftime.Datetime360Day(2000, 1, 1, 0, 0), - cftime.Datetime360Day(2000, 4, 11, 2, 
24), - cftime.Datetime360Day(2000, 7, 21, 4, 48), - cftime.Datetime360Day(2000, 11, 1, 7, 12), - cftime.Datetime360Day(2001, 2, 11, 9, 36), - ] - - series = iris.pandas.as_series(cube) - _shared_utils.assert_array_equal(series, cube.data) - _shared_utils.assert_array_equal(series.index, expected_index) - def test_copy_true(self): cube = Cube(np.array([0, 1, 2, 3, 4]), long_name="foo") series = iris.pandas.as_series(cube) From db818a7ec46cd3aec4d78b57d35087225eb497c1 Mon Sep 17 00:00:00 2001 From: Elias Sadek Date: Wed, 11 Mar 2026 21:34:20 +0000 Subject: [PATCH 13/22] constraints --- lib/iris/tests/test_constraints.py | 20 ++++++++++++++------ 1 file changed, 14 insertions(+), 6 deletions(-) diff --git a/lib/iris/tests/test_constraints.py b/lib/iris/tests/test_constraints.py index 9a7ab5fbdf..5adaa9cef8 100644 --- a/lib/iris/tests/test_constraints.py +++ b/lib/iris/tests/test_constraints.py @@ -305,12 +305,21 @@ def load_match(self, files, constraints): return cubes +@pytest.fixture +def _skip_sample_data_path(): + try: + fname = iris.sample_data_path("atlantic_profiles.nc") + cubes = iris.load(fname) + except ImportError: + cubes = iris.cube.CubeList([stock.simple_pp(), stock.simple_3d()]) + return cubes + + @_shared_utils.skip_data class TestCubeExtract__names(ConstraintMixin): @pytest.fixture(autouse=True) - def _setup(self, _setup_mixin): - fname = iris.sample_data_path("atlantic_profiles.nc") - self.cubes = iris.load(fname) + def _setup(self, _setup_mixin, _skip_sample_data_path): + self.cubes = _skip_sample_data_path cube = iris.load_cube(self.theta_path) # Expected names... 
self.standard_name = "air_potential_temperature" @@ -365,9 +374,8 @@ def test_unknown(self): @_shared_utils.skip_data class TestCubeExtract__name_constraint(ConstraintMixin): @pytest.fixture(autouse=True) - def _setup(self, _setup_mixin): - fname = iris.sample_data_path("atlantic_profiles.nc") - self.cubes = iris.load(fname) + def _setup(self, _setup_mixin, _skip_sample_data_path): + self.cubes = _skip_sample_data_path cube = iris.load_cube(self.theta_path) # Expected names... self.standard_name = "air_potential_temperature" From 2d92178b5600064f12a7a85f5fea988824288132 Mon Sep 17 00:00:00 2001 From: Elias Sadek Date: Thu, 12 Mar 2026 11:17:15 +0000 Subject: [PATCH 14/22] fix stratify failures --- .../tests/unit/experimental/stratify/conf.py | 18 ------------------ 1 file changed, 18 deletions(-) delete mode 100644 lib/iris/tests/unit/experimental/stratify/conf.py diff --git a/lib/iris/tests/unit/experimental/stratify/conf.py b/lib/iris/tests/unit/experimental/stratify/conf.py deleted file mode 100644 index 114f350441..0000000000 --- a/lib/iris/tests/unit/experimental/stratify/conf.py +++ /dev/null @@ -1,18 +0,0 @@ -import sys -import types - -import pytest - - -@pytest.fixture(autouse=True, scope="module") -def fake_stratify(): - try: - import stratify - except: - fake = types.ModuleType("iris.experimental.stratify") - fake.interpolate = lambda *a, **k: None - sys.modules["iris.experimental.stratify"] = fake - yield - finally: - # Remove fake after tests in this module complete - sys.modules.pop("iris.experimental.stratify", None) From b8ce28ded4232993258942ef2cea1e0e6d096887 Mon Sep 17 00:00:00 2001 From: Elias Sadek Date: Thu, 12 Mar 2026 11:37:57 +0000 Subject: [PATCH 15/22] fix stratify failures --- lib/iris/tests/unit/experimental/stratify/test_relevel.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/lib/iris/tests/unit/experimental/stratify/test_relevel.py b/lib/iris/tests/unit/experimental/stratify/test_relevel.py index c202d9fc01..0b8881de68 100644 
--- a/lib/iris/tests/unit/experimental/stratify/test_relevel.py +++ b/lib/iris/tests/unit/experimental/stratify/test_relevel.py @@ -22,6 +22,8 @@ def fake_stratify(): fake = types.ModuleType("stratify") fake.interpolate = lambda *a, **k: None sys.modules["stratify"] = fake + + try: yield finally: # Remove fake after tests in this module complete From 5df8506ef85352758c4cf00e5a6cd88d2ac8034b Mon Sep 17 00:00:00 2001 From: Elias Sadek Date: Thu, 12 Mar 2026 12:18:38 +0000 Subject: [PATCH 16/22] licesnse header fix --- lib/iris/tests/integration/_shapefiles/__init__.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/lib/iris/tests/integration/_shapefiles/__init__.py b/lib/iris/tests/integration/_shapefiles/__init__.py index e69de29bb2..4c8a3cd5d5 100644 --- a/lib/iris/tests/integration/_shapefiles/__init__.py +++ b/lib/iris/tests/integration/_shapefiles/__init__.py @@ -0,0 +1,5 @@ +# Copyright Iris contributors +# +# This file is part of Iris and is released under the BSD license. +# See LICENSE in the root of the repository for full licensing details. 
+"""Integration tests for the :mod:`iris._shapefiles` package.""" From 21ac33570e85aa826624e2261b568f29454bf495 Mon Sep 17 00:00:00 2001 From: Elias Sadek Date: Thu, 12 Mar 2026 14:03:00 +0000 Subject: [PATCH 17/22] pandas --- lib/iris/tests/unit/pandas/test_pandas.py | 13 ------------- 1 file changed, 13 deletions(-) diff --git a/lib/iris/tests/unit/pandas/test_pandas.py b/lib/iris/tests/unit/pandas/test_pandas.py index 3f26375dc2..10d8feaa5f 100644 --- a/lib/iris/tests/unit/pandas/test_pandas.py +++ b/lib/iris/tests/unit/pandas/test_pandas.py @@ -85,25 +85,12 @@ def test_simple(self): _shared_utils.assert_array_equal(args[0], expected_data) _shared_utils.assert_array_equal(series.index, expected_index) - def test_masked(self): - data = np.ma.MaskedArray([0, 1, 2, 3, 4.4], mask=[0, 1, 0, 1, 0]) - cube = Cube(data, long_name="foo") - series = iris.pandas.as_series(cube) - _shared_utils.assert_array_equal(series, cube.data.astype("f").filled(np.nan)) - def test_copy_true(self): cube = Cube(np.array([0, 1, 2, 3, 4]), long_name="foo") series = iris.pandas.as_series(cube) series[0] = 99 assert cube.data[0] == 0 - def test_copy_masked_true(self): - data = np.ma.MaskedArray([0, 1, 2, 3, 4], mask=[0, 1, 0, 1, 0]) - cube = Cube(data, long_name="foo") - series = iris.pandas.as_series(cube) - series[0] = 99 - assert cube.data[0] == 0 - @skip_pandas @pytest.mark.filterwarnings( From 21268ec0d94f584c3e5ccef9bb8b72afdd9111cc Mon Sep 17 00:00:00 2001 From: Elias Sadek Date: Thu, 12 Mar 2026 15:30:29 +0000 Subject: [PATCH 18/22] added cml --- .../relevel/multi_dim_target_levels.cml | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) create mode 100644 lib/iris/tests/results/integration/experimental/stratify/relevel/multi_dim_target_levels.cml diff --git a/lib/iris/tests/results/integration/experimental/stratify/relevel/multi_dim_target_levels.cml b/lib/iris/tests/results/integration/experimental/stratify/relevel/multi_dim_target_levels.cml new file mode 100644 index 
0000000000..b67109636a --- /dev/null +++ b/lib/iris/tests/results/integration/experimental/stratify/relevel/multi_dim_target_levels.cml @@ -0,0 +1,18 @@ + + + + + + + + + + + + + + + + + + From 5b890f1931947818d573f5076c93ad17a3744241 Mon Sep 17 00:00:00 2001 From: Elias Sadek Date: Mon, 16 Mar 2026 10:48:48 +0000 Subject: [PATCH 19/22] deleted old files --- .../test_regrid_conservative_via_esmpy.py | 818 ------------------ .../util/test_mask_cube_from_shapefile.py | 2 +- .../relevel/multi_dim_target_levels.cml | 18 - .../unit/util/test_mask_cube_from_shape.py | 205 ----- .../util/test_mask_cube_from_shapefile.py | 128 --- 5 files changed, 1 insertion(+), 1170 deletions(-) delete mode 100644 lib/iris/tests/experimental/regrid/test_regrid_conservative_via_esmpy.py delete mode 100644 lib/iris/tests/results/unit/experimental/stratify/relevel/multi_dim_target_levels.cml delete mode 100644 lib/iris/tests/unit/util/test_mask_cube_from_shape.py delete mode 100644 lib/iris/tests/unit/util/test_mask_cube_from_shapefile.py diff --git a/lib/iris/tests/experimental/regrid/test_regrid_conservative_via_esmpy.py b/lib/iris/tests/experimental/regrid/test_regrid_conservative_via_esmpy.py deleted file mode 100644 index e6d209c691..0000000000 --- a/lib/iris/tests/experimental/regrid/test_regrid_conservative_via_esmpy.py +++ /dev/null @@ -1,818 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. -"""Tests for :func:`iris.experimental.regrid.regrid_conservative_via_esmpy`.""" - -import contextlib - -import cf_units -import numpy as np -import pytest - -from iris.tests import _shared_utils - -# Import ESMF if installed, else fail quietly + disable all the tests. -try: - import esmpy as ESMF -except ImportError: - ESMF = None -skip_esmf = pytest.mark.skipif( - condition=ESMF is None, reason="Requires ESMF, which is not available." 
-) - -import iris -import iris.analysis -import iris.analysis.cartography as i_cartog -from iris.experimental.regrid_conservative import regrid_conservative_via_esmpy -import iris.tests.stock as istk - -_PLAIN_GEODETIC_CS = iris.coord_systems.GeogCS(i_cartog.DEFAULT_SPHERICAL_EARTH_RADIUS) - - -def _make_test_cube(shape, xlims, ylims, pole_latlon=None): - """Create latlon cube (optionally rotated) with given xy dimensions and bounds - limit values. - - Produces a regular grid in source coordinates. - Does not work for 1xN or Nx1 grids, because guess_bounds fails. - - """ - nx, ny = shape - cube = iris.cube.Cube(np.zeros((ny, nx))) - xvals = np.linspace(xlims[0], xlims[1], nx) - yvals = np.linspace(ylims[0], ylims[1], ny) - coordname_prefix = "" - cs = _PLAIN_GEODETIC_CS - if pole_latlon is not None: - coordname_prefix = "grid_" - pole_lat, pole_lon = pole_latlon - cs = iris.coord_systems.RotatedGeogCS( - grid_north_pole_latitude=pole_lat, - grid_north_pole_longitude=pole_lon, - ellipsoid=cs, - ) - - co_x = iris.coords.DimCoord( - xvals, - standard_name=coordname_prefix + "longitude", - units=cf_units.Unit("degrees"), - coord_system=cs, - ) - co_x.guess_bounds() - cube.add_dim_coord(co_x, 1) - co_y = iris.coords.DimCoord( - yvals, - standard_name=coordname_prefix + "latitude", - units=cf_units.Unit("degrees"), - coord_system=cs, - ) - co_y.guess_bounds() - cube.add_dim_coord(co_y, 0) - return cube - - -def _cube_area_sum(cube): - """Calculate total area-sum - Iris can't do this in one operation.""" - area_sums = cube * i_cartog.area_weights(cube, normalize=False) - area_sum = area_sums.collapsed(area_sums.coords(dim_coords=True), iris.analysis.SUM) - return area_sum.data.flatten()[0] - - -def _reldiff(a, b): - """Compute a relative-difference measure between real numbers. 
- - Result is: - if a == b == 0: - 0.0 - otherwise: - |a - b| / mean(|a|, |b|) - - """ - if a == 0.0 and b == 0.0: - return 0.0 - return abs(a - b) * 2.0 / (abs(a) + abs(b)) - - -def _minmax(v): - """Calculate [min, max] of input.""" - return [f(v) for f in (np.min, np.max)] - - -@contextlib.contextmanager -def _donothing_context_manager(): - yield - - -@skip_esmf -class TestConservativeRegrid: - @pytest.fixture(autouse=True) - def _setup(self): - # Compute basic test data cubes. - shape1 = (5, 5) - xlims1, ylims1 = ((-2, 2), (-2, 2)) - c1 = _make_test_cube(shape1, xlims1, ylims1) - c1.data[:] = 0.0 - c1.data[2, 2] = 1.0 - - shape2 = (4, 4) - xlims2, ylims2 = ((-1.5, 1.5), (-1.5, 1.5)) - c2 = _make_test_cube(shape2, xlims2, ylims2) - c2.data[:] = 0.0 - - # Save timesaving pre-computed bits - self.stock_c1_c2 = (c1, c2) - self.stock_regrid_c1toc2 = regrid_conservative_via_esmpy(c1, c2) - self.stock_c1_areasum = _cube_area_sum(c1) - - def test_simple_areas(self): - """Test area-conserving regrid between simple "near-square" grids. - - Grids have overlapping areas in the same (lat-lon) coordinate system. - Grids are "nearly flat" lat-lon spaces (small ranges near the equator). - - """ - c1, c2 = self.stock_c1_c2 - c1_areasum = self.stock_c1_areasum - - # main regrid - c1to2 = regrid_conservative_via_esmpy(c1, c2) - - c1to2_areasum = _cube_area_sum(c1to2) - - # Check expected result (Cartesian equivalent, so not exact). - d_expect = np.array( - [ - [0.00, 0.00, 0.00, 0.00], - [0.00, 0.25, 0.25, 0.00], - [0.00, 0.25, 0.25, 0.00], - [0.00, 0.00, 0.00, 0.00], - ] - ) - # Numbers are slightly off (~0.25000952). This is expected. - _shared_utils.assert_array_all_close(c1to2.data, d_expect, rtol=5.0e-5) - - # check that the area sums are equivalent, simple total is a bit off - _shared_utils.assert_array_all_close(c1to2_areasum, c1_areasum) - - # - # regrid back onto original grid again ... 
- # - c1to2to1 = regrid_conservative_via_esmpy(c1to2, c1) - - c1to2to1_areasum = _cube_area_sum(c1to2to1) - - # Check expected result (Cartesian/exact difference now greater) - d_expect = np.array( - [ - [0.0, 0.0000, 0.0000, 0.0000, 0.0], - [0.0, 0.0625, 0.1250, 0.0625, 0.0], - [0.0, 0.1250, 0.2500, 0.1250, 0.0], - [0.0, 0.0625, 0.1250, 0.0625, 0.0], - [0.0, 0.0000, 0.0000, 0.0000, 0.0], - ] - ) - _shared_utils.assert_array_all_close(c1to2to1.data, d_expect, atol=0.00002) - - # check area sums again - _shared_utils.assert_array_all_close(c1to2to1_areasum, c1_areasum) - - def test_simple_missing_data(self): - """Check for missing data handling. - - Should mask cells that either .. - (a) go partly outside the source grid - (b) partially overlap masked source data - - """ - c1, c2 = self.stock_c1_c2 - - # regrid from c2 to c1 -- should mask all the edges... - c2_to_c1 = regrid_conservative_via_esmpy(c2, c1) - _shared_utils.assert_array_equal( - c2_to_c1.data.mask, - [ - [True, True, True, True, True], - [True, False, False, False, True], - [True, False, False, False, True], - [True, False, False, False, True], - [True, True, True, True, True], - ], - ) - - # do same with a particular point masked - c2m = c2.copy() - c2m.data = np.ma.array(c2m.data) - c2m.data[1, 1] = np.ma.masked - c2m_to_c1 = regrid_conservative_via_esmpy(c2m, c1) - _shared_utils.assert_array_equal( - c2m_to_c1.data.mask, - [ - [True, True, True, True, True], - [True, True, True, False, True], - [True, True, True, False, True], - [True, False, False, False, True], - [True, True, True, True, True], - ], - ) - - @_shared_utils.skip_data - def test_multidimensional(self): - """Check valid operation on a multidimensional cube. - - Calculation should repeat across multiple dimensions. - Any attached orography is interpolated. - - NOTE: in future, extra dimensions may be passed through to ESMF: At - present, it repeats the calculation on 2d slices. 
So we check that - at least the results are equivalent (as it's quite easy to do). - - """ - # Get some higher-dimensional test data - c1 = istk.realistic_4d() - # Chop down to small size, and mask some data - c1 = c1[:3, :4, :16, :12] - c1.data[:, 2, :, :] = np.ma.masked - c1.data[1, 1, 3:9, 4:7] = np.ma.masked - # Give it a slightly more challenging indexing order: tzyx --> xzty - c1.transpose((3, 1, 0, 2)) - - # Construct a (coarser) target grid of about the same extent - c1_cs = c1.coord(axis="x").coord_system - xlims = _minmax(c1.coord(axis="x").contiguous_bounds()) - ylims = _minmax(c1.coord(axis="y").contiguous_bounds()) - # Reduce the dimensions slightly to avoid NaNs in regridded orography - delta = 0.05 - # || NOTE: this is *not* a small amount. Think there is a bug. - # || NOTE: See https://github.com/SciTools/iris/issues/458 - xlims = np.interp([delta, 1.0 - delta], [0, 1], xlims) - ylims = np.interp([delta, 1.0 - delta], [0, 1], ylims) - pole_latlon = ( - c1_cs.grid_north_pole_latitude, - c1_cs.grid_north_pole_longitude, - ) - c2 = _make_test_cube((7, 8), xlims, ylims, pole_latlon=pole_latlon) - - # regrid onto new grid - c1_to_c2 = regrid_conservative_via_esmpy(c1, c2) - - # check that all the original coords exist in the new cube - # NOTE: this also effectively confirms we haven't lost the orography - def list_coord_names(cube): - return sorted([coord.name() for coord in cube.coords()]) - - assert list_coord_names(c1_to_c2) == list_coord_names(c1) - - # check that each xy 'slice' has same values as if done on its own. - for i_p, i_t in np.ndindex(c1.shape[1:3]): - c1_slice = c1[:, i_p, i_t] - c2_slice = regrid_conservative_via_esmpy(c1_slice, c2) - subcube = c1_to_c2[:, i_p, i_t] - assert subcube == c2_slice - - # check all other metadata - assert c1_to_c2.metadata == c1.metadata - - def test_xy_transposed(self): - # Test effects of transposing X and Y in src/dst data. 
- c1, c2 = self.stock_c1_c2 - testcube_xy = self.stock_regrid_c1toc2 - - # Check that transposed data produces transposed results - # - i.e. regrid(data^T)^T == regrid(data) - c1_yx = c1.copy() - c1_yx.transpose() - testcube_yx = regrid_conservative_via_esmpy(c1_yx, c2) - testcube_yx.transpose() - assert testcube_yx == testcube_xy - - # Check that transposing destination does nothing - c2_yx = c2.copy() - c2_yx.transpose() - testcube_dst_transpose = regrid_conservative_via_esmpy(c1, c2_yx) - assert testcube_dst_transpose == testcube_xy - - def test_same_grid(self): - # Test regridding onto the identical grid. - # Use regrid with self as target. - c1, _ = self.stock_c1_c2 - testcube = regrid_conservative_via_esmpy(c1, c1) - assert testcube == c1 - - def test_global(self): - # Test global regridding. - # Compute basic test data cubes. - shape1 = (8, 6) - xlim1 = 180.0 * (shape1[0] - 1) / shape1[0] - ylim1 = 90.0 * (shape1[1] - 1) / shape1[1] - c1 = _make_test_cube(shape1, (-xlim1, xlim1), (-ylim1, ylim1)) - # Create a small, plausible global array: - # - top + bottom rows all the same - # - left + right columns "mostly close" for checking across the seam - basedata = np.array( - [ - [1, 1, 1, 1, 1, 1, 1, 1], - [1, 1, 4, 4, 4, 2, 2, 1], - [2, 1, 4, 4, 4, 2, 2, 2], - [2, 5, 5, 1, 1, 1, 5, 5], - [5, 5, 5, 1, 1, 1, 5, 5], - [5, 5, 5, 5, 5, 5, 5, 5], - ] - ) - c1.data[:] = basedata - - # Create a rotated grid to regrid this onto. 
- shape2 = (14, 11) - xlim2 = 180.0 * (shape2[0] - 1) / shape2[0] - ylim2 = 90.0 * (shape2[1] - 1) / shape2[1] - c2 = _make_test_cube( - shape2, (-xlim2, xlim2), (-ylim2, ylim2), pole_latlon=(47.4, 25.7) - ) - - # Perform regridding - c1toc2 = regrid_conservative_via_esmpy(c1, c2) - - # Check that before+after area-sums match fairly well - c1_areasum = _cube_area_sum(c1) - c1toc2_areasum = _cube_area_sum(c1toc2) - _shared_utils.assert_array_all_close(c1toc2_areasum, c1_areasum, rtol=0.006) - - def test_global_collapse(self): - # Test regridding global data to a single cell. - # Fetch 'standard' testcube data - c1, _ = self.stock_c1_c2 - c1_areasum = self.stock_c1_areasum - - # Condense entire globe onto a single cell - x_coord_2 = iris.coords.DimCoord( - [0.0], - bounds=[-180.0, 180.0], - standard_name="longitude", - units="degrees", - coord_system=_PLAIN_GEODETIC_CS, - ) - y_coord_2 = iris.coords.DimCoord( - [0.0], - bounds=[-90.0, 90.0], - standard_name="latitude", - units="degrees", - coord_system=_PLAIN_GEODETIC_CS, - ) - c2 = iris.cube.Cube([[0.0]]) - c2.add_dim_coord(y_coord_2, 0) - c2.add_dim_coord(x_coord_2, 1) - - # NOTE: at present, this causes an error inside ESMF ... - emsg = "ESMC_FieldRegridStore failed with rc = 506." - context = pytest.raises(ValueError, match=emsg) - global_cell_supported = False - if global_cell_supported: - context = _donothing_context_manager() - with context: - c1_to_global = regrid_conservative_via_esmpy(c1, c2) - # Check the total area sum is still the same - _shared_utils.assert_array_all_close(c1_to_global.data[0, 0], c1_areasum) - - def test_single_cells(self): - # Test handling of single-cell grids. - # Fetch 'standard' testcube data - c1, c2 = self.stock_c1_c2 - c1_areasum = self.stock_c1_areasum - - # - # At present NxN -> 1x1 "in-place" doesn't seem to work properly - # - result cell has missing-data ? 
- # - # Condense entire region into a single cell in the c1 grid - xlims1 = _minmax(c1.coord(axis="x").bounds) - ylims1 = _minmax(c1.coord(axis="y").bounds) - x_c1x1 = iris.coords.DimCoord( - xlims1[0], - bounds=xlims1, - standard_name="longitude", - units="degrees", - coord_system=_PLAIN_GEODETIC_CS, - ) - y_c1x1 = iris.coords.DimCoord( - ylims1[0], - bounds=ylims1, - standard_name="latitude", - units="degrees", - coord_system=_PLAIN_GEODETIC_CS, - ) - c1x1_gridcube = iris.cube.Cube([[0.0]]) - c1x1_gridcube.add_dim_coord(y_c1x1, 0) - c1x1_gridcube.add_dim_coord(x_c1x1, 1) - c1x1 = regrid_conservative_via_esmpy(c1, c1x1_gridcube) - c1x1_areasum = _cube_area_sum(c1x1) - # Check the total area sum is still the same - condense_to_1x1_supported = False - # NOTE: currently disabled (ESMF gets this wrong) - # NOTE ALSO: call hits numpy 1.7 bug in testing.assert_array_compare. - if condense_to_1x1_supported: - _shared_utils.assert_array_all_close(c1x1_areasum, c1_areasum) - - # Condense entire region onto a single cell covering the area of 'c2' - xlims2 = _minmax(c2.coord(axis="x").bounds) - ylims2 = _minmax(c2.coord(axis="y").bounds) - x_c2x1 = iris.coords.DimCoord( - xlims2[0], - bounds=xlims2, - standard_name="longitude", - units=cf_units.Unit("degrees"), - coord_system=_PLAIN_GEODETIC_CS, - ) - y_c2x1 = iris.coords.DimCoord( - ylims2[0], - bounds=ylims2, - standard_name="latitude", - units=cf_units.Unit("degrees"), - coord_system=_PLAIN_GEODETIC_CS, - ) - c2x1_gridcube = iris.cube.Cube([[0.0]]) - c2x1_gridcube.add_dim_coord(y_c2x1, 0) - c2x1_gridcube.add_dim_coord(x_c2x1, 1) - c1_to_c2x1 = regrid_conservative_via_esmpy(c1, c2x1_gridcube) - - # Check the total area sum is still the same - c1_to_c2x1_areasum = _cube_area_sum(c1_to_c2x1) - _shared_utils.assert_array_all_close(c1_to_c2x1_areasum, c1_areasum, 0.0004) - - # 1x1 -> NxN : regrid single cell to NxN grid - # construct a single-cell approximation to 'c1' with the same area sum. 
- # NOTE: can't use _make_cube (see docstring) - c1x1 = c1.copy()[0:1, 0:1] - xlims1 = _minmax(c1.coord(axis="x").bounds) - ylims1 = _minmax(c1.coord(axis="y").bounds) - c1x1.coord(axis="x").bounds = xlims1 - c1x1.coord(axis="y").bounds = ylims1 - # Assign data mean as single cell value : Maybe not exact, but "close" - c1x1.data[0, 0] = np.mean(c1.data) - - # Regrid this back onto the original NxN grid - c1x1_to_c1 = regrid_conservative_via_esmpy(c1x1, c1) - c1x1_to_c1_areasum = _cube_area_sum(c1x1_to_c1) - - # Check that area sum is ~unchanged, as expected - _shared_utils.assert_array_all_close(c1x1_to_c1_areasum, c1_areasum, 0.0004) - - # Check 1x1 -> 1x1 - # NOTE: can *only* get any result with a fully overlapping cell, so - # just regrid onto self - c1x1toself = regrid_conservative_via_esmpy(c1x1, c1x1) - c1x1toself_areasum = _cube_area_sum(c1x1toself) - _shared_utils.assert_array_all_close(c1x1toself_areasum, c1_areasum, 0.0004) - # NOTE: perhaps surprisingly, this has a similar level of error. - - def test_longitude_wraps(self): - """Check results are independent of where the grid 'seams' are.""" - # First repeat global regrid calculation from 'test_global'. - shape1 = (8, 6) - xlim1 = 180.0 * (shape1[0] - 1) / shape1[0] - ylim1 = 90.0 * (shape1[1] - 1) / shape1[1] - xlims1 = (-xlim1, xlim1) - ylims1 = (-ylim1, ylim1) - c1 = _make_test_cube(shape1, xlims1, ylims1) - - # Create a small, plausible global array (see test_global). 
- basedata = np.array( - [ - [1, 1, 1, 1, 1, 1, 1, 1], - [1, 1, 4, 4, 4, 2, 2, 1], - [2, 1, 4, 4, 4, 2, 2, 2], - [2, 5, 5, 1, 1, 1, 5, 5], - [5, 5, 5, 1, 1, 1, 5, 5], - [5, 5, 5, 5, 5, 5, 5, 5], - ] - ) - c1.data[:] = basedata - - shape2 = (14, 11) - xlim2 = 180.0 * (shape2[0] - 1) / shape2[0] - ylim2 = 90.0 * (shape2[1] - 1) / shape2[1] - xlims_2 = (-xlim2, xlim2) - ylims_2 = (-ylim2, ylim2) - c2 = _make_test_cube(shape2, xlims_2, ylims_2, pole_latlon=(47.4, 25.7)) - - # Perform regridding - c1toc2 = regrid_conservative_via_esmpy(c1, c2) - - # Now redo with dst longitudes rotated, so 'seam' is somewhere else. - x2_shift_steps = shape2[0] // 3 - xlims2_shifted = np.array(xlims_2) + 360.0 * x2_shift_steps / shape2[0] - c2_shifted = _make_test_cube( - shape2, xlims2_shifted, ylims_2, pole_latlon=(47.4, 25.7) - ) - c1toc2_shifted = regrid_conservative_via_esmpy(c1, c2_shifted) - - # Show that results are the same, when output rolled by same amount - rolled_data = np.roll(c1toc2_shifted.data, x2_shift_steps, axis=1) - _shared_utils.assert_array_all_close(rolled_data, c1toc2.data) - - # Repeat with rolled *source* data : result should be identical - x1_shift_steps = shape1[0] // 3 - x_shift_degrees = 360.0 * x1_shift_steps / shape1[0] - xlims1_shifted = [x - x_shift_degrees for x in xlims1] - c1_shifted = _make_test_cube(shape1, xlims1_shifted, ylims1) - c1_shifted.data[:] = np.roll(basedata, x1_shift_steps, axis=1) - c1shifted_toc2 = regrid_conservative_via_esmpy(c1_shifted, c2) - assert c1shifted_toc2 == c1toc2 - - def test_polar_areas(self): - """Test area-conserving regrid between different grids. - - Grids have overlapping areas in the same (lat-lon) coordinate system. - Cells are highly non-square (near the pole). - - """ - # Like test_basic_area, but not symmetrical + bigger overall errors. 
- shape1 = (5, 5) - xlims1, ylims1 = ((-2, 2), (84, 88)) - c1 = _make_test_cube(shape1, xlims1, ylims1) - c1.data[:] = 0.0 - c1.data[2, 2] = 1.0 - c1_areasum = _cube_area_sum(c1) - - shape2 = (4, 4) - xlims2, ylims2 = ((-1.5, 1.5), (84.5, 87.5)) - c2 = _make_test_cube(shape2, xlims2, ylims2) - c2.data[:] = 0.0 - - c1to2 = regrid_conservative_via_esmpy(c1, c2) - - # check for expected pattern - d_expect = np.array( - [ - [0.0, 0.0, 0.0, 0.0], - [0.0, 0.23614, 0.23614, 0.0], - [0.0, 0.26784, 0.26784, 0.0], - [0.0, 0.0, 0.0, 0.0], - ] - ) - _shared_utils.assert_array_all_close(c1to2.data, d_expect, rtol=5.0e-5) - - # check sums - c1to2_areasum = _cube_area_sum(c1to2) - _shared_utils.assert_array_all_close(c1to2_areasum, c1_areasum) - - # - # transform back again ... - # - c1to2to1 = regrid_conservative_via_esmpy(c1to2, c1) - - # check values - d_expect = np.array( - [ - [0.0, 0.0, 0.0, 0.0, 0.0], - [0.0, 0.056091, 0.112181, 0.056091, 0.0], - [0.0, 0.125499, 0.250998, 0.125499, 0.0], - [0.0, 0.072534, 0.145067, 0.072534, 0.0], - [0.0, 0.0, 0.0, 0.0, 0.0], - ] - ) - _shared_utils.assert_array_all_close(c1to2to1.data, d_expect, atol=0.0005) - - # check sums - c1to2to1_areasum = _cube_area_sum(c1to2to1) - _shared_utils.assert_array_all_close(c1to2to1_areasum, c1_areasum) - - def test_fail_no_cs(self): - # Test error when one coordinate has no coord_system. - shape1 = (5, 5) - xlims1, ylims1 = ((-2, 2), (-2, 2)) - c1 = _make_test_cube(shape1, xlims1, ylims1) - c1.data[:] = 0.0 - c1.data[2, 2] = 1.0 - - shape2 = (4, 4) - xlims2, ylims2 = ((-1.5, 1.5), (-1.5, 1.5)) - c2 = _make_test_cube(shape2, xlims2, ylims2) - c2.data[:] = 0.0 - c2.coord("latitude").coord_system = None - - emsg = ( - r"The cube's x \('longitude'\) and y \('latitude'\) " - "coordinates must have the same coordinate system." 
- ) - with pytest.raises(ValueError, match=emsg): - regrid_conservative_via_esmpy(c1, c2) - - def test_fail_different_cs(self): - # Test error when either src or dst coords have different - # coord_systems. - shape1 = (5, 5) - xlims1, ylims1 = ((-2, 2), (-2, 2)) - shape2 = (4, 4) - xlims2, ylims2 = ((-1.5, 1.5), (-1.5, 1.5)) - - # Check basic regrid between these is ok. - c1 = _make_test_cube(shape1, xlims1, ylims1, pole_latlon=(45.0, 35.0)) - c2 = _make_test_cube(shape2, xlims2, ylims2) - regrid_conservative_via_esmpy(c1, c2) - - emsg = ( - r"The cube's x \('grid_longitude'\) and y \('grid_latitude'\) coordinates " - "must have the same coordinate system." - ) - # Replace the coord_system one of the source coords + check this fails. - c1.coord("grid_longitude").coord_system = c2.coord("longitude").coord_system - with pytest.raises(ValueError, match=emsg): - regrid_conservative_via_esmpy(c1, c2) - - emsg = ( - r"The cube's x \('longitude'\) and y \('latitude'\) coordinates " - "must have the same coordinate system." - ) - # Repeat with target coordinate fiddled. - c1 = _make_test_cube(shape1, xlims1, ylims1, pole_latlon=(45.0, 35.0)) - c2 = _make_test_cube(shape2, xlims2, ylims2) - c2.coord("latitude").coord_system = c1.coord("grid_latitude").coord_system - with pytest.raises(ValueError, match=emsg): - regrid_conservative_via_esmpy(c1, c2) - - def test_rotated(self): - """Test area-weighted regrid on more complex area. - - Use two mutually rotated grids, of similar area + same dims. - Only a small central region in each is non-zero, which maps entirely - inside the other region. - So the area-sum totals should match exactly. 
- - """ - # create source test cube on rotated form - pole_lat = 53.4 - pole_lon = -173.2 - deg_swing = 35.3 - pole_lon += deg_swing - c1_nx = 9 + 6 - c1_ny = 7 + 6 - c1_xlims = -60.0, 60.0 - c1_ylims = -45.0, 20.0 - c1_xlims = [x - deg_swing for x in c1_xlims] - c1 = _make_test_cube( - (c1_nx, c1_ny), - c1_xlims, - c1_ylims, - pole_latlon=(pole_lat, pole_lon), - ) - c1.data[3:-3, 3:-3] = np.array( - [ - [100, 100, 100, 100, 100, 100, 100, 100, 100], - [100, 100, 100, 100, 100, 100, 100, 100, 100], - [100, 100, 199, 199, 199, 199, 100, 100, 100], - [100, 100, 100, 100, 199, 199, 100, 100, 100], - [100, 100, 100, 100, 199, 199, 199, 100, 100], - [100, 100, 100, 100, 100, 100, 100, 100, 100], - [100, 100, 100, 100, 100, 100, 100, 100, 100], - ], - dtype=np.float64, - ) - - c1_areasum = _cube_area_sum(c1) - - # construct target cube to receive - nx2 = 9 + 6 - ny2 = 7 + 6 - c2_xlims = -100.0, 120.0 - c2_ylims = -20.0, 50.0 - c2 = _make_test_cube((nx2, ny2), c2_xlims, c2_ylims) - c2.data = np.ma.array(c2.data, mask=True) - - # perform regrid - c1to2 = regrid_conservative_via_esmpy(c1, c2) - - # check we have zeros (or nearly) all around the edge.. - c1toc2_zeros = np.ma.array(c1to2.data) - c1toc2_zeros[c1toc2_zeros.mask] = 0.0 - c1toc2_zeros = np.abs(c1toc2_zeros.mask) < 1.0e-6 - _shared_utils.assert_array_equal(c1toc2_zeros[0, :], True) - _shared_utils.assert_array_equal(c1toc2_zeros[-1, :], True) - _shared_utils.assert_array_equal(c1toc2_zeros[:, 0], True) - _shared_utils.assert_array_equal(c1toc2_zeros[:, -1], True) - - # check the area-sum operation - c1to2_areasum = _cube_area_sum(c1to2) - _shared_utils.assert_array_all_close(c1to2_areasum, c1_areasum, rtol=0.004) - - # - # Now repeat, transforming backwards ... 
- # - c1.data = np.ma.array(c1.data, mask=True) - c2.data[:] = 0.0 - c2.data[5:-5, 5:-5] = np.array( - [ - [199, 199, 199, 199, 100], - [100, 100, 199, 199, 100], - [100, 100, 199, 199, 199], - ], - dtype=np.float64, - ) - c2_areasum = _cube_area_sum(c2) - - c2toc1 = regrid_conservative_via_esmpy(c2, c1) - - # check we have zeros (or nearly) all around the edge.. - c2toc1_zeros = np.ma.array(c2toc1.data) - c2toc1_zeros[c2toc1_zeros.mask] = 0.0 - c2toc1_zeros = np.abs(c2toc1_zeros.mask) < 1.0e-6 - _shared_utils.assert_array_equal(c2toc1_zeros[0, :], True) - _shared_utils.assert_array_equal(c2toc1_zeros[-1, :], True) - _shared_utils.assert_array_equal(c2toc1_zeros[:, 0], True) - _shared_utils.assert_array_equal(c2toc1_zeros[:, -1], True) - - # check the area-sum operation - c2toc1_areasum = _cube_area_sum(c2toc1) - _shared_utils.assert_array_all_close(c2toc1_areasum, c2_areasum, rtol=0.004) - - def test_missing_data_rotated(self): - """Check missing-data handling between different coordinate systems. - - Regrid between mutually rotated lat/lon systems, and check results for - missing data due to grid edge overlap, and source-data masking. 
- - """ - for do_add_missing in (False, True): - # create source test cube on rotated form - pole_lat = 53.4 - pole_lon = -173.2 - deg_swing = 35.3 - pole_lon += deg_swing - c1_nx = 9 + 6 - c1_ny = 7 + 6 - c1_xlims = -60.0, 60.0 - c1_ylims = -45.0, 20.0 - c1_xlims = [x - deg_swing for x in c1_xlims] - c1 = _make_test_cube( - (c1_nx, c1_ny), - c1_xlims, - c1_ylims, - pole_latlon=(pole_lat, pole_lon), - ) - c1.data = np.ma.array(c1.data, mask=False) - c1.data[3:-3, 3:-3] = np.ma.array( - [ - [100, 100, 100, 100, 100, 100, 100, 100, 100], - [100, 100, 100, 100, 100, 100, 100, 100, 100], - [100, 100, 199, 199, 199, 199, 100, 100, 100], - [100, 100, 100, 100, 199, 199, 100, 100, 100], - [100, 100, 100, 100, 199, 199, 199, 100, 100], - [100, 100, 100, 100, 100, 100, 100, 100, 100], - [100, 100, 100, 100, 100, 100, 100, 100, 100], - ], - dtype=np.float64, - ) - - if do_add_missing: - c1.data = np.ma.array(c1.data) - c1.data[7, 7] = np.ma.masked - c1.data[3:5, 10:12] = np.ma.masked - - # construct target cube to receive - nx2 = 9 + 6 - ny2 = 7 + 6 - c2_xlims = -80.0, 80.0 - c2_ylims = -20.0, 50.0 - c2 = _make_test_cube((nx2, ny2), c2_xlims, c2_ylims) - c2.data = np.ma.array(c2.data, mask=True) - - # perform regrid + snapshot test results - c1toc2 = regrid_conservative_via_esmpy(c1, c2) - - # check masking of result is as expected - # (generated by inspecting plot of how src+dst grids overlap) - expected_mask_valuemap = np.array( - # KEY: 0=masked, 7=present, 5=masked with masked datapoints - [ - [0, 0, 0, 0, 7, 7, 7, 0, 0, 0, 0, 0, 0, 0, 0], - [0, 0, 0, 0, 7, 7, 7, 7, 0, 0, 0, 0, 0, 0, 0], - [0, 0, 0, 7, 7, 7, 7, 7, 7, 7, 0, 0, 0, 0, 0], - [0, 0, 0, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 0, 0], - [0, 0, 0, 7, 7, 7, 7, 7, 7, 7, 5, 5, 7, 0, 0], - [0, 0, 7, 7, 7, 7, 7, 7, 7, 7, 5, 5, 7, 0, 0], - [0, 0, 0, 7, 7, 7, 7, 7, 7, 7, 5, 5, 7, 0, 0], - [0, 0, 0, 7, 7, 7, 7, 5, 5, 7, 7, 7, 7, 0, 0], - [0, 0, 0, 0, 7, 7, 7, 5, 5, 7, 7, 7, 7, 0, 0], - [0, 0, 0, 0, 0, 7, 7, 7, 7, 7, 7, 7, 7, 7, 
0], - [0, 0, 0, 0, 0, 7, 7, 7, 7, 7, 7, 7, 7, 7, 0], - [0, 0, 0, 0, 0, 0, 7, 7, 7, 7, 7, 7, 7, 7, 0], - [0, 0, 0, 0, 0, 0, 0, 0, 7, 7, 7, 7, 7, 7, 0], - ] - ) - - if do_add_missing: - expected_mask = expected_mask_valuemap < 7 - else: - expected_mask = expected_mask_valuemap == 0 - - actual_mask = c1toc2.data.mask - _shared_utils.assert_array_equal(actual_mask, expected_mask) - - if not do_add_missing: - # check preservation of area-sums - # NOTE: does *not* work with missing data, even theoretically, - # as the 'missing areas' are not the same. - c1_areasum = _cube_area_sum(c1) - c1to2_areasum = _cube_area_sum(c1toc2) - _shared_utils.assert_array_all_close( - c1_areasum, c1to2_areasum, rtol=0.003 - ) diff --git a/lib/iris/tests/integration/util/test_mask_cube_from_shapefile.py b/lib/iris/tests/integration/util/test_mask_cube_from_shapefile.py index 845867ebae..345889dacc 100644 --- a/lib/iris/tests/integration/util/test_mask_cube_from_shapefile.py +++ b/lib/iris/tests/integration/util/test_mask_cube_from_shapefile.py @@ -2,7 +2,7 @@ # # This file is part of Iris and is released under the BSD license. # See LICENSE in the root of the repository for full licensing details. 
-"""Unit tests for :func:`iris.util.mask_cube_from_shapefile`.""" +"""Integration tests for :func:`iris.util.mask_cube_from_shapefile`.""" import numpy as np import pytest diff --git a/lib/iris/tests/results/unit/experimental/stratify/relevel/multi_dim_target_levels.cml b/lib/iris/tests/results/unit/experimental/stratify/relevel/multi_dim_target_levels.cml deleted file mode 100644 index b67109636a..0000000000 --- a/lib/iris/tests/results/unit/experimental/stratify/relevel/multi_dim_target_levels.cml +++ /dev/null @@ -1,18 +0,0 @@ - - - - - - - - - - - - - - - - - - diff --git a/lib/iris/tests/unit/util/test_mask_cube_from_shape.py b/lib/iris/tests/unit/util/test_mask_cube_from_shape.py deleted file mode 100644 index ef7b658589..0000000000 --- a/lib/iris/tests/unit/util/test_mask_cube_from_shape.py +++ /dev/null @@ -1,205 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. 
-"""Unit tests for :func:`iris.util.mask_cube_from_shapefile`.""" - -import numpy as np -from pyproj import CRS -import pytest -from shapely.geometry import box - -from iris.coord_systems import GeogCS -from iris.coords import DimCoord -from iris.cube import Cube -from iris.util import array_equal, is_masked, mask_cube_from_shape - - -@pytest.fixture -def square_polygon(): - # Create a roughly 3x3 square polygon - return box(2.4, 2.4, 6.4, 6.4) - - -@pytest.fixture -def mock_cube(): - """Create a mock 9x9 Iris cube for testing.""" - x_points = np.linspace(1, 9, 9) - 0.5 # Specify cube cell midpoints - y_points = np.linspace(1, 9, 9) - 0.5 - x_coord = DimCoord( - x_points, - standard_name="longitude", - units="degrees", - coord_system=GeogCS(6371229), - ) - y_coord = DimCoord( - y_points, - standard_name="latitude", - units="degrees", - coord_system=GeogCS(6371229), - ) - data = np.ones((len(y_points), len(x_points))) - cube = Cube(data, dim_coords_and_dims=[(y_coord, 0), (x_coord, 1)]) - return cube - - -def test_mask_cube_from_shape_inplace(mock_cube, square_polygon): - masked_cube = mask_cube_from_shape( - cube=mock_cube, - shape=square_polygon, - shape_crs=CRS.from_epsg(4326), - in_place=True, - ) - assert masked_cube is None - assert is_masked(mock_cube.data) - - -def test_mask_cube_from_shape_not_inplace(mock_cube, square_polygon): - masked_cube = mask_cube_from_shape( - cube=mock_cube, - shape=square_polygon, - shape_crs=CRS.from_epsg(4326), - in_place=False, - ) - assert masked_cube is not None - assert is_masked(masked_cube.data) - # Original cube should remain unmasked - assert not is_masked(mock_cube.data) - - -@pytest.mark.parametrize( - ("minimum_weight", "expected_output"), - [ - ( - 0.0, - np.array( - [ - [0, 0, 0, 0, 0, 0, 0, 0, 0], - [0, 0, 0, 0, 0, 0, 0, 0, 0], - [0, 0, 1, 1, 1, 1, 1, 0, 0], - [0, 0, 1, 1, 1, 1, 1, 0, 0], - [0, 0, 1, 1, 1, 1, 1, 0, 0], - [0, 0, 1, 1, 1, 1, 1, 0, 0], - [0, 0, 1, 1, 1, 1, 1, 0, 0], - [0, 0, 0, 0, 0, 0, 0, 0, 0], - 
[0, 0, 0, 0, 0, 0, 0, 0, 0], - ], - ), - ), - ( - 0.5, - np.array( - [ - [0, 0, 0, 0, 0, 0, 0, 0, 0], - [0, 0, 0, 0, 0, 0, 0, 0, 0], - [0, 0, 0, 1, 1, 1, 0, 0, 0], - [0, 0, 1, 1, 1, 1, 0, 0, 0], - [0, 0, 1, 1, 1, 1, 0, 0, 0], - [0, 0, 1, 1, 1, 1, 0, 0, 0], - [0, 0, 0, 0, 0, 0, 0, 0, 0], - [0, 0, 0, 0, 0, 0, 0, 0, 0], - [0, 0, 0, 0, 0, 0, 0, 0, 0], - ], - ), - ), - ( - 1.0, - np.array( - [ - [0, 0, 0, 0, 0, 0, 0, 0, 0], - [0, 0, 0, 0, 0, 0, 0, 0, 0], - [0, 0, 0, 0, 0, 0, 0, 0, 0], - [0, 0, 0, 1, 1, 1, 0, 0, 0], - [0, 0, 0, 1, 1, 1, 0, 0, 0], - [0, 0, 0, 1, 1, 1, 0, 0, 0], - [0, 0, 0, 0, 0, 0, 0, 0, 0], - [0, 0, 0, 0, 0, 0, 0, 0, 0], - [0, 0, 0, 0, 0, 0, 0, 0, 0], - ], - ), - ), - ], -) -def test_basic_mask_cube_from_shape( - mock_cube, square_polygon, minimum_weight, expected_output -): - """Test the create_shape_mask function with different minimum weights.""" - expected_cube = mock_cube.copy( - data=np.ma.array( - expected_output, dtype=float, mask=np.logical_not(expected_output) - ) - ) - # Create a mask using the square polygon - mask = mask_cube_from_shape( - cube=mock_cube, - shape=square_polygon, - shape_crs=None, - minimum_weight=minimum_weight, - ) - - assert array_equal(mask.data, expected_cube.data) - - -def test_mask_cube_from_shape_invert(mock_cube, square_polygon): - """Test the create_shape_mask function with different minimum weights.""" - expected_output = np.array( - [ - [0, 0, 0, 0, 0, 0, 0, 0, 0], - [0, 0, 0, 0, 0, 0, 0, 0, 0], - [0, 0, 1, 1, 1, 1, 1, 0, 0], - [0, 0, 1, 1, 1, 1, 1, 0, 0], - [0, 0, 1, 1, 1, 1, 1, 0, 0], - [0, 0, 1, 1, 1, 1, 1, 0, 0], - [0, 0, 1, 1, 1, 1, 1, 0, 0], - [0, 0, 0, 0, 0, 0, 0, 0, 0], - [0, 0, 0, 0, 0, 0, 0, 0, 0], - ], - ) - - expected_cube = mock_cube.copy( - data=np.ma.array( - np.logical_not(expected_output), dtype=float, mask=expected_output - ) - ) - # Create a mask using the square polygon - mask = mask_cube_from_shape( - cube=mock_cube, - shape=square_polygon, - shape_crs=None, - minimum_weight=0, - invert=True, 
- ) - - assert array_equal(mask.data, expected_cube.data) - - -def test_mask_cube_from_shape_all_touched(mock_cube, square_polygon): - """Test the create_shape_mask function with different minimum weights.""" - expected_output = np.array( - [ - [0, 0, 0, 0, 0, 0, 0, 0, 0], - [0, 0, 0, 0, 0, 0, 0, 0, 0], - [0, 0, 1, 1, 1, 1, 1, 0, 0], - [0, 0, 1, 1, 1, 1, 1, 0, 0], - [0, 0, 1, 1, 1, 1, 1, 0, 0], - [0, 0, 1, 1, 1, 1, 1, 0, 0], - [0, 0, 1, 1, 1, 1, 1, 0, 0], - [0, 0, 0, 0, 0, 0, 0, 0, 0], - [0, 0, 0, 0, 0, 0, 0, 0, 0], - ], - ) - - expected_cube = mock_cube.copy( - data=np.ma.array( - expected_output, dtype=float, mask=np.logical_not(expected_output) - ) - ) - # Create a mask using the square polygon - mask = mask_cube_from_shape( - cube=mock_cube, - shape=square_polygon, - shape_crs=None, - all_touched=True, - ) - - assert array_equal(mask.data, expected_cube.data) diff --git a/lib/iris/tests/unit/util/test_mask_cube_from_shapefile.py b/lib/iris/tests/unit/util/test_mask_cube_from_shapefile.py deleted file mode 100644 index 845867ebae..0000000000 --- a/lib/iris/tests/unit/util/test_mask_cube_from_shapefile.py +++ /dev/null @@ -1,128 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. 
-"""Unit tests for :func:`iris.util.mask_cube_from_shapefile`.""" - -import numpy as np -import pytest -from shapely.geometry import box - -from iris.coord_systems import GeogCS -from iris.coords import DimCoord -from iris.cube import Cube -from iris.util import array_equal, mask_cube_from_shapefile - - -@pytest.fixture -def square_polygon(): - # Create a roughly 3x3 square polygon - return box(2.4, 2.4, 6.4, 6.4) - - -@pytest.fixture -def mock_cube(): - """Create a mock 9x9 Iris cube for testing.""" - x_points = np.linspace(1, 9, 9) - 0.5 # Specify cube cell midpoints - y_points = np.linspace(1, 9, 9) - 0.5 - x_coord = DimCoord( - x_points, - standard_name="longitude", - units="degrees", - coord_system=GeogCS(6371229), - ) - y_coord = DimCoord( - y_points, - standard_name="latitude", - units="degrees", - coord_system=GeogCS(6371229), - ) - data = np.ones((len(y_points), len(x_points))) - cube = Cube(data, dim_coords_and_dims=[(y_coord, 0), (x_coord, 1)]) - return cube - - -def test_mask_cube_from_shapefile_inplace( - mock_cube, -): - shape = box(0, 0, 10, 10) - masked_cube = mask_cube_from_shapefile(mock_cube, shape, in_place=True) - assert masked_cube is None - - -def test_mask_cube_from_shapefile_not_inplace(mock_cube): - shape = box(0, 0, 10, 10) - masked_cube = mask_cube_from_shapefile(mock_cube, shape, in_place=False) - assert masked_cube is not None - - -@pytest.mark.parametrize( - ("minimum_weight", "expected_output"), - [ - ( - 0.0, - np.array( - [ - [0, 0, 0, 0, 0, 0, 0, 0, 0], - [0, 0, 0, 0, 0, 0, 0, 0, 0], - [0, 0, 1, 1, 1, 1, 1, 0, 0], - [0, 0, 1, 1, 1, 1, 1, 0, 0], - [0, 0, 1, 1, 1, 1, 1, 0, 0], - [0, 0, 1, 1, 1, 1, 1, 0, 0], - [0, 0, 1, 1, 1, 1, 1, 0, 0], - [0, 0, 0, 0, 0, 0, 0, 0, 0], - [0, 0, 0, 0, 0, 0, 0, 0, 0], - ], - ), - ), - ( - 0.5, - np.array( - [ - [0, 0, 0, 0, 0, 0, 0, 0, 0], - [0, 0, 0, 0, 0, 0, 0, 0, 0], - [0, 0, 0, 1, 1, 1, 0, 0, 0], - [0, 0, 1, 1, 1, 1, 0, 0, 0], - [0, 0, 1, 1, 1, 1, 0, 0, 0], - [0, 0, 1, 1, 1, 1, 0, 0, 0], - [0, 0, 
0, 0, 0, 0, 0, 0, 0], - [0, 0, 0, 0, 0, 0, 0, 0, 0], - [0, 0, 0, 0, 0, 0, 0, 0, 0], - ], - ), - ), - ( - 1.0, - np.array( - [ - [0, 0, 0, 0, 0, 0, 0, 0, 0], - [0, 0, 0, 0, 0, 0, 0, 0, 0], - [0, 0, 0, 0, 0, 0, 0, 0, 0], - [0, 0, 0, 1, 1, 1, 0, 0, 0], - [0, 0, 0, 1, 1, 1, 0, 0, 0], - [0, 0, 0, 1, 1, 1, 0, 0, 0], - [0, 0, 0, 0, 0, 0, 0, 0, 0], - [0, 0, 0, 0, 0, 0, 0, 0, 0], - [0, 0, 0, 0, 0, 0, 0, 0, 0], - ], - ), - ), - ], -) -def test_basic_mask_cube_from_shape( - mock_cube, square_polygon, minimum_weight, expected_output -): - """Test the create_shape_mask function with different minimum weights.""" - expected_cube = mock_cube.copy( - data=np.ma.array( - expected_output, dtype=float, mask=np.logical_not(expected_output) - ) - ) - # Create a mask using the square polygon - mask = mask_cube_from_shapefile( - cube=mock_cube, - shape=square_polygon, - minimum_weight=minimum_weight, - ) - - assert array_equal(mask.data, expected_cube.data) From 8c1404f3c1c2ca5f595fc1ca19e05ac8aa81aa90 Mon Sep 17 00:00:00 2001 From: Elias Sadek Date: Mon, 16 Mar 2026 10:59:59 +0000 Subject: [PATCH 20/22] moved pandas --- .../pandas => integration}/test_pandas.py | 75 ++++++++++++++----- lib/iris/tests/unit/pandas/__init__.py | 5 -- 2 files changed, 57 insertions(+), 23 deletions(-) rename lib/iris/tests/{unit/pandas => integration}/test_pandas.py (94%) delete mode 100644 lib/iris/tests/unit/pandas/__init__.py diff --git a/lib/iris/tests/unit/pandas/test_pandas.py b/lib/iris/tests/integration/test_pandas.py similarity index 94% rename from lib/iris/tests/unit/pandas/test_pandas.py rename to lib/iris/tests/integration/test_pandas.py index 10d8feaa5f..5fed3ee956 100644 --- a/lib/iris/tests/unit/pandas/test_pandas.py +++ b/lib/iris/tests/integration/test_pandas.py @@ -55,34 +55,66 @@ def activate_pandas_ndim(): class TestAsSeries: """Test conversion of 1D cubes to Pandas using as_series().""" - @pytest.fixture(autouse=True) - def _setup(self, mocker): - self._series = 
mocker.patch("iris.pandas.pd.Series") - def test_no_dim_coord(self): cube = Cube(np.array([0, 1, 2, 3, 4]), long_name="foo") - expected_data = cube.data - - iris.pandas.as_series(cube) - args, _ = self._series.call_args - - self._series.assert_called_once() - _shared_utils.assert_array_equal(args[0], expected_data) - # index is only assigned with dim_coords present - assert args[1] is None + series = iris.pandas.as_series(cube) + expected_index = np.array([0, 1, 2, 3, 4]) + _shared_utils.assert_array_equal(series, cube.data) + _shared_utils.assert_array_equal(series.index, expected_index) def test_simple(self): cube = Cube(np.array([0, 1, 2, 3, 4.4]), long_name="foo") dim_coord = DimCoord([5, 6, 7, 8, 9], long_name="bar") cube.add_dim_coord(dim_coord, 0) - expected_index = dim_coord.points[0] - expected_data = cube.data + expected_index = np.array([5, 6, 7, 8, 9]) + series = iris.pandas.as_series(cube) + _shared_utils.assert_array_equal(series, cube.data) + _shared_utils.assert_array_equal(series.index, expected_index) + def test_masked(self): + data = np.ma.MaskedArray([0, 1, 2, 3, 4.4], mask=[0, 1, 0, 1, 0]) + cube = Cube(data, long_name="foo") series = iris.pandas.as_series(cube) - args, _ = self._series.call_args + _shared_utils.assert_array_equal(series, cube.data.astype("f").filled(np.nan)) - self._series.assert_called_once() - _shared_utils.assert_array_equal(args[0], expected_data) + def test_time_standard(self): + cube = Cube(np.array([0, 1, 2, 3, 4]), long_name="ts") + time_coord = DimCoord( + [0, 100.1, 200.2, 300.3, 400.4], + long_name="time", + units="days since 2000-01-01 00:00", + ) + cube.add_dim_coord(time_coord, 0) + expected_index = [ + datetime.datetime(2000, 1, 1, 0, 0), + datetime.datetime(2000, 4, 10, 2, 24), + datetime.datetime(2000, 7, 19, 4, 48), + datetime.datetime(2000, 10, 27, 7, 12), + datetime.datetime(2001, 2, 4, 9, 36), + ] + series = iris.pandas.as_series(cube) + _shared_utils.assert_array_equal(series, cube.data) + assert 
list(series.index) == expected_index + + def test_time_360(self): + cube = Cube(np.array([0, 1, 2, 3, 4]), long_name="ts") + time_unit = cf_units.Unit( + "days since 2000-01-01 00:00", calendar=cf_units.CALENDAR_360_DAY + ) + time_coord = DimCoord( + [0, 100.1, 200.2, 300.3, 400.4], long_name="time", units=time_unit + ) + cube.add_dim_coord(time_coord, 0) + expected_index = [ + cftime.Datetime360Day(2000, 1, 1, 0, 0), + cftime.Datetime360Day(2000, 4, 11, 2, 24), + cftime.Datetime360Day(2000, 7, 21, 4, 48), + cftime.Datetime360Day(2000, 11, 1, 7, 12), + cftime.Datetime360Day(2001, 2, 11, 9, 36), + ] + + series = iris.pandas.as_series(cube) + _shared_utils.assert_array_equal(series, cube.data) _shared_utils.assert_array_equal(series.index, expected_index) def test_copy_true(self): @@ -91,6 +123,13 @@ def test_copy_true(self): series[0] = 99 assert cube.data[0] == 0 + def test_copy_masked_true(self): + data = np.ma.MaskedArray([0, 1, 2, 3, 4], mask=[0, 1, 0, 1, 0]) + cube = Cube(data, long_name="foo") + series = iris.pandas.as_series(cube) + series[0] = 99 + assert cube.data[0] == 0 + @skip_pandas @pytest.mark.filterwarnings( diff --git a/lib/iris/tests/unit/pandas/__init__.py b/lib/iris/tests/unit/pandas/__init__.py deleted file mode 100644 index 2ee1fb1cfe..0000000000 --- a/lib/iris/tests/unit/pandas/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -# Copyright Iris contributors -# -# This file is part of Iris and is released under the BSD license. -# See LICENSE in the root of the repository for full licensing details. 
-"""Unit tests for the :mod:`iris.pandas` module.""" From e8244c1341d789dc8d576a1c91b6be58a4780877 Mon Sep 17 00:00:00 2001 From: Elias Sadek Date: Mon, 16 Mar 2026 16:00:02 +0000 Subject: [PATCH 21/22] whatsnew --- docs/src/whatsnew/latest.rst | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/docs/src/whatsnew/latest.rst b/docs/src/whatsnew/latest.rst index f1d8547c75..28917ae408 100644 --- a/docs/src/whatsnew/latest.rst +++ b/docs/src/whatsnew/latest.rst @@ -144,6 +144,10 @@ This document explains the changes made to Iris for this release Cube/Coord summary to use ``str`` representation instead of ``repr``. (:pull:`6966`, :issue:`6692`) +#. `@ESadek-MO` and `@pp-mo`_ removed unit test reliance on all optional dependencies + except for mo_pack. + (:issue:`6832`, :pull:`6976`) + .. comment Whatsnew author names (@github name) in alphabetical order. Note that, core dev names are automatically included by the common_links.inc: From 95d696040707f1a6e0e7e71e8f2e3fa3d2859c59 Mon Sep 17 00:00:00 2001 From: Elias Sadek Date: Mon, 16 Mar 2026 16:05:57 +0000 Subject: [PATCH 22/22] remove skips for integration tests --- lib/iris/tests/_shared_utils.py | 6 ------ .../experimental/stratify/test_relevel.py | 10 ++-------- lib/iris/tests/integration/test_pandas.py | 18 ++---------------- 3 files changed, 4 insertions(+), 30 deletions(-) diff --git a/lib/iris/tests/_shared_utils.py b/lib/iris/tests/_shared_utils.py index 3e0ed6ccc4..3d47b0c618 100644 --- a/lib/iris/tests/_shared_utils.py +++ b/lib/iris/tests/_shared_utils.py @@ -979,12 +979,6 @@ class MyGeoTiffTests(test.IrisTest): ) -skip_stratify = pytest.mark.skipif( - not STRATIFY_AVAILABLE, - reason='Test(s) require "python-stratify", which is not available.', -) - - def no_warnings(func): """Provides a decorator to ensure that there are no warnings raised within the test, otherwise the test will fail. 
diff --git a/lib/iris/tests/integration/experimental/stratify/test_relevel.py b/lib/iris/tests/integration/experimental/stratify/test_relevel.py index ee2d4505db..e4665c689f 100644 --- a/lib/iris/tests/integration/experimental/stratify/test_relevel.py +++ b/lib/iris/tests/integration/experimental/stratify/test_relevel.py @@ -8,20 +8,14 @@ import numpy as np import pytest +import stratify from iris.coords import AuxCoord, DimCoord +from iris.experimental.stratify import relevel from iris.tests import _shared_utils import iris.tests.stock as stock -try: - import stratify - from iris.experimental.stratify import relevel -except ImportError: - stratify = None - - -@_shared_utils.skip_stratify class Test: @pytest.fixture(autouse=True) def _setup(self): diff --git a/lib/iris/tests/integration/test_pandas.py b/lib/iris/tests/integration/test_pandas.py index 5fed3ee956..2a042f5529 100644 --- a/lib/iris/tests/integration/test_pandas.py +++ b/lib/iris/tests/integration/test_pandas.py @@ -23,17 +23,10 @@ # Importing pandas has the side-effect of messing with the formatters # used by matplotlib for handling dates. default_units_registry = copy.copy(matplotlib.units.registry) -try: - import pandas as pd -except ImportError: - # Disable all these tests if pandas is not installed. 
- pd = None +import pandas as pd + matplotlib.units.registry = default_units_registry -skip_pandas = pytest.mark.skipif( - pd is None, - reason='Test(s) require "pandas", which is not available.', -) if pd is not None: from iris.coords import AncillaryVariable, AuxCoord, CellMeasure, DimCoord @@ -48,7 +41,6 @@ def activate_pandas_ndim(): iris.FUTURE.pandas_ndim = False -@skip_pandas @pytest.mark.filterwarnings( "ignore:.*as_series has been deprecated.*:iris._deprecation.IrisDeprecation" ) @@ -131,7 +123,6 @@ def test_copy_masked_true(self): assert cube.data[0] == 0 -@skip_pandas @pytest.mark.filterwarnings( "ignore:You are using legacy 2-dimensional behaviour.*:FutureWarning" ) @@ -261,7 +252,6 @@ def test_copy_masked_true(self): assert cube.data[0, 0] == 0 -@skip_pandas class TestAsDataFrameNDim: """Test conversion of n-dimensional cubes to Pandas using as_data_frame().""" @@ -565,7 +555,6 @@ def test_instance_error(self): _ = iris.pandas.as_data_frame(list()) -@skip_pandas @pytest.mark.filterwarnings( "ignore:.*as_cube has been deprecated.*:iris._deprecation.IrisDeprecation" ) @@ -647,7 +636,6 @@ def test_implicit_copy_true(self): assert series[5] == 0 -@skip_pandas @pytest.mark.filterwarnings( "ignore:.*as_cube has been deprecated.*:iris._deprecation.IrisDeprecation" ) @@ -750,7 +738,6 @@ def test_implicit_copy_true(self): assert data_frame.iloc[0, 0] == 0 -@skip_pandas class TestFutureAndDeprecation: def test_as_cube_deprecation_warning(self): data_frame = pd.DataFrame([[0, 1, 2, 3, 4], [5, 6, 7, 8, 9]]) @@ -819,7 +806,6 @@ def test_explicit_copy_false_error(self, test_function, test_input): _ = test_function(test_input, copy=False) -@skip_pandas class TestPandasAsCubes: @staticmethod def _create_pandas(index_levels=0, is_series=False):