[Git][debian-gis-team/satpy][master] 3 commits: New 0009-No-lazy-fixture.patch

Antonio Valentino (@antonio.valentino) gitlab at salsa.debian.org
Sat Mar 30 08:18:36 GMT 2024



Antonio Valentino pushed to branch master at Debian GIS Project / satpy


Commits:
c46dcedc by Antonio Valentino at 2024-03-30T07:47:04+00:00
New 0009-No-lazy-fixture.patch

- - - - -
404ee2cf by Antonio Valentino at 2024-03-30T07:48:07+00:00
Skip broken test

- - - - -
1fed4105 by Antonio Valentino at 2024-03-30T08:03:14+00:00
Set distribution to unstable

- - - - -


5 changed files:

- debian/changelog
- debian/control
- + debian/patches/0009-No-lazy-fixture.patch
- debian/patches/series
- debian/rules


Changes:

=====================================
debian/changelog
=====================================
@@ -1,9 +1,14 @@
-satpy (0.47.0-2) UNRELEASED; urgency=medium
+satpy (0.47.0-2) unstable; urgency=medium
 
+  * debian/control:
+    - Drop dependency on python3-pytest-lazy-fixture.
   * debian/patches:
     - New 0008-Switch-from-appdirs-to-platformdirs.patch (Closes: #1068007).
+    - New 0009-No-lazy-fixture.patch, workaround for #1063957.
+  * debian/rules:
+    - Skip broken test (test_correct_area_clearsky_different_resolutions).
 
- -- Antonio Valentino <antonio.valentino at tiscali.it>  Fri, 29 Mar 2024 18:31:29 +0000
+ -- Antonio Valentino <antonio.valentino at tiscali.it>  Sat, 30 Mar 2024 07:48:15 +0000
 
 satpy (0.47.0-1) unstable; urgency=medium
 


=====================================
debian/control
=====================================
@@ -50,7 +50,6 @@ Build-Depends: debhelper-compat (= 13),
                python3-pyresample (>= 1.24.0),
                python3-pyspectral,
                python3-pytest <!nocheck>,
-               python3-pytest-lazy-fixture <!nocheck>,
                python3-rasterio,
                python3-requests,
                python3-rioxarray,
@@ -119,7 +118,6 @@ Recommends: libjs-mathjax,
             python3-pyorbital,
             python3-pyspectral,
             python3-pytest,
-            python3-pytest-lazy-fixture,
             python3-rasterio,
             python3-requests,
             python3-rioxarray,


=====================================
debian/patches/0009-No-lazy-fixture.patch
=====================================
@@ -0,0 +1,1503 @@
+From: Antonio Valentino <antonio.valentino at tiscali.it>
+Date: Sat, 30 Mar 2024 07:29:13 +0000
+Subject: No lazy fixture
+
+Forwarded: not-needed
+---
+ .../reader_tests/modis_tests/test_modis_l1b.py     | 146 +++----
+ .../reader_tests/modis_tests/test_modis_l2.py      | 202 ++++-----
+ .../reader_tests/modis_tests/test_modis_l3.py      |  74 ++--
+ satpy/tests/reader_tests/test_abi_l1b.py           |  94 ++---
+ satpy/tests/reader_tests/test_fci_l1c_nc.py        | 454 ++++++++++-----------
+ satpy/tests/reader_tests/test_seadas_l2.py         |  86 ++--
+ satpy/tests/reader_tests/test_viirs_edr.py         | 172 ++++----
+ satpy/tests/test_modifiers.py                      |  32 +-
+ satpy/tests/test_readers.py                        |  52 +--
+ 9 files changed, 656 insertions(+), 656 deletions(-)
+
+diff --git a/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py b/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py
+index d4998a6..2dccfd7 100644
+--- a/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py
++++ b/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py
+@@ -22,7 +22,7 @@ from __future__ import annotations
+ import dask
+ import numpy as np
+ import pytest
+-from pytest_lazyfixture import lazy_fixture
++# from pytest_lazyfixture import lazy_fixture
+ 
+ from satpy import Scene, available_readers
+ from satpy.tests.utils import CustomScheduler, make_dataid
+@@ -97,78 +97,78 @@ class TestModisL1b:
+         """Test that MODIS L1b reader is available."""
+         assert "modis_l1b" in available_readers()
+ 
+-    @pytest.mark.parametrize(
+-        ("input_files", "expected_names", "expected_data_res", "expected_geo_res"),
+-        [
+-            (lazy_fixture("modis_l1b_nasa_mod021km_file"),
+-             AVAILABLE_1KM_PRODUCT_NAMES + AVAILABLE_HKM_PRODUCT_NAMES + AVAILABLE_QKM_PRODUCT_NAMES,
+-             [1000], [5000, 1000]),
+-            (lazy_fixture("modis_l1b_imapp_1000m_file"),
+-             AVAILABLE_1KM_PRODUCT_NAMES + AVAILABLE_HKM_PRODUCT_NAMES + AVAILABLE_QKM_PRODUCT_NAMES,
+-             [1000], [5000, 1000]),
+-            (lazy_fixture("modis_l1b_nasa_mod02hkm_file"),
+-             AVAILABLE_HKM_PRODUCT_NAMES + AVAILABLE_QKM_PRODUCT_NAMES, [500], [1000, 500, 250]),
+-        (lazy_fixture("modis_l1b_nasa_mod02qkm_file"),
+-             AVAILABLE_QKM_PRODUCT_NAMES, [250], [1000, 500, 250]),
+-        ]
+-    )
+-    def test_scene_available_datasets(self, input_files, expected_names, expected_data_res, expected_geo_res):
+-        """Test that datasets are available."""
+-        scene = Scene(reader="modis_l1b", filenames=input_files)
+-        available_datasets = scene.available_dataset_names()
+-        assert len(available_datasets) > 0
+-        assert "longitude" in available_datasets
+-        assert "latitude" in available_datasets
+-        for chan_name in expected_names:
+-            assert chan_name in available_datasets
+-
+-        available_data_ids = scene.available_dataset_ids()
+-        available_datas = {x: [] for x in expected_data_res}
+-        available_geos = {x: [] for x in expected_geo_res}
+-        # Make sure that every resolution from the reader is what we expect
+-        for data_id in available_data_ids:
+-            res = data_id["resolution"]
+-            if data_id["name"] in ["longitude", "latitude"]:
+-                assert res in expected_geo_res
+-                available_geos[res].append(data_id)
+-            else:
+-                assert res in expected_data_res
+-                available_datas[res].append(data_id)
+-
+-        # Make sure that every resolution we expect has at least one dataset
+-        for exp_res, avail_id in available_datas.items():
+-            assert avail_id, f"Missing datasets for data resolution {exp_res}"
+-        for exp_res, avail_id in available_geos.items():
+-            assert avail_id, f"Missing geo datasets for geo resolution {exp_res}"
+-
+-    @pytest.mark.parametrize(
+-        ("input_files", "has_5km", "has_500", "has_250", "default_res"),
+-        [
+-            (lazy_fixture("modis_l1b_nasa_mod021km_file"),
+-             True, False, False, 1000),
+-            (lazy_fixture("modis_l1b_imapp_1000m_file"),
+-             True, False, False, 1000),
+-            (lazy_fixture("modis_l1b_nasa_mod02hkm_file"),
+-             False, True, True, 250),
+-            (lazy_fixture("modis_l1b_nasa_mod02qkm_file"),
+-             False, True, True, 250),
+-            (lazy_fixture("modis_l1b_nasa_1km_mod03_files"),
+-             True, True, True, 250),
+-        ]
+-    )
+-    def test_load_longitude_latitude(self, input_files, has_5km, has_500, has_250, default_res):
+-        """Test that longitude and latitude datasets are loaded correctly."""
+-        scene = Scene(reader="modis_l1b", filenames=input_files)
+-        shape_5km = _shape_for_resolution(5000)
+-        shape_500m = _shape_for_resolution(500)
+-        shape_250m = _shape_for_resolution(250)
+-        default_shape = _shape_for_resolution(default_res)
+-        scheduler = CustomScheduler(max_computes=1 + has_5km + has_500 + has_250)
+-        with dask.config.set({"scheduler": scheduler, "array.chunk-size": "1 MiB"}):
+-            _load_and_check_geolocation(scene, "*", default_res, default_shape, True)
+-            _load_and_check_geolocation(scene, 5000, 5000, shape_5km, has_5km)
+-            _load_and_check_geolocation(scene, 500, 500, shape_500m, has_500)
+-            _load_and_check_geolocation(scene, 250, 250, shape_250m, has_250)
++#     @pytest.mark.parametrize(
++#         ("input_files", "expected_names", "expected_data_res", "expected_geo_res"),
++#         [
++#             (lazy_fixture("modis_l1b_nasa_mod021km_file"),
++#              AVAILABLE_1KM_PRODUCT_NAMES + AVAILABLE_HKM_PRODUCT_NAMES + AVAILABLE_QKM_PRODUCT_NAMES,
++#              [1000], [5000, 1000]),
++#             (lazy_fixture("modis_l1b_imapp_1000m_file"),
++#              AVAILABLE_1KM_PRODUCT_NAMES + AVAILABLE_HKM_PRODUCT_NAMES + AVAILABLE_QKM_PRODUCT_NAMES,
++#              [1000], [5000, 1000]),
++#             (lazy_fixture("modis_l1b_nasa_mod02hkm_file"),
++#              AVAILABLE_HKM_PRODUCT_NAMES + AVAILABLE_QKM_PRODUCT_NAMES, [500], [1000, 500, 250]),
++#         (lazy_fixture("modis_l1b_nasa_mod02qkm_file"),
++#              AVAILABLE_QKM_PRODUCT_NAMES, [250], [1000, 500, 250]),
++#         ]
++#     )
++#     def test_scene_available_datasets(self, input_files, expected_names, expected_data_res, expected_geo_res):
++#         """Test that datasets are available."""
++#         scene = Scene(reader="modis_l1b", filenames=input_files)
++#         available_datasets = scene.available_dataset_names()
++#         assert len(available_datasets) > 0
++#         assert "longitude" in available_datasets
++#         assert "latitude" in available_datasets
++#         for chan_name in expected_names:
++#             assert chan_name in available_datasets
++#
++#         available_data_ids = scene.available_dataset_ids()
++#         available_datas = {x: [] for x in expected_data_res}
++#         available_geos = {x: [] for x in expected_geo_res}
++#         # Make sure that every resolution from the reader is what we expect
++#         for data_id in available_data_ids:
++#             res = data_id["resolution"]
++#             if data_id["name"] in ["longitude", "latitude"]:
++#                 assert res in expected_geo_res
++#                 available_geos[res].append(data_id)
++#             else:
++#                 assert res in expected_data_res
++#                 available_datas[res].append(data_id)
++#
++#         # Make sure that every resolution we expect has at least one dataset
++#         for exp_res, avail_id in available_datas.items():
++#             assert avail_id, f"Missing datasets for data resolution {exp_res}"
++#         for exp_res, avail_id in available_geos.items():
++#             assert avail_id, f"Missing geo datasets for geo resolution {exp_res}"
++
++#     @pytest.mark.parametrize(
++#         ("input_files", "has_5km", "has_500", "has_250", "default_res"),
++#         [
++#             (lazy_fixture("modis_l1b_nasa_mod021km_file"),
++#              True, False, False, 1000),
++#             (lazy_fixture("modis_l1b_imapp_1000m_file"),
++#              True, False, False, 1000),
++#             (lazy_fixture("modis_l1b_nasa_mod02hkm_file"),
++#              False, True, True, 250),
++#             (lazy_fixture("modis_l1b_nasa_mod02qkm_file"),
++#              False, True, True, 250),
++#             (lazy_fixture("modis_l1b_nasa_1km_mod03_files"),
++#              True, True, True, 250),
++#         ]
++#     )
++#     def test_load_longitude_latitude(self, input_files, has_5km, has_500, has_250, default_res):
++#         """Test that longitude and latitude datasets are loaded correctly."""
++#         scene = Scene(reader="modis_l1b", filenames=input_files)
++#         shape_5km = _shape_for_resolution(5000)
++#         shape_500m = _shape_for_resolution(500)
++#         shape_250m = _shape_for_resolution(250)
++#         default_shape = _shape_for_resolution(default_res)
++#         scheduler = CustomScheduler(max_computes=1 + has_5km + has_500 + has_250)
++#         with dask.config.set({"scheduler": scheduler, "array.chunk-size": "1 MiB"}):
++#             _load_and_check_geolocation(scene, "*", default_res, default_shape, True)
++#             _load_and_check_geolocation(scene, 5000, 5000, shape_5km, has_5km)
++#             _load_and_check_geolocation(scene, 500, 500, shape_500m, has_500)
++#             _load_and_check_geolocation(scene, 250, 250, shape_250m, has_250)
+ 
+     def test_load_sat_zenith_angle(self, modis_l1b_nasa_mod021km_file):
+         """Test loading satellite zenith angle band."""
+diff --git a/satpy/tests/reader_tests/modis_tests/test_modis_l2.py b/satpy/tests/reader_tests/modis_tests/test_modis_l2.py
+index 8876dec..a88d1d7 100644
+--- a/satpy/tests/reader_tests/modis_tests/test_modis_l2.py
++++ b/satpy/tests/reader_tests/modis_tests/test_modis_l2.py
+@@ -23,7 +23,7 @@ import dask
+ import dask.array as da
+ import numpy as np
+ import pytest
+-from pytest_lazyfixture import lazy_fixture
++# from pytest_lazyfixture import lazy_fixture
+ 
+ from satpy import Scene, available_readers
+ from satpy.tests.utils import CustomScheduler, make_dataid
+@@ -69,30 +69,30 @@ class TestModisL2:
+         assert "latitude" in available_datasets
+         assert "longitude" in available_datasets
+ 
+-    @pytest.mark.parametrize(
+-        ("input_files", "has_5km", "has_500", "has_250", "default_res"),
+-        [
+-            (lazy_fixture("modis_l2_nasa_mod35_file"),
+-             True, False, False, 1000),
+-        ]
+-    )
+-    def test_load_longitude_latitude(self, input_files, has_5km, has_500, has_250, default_res):
+-        """Test that longitude and latitude datasets are loaded correctly."""
+-        from .test_modis_l1b import _load_and_check_geolocation
+-        scene = Scene(reader="modis_l2", filenames=input_files)
+-        shape_5km = _shape_for_resolution(5000)
+-        shape_500m = _shape_for_resolution(500)
+-        shape_250m = _shape_for_resolution(250)
+-        default_shape = _shape_for_resolution(default_res)
+-        with dask.config.set(scheduler=CustomScheduler(max_computes=1 + has_5km + has_500 + has_250)):
+-            _load_and_check_geolocation(scene, "*", default_res, default_shape, True,
+-                                        check_callback=_check_shared_metadata)
+-            _load_and_check_geolocation(scene, 5000, 5000, shape_5km, has_5km,
+-                                        check_callback=_check_shared_metadata)
+-            _load_and_check_geolocation(scene, 500, 500, shape_500m, has_500,
+-                                        check_callback=_check_shared_metadata)
+-            _load_and_check_geolocation(scene, 250, 250, shape_250m, has_250,
+-                                        check_callback=_check_shared_metadata)
++#     @pytest.mark.parametrize(
++#         ("input_files", "has_5km", "has_500", "has_250", "default_res"),
++#         [
++#             (lazy_fixture("modis_l2_nasa_mod35_file"),
++#              True, False, False, 1000),
++#         ]
++#     )
++#     def test_load_longitude_latitude(self, input_files, has_5km, has_500, has_250, default_res):
++#         """Test that longitude and latitude datasets are loaded correctly."""
++#         from .test_modis_l1b import _load_and_check_geolocation
++#         scene = Scene(reader="modis_l2", filenames=input_files)
++#         shape_5km = _shape_for_resolution(5000)
++#         shape_500m = _shape_for_resolution(500)
++#         shape_250m = _shape_for_resolution(250)
++#         default_shape = _shape_for_resolution(default_res)
++#         with dask.config.set(scheduler=CustomScheduler(max_computes=1 + has_5km + has_500 + has_250)):
++#             _load_and_check_geolocation(scene, "*", default_res, default_shape, True,
++#                                         check_callback=_check_shared_metadata)
++#             _load_and_check_geolocation(scene, 5000, 5000, shape_5km, has_5km,
++#                                         check_callback=_check_shared_metadata)
++#             _load_and_check_geolocation(scene, 500, 500, shape_500m, has_500,
++#                                         check_callback=_check_shared_metadata)
++#             _load_and_check_geolocation(scene, 250, 250, shape_250m, has_250,
++#                                         check_callback=_check_shared_metadata)
+ 
+     def test_load_quality_assurance(self, modis_l2_nasa_mod35_file):
+         """Test loading quality assurance."""
+@@ -105,79 +105,79 @@ class TestModisL2:
+         assert quality_assurance.shape == _shape_for_resolution(1000)
+         _check_shared_metadata(quality_assurance, expect_area=True)
+ 
+-    @pytest.mark.parametrize(
+-        ("input_files", "loadables", "request_resolution", "exp_resolution", "exp_area"),
+-        [
+-            (lazy_fixture("modis_l2_nasa_mod35_mod03_files"),
+-             ["cloud_mask"],
+-             1000, 1000, True),
+-            (lazy_fixture("modis_l2_imapp_mask_byte1_geo_files"),
+-             ["cloud_mask", "land_sea_mask", "snow_ice_mask"],
+-             None, 1000, True),
+-        ]
+-    )
+-    def test_load_category_dataset(self, input_files, loadables, request_resolution, exp_resolution, exp_area):
+-        """Test loading category products."""
+-        scene = Scene(reader="modis_l2", filenames=input_files)
+-        kwargs = {"resolution": request_resolution} if request_resolution is not None else {}
+-        scene.load(loadables, **kwargs)
+-        for ds_name in loadables:
+-            cat_id = make_dataid(name=ds_name, resolution=exp_resolution)
+-            assert cat_id in scene
+-            cat_data_arr = scene[cat_id]
+-            assert isinstance(cat_data_arr.data, da.Array)
+-            cat_data_arr = cat_data_arr.compute()
+-            assert cat_data_arr.shape == _shape_for_resolution(exp_resolution)
+-            assert cat_data_arr.values[0, 0] == 0.0
+-            assert cat_data_arr.attrs.get("resolution") == exp_resolution
+-            # mask variables should be integers
+-            assert np.issubdtype(cat_data_arr.dtype, np.integer)
+-            assert cat_data_arr.attrs.get("_FillValue") is not None
+-            _check_shared_metadata(cat_data_arr, expect_area=exp_area)
+-
+-    @pytest.mark.parametrize(
+-        ("input_files", "exp_area"),
+-        [
+-            (lazy_fixture("modis_l2_nasa_mod35_file"), False),
+-            (lazy_fixture("modis_l2_nasa_mod35_mod03_files"), True),
+-        ]
+-    )
+-    def test_load_250m_cloud_mask_dataset(self, input_files, exp_area):
+-        """Test loading 250m cloud mask."""
+-        scene = Scene(reader="modis_l2", filenames=input_files)
+-        dataset_name = "cloud_mask"
+-        scene.load([dataset_name], resolution=250)
+-        cloud_mask_id = make_dataid(name=dataset_name, resolution=250)
+-        assert cloud_mask_id in scene
+-        cloud_mask = scene[cloud_mask_id]
+-        assert isinstance(cloud_mask.data, da.Array)
+-        cloud_mask = cloud_mask.compute()
+-        assert cloud_mask.shape == _shape_for_resolution(250)
+-        assert cloud_mask.values[0, 0] == 0.0
+-        # mask variables should be integers
+-        assert np.issubdtype(cloud_mask.dtype, np.integer)
+-        assert cloud_mask.attrs.get("_FillValue") is not None
+-        _check_shared_metadata(cloud_mask, expect_area=exp_area)
+-
+-    @pytest.mark.parametrize(
+-        ("input_files", "loadables", "exp_resolution", "exp_area", "exp_value"),
+-        [
+-            (lazy_fixture("modis_l2_nasa_mod06_file"), ["surface_pressure"], 5000, True, 4.0),
+-            # snow mask is considered a category product, factor/offset ignored
+-            (lazy_fixture("modis_l2_imapp_snowmask_file"), ["snow_mask"], 1000, False, 1.0),
+-            (lazy_fixture("modis_l2_imapp_snowmask_geo_files"), ["snow_mask"], 1000, True, 1.0),
+-        ]
+-    )
+-    def test_load_l2_dataset(self, input_files, loadables, exp_resolution, exp_area, exp_value):
+-        """Load and check an L2 variable."""
+-        scene = Scene(reader="modis_l2", filenames=input_files)
+-        scene.load(loadables)
+-        for ds_name in loadables:
+-            assert ds_name in scene
+-            data_arr = scene[ds_name]
+-            assert isinstance(data_arr.data, da.Array)
+-            data_arr = data_arr.compute()
+-            assert data_arr.values[0, 0] == exp_value
+-            assert data_arr.shape == _shape_for_resolution(exp_resolution)
+-            assert data_arr.attrs.get("resolution") == exp_resolution
+-            _check_shared_metadata(data_arr, expect_area=exp_area)
++#     @pytest.mark.parametrize(
++#         ("input_files", "loadables", "request_resolution", "exp_resolution", "exp_area"),
++#         [
++#             (lazy_fixture("modis_l2_nasa_mod35_mod03_files"),
++#              ["cloud_mask"],
++#              1000, 1000, True),
++#             (lazy_fixture("modis_l2_imapp_mask_byte1_geo_files"),
++#              ["cloud_mask", "land_sea_mask", "snow_ice_mask"],
++#              None, 1000, True),
++#         ]
++#     )
++#     def test_load_category_dataset(self, input_files, loadables, request_resolution, exp_resolution, exp_area):
++#         """Test loading category products."""
++#         scene = Scene(reader="modis_l2", filenames=input_files)
++#         kwargs = {"resolution": request_resolution} if request_resolution is not None else {}
++#         scene.load(loadables, **kwargs)
++#         for ds_name in loadables:
++#             cat_id = make_dataid(name=ds_name, resolution=exp_resolution)
++#             assert cat_id in scene
++#             cat_data_arr = scene[cat_id]
++#             assert isinstance(cat_data_arr.data, da.Array)
++#             cat_data_arr = cat_data_arr.compute()
++#             assert cat_data_arr.shape == _shape_for_resolution(exp_resolution)
++#             assert cat_data_arr.values[0, 0] == 0.0
++#             assert cat_data_arr.attrs.get("resolution") == exp_resolution
++#             # mask variables should be integers
++#             assert np.issubdtype(cat_data_arr.dtype, np.integer)
++#             assert cat_data_arr.attrs.get("_FillValue") is not None
++#             _check_shared_metadata(cat_data_arr, expect_area=exp_area)
++
++#     @pytest.mark.parametrize(
++#         ("input_files", "exp_area"),
++#         [
++#             (lazy_fixture("modis_l2_nasa_mod35_file"), False),
++#             (lazy_fixture("modis_l2_nasa_mod35_mod03_files"), True),
++#         ]
++#     )
++#     def test_load_250m_cloud_mask_dataset(self, input_files, exp_area):
++#         """Test loading 250m cloud mask."""
++#         scene = Scene(reader="modis_l2", filenames=input_files)
++#         dataset_name = "cloud_mask"
++#         scene.load([dataset_name], resolution=250)
++#         cloud_mask_id = make_dataid(name=dataset_name, resolution=250)
++#         assert cloud_mask_id in scene
++#         cloud_mask = scene[cloud_mask_id]
++#         assert isinstance(cloud_mask.data, da.Array)
++#         cloud_mask = cloud_mask.compute()
++#         assert cloud_mask.shape == _shape_for_resolution(250)
++#         assert cloud_mask.values[0, 0] == 0.0
++#         # mask variables should be integers
++#         assert np.issubdtype(cloud_mask.dtype, np.integer)
++#         assert cloud_mask.attrs.get("_FillValue") is not None
++#         _check_shared_metadata(cloud_mask, expect_area=exp_area)
++
++#     @pytest.mark.parametrize(
++#         ("input_files", "loadables", "exp_resolution", "exp_area", "exp_value"),
++#         [
++#             (lazy_fixture("modis_l2_nasa_mod06_file"), ["surface_pressure"], 5000, True, 4.0),
++#             # snow mask is considered a category product, factor/offset ignored
++#             (lazy_fixture("modis_l2_imapp_snowmask_file"), ["snow_mask"], 1000, False, 1.0),
++#             (lazy_fixture("modis_l2_imapp_snowmask_geo_files"), ["snow_mask"], 1000, True, 1.0),
++#         ]
++#     )
++#     def test_load_l2_dataset(self, input_files, loadables, exp_resolution, exp_area, exp_value):
++#         """Load and check an L2 variable."""
++#         scene = Scene(reader="modis_l2", filenames=input_files)
++#         scene.load(loadables)
++#         for ds_name in loadables:
++#             assert ds_name in scene
++#             data_arr = scene[ds_name]
++#             assert isinstance(data_arr.data, da.Array)
++#             data_arr = data_arr.compute()
++#             assert data_arr.values[0, 0] == exp_value
++#             assert data_arr.shape == _shape_for_resolution(exp_resolution)
++#             assert data_arr.attrs.get("resolution") == exp_resolution
++#             _check_shared_metadata(data_arr, expect_area=exp_area)
+diff --git a/satpy/tests/reader_tests/modis_tests/test_modis_l3.py b/satpy/tests/reader_tests/modis_tests/test_modis_l3.py
+index de8ff68..8e7cd48 100644
+--- a/satpy/tests/reader_tests/modis_tests/test_modis_l3.py
++++ b/satpy/tests/reader_tests/modis_tests/test_modis_l3.py
+@@ -23,7 +23,7 @@ import dask.array as da
+ import numpy as np
+ import pytest
+ from pyresample import geometry
+-from pytest_lazyfixture import lazy_fixture
++# from pytest_lazyfixture import lazy_fixture
+ 
+ from satpy import Scene, available_readers
+ 
+@@ -47,42 +47,42 @@ class TestModisL3:
+         """Test that MODIS L3 reader is available."""
+         assert "modis_l3" in available_readers()
+ 
+-    @pytest.mark.parametrize(
+-        ("loadable", "filename"),
+-        [
+-            ("Coarse_Resolution_Surface_Reflectance_Band_2", lazy_fixture("modis_l3_nasa_mod09_file")),
+-            ("BRDF_Albedo_Parameter1_Band2", lazy_fixture("modis_l3_nasa_mod43_file")),
+-        ]
+-    )
+-    def test_scene_available_datasets(self, loadable, filename):
+-        """Test that datasets are available."""
+-        scene = Scene(reader="modis_l3", filenames=filename)
+-        available_datasets = scene.all_dataset_names()
+-        assert len(available_datasets) > 0
+-        assert loadable in available_datasets
+-
+-        from satpy.readers.modis_l3 import ModisL3GriddedHDFFileHandler
+-        fh = ModisL3GriddedHDFFileHandler(filename[0], {}, {"file_type": "modis_l3_cmg_hdf"})
+-        configured_datasets = [[None, {"name": "none_ds", "file_type": "modis_l3_cmg_hdf"}],
+-                               [True, {"name": "true_ds", "file_type": "modis_l3_cmg_hdf"}],
+-                               [False, {"name": "false_ds", "file_type": "modis_l3_cmg_hdf"}],
+-                               [None, {"name": "other_ds", "file_type": "modis_l2_random"}]]
+-        for status, mda in fh.available_datasets(configured_datasets):
+-            if mda["name"] == "none_ds":
+-                assert mda["file_type"] == "modis_l3_cmg_hdf"
+-                assert status is False
+-            elif mda["name"] == "true_ds":
+-                assert mda["file_type"] == "modis_l3_cmg_hdf"
+-                assert status
+-            elif mda["name"] == "false_ds":
+-                assert mda["file_type"] == "modis_l3_cmg_hdf"
+-                assert status is False
+-            elif mda["name"] == "other_ds":
+-                assert mda["file_type"] == "modis_l2_random"
+-                assert status is None
+-            elif mda["name"] == loadable:
+-                assert mda["file_type"] == "modis_l3_cmg_hdf"
+-                assert status
++#     @pytest.mark.parametrize(
++#         ("loadable", "filename"),
++#         [
++#             ("Coarse_Resolution_Surface_Reflectance_Band_2", lazy_fixture("modis_l3_nasa_mod09_file")),
++#             ("BRDF_Albedo_Parameter1_Band2", lazy_fixture("modis_l3_nasa_mod43_file")),
++#         ]
++#     )
++#     def test_scene_available_datasets(self, loadable, filename):
++#         """Test that datasets are available."""
++#         scene = Scene(reader="modis_l3", filenames=filename)
++#         available_datasets = scene.all_dataset_names()
++#         assert len(available_datasets) > 0
++#         assert loadable in available_datasets
++#
++#         from satpy.readers.modis_l3 import ModisL3GriddedHDFFileHandler
++#         fh = ModisL3GriddedHDFFileHandler(filename[0], {}, {"file_type": "modis_l3_cmg_hdf"})
++#         configured_datasets = [[None, {"name": "none_ds", "file_type": "modis_l3_cmg_hdf"}],
++#                                [True, {"name": "true_ds", "file_type": "modis_l3_cmg_hdf"}],
++#                                [False, {"name": "false_ds", "file_type": "modis_l3_cmg_hdf"}],
++#                                [None, {"name": "other_ds", "file_type": "modis_l2_random"}]]
++#         for status, mda in fh.available_datasets(configured_datasets):
++#             if mda["name"] == "none_ds":
++#                 assert mda["file_type"] == "modis_l3_cmg_hdf"
++#                 assert status is False
++#             elif mda["name"] == "true_ds":
++#                 assert mda["file_type"] == "modis_l3_cmg_hdf"
++#                 assert status
++#             elif mda["name"] == "false_ds":
++#                 assert mda["file_type"] == "modis_l3_cmg_hdf"
++#                 assert status is False
++#             elif mda["name"] == "other_ds":
++#                 assert mda["file_type"] == "modis_l2_random"
++#                 assert status is None
++#             elif mda["name"] == loadable:
++#                 assert mda["file_type"] == "modis_l3_cmg_hdf"
++#                 assert status
+ 
+     def test_load_l3_dataset(self, modis_l3_nasa_mod09_file):
+         """Load and check an L2 variable."""
+diff --git a/satpy/tests/reader_tests/test_abi_l1b.py b/satpy/tests/reader_tests/test_abi_l1b.py
+index 969c497..f3061a0 100644
+--- a/satpy/tests/reader_tests/test_abi_l1b.py
++++ b/satpy/tests/reader_tests/test_abi_l1b.py
+@@ -29,7 +29,7 @@ import numpy as np
+ import numpy.typing as npt
+ import pytest
+ import xarray as xr
+-from pytest_lazyfixture import lazy_fixture
++# from pytest_lazyfixture import lazy_fixture
+ 
+ from satpy import DataQuery
+ from satpy.readers.abi_l1b import NC_ABI_L1B
+@@ -336,52 +336,52 @@ def test_file_patterns_match(channel, suffix):
+         assert len(loadables) == 1
+ 
+ 
+- at pytest.mark.parametrize(
+-    "c01_data_arr", [lazy_fixture("c01_rad"), lazy_fixture("c01_rad_h5netcdf")]
+-)
+-class Test_NC_ABI_L1B:
+-    """Test the NC_ABI_L1B reader."""
+-
+-    def test_get_dataset(self, c01_data_arr):
+-        """Test the get_dataset method."""
+-        exp = {
+-            "calibration": "radiance",
+-            "instrument_ID": None,
+-            "modifiers": (),
+-            "name": "C01",
+-            "observation_type": "Rad",
+-            "orbital_parameters": {
+-                "projection_altitude": 1.0,
+-                "projection_latitude": 0.0,
+-                "projection_longitude": -90.0,
+-                "satellite_nominal_altitude": 35786020.0,
+-                "satellite_nominal_latitude": 0.0,
+-                "satellite_nominal_longitude": -89.5,
+-                "yaw_flip": True,
+-            },
+-            "orbital_slot": None,
+-            "platform_name": "GOES-16",
+-            "platform_shortname": "G16",
+-            "production_site": None,
+-            "reader": "abi_l1b",
+-            "resolution": 1000,
+-            "scan_mode": "M4",
+-            "scene_abbr": "C",
+-            "scene_id": None,
+-            "sensor": "abi",
+-            "timeline_ID": None,
+-            "suffix": "suffix",
+-            "units": "W m-2 um-1 sr-1",
+-            "start_time": datetime(2017, 9, 20, 17, 30, 40, 800000),
+-            "end_time": datetime(2017, 9, 20, 17, 41, 17, 500000),
+-        }
+-
+-        res = c01_data_arr
+-        _get_and_check_array(res, np.float32)
+-        _check_area(res)
+-        _check_dims_and_coords(res)
+-        for exp_key, exp_val in exp.items():
+-            assert res.attrs[exp_key] == exp_val
++# @pytest.mark.parametrize(
++#     "c01_data_arr", [lazy_fixture("c01_rad"), lazy_fixture("c01_rad_h5netcdf")]
++# )
++# class Test_NC_ABI_L1B:
++#     """Test the NC_ABI_L1B reader."""
++#
++#     def test_get_dataset(self, c01_data_arr):
++#         """Test the get_dataset method."""
++#         exp = {
++#             "calibration": "radiance",
++#             "instrument_ID": None,
++#             "modifiers": (),
++#             "name": "C01",
++#             "observation_type": "Rad",
++#             "orbital_parameters": {
++#                 "projection_altitude": 1.0,
++#                 "projection_latitude": 0.0,
++#                 "projection_longitude": -90.0,
++#                 "satellite_nominal_altitude": 35786020.0,
++#                 "satellite_nominal_latitude": 0.0,
++#                 "satellite_nominal_longitude": -89.5,
++#                 "yaw_flip": True,
++#             },
++#             "orbital_slot": None,
++#             "platform_name": "GOES-16",
++#             "platform_shortname": "G16",
++#             "production_site": None,
++#             "reader": "abi_l1b",
++#             "resolution": 1000,
++#             "scan_mode": "M4",
++#             "scene_abbr": "C",
++#             "scene_id": None,
++#             "sensor": "abi",
++#             "timeline_ID": None,
++#             "suffix": "suffix",
++#             "units": "W m-2 um-1 sr-1",
++#             "start_time": datetime(2017, 9, 20, 17, 30, 40, 800000),
++#             "end_time": datetime(2017, 9, 20, 17, 41, 17, 500000),
++#         }
++#
++#         res = c01_data_arr
++#         _get_and_check_array(res, np.float32)
++#         _check_area(res)
++#         _check_dims_and_coords(res)
++#         for exp_key, exp_val in exp.items():
++#             assert res.attrs[exp_key] == exp_val
+ 
+ 
+ @pytest.mark.parametrize("clip_negative_radiances", [False, True])
+diff --git a/satpy/tests/reader_tests/test_fci_l1c_nc.py b/satpy/tests/reader_tests/test_fci_l1c_nc.py
+index 792de90..78f467f 100644
+--- a/satpy/tests/reader_tests/test_fci_l1c_nc.py
++++ b/satpy/tests/reader_tests/test_fci_l1c_nc.py
+@@ -28,7 +28,7 @@ import numpy.testing
+ import pytest
+ import xarray as xr
+ from netCDF4 import default_fillvals
+-from pytest_lazyfixture import lazy_fixture
++# from pytest_lazyfixture import lazy_fixture
+ 
+ from satpy.readers.fci_l1c_nc import FCIL1cNCFileHandler
+ from satpy.tests.reader_tests.test_netcdf_utils import FakeNetCDF4FileHandler
+@@ -485,119 +485,119 @@ class TestFCIL1cNCReader:
+         files = reader.select_files_from_pathnames(filenames)
+         assert len(files) == 0
+ 
+-    @pytest.mark.parametrize(("fh_param", "expected_res_n"), [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), 16),
+-                                                              (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), 4)])
+-    def test_load_counts(self, reader_configs, fh_param,
+-                         expected_res_n):
+-        """Test loading with counts."""
+-        reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs)
+-        res = reader.load(
+-            [make_dataid(name=name, calibration="counts") for name in
+-             fh_param["channels"]["solar"] + fh_param["channels"]["terran"]], pad_data=False)
+-        assert expected_res_n == len(res)
+-        for ch, grid_type in zip(fh_param["channels"]["solar"] + fh_param["channels"]["terran"],
+-                                 fh_param["channels"]["solar_grid_type"] +
+-                                 fh_param["channels"]["terran_grid_type"]):
+-            assert res[ch].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"],
+-                                     GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"])
+-            assert res[ch].dtype == np.uint16
+-            assert res[ch].attrs["calibration"] == "counts"
+-            assert res[ch].attrs["units"] == "count"
+-            if ch == "ir_38":
+-                numpy.testing.assert_array_equal(res[ch][-1], 1)
+-                numpy.testing.assert_array_equal(res[ch][0], 5000)
+-            else:
+-                numpy.testing.assert_array_equal(res[ch], 1)
+-
+-    @pytest.mark.parametrize(("fh_param", "expected_res_n"), [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), 16),
+-                                                              (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), 4)])
+-    def test_load_radiance(self, reader_configs, fh_param,
+-                           expected_res_n):
+-        """Test loading with radiance."""
+-        reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs)
+-        res = reader.load(
+-            [make_dataid(name=name, calibration="radiance") for name in
+-             fh_param["channels"]["solar"] + fh_param["channels"]["terran"]], pad_data=False)
+-        assert expected_res_n == len(res)
+-        for ch, grid_type in zip(fh_param["channels"]["solar"] + fh_param["channels"]["terran"],
+-                                 fh_param["channels"]["solar_grid_type"] +
+-                                 fh_param["channels"]["terran_grid_type"]):
+-            assert res[ch].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"],
+-                                     GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"])
+-            assert res[ch].dtype == np.float32
+-            assert res[ch].attrs["calibration"] == "radiance"
+-            assert res[ch].attrs["units"] == "mW m-2 sr-1 (cm-1)-1"
+-            assert res[ch].attrs["radiance_unit_conversion_coefficient"].values == np.float32(1234.56)
+-            if ch == "ir_38":
+-                numpy.testing.assert_array_equal(res[ch][-1], 15)
+-                numpy.testing.assert_array_equal(res[ch][0], 9700)
+-            else:
+-                numpy.testing.assert_array_equal(res[ch], 15)
+-
+-    @pytest.mark.parametrize(("fh_param", "expected_res_n"), [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), 8),
+-                                                              (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), 2)])
+-    def test_load_reflectance(self, reader_configs, fh_param,
+-                              expected_res_n):
+-        """Test loading with reflectance."""
+-        reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs)
+-        res = reader.load(
+-            [make_dataid(name=name, calibration="reflectance") for name in
+-             fh_param["channels"]["solar"]], pad_data=False)
+-        assert expected_res_n == len(res)
+-        for ch, grid_type in zip(fh_param["channels"]["solar"], fh_param["channels"]["solar_grid_type"]):
+-            assert res[ch].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"],
+-                                     GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"])
+-            assert res[ch].dtype == np.float32
+-            assert res[ch].attrs["calibration"] == "reflectance"
+-            assert res[ch].attrs["units"] == "%"
+-            numpy.testing.assert_array_almost_equal(res[ch], 100 * 15 * 1 * np.pi / 50)
+-
+-    @pytest.mark.parametrize(("fh_param", "expected_res_n"), [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), 8),
+-                                                              (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), 2)])
+-    def test_load_bt(self, reader_configs, caplog, fh_param,
+-                     expected_res_n):
+-        """Test loading with bt."""
+-        reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs)
+-        with caplog.at_level(logging.WARNING):
+-            res = reader.load(
+-                [make_dataid(name=name, calibration="brightness_temperature") for
+-                 name in fh_param["channels"]["terran"]], pad_data=False)
+-            assert caplog.text == ""
+-        assert expected_res_n == len(res)
+-        for ch, grid_type in zip(fh_param["channels"]["terran"], fh_param["channels"]["terran_grid_type"]):
+-            assert res[ch].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"],
+-                                     GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"])
+-            assert res[ch].dtype == np.float32
+-            assert res[ch].attrs["calibration"] == "brightness_temperature"
+-            assert res[ch].attrs["units"] == "K"
+-
+-            if ch == "ir_38":
+-                numpy.testing.assert_array_almost_equal(res[ch][-1], np.float32(209.68275))
+-                numpy.testing.assert_array_almost_equal(res[ch][0], np.float32(1888.8513))
+-            else:
+-                numpy.testing.assert_array_almost_equal(res[ch], np.float32(209.68275))
+-
+-    @pytest.mark.parametrize("fh_param", [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture")),
+-                                          (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"))])
+-    def test_orbital_parameters_attr(self, reader_configs, fh_param):
+-        """Test the orbital parameter attribute."""
+-        reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs)
+-        res = reader.load(
+-            [make_dataid(name=name) for name in
+-             fh_param["channels"]["solar"] + fh_param["channels"]["terran"]], pad_data=False)
+-
+-        for ch in fh_param["channels"]["solar"] + fh_param["channels"]["terran"]:
+-            assert res[ch].attrs["orbital_parameters"] == {
+-                "satellite_actual_longitude": np.mean(np.arange(6000)),
+-                "satellite_actual_latitude": np.mean(np.arange(6000)),
+-                "satellite_actual_altitude": np.mean(np.arange(6000)),
+-                "satellite_nominal_longitude": 0.0,
+-                "satellite_nominal_latitude": 0,
+-                "satellite_nominal_altitude": 35786400.0,
+-                "projection_longitude": 0.0,
+-                "projection_latitude": 0,
+-                "projection_altitude": 35786400.0,
+-            }
++#     @pytest.mark.parametrize(("fh_param", "expected_res_n"), [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), 16),
++#                                                               (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), 4)])
++#     def test_load_counts(self, reader_configs, fh_param,
++#                          expected_res_n):
++#         """Test loading with counts."""
++#         reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs)
++#         res = reader.load(
++#             [make_dataid(name=name, calibration="counts") for name in
++#              fh_param["channels"]["solar"] + fh_param["channels"]["terran"]], pad_data=False)
++#         assert expected_res_n == len(res)
++#         for ch, grid_type in zip(fh_param["channels"]["solar"] + fh_param["channels"]["terran"],
++#                                  fh_param["channels"]["solar_grid_type"] +
++#                                  fh_param["channels"]["terran_grid_type"]):
++#             assert res[ch].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"],
++#                                      GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"])
++#             assert res[ch].dtype == np.uint16
++#             assert res[ch].attrs["calibration"] == "counts"
++#             assert res[ch].attrs["units"] == "count"
++#             if ch == "ir_38":
++#                 numpy.testing.assert_array_equal(res[ch][-1], 1)
++#                 numpy.testing.assert_array_equal(res[ch][0], 5000)
++#             else:
++#                 numpy.testing.assert_array_equal(res[ch], 1)
++
++#     @pytest.mark.parametrize(("fh_param", "expected_res_n"), [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), 16),
++#                                                               (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), 4)])
++#     def test_load_radiance(self, reader_configs, fh_param,
++#                            expected_res_n):
++#         """Test loading with radiance."""
++#         reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs)
++#         res = reader.load(
++#             [make_dataid(name=name, calibration="radiance") for name in
++#              fh_param["channels"]["solar"] + fh_param["channels"]["terran"]], pad_data=False)
++#         assert expected_res_n == len(res)
++#         for ch, grid_type in zip(fh_param["channels"]["solar"] + fh_param["channels"]["terran"],
++#                                  fh_param["channels"]["solar_grid_type"] +
++#                                  fh_param["channels"]["terran_grid_type"]):
++#             assert res[ch].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"],
++#                                      GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"])
++#             assert res[ch].dtype == np.float32
++#             assert res[ch].attrs["calibration"] == "radiance"
++#             assert res[ch].attrs["units"] == "mW m-2 sr-1 (cm-1)-1"
++#             assert res[ch].attrs["radiance_unit_conversion_coefficient"].values == np.float32(1234.56)
++#             if ch == "ir_38":
++#                 numpy.testing.assert_array_equal(res[ch][-1], 15)
++#                 numpy.testing.assert_array_equal(res[ch][0], 9700)
++#             else:
++#                 numpy.testing.assert_array_equal(res[ch], 15)
++
++#     @pytest.mark.parametrize(("fh_param", "expected_res_n"), [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), 8),
++#                                                               (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), 2)])
++#     def test_load_reflectance(self, reader_configs, fh_param,
++#                               expected_res_n):
++#         """Test loading with reflectance."""
++#         reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs)
++#         res = reader.load(
++#             [make_dataid(name=name, calibration="reflectance") for name in
++#              fh_param["channels"]["solar"]], pad_data=False)
++#         assert expected_res_n == len(res)
++#         for ch, grid_type in zip(fh_param["channels"]["solar"], fh_param["channels"]["solar_grid_type"]):
++#             assert res[ch].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"],
++#                                      GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"])
++#             assert res[ch].dtype == np.float32
++#             assert res[ch].attrs["calibration"] == "reflectance"
++#             assert res[ch].attrs["units"] == "%"
++#             numpy.testing.assert_array_almost_equal(res[ch], 100 * 15 * 1 * np.pi / 50)
++
++#     @pytest.mark.parametrize(("fh_param", "expected_res_n"), [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), 8),
++#                                                               (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), 2)])
++#     def test_load_bt(self, reader_configs, caplog, fh_param,
++#                      expected_res_n):
++#         """Test loading with bt."""
++#         reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs)
++#         with caplog.at_level(logging.WARNING):
++#             res = reader.load(
++#                 [make_dataid(name=name, calibration="brightness_temperature") for
++#                  name in fh_param["channels"]["terran"]], pad_data=False)
++#             assert caplog.text == ""
++#         assert expected_res_n == len(res)
++#         for ch, grid_type in zip(fh_param["channels"]["terran"], fh_param["channels"]["terran_grid_type"]):
++#             assert res[ch].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"],
++#                                      GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"])
++#             assert res[ch].dtype == np.float32
++#             assert res[ch].attrs["calibration"] == "brightness_temperature"
++#             assert res[ch].attrs["units"] == "K"
++#
++#             if ch == "ir_38":
++#                 numpy.testing.assert_array_almost_equal(res[ch][-1], np.float32(209.68275))
++#                 numpy.testing.assert_array_almost_equal(res[ch][0], np.float32(1888.8513))
++#             else:
++#                 numpy.testing.assert_array_almost_equal(res[ch], np.float32(209.68275))
++
++#     @pytest.mark.parametrize("fh_param", [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture")),
++#                                           (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"))])
++#     def test_orbital_parameters_attr(self, reader_configs, fh_param):
++#         """Test the orbital parameter attribute."""
++#         reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs)
++#         res = reader.load(
++#             [make_dataid(name=name) for name in
++#              fh_param["channels"]["solar"] + fh_param["channels"]["terran"]], pad_data=False)
++#
++#         for ch in fh_param["channels"]["solar"] + fh_param["channels"]["terran"]:
++#             assert res[ch].attrs["orbital_parameters"] == {
++#                 "satellite_actual_longitude": np.mean(np.arange(6000)),
++#                 "satellite_actual_latitude": np.mean(np.arange(6000)),
++#                 "satellite_actual_altitude": np.mean(np.arange(6000)),
++#                 "satellite_nominal_longitude": 0.0,
++#                 "satellite_nominal_latitude": 0,
++#                 "satellite_nominal_altitude": 35786400.0,
++#                 "projection_longitude": 0.0,
++#                 "projection_latitude": 0,
++#                 "projection_altitude": 35786400.0,
++#             }
+ 
+     expected_pos_info_for_filetype = {
+         "fdhsi": {"1km": {"start_position_row": 1,
+@@ -618,119 +618,119 @@ class TestFCIL1cNCReader:
+                          "segment_height": 200}}
+     }
+ 
+-    @pytest.mark.parametrize(("fh_param", "expected_pos_info"), [
+-        (lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), expected_pos_info_for_filetype["fdhsi"]),
+-        (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), expected_pos_info_for_filetype["hrfi"])
+-    ])
+-    def test_get_segment_position_info(self, reader_configs, fh_param, expected_pos_info):
+-        """Test the segment position info method."""
+-        reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs)
+-        for filetype_handler in list(reader.file_handlers.values())[0]:
+-            segpos_info = filetype_handler.get_segment_position_info()
+-            assert segpos_info == expected_pos_info
+-
+-    @pytest.mark.parametrize(("fh_param", "expected_res_n"), [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), 16),
+-                                                              (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), 4)])
+-    def test_load_index_map(self, reader_configs, fh_param, expected_res_n):
+-        """Test loading of index_map."""
+-        reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs)
+-        res = reader.load(
+-            [name + "_index_map" for name in
+-             fh_param["channels"]["solar"] + fh_param["channels"]["terran"]], pad_data=False)
+-        assert expected_res_n == len(res)
+-        for ch, grid_type in zip(fh_param["channels"]["solar"] + fh_param["channels"]["terran"],
+-                                 fh_param["channels"]["solar_grid_type"] +
+-                                 fh_param["channels"]["terran_grid_type"]):
+-            assert res[ch + "_index_map"].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"],
+-                                                    GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"])
+-            numpy.testing.assert_array_equal(res[ch + "_index_map"][1, 1], 110)
+-
+-    @pytest.mark.parametrize("fh_param", [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture")),
+-                                          (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"))])
+-    def test_load_aux_data(self, reader_configs, fh_param):
+-        """Test loading of auxiliary data."""
+-        from satpy.readers.fci_l1c_nc import AUX_DATA
+-        reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs)
+-        res = reader.load([fh_param["channels"]["solar"][0] + "_" + key for key in AUX_DATA.keys()],
+-                          pad_data=False)
+-        grid_type = fh_param["channels"]["solar_grid_type"][0]
+-        for aux in [fh_param["channels"]["solar"][0] + "_" + key for key in AUX_DATA.keys()]:
+-            assert res[aux].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"],
+-                                      GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"])
+-            if aux == fh_param["channels"]["solar"][0] + "_earth_sun_distance":
+-                numpy.testing.assert_array_equal(res[aux][1, 1], 149597870.7)
+-            else:
+-                numpy.testing.assert_array_equal(res[aux][1, 1], 10)
+-
+-    @pytest.mark.parametrize(("fh_param", "expected_res_n"), [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), 16),
+-                                                              (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), 4)])
+-    def test_load_quality_only(self, reader_configs, fh_param, expected_res_n):
+-        """Test that loading quality only works."""
+-        reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs)
+-        res = reader.load(
+-            [name + "_pixel_quality" for name in
+-             fh_param["channels"]["solar"] + fh_param["channels"]["terran"]], pad_data=False)
+-        assert expected_res_n == len(res)
+-        for ch, grid_type in zip(fh_param["channels"]["solar"] + fh_param["channels"]["terran"],
+-                                 fh_param["channels"]["solar_grid_type"] +
+-                                 fh_param["channels"]["terran_grid_type"]):
+-            assert res[ch + "_pixel_quality"].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"],
+-                                                        GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"])
+-            numpy.testing.assert_array_equal(res[ch + "_pixel_quality"][1, 1], 3)
+-            assert res[ch + "_pixel_quality"].attrs["name"] == ch + "_pixel_quality"
+-
+-    @pytest.mark.parametrize("fh_param", [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture")),
+-                                          (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"))])
+-    def test_platform_name(self, reader_configs, fh_param):
+-        """Test that platform name is exposed.
+-
+-        Test that the FCI reader exposes the platform name.  Corresponds
+-        to GH issue 1014.
+-        """
+-        reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs)
+-        res = reader.load(["vis_06"], pad_data=False)
+-        assert res["vis_06"].attrs["platform_name"] == "MTG-I1"
+-
+-    @pytest.mark.parametrize(("fh_param", "expected_area"), [
+-        (lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), ["mtg_fci_fdss_1km", "mtg_fci_fdss_2km"]),
+-        (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), ["mtg_fci_fdss_500m", "mtg_fci_fdss_1km"]),
+-    ])
+-    def test_area_definition_computation(self, reader_configs, fh_param, expected_area):
+-        """Test that the geolocation computation is correct."""
+-        reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs)
+-        res = reader.load(["ir_105", "vis_06"], pad_data=False)
+-
+-        # test that area_ids are harmonisation-conform <platform>_<instrument>_<service>_<resolution>
+-        assert res["vis_06"].attrs["area"].area_id == expected_area[0]
+-        assert res["ir_105"].attrs["area"].area_id == expected_area[1]
+-
+-        area_def = res["ir_105"].attrs["area"]
+-        # test area extents computation
+-        np.testing.assert_array_almost_equal(np.array(area_def.area_extent),
+-                                             np.array([-5567999.994203, -5367999.994411,
+-                                                       5567999.994203, -5567999.994203]),
+-                                             decimal=2)
+-
+-        # check that the projection is read in properly
+-        assert area_def.crs.coordinate_operation.method_name == "Geostationary Satellite (Sweep Y)"
+-        assert area_def.crs.coordinate_operation.params[0].value == 0.0  # projection origin longitude
+-        assert area_def.crs.coordinate_operation.params[1].value == 35786400.0  # projection height
+-        assert area_def.crs.ellipsoid.semi_major_metre == 6378137.0
+-        assert area_def.crs.ellipsoid.inverse_flattening == 298.257223563
+-        assert area_def.crs.ellipsoid.is_semi_minor_computed
+-
+-    @pytest.mark.parametrize("fh_param", [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture")),
+-                                          (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"))])
+-    def test_excs(self, reader_configs, fh_param):
+-        """Test that exceptions are raised where expected."""
+-        reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs)
+-
+-        with pytest.raises(ValueError, match="Unknown dataset key, not a channel, quality or auxiliary data: invalid"):
+-            reader.file_handlers[fh_param["filetype"]][0].get_dataset(make_dataid(name="invalid"), {})
+-        with pytest.raises(ValueError, match="unknown invalid value for <enum 'calibration'>"):
+-            reader.file_handlers[fh_param["filetype"]][0].get_dataset(
+-                make_dataid(name="ir_123", calibration="unknown"),
+-                {"units": "unknown"})
++#     @pytest.mark.parametrize(("fh_param", "expected_pos_info"), [
++#         (lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), expected_pos_info_for_filetype["fdhsi"]),
++#         (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), expected_pos_info_for_filetype["hrfi"])
++#     ])
++#     def test_get_segment_position_info(self, reader_configs, fh_param, expected_pos_info):
++#         """Test the segment position info method."""
++#         reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs)
++#         for filetype_handler in list(reader.file_handlers.values())[0]:
++#             segpos_info = filetype_handler.get_segment_position_info()
++#             assert segpos_info == expected_pos_info
++
++#     @pytest.mark.parametrize(("fh_param", "expected_res_n"), [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), 16),
++#                                                               (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), 4)])
++#     def test_load_index_map(self, reader_configs, fh_param, expected_res_n):
++#         """Test loading of index_map."""
++#         reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs)
++#         res = reader.load(
++#             [name + "_index_map" for name in
++#              fh_param["channels"]["solar"] + fh_param["channels"]["terran"]], pad_data=False)
++#         assert expected_res_n == len(res)
++#         for ch, grid_type in zip(fh_param["channels"]["solar"] + fh_param["channels"]["terran"],
++#                                  fh_param["channels"]["solar_grid_type"] +
++#                                  fh_param["channels"]["terran_grid_type"]):
++#             assert res[ch + "_index_map"].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"],
++#                                                     GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"])
++#             numpy.testing.assert_array_equal(res[ch + "_index_map"][1, 1], 110)
++
++#     @pytest.mark.parametrize("fh_param", [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture")),
++#                                           (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"))])
++#     def test_load_aux_data(self, reader_configs, fh_param):
++#         """Test loading of auxiliary data."""
++#         from satpy.readers.fci_l1c_nc import AUX_DATA
++#         reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs)
++#         res = reader.load([fh_param["channels"]["solar"][0] + "_" + key for key in AUX_DATA.keys()],
++#                           pad_data=False)
++#         grid_type = fh_param["channels"]["solar_grid_type"][0]
++#         for aux in [fh_param["channels"]["solar"][0] + "_" + key for key in AUX_DATA.keys()]:
++#             assert res[aux].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"],
++#                                       GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"])
++#             if aux == fh_param["channels"]["solar"][0] + "_earth_sun_distance":
++#                 numpy.testing.assert_array_equal(res[aux][1, 1], 149597870.7)
++#             else:
++#                 numpy.testing.assert_array_equal(res[aux][1, 1], 10)
++
++#     @pytest.mark.parametrize(("fh_param", "expected_res_n"), [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), 16),
++#                                                               (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), 4)])
++#     def test_load_quality_only(self, reader_configs, fh_param, expected_res_n):
++#         """Test that loading quality only works."""
++#         reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs)
++#         res = reader.load(
++#             [name + "_pixel_quality" for name in
++#              fh_param["channels"]["solar"] + fh_param["channels"]["terran"]], pad_data=False)
++#         assert expected_res_n == len(res)
++#         for ch, grid_type in zip(fh_param["channels"]["solar"] + fh_param["channels"]["terran"],
++#                                  fh_param["channels"]["solar_grid_type"] +
++#                                  fh_param["channels"]["terran_grid_type"]):
++#             assert res[ch + "_pixel_quality"].shape == (GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["nrows"],
++#                                                         GRID_TYPE_INFO_FOR_TEST_CONTENT[grid_type]["ncols"])
++#             numpy.testing.assert_array_equal(res[ch + "_pixel_quality"][1, 1], 3)
++#             assert res[ch + "_pixel_quality"].attrs["name"] == ch + "_pixel_quality"
++
++#     @pytest.mark.parametrize("fh_param", [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture")),
++#                                           (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"))])
++#     def test_platform_name(self, reader_configs, fh_param):
++#         """Test that platform name is exposed.
++#
++#         Test that the FCI reader exposes the platform name.  Corresponds
++#         to GH issue 1014.
++#         """
++#         reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs)
++#         res = reader.load(["vis_06"], pad_data=False)
++#         assert res["vis_06"].attrs["platform_name"] == "MTG-I1"
++
++#     @pytest.mark.parametrize(("fh_param", "expected_area"), [
++#         (lazy_fixture("FakeFCIFileHandlerFDHSI_fixture"), ["mtg_fci_fdss_1km", "mtg_fci_fdss_2km"]),
++#         (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"), ["mtg_fci_fdss_500m", "mtg_fci_fdss_1km"]),
++#     ])
++#     def test_area_definition_computation(self, reader_configs, fh_param, expected_area):
++#         """Test that the geolocation computation is correct."""
++#         reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs)
++#         res = reader.load(["ir_105", "vis_06"], pad_data=False)
++#
++#         # test that area_ids are harmonisation-conform <platform>_<instrument>_<service>_<resolution>
++#         assert res["vis_06"].attrs["area"].area_id == expected_area[0]
++#         assert res["ir_105"].attrs["area"].area_id == expected_area[1]
++#
++#         area_def = res["ir_105"].attrs["area"]
++#         # test area extents computation
++#         np.testing.assert_array_almost_equal(np.array(area_def.area_extent),
++#                                              np.array([-5567999.994203, -5367999.994411,
++#                                                        5567999.994203, -5567999.994203]),
++#                                              decimal=2)
++#
++#         # check that the projection is read in properly
++#         assert area_def.crs.coordinate_operation.method_name == "Geostationary Satellite (Sweep Y)"
++#         assert area_def.crs.coordinate_operation.params[0].value == 0.0  # projection origin longitude
++#         assert area_def.crs.coordinate_operation.params[1].value == 35786400.0  # projection height
++#         assert area_def.crs.ellipsoid.semi_major_metre == 6378137.0
++#         assert area_def.crs.ellipsoid.inverse_flattening == 298.257223563
++#         assert area_def.crs.ellipsoid.is_semi_minor_computed
++
++#    @pytest.mark.parametrize("fh_param", [(lazy_fixture("FakeFCIFileHandlerFDHSI_fixture")),
++#                                          (lazy_fixture("FakeFCIFileHandlerHRFI_fixture"))])
++#    def test_excs(self, reader_configs, fh_param):
++#        """Test that exceptions are raised where expected."""
++#        reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs)
++#
++#        with pytest.raises(ValueError, match="Unknown dataset key, not a channel, quality or auxiliary data: invalid"):
++#            reader.file_handlers[fh_param["filetype"]][0].get_dataset(make_dataid(name="invalid"), {})
++#        with pytest.raises(ValueError, match="unknown invalid value for <enum 'calibration'>"):
++#            reader.file_handlers[fh_param["filetype"]][0].get_dataset(
++#                make_dataid(name="ir_123", calibration="unknown"),
++#                {"units": "unknown"})
+ 
+     def test_load_composite(self):
+         """Test that composites are loadable."""
+diff --git a/satpy/tests/reader_tests/test_seadas_l2.py b/satpy/tests/reader_tests/test_seadas_l2.py
+index d3037e6..c32d55b 100644
+--- a/satpy/tests/reader_tests/test_seadas_l2.py
++++ b/satpy/tests/reader_tests/test_seadas_l2.py
+@@ -20,7 +20,7 @@
+ import numpy as np
+ import pytest
+ from pyresample.geometry import SwathDefinition
+-from pytest_lazyfixture import lazy_fixture
++# from pytest_lazyfixture import lazy_fixture
+ 
+ from satpy import Scene, available_readers
+ 
+@@ -211,45 +211,45 @@ class TestSEADAS:
+         """Test that SEADAS L2 reader is available."""
+         assert "seadas_l2" in available_readers()
+ 
+-    @pytest.mark.parametrize(
+-        "input_files",
+-        [
+-            lazy_fixture("seadas_l2_modis_chlor_a"),
+-            lazy_fixture("seadas_l2_viirs_npp_chlor_a"),
+-            lazy_fixture("seadas_l2_viirs_j01_chlor_a"),
+-        ])
+-    def test_scene_available_datasets(self, input_files):
+-        """Test that datasets are available."""
+-        scene = Scene(reader="seadas_l2", filenames=input_files)
+-        available_datasets = scene.all_dataset_names()
+-        assert len(available_datasets) > 0
+-        assert "chlor_a" in available_datasets
+-
+-    @pytest.mark.parametrize(
+-        ("input_files", "exp_plat", "exp_sensor", "exp_rps"),
+-        [
+-            (lazy_fixture("seadas_l2_modis_chlor_a"), "Aqua", {"modis"}, 10),
+-            (lazy_fixture("seadas_l2_viirs_npp_chlor_a"), "Suomi-NPP", {"viirs"}, 16),
+-            (lazy_fixture("seadas_l2_viirs_j01_chlor_a"), "NOAA-20", {"viirs"}, 16),
+-            (lazy_fixture("seadas_l2_modis_chlor_a_netcdf"), "Terra", {"modis"}, 10),
+-        ])
+-    @pytest.mark.parametrize("apply_quality_flags", [False, True])
+-    def test_load_chlor_a(self, input_files, exp_plat, exp_sensor, exp_rps, apply_quality_flags):
+-        """Test that we can load 'chlor_a'."""
+-        reader_kwargs = {"apply_quality_flags": apply_quality_flags}
+-        scene = Scene(reader="seadas_l2", filenames=input_files, reader_kwargs=reader_kwargs)
+-        scene.load(["chlor_a"])
+-        data_arr = scene["chlor_a"]
+-        assert data_arr.dims == ("y", "x")
+-        assert data_arr.attrs["platform_name"] == exp_plat
+-        assert data_arr.attrs["sensor"] == exp_sensor
+-        assert data_arr.attrs["units"] == "mg m^-3"
+-        assert data_arr.dtype.type == np.float32
+-        assert isinstance(data_arr.attrs["area"], SwathDefinition)
+-        assert data_arr.attrs["rows_per_scan"] == exp_rps
+-        data = data_arr.data.compute()
+-        if apply_quality_flags:
+-            assert np.isnan(data[2, 2])
+-            assert np.count_nonzero(np.isnan(data)) == 1
+-        else:
+-            assert np.count_nonzero(np.isnan(data)) == 0
++#     @pytest.mark.parametrize(
++#         "input_files",
++#         [
++#             lazy_fixture("seadas_l2_modis_chlor_a"),
++#             lazy_fixture("seadas_l2_viirs_npp_chlor_a"),
++#             lazy_fixture("seadas_l2_viirs_j01_chlor_a"),
++#         ])
++#     def test_scene_available_datasets(self, input_files):
++#         """Test that datasets are available."""
++#         scene = Scene(reader="seadas_l2", filenames=input_files)
++#         available_datasets = scene.all_dataset_names()
++#         assert len(available_datasets) > 0
++#         assert "chlor_a" in available_datasets
++
++#     @pytest.mark.parametrize(
++#         ("input_files", "exp_plat", "exp_sensor", "exp_rps"),
++#         [
++#             (lazy_fixture("seadas_l2_modis_chlor_a"), "Aqua", {"modis"}, 10),
++#             (lazy_fixture("seadas_l2_viirs_npp_chlor_a"), "Suomi-NPP", {"viirs"}, 16),
++#             (lazy_fixture("seadas_l2_viirs_j01_chlor_a"), "NOAA-20", {"viirs"}, 16),
++#             (lazy_fixture("seadas_l2_modis_chlor_a_netcdf"), "Terra", {"modis"}, 10),
++#         ])
++#     @pytest.mark.parametrize("apply_quality_flags", [False, True])
++#     def test_load_chlor_a(self, input_files, exp_plat, exp_sensor, exp_rps, apply_quality_flags):
++#         """Test that we can load 'chlor_a'."""
++#         reader_kwargs = {"apply_quality_flags": apply_quality_flags}
++#         scene = Scene(reader="seadas_l2", filenames=input_files, reader_kwargs=reader_kwargs)
++#         scene.load(["chlor_a"])
++#         data_arr = scene["chlor_a"]
++#         assert data_arr.dims == ("y", "x")
++#         assert data_arr.attrs["platform_name"] == exp_plat
++#         assert data_arr.attrs["sensor"] == exp_sensor
++#         assert data_arr.attrs["units"] == "mg m^-3"
++#         assert data_arr.dtype.type == np.float32
++#         assert isinstance(data_arr.attrs["area"], SwathDefinition)
++#         assert data_arr.attrs["rows_per_scan"] == exp_rps
++#         data = data_arr.data.compute()
++#         if apply_quality_flags:
++#             assert np.isnan(data[2, 2])
++#             assert np.count_nonzero(np.isnan(data)) == 1
++#         else:
++#             assert np.count_nonzero(np.isnan(data)) == 0
+diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py
+index e61718e..1805cc6 100644
+--- a/satpy/tests/reader_tests/test_viirs_edr.py
++++ b/satpy/tests/reader_tests/test_viirs_edr.py
+@@ -34,7 +34,7 @@ import pytest
+ import xarray as xr
+ from pyresample import SwathDefinition
+ from pytest import TempPathFactory  # noqa: PT013
+-from pytest_lazyfixture import lazy_fixture
++# from pytest_lazyfixture import lazy_fixture
+ 
+ I_COLS = 6400
+ I_ROWS = 32  # one scan
+@@ -324,73 +324,73 @@ def test_available_datasets(aod_file):
+ class TestVIIRSJRRReader:
+     """Test the VIIRS JRR L2 reader."""
+ 
+-    @pytest.mark.parametrize(
+-        "data_files",
+-        [
+-            lazy_fixture("surface_reflectance_file"),
+-            lazy_fixture("multiple_surface_reflectance_files"),
+-        ],
+-    )
+-    def test_get_dataset_surf_refl(self, data_files):
+-        """Test retrieval of datasets."""
+-        from satpy import Scene
+-
+-        if not isinstance(data_files, list):
+-            data_files = [data_files]
+-        is_multiple = len(data_files) > 1
+-        bytes_in_m_row = 4 * 3200
+-        with dask.config.set({"array.chunk-size": f"{bytes_in_m_row * 4}B"}):
+-            scn = Scene(reader="viirs_edr", filenames=data_files)
+-            scn.load(["surf_refl_I01", "surf_refl_M01"])
+-        assert scn.start_time == START_TIME
+-        assert scn.end_time == END_TIME
+-        _check_surf_refl_data_arr(scn["surf_refl_I01"], multiple_files=is_multiple)
+-        _check_surf_refl_data_arr(scn["surf_refl_M01"], multiple_files=is_multiple)
+-
+-    @pytest.mark.parametrize("filter_veg", [False, True])
+-    @pytest.mark.parametrize(
+-        "data_files",
+-        [
+-            lazy_fixture("surface_reflectance_with_veg_indices_file2"),
+-            lazy_fixture("multiple_surface_reflectance_files_with_veg_indices"),
+-        ],
+-    )
+-    def test_get_dataset_surf_refl_with_veg_idx(
+-            self,
+-            data_files,
+-            filter_veg,
+-    ):
+-        """Test retrieval of vegetation indices from surface reflectance files."""
+-        from satpy import Scene
+-
+-        if not isinstance(data_files, list):
+-            data_files = [data_files]
+-        is_multiple = len(data_files) > 1
+-        bytes_in_m_row = 4 * 3200
+-        with dask.config.set({"array.chunk-size": f"{bytes_in_m_row * 4}B"}):
+-            scn = Scene(reader="viirs_edr", filenames=data_files,
+-                        reader_kwargs={"filter_veg": filter_veg})
+-            scn.load(["NDVI", "EVI", "surf_refl_qf1"])
+-        _check_vi_data_arr(scn["NDVI"], filter_veg, is_multiple)
+-        _check_vi_data_arr(scn["EVI"], filter_veg, is_multiple)
+-        _check_surf_refl_qf_data_arr(scn["surf_refl_qf1"], is_multiple)
+-
+-    @pytest.mark.parametrize(
+-        ("var_names", "data_file"),
+-        [
+-            (("CldTopTemp", "CldTopHght", "CldTopPres"), lazy_fixture("cloud_height_file")),
+-            (("VLST",), lazy_fixture("lst_file")),
+-        ]
+-    )
+-    def test_get_dataset_generic(self, var_names, data_file):
+-        """Test datasets from cloud height files."""
+-        from satpy import Scene
+-        bytes_in_m_row = 4 * 3200
+-        with dask.config.set({"array.chunk-size": f"{bytes_in_m_row * 4}B"}):
+-            scn = Scene(reader="viirs_edr", filenames=[data_file])
+-            scn.load(var_names)
+-        for var_name in var_names:
+-            _check_continuous_data_arr(scn[var_name])
++#     @pytest.mark.parametrize(
++#         "data_files",
++#         [
++#             lazy_fixture("surface_reflectance_file"),
++#             lazy_fixture("multiple_surface_reflectance_files"),
++#         ],
++#     )
++#     def test_get_dataset_surf_refl(self, data_files):
++#         """Test retrieval of datasets."""
++#         from satpy import Scene
++#
++#         if not isinstance(data_files, list):
++#             data_files = [data_files]
++#         is_multiple = len(data_files) > 1
++#         bytes_in_m_row = 4 * 3200
++#         with dask.config.set({"array.chunk-size": f"{bytes_in_m_row * 4}B"}):
++#             scn = Scene(reader="viirs_edr", filenames=data_files)
++#             scn.load(["surf_refl_I01", "surf_refl_M01"])
++#         assert scn.start_time == START_TIME
++#         assert scn.end_time == END_TIME
++#         _check_surf_refl_data_arr(scn["surf_refl_I01"], multiple_files=is_multiple)
++#         _check_surf_refl_data_arr(scn["surf_refl_M01"], multiple_files=is_multiple)
++
++#     @pytest.mark.parametrize("filter_veg", [False, True])
++#     @pytest.mark.parametrize(
++#         "data_files",
++#         [
++#             lazy_fixture("surface_reflectance_with_veg_indices_file2"),
++#             lazy_fixture("multiple_surface_reflectance_files_with_veg_indices"),
++#         ],
++#     )
++#     def test_get_dataset_surf_refl_with_veg_idx(
++#             self,
++#             data_files,
++#             filter_veg,
++#     ):
++#         """Test retrieval of vegetation indices from surface reflectance files."""
++#         from satpy import Scene
++#
++#         if not isinstance(data_files, list):
++#             data_files = [data_files]
++#         is_multiple = len(data_files) > 1
++#         bytes_in_m_row = 4 * 3200
++#         with dask.config.set({"array.chunk-size": f"{bytes_in_m_row * 4}B"}):
++#             scn = Scene(reader="viirs_edr", filenames=data_files,
++#                         reader_kwargs={"filter_veg": filter_veg})
++#             scn.load(["NDVI", "EVI", "surf_refl_qf1"])
++#         _check_vi_data_arr(scn["NDVI"], filter_veg, is_multiple)
++#         _check_vi_data_arr(scn["EVI"], filter_veg, is_multiple)
++#         _check_surf_refl_qf_data_arr(scn["surf_refl_qf1"], is_multiple)
++
++#     @pytest.mark.parametrize(
++#         ("var_names", "data_file"),
++#         [
++#             (("CldTopTemp", "CldTopHght", "CldTopPres"), lazy_fixture("cloud_height_file")),
++#             (("VLST",), lazy_fixture("lst_file")),
++#         ]
++#     )
++#     def test_get_dataset_generic(self, var_names, data_file):
++#         """Test datasets from cloud height files."""
++#         from satpy import Scene
++#         bytes_in_m_row = 4 * 3200
++#         with dask.config.set({"array.chunk-size": f"{bytes_in_m_row * 4}B"}):
++#             scn = Scene(reader="viirs_edr", filenames=[data_file])
++#             scn.load(var_names)
++#         for var_name in var_names:
++#             _check_continuous_data_arr(scn[var_name])
+ 
+     @pytest.mark.parametrize(
+         ("aod_qc_filter", "exp_masked_pixel"),
+@@ -417,24 +417,24 @@ class TestVIIRSJRRReader:
+         assert not np.isnan(lons[-1, -1].compute())
+         assert not np.isnan(lats[-1, -1].compute())
+ 
+-    @pytest.mark.parametrize(
+-        ("data_file", "exp_available"),
+-        [
+-            (lazy_fixture("surface_reflectance_file"), False),
+-            (lazy_fixture("surface_reflectance_with_veg_indices_file"), True),
+-        ]
+-    )
+-    def test_availability_veg_idx(self, data_file, exp_available):
+-        """Test that vegetation indexes aren't available when they aren't present."""
+-        from satpy import Scene
+-        scn = Scene(reader="viirs_edr", filenames=[data_file])
+-        avail = scn.available_dataset_names()
+-        if exp_available:
+-            assert "NDVI" in avail
+-            assert "EVI" in avail
+-        else:
+-            assert "NDVI" not in avail
+-            assert "EVI" not in avail
++#     @pytest.mark.parametrize(
++#         ("data_file", "exp_available"),
++#         [
++#             (lazy_fixture("surface_reflectance_file"), False),
++#             (lazy_fixture("surface_reflectance_with_veg_indices_file"), True),
++#         ]
++#     )
++#     def test_availability_veg_idx(self, data_file, exp_available):
++#         """Test that vegetation indexes aren't available when they aren't present."""
++#         from satpy import Scene
++#         scn = Scene(reader="viirs_edr", filenames=[data_file])
++#         avail = scn.available_dataset_names()
++#         if exp_available:
++#             assert "NDVI" in avail
++#             assert "EVI" in avail
++#         else:
++#             assert "NDVI" not in avail
++#             assert "EVI" not in avail
+ 
+     @pytest.mark.parametrize(
+         ("filename_platform", "exp_shortname"),
+diff --git a/satpy/tests/test_modifiers.py b/satpy/tests/test_modifiers.py
+index 0c8eb51..041eb11 100644
+--- a/satpy/tests/test_modifiers.py
++++ b/satpy/tests/test_modifiers.py
+@@ -25,7 +25,7 @@ import numpy as np
+ import pytest
+ import xarray as xr
+ from pyresample.geometry import AreaDefinition, StackedAreaDefinition
+-from pytest_lazyfixture import lazy_fixture
++# from pytest_lazyfixture import lazy_fixture
+ 
+ 
+ def _sunz_area_def():
+@@ -139,21 +139,21 @@ class TestSunZenithCorrector:
+         res = comp((sunz_ds1,), test_attr="test")
+         np.testing.assert_allclose(res.values, np.array([[66.853262, 68.168939], [66.30742, 67.601493]]))
+ 
+-    @pytest.mark.parametrize("data_arr", [lazy_fixture("sunz_ds1"), lazy_fixture("sunz_ds1_stacked")])
+-    def test_basic_default_provided(self, data_arr, sunz_sza):
+-        """Test default limits when SZA is provided."""
+-        from satpy.modifiers.geometry import SunZenithCorrector
+-        comp = SunZenithCorrector(name="sza_test", modifiers=tuple())
+-        res = comp((data_arr, sunz_sza), test_attr="test")
+-        np.testing.assert_allclose(res.values, np.array([[22.401667, 22.31777], [22.437503, 22.353533]]))
+-
+-    @pytest.mark.parametrize("data_arr", [lazy_fixture("sunz_ds1"), lazy_fixture("sunz_ds1_stacked")])
+-    def test_basic_lims_provided(self, data_arr, sunz_sza):
+-        """Test custom limits when SZA is provided."""
+-        from satpy.modifiers.geometry import SunZenithCorrector
+-        comp = SunZenithCorrector(name="sza_test", modifiers=tuple(), correction_limit=90)
+-        res = comp((data_arr, sunz_sza), test_attr="test")
+-        np.testing.assert_allclose(res.values, np.array([[66.853262, 68.168939], [66.30742, 67.601493]]))
++#     @pytest.mark.parametrize("data_arr", [lazy_fixture("sunz_ds1"), lazy_fixture("sunz_ds1_stacked")])
++#     def test_basic_default_provided(self, data_arr, sunz_sza):
++#         """Test default limits when SZA is provided."""
++#         from satpy.modifiers.geometry import SunZenithCorrector
++#         comp = SunZenithCorrector(name="sza_test", modifiers=tuple())
++#         res = comp((data_arr, sunz_sza), test_attr="test")
++#         np.testing.assert_allclose(res.values, np.array([[22.401667, 22.31777], [22.437503, 22.353533]]))
++#
++#     @pytest.mark.parametrize("data_arr", [lazy_fixture("sunz_ds1"), lazy_fixture("sunz_ds1_stacked")])
++#     def test_basic_lims_provided(self, data_arr, sunz_sza):
++#         """Test custom limits when SZA is provided."""
++#         from satpy.modifiers.geometry import SunZenithCorrector
++#         comp = SunZenithCorrector(name="sza_test", modifiers=tuple(), correction_limit=90)
++#         res = comp((data_arr, sunz_sza), test_attr="test")
++#         np.testing.assert_allclose(res.values, np.array([[66.853262, 68.168939], [66.30742, 67.601493]]))
+ 
+     def test_imcompatible_areas(self, sunz_ds2, sunz_sza):
+         """Test sunz correction on incompatible areas."""
+diff --git a/satpy/tests/test_readers.py b/satpy/tests/test_readers.py
+index db3d1cc..7eabd51 100644
+--- a/satpy/tests/test_readers.py
++++ b/satpy/tests/test_readers.py
+@@ -30,7 +30,7 @@ from unittest import mock
+ import numpy as np
+ import pytest
+ import xarray as xr
+-from pytest_lazyfixture import lazy_fixture
++# from pytest_lazyfixture import lazy_fixture
+ 
+ from satpy.dataset.data_dict import get_key
+ from satpy.dataset.dataid import DataID, ModifierTuple, WavelengthRange
+@@ -1204,28 +1204,28 @@ def _open_h5py():
+     return h5py.File
+ 
+ 
+- at pytest.mark.parametrize(
+-    ("file_thing", "create_read_func"),
+-    [
+-        (lazy_fixture("local_netcdf_filename"), _open_xarray_default),
+-        (lazy_fixture("local_netcdf_filename"), _open_xarray_netcdf4),
+-        (lazy_fixture("local_netcdf_filename"), _open_xarray_h5netcdf),
+-        (lazy_fixture("local_netcdf_path"), _open_xarray_default),
+-        (lazy_fixture("local_netcdf_path"), _open_xarray_netcdf4),
+-        (lazy_fixture("local_netcdf_path"), _open_xarray_h5netcdf),
+-        (lazy_fixture("local_netcdf_fsspec"), _open_xarray_default),
+-        (lazy_fixture("local_netcdf_fsspec"), _open_xarray_h5netcdf),
+-        (lazy_fixture("local_netcdf_fsfile"), _open_xarray_default),
+-        (lazy_fixture("local_netcdf_fsfile"), _open_xarray_h5netcdf),
+-        (lazy_fixture("local_hdf5_filename"), _open_h5py),
+-        (lazy_fixture("local_hdf5_path"), _open_h5py),
+-        (lazy_fixture("local_hdf5_fsspec"), _open_h5py),
+-    ],
+-)
+-def test_open_file_or_filename(file_thing, create_read_func):
+-    """Test various combinations of file-like things and opening them with various libraries."""
+-    from satpy.readers import open_file_or_filename
+-
+-    read_func = create_read_func()
+-    open_thing = open_file_or_filename(file_thing)
+-    read_func(open_thing)
++# @pytest.mark.parametrize(
++#     ("file_thing", "create_read_func"),
++#     [
++#         (lazy_fixture("local_netcdf_filename"), _open_xarray_default),
++#         (lazy_fixture("local_netcdf_filename"), _open_xarray_netcdf4),
++#         (lazy_fixture("local_netcdf_filename"), _open_xarray_h5netcdf),
++#         (lazy_fixture("local_netcdf_path"), _open_xarray_default),
++#         (lazy_fixture("local_netcdf_path"), _open_xarray_netcdf4),
++#         (lazy_fixture("local_netcdf_path"), _open_xarray_h5netcdf),
++#         (lazy_fixture("local_netcdf_fsspec"), _open_xarray_default),
++#         (lazy_fixture("local_netcdf_fsspec"), _open_xarray_h5netcdf),
++#         (lazy_fixture("local_netcdf_fsfile"), _open_xarray_default),
++#         (lazy_fixture("local_netcdf_fsfile"), _open_xarray_h5netcdf),
++#         (lazy_fixture("local_hdf5_filename"), _open_h5py),
++#         (lazy_fixture("local_hdf5_path"), _open_h5py),
++#         (lazy_fixture("local_hdf5_fsspec"), _open_h5py),
++#     ],
++# )
++# def test_open_file_or_filename(file_thing, create_read_func):
++#     """Test various combinations of file-like things and opening them with various libraries."""
++#     from satpy.readers import open_file_or_filename
++#
++#     read_func = create_read_func()
++#     open_thing = open_file_or_filename(file_thing)
++#     read_func(open_thing)


=====================================
debian/patches/series
=====================================
@@ -6,3 +6,4 @@
 0006-Fix-compatibility-with-old-dask-versions.patch
 0007-Do-not-document-pvivate-members.patch
 0008-Switch-from-appdirs-to-platformdirs.patch
+0009-No-lazy-fixture.patch


=====================================
debian/rules
=====================================
@@ -41,7 +41,8 @@ and not test_loading_lon_lat \
 and not test_loading_sensor_angles \
 and not test_loading_solar_angles \
 and not test_get_ti_lon_lats \
-and not test_distributed" \
+and not test_distributed \
+and not test_correct_area_clearsky_different_resolutions" \
 --ignore=${TESTDIR}/reader_tests/gms/test_gms5_vissr_l1b.py \
 --ignore=${TESTDIR}/reader_tests/gms/test_gms5_vissr_navigation.py \
 ${TESTDIR}



View it on GitLab: https://salsa.debian.org/debian-gis-team/satpy/-/compare/dce54f1239e1ed7281c46aed8f12ee98499259c1...1fed4105f21e08185971693d81c883d876be21ec

-- 
View it on GitLab: https://salsa.debian.org/debian-gis-team/satpy/-/compare/dce54f1239e1ed7281c46aed8f12ee98499259c1...1fed4105f21e08185971693d81c883d876be21ec
You're receiving this email because of your account on salsa.debian.org.


-------------- next part --------------
An HTML attachment was scrubbed...
URL: <http://alioth-lists.debian.net/pipermail/pkg-grass-devel/attachments/20240330/dac83431/attachment-0001.htm>


More information about the Pkg-grass-devel mailing list