diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md index f866a6cfab..1867599b8f 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -1,6 +1,7 @@ --- name: Bug report about: Create a report to help us improve +type: 'bug' --- diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md index 854663574b..f5c967d2f0 100644 --- a/.github/ISSUE_TEMPLATE/feature_request.md +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -1,6 +1,7 @@ --- name: Feature request about: Suggest an idea for this project +type: 'feature' --- diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index c9f5aa1f73..e18d23d63e 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -18,7 +18,7 @@ jobs: fail-fast: true matrix: os: ["windows-latest", "ubuntu-latest", "macos-latest"] - python-version: ["3.9", "3.11", "3.12"] + python-version: ["3.10", "3.11", "3.12"] experimental: [false] include: - python-version: "3.12" @@ -41,8 +41,9 @@ jobs: miniforge-version: latest python-version: ${{ matrix.python-version }} activate-environment: test-environment - mamba-version: "*" channels: conda-forge + conda-remove-defaults: true + channel-priority: strict - name: Set cache environment variables shell: bash -l {0} @@ -114,7 +115,7 @@ jobs: pytest -n auto --cov=satpy satpy/tests --cov-report=xml --cov-report= - name: Upload unittest coverage to Codecov - uses: codecov/codecov-action@v4 + uses: codecov/codecov-action@v5 with: flags: unittests file: ./coverage.xml @@ -135,7 +136,7 @@ jobs: coverage xml - name: Upload behaviour test coverage to Codecov - uses: codecov/codecov-action@v4 + uses: codecov/codecov-action@v5 with: flags: behaviourtests file: ./coverage.xml diff --git a/.github/workflows/deploy-sdist.yaml b/.github/workflows/deploy-sdist.yaml index 130b3a43b8..45b90be73b 100644 --- a/.github/workflows/deploy-sdist.yaml +++ b/.github/workflows/deploy-sdist.yaml @@ -19,11 +19,11 @@ jobs: shell: bash -l {0} run: | python -m pip install -q build - python -m build -s + python -m build - name: Publish package to PyPI if: github.event.action == 'published' - uses: pypa/gh-action-pypi-publish@v1.10.0 + uses: pypa/gh-action-pypi-publish@v1.12.4 with: user: __token__ password: ${{ secrets.pypi_password }} diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index a6c4b2b3d2..741d23cc1c 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -3,23 +3,23 @@ fail_fast: false repos: - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. - rev: 'v0.6.3' + rev: 'v0.9.4' hooks: - id: ruff - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.6.0 + rev: v5.0.0 hooks: - id: trailing-whitespace - id: end-of-file-fixer - id: check-yaml args: [--unsafe] - repo: https://github.com/PyCQA/bandit - rev: '1.7.9' # Update me! + rev: '1.8.2' # Update me! 
hooks: - id: bandit args: [--ini, .bandit] - repo: https://github.com/pre-commit/mirrors-mypy - rev: 'v1.11.2' # Use the sha / tag you want to point at + rev: 'v1.14.1' # Use the sha / tag you want to point at hooks: - id: mypy additional_dependencies: @@ -27,9 +27,9 @@ repos: - types-setuptools - types-PyYAML - types-requests - args: ["--python-version", "3.9", "--ignore-missing-imports"] + args: ["--python-version", "3.10", "--ignore-missing-imports"] - repo: https://github.com/pycqa/isort - rev: 5.13.2 + rev: 6.0.0 hooks: - id: isort language_version: python3 diff --git a/AUTHORS.md b/AUTHORS.md index c5f8607c85..4e3821c54e 100644 --- a/AUTHORS.md +++ b/AUTHORS.md @@ -7,6 +7,7 @@ The following people have made contributions to this project: +- [Youva Aoun (YouvaEUMex)](https://github.com/YouvaEUMex) - [Trygve Aspenes (TAlonglong)](https://github.com/TAlonglong) - [Talfan Barnie (TalfanBarnie)](https://github.com/TalfanBarnie) - [Jonathan Beavers (jon4than)](https://github.com/jon4than) @@ -37,7 +38,10 @@ The following people have made contributions to this project: - [Gerrit Holl (gerritholl)](https://github.com/gerritholl) - Deutscher Wetterdienst - [David Hoese (djhoese)](https://github.com/djhoese) - [Marc Honnorat (honnorat)](https://github.com/honnorat) +- [Chung-Hsiang Horng (chorng)](https://github.com/chorng) +- [Mario Hros (k3a)](https://github.com/k3a) - [Lloyd Hughes (system123)](https://github.com/system123) +- [Sara Hörnquist (shornqui)](https://github.com/shornqui) - [Mikhail Itkin (mitkin)](https://github.com/mitkin) - [Tommy Jasmin (tommyjasmin)](https://github.com/tommyjasmin) - [Jactry Zeng](https://github.com/jactry) @@ -45,8 +49,10 @@ The following people have made contributions to this project: - [Sauli Joro (sjoro)](https://github.com/sjoro) - [Pouria Khalaj](https://github.com/pkhalaj) - [Janne Kotro (jkotro)](https://github.com/jkotro) +- [Beke Kremmling (bkremmli)](https://github.com/bkremmli) - Deutscher Wetterdienst - [Ralph Kuehn (ralphk11)](https://github.com/ralphk11) - [Panu Lahtinen (pnuu)](https://github.com/pnuu) +- [Clement Laplace (ClementLaplace)](https://github.com/ClementLaplace) - [Jussi Leinonen (jleinonen)](https://github.com/jleinonen) - meteoswiss - [Thomas Leppelt (m4sth0)](https://github.com/m4sth0) - Deutscher Wetterdienst - [Lu Liu (yukaribbba)](https://github.com/yukaribbba) @@ -55,6 +61,7 @@ The following people have made contributions to this project: - [Luca Merucci (lmeru)](https://github.com/lmeru) - [Lucas Meyer (LTMeyer)](https://github.com/LTMeyer) - [Zifeng Mo (Isotr0py)](https://github.com/Isotr0py) +- [David Navia (dnaviap)](https://github.com/dnaviap) - [Ondrej Nedelcev (nedelceo)](https://github.com/nedelceo) - [Oana Nicola](https://github.com/) - [Esben S.
Nielsen (storpipfugl)](https://github.com/storpipfugl) @@ -79,12 +86,14 @@ The following people have made contributions to this project: - [Michael Schmutz (Graenni)](https://github.com/Graenni) - Meteotest AG - [Hauke Schulz (observingClouds)](https://github.com/observingClouds) - [Jakub Seidl (seidlj)](https://github.com/seidlj) +- [Will Sharpe (wjsharpe)](https://github.com/wjsharpe) - [Eysteinn Sigurðsson (eysteinn)](https://github.com/eysteinn) - [Jean-Luc Shaw (jeanlucshaw)](https://github.com/jeanlucshaw) - [Dario Stelitano (bornagain1981)](https://github.com/bornagain1981) - [Johan Strandgren (strandgren)](https://github.com/strandgren) - [Matias Takala (elfsprite)](https://github.com/elfsprite) - [Taiga Tsukada (tsukada-cs)](https://github.com/tsukada-cs) +- [Antonio Valentino](https://github.com/avalentino) - [Christian Versloot (christianversloot)](https://github.com/christianversloot) - [Helga Weber (helgaweb)](https://github.com/helgaweb) - [hazbottles (hazbottles)](https://github.com/hazbottles) @@ -97,5 +106,5 @@ The following people have made contributions to this project: - [Clement Laplace (ClementLaplace)](https://github.com/ClementLaplace) - [Will Sharpe (wjsharpe)](https://github.com/wjsharpe) - [Sara Hörnquist (shornqui)](https://github.com/shornqui) -- [Antonio Valentino](https://github.com/avalentino) - [Clément (ludwigvonkoopa)](https://github.com/ludwigVonKoopa) +- [Xuanhan Lai (sgxl)](https://github.com/sgxl) diff --git a/CHANGELOG.md b/CHANGELOG.md index b421696af8..b709083c5c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,170 @@ +## Version 0.54.0 (2025/01/20) + +### Issues Closed + +* [Issue 3020](https://github.com/pytroll/satpy/issues/3020) - Re-implement essl_colorized_low_level_moisture using colorize ([PR 3021](https://github.com/pytroll/satpy/pull/3021) by [@gerritholl](https://github.com/gerritholl)) +* [Issue 3009](https://github.com/pytroll/satpy/issues/3009) - artefacts in FCI RGBs using 3.8 µm ([PR 3013](https://github.com/pytroll/satpy/pull/3013) by [@gerritholl](https://github.com/gerritholl)) +* [Issue 2991](https://github.com/pytroll/satpy/issues/2991) - Resampling MTG FCI high res bands fails when the resample includes bands at different spatial resolutions +* [Issue 2981](https://github.com/pytroll/satpy/issues/2981) - Fix the bug with `satpy` when using `numpy 2.x` which leads to `SEVIRI` resampled files having a double size ([PR 2983](https://github.com/pytroll/satpy/pull/2983) by [@pkhalaj](https://github.com/pkhalaj)) +* [Issue 2979](https://github.com/pytroll/satpy/issues/2979) - Improving resolution when setting extent +* [Issue 2977](https://github.com/pytroll/satpy/issues/2977) - CRS data is being printed to title of image +* [Issue 2975](https://github.com/pytroll/satpy/issues/2975) - can't create ABI geo_color composite +* [Issue 2963](https://github.com/pytroll/satpy/issues/2963) - ahi_hrit reader cannot create a Scene +* [Issue 2814](https://github.com/pytroll/satpy/issues/2814) - Reading LI L2 point data is not daskified ([PR 2985](https://github.com/pytroll/satpy/pull/2985) by [@ClementLaplace](https://github.com/ClementLaplace)) +* [Issue 2566](https://github.com/pytroll/satpy/issues/2566) - Wrong version numbers at readthedocs +* [Issue 1997](https://github.com/pytroll/satpy/issues/1997) - Resampling from SwathDefinition to AreaDefinition fails with OSError and AssertionError +* [Issue 1788](https://github.com/pytroll/satpy/issues/1788) - integration / regression tests that compare images +* [Issue 
1755](https://github.com/pytroll/satpy/issues/1755) - Store project metadata in pyproject.toml +* [Issue 1240](https://github.com/pytroll/satpy/issues/1240) - iber projection lost in the North Pacific + +In this release 14 issues were closed. + +### Pull Requests Merged + +#### Bugs fixed + +* [PR 3035](https://github.com/pytroll/satpy/pull/3035) - Pin dask to avoid dataframe problem +* [PR 3030](https://github.com/pytroll/satpy/pull/3030) - Fix sdist tarball including unnecessary files +* [PR 2995](https://github.com/pytroll/satpy/pull/2995) - Add new ABI L2 "CPS" variable name for Cloud Particle Size +* [PR 2985](https://github.com/pytroll/satpy/pull/2985) - li2_nc reader daskified ([2814](https://github.com/pytroll/satpy/issues/2814)) +* [PR 2983](https://github.com/pytroll/satpy/pull/2983) - Fix dtype promotion in SEVIRI native reader ([2981](https://github.com/pytroll/satpy/issues/2981)) +* [PR 2976](https://github.com/pytroll/satpy/pull/2976) - Fix dtype promotion in `mersi2_l1b` reader +* [PR 2969](https://github.com/pytroll/satpy/pull/2969) - Fix geos proj parameters for Insat 3d satellites +* [PR 2959](https://github.com/pytroll/satpy/pull/2959) - Modified the issue with the calibration coefficient indices for FY-3 satellite data reader + +#### Features added + +* [PR 3034](https://github.com/pytroll/satpy/pull/3034) - Set issue type in templates +* [PR 3021](https://github.com/pytroll/satpy/pull/3021) - Change ESSL colorisation approach ([3020](https://github.com/pytroll/satpy/issues/3020)) +* [PR 3013](https://github.com/pytroll/satpy/pull/3013) - Clip negative FCI radiances ([3009](https://github.com/pytroll/satpy/issues/3009)) +* [PR 3007](https://github.com/pytroll/satpy/pull/3007) - Add t865 dataset to olci l2 list ([1767](https://github.com/pytroll/satpy/issues/1767)) +* [PR 2999](https://github.com/pytroll/satpy/pull/2999) - Add Accsos image comparison tests +* [PR 2941](https://github.com/pytroll/satpy/pull/2941) - Refactor MVIRI dataset access +* [PR 2565](https://github.com/pytroll/satpy/pull/2565) - Add level-1 readers for the arctic weather satellite data + +#### Clean ups + +* [PR 3030](https://github.com/pytroll/satpy/pull/3030) - Fix sdist tarball including unnecessary files +* [PR 3014](https://github.com/pytroll/satpy/pull/3014) - Remove xarray-datatree dependency from CI +* [PR 3010](https://github.com/pytroll/satpy/pull/3010) - Remove version limit on pytest in CI + +In this release 18 pull requests were closed. + + +## Version 0.53.0 (2024/11/08) + +### Issues Closed + +* [Issue 2960](https://github.com/pytroll/satpy/issues/2960) - netcdf4 version causes error ([PR 2961](https://github.com/pytroll/satpy/pull/2961) by [@sfinkens](https://github.com/sfinkens)) +* [Issue 2952](https://github.com/pytroll/satpy/issues/2952) - Altitude, LandCover, and LandSeaMask are missing in the `mersi_ll_l1b` reader for FY3E L1B ([PR 2953](https://github.com/pytroll/satpy/pull/2953) by [@chorng](https://github.com/chorng)) +* [Issue 2948](https://github.com/pytroll/satpy/issues/2948) - "Missing" platform abbreviation causes unexpected error when loading data array in Scene ([PR 2949](https://github.com/pytroll/satpy/pull/2949) by [@joleenf](https://github.com/joleenf)) + +In this release 3 issues were closed.
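
Several of the fixes in this changelog (e.g. [PR 2983] for the SEVIRI native reader, [PR 2976] for `mersi2_l1b`, and [PR 2950] for `SunZenithReduction`) address the same root cause: dtype promotion under NumPy 2. A minimal standalone sketch of the NumPy behaviour behind them — illustrative only, not Satpy code; the array shape and coefficient value are invented:

```python
import numpy as np

counts = np.ones((3712, 3712), dtype=np.float32)
gain = np.float64(0.02)  # hypothetical calibration coefficient read from file metadata

# Under NumPy 2 (NEP 50), a float64 NumPy scalar promotes the whole result
# to float64, doubling the memory footprint of a large scene:
print((counts * gain).dtype)  # float64

# Casting the coefficient to the data's own dtype keeps the result float32:
print((counts * counts.dtype.type(gain)).dtype)  # float32
```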
+ +### Pull Requests Merged + +#### Bugs fixed + +* [PR 2971](https://github.com/pytroll/satpy/pull/2971) - Pin flexparser before it breaks pint +* [PR 2970](https://github.com/pytroll/satpy/pull/2970) - Remove rayleigh correction on VIIRS false_color for I02 band +* [PR 2968](https://github.com/pytroll/satpy/pull/2968) - Remove unneeded call to private scipy function in SAR reader +* [PR 2965](https://github.com/pytroll/satpy/pull/2965) - Fix MODIS readers chunking compatibility with newer dask +* [PR 2961](https://github.com/pytroll/satpy/pull/2961) - Fix CF writer crashing with netcdf development version ([2960](https://github.com/pytroll/satpy/issues/2960)) +* [PR 2957](https://github.com/pytroll/satpy/pull/2957) - Bugfix the VIIRS lowres version of the day-microphysics. +* [PR 2956](https://github.com/pytroll/satpy/pull/2956) - Fix cira stretch upcasting the data +* [PR 2954](https://github.com/pytroll/satpy/pull/2954) - Fix Rayleigh correction to use the same datatype as the input data +* [PR 2950](https://github.com/pytroll/satpy/pull/2950) - Fix dtype promotion in `SunZenithReduction` +* [PR 2949](https://github.com/pytroll/satpy/pull/2949) - Add more platforms to VIIRS EDR reader ([2948](https://github.com/pytroll/satpy/issues/2948)) +* [PR 2930](https://github.com/pytroll/satpy/pull/2930) - Fix data type when getting a line offset for a segmented hrit_jma + +#### Features added + +* [PR 2973](https://github.com/pytroll/satpy/pull/2973) - Remove flexparser pinning +* [PR 2953](https://github.com/pytroll/satpy/pull/2953) - Add altitude, landcover, and landseamask to mersi_ll_l1b reader ([2952](https://github.com/pytroll/satpy/issues/2952)) +* [PR 2946](https://github.com/pytroll/satpy/pull/2946) - Update MODIS L1b reader with additional geoinfo datasets + +In this release 14 pull requests were closed. + + +## Version 0.52.1 (2024/10/23) + +### Issues Closed + +* [Issue 2942](https://github.com/pytroll/satpy/issues/2942) - 0.52.0 breaks `seviri_l2_grib`-reader with 'EUML2GribFileHandler' object has no attribute '_ssp_lon' ([PR 2943](https://github.com/pytroll/satpy/pull/2943) by [@strandgren](https://github.com/strandgren)) + +In this release 1 issue was closed. + +### Pull Requests Merged + +#### Bugs fixed + +* [PR 2944](https://github.com/pytroll/satpy/pull/2944) - Fix tests using `palettize` +* [PR 2943](https://github.com/pytroll/satpy/pull/2943) - Fix seviri_l2_grib end_time property bug. ([2942](https://github.com/pytroll/satpy/issues/2942)) + +In this release 2 pull requests were closed. + + +## Version 0.52.0 (2024/10/18) + +### Issues Closed + +* [Issue 2922](https://github.com/pytroll/satpy/issues/2922) - fci_l1c_nc reader ignoring MTG FDHSI segment 41 +* [Issue 2920](https://github.com/pytroll/satpy/issues/2920) - SEVIRI/FCI Water Vapour channel different normalization? 
+* [Issue 2917](https://github.com/pytroll/satpy/issues/2917) - Unpin mamba version in CI +* [Issue 2914](https://github.com/pytroll/satpy/issues/2914) - save.dataset - problem with MTG +* [Issue 2909](https://github.com/pytroll/satpy/issues/2909) - RuntimeError while compositing after resampling datasets +* [Issue 2907](https://github.com/pytroll/satpy/issues/2907) - The debug run reports an error, but there is no problem running after stopping at the breakpoint +* [Issue 2900](https://github.com/pytroll/satpy/issues/2900) - Eliminate dependency on external binaries of PublicDecompWT (xRITDecompress) by using pyPublicDecompWT +* [Issue 2897](https://github.com/pytroll/satpy/issues/2897) - generic_image reader returns data as float64 for PNG images +* [Issue 2887](https://github.com/pytroll/satpy/issues/2887) - "Don't know how to open the following files" ERROR in MTG-I1 LI data. +* [Issue 2884](https://github.com/pytroll/satpy/issues/2884) - MODIS and SEADAS test failures ([PR 2886](https://github.com/pytroll/satpy/pull/2886) by [@djhoese](https://github.com/djhoese)) +* [Issue 2869](https://github.com/pytroll/satpy/issues/2869) - ninjogeotiff writer should write gradient for P mode images ([PR 2870](https://github.com/pytroll/satpy/pull/2870) by [@gerritholl](https://github.com/gerritholl)) +* [Issue 2864](https://github.com/pytroll/satpy/issues/2864) - Documentation takes too long to build ([PR 2875](https://github.com/pytroll/satpy/pull/2875) by [@djhoese](https://github.com/djhoese)) +* [Issue 2839](https://github.com/pytroll/satpy/issues/2839) - Help about Netcdf Data +* [Issue 1974](https://github.com/pytroll/satpy/issues/1974) - debug_on() could write relevant versions +* [Issue 1266](https://github.com/pytroll/satpy/issues/1266) - Can pytroll process MetOp L0 Data? + +In this release 15 issues were closed. + +### Pull Requests Merged + +#### Bugs fixed + +* [PR 2939](https://github.com/pytroll/satpy/pull/2939) - Fix bogus no_op implementation +* [PR 2938](https://github.com/pytroll/satpy/pull/2938) - Update Landsat reader for compatibility with Pyspectral. 
+* [PR 2926](https://github.com/pytroll/satpy/pull/2926) - Import DataTree from xarray +* [PR 2923](https://github.com/pytroll/satpy/pull/2923) - Fix data promotion in `generic_image` reader and `satpy.composites.add_bands` +* [PR 2916](https://github.com/pytroll/satpy/pull/2916) - Fix deprecated "compositor" usage in modifier definitions +* [PR 2910](https://github.com/pytroll/satpy/pull/2910) - Remove deprecated usage of pyspectral's download_luts aerosol_type +* [PR 2896](https://github.com/pytroll/satpy/pull/2896) - Bugfix for Sentinel-2 radiance calculation +* [PR 2886](https://github.com/pytroll/satpy/pull/2886) - Update pyhdf-based arrs to be manually tokenized ([2884](https://github.com/pytroll/satpy/issues/2884)) + +#### Features added + +* [PR 2936](https://github.com/pytroll/satpy/pull/2936) - Drop python 3.9 ([2741](https://github.com/pytroll/satpy/issues/2741)) +* [PR 2933](https://github.com/pytroll/satpy/pull/2933) - Add no-op image_ready enhancement +* [PR 2931](https://github.com/pytroll/satpy/pull/2931) - Enhance visibility of missing dependencies +* [PR 2929](https://github.com/pytroll/satpy/pull/2929) - Replace patched `print` with capsys fixture +* [PR 2927](https://github.com/pytroll/satpy/pull/2927) - Use spline interpolation for faster processing +* [PR 2925](https://github.com/pytroll/satpy/pull/2925) - Fix types to allow float32 computations for SAR-C +* [PR 2913](https://github.com/pytroll/satpy/pull/2913) - Update `check_satpy` to use new `show_version` to display package versions +* [PR 2905](https://github.com/pytroll/satpy/pull/2905) - Mcd12q1 draft +* [PR 2904](https://github.com/pytroll/satpy/pull/2904) - Add reader for Landsat L1 data +* [PR 2902](https://github.com/pytroll/satpy/pull/2902) - Add OCI L2 BGC reader +* [PR 2899](https://github.com/pytroll/satpy/pull/2899) - Switch from Mambaforge to Miniforge +* [PR 2893](https://github.com/pytroll/satpy/pull/2893) - Fix AAPP L1b reader not to up-cast data to float64 +* [PR 2870](https://github.com/pytroll/satpy/pull/2870) - Include gradient/axisintercept for mode p ([2869](https://github.com/pytroll/satpy/issues/2869)) +* [PR 2717](https://github.com/pytroll/satpy/pull/2717) - Add combined GRIB reader for both SEVIRI and FCI L2 products + +#### Documentation changes + +* [PR 2915](https://github.com/pytroll/satpy/pull/2915) - Improve SEVIRI metadata documentation +* [PR 2890](https://github.com/pytroll/satpy/pull/2890) - Fixing contributing.rst access on windows systems +* [PR 2875](https://github.com/pytroll/satpy/pull/2875) - Make documentation generation faster ([2864](https://github.com/pytroll/satpy/issues/2864), [2864](https://github.com/pytroll/satpy/issues/2864)) + +In this release 25 pull requests were closed. 
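
This changeset also introduces a `LightningTimeCompositor` (see `satpy/composites/lightning.py` further down in this diff) behind the new `flash_age` composite. A standalone sketch of the time normalisation it performs, bypassing the Satpy composite machinery and using invented timestamps:

```python
import numpy as np
import xarray as xr

time_range = 60  # minutes, as configured for flash_age in satpy/etc/composites/li.yaml
flash_time = xr.DataArray(
    np.array(["2025-01-20T10:30", "2025-01-20T11:10", "2025-01-20T12:00"],
             dtype="datetime64[ns]"),
    dims="y", attrs={"end_time": "2025-01-20T12:00"})

end_time = np.datetime64(flash_time.attrs["end_time"])
begin_time = end_time - np.timedelta64(time_range, "m")
# Events older than end_time - time_range are dropped ...
flash_time = flash_time.where((flash_time >= begin_time).compute(), drop=True)
# ... and the rest map linearly onto (0, 1], with 1 being the newest event:
print(((flash_time - begin_time) / (end_time - begin_time)).values)  # approx. [0.167, 1.0]
```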
+ + ## Version 0.51.0 (2024/08/15) ### Issues Closed diff --git a/MANIFEST.in b/MANIFEST.in deleted file mode 100644 index 05c921b367..0000000000 --- a/MANIFEST.in +++ /dev/null @@ -1,17 +0,0 @@ -prune * -exclude * -graft doc -recursive-exclude doc/build * -graft satpy -include LICENSE.txt -include README.rst -include AUTHORS.md -include CHANGELOG.md -include SECURITY.md -include CITATION -include satpy/version.py -include pyproject.toml -include setup.py -include setup.cfg -include satpy/py.typed -global-exclude *.py[cod] diff --git a/benchmarks/abi_l1b_benchmarks.py b/benchmarks/abi_l1b_benchmarks.py index 936e0dc514..574226a737 100644 --- a/benchmarks/abi_l1b_benchmarks.py +++ b/benchmarks/abi_l1b_benchmarks.py @@ -43,7 +43,7 @@ def setup_cache(self): if len(get_filenames(self.subdir)) != 16: raise RuntimeError("Existing data files do not match the expected number of files.") download_rsr() - download_luts(aerosol_type="rayleigh_only") + download_luts(aerosol_types=["rayleigh_only"]) def setup(self): """Set up the benchmarks.""" diff --git a/benchmarks/ahi_hsd_benchmarks.py b/benchmarks/ahi_hsd_benchmarks.py index 361934168a..0a2dc65496 100644 --- a/benchmarks/ahi_hsd_benchmarks.py +++ b/benchmarks/ahi_hsd_benchmarks.py @@ -43,7 +43,7 @@ def setup_cache(self): except ImportError: assert len(get_filenames(self.subdir)) == 4 # nosec download_rsr() - download_luts(aerosol_type="rayleigh_only") + download_luts(aerosol_types=["rayleigh_only"]) def setup(self): """Set up the benchmarks.""" diff --git a/benchmarks/seviri_hrit_benchmarks.py b/benchmarks/seviri_hrit_benchmarks.py index 9851dbdac9..0d14320b48 100644 --- a/benchmarks/seviri_hrit_benchmarks.py +++ b/benchmarks/seviri_hrit_benchmarks.py @@ -43,7 +43,7 @@ def setup_cache(self): except ImportError: assert len(get_filenames(self.subdir)) == 114 # nosec download_rsr() - download_luts(aerosol_type="rayleigh_only") + download_luts(aerosol_types=["rayleigh_only"]) def setup(self): """Set up the benchmarks.""" diff --git a/benchmarks/viirs_sdr_benchmarks.py b/benchmarks/viirs_sdr_benchmarks.py index 68db5c6682..a98e02fa97 100644 --- a/benchmarks/viirs_sdr_benchmarks.py +++ b/benchmarks/viirs_sdr_benchmarks.py @@ -42,7 +42,7 @@ def setup_cache(self): except ImportError: assert len(self.get_filenames()) == 6 * 3 # nosec download_rsr() - download_luts(aerosol_type="rayleigh_only") + download_luts(aerosol_types=["rayleigh_only"]) def setup(self, name): """Set up the benchmarks.""" diff --git a/continuous_integration/environment.yaml b/continuous_integration/environment.yaml index ec5668c8e4..df10c1a7a8 100644 --- a/continuous_integration/environment.yaml +++ b/continuous_integration/environment.yaml @@ -3,7 +3,7 @@ channels: - conda-forge dependencies: - xarray!=2022.9.0 - - dask + - dask<2025.1.0 - distributed - dask-image - donfig @@ -43,7 +43,7 @@ dependencies: - python-eccodes # 2.19.1 seems to cause library linking issues - eccodes>=2.20 - - pytest<8.0.0 + - pytest - pytest-cov - fsspec - botocore>=1.33 @@ -53,7 +53,6 @@ dependencies: - pip - skyfield - astropy - - xarray-datatree - pint-xarray - ephem - bokeh diff --git a/doc/rtd_environment.yml b/doc/rtd_environment.yml index 5bc7dabe95..58a1e068f5 100644 --- a/doc/rtd_environment.yml +++ b/doc/rtd_environment.yml @@ -2,11 +2,12 @@ name: readthedocs channels: - conda-forge dependencies: - - python=3.10 + - python=3.11 - pip - platformdirs - dask - dask-image + - dask-expr - defusedxml - donfig # 2.19.1 seems to cause library linking issues diff --git a/doc/source/composites.rst 
b/doc/source/composites.rst index d0c494e414..dda071db24 100644 --- a/doc/source/composites.rst +++ b/doc/source/composites.rst @@ -579,8 +579,9 @@ the file) as:: default apply the :func:`~trollimage.xrimage.XRImage.stretch_linear` enhancement with cutoffs of 0.5% and 99.5%. If you want no enhancement at all (maybe you are enhancing a composite based on :class:`DayNightCompositor` where - the components have their own enhancements defined), you need to define - an enhancement that does nothing:: + the components have their own enhancements defined), you can use the `image_ready` standard name. + If this is not a suitable standard name, you can also define + an enhancement that does nothing:: enhancements: day_x: diff --git a/doc/source/conf.py b/doc/source/conf.py index 2e41793c15..4c8405d19f 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -85,10 +85,15 @@ def __getattr__(cls, name): # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = ["sphinx.ext.autodoc", "sphinx.ext.intersphinx", "sphinx.ext.todo", "sphinx.ext.coverage", - "sphinx.ext.doctest", "sphinx.ext.napoleon", "sphinx.ext.autosummary", "doi_role", - "sphinx.ext.viewcode", "sphinxcontrib.apidoc", + "sphinx.ext.doctest", "sphinx.ext.napoleon", "sphinx.ext.autosummary", "sphinx.ext.autosectionlabel", + "doi_role", "sphinx.ext.viewcode", "sphinxcontrib.apidoc", "sphinx.ext.mathjax"] +# Autosectionlabel +# Make sure target is unique +autosectionlabel_prefix_document = True +autosectionlabel_maxdepth = 3 + # API docs apidoc_module_dir = "../../satpy" apidoc_output_dir = "api" diff --git a/doc/source/config.rst b/doc/source/config.rst index 9babc1abbf..1cbbbec2ed 100644 --- a/doc/source/config.rst +++ b/doc/source/config.rst @@ -272,7 +272,7 @@ If ``clip_negative_radiances=False``, pixels with negative radiances will have Clipping of negative radiances is currently implemented for the following readers: -* ``abi_l1b``, ``ami_l1b`` +* ``abi_l1b``, ``ami_l1b``, ``fci_l1c_nc`` Temporary Directory diff --git a/doc/source/reading.rst b/doc/source/reading.rst index 2f274e9b3f..0136b1dd77 100644 --- a/doc/source/reading.rst +++ b/doc/source/reading.rst @@ -216,6 +216,9 @@ load the datasets using e.g.:: :meth:`scn.missing_datasets <satpy.scene.Scene.missing_datasets>` property for any ``DataID`` that could not be loaded.
+Available datasets +------------------ + To find out what datasets are available from a reader from the files that were provided to the ``Scene`` use :meth:`~satpy.scene.Scene.available_dataset_ids`:: diff --git a/pyproject.toml b/pyproject.toml index 196ae6a462..a562d2d22a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -7,7 +7,7 @@ authors = [ ] dependencies = [ "platformdirs", - "dask[array]>=0.17.1", + "dask[array]>=0.17.1,<2025.1.0", "donfig", "numpy>=1.21", "packaging", @@ -24,7 +24,7 @@ dependencies = [ "zarr", ] readme = "README.rst" -requires-python = ">=3.9" +requires-python = ">=3.10" license = { text = "GPLv3" } classifiers = [ "Development Status :: 5 - Production/Stable", @@ -62,7 +62,7 @@ seviri_l2_bufr = ["eccodes"] seviri_l2_grib = ["eccodes"] hsaf_grib = ["pygrib"] remote_reading = ["fsspec"] -insat_3d = ["xarray-datatree"] +insat_3d = ["xarray>=2024.10.0"] gms5-vissr_l1b = ["numba"] # Writers: cf = ["h5netcdf >= 0.7.3"] @@ -87,7 +87,7 @@ satpos_from_tle = ["skyfield", "astropy"] tests = ["behave", "h5py", "netCDF4", "pyhdf", "imageio", "rasterio", "geoviews", "trollimage", "fsspec", "bottleneck", "rioxarray", "pytest", "pytest-lazy-fixtures", "defusedxml", - "s3fs", "eccodes", "h5netcdf", "xarray-datatree", + "s3fs", "eccodes", "h5netcdf", "xarray>=2024.10.0", "skyfield", "ephem", "pint-xarray", "astropy", "dask-image", "python-geotiepoints", "numba"] dev = ["satpy[doc,tests]"] @@ -112,6 +112,16 @@ build-backend = "hatchling.build" [tool.hatch.metadata] allow-direct-references = true +[tool.hatch.build.targets.sdist] +only-include = [ + "satpy", + "doc", + "AUTHORS.md", + "CHANGELOG.md", + "SECURITY.md", + "CITATION", +] + [tool.hatch.build.targets.wheel] packages = ["satpy"] diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 0ac99c98f7..d7518be91d 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -721,7 +721,7 @@ def __init__(self, name, lim_low=85., lim_high=88., day_night="day_night", inclu self.day_night = day_night self.include_alpha = include_alpha self._has_sza = False - super(DayNightCompositor, self).__init__(name, **kwargs) + super().__init__(name, **kwargs) def __call__( self, @@ -985,6 +985,7 @@ def add_bands(data, bands): alpha = new_data[0].copy() alpha.data = da.ones((data.sizes["y"], data.sizes["x"]), + dtype=new_data[0].dtype, chunks=new_data[0].chunks) # Rename band to indicate it's alpha alpha["bands"] = "A" diff --git a/satpy/composites/lightning.py b/satpy/composites/lightning.py new file mode 100644 index 0000000000..5fc52d7c9c --- /dev/null +++ b/satpy/composites/lightning.py @@ -0,0 +1,108 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2019 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see <http://www.gnu.org/licenses/>.
+"""Composite classes for the LI instrument.""" + +import logging + +import numpy as np +import xarray as xr + +from satpy.composites import CompositeBase + +LOG = logging.getLogger(__name__) + + +class LightningTimeCompositor(CompositeBase): + """Class used to create the flash_age compositor usefull for lighting event visualisation. + + The datas used are dates related to the lightning event that should be normalised between + 0 and 1. The value 1 corresponds to the latest lightning event and the value 0 corresponds + to the latest lightning event - time_range. The time_range is defined in the satpy/etc/composites/li.yaml + and is in minutes. + """ + def __init__(self, name, prerequisites=None, optional_prerequisites=None, **kwargs): + """Initialisation of the class.""" + super().__init__(name, prerequisites, optional_prerequisites, **kwargs) + # Get the time_range which is in minute + self.time_range = self.attrs["time_range"] + self.standard_name = self.attrs["standard_name"] + self.reference_time_attr = self.attrs["reference_time"] + + + def _normalize_time(self, data:xr.DataArray, attrs:dict) -> xr.DataArray: + """Normalize the time in the range between [end_time, end_time - time_range]. + + The range of the normalised data is between 0 and 1 where 0 corresponds to the date end_time - time_range + and 1 to the end_time. Where end_times represent the latest lightning event and time_range is the range of + time you want to see the event.The dates that are earlier to end_time - time_range are removed. + + Args: + data (xr.DataArray): datas containing dates to be normalised + attrs (dict): Attributes suited to the flash_age composite + + Returns: + xr.DataArray: Normalised time + """ + # Compute the maximum time value + end_time = np.array(np.datetime64(data.attrs[self.reference_time_attr])) + # Compute the minimum time value based on the time range + begin_time = end_time - np.timedelta64(self.time_range, "m") + # Drop values that are bellow begin_time + condition_time = data >= begin_time + condition_time_computed = condition_time.compute() + data = data.where(condition_time_computed, drop=True) + # exit if data is empty afer filtering + if data.size == 0 : + LOG.error(f"All the flash_age events happened before {begin_time}") + raise ValueError(f"Invalid data: data size is zero. All flash_age " + f"events occurred before the specified start time ({begin_time})." + ) + # Normalize the time values + normalized_data = (data - begin_time) / (end_time - begin_time) + # Ensure the result is still an xarray.DataArray + return xr.DataArray(normalized_data, dims=data.dims, coords=data.coords, attrs=attrs) + + + @staticmethod + def _update_missing_metadata(existing_attrs, new_attrs): + for key, val in new_attrs.items(): + if key not in existing_attrs and val is not None: + existing_attrs[key] = val + + def _redefine_metadata(self,attrs:dict)->dict: + """Modify the standard_name and name metadatas. 
+ + Args: + attrs (dict): data's attributes + + Returns: + dict: updated attributes + """ + attrs["name"] = self.standard_name + attrs["standard_name"] = self.standard_name + # Attributes to describe the values range + return attrs + + + def __call__(self, projectables, nonprojectables=None, **attrs): + """Normalise the dates.""" + data = projectables[0] + new_attrs = data.attrs.copy() + self._update_missing_metadata(new_attrs, attrs) + new_attrs = self._redefine_metadata(new_attrs) + return self._normalize_time(data, new_attrs) diff --git a/satpy/enhancements/__init__.py b/satpy/enhancements/__init__.py index a44ca590cf..86efe1ffba 100644 --- a/satpy/enhancements/__init__.py +++ b/satpy/enhancements/__init__.py @@ -219,11 +219,12 @@ def cira_stretch(img, **kwargs): @exclude_alpha def _cira_stretch(band_data): - log_root = np.log10(0.0223) + dtype = band_data.dtype + log_root = np.log10(0.0223, dtype=dtype) denom = (1.0 - log_root) * 0.75 band_data *= 0.01 band_data = band_data.clip(np.finfo(float).eps) - band_data = np.log10(band_data) + band_data = np.log10(band_data, dtype=dtype) band_data -= log_root band_data /= denom return band_data diff --git a/satpy/enhancements/atmosphere.py b/satpy/enhancements/atmosphere.py deleted file mode 100644 index bbc4bc3a86..0000000000 --- a/satpy/enhancements/atmosphere.py +++ /dev/null @@ -1,110 +0,0 @@ -# Copyright (c) 2022- Satpy developers -# -# This file is part of satpy. -# -# satpy is free software: you can redistribute it and/or modify it under the -# terms of the GNU General Public License as published by the Free Software -# Foundation, either version 3 of the License, or (at your option) any later -# version. -# -# satpy is distributed in the hope that it will be useful, but WITHOUT ANY -# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR -# A PARTICULAR PURPOSE. See the GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along with -# satpy. If not, see <http://www.gnu.org/licenses/>. -"""Enhancements related to visualising atmospheric phenomena.""" - -import datetime - -import dask.array as da -import xarray as xr - - -def essl_moisture(img, low=1.1, high=1.6) -> None: - r"""Low level moisture by European Severe Storms Laboratory (ESSL). - - Expects a mode L image with data corresponding to the ratio of the - calibrated reflectances for the 0.86 µm and 0.906 µm channel. - - This composite and its colorisation were developed by ESSL. - - Ratio values are scaled from the range ``[low, high]``, which is by default - between 1.1 and 1.6, but might be tuned based on region or sensor, - to ``[0, 1]``. Values outside this range are clipped. Color values - for red, green, and blue are calculated as follows, where ``x`` is the - ratio between the 0.86 µm and 0.905 µm channels: - - .. math:: - - R = \max(1.375 - 2.67 x, -0.75 + x) \\ - G = 1 - \frac{8x}{7} \\ - B = \max(0.75 - 1.5 x, 0.25 - (x - 0.75)^2) \\ - - The value of ``img.data`` is modified in-place. - - A color interpretation guide is pending further adjustments to the - parameters for current and future sensors. - - Args: - img: XRImage containing the relevant composite - low: optional, low end for scaling, defaults to 1.1 - high: optional, high end for scaling, defaults to 1.6 - """ - ratio = img.data - if _is_fci_test_data(img.data): - # Due to a bug in the FCI pre-launch simulated test data, - # the 0.86 µm channel is too bright. To correct for this, its - # reflectances should be multiplied by 0.8.
- ratio *= 0.8 - - with xr.set_options(keep_attrs=True): - ratio = _scale_and_clip(ratio, low, high) - red = _calc_essl_red(ratio) - green = _calc_essl_green(ratio) - blue = _calc_essl_blue(ratio) - data = xr.concat([red, green, blue], dim="bands") - data.attrs["mode"] = "RGB" - data["bands"] = ["R", "G", "B"] - img.data = data - - -def _scale_and_clip(ratio, low, high): - """Scale ratio values to [0, 1] and clip values outside this range.""" - scaled = (ratio - low) / (high - low) - scaled.data = da.clip(scaled.data, 0, 1) - return scaled - - -def _calc_essl_red(ratio): - """Calculate values for red based on scaled and clipped ratio.""" - red_a = 1.375 - 2.67 * ratio - red_b = -0.75 + ratio - red = xr.where(red_a > red_b, red_a, red_b) - red.data = da.clip(red.data, 0, 1) - return red - - -def _calc_essl_green(ratio): - """Calculate values for green based on scaled and clipped ratio.""" - green = 1 - (8/7) * ratio - green.data = da.clip(green.data, 0, 1) - return green - - -def _calc_essl_blue(ratio): - """Calculate values for blue based on scaled and clipped ratio.""" - blue_a = 0.75 - 1.5 * ratio - blue_b = 0.25 - (ratio - 0.75)**2 - blue = xr.where(blue_a > blue_b, blue_a, blue_b) - blue.data = da.clip(blue.data, 0, 1) - return blue - - -def _is_fci_test_data(data): - """Check if we are working with FCI test data.""" - return ("sensor" in data.attrs and - "start_time" in data.attrs and - data.attrs["sensor"] == "fci" and - isinstance(data.attrs["start_time"], datetime.datetime) and - data.attrs["start_time"] < datetime.datetime(2022, 11, 30)) diff --git a/satpy/etc/composites/abi.yaml b/satpy/etc/composites/abi.yaml index 4700aa470b..d4aa8c0185 100644 --- a/satpy/etc/composites/abi.yaml +++ b/satpy/etc/composites/abi.yaml @@ -36,7 +36,7 @@ composites: - name: C02 modifiers: [sunz_corrected, rayleigh_corrected_crefl] - name: C03 - modifiers: [sunz_corrected, rayleigh_corrected_crefl] + modifiers: [sunz_corrected] standard_name: toa_bidirectional_reflectance green_raw: @@ -710,7 +710,7 @@ composites: compositor: !!python/name:satpy.composites.SelfSharpenedRGB prerequisites: - name: C03 - modifiers: [sunz_corrected, rayleigh_corrected] + modifiers: [sunz_corrected] - name: C02 modifiers: [sunz_corrected, rayleigh_corrected] - name: green diff --git a/satpy/etc/composites/ahi.yaml b/satpy/etc/composites/ahi.yaml index 5d633056be..179f36c97c 100644 --- a/satpy/etc/composites/ahi.yaml +++ b/satpy/etc/composites/ahi.yaml @@ -214,11 +214,11 @@ composites: compositor: !!python/name:satpy.composites.SelfSharpenedRGB prerequisites: - wavelength: 1.63 - modifiers: [sunz_corrected] #, rayleigh_corrected] + modifiers: [sunz_corrected] - wavelength: 0.85 - modifiers: [sunz_corrected] #, rayleigh_corrected] + modifiers: [sunz_corrected] - wavelength: 0.635 - modifiers: [sunz_corrected] #, rayleigh_corrected] + modifiers: [sunz_corrected] high_resolution_band: blue standard_name: natural_color @@ -303,34 +303,6 @@ composites: - name: B01 standard_name: true_color_reproduction_color_stretch -# true_color_reducedsize_land: -# compositor: !!python/name:satpy.composites.GenericCompositor -# prerequisites: -# - wavelength: 0.65 -# modifiers: [reducer4, effective_solar_pathlength_corrected, -# rayleigh_corrected_reducedsize_land] -# - wavelength: 0.51 -# modifiers: [reducer2, vegetation_corrected_reduced, effective_solar_pathlength_corrected, -# rayleigh_corrected_reducedsize_land] -# - wavelength: 0.46 -# modifiers: [reducer2, effective_solar_pathlength_corrected, -# rayleigh_corrected_reducedsize_land] -# 
standard_name: true_color -# -# true_color_reducedsize_marine_tropical: -# compositor: !!python/name:satpy.composites.GenericCompositor -# prerequisites: -# - wavelength: 0.65 -# modifiers: [reducer4, effective_solar_pathlength_corrected, -# rayleigh_corrected_reducedsize_marine_tropical] -# - wavelength: 0.51 -# modifiers: [reducer2, vegetation_corrected_reduced, effective_solar_pathlength_corrected, -# rayleigh_corrected_reducedsize_marine_tropical] -# - wavelength: 0.46 -# modifiers: [reducer2, effective_solar_pathlength_corrected, -# rayleigh_corrected_reducedsize_marine_tropical] -# standard_name: true_color - day_microphysics_eum: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: diff --git a/satpy/etc/composites/ami.yaml b/satpy/etc/composites/ami.yaml index b0d943c5ca..a58463a93a 100644 --- a/satpy/etc/composites/ami.yaml +++ b/satpy/etc/composites/ami.yaml @@ -137,11 +137,11 @@ composites: compositor: !!python/name:satpy.composites.SelfSharpenedRGB prerequisites: - name: NR016 - modifiers: [sunz_corrected] #, rayleigh_corrected] + modifiers: [sunz_corrected] - name: VI008 - modifiers: [sunz_corrected] #, rayleigh_corrected] + modifiers: [sunz_corrected] - name: VI006 - modifiers: [sunz_corrected] #, rayleigh_corrected] + modifiers: [sunz_corrected] high_resolution_band: blue standard_name: natural_color diff --git a/satpy/etc/composites/fci.yaml b/satpy/etc/composites/fci.yaml index 5dd812c73f..07ff48da8a 100644 --- a/satpy/etc/composites/fci.yaml +++ b/satpy/etc/composites/fci.yaml @@ -1,17 +1,17 @@ sensor_name: visir/fci composites: -### L2 + ### L2 binary_cloud_mask: # This will set all clear pixels to '0', all pixels with cloudy features (meteorological/dust/ash clouds) to '1' and # missing/undefined pixels to 'nan'. This can be used for the official EUMETSAT cloud mask product (CLM). 
compositor: !!python/name:satpy.composites.CategoricalDataCompositor prerequisites: - - name: 'cloud_state' - lut: [ .nan, 0, 1, 1, 1, 1, 1, 1, 0, .nan ] + - name: "cloud_state" + lut: [.nan, 0, 1, 1, 1, 1, 1, 1, 0, .nan] standard_name: binary_cloud_mask -### Night Layers + ### Night Layers night_ir105: compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: @@ -41,7 +41,7 @@ composites: - night_ir_alpha - _night_background_hires -### Green Corrections + ### Green Corrections ndvi_hybrid_green: description: > The FCI green band at 0.51 µm deliberately misses the chlorophyll band, such that @@ -58,7 +58,7 @@ composites: - name: vis_06 modifiers: [sunz_corrected, rayleigh_corrected, sunz_reduced] - name: vis_08 - modifiers: [sunz_corrected, sunz_reduced ] + modifiers: [sunz_corrected, sunz_reduced] standard_name: toa_bidirectional_reflectance ndvi_hybrid_green_raw: @@ -76,18 +76,18 @@ composites: ndvi_hybrid_green_fully_sunzencorrected: description: Same as ndvi_hybrid_green, but without Sun-zenith reduction compositor: !!python/name:satpy.composites.spectral.NDVIHybridGreen - limits: [ 0.15, 0.05 ] + limits: [0.15, 0.05] strength: 3.0 prerequisites: - name: vis_05 - modifiers: [ sunz_corrected, rayleigh_corrected ] + modifiers: [sunz_corrected, rayleigh_corrected] - name: vis_06 - modifiers: [ sunz_corrected, rayleigh_corrected ] + modifiers: [sunz_corrected, rayleigh_corrected] - name: vis_08 - modifiers: [ sunz_corrected ] + modifiers: [sunz_corrected] standard_name: toa_bidirectional_reflectance -### True Color + ### True Color true_color: compositor: !!python/name:satpy.composites.SelfSharpenedRGB description: > @@ -190,7 +190,7 @@ composites: - name: vis_04 standard_name: true_color_reproduction_color_stretch -### True Color with LI lightning + ### True Color with LI lightning true_color_with_night_ir105_acc_flash: compositor: !!python/name:satpy.composites.BackgroundCompositor @@ -227,74 +227,81 @@ composites: - group_radiance_alpha - true_color_with_night_ir105 -### GeoColor + true_color_with_night_ir105_flash_age: + compositor: !!python/name:satpy.composites.BackgroundCompositor + standard_name: imager_with_lightning + prerequisites: + - flash_age + - true_color_with_night_ir105 + + ### GeoColor geo_color: - compositor: !!python/name:satpy.composites.DayNightCompositor - description: > - GeoColor is a multi-layer blended RGB composite where the day-time part of the image is represented by true - color imagery and the nighttime part of the image by a three layer vertically blended stack composed of a - high-level cloud layer (single IR window channel), a low-level cloud layer (IR split window) and a static - surface terrain layer with city lights (NASA Black Marble). - references: - Research Article: https://journals.ametsoc.org/view/journals/atot/37/3/JTECH-D-19-0134.1.xml - lim_low: 78 - lim_high: 88 - standard_name: geo_color_day_night_blend - prerequisites: - - true_color - - geo_color_night + compositor: !!python/name:satpy.composites.DayNightCompositor + description: > + GeoColor is a multi-layer blended RGB composite where the day-time part of the image is represented by true + color imagery and the nighttime part of the image by a three layer vertically blended stack composed of a + high-level cloud layer (single IR window channel), a low-level cloud layer (IR split window) and a static + surface terrain layer with city lights (NASA Black Marble). 
+ references: + Research Article: https://journals.ametsoc.org/view/journals/atot/37/3/JTECH-D-19-0134.1.xml + lim_low: 78 + lim_high: 88 + standard_name: geo_color_day_night_blend + prerequisites: + - true_color + - geo_color_night geo_color_high_clouds: - standard_name: geo_color_high_clouds - compositor: !!python/name:satpy.composites.HighCloudCompositor - prerequisites: - - name: ir_105 + standard_name: geo_color_high_clouds + compositor: !!python/name:satpy.composites.HighCloudCompositor + prerequisites: + - name: ir_105 geo_color_low_clouds: - standard_name: geo_color_low_clouds - compositor: !!python/name:satpy.composites.LowCloudCompositor - values_water: 0 - values_land: 100 - range_water: [0.0, 4.0] - range_land: [1.5, 4.0] - prerequisites: - - compositor: !!python/name:satpy.composites.DifferenceCompositor - prerequisites: - - name: ir_105 - - name: ir_38 - - name: ir_105 - - compositor: !!python/name:satpy.composites.StaticImageCompositor - standard_name: land_water_mask - url: "https://zenodo.org/records/10076199/files/gshhs_land_water_mask_3km_i.tif" - known_hash: "sha256:96df83c57416217e191f95dde3d3c1ce0373a8fc220e929228873db246ca3569" + standard_name: geo_color_low_clouds + compositor: !!python/name:satpy.composites.LowCloudCompositor + values_water: 0 + values_land: 100 + range_water: [0.0, 4.0] + range_land: [1.5, 4.0] + prerequisites: + - compositor: !!python/name:satpy.composites.DifferenceCompositor + prerequisites: + - name: ir_105 + - name: ir_38 + - name: ir_105 + - compositor: !!python/name:satpy.composites.StaticImageCompositor + standard_name: land_water_mask + url: "https://zenodo.org/records/10076199/files/gshhs_land_water_mask_3km_i.tif" + known_hash: "sha256:96df83c57416217e191f95dde3d3c1ce0373a8fc220e929228873db246ca3569" geo_color_background_with_low_clouds: - compositor: !!python/name:satpy.composites.BackgroundCompositor - standard_name: night_ir_with_background - prerequisites: - - geo_color_low_clouds - - _night_background_hires + compositor: !!python/name:satpy.composites.BackgroundCompositor + standard_name: night_ir_with_background + prerequisites: + - geo_color_low_clouds + - _night_background_hires geo_color_night: - compositor: !!python/name:satpy.composites.BackgroundCompositor - standard_name: night_ir_with_background - prerequisites: - - geo_color_high_clouds - - geo_color_background_with_low_clouds + compositor: !!python/name:satpy.composites.BackgroundCompositor + standard_name: night_ir_with_background + prerequisites: + - geo_color_high_clouds + - geo_color_background_with_low_clouds -### IR-Sandwich + ### IR-Sandwich ir_sandwich: compositor: !!python/name:satpy.composites.SandwichCompositor standard_name: ir_sandwich prerequisites: - - name: 'vis_06' - modifiers: [ sunz_corrected ] + - name: "vis_06" + modifiers: [sunz_corrected] - name: colorized_ir_clouds colorized_ir_clouds: compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - - name: 'ir_105' + - name: "ir_105" standard_name: colorized_ir_clouds ir_sandwich_with_night_colorized_ir_clouds: @@ -306,7 +313,7 @@ composites: - ir_sandwich - colorized_ir_clouds -### other RGBs + ### other RGBs cloud_type: description: > Equal to cimss_cloud_type recipe, but with additional sunz_reducer modifier to avoid saturation at the terminator. 
@@ -316,11 +323,11 @@ composites: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: nir_13 - modifiers: [ sunz_corrected, sunz_reduced ] + modifiers: [sunz_corrected, sunz_reduced] - name: vis_06 - modifiers: [ sunz_corrected, sunz_reduced ] + modifiers: [sunz_corrected, sunz_reduced] - name: nir_16 - modifiers: [ sunz_corrected, sunz_reduced ] + modifiers: [sunz_corrected, sunz_reduced] standard_name: cimss_cloud_type cloud_type_with_night_ir105: @@ -416,10 +423,10 @@ composites: Recipe: https://resources.eumetrain.org/RGBguide/recipes/RGB_recipes.pdf compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - - name: vis_08 - modifiers: [sunz_corrected] - - name: nir_16 - modifiers: [sunz_corrected] - - name: ir_38 - modifiers: [nir_reflectance] + - name: vis_08 + modifiers: [sunz_corrected] + - name: nir_16 + modifiers: [sunz_corrected] + - name: ir_38 + modifiers: [nir_reflectance] standard_name: snow diff --git a/satpy/etc/composites/li.yaml b/satpy/etc/composites/li.yaml index 4d3cc88e95..19e879590c 100644 --- a/satpy/etc/composites/li.yaml +++ b/satpy/etc/composites/li.yaml @@ -10,69 +10,78 @@ composites: compositor: !!python/name:satpy.composites.SingleBandCompositor standard_name: acc_flash prerequisites: - - flash_accumulation + - flash_accumulation acc_flash_alpha: description: Composite to colorise the AF product using the flash accumulation with transparency compositor: !!python/name:satpy.composites.SingleBandCompositor standard_name: acc_flash_alpha prerequisites: - - flash_accumulation + - flash_accumulation acc_flash_area: description: Composite to colorise the AFA product using the flash area compositor: !!python/name:satpy.composites.SingleBandCompositor standard_name: acc_flash_area prerequisites: - - accumulated_flash_area + - accumulated_flash_area acc_flash_area_alpha: description: Composite to colorise the AFA product using the flash area with transparency compositor: !!python/name:satpy.composites.SingleBandCompositor standard_name: acc_flash_area_alpha prerequisites: - - accumulated_flash_area + - accumulated_flash_area acc_flash_radiance: description: Composite to colorise the AFR product using the flash radiance compositor: !!python/name:satpy.composites.SingleBandCompositor standard_name: lightning_radiance prerequisites: - - flash_radiance + - flash_radiance acc_flash_radiance_alpha: description: Composite to colorise the AFR product using the flash radiance with transparency compositor: !!python/name:satpy.composites.SingleBandCompositor standard_name: lightning_radiance_alpha prerequisites: - - flash_radiance + - flash_radiance flash_radiance: description: Composite to colorise the LFL product using the flash radiance compositor: !!python/name:satpy.composites.SingleBandCompositor standard_name: lightning_radiance prerequisites: - - radiance + - radiance flash_radiance_alpha: description: Composite to colorise the LFL product using the flash radiance with transparency compositor: !!python/name:satpy.composites.SingleBandCompositor standard_name: lightning_radiance_alpha prerequisites: - - radiance + - radiance group_radiance: description: Composite to colorise the LGR product using the flash radiance compositor: !!python/name:satpy.composites.SingleBandCompositor standard_name: lightning_radiance prerequisites: - - radiance + - radiance group_radiance_alpha: description: Composite to colorise the LGR product using the flash radiance with transparency compositor: 
!!python/name:satpy.composites.SingleBandCompositor standard_name: lightning_radiance_alpha prerequisites: - - radiance + - radiance # DEPRECATED, USE acc_flash_area INSTEAD flash_area: compositor: !!python/name:satpy.composites.SingleBandCompositor standard_name: acc_flash_area prerequisites: - - accumulated_flash_area + - accumulated_flash_area + + flash_age: + description: Composite to colorise the LFL product using the flash time + compositor: !!python/name:satpy.composites.lightning.LightningTimeCompositor + standard_name: lightning_time + time_range: 60 # range for colormap in minutes + reference_time: end_time + prerequisites: + - flash_time diff --git a/satpy/etc/composites/microwave.yaml b/satpy/etc/composites/microwave.yaml new file mode 100644 index 0000000000..9cc6789cbd --- /dev/null +++ b/satpy/etc/composites/microwave.yaml @@ -0,0 +1 @@ +sensor_name: microwave diff --git a/satpy/etc/composites/mwr.yaml b/satpy/etc/composites/mwr.yaml new file mode 100644 index 0000000000..5f10986d4c --- /dev/null +++ b/satpy/etc/composites/mwr.yaml @@ -0,0 +1,46 @@ +sensor_name: microwave/mwr + +composites: + mw183_humidity: + standard_name: mw183_humidity + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: '11' + - name: '13' + - name: '15' + + mw183_humidity_surface: + compositor: !!python/name:satpy.composites.RGBCompositor + prerequisites: + - name: '9' + - name: '10' + - name: '12' + standard_name: mw_humidity_surface + + mw325_humidity_surface: + compositor: !!python/name:satpy.composites.RGBCompositor + prerequisites: + - name: '9' + - name: '10' + - name: '19' + standard_name: mw_humidity_surface + + mw325_humidity: + compositor: !!python/name:satpy.composites.RGBCompositor + prerequisites: + - name: '16' + - name: '18' + - name: '19' + standard_name: mw_humidity_surface + + ch1_tbs_colors: + compositor: !!python/name:satpy.composites.SingleBandCompositor + prerequisites: + - name: '1' + standard_name: tbs_colors + + ch10_tbs_colors: + compositor: !!python/name:satpy.composites.SingleBandCompositor + prerequisites: + - name: '10' + standard_name: tbs_colors diff --git a/satpy/etc/composites/oli_tirs.yaml b/satpy/etc/composites/oli_tirs.yaml new file mode 100644 index 0000000000..7dd41db4af --- /dev/null +++ b/satpy/etc/composites/oli_tirs.yaml @@ -0,0 +1,422 @@ +sensor_name: visir/oli_tirs + +modifiers: + rayleigh_corrected: + modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance + atmosphere: us-standard + aerosol_type: rayleigh_only + prerequisites: + - name: 'B4' + modifiers: [sunz_corrected] + optional_prerequisites: + - name: satellite_azimuth_angle + - name: satellite_zenith_angle + - name: solar_azimuth_angle + - name: solar_zenith_angle + + rayleigh_corrected_antarctic: + modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance + atmosphere: us-standard + aerosol_type: antarctic_aerosol + prerequisites: + - name: 'B4' + modifiers: [sunz_corrected] + optional_prerequisites: + - name: satellite_azimuth_angle + - name: satellite_zenith_angle + - name: solar_azimuth_angle + - name: solar_zenith_angle + + rayleigh_corrected_continental_average: + modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance + atmosphere: us-standard + aerosol_type: continental_average_aerosol + prerequisites: + - name: 'B4' + modifiers: [sunz_corrected] + optional_prerequisites: + - name: satellite_azimuth_angle + - name: satellite_zenith_angle + - name: solar_azimuth_angle + - name: solar_zenith_angle + + rayleigh_corrected_continental_clean: 
+ modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance + atmosphere: us-standard + aerosol_type: continental_clean_aerosol + prerequisites: + - name: 'B4' + modifiers: [sunz_corrected] + optional_prerequisites: + - name: satellite_azimuth_angle + - name: satellite_zenith_angle + - name: solar_azimuth_angle + - name: solar_zenith_angle + + rayleigh_corrected_continental_polluted: + modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance + atmosphere: us-standard + aerosol_type: continental_polluted_aerosol + prerequisites: + - name: 'B4' + modifiers: [sunz_corrected] + optional_prerequisites: + - name: satellite_azimuth_angle + - name: satellite_zenith_angle + - name: solar_azimuth_angle + - name: solar_zenith_angle + + rayleigh_corrected_desert: + modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance + atmosphere: us-standard + aerosol_type: desert_aerosol + prerequisites: + - name: 'B4' + modifiers: [sunz_corrected] + optional_prerequisites: + - name: satellite_azimuth_angle + - name: satellite_zenith_angle + - name: solar_azimuth_angle + - name: solar_zenith_angle + + rayleigh_corrected_marine_clean: + modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance + atmosphere: us-standard + aerosol_type: marine_clean_aerosol + prerequisites: + - name: 'B4' + modifiers: [sunz_corrected] + optional_prerequisites: + - name: satellite_azimuth_angle + - name: satellite_zenith_angle + - name: solar_azimuth_angle + - name: solar_zenith_angle + + rayleigh_corrected_marine_polluted: + modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance + atmosphere: us-standard + aerosol_type: marine_polluted_aerosol + prerequisites: + - name: 'B4' + modifiers: [sunz_corrected] + optional_prerequisites: + - name: satellite_azimuth_angle + - name: satellite_zenith_angle + - name: solar_azimuth_angle + - name: solar_zenith_angle + + rayleigh_corrected_marine_tropical: + modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance + atmosphere: us-standard + aerosol_type: marine_tropical_aerosol + prerequisites: + - name: 'B4' + modifiers: [sunz_corrected] + optional_prerequisites: + - name: satellite_azimuth_angle + - name: satellite_zenith_angle + - name: solar_azimuth_angle + - name: solar_zenith_angle + + rayleigh_corrected_rural: + modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance + atmosphere: us-standard + aerosol_type: rural_aerosol + prerequisites: + - name: 'B4' + modifiers: [sunz_corrected] + optional_prerequisites: + - name: satellite_azimuth_angle + - name: satellite_zenith_angle + - name: solar_azimuth_angle + - name: solar_zenith_angle + + rayleigh_corrected_urban: + modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance + atmosphere: us-standard + aerosol_type: urban_aerosol + prerequisites: + - name: 'B4' + modifiers: [sunz_corrected] + optional_prerequisites: + - name: satellite_azimuth_angle + - name: satellite_zenith_angle + - name: solar_azimuth_angle + - name: solar_zenith_angle + + +composites: + true_color: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'B4' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] + - name: 'B3' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] + - name: 'B2' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] + standard_name: true_color + + true_color_antarctic: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'B4' + modifiers: 
[effective_solar_pathlength_corrected, rayleigh_corrected_antarctic] + - name: 'B3' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_antarctic] + - name: 'B2' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_antarctic] + standard_name: true_color + + true_color_continental_average: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'B4' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_continental_average] + - name: 'B3' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_continental_average] + - name: 'B2' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_continental_average] + standard_name: true_color + + true_color_continental_clean: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'B4' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_continental_clean] + - name: 'B3' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_continental_clean] + - name: 'B2' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_continental_clean] + standard_name: true_color + + true_color_continental_polluted: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'B4' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_continental_polluted] + - name: 'B3' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_continental_polluted] + - name: 'B2' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_continental_polluted] + standard_name: true_color + + true_color_desert: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'B4' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_desert] + - name: 'B3' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_desert] + - name: 'B2' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_desert] + standard_name: true_color + + true_color_marine_clean: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'B4' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_clean] + - name: 'B3' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_clean] + - name: 'B2' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_clean] + standard_name: true_color + + true_color_marine_polluted: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'B4' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_polluted] + - name: 'B3' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_polluted] + - name: 'B2' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_polluted] + standard_name: true_color + + true_color_marine_tropical: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'B4' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_tropical] + - name: 'B3' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_tropical] + - name: 'B2' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_tropical] + standard_name: true_color + + true_color_rural: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'B4' + 
modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_rural] + - name: 'B3' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_rural] + - name: 'B2' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_rural] + standard_name: true_color + + true_color_urban: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'B4' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_urban] + - name: 'B3' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_urban] + - name: 'B2' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_urban] + standard_name: true_color + + true_color_uncorr: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'B4' + modifiers: [effective_solar_pathlength_corrected] + - name: 'B3' + modifiers: [effective_solar_pathlength_corrected] + - name: 'B2' + modifiers: [effective_solar_pathlength_corrected] + standard_name: true_color + + true_color_raw: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'B4' + # modifiers: [effective_solar_pathlength_corrected] + - name: 'B3' + # modifiers: [effective_solar_pathlength_corrected] + - name: 'B2' + # modifiers: [effective_solar_pathlength_corrected] + standard_name: true_color + + natural_color: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'B6' + modifiers: [effective_solar_pathlength_corrected] + - name: 'B5' + modifiers: [effective_solar_pathlength_corrected] + - name: 'B4' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] + standard_name: natural_color + + urban_color: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'B7' + modifiers: [effective_solar_pathlength_corrected] + - name: 'B6' + modifiers: [effective_solar_pathlength_corrected] + - name: 'B4' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] + standard_name: natural_color + + false_color: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'B5' + modifiers: [effective_solar_pathlength_corrected] + - name: 'B4' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] + - name: 'B3' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] + standard_name: natural_color + + ndvi: + # Normalized Difference Vegetation Index + # For more information please review https://custom-scripts.sentinel-hub.com/sentinel-2/ndvi/ + compositor: !!python/name:satpy.composites.SingleBandCompositor + prerequisites: + - compositor: !!python/name:satpy.composites.RatioCompositor + prerequisites: + - compositor: !!python/name:satpy.composites.DifferenceCompositor + prerequisites: + - name: 'B5' + modifiers: [effective_solar_pathlength_corrected] + - name: 'B4' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] + - compositor: !!python/name:satpy.composites.SumCompositor + prerequisites: + - name: 'B5' + modifiers: [effective_solar_pathlength_corrected] + - name: 'B4' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] + standard_name: ndvi_msi + + ndmi: + # Normalized Difference Moisture Index + # For more information please review https://custom-scripts.sentinel-hub.com/sentinel-2/ndmi/ + compositor: !!python/name:satpy.composites.SingleBandCompositor + prerequisites: + - compositor: !!python/name:satpy.composites.RatioCompositor + prerequisites: 
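+      # i.e. NDMI = (B5 - B6) / (B5 + B6), built from the Difference and Sum
+      # compositors nested below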
+ - compositor: !!python/name:satpy.composites.DifferenceCompositor + prerequisites: + - name: 'B5' + modifiers: [effective_solar_pathlength_corrected] + - name: 'B6' + modifiers: [effective_solar_pathlength_corrected] + - compositor: !!python/name:satpy.composites.SumCompositor + prerequisites: + - name: 'B5' + modifiers: [effective_solar_pathlength_corrected] + - name: 'B6' + modifiers: [effective_solar_pathlength_corrected] + standard_name: ndmi_msi + + ndwi: + # Normalized Difference Water Index + # For more information please review https://custom-scripts.sentinel-hub.com/sentinel-2/ndwi/ + compositor: !!python/name:satpy.composites.SingleBandCompositor + prerequisites: + - compositor: !!python/name:satpy.composites.RatioCompositor + prerequisites: + - compositor: !!python/name:satpy.composites.DifferenceCompositor + prerequisites: + - name: 'B3' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] + - name: 'B5' + modifiers: [effective_solar_pathlength_corrected] + - compositor: !!python/name:satpy.composites.SumCompositor + prerequisites: + - name: 'B3' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] + - name: 'B5' + modifiers: [effective_solar_pathlength_corrected] + standard_name: ndwi_msi + + ndsi: + # Normalized Difference Snow Index + # For more information please review https://custom-scripts.sentinel-hub.com/sentinel-2/ndsi/ + compositor: !!python/name:satpy.composites.MaskingCompositor + prerequisites: + - name: 'B6' + modifiers: [effective_solar_pathlength_corrected] + - compositor: !!python/name:satpy.composites.RatioCompositor + prerequisites: + - compositor: !!python/name:satpy.composites.DifferenceCompositor + prerequisites: + - name: 'B3' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] + - name: 'B6' + modifiers: [effective_solar_pathlength_corrected] + - compositor: !!python/name:satpy.composites.SumCompositor + prerequisites: + - name: 'B3' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] + - name: 'B6' + modifiers: [effective_solar_pathlength_corrected] + conditions: + - method: less_equal + value: 0.42 + transparency: 100 + - method: isnan + transparency: 100 + standard_name: ndsi_msi + + ndsi_with_true_color: + compositor: !!python/name:satpy.composites.BackgroundCompositor + prerequisites: + - name: ndsi + - name: true_color + standard_name: no_enhancement diff --git a/satpy/etc/composites/sgli.yaml b/satpy/etc/composites/sgli.yaml index 451c60d8e6..d5d46114a4 100644 --- a/satpy/etc/composites/sgli.yaml +++ b/satpy/etc/composites/sgli.yaml @@ -4,7 +4,7 @@ sensor_name: visir/sgli modifiers: rayleigh_corrected: - compositor: !!python/name:satpy.modifiers.PSPRayleighReflectance + modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: rayleigh_only prerequisites: @@ -17,7 +17,7 @@ modifiers: - solar_zenith_angle rayleigh_corrected_marine_clean: - compositor: !!python/name:satpy.modifiers.PSPRayleighReflectance + modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: marine_clean_aerosol prerequisites: @@ -30,7 +30,7 @@ modifiers: - solar_zenith_angle rayleigh_corrected_marine_tropical: - compositor: !!python/name:satpy.modifiers.PSPRayleighReflectance + modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: tropical aerosol_type: marine_tropical_aerosol prerequisites: @@ -43,7 +43,7 @@ modifiers: - solar_zenith_angle rayleigh_corrected_desert: - compositor: 
!!python/name:satpy.modifiers.PSPRayleighReflectance + modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: tropical aerosol_type: desert_aerosol prerequisites: @@ -56,7 +56,7 @@ modifiers: - solar_zenith_angle rayleigh_corrected_land: - compositor: !!python/name:satpy.modifiers.PSPRayleighReflectance + modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: continental_average_aerosol prerequisites: diff --git a/satpy/etc/composites/vii.yaml b/satpy/etc/composites/vii.yaml index e5c4f0786b..7828050170 100644 --- a/satpy/etc/composites/vii.yaml +++ b/satpy/etc/composites/vii.yaml @@ -87,7 +87,6 @@ composites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: 'vii_3740' - modifiers: [ co2_corrected ] - name: 'vii_10690' - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: diff --git a/satpy/etc/composites/viirs.yaml b/satpy/etc/composites/viirs.yaml index bebf6c5833..f2baec6f13 100644 --- a/satpy/etc/composites/viirs.yaml +++ b/satpy/etc/composites/viirs.yaml @@ -232,7 +232,7 @@ composites: modifiers: [sunz_corrected, rayleigh_corrected] optional_prerequisites: - name: I02 - modifiers: [sunz_corrected_iband, rayleigh_corrected_iband] + modifiers: [sunz_corrected_iband] standard_name: false_color high_resolution_band: green @@ -405,7 +405,7 @@ composites: - name: M07 modifiers: [sunz_corrected] - name: M12 - modifiers: [nir_reflectance] + modifiers: [nir_reflectance_lowres] - M15 standard_name: day_microphysics diff --git a/satpy/etc/composites/visir.yaml b/satpy/etc/composites/visir.yaml index ffe3be4183..fa774e26da 100644 --- a/satpy/etc/composites/visir.yaml +++ b/satpy/etc/composites/visir.yaml @@ -626,8 +626,8 @@ composites: is still under development and may be subject to change. 
compositor: !!python/name:satpy.composites.RatioCompositor prerequisites: - - wavelength: 0.86 - wavelength: 0.905 + - wavelength: 0.86 standard_name: essl_colorized_low_level_moisture day_essl_colorized_low_level_moisture: @@ -638,7 +638,7 @@ composites: day_night: day_only prerequisites: - name: essl_colorized_low_level_moisture - standard_name: day_essl_colorized_low_level_moisture + standard_name: image_ready rocket_plume_day: description: > diff --git a/satpy/etc/enhancements/generic.yaml b/satpy/etc/enhancements/generic.yaml index 5d17154aab..b0ba3df206 100644 --- a/satpy/etc/enhancements/generic.yaml +++ b/satpy/etc/enhancements/generic.yaml @@ -1243,12 +1243,114 @@ enhancements: essl_colorized_low_level_moisture: name: essl_colorized_low_level_moisture operations: - - name: essl_moisture - method: !!python/name:satpy.enhancements.atmosphere.essl_moisture - - day_essl_colorized_low_level_moisture: - standard_name: day_essl_colorized_low_level_moisture - operations: [] + - name: colorize + method: !!python/name:satpy.enhancements.colorize + kwargs: + palettes: + - min_value: 0.625 + max_value: 0.91 + values: + - 0.6250 + - 0.6290 + - 0.6331 + - 0.6372 + - 0.6414 + - 0.6456 + - 0.6499 + - 0.6542 + - 0.6586 + - 0.6631 + - 0.6676 + - 0.6722 + - 0.6768 + - 0.6815 + - 0.6863 + - 0.6911 + - 0.6960 + - 0.7010 + - 0.7061 + - 0.7112 + - 0.7164 + - 0.7216 + - 0.7270 + - 0.7324 + - 0.7380 + - 0.7436 + - 0.7492 + - 0.7550 + - 0.7609 + - 0.7668 + - 0.7729 + - 0.7790 + - 0.7853 + - 0.7916 + - 0.7980 + - 0.8046 + - 0.8113 + - 0.8180 + - 0.8249 + - 0.8319 + - 0.8390 + - 0.8463 + - 0.8537 + - 0.8612 + - 0.8688 + - 0.8766 + - 0.8845 + - 0.8925 + - 0.9007 + - 0.9091 + colors: + - [63, 0, 47] + - [58, 0, 50] + - [53, 0, 52] + - [48, 0, 54] + - [42, 0, 56] + - [37, 0, 58] + - [32, 0, 59] + - [27, 5, 60] + - [22, 11, 61] + - [16, 17, 62] + - [11, 23, 63] + - [6, 28, 63] + - [1, 34, 63] + - [0, 40, 63] + - [0, 46, 63] + - [0, 52, 62] + - [0, 58, 62] + - [0, 64, 61] + - [0, 70, 60] + - [0, 76, 58] + - [0, 82, 57] + - [0, 88, 55] + - [0, 94, 53] + - [0, 100, 51] + - [3, 106, 49] + - [17, 112, 46] + - [31, 118, 43] + - [44, 124, 40] + - [58, 130, 37] + - [72, 136, 35] + - [86, 141, 42] + - [100, 147, 50] + - [114, 153, 58] + - [128, 159, 66] + - [142, 165, 74] + - [156, 171, 81] + - [169, 177, 89] + - [183, 183, 97] + - [197, 189, 105] + - [211, 195, 113] + - [225, 201, 120] + - [239, 207, 128] + - [253, 213, 136] + - [255, 219, 144] + - [255, 225, 152] + - [255, 231, 160] + - [255, 237, 167] + - [255, 243, 175] + - [255, 249, 183] + - [255, 255, 191] rocket_plume: standard_name: rocket_plume @@ -1285,3 +1387,22 @@ enhancements: imager_with_lightning: standard_name: imager_with_lightning operations: [] + + image_ready: + standard_name: image_ready + operations: [] + + mw183_humidity: + # matches EPS-Sterna and AWS MWR, and ATMS and MHS + standard_name: mw183_humidity + operations: + - name: stretch + method: !!python/name:satpy.enhancements.stretch + kwargs: + stretch: crude + min_stretch: [290, 290, 290] + max_stretch: [190, 190, 190] + - name: gamma + method: !!python/name:satpy.enhancements.gamma + kwargs: + gamma: [1.5, 1.2, 1.2] diff --git a/satpy/etc/enhancements/li.yaml b/satpy/etc/enhancements/li.yaml index 49009808eb..82b6056b9a 100644 --- a/satpy/etc/enhancements/li.yaml +++ b/satpy/etc/enhancements/li.yaml @@ -1,60 +1,84 @@ enhancements: -# note that the colormap parameters are tuned for 5 minutes of files accumulation -# these are tentative recipes that will need to be further tuned as we gain 
experience with LI data + # note that the colormap parameters are tuned for 5 minutes of files accumulation + # these are tentative recipes that will need to be further tuned as we gain experience with LI data acc_flash: standard_name: acc_flash operations: - - name: colorize - method: !!python/name:satpy.enhancements.colorize - kwargs: - palettes: - - {colors: ylorrd, min_value: 0, max_value: 5} + - name: colorize + method: !!python/name:satpy.enhancements.colorize + kwargs: + palettes: + - { colors: ylorrd, min_value: 0, max_value: 5 } acc_flash_alpha: standard_name: acc_flash_alpha operations: - - name: colorize - method: !!python/name:satpy.enhancements.colorize - kwargs: - palettes: - - {colors: ylorrd, min_value: 0, max_value: 5, - min_alpha: 120, max_alpha: 180} + - name: colorize + method: !!python/name:satpy.enhancements.colorize + kwargs: + palettes: + - { + colors: ylorrd, + min_value: 0, + max_value: 5, + min_alpha: 120, + max_alpha: 180, + } acc_flash_area: standard_name: acc_flash_area operations: - - name: colorize - method: !!python/name:satpy.enhancements.colorize - kwargs: - palettes: - - {colors: ylorrd, min_value: 0, max_value: 20} + - name: colorize + method: !!python/name:satpy.enhancements.colorize + kwargs: + palettes: + - { colors: ylorrd, min_value: 0, max_value: 20 } acc_flash_area_alpha: standard_name: acc_flash_area_alpha operations: - - name: colorize - method: !!python/name:satpy.enhancements.colorize - kwargs: - palettes: - - {colors: ylorrd, min_value: 0, max_value: 20, - min_alpha: 120, max_alpha: 180} + - name: colorize + method: !!python/name:satpy.enhancements.colorize + kwargs: + palettes: + - { + colors: ylorrd, + min_value: 0, + max_value: 20, + min_alpha: 120, + max_alpha: 180, + } lightning_radiance: standard_name: lightning_radiance operations: - - name: colorize - method: !!python/name:satpy.enhancements.colorize - kwargs: - palettes: - - {colors: ylorrd, min_value: 0, max_value: 1000} + - name: colorize + method: !!python/name:satpy.enhancements.colorize + kwargs: + palettes: + - { colors: ylorrd, min_value: 0, max_value: 1000 } lightning_radiance_alpha: standard_name: lightning_radiance_alpha operations: - - name: colorize - method: !!python/name:satpy.enhancements.colorize - kwargs: - palettes: - - {colors: ylorrd, min_value: 0, max_value: 1000, - min_alpha: 120, max_alpha: 180} + - name: colorize + method: !!python/name:satpy.enhancements.colorize + kwargs: + palettes: + - { + colors: ylorrd, + min_value: 0, + max_value: 1000, + min_alpha: 120, + max_alpha: 180, + } + + lightning_time: + standard_name: lightning_time + operations: + - name: colorize + method: !!python/name:satpy.enhancements.colorize + kwargs: + palettes: + - { colors: ylorrd, reverse: True, min_value: 0, max_value: 1 } diff --git a/satpy/etc/enhancements/mwr.yaml b/satpy/etc/enhancements/mwr.yaml new file mode 100644 index 0000000000..6c39440de8 --- /dev/null +++ b/satpy/etc/enhancements/mwr.yaml @@ -0,0 +1,24 @@ +enhancements: + + mw_humidity_surface: + standard_name: mw_humidity_surface + operations: + - name: inverse + method: !!python/name:satpy.enhancements.invert + args: + - [true, true, true] + - name: stretch + method: !!python/name:satpy.enhancements.stretch + kwargs: {stretch: linear} + - name: gamma + method: !!python/name:satpy.enhancements.gamma + kwargs: {gamma: 1.2} + + tbs_colors: + standard_name: tbs_colors + operations: + - name: colorize + method: !!python/name:satpy.enhancements.colorize + kwargs: + palettes: + - {colors: spectral, min_value: 280, 
max_value: 180} diff --git a/satpy/etc/readers/abi_l2_nc.yaml b/satpy/etc/readers/abi_l2_nc.yaml index 3c3a94cc40..8410d4cff8 100644 --- a/satpy/etc/readers/abi_l2_nc.yaml +++ b/satpy/etc/readers/abi_l2_nc.yaml @@ -212,6 +212,12 @@ datasets: file_type: abi_l2_cpsn file_key: PSD + # new variable name since 18:51UTC December 04, 2023. + cloud_particle_size_new: + name: CPS + file_type: abi_l2_cps + file_key: CPS + # --- Cloud Top Pressure --- cloud_top_pressure: name: PRES diff --git a/satpy/etc/readers/atms_sdr_hdf5.yaml b/satpy/etc/readers/atms_sdr_hdf5.yaml index fa8e4105ce..6ad1a1e0a9 100644 --- a/satpy/etc/readers/atms_sdr_hdf5.yaml +++ b/satpy/etc/readers/atms_sdr_hdf5.yaml @@ -33,7 +33,7 @@ reader: file_types: atms_sdr_hdf5: file_reader: !!python/name:satpy.readers.atms_sdr_hdf5.ATMS_SDR_FileHandler - file_patterns: ['SATMS_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time:%Y%m%d%H%M%S%f}_{source}.h5', 'GATMO_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time:%Y%m%d%H%M%S%f}_{source}.h5'] + file_patterns: ['SATMS_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time:%Y%m%d%H%M%S%f}_{source}.h5', 'GATMO_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time:%Y%m%d%H%M%S%f}_{source}.h5','GATMO-SATMS_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time:%Y%m%d%H%M%S%f}_{source}.h5'] # Example filenames # GATMO_j01_d20221220_t0910240_e0921356_b26361_c20221220100456680030_cspp_dev.h5 # SATMS_j01_d20221220_t0910240_e0921356_b26361_c20221220100456348770_cspp_dev.h5 diff --git a/satpy/etc/readers/aws1_mwr_l1b_nc.yaml b/satpy/etc/readers/aws1_mwr_l1b_nc.yaml new file mode 100644 index 0000000000..f487d58265 --- /dev/null +++ b/satpy/etc/readers/aws1_mwr_l1b_nc.yaml @@ -0,0 +1,536 @@ +reader: + name: aws1_mwr_l1b_nc + short_name: AWS1 MWR L1B + long_name: AWS1 MWR L1B Radiance (NetCDF4) + description: Reader for the ESA AWS (Arctic Weather Satellite) Microwave Radiometer (MWR) level-1b files in netCDF4. 
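+  # Usage sketch (reader name as defined here; filenames hypothetical):
+  #   scn = Scene(filenames=my_l1b_files, reader="aws1_mwr_l1b_nc")
+  #   scn.load(["1", "9"])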
+ reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader + sensors: [mwr,] + status: Beta + supports_fsspec: false + + data_identification_keys: + name: + required: true + frequency_double_sideband: + type: !!python/name:satpy.readers.pmw_channels_definitions.FrequencyDoubleSideBand + frequency_range: + type: !!python/name:satpy.readers.pmw_channels_definitions.FrequencyRange + resolution: + polarization: + enum: + - QH + - QV + horn: + enum: + - "1" + - "2" + - "3" + - "4" + calibration: + enum: + - brightness_temperature + transitive: true + modifiers: + required: true + default: [] + type: !!python/name:satpy.dataset.ModifierTuple + + coord_identification_keys: + name: + required: true + resolution: + polarization: + enum: + - QH + - QV + horn: + enum: + - "1" + - "2" + - "3" + - "4" + +datasets: + '1': + name: '1' + frequency_range: + central: 50.3 + bandwidth: 0.180 + unit: GHz + polarization: 'QH' + resolution: 40000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + horn: "1" + coordinates: [longitude, latitude] + file_type: aws_l1b_nc + file_key: data/calibration/aws_toa_brightness_temperature + '2': + name: '2' + frequency_range: + central: 52.8 + bandwidth: 0.400 + unit: GHz + polarization: 'QH' + resolution: 40000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + horn: "1" + coordinates: [longitude, latitude] + file_type: aws_l1b_nc + file_key: data/calibration/aws_toa_brightness_temperature + '3': + name: '3' + frequency_range: + central: 53.246 + bandwidth: 0.300 + unit: GHz + polarization: 'QH' + resolution: 40000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + horn: "1" + coordinates: [longitude, latitude] + file_type: aws_l1b_nc + file_key: data/calibration/aws_toa_brightness_temperature + '4': + name: '4' + frequency_range: + central: 53.596 + bandwidth: 0.370 + unit: GHz + polarization: 'QH' + resolution: 40000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + horn: "1" + coordinates: [longitude, latitude] + file_type: aws_l1b_nc + file_key: data/calibration/aws_toa_brightness_temperature + '5': + name: '5' + frequency_range: + central: 54.4 + bandwidth: 0.400 + unit: GHz + polarization: 'QH' + resolution: 40000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + horn: "1" + coordinates: [longitude, latitude] + file_type: aws_l1b_nc + file_key: data/calibration/aws_toa_brightness_temperature + '6': + name: '6' + frequency_range: + central: 54.94 + bandwidth: 0.400 + unit: GHz + polarization: 'QH' + resolution: 40000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + horn: "1" + coordinates: [longitude, latitude] + file_type: aws_l1b_nc + file_key: data/calibration/aws_toa_brightness_temperature + '7': + name: '7' + frequency_range: + central: 55.5 + bandwidth: 0.330 + unit: GHz + polarization: 'QH' + resolution: 40000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + horn: "1" + coordinates: [longitude, latitude] + file_type: aws_l1b_nc + file_key: data/calibration/aws_toa_brightness_temperature + '8': + name: '8' + frequency_range: + central: 57.290344 + bandwidth: 0.330 + unit: GHz + polarization: 'QH' + resolution: 40000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + horn: "1" + coordinates: [longitude, latitude] + file_type: aws_l1b_nc + file_key: 
data/calibration/aws_toa_brightness_temperature + '9': + name: '9' + frequency_range: + central: 89.0 + bandwidth: 4.0 + unit: GHz + polarization: 'QV' + resolution: 20000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + horn: "2" + coordinates: [longitude, latitude] + file_type: aws_l1b_nc + file_key: data/calibration/aws_toa_brightness_temperature + '10': + name: '10' + frequency_range: + central: 165.5 + bandwidth: 2.700 + unit: GHz + polarization: 'QH' + resolution: 20000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + horn: "3" + coordinates: [longitude, latitude] + file_type: aws_l1b_nc + file_key: data/calibration/aws_toa_brightness_temperature + '11': + name: '11' + frequency_range: + central: 176.311 + bandwidth: 2.0 + unit: GHz + polarization: 'QV' + resolution: 10000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + horn: "3" + coordinates: [longitude, latitude] + file_type: aws_l1b_nc + file_key: data/calibration/aws_toa_brightness_temperature + '12': + name: '12' + frequency_range: + central: 178.811 + bandwidth: 2.0 + unit: GHz + polarization: 'QV' + resolution: 10000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + horn: "3" + coordinates: [longitude, latitude] + file_type: aws_l1b_nc + file_key: data/calibration/aws_toa_brightness_temperature + '13': + name: '13' + frequency_range: + central: 180.311 + bandwidth: 1.0 + unit: GHz + polarization: 'QV' + resolution: 10000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + horn: "3" + coordinates: [longitude, latitude] + file_type: aws_l1b_nc + file_key: data/calibration/aws_toa_brightness_temperature + '14': + name: '14' + frequency_range: + central: 181.511 + bandwidth: 1.0 + unit: GHz + polarization: 'QV' + resolution: 10000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + horn: "3" + coordinates: [longitude, latitude] + file_type: aws_l1b_nc + file_key: data/calibration/aws_toa_brightness_temperature + '15': + name: '15' + frequency_range: + central: 182.311 + bandwidth: 0.5 + unit: GHz + polarization: 'QV' + resolution: 10000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + horn: "3" + coordinates: [longitude, latitude] + file_type: aws_l1b_nc + file_key: data/calibration/aws_toa_brightness_temperature + '16': + name: '16' + frequency_double_sideband: + central: 325.15 + side: 1.2 + bandwidth: 0.8 + unit: GHz + polarization: 'QV' + resolution: 10000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + horn: "4" + coordinates: [longitude, latitude] + file_type: aws_l1b_nc + file_key: data/calibration/aws_toa_brightness_temperature + '17': + name: '17' + frequency_double_sideband: + central: 325.15 + side: 2.4 + bandwidth: 1.2 + unit: GHz + polarization: 'QV' + resolution: 10000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + horn: "4" + coordinates: [longitude, latitude] + file_type: aws_l1b_nc + file_key: data/calibration/aws_toa_brightness_temperature + '18': + name: '18' + frequency_double_sideband: + central: 325.15 + side: 4.1 + bandwidth: 1.8 + unit: GHz + polarization: 'QV' + resolution: 10000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + horn: "4" + coordinates: [longitude, latitude] + file_type: aws_l1b_nc + file_key: 
data/calibration/aws_toa_brightness_temperature + '19': + name: '19' + frequency_double_sideband: + central: 325.15 + side: 6.6 + bandwidth: 2.8 + unit: GHz + polarization: 'QV' + resolution: 10000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + horn: "4" + coordinates: [longitude, latitude] + file_type: aws_l1b_nc + file_key: data/calibration/aws_toa_brightness_temperature + +# --- Coordinates --- + + longitude: + name: longitude + file_type: aws_l1b_nc + standard_name: longitude + units: degrees_east + horn: ["1", "2", "3", "4"] + file_key: data/navigation/aws_lon + + + latitude: + name: latitude + file_type: aws_l1b_nc + standard_name: latitude + units: degrees_north + horn: ["1", "2", "3", "4"] + file_key: data/navigation/aws_lat + + +# --- Navigation data --- + + solar_azimuth_horn1: + name: solar_azimuth_horn1 + file_type: aws_l1b_nc + file_key: data/navigation/aws_solar_azimuth_angle + standard_name: solar_azimuth_angle + horn: "1" + coordinates: + - longitude + - latitude + + solar_azimuth_horn2: + name: solar_azimuth_horn2 + file_type: aws_l1b_nc + file_key: data/navigation/aws_solar_azimuth_angle + standard_name: solar_azimuth_angle + horn: "2" + coordinates: + - longitude + - latitude + + solar_azimuth_horn3: + name: solar_azimuth_horn3 + file_type: aws_l1b_nc + file_key: data/navigation/aws_solar_azimuth_angle + standard_name: solar_azimuth_angle + horn: "3" + coordinates: + - longitude + - latitude + + solar_azimuth_horn4: + name: solar_azimuth_horn4 + file_type: aws_l1b_nc + file_key: data/navigation/aws_solar_azimuth_angle + standard_name: solar_azimuth_angle + horn: "4" + coordinates: + - longitude + - latitude + + solar_zenith_horn1: + name: solar_zenith_horn1 + file_type: aws_l1b_nc + file_key: data/navigation/aws_solar_zenith_angle + standard_name: solar_zenith_angle + horn: "1" + coordinates: + - longitude + - latitude + + solar_zenith_horn2: + name: solar_zenith_horn2 + file_type: aws_l1b_nc + file_key: data/navigation/aws_solar_zenith_angle + standard_name: solar_zenith_angle + horn: "2" + coordinates: + - longitude + - latitude + + solar_zenith_horn3: + name: solar_zenith_horn3 + file_type: aws_l1b_nc + file_key: data/navigation/aws_solar_zenith_angle + standard_name: solar_zenith_angle + horn: "3" + coordinates: + - longitude + - latitude + + solar_zenith_horn4: + name: solar_zenith_horn4 + file_type: aws_l1b_nc + file_key: data/navigation/aws_solar_zenith_angle + standard_name: solar_zenith_angle + horn: "4" + coordinates: + - longitude + - latitude + + satellite_zenith_horn1: + name: satellite_zenith_horn1 + file_type: aws_l1b_nc + file_key: data/navigation/aws_satellite_zenith_angle + standard_name: satellite_zenith_angle + horn: "1" + coordinates: + - longitude + - latitude + + satellite_zenith_horn2: + name: satellite_zenith_horn2 + file_type: aws_l1b_nc + file_key: data/navigation/aws_satellite_zenith_angle + standard_name: satellite_zenith_angle + horn: "2" + coordinates: + - longitude + - latitude + + satellite_zenith_horn3: + name: satellite_zenith_horn3 + file_type: aws_l1b_nc + file_key: data/navigation/aws_satellite_zenith_angle + standard_name: satellite_zenith_angle + horn: "3" + coordinates: + - longitude + - latitude + + satellite_zenith_horn4: + name: satellite_zenith_horn4 + file_type: aws_l1b_nc + file_key: data/navigation/aws_satellite_zenith_angle + standard_name: satellite_zenith_angle + horn: "4" + coordinates: + - longitude + - latitude + + satellite_azimuth_horn1: + name: satellite_azimuth_horn1 + 
file_type: aws_l1b_nc + file_key: data/navigation/aws_satellite_azimuth_angle + standard_name: satellite_azimuth_angle + horn: "1" + coordinates: + - longitude + - latitude + + satellite_azimuth_horn2: + name: satellite_azimuth_horn2 + file_type: aws_l1b_nc + file_key: data/navigation/aws_satellite_azimuth_angle + standard_name: satellite_azimuth_angle + horn: "2" + coordinates: + - longitude + - latitude + + satellite_azimuth_horn3: + name: satellite_azimuth_horn3 + file_type: aws_l1b_nc + file_key: data/navigation/aws_satellite_azimuth_angle + standard_name: satellite_azimuth_angle + horn: "3" + coordinates: + - longitude + - latitude + + satellite_azimuth_horn4: + name: satellite_azimuth_horn4 + file_type: aws_l1b_nc + file_key: data/navigation/aws_satellite_azimuth_angle + standard_name: satellite_azimuth_angle + horn: "4" + coordinates: + - longitude + - latitude + +file_types: + aws_l1b_nc: + # W_XX-OHB-Unknown,SAT,1-AWS-1B-RAD_C_OHB_20230707124607_G_D_20220621090100_20220621090618_T_B____.nc + # W_XX-OHB-Stockholm,SAT,AWS1-MWR-1B-RAD_C_OHB_20230823161321_G_D_20240115111111_20240115125434_T_B____.nc + # W_NO-KSAT-Tromso,SAT,AWS1-MWR-1B-RAD_C_OHB__20250110134851_G_O_20250110114708_20250110132329_C_N____.nc + file_reader: !!python/name:satpy.readers.mwr_l1b.AWS_EPS_Sterna_MWR_L1BFile + file_patterns: [ + 'W_{country:2s}-{organisation:s}-{location:s},SAT,{platform_name}-MWR-1B-RAD_C_{originator:4s}_{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{disposition_mode:1s}_{processing_mode:1s}____.nc', + 'W_{country:2s}-{organisation:s}-{location:s},SAT,{platform_name}-MWR-1B-RAD_C_{originator:4s}_{processing_time:%Y%m%d%H%M%S}_G_O_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{disposition_mode:1s}_{processing_mode:1s}____.nc' + ] + feed_horn_group_name: n_geo_groups diff --git a/satpy/etc/readers/aws1_mwr_l1c_nc.yaml b/satpy/etc/readers/aws1_mwr_l1c_nc.yaml new file mode 100644 index 0000000000..e27cc99a2d --- /dev/null +++ b/satpy/etc/readers/aws1_mwr_l1c_nc.yaml @@ -0,0 +1,372 @@ +reader: + name: aws1_mwr_l1c_nc + short_name: AWS1 MWR L1C + long_name: AWS1 MWR L1C Radiance (NetCDF4) + description: Reader for the ESA AWS (Arctic Weather Satellite) MWR level-1c files in netCDF4. 
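+  # Usage sketch (reader name as defined here; filenames hypothetical):
+  #   scn = Scene(filenames=my_l1c_files, reader="aws1_mwr_l1c_nc")
+  #   scn.load(["10"])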
+ reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader + sensors: [mwr,] + status: Beta + supports_fsspec: false + + data_identification_keys: + name: + required: true + frequency_double_sideband: + type: !!python/name:satpy.readers.pmw_channels_definitions.FrequencyDoubleSideBand + frequency_range: + type: !!python/name:satpy.readers.pmw_channels_definitions.FrequencyRange + resolution: + polarization: + enum: + - QH + - QV + calibration: + enum: + - brightness_temperature + transitive: true + modifiers: + required: true + default: [] + type: !!python/name:satpy.dataset.ModifierTuple + + coord_identification_keys: + name: + required: true + resolution: + polarization: + enum: + - QH + - QV + +datasets: + '1': + name: '1' + frequency_range: + central: 50.3 + bandwidth: 0.180 + unit: GHz + polarization: 'QH' + resolution: 40000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [longitude, latitude] + file_type: aws_l1c_nc + file_key: data/calibration/aws_toa_brightness_temperature + '2': + name: '2' + frequency_range: + central: 52.8 + bandwidth: 0.400 + unit: GHz + polarization: 'QH' + resolution: 40000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [longitude, latitude] + file_type: aws_l1c_nc + file_key: data/calibration/aws_toa_brightness_temperature + '3': + name: '3' + frequency_range: + central: 53.246 + bandwidth: 0.300 + unit: GHz + polarization: 'QH' + resolution: 40000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [longitude, latitude] + file_type: aws_l1c_nc + file_key: data/calibration/aws_toa_brightness_temperature + '4': + name: '4' + frequency_range: + central: 53.596 + bandwidth: 0.370 + unit: GHz + polarization: 'QH' + resolution: 40000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [longitude, latitude] + file_type: aws_l1c_nc + file_key: data/calibration/aws_toa_brightness_temperature + '5': + name: '5' + frequency_range: + central: 54.4 + bandwidth: 0.400 + unit: GHz + polarization: 'QH' + resolution: 40000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [longitude, latitude] + file_type: aws_l1c_nc + file_key: data/calibration/aws_toa_brightness_temperature + '6': + name: '6' + frequency_range: + central: 54.94 + bandwidth: 0.400 + unit: GHz + polarization: 'QH' + resolution: 40000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [longitude, latitude] + file_type: aws_l1c_nc + file_key: data/calibration/aws_toa_brightness_temperature + '7': + name: '7' + frequency_range: + central: 55.5 + bandwidth: 0.330 + unit: GHz + polarization: 'QH' + resolution: 40000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [longitude, latitude] + file_type: aws_l1c_nc + file_key: data/calibration/aws_toa_brightness_temperature + '8': + name: '8' + frequency_range: + central: 57.290344 + bandwidth: 0.330 + unit: GHz + polarization: 'QH' + resolution: 40000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [longitude, latitude] + file_type: aws_l1c_nc + file_key: data/calibration/aws_toa_brightness_temperature + '9': + name: '9' + frequency_range: + central: 89.0 + bandwidth: 4.0 + unit: GHz + polarization: 'QV' + resolution: 20000 + calibration: + brightness_temperature: 
+ standard_name: toa_brightness_temperature + coordinates: [longitude, latitude] + file_type: aws_l1c_nc + file_key: data/calibration/aws_toa_brightness_temperature + '10': + name: '10' + frequency_range: + central: 165.5 + bandwidth: 2.700 + unit: GHz + polarization: 'QH' + resolution: 20000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [longitude, latitude] + file_type: aws_l1c_nc + file_key: data/calibration/aws_toa_brightness_temperature + '11': + name: '11' + frequency_range: + central: 176.311 + bandwidth: 2.0 + unit: GHz + polarization: 'QV' + resolution: 10000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [longitude, latitude] + file_type: aws_l1c_nc + file_key: data/calibration/aws_toa_brightness_temperature + '12': + name: '12' + frequency_range: + central: 178.811 + bandwidth: 2.0 + unit: GHz + polarization: 'QV' + resolution: 10000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [longitude, latitude] + file_type: aws_l1c_nc + file_key: data/calibration/aws_toa_brightness_temperature + '13': + name: '13' + frequency_range: + central: 180.311 + bandwidth: 1.0 + unit: GHz + polarization: 'QV' + resolution: 10000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [longitude, latitude] + file_type: aws_l1c_nc + file_key: data/calibration/aws_toa_brightness_temperature + '14': + name: '14' + frequency_range: + central: 181.511 + bandwidth: 1.0 + unit: GHz + polarization: 'QV' + resolution: 10000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [longitude, latitude] + file_type: aws_l1c_nc + file_key: data/calibration/aws_toa_brightness_temperature + '15': + name: '15' + frequency_range: + central: 182.311 + bandwidth: 0.5 + unit: GHz + polarization: 'QV' + resolution: 10000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [longitude, latitude] + file_type: aws_l1c_nc + file_key: data/calibration/aws_toa_brightness_temperature + '16': + name: '16' + frequency_double_sideband: + central: 325.15 + side: 1.2 + bandwidth: 0.8 + unit: GHz + polarization: 'QV' + resolution: 10000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [longitude, latitude] + file_type: aws_l1c_nc + file_key: data/calibration/aws_toa_brightness_temperature + '17': + name: '17' + frequency_double_sideband: + central: 325.15 + side: 2.4 + bandwidth: 1.2 + unit: GHz + polarization: 'QV' + resolution: 10000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [longitude, latitude] + file_type: aws_l1c_nc + file_key: data/calibration/aws_toa_brightness_temperature + '18': + name: '18' + frequency_double_sideband: + central: 325.15 + side: 4.1 + bandwidth: 1.8 + unit: GHz + polarization: 'QV' + resolution: 10000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [longitude, latitude] + file_type: aws_l1c_nc + file_key: data/calibration/aws_toa_brightness_temperature + '19': + name: '19' + frequency_double_sideband: + central: 325.15 + side: 6.6 + bandwidth: 2.8 + unit: GHz + polarization: 'QV' + resolution: 10000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [longitude, latitude] + file_type: aws_l1c_nc + file_key: 
data/calibration/aws_toa_brightness_temperature + +# --- Coordinates --- + + longitude: + name: longitude + file_type: aws_l1c_nc + standard_name: longitude + units: degrees_east + file_key: data/navigation/aws_lon + + latitude: + name: latitude + file_type: aws_l1c_nc + standard_name: latitude + units: degrees_north + file_key: data/navigation/aws_lat + +# --- Navigation data --- + + solar_azimuth_angle: + name: solar_azimuth_angle + file_type: aws_l1c_nc + file_key: data/navigation/aws_solar_azimuth_angle + standard_name: solar_azimuth_angle + coordinates: + - longitude + - latitude + + solar_zenith_angle: + name: solar_zenith_angle + file_type: aws_l1c_nc + file_key: data/navigation/aws_solar_zenith_angle + standard_name: solar_zenith_angle + coordinates: + - longitude + - latitude + + satellite_azimuth_angle: + name: satellite_azimuth_angle + file_type: aws_l1c_nc + file_key: data/navigation/aws_satellite_azimuth_angle + standard_name: satellite_azimuth_angle + coordinates: + - longitude + - latitude + + satellite_zenith_angle: + name: satellite_zenith_angle + file_type: aws_l1c_nc + file_key: data/navigation/aws_satellite_zenith_angle + standard_name: satellite_zenith_angle + coordinates: + - longitude + - latitude + +file_types: + aws_l1c_nc: + file_reader: !!python/name:satpy.readers.mwr_l1c.AWS_MWR_L1CFile + file_patterns: [ + 'W_{country:2s}-{organisation:s}-{location:s},SAT,{platform_name}-MWR-1C-RAD_C_{originator:4s}_{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{disposition_mode:1s}_{processing_mode:1s}____.nc' + ] diff --git a/satpy/etc/readers/eps_sterna_mwr_l1b_nc.yaml b/satpy/etc/readers/eps_sterna_mwr_l1b_nc.yaml new file mode 100644 index 0000000000..d08113270a --- /dev/null +++ b/satpy/etc/readers/eps_sterna_mwr_l1b_nc.yaml @@ -0,0 +1,533 @@ +reader: + name: eps_sterna_mwr_l1b_nc + short_name: EPS-Sterna MWR L1B + long_name: EPS-Sterna MWR L1B Radiance (NetCDF4) + description: Reader for the EUMETSAT EPS-Sterna radiometer level-1b files in netCDF4. 
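+  # Channel layout mirrors the aws1_mwr_l1b_nc reader above; usage sketch
+  # (filenames hypothetical):
+  #   scn = Scene(filenames=my_files, reader="eps_sterna_mwr_l1b_nc")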
+ reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader + sensors: [mwr,] + status: Beta + supports_fsspec: false + + data_identification_keys: + name: + required: true + frequency_double_sideband: + type: !!python/name:satpy.readers.pmw_channels_definitions.FrequencyDoubleSideBand + frequency_range: + type: !!python/name:satpy.readers.pmw_channels_definitions.FrequencyRange + resolution: + polarization: + enum: + - QH + - QV + horn: + enum: + - "1" + - "2" + - "3" + - "4" + calibration: + enum: + - brightness_temperature + transitive: true + modifiers: + required: true + default: [] + type: !!python/name:satpy.dataset.ModifierTuple + + coord_identification_keys: + name: + required: true + resolution: + polarization: + enum: + - QH + - QV + horn: + enum: + - "1" + - "2" + - "3" + - "4" + +datasets: + '1': + name: '1' + frequency_range: + central: 50.3 + bandwidth: 0.180 + unit: GHz + polarization: 'QH' + resolution: 40000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + horn: "1" + coordinates: [longitude, latitude] + file_type: eps_sterna_l1b_nc + file_key: data/calibration/toa_brightness_temperature + '2': + name: '2' + frequency_range: + central: 52.8 + bandwidth: 0.400 + unit: GHz + polarization: 'QH' + resolution: 40000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + horn: "1" + coordinates: [longitude, latitude] + file_type: eps_sterna_l1b_nc + file_key: data/calibration/toa_brightness_temperature + '3': + name: '3' + frequency_range: + central: 53.246 + bandwidth: 0.300 + unit: GHz + polarization: 'QH' + resolution: 40000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + horn: "1" + coordinates: [longitude, latitude] + file_type: eps_sterna_l1b_nc + file_key: data/calibration/toa_brightness_temperature + '4': + name: '4' + frequency_range: + central: 53.596 + bandwidth: 0.370 + unit: GHz + polarization: 'QH' + resolution: 40000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + horn: "1" + coordinates: [longitude, latitude] + file_type: eps_sterna_l1b_nc + file_key: data/calibration/toa_brightness_temperature + '5': + name: '5' + frequency_range: + central: 54.4 + bandwidth: 0.400 + unit: GHz + polarization: 'QH' + resolution: 40000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + horn: "1" + coordinates: [longitude, latitude] + file_type: eps_sterna_l1b_nc + file_key: data/calibration/toa_brightness_temperature + '6': + name: '6' + frequency_range: + central: 54.94 + bandwidth: 0.400 + unit: GHz + polarization: 'QH' + resolution: 40000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + horn: "1" + coordinates: [longitude, latitude] + file_type: eps_sterna_l1b_nc + file_key: data/calibration/toa_brightness_temperature + '7': + name: '7' + frequency_range: + central: 55.5 + bandwidth: 0.330 + unit: GHz + polarization: 'QH' + resolution: 40000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + horn: "1" + coordinates: [longitude, latitude] + file_type: eps_sterna_l1b_nc + file_key: data/calibration/toa_brightness_temperature + '8': + name: '8' + frequency_range: + central: 57.290344 + bandwidth: 0.330 + unit: GHz + polarization: 'QH' + resolution: 40000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + horn: "1" + coordinates: [longitude, latitude] + file_type: eps_sterna_l1b_nc + 
file_key: data/calibration/toa_brightness_temperature + '9': + name: '9' + frequency_range: + central: 89.0 + bandwidth: 4.0 + unit: GHz + polarization: 'QV' + resolution: 20000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + horn: "2" + coordinates: [longitude, latitude] + file_type: eps_sterna_l1b_nc + file_key: data/calibration/toa_brightness_temperature + '10': + name: '10' + frequency_range: + central: 165.5 + bandwidth: 2.700 + unit: GHz + polarization: 'QH' + resolution: 20000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + horn: "3" + coordinates: [longitude, latitude] + file_type: eps_sterna_l1b_nc + file_key: data/calibration/toa_brightness_temperature + '11': + name: '11' + frequency_range: + central: 176.311 + bandwidth: 2.0 + unit: GHz + polarization: 'QV' + resolution: 10000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + horn: "3" + coordinates: [longitude, latitude] + file_type: eps_sterna_l1b_nc + file_key: data/calibration/toa_brightness_temperature + '12': + name: '12' + frequency_range: + central: 178.811 + bandwidth: 2.0 + unit: GHz + polarization: 'QV' + resolution: 10000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + horn: "3" + coordinates: [longitude, latitude] + file_type: eps_sterna_l1b_nc + file_key: data/calibration/toa_brightness_temperature + '13': + name: '13' + frequency_range: + central: 180.311 + bandwidth: 1.0 + unit: GHz + polarization: 'QV' + resolution: 10000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + horn: "3" + coordinates: [longitude, latitude] + file_type: eps_sterna_l1b_nc + file_key: data/calibration/toa_brightness_temperature + '14': + name: '14' + frequency_range: + central: 181.511 + bandwidth: 1.0 + unit: GHz + polarization: 'QV' + resolution: 10000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + horn: "3" + coordinates: [longitude, latitude] + file_type: eps_sterna_l1b_nc + file_key: data/calibration/toa_brightness_temperature + '15': + name: '15' + frequency_range: + central: 182.311 + bandwidth: 0.5 + unit: GHz + polarization: 'QV' + resolution: 10000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + horn: "3" + coordinates: [longitude, latitude] + file_type: eps_sterna_l1b_nc + file_key: data/calibration/toa_brightness_temperature + '16': + name: '16' + frequency_double_sideband: + central: 325.15 + side: 1.2 + bandwidth: 0.8 + unit: GHz + polarization: 'QV' + resolution: 10000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + horn: "4" + coordinates: [longitude, latitude] + file_type: eps_sterna_l1b_nc + file_key: data/calibration/toa_brightness_temperature + '17': + name: '17' + frequency_double_sideband: + central: 325.15 + side: 2.4 + bandwidth: 1.2 + unit: GHz + polarization: 'QV' + resolution: 10000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + horn: "4" + coordinates: [longitude, latitude] + file_type: eps_sterna_l1b_nc + file_key: data/calibration/toa_brightness_temperature + '18': + name: '18' + frequency_double_sideband: + central: 325.15 + side: 4.1 + bandwidth: 1.8 + unit: GHz + polarization: 'QV' + resolution: 10000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + horn: "4" + coordinates: [longitude, latitude] + file_type: 
eps_sterna_l1b_nc + file_key: data/calibration/toa_brightness_temperature + '19': + name: '19' + frequency_double_sideband: + central: 325.15 + side: 6.6 + bandwidth: 2.8 + unit: GHz + polarization: 'QV' + resolution: 10000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + horn: "4" + coordinates: [longitude, latitude] + file_type: eps_sterna_l1b_nc + file_key: data/calibration/toa_brightness_temperature + +# --- Coordinates --- + + longitude: + name: longitude + file_type: eps_sterna_l1b_nc + standard_name: longitude + units: degrees_east + horn: ["1", "2", "3", "4"] + file_key: data/navigation/longitude + + + latitude: + name: latitude + file_type: eps_sterna_l1b_nc + standard_name: latitude + units: degrees_north + horn: ["1", "2", "3", "4"] + file_key: data/navigation/latitude + + +# --- Navigation data --- + + solar_azimuth_horn1: + name: solar_azimuth_horn1 + file_type: eps_sterna_l1b_nc + file_key: data/navigation/solar_azimuth_angle + standard_name: solar_azimuth_angle + horn: "1" + coordinates: + - longitude + - latitude + + solar_azimuth_horn2: + name: solar_azimuth_horn2 + file_type: eps_sterna_l1b_nc + file_key: data/navigation/solar_azimuth_angle + standard_name: solar_azimuth_angle + horn: "2" + coordinates: + - longitude + - latitude + + solar_azimuth_horn3: + name: solar_azimuth_horn3 + file_type: eps_sterna_l1b_nc + file_key: data/navigation/solar_azimuth_angle + standard_name: solar_azimuth_angle + horn: "3" + coordinates: + - longitude + - latitude + + solar_azimuth_horn4: + name: solar_azimuth_horn4 + file_type: eps_sterna_l1b_nc + file_key: data/navigation/solar_azimuth_angle + standard_name: solar_azimuth_angle + horn: "4" + coordinates: + - longitude + - latitude + + solar_zenith_horn1: + name: solar_zenith_horn1 + file_type: eps_sterna_l1b_nc + file_key: data/navigation/solar_zenith_angle + standard_name: solar_zenith_angle + horn: "1" + coordinates: + - longitude + - latitude + + solar_zenith_horn2: + name: solar_zenith_horn2 + file_type: eps_sterna_l1b_nc + file_key: data/navigation/solar_zenith_angle + standard_name: solar_zenith_angle + horn: "2" + coordinates: + - longitude + - latitude + + solar_zenith_horn3: + name: solar_zenith_horn3 + file_type: eps_sterna_l1b_nc + file_key: data/navigation/solar_zenith_angle + standard_name: solar_zenith_angle + horn: "3" + coordinates: + - longitude + - latitude + + solar_zenith_horn4: + name: solar_zenith_horn4 + file_type: eps_sterna_l1b_nc + file_key: data/navigation/solar_zenith_angle + standard_name: solar_zenith_angle + horn: "4" + coordinates: + - longitude + - latitude + + satellite_zenith_horn1: + name: satellite_zenith_horn1 + file_type: eps_sterna_l1b_nc + file_key: data/navigation/satellite_zenith_angle + standard_name: satellite_zenith_angle + horn: "1" + coordinates: + - longitude + - latitude + + satellite_zenith_horn2: + name: satellite_zenith_horn2 + file_type: eps_sterna_l1b_nc + file_key: data/navigation/satellite_zenith_angle + standard_name: satellite_zenith_angle + horn: "2" + coordinates: + - longitude + - latitude + + satellite_zenith_horn3: + name: satellite_zenith_horn3 + file_type: eps_sterna_l1b_nc + file_key: data/navigation/satellite_zenith_angle + standard_name: satellite_zenith_angle + horn: "3" + coordinates: + - longitude + - latitude + + satellite_zenith_horn4: + name: satellite_zenith_horn4 + file_type: eps_sterna_l1b_nc + file_key: data/navigation/satellite_zenith_angle + standard_name: satellite_zenith_angle + horn: "4" + coordinates: + - longitude 
+ - latitude + + satellite_azimuth_horn1: + name: satellite_azimuth_horn1 + file_type: eps_sterna_l1b_nc + file_key: data/navigation/satellite_azimuth_angle + standard_name: satellite_azimuth_angle + horn: "1" + coordinates: + - longitude + - latitude + + satellite_azimuth_horn2: + name: satellite_azimuth_horn2 + file_type: eps_sterna_l1b_nc + file_key: data/navigation/satellite_azimuth_angle + standard_name: satellite_azimuth_angle + horn: "2" + coordinates: + - longitude + - latitude + + satellite_azimuth_horn3: + name: satellite_azimuth_horn3 + file_type: eps_sterna_l1b_nc + file_key: data/navigation/satellite_azimuth_angle + standard_name: satellite_azimuth_angle + horn: "3" + coordinates: + - longitude + - latitude + + satellite_azimuth_horn4: + name: satellite_azimuth_horn4 + file_type: eps_sterna_l1b_nc + file_key: data/navigation/satellite_azimuth_angle + standard_name: satellite_azimuth_angle + horn: "4" + coordinates: + - longitude + - latitude + +file_types: + eps_sterna_l1b_nc: + # W_XX-EUMETSAT-Darmstadt,SAT,AWS1-MWR-1B-RAD_C_EUMT_20241121085911_G_D_20241109234502_20241110004559_T_N____.nc + file_reader: !!python/name:satpy.readers.mwr_l1b.AWS_EPS_Sterna_MWR_L1BFile + file_patterns: [ + 'W_{country:2s}-{organisation:s}-{location:s},SAT,{platform_name}-MWR-1B-RAD_C_{originator:4s}_{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{disposition_mode:1s}_{processing_mode:1s}____.nc' + ] + feed_horn_group_name: n_feedhorns diff --git a/satpy/etc/readers/fci_l2_grib.yaml b/satpy/etc/readers/fci_l2_grib.yaml new file mode 100644 index 0000000000..cc16c77081 --- /dev/null +++ b/satpy/etc/readers/fci_l2_grib.yaml @@ -0,0 +1,28 @@ +reader: + name: fci_l2_grib + short_name: FCI L2 GRIB2 + long_name: MTG FCI L2 data in GRIB2 format + description: Reader for EUMETSAT MTG FCI L2 files in GRIB2 format. 
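+  # Usage sketch (dataset name as defined under datasets below; filenames
+  # hypothetical):
+  #   scn = Scene(filenames=my_grib_files, reader="fci_l2_grib")
+  #   scn.load(["cloud_mask"])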
+ status: Nominal + supports_fsspec: false + sensors: [fci] + reader: !!python/name:satpy.readers.yaml_reader.GEOFlippableFileYAMLReader + +file_types: + grib_fci_clm: + file_reader: !!python/name:satpy.readers.eum_l2_grib.EUML2GribFileHandler + file_patterns: + - '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+FCI-2-CLM-{subtype}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-GRIB2_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.bin' + + +datasets: + cloud_mask: + name: cloud_mask + long_name: Cloud Classification + standard_name: cloud_classification + resolution: 2000 + file_type: grib_fci_clm + parameter_number: 7 + units: "1" + flag_values: [0, 1, 2, 3] + flag_meanings: ['clear sky over water','clear sky over land', 'cloudy', 'undefined' ] diff --git a/satpy/etc/readers/fci_l2_nc.yaml b/satpy/etc/readers/fci_l2_nc.yaml index 6a465f3f7a..ec15013555 100644 --- a/satpy/etc/readers/fci_l2_nc.yaml +++ b/satpy/etc/readers/fci_l2_nc.yaml @@ -1129,7 +1129,7 @@ datasets: nc_key: cloud_mask_test_result extract_byte: 0 flag_values: [0,1] - flag_meanings: ['No snow/ice detected',' Snow/ice detected'] + flag_meanings: ['No snow/ice detected','Snow/ice detected'] cloud_test_cmt1: name: cloud_test_cmt1 diff --git a/satpy/etc/readers/mersi2_l1b.yaml b/satpy/etc/readers/mersi2_l1b.yaml index 3e0ecb390c..352a3a4c83 100644 --- a/satpy/etc/readers/mersi2_l1b.yaml +++ b/satpy/etc/readers/mersi2_l1b.yaml @@ -176,7 +176,7 @@ datasets: file_key: Data/EV_1KM_RefSB band_index: 0 calibration_key: Calibration/VIS_Cal_Coeff - calibration_index: 0 + calibration_index: 4 coordinates: [longitude, latitude] calibration: reflectance: @@ -196,7 +196,7 @@ datasets: file_key: Data/EV_1KM_RefSB band_index: 1 calibration_key: Calibration/VIS_Cal_Coeff - calibration_index: 1 + calibration_index: 5 coordinates: [longitude, latitude] calibration: reflectance: @@ -216,7 +216,7 @@ datasets: file_key: Data/EV_1KM_RefSB band_index: 2 calibration_key: Calibration/VIS_Cal_Coeff - calibration_index: 2 + calibration_index: 6 coordinates: [longitude, latitude] calibration: reflectance: @@ -236,7 +236,7 @@ datasets: file_key: Data/EV_1KM_RefSB band_index: 3 calibration_key: Calibration/VIS_Cal_Coeff - calibration_index: 3 + calibration_index: 7 coordinates: [longitude, latitude] calibration: reflectance: @@ -256,7 +256,7 @@ datasets: file_key: Data/EV_1KM_RefSB band_index: 4 calibration_key: Calibration/VIS_Cal_Coeff - calibration_index: 4 + calibration_index: 8 coordinates: [longitude, latitude] calibration: reflectance: @@ -276,7 +276,7 @@ datasets: file_key: Data/EV_1KM_RefSB band_index: 5 calibration_key: Calibration/VIS_Cal_Coeff - calibration_index: 5 + calibration_index: 9 coordinates: [longitude, latitude] calibration: reflectance: @@ -296,7 +296,7 @@ datasets: file_key: Data/EV_1KM_RefSB band_index: 6 calibration_key: Calibration/VIS_Cal_Coeff - calibration_index: 6 + calibration_index: 10 coordinates: [longitude, latitude] calibration: reflectance: @@ -316,7 +316,7 @@ datasets: file_key: Data/EV_1KM_RefSB band_index: 7 calibration_key: Calibration/VIS_Cal_Coeff - calibration_index: 7 + calibration_index: 11 coordinates: [longitude, latitude] calibration: reflectance: @@ -336,7 +336,7 @@ datasets: file_key: Data/EV_1KM_RefSB band_index: 8 calibration_key: Calibration/VIS_Cal_Coeff 
- calibration_index: 8 + calibration_index: 12 coordinates: [longitude, latitude] calibration: reflectance: @@ -356,7 +356,7 @@ datasets: file_key: Data/EV_1KM_RefSB band_index: 9 calibration_key: Calibration/VIS_Cal_Coeff - calibration_index: 9 + calibration_index: 13 coordinates: [longitude, latitude] calibration: reflectance: @@ -376,7 +376,7 @@ datasets: file_key: Data/EV_1KM_RefSB band_index: 10 calibration_key: Calibration/VIS_Cal_Coeff - calibration_index: 10 + calibration_index: 14 coordinates: [longitude, latitude] calibration: reflectance: @@ -396,7 +396,7 @@ datasets: file_key: Data/EV_1KM_RefSB band_index: 11 calibration_key: Calibration/VIS_Cal_Coeff - calibration_index: 11 + calibration_index: 15 coordinates: [longitude, latitude] calibration: reflectance: @@ -416,7 +416,7 @@ datasets: file_key: Data/EV_1KM_RefSB band_index: 12 calibration_key: Calibration/VIS_Cal_Coeff - calibration_index: 12 + calibration_index: 16 coordinates: [longitude, latitude] calibration: reflectance: @@ -436,7 +436,7 @@ datasets: file_key: Data/EV_1KM_RefSB band_index: 13 calibration_key: Calibration/VIS_Cal_Coeff - calibration_index: 13 + calibration_index: 17 coordinates: [longitude, latitude] calibration: reflectance: @@ -456,7 +456,7 @@ datasets: file_key: Data/EV_1KM_RefSB band_index: 14 calibration_key: Calibration/VIS_Cal_Coeff - calibration_index: 14 + calibration_index: 18 coordinates: [longitude, latitude] calibration: reflectance: diff --git a/satpy/etc/readers/mersi3_l1b.yaml b/satpy/etc/readers/mersi3_l1b.yaml index d7078786db..4cc35efa40 100644 --- a/satpy/etc/readers/mersi3_l1b.yaml +++ b/satpy/etc/readers/mersi3_l1b.yaml @@ -164,7 +164,7 @@ datasets: file_key: Data/EV_1KM_RefSB band_index: 0 calibration_key: Calibration/VIS_Cal_Coeff - calibration_index: 0 + calibration_index: 4 coordinates: [longitude, latitude] calibration: reflectance: @@ -184,7 +184,7 @@ datasets: file_key: Data/EV_1KM_RefSB band_index: 1 calibration_key: Calibration/VIS_Cal_Coeff - calibration_index: 1 + calibration_index: 5 coordinates: [longitude, latitude] calibration: reflectance: @@ -204,7 +204,7 @@ datasets: file_key: Data/EV_1KM_RefSB band_index: 2 calibration_key: Calibration/VIS_Cal_Coeff - calibration_index: 2 + calibration_index: 6 coordinates: [longitude, latitude] calibration: reflectance: @@ -224,7 +224,7 @@ datasets: file_key: Data/EV_1KM_RefSB band_index: 3 calibration_key: Calibration/VIS_Cal_Coeff - calibration_index: 3 + calibration_index: 7 coordinates: [longitude, latitude] calibration: reflectance: @@ -244,7 +244,7 @@ datasets: file_key: Data/EV_1KM_RefSB band_index: 4 calibration_key: Calibration/VIS_Cal_Coeff - calibration_index: 4 + calibration_index: 8 coordinates: [longitude, latitude] calibration: reflectance: @@ -264,7 +264,7 @@ datasets: file_key: Data/EV_1KM_RefSB band_index: 5 calibration_key: Calibration/VIS_Cal_Coeff - calibration_index: 5 + calibration_index: 9 coordinates: [longitude, latitude] calibration: reflectance: @@ -284,7 +284,7 @@ datasets: file_key: Data/EV_1KM_RefSB band_index: 6 calibration_key: Calibration/VIS_Cal_Coeff - calibration_index: 6 + calibration_index: 10 coordinates: [longitude, latitude] calibration: reflectance: @@ -304,7 +304,7 @@ datasets: file_key: Data/EV_1KM_RefSB band_index: 7 calibration_key: Calibration/VIS_Cal_Coeff - calibration_index: 7 + calibration_index: 11 coordinates: [longitude, latitude] calibration: reflectance: @@ -324,7 +324,7 @@ datasets: file_key: Data/EV_1KM_RefSB band_index: 8 calibration_key: Calibration/VIS_Cal_Coeff - 
calibration_index: 8 + calibration_index: 12 coordinates: [longitude, latitude] calibration: reflectance: @@ -344,7 +344,7 @@ datasets: file_key: Data/EV_1KM_RefSB band_index: 9 calibration_key: Calibration/VIS_Cal_Coeff - calibration_index: 9 + calibration_index: 13 coordinates: [longitude, latitude] calibration: reflectance: @@ -364,7 +364,7 @@ datasets: file_key: Data/EV_1KM_RefSB band_index: 10 calibration_key: Calibration/VIS_Cal_Coeff - calibration_index: 10 + calibration_index: 14 coordinates: [longitude, latitude] calibration: reflectance: @@ -384,7 +384,7 @@ datasets: file_key: Data/EV_1KM_RefSB band_index: 11 calibration_key: Calibration/VIS_Cal_Coeff - calibration_index: 11 + calibration_index: 15 coordinates: [longitude, latitude] calibration: reflectance: @@ -404,7 +404,7 @@ datasets: file_key: Data/EV_1KM_RefSB band_index: 12 calibration_key: Calibration/VIS_Cal_Coeff - calibration_index: 12 + calibration_index: 16 coordinates: [longitude, latitude] calibration: reflectance: @@ -424,7 +424,7 @@ datasets: file_key: Data/EV_1KM_RefSB band_index: 13 calibration_key: Calibration/VIS_Cal_Coeff - calibration_index: 13 + calibration_index: 17 coordinates: [longitude, latitude] calibration: reflectance: @@ -444,7 +444,7 @@ datasets: file_key: Data/EV_1KM_RefSB band_index: 14 calibration_key: Calibration/VIS_Cal_Coeff - calibration_index: 14 + calibration_index: 18 coordinates: [longitude, latitude] calibration: reflectance: diff --git a/satpy/etc/readers/mersi_ll_l1b.yaml b/satpy/etc/readers/mersi_ll_l1b.yaml index 47b6d432b0..c898ae11cf 100644 --- a/satpy/etc/readers/mersi_ll_l1b.yaml +++ b/satpy/etc/readers/mersi_ll_l1b.yaml @@ -272,3 +272,27 @@ datasets: coordinates: [longitude, latitude] file_type: mersi_ll_l1b_1000_geo file_key: Geolocation/MoonAzimuth + altitude: + name: altitude + units: degree + standard_name: altitude + resolution: 1000 + coordinates: [longitude, latitude] + file_type: mersi_ll_l1b_1000_geo + file_key: Geolocation/Altitude + landcover: + name: landcover + units: degree + standard_name: landcover + resolution: 1000 + coordinates: [longitude, latitude] + file_type: mersi_ll_l1b_1000_geo + file_key: Geolocation/LandCover + landseamask: + name: landseamask + units: degree + standard_name: landseamask + resolution: 1000 + coordinates: [longitude, latitude] + file_type: mersi_ll_l1b_1000_geo + file_key: Geolocation/LandSeaMask diff --git a/satpy/etc/readers/modis_l1b.yaml b/satpy/etc/readers/modis_l1b.yaml index 17bdf134bf..94e8d3fb20 100644 --- a/satpy/etc/readers/modis_l1b.yaml +++ b/satpy/etc/readers/modis_l1b.yaml @@ -486,6 +486,34 @@ datasets: coordinates: [longitude, latitude] file_type: [hdf_eos_geo, hdf_eos_data_1000m] + landsea_mask: + name: landsea_mask + sensor: modis + resolution: 1000 + coordinates: [longitude, latitude] + file_type: [hdf_eos_geo] + + height: + name: height + sensor: modis + resolution: 1000 + coordinates: [longitude, latitude] + file_type: [hdf_eos_geo] + + range: + name: range + sensor: modis + resolution: 1000 + coordinates: [longitude, latitude] + file_type: [hdf_eos_geo] + + waterpresent: + name: waterpresent + sensor: modis + resolution: 1000 + coordinates: [longitude, latitude] + file_type: [hdf_eos_geo] + file_types: hdf_eos_data_250m: diff --git a/satpy/etc/readers/modis_l2.yaml b/satpy/etc/readers/modis_l2.yaml index c1556a6544..7d53a2f893 100644 --- a/satpy/etc/readers/modis_l2.yaml +++ b/satpy/etc/readers/modis_l2.yaml @@ -1,7 +1,7 @@ reader: name: modis_l2 short_name: MODIS l2 - long_name: MODIS Level 2 (mod35) data in 
HDF-EOS format + long_name: Terra and Aqua MODIS Level 2 (mod35) data in HDF-EOS format description: MODIS HDF-EOS L2 Reader status: Beta supports_fsspec: false @@ -11,67 +11,71 @@ reader: file_types: mod05_hdf: file_patterns: - - 'M{platform_indicator:1s}D05_L2.A{start_time:%Y%j.%H%M}.{collection:03d}.{production_time:%Y%j%H%M%S}.hdf' - - '{platform_indicator:1s}1.{start_time:%y%j.%H%M}.mod05.hdf' + - "M{platform_indicator:1s}D05_L2.A{start_time:%Y%j.%H%M}.{collection:03d}.{production_time:%Y%j%H%M%S}.hdf" + - "{platform_indicator:1s}1.{start_time:%y%j.%H%M}.mod05.hdf" file_reader: !!python/name:satpy.readers.modis_l2.ModisL2HDFFileHandler mod35_hdf: file_patterns: - - 'M{platform_indicator:1s}D35_L2.A{start_time:%Y%j.%H%M}.{collection:03d}.{production_time:%Y%j%H%M%S}.hdf' - - '{platform_indicator:1s}1.{start_time:%y%j.%H%M}.mod35.hdf' + - "M{platform_indicator:1s}D35_L2.A{start_time:%Y%j.%H%M}.{collection:03d}.{production_time:%Y%j%H%M%S}.hdf" + - "{platform_indicator:1s}1.{start_time:%y%j.%H%M}.mod35.hdf" file_reader: !!python/name:satpy.readers.modis_l2.ModisL2HDFFileHandler mod06_hdf: file_patterns: - - 'M{platform_indicator:1s}D06_L2.A{start_time:%Y%j.%H%M}.{collection:03d}.{production_time:%Y%j%H%M%S}.hdf' - - '{platform_indicator:1s}1.{start_time:%y%j.%H%M}.mod06.hdf' + - "M{platform_indicator:1s}D06_L2.A{start_time:%Y%j.%H%M}.{collection:03d}.{production_time:%Y%j%H%M%S}.hdf" + - "{platform_indicator:1s}1.{start_time:%y%j.%H%M}.mod06.hdf" file_reader: !!python/name:satpy.readers.modis_l2.ModisL2HDFFileHandler mod06ct_hdf: file_patterns: - - '{platform_indicator:1s}1.{start_time:%y%j.%H%M}.mod06ct.hdf' + - "{platform_indicator:1s}1.{start_time:%y%j.%H%M}.mod06ct.hdf" + file_reader: !!python/name:satpy.readers.modis_l2.ModisL2HDFFileHandler + modis_l2_product: + file_patterns: + - "M{platform_indicator:1s}D{product:2s}_L2.A{acquisition_time:%Y%j.%H%M}.{collection:03d}.{production_time:%Y%j%H%M%S}.hdf" file_reader: !!python/name:satpy.readers.modis_l2.ModisL2HDFFileHandler hdf_eos_geo: file_patterns: - - 'M{platform_indicator:1s}D03_A{start_time:%y%j_%H%M%S}_{processing_time:%Y%j%H%M%S}.hdf' - - 'M{platform_indicator:1s}D03.A{start_time:%Y%j.%H%M}.{collection:03d}.{processing_time:%Y%j%H%M%S}.hdf' - - 'M{platform_indicator:1s}D03.A{start_time:%Y%j.%H%M}.{collection:03d}{suffix}.hdf' - - 'M{platform_indicator:1s}D03.{start_time:%y%j%H%M%S}.hdf' - - '{platform_indicator:1s}1.{start_time:%y%j.%H%M}.geo.hdf' + - "M{platform_indicator:1s}D03_A{start_time:%y%j_%H%M%S}_{processing_time:%Y%j%H%M%S}.hdf" + - "M{platform_indicator:1s}D03.A{start_time:%Y%j.%H%M}.{collection:03d}.{processing_time:%Y%j%H%M%S}.hdf" + - "M{platform_indicator:1s}D03.A{start_time:%Y%j.%H%M}.{collection:03d}{suffix}.hdf" + - "M{platform_indicator:1s}D03.{start_time:%y%j%H%M%S}.hdf" + - "{platform_indicator:1s}1.{start_time:%y%j.%H%M}.geo.hdf" file_reader: !!python/name:satpy.readers.modis_l1b.HDFEOSGeoReader icecon_hdf: file_patterns: - - '{platform_indicator:1s}1.{start_time:%y%j.%H%M}.icecon.hdf' + - "{platform_indicator:1s}1.{start_time:%y%j.%H%M}.icecon.hdf" file_reader: !!python/name:satpy.readers.modis_l2.ModisL2HDFFileHandler inversion_hdf: file_patterns: - - '{platform_indicator:1s}1.{start_time:%y%j.%H%M}.inversion.hdf' + - "{platform_indicator:1s}1.{start_time:%y%j.%H%M}.inversion.hdf" file_reader: !!python/name:satpy.readers.modis_l2.ModisL2HDFFileHandler ist_hdf: file_patterns: - - '{platform_indicator:1s}1.{start_time:%y%j.%H%M}.ist.hdf' + - "{platform_indicator:1s}1.{start_time:%y%j.%H%M}.ist.hdf" 
file_reader: !!python/name:satpy.readers.modis_l2.ModisL2HDFFileHandler mask_byte1_hdf: file_patterns: - - '{platform_indicator:1s}1.{start_time:%y%j.%H%M}.mask_byte1.hdf' + - "{platform_indicator:1s}1.{start_time:%y%j.%H%M}.mask_byte1.hdf" file_reader: !!python/name:satpy.readers.modis_l2.ModisL2HDFFileHandler mod07_hdf: file_patterns: - - '{platform_indicator:1s}1.{start_time:%y%j.%H%M}.mod07.hdf' - - 'M{platform_indicator:1s}D07_L2.A{start_time:%Y%j.%H%M}.{collection:03d}.{production_time:%Y%j%H%M%S}.hdf' + - "{platform_indicator:1s}1.{start_time:%y%j.%H%M}.mod07.hdf" + - "M{platform_indicator:1s}D07_L2.A{start_time:%Y%j.%H%M}.{collection:03d}.{production_time:%Y%j%H%M%S}.hdf" file_reader: !!python/name:satpy.readers.modis_l2.ModisL2HDFFileHandler mod28_hdf: file_patterns: - - '{platform_indicator:1s}1.{start_time:%y%j.%H%M}.mod28.hdf' + - "{platform_indicator:1s}1.{start_time:%y%j.%H%M}.mod28.hdf" file_reader: !!python/name:satpy.readers.modis_l2.ModisL2HDFFileHandler modlst_hdf: file_patterns: - - '{platform_indicator:1s}1.{start_time:%y%j.%H%M}.modlst.hdf' + - "{platform_indicator:1s}1.{start_time:%y%j.%H%M}.modlst.hdf" file_reader: !!python/name:satpy.readers.modis_l2.ModisL2HDFFileHandler ndvi_1000m_hdf: file_patterns: - - '{platform_indicator:1s}1.{start_time:%y%j.%H%M}.ndvi.1000m.hdf' + - "{platform_indicator:1s}1.{start_time:%y%j.%H%M}.ndvi.1000m.hdf" file_reader: !!python/name:satpy.readers.modis_l2.ModisL2HDFFileHandler snowmask_hdf: file_patterns: - - '{platform_indicator:1s}1.{start_time:%y%j.%H%M}.snowmask.hdf' + - "{platform_indicator:1s}1.{start_time:%y%j.%H%M}.snowmask.hdf" file_reader: !!python/name:satpy.readers.modis_l2.ModisL2HDFFileHandler datasets: @@ -79,9 +83,9 @@ datasets: name: longitude resolution: 5000: - file_type: [mod35_hdf, mod06_hdf, mod06ct_hdf, mod07_hdf, mod05_hdf] + file_type: [mod35_hdf, mod06_hdf, mod06ct_hdf, mod07_hdf, mod05_hdf, modis_l2_product] 1000: - file_type: [hdf_eos_geo, mod35_hdf, mod06_hdf, mod05_hdf] + file_type: [hdf_eos_geo, mod35_hdf, mod06_hdf, mod05_hdf, modis_l2_product] 500: file_type: hdf_eos_geo 250: @@ -94,9 +98,9 @@ datasets: resolution: 5000: # For EUM reduced (thinned) files - file_type: [mod35_hdf, mod06_hdf, mod06ct_hdf, mod07_hdf, mod05_hdf] + file_type: [mod35_hdf, mod06_hdf, mod06ct_hdf, mod07_hdf, mod05_hdf, modis_l2_product] 1000: - file_type: [hdf_eos_geo, mod35_hdf, mod06_hdf, mod05_hdf] + file_type: [hdf_eos_geo, mod35_hdf, mod06_hdf, mod05_hdf, modis_l2_product] 500: file_type: hdf_eos_geo 250: diff --git a/satpy/etc/readers/mviri_l1b_fiduceo_nc.yaml b/satpy/etc/readers/mviri_l1b_fiduceo_nc.yaml index ec3c5cab77..da30cb2545 100644 --- a/satpy/etc/readers/mviri_l1b_fiduceo_nc.yaml +++ b/satpy/etc/readers/mviri_l1b_fiduceo_nc.yaml @@ -20,14 +20,18 @@ file_types: nc_easy: file_reader: !!python/name:satpy.readers.mviri_l1b_fiduceo_nc.FiduceoMviriEasyFcdrFileHandler file_patterns: [ - 'FIDUCEO_FCDR_{level}_{sensor}_{platform}-{projection_longitude:f}_{start_time:%Y%m%d%H%M}_{end_time:%Y%m%d%H%M}_EASY_{processor_version}_{format_version}.nc' + 'FIDUCEO_FCDR_{level}_{sensor}_{platform}-{projection_longitude:f}_{start_time:%Y%m%d%H%M}_{end_time:%Y%m%d%H%M}_EASY_{processor_version}_{format_version}.nc', # Example: FIDUCEO_FCDR_L15_MVIRI_MET7-57.0_201701201000_201701201030_EASY_v2.6_fv3.1.nc + '{sensor}_FCDR-EASY_{level}_{platform}-E{projection_longitude:s}_{start_time:%Y%m%d%H%M}_{end_time:%Y%m%d%H%M}_{release}.nc' + # Example: MVIRI_FCDR-EASY_L15_MET7-E0000_200607060600_200607060630_0200.nc ] nc_full: file_reader: 
!!python/name:satpy.readers.mviri_l1b_fiduceo_nc.FiduceoMviriFullFcdrFileHandler file_patterns: [ - 'FIDUCEO_FCDR_{level}_{sensor}_{platform}-{projection_longitude:f}_{start_time:%Y%m%d%H%M}_{end_time:%Y%m%d%H%M}_FULL_{processor_version}_{format_version}.nc' + 'FIDUCEO_FCDR_{level}_{sensor}_{platform}-{projection_longitude:f}_{start_time:%Y%m%d%H%M}_{end_time:%Y%m%d%H%M}_FULL_{processor_version}_{format_version}.nc', # Example: FIDUCEO_FCDR_L15_MVIRI_MET7-57.0_201701201000_201701201030_FULL_v2.6_fv3.1.nc + '{sensor}_FCDR-FULL_{level}_{platform}-E{projection_longitude:s}_{start_time:%Y%m%d%H%M}_{end_time:%Y%m%d%H%M}_{release}.nc' + # Example: MVIRI_FCDR-FULL_L15_MET7-E0000_200607060600_200607060630_0200.nc ] datasets: diff --git a/satpy/etc/readers/olci_l2.yaml b/satpy/etc/readers/olci_l2.yaml index 110bb11a2e..a3e82d3b5c 100644 --- a/satpy/etc/readers/olci_l2.yaml +++ b/satpy/etc/readers/olci_l2.yaml @@ -46,6 +46,11 @@ file_types: file_patterns: - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/wqsf.nc' - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/wqsf.nc' + esa_l2_w_aer: + file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2 + file_patterns: + - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/w_aer.nc' + - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/w_aer.nc' esa_l2_gifapar: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2 file_patterns: @@ -426,6 +431,16 @@ datasets: file_type: esa_l2_wqsf nc_key: WQSF + t865: + name: w_aer + sensor: olci + resolution: 300 + standard_name: aerosol_optical_thickness + units: "lg(re g.m-3)" + coordinates: [longitude, latitude] + file_type: esa_l2_w_aer + nc_key: T865 + iwv: name: iwv sensor: olci diff --git a/satpy/etc/readers/oli_tirs_l1_tif.yaml b/satpy/etc/readers/oli_tirs_l1_tif.yaml new file mode 100644 index 0000000000..ea21d3ec91 --- /dev/null +++ b/satpy/etc/readers/oli_tirs_l1_tif.yaml @@ -0,0 +1,335 @@ +reader: + name: oli_tirs_l1_tif + short_name: OLI/TIRS L1 GeoTIFF + long_name: Landsat-8/9 OLI/TIRS L1 data in GeoTIFF format. + description: GeoTIFF reader for Landsat-8/9 OLI/TIRS L1 data. 
+ status: Beta + supports_fsspec: false + sensors: oli_tirs + default_channels: [] + reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader + +file_types: + # Bands on the OLI subsystem + granule_B1: + file_reader: !!python/name:satpy.readers.oli_tirs_l1_tif.OLITIRSCHReader + file_patterns: ['{platform_type:1s}{data_type:1s}{spacecraft_id:2s}_{process_level_correction:4s}_{tilepath:3s}{tilerow:3s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_B1.TIF'] + requires: [l1_metadata] + + granule_B2: + file_reader: !!python/name:satpy.readers.oli_tirs_l1_tif.OLITIRSCHReader + file_patterns: ['{platform_type:1s}{data_type:1s}{spacecraft_id:2s}_{process_level_correction:4s}_{tilepath:3s}{tilerow:3s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_B2.TIF'] + requires: [l1_metadata] + + granule_B3: + file_reader: !!python/name:satpy.readers.oli_tirs_l1_tif.OLITIRSCHReader + file_patterns: ['{platform_type:1s}{data_type:1s}{spacecraft_id:2s}_{process_level_correction:4s}_{tilepath:3s}{tilerow:3s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_B3.TIF'] + requires: [l1_metadata] + + granule_B4: + file_reader: !!python/name:satpy.readers.oli_tirs_l1_tif.OLITIRSCHReader + file_patterns: ['{platform_type:1s}{data_type:1s}{spacecraft_id:2s}_{process_level_correction:4s}_{tilepath:3s}{tilerow:3s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_B4.TIF'] + requires: [l1_metadata] + + granule_B5: + file_reader: !!python/name:satpy.readers.oli_tirs_l1_tif.OLITIRSCHReader + file_patterns: ['{platform_type:1s}{data_type:1s}{spacecraft_id:2s}_{process_level_correction:4s}_{tilepath:3s}{tilerow:3s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_B5.TIF'] + requires: [l1_metadata] + + granule_B6: + file_reader: !!python/name:satpy.readers.oli_tirs_l1_tif.OLITIRSCHReader + file_patterns: ['{platform_type:1s}{data_type:1s}{spacecraft_id:2s}_{process_level_correction:4s}_{tilepath:3s}{tilerow:3s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_B6.TIF'] + requires: [l1_metadata] + + granule_B7: + file_reader: !!python/name:satpy.readers.oli_tirs_l1_tif.OLITIRSCHReader + file_patterns: ['{platform_type:1s}{data_type:1s}{spacecraft_id:2s}_{process_level_correction:4s}_{tilepath:3s}{tilerow:3s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_B7.TIF'] + requires: [l1_metadata] + + granule_B8: + file_reader: !!python/name:satpy.readers.oli_tirs_l1_tif.OLITIRSCHReader + file_patterns: ['{platform_type:1s}{data_type:1s}{spacecraft_id:2s}_{process_level_correction:4s}_{tilepath:3s}{tilerow:3s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_B8.TIF'] + requires: [l1_metadata] + + granule_B9: + file_reader: !!python/name:satpy.readers.oli_tirs_l1_tif.OLITIRSCHReader + file_patterns: ['{platform_type:1s}{data_type:1s}{spacecraft_id:2s}_{process_level_correction:4s}_{tilepath:3s}{tilerow:3s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_B9.TIF'] + requires: [l1_metadata] + + # Bands on the TIRS subsystem + granule_B10: + file_reader: !!python/name:satpy.readers.oli_tirs_l1_tif.OLITIRSCHReader + file_patterns: 
['{platform_type:1s}{data_type:1s}{spacecraft_id:2s}_{process_level_correction:4s}_{tilepath:3s}{tilerow:3s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_B10.TIF'] + requires: [l1_metadata] + + granule_B11: + file_reader: !!python/name:satpy.readers.oli_tirs_l1_tif.OLITIRSCHReader + file_patterns: ['{platform_type:1s}{data_type:1s}{spacecraft_id:2s}_{process_level_correction:4s}_{tilepath:3s}{tilerow:3s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_B11.TIF'] + requires: [l1_metadata] + + # Geometry datasets + granule_sza: + file_reader: !!python/name:satpy.readers.oli_tirs_l1_tif.OLITIRSCHReader + file_patterns: ['{platform_type:1s}{data_type:1s}{spacecraft_id:2s}_{process_level_correction:4s}_{tilepath:3s}{tilerow:3s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_SZA.TIF'] + requires: [l1_metadata] + granule_saa: + file_reader: !!python/name:satpy.readers.oli_tirs_l1_tif.OLITIRSCHReader + file_patterns: ['{platform_type:1s}{data_type:1s}{spacecraft_id:2s}_{process_level_correction:4s}_{tilepath:3s}{tilerow:3s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_SAA.TIF'] + requires: [l1_metadata] + granule_vza: + file_reader: !!python/name:satpy.readers.oli_tirs_l1_tif.OLITIRSCHReader + file_patterns: ['{platform_type:1s}{data_type:1s}{spacecraft_id:2s}_{process_level_correction:4s}_{tilepath:3s}{tilerow:3s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_VZA.TIF'] + requires: [l1_metadata] + granule_vaa: + file_reader: !!python/name:satpy.readers.oli_tirs_l1_tif.OLITIRSCHReader + file_patterns: ['{platform_type:1s}{data_type:1s}{spacecraft_id:2s}_{process_level_correction:4s}_{tilepath:3s}{tilerow:3s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_VAA.TIF'] + requires: [l1_metadata] + + # QA Variables + granule_qa: + file_reader: !!python/name:satpy.readers.oli_tirs_l1_tif.OLITIRSCHReader + file_patterns: ['{platform_type:1s}{data_type:1s}{spacecraft_id:2s}_{process_level_correction:4s}_{tilepath:3s}{tilerow:3s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_QA.TIF'] + requires: [l1_metadata] + granule_qa_radsat: + file_reader: !!python/name:satpy.readers.oli_tirs_l1_tif.OLITIRSCHReader + file_patterns: ['{platform_type:1s}{data_type:1s}{spacecraft_id:2s}_{process_level_correction:4s}_{tilepath:3s}{tilerow:3s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_QA_RADSAT.TIF'] + requires: [l1_metadata] + + l1_metadata: + file_reader: !!python/name:satpy.readers.oli_tirs_l1_tif.OLITIRSMDReader + file_patterns: ['{platform_type:1s}{data_type:1s}{spacecraft_id:2s}_{process_level_correction:4s}_{tilepath:3s}{tilerow:3s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_MTL.xml'] + +datasets: + B1: + name: B1 + sensor: oli_tirs + wavelength: [0.433, 0.443, 0.453] + resolution: 30 + calibration: + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" + file_type: granule_B1 + + B2: + name: B2 + sensor: oli_tirs + wavelength: [0.450, 0.482, 0.515] + resolution: 30 + calibration: + reflectance: + standard_name: toa_bidirectional_reflectance + 
units: "%" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" + file_type: granule_B2 + + B3: + name: B3 + sensor: oli_tirs + wavelength: [0.525, 0.565, 0.600] + resolution: 30 + calibration: + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" + file_type: granule_B3 + + B4: + name: B4 + sensor: oli_tirs + wavelength: [0.630, 0.660, 0.680] + resolution: 30 + calibration: + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" + file_type: granule_B4 + + B5: + name: B5 + sensor: oli_tirs + wavelength: [0.845, 0.867, 0.885] + resolution: 30 + calibration: + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" + file_type: granule_B5 + + B6: + name: B6 + sensor: oli_tirs + wavelength: [1.560, 1.650, 1.660] + resolution: 30 + calibration: + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" + file_type: granule_B6 + + B7: + name: B7 + sensor: oli_tirs + wavelength: [2.100, 2.215, 2.300] + resolution: 30 + calibration: + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" + file_type: granule_B7 + + B8: + name: B8 + sensor: oli_tirs + wavelength: [0.500, 0.579, 0.680] + resolution: 15 + calibration: + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" + file_type: granule_B8 + + B9: + name: B9 + sensor: oli_tirs + wavelength: [1.360, 1.373, 1.390] + resolution: 30 + calibration: + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" + file_type: granule_B9 + + # Channels on the TIRS instrument + B10: + name: B10 + sensor: oli_tirs + wavelength: [10.6, 10.888, 11.19] + resolution: 30 + calibration: + brightness_temperature: + standard_name: brightness_temperature + units: "K" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" + file_type: granule_B10 + + B11: + name: B11 + sensor: oli_tirs + wavelength: [11.5, 11.981, 12.51] + resolution: 30 + calibration: + brightness_temperature: + standard_name: brightness_temperature + units: "K" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" + file_type: granule_B11 + + # QA Variables + qa: + name: qa + sensor: oli_tirs + resolution: 30 + file_type: granule_qa + + qa_radsat: + name: qa_radsat + sensor: oli_tirs + resolution: 30 + file_type: 
granule_qa_radsat + + # Angles datasets + solar_zenith_angle: + name: solar_zenith_angle + sensor: oli_tirs + standard_name: solar_zenith_angle + resolution: 30 + units: "degrees" + file_type: granule_sza + + solar_azimuth_angle: + name: solar_azimuth_angle + sensor: oli_tirs + standard_name: solar_azimuth_angle + resolution: 30 + units: "degrees" + file_type: granule_saa + + satellite_zenith_angle: + name: satellite_zenith_angle + sensor: oli_tirs + standard_name: viewing_zenith_angle + resolution: 30 + units: "degrees" + file_type: granule_vza + + satellite_azimuth_angle: + name: satellite_azimuth_angle + sensor: oli_tirs + standard_name: viewing_azimuth_angle + resolution: 30 + units: "degrees" + file_type: granule_vaa
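With the file types and datasets above in place, the new reader plugs into the standard Scene workflow. A minimal sketch, assuming a Landsat-9 Collection-2 L1TP granule directory (the scene ID below is made up; any directory whose band GeoTIFFs and *_MTL.xml metadata file match the patterns above will do):

    from glob import glob

    from satpy import Scene

    # Band files and the MTL metadata file must be passed together,
    # since every band file type above requires l1_metadata.
    files = glob("LC09_L1TP_218078_20240504_20240510_02_T1/*")
    scn = Scene(reader="oli_tirs_l1_tif", filenames=files)
    scn.load(["B4", "B10", "solar_zenith_angle"])
    print(scn["B4"].attrs["standard_name"])  # toa_bidirectional_reflectance

Radiances or raw counts can be requested per band through the usual calibration mechanism, e.g. scn.load(["B4"], calibration="radiance").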
diff --git a/satpy/etc/readers/seviri_l2_grib.yaml b/satpy/etc/readers/seviri_l2_grib.yaml index 5d7a204e24..9f156c5388 100644 --- a/satpy/etc/readers/seviri_l2_grib.yaml +++ b/satpy/etc/readers/seviri_l2_grib.yaml @@ -14,7 +14,7 @@ file_types: # EUMETSAT MSG SEVIRI L2 Aerosol Properties over Sea product # https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:AES grib_seviri_aes: - file_reader: !!python/name:satpy.readers.seviri_l2_grib.SeviriL2GribFileHandler + file_reader: !!python/name:satpy.readers.eum_l2_grib.EUML2GribFileHandler file_patterns: - 'AESGRIBProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' - '{spacecraft:4s}-SEVI-MSGAESE-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' @@ -24,7 +24,7 @@ file_types: # EUMETSAT MSG SEVIRI L2 Cloud Mask product # https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:CLM grib_seviri_clm: - file_reader: !!python/name:satpy.readers.seviri_l2_grib.SeviriL2GribFileHandler + file_reader: !!python/name:satpy.readers.eum_l2_grib.EUML2GribFileHandler file_patterns: - 'CLMEncProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' - '{spacecraft:4s}-SEVI-MSGCLMK-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' @@ -34,7 +34,7 @@ file_types: # EUMETSAT MSG SEVIRI L2 Cloud Top Height product # https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:CTH grib_seviri_cth: - file_reader: !!python/name:satpy.readers.seviri_l2_grib.SeviriL2GribFileHandler + file_reader: !!python/name:satpy.readers.eum_l2_grib.EUML2GribFileHandler file_patterns: - 'CTHEncProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' - '{spacecraft:4s}-SEVI-MSGCLTH-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' @@ -44,7 +44,7 @@ file_types: # EUMETSAT MSG SEVIRI L2 Clear-Sky Reflectance Map product # https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:CRM grib_seviri_crm: - file_reader: !!python/name:satpy.readers.seviri_l2_grib.SeviriL2GribFileHandler + file_reader: !!python/name:satpy.readers.eum_l2_grib.EUML2GribFileHandler file_patterns: - 'CRMEncProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' - '{spacecraft:4s}-SEVI-MSGCRMN-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' @@ -54,7 +54,7 @@ file_types: # EUMETSAT MSG SEVIRI L2 Active Fire Monitoring product # https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:FIR grib_seviri_fir: - file_reader: !!python/name:satpy.readers.seviri_l2_grib.SeviriL2GribFileHandler + file_reader: !!python/name:satpy.readers.eum_l2_grib.EUML2GribFileHandler file_patterns: - 'FIREncProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' - '{spacecraft:4s}-SEVI-MSGFIRG-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' @@ -65,7 +65,7 @@ file_types: # EUMETSAT MSG SEVIRI L2 Multi-Sensor Precipitation Estimate product # https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:MPE-GRIB grib_seviri_mpe: - file_reader: !!python/name:satpy.readers.seviri_l2_grib.SeviriL2GribFileHandler + file_reader: !!python/name:satpy.readers.eum_l2_grib.EUML2GribFileHandler file_patterns: - 'MPEGRIBProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' - '{spacecraft:4s}-SEVI-MSGMPEG-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' @@ -75,7 +75,7 @@ file_types: # EUMETSAT MSG SEVIRI L2 Optimal Cloud Analysis product # https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:OCA grib_seviri_oca: - file_reader: !!python/name:satpy.readers.seviri_l2_grib.SeviriL2GribFileHandler + file_reader: !!python/name:satpy.readers.eum_l2_grib.EUML2GribFileHandler file_patterns: - 'OCAEncProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' - '{spacecraft:4s}-SEVI-MSGOCAE-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' diff --git a/satpy/etc/readers/viirs_edr.yaml b/satpy/etc/readers/viirs_edr.yaml index 4c4c91a91f..e53a27d072 100644 --- a/satpy/etc/readers/viirs_edr.yaml +++ b/satpy/etc/readers/viirs_edr.yaml @@ -36,6 +36,34 @@ file_types: file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSLSTHandler file_patterns: - 'LST_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' + jrr_cloudbase: + file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSJRRFileHandler + file_patterns: + - 'JRR-CloudBase_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' + jrr_clouddcomp: + file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSJRRFileHandler + file_patterns: + - 'JRR-CloudDCOMP_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' + jrr_cloudncomp: + file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSJRRFileHandler + file_patterns: + - 'JRR-CloudNCOMP_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' + jrr_cloudlayers: + file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSJRRFileHandler + file_patterns: + - 'JRR-CloudCoverLayers_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' + jrr_cloudphase: + file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSJRRFileHandler + file_patterns: + - 'JRR-CloudPhase_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' + jrr_iceconcentration: + file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSJRRFileHandler + file_patterns: + - 'JRR-IceConcentration_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' + jrr_iceage: + file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSJRRFileHandler + file_patterns: + -
'JRR-IceAge_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' datasets: diff --git a/satpy/modifiers/angles.py b/satpy/modifiers/angles.py index 5ea8530612..679af8b3b9 100644 --- a/satpy/modifiers/angles.py +++ b/satpy/modifiers/angles.py @@ -572,7 +572,6 @@ def sunzen_reduction(data: da.Array, return da.map_blocks(_sunzen_reduction_ndarray, data, sunz, limit, max_sza, strength, meta=np.array((), dtype=data.dtype), chunks=data.chunks) - def _sunzen_reduction_ndarray(data: np.ndarray, sunz: np.ndarray, limit: float, @@ -584,7 +583,7 @@ def _sunzen_reduction_ndarray(data: np.ndarray, # invert the reduction factor such that minimum reduction is done at `limit` and gradually increases towards max_sza with np.errstate(invalid="ignore"): # we expect space pixels to be invalid - reduction_factor = 1. - np.log(reduction_factor + 1) / np.log(2) + reduction_factor = 1. - np.log2(reduction_factor + 1) # apply non-linearity to the reduction factor for a non-linear reduction of the signal. This can be used for a # slower or faster transition to higher/lower fractions at the ndvi extremes. If strength equals 1.0, this diff --git a/satpy/modifiers/atmosphere.py b/satpy/modifiers/atmosphere.py index 1c6225f42a..c7144c27ca 100644 --- a/satpy/modifiers/atmosphere.py +++ b/satpy/modifiers/atmosphere.py @@ -77,7 +77,9 @@ def __call__(self, projectables, optional_datasets=None, **info): projectables = projectables + (optional_datasets or []) if len(projectables) != 6: vis, red = self.match_data_arrays(projectables) - sata, satz, suna, sunz = get_angles(vis) + # Adjust the angle data precision to match the data + # This does not affect the accuracy visibly + sata, satz, suna, sunz = [d.astype(vis.dtype) for d in get_angles(vis)] else: vis, red, sata, satz, suna, sunz = self.match_data_arrays(projectables) # First make sure the two azimuth angles are in the range 0-360: @@ -97,7 +99,7 @@ def __call__(self, projectables, optional_datasets=None, **info): aerosol_type = self.attrs.get("aerosol_type", "marine_clean_aerosol") reduce_lim_low = abs(self.attrs.get("reduce_lim_low", 70)) reduce_lim_high = abs(self.attrs.get("reduce_lim_high", 105)) - reduce_strength = np.clip(self.attrs.get("reduce_strength", 0), 0, 1) + reduce_strength = np.clip(self.attrs.get("reduce_strength", 0), 0, 1).astype(vis.dtype) logger.info("Removing Rayleigh scattering with atmosphere '%s' and " "aerosol type '%s' for '%s'",
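Two notes on the modifier changes above. The angles.py edit is a pure identity: by the change-of-base rule, 1 - log(x + 1)/log(2) equals 1 - log2(x + 1), so the reduction curve is unchanged and a division is saved. The atmosphere.py edits only align dtypes, so float64 angle arrays no longer promote float32 imagery. A standalone check of both points (plain NumPy, not satpy code):

    import numpy as np

    x = np.linspace(0.0, 1.0, 5, dtype=np.float32)
    assert np.allclose(1.0 - np.log(x + 1) / np.log(2), 1.0 - np.log2(x + 1))

    # Mixing float64 angles into float32 data promotes the result to float64;
    # casting the angles first, as done above with vis.dtype, keeps float32.
    angles64 = np.zeros(5, dtype=np.float64)
    print((x * angles64).dtype)                  # float64 (promoted)
    print((x * angles64.astype(x.dtype)).dtype)  # float32 (preserved)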
diff --git a/satpy/readers/__init__.py b/satpy/readers/__init__.py index 64055c7232..04f98e2182 100644 --- a/satpy/readers/__init__.py +++ b/satpy/readers/__init__.py @@ -557,24 +557,16 @@ def load_readers(filenames=None, reader=None, reader_kwargs=None): reader, filenames, remaining_filenames = _get_reader_and_filenames(reader, filenames) (reader_kwargs, reader_kwargs_without_filter) = _get_reader_kwargs(reader, reader_kwargs) - for idx, reader_configs in enumerate(configs_for_reader(reader)): - if isinstance(filenames, dict): - readers_files = set(filenames[reader[idx]]) - else: - readers_files = remaining_filenames + if reader_kwargs is None: + reader_kwargs = {} - try: - reader_instance = load_reader( - reader_configs, - **reader_kwargs[None if reader is None else reader[idx]]) - except (KeyError, IOError, yaml.YAMLError) as err: - LOG.info("Cannot use %s", str(reader_configs)) - LOG.debug(str(err)) + for idx, reader_configs in enumerate(configs_for_reader(reader)): + readers_files = _get_readers_files(filenames, reader, idx, remaining_filenames) + reader_instance = _get_reader_instance(reader, reader_configs, idx, reader_kwargs) + if reader_instance is None or not readers_files: + # Reader initialisation failed or no files were given continue - if not readers_files: - # we weren't given any files for this reader - continue loadables = reader_instance.select_files_from_pathnames(readers_files) if loadables: reader_instance.create_storage_items( @@ -582,6 +574,7 @@ def load_readers(filenames=None, reader=None, reader_kwargs=None): fh_kwargs=reader_kwargs_without_filter[None if reader is None else reader[idx]]) reader_instances[reader_instance.name] = reader_instance remaining_filenames -= set(loadables) + if not remaining_filenames: break @@ -590,6 +583,32 @@ def load_readers(filenames=None, reader=None, reader_kwargs=None): return reader_instances +def _get_readers_files(filenames, reader, idx, remaining_filenames): + if isinstance(filenames, dict): + return set(filenames[reader[idx]]) + return remaining_filenames + + +def _get_reader_instance(reader, reader_configs, idx, reader_kwargs): + reader_instance = None + try: + reader_instance = load_reader( + reader_configs, + **reader_kwargs[None if reader is None else reader[idx]]) + except (KeyError, IOError) as err: + LOG.info("Cannot use %s", str(reader_configs)) + LOG.debug(str(err)) + except yaml.constructor.ConstructorError as err: + _log_yaml_error(reader_configs, err) + + return reader_instance + + +def _log_yaml_error(reader_configs, err): + LOG.error("Problem with %s", str(reader_configs)) + LOG.error(str(err)) + + def _early_exit(filenames, reader): if not filenames and not reader: # used for an empty Scene diff --git a/satpy/readers/seviri_l2_grib.py b/satpy/readers/eum_l2_grib.py similarity index 76% rename from satpy/readers/seviri_l2_grib.py rename to satpy/readers/eum_l2_grib.py index d178d6b716..4ed5901a65 100644 --- a/satpy/readers/seviri_l2_grib.py +++ b/satpy/readers/eum_l2_grib.py @@ -13,7 +13,7 @@ # You should have received a copy of the GNU General Public License # along with satpy. If not, see <http://www.gnu.org/licenses/>. -"""Reader for the SEVIRI L2 products in GRIB2 format. +"""Reader for both SEVIRI and FCI L2 products in GRIB2 format. References: FM 92 GRIB Edition 2 @@ -31,22 +31,29 @@ from satpy.readers._geos_area import get_area_definition, get_geos_area_naming from satpy.readers.eum_base import get_service_mode +from satpy.readers.fci_base import calculate_area_extent as fci_calculate_area_extent from satpy.readers.file_handlers import BaseFileHandler -from satpy.readers.seviri_base import PLATFORM_DICT, REPEAT_CYCLE_DURATION, calculate_area_extent +from satpy.readers.seviri_base import PLATFORM_DICT as SEVIRI_PLATFORM_DICT +from satpy.readers.seviri_base import REPEAT_CYCLE_DURATION as SEVIRI_REPEAT_CYCLE_DURATION +from satpy.readers.seviri_base import REPEAT_CYCLE_DURATION_RSS as SEVIRI_REPEAT_CYCLE_DURATION_RSS +from satpy.readers.seviri_base import calculate_area_extent as seviri_calculate_area_extent from satpy.utils import get_legacy_chunk_size +CHUNK_SIZE = get_legacy_chunk_size() + try: import eccodes as ec except ImportError: raise ImportError( - "Missing eccodes-python and/or eccodes C-library installation.
Use conda to install eccodes") -CHUNK_SIZE = get_legacy_chunk_size() logger = logging.getLogger(__name__) -class SeviriL2GribFileHandler(BaseFileHandler): - """Reader class for SEVIRI L2 products in GRIB format.""" +class EUML2GribFileHandler(BaseFileHandler): + """Reader class for EUM L2 products in GRIB format.""" + + calculate_area_extent = None def __init__(self, filename, filename_info, filetype_info): """Read the global attributes and prepare for dataset reading.""" @@ -54,6 +61,14 @@ def __init__(self, filename, filename_info, filetype_info): # Turn on support for multiple fields in single GRIB messages (required for SEVIRI L2 files) ec.codes_grib_multi_support_on() + if "seviri" in self.filetype_info["file_type"]: + self.sensor = "seviri" + self.PLATFORM_NAME = SEVIRI_PLATFORM_DICT[self.filename_info["spacecraft"]] + elif "fci" in self.filetype_info["file_type"]: + self.sensor = "fci" + self.PLATFORM_NAME = f"MTG-i{self.filename_info['spacecraft_id']}" + pass + @property def start_time(self): """Return the sensing start time.""" @@ -62,14 +77,28 @@ def start_time(self): @property def end_time(self): """Return the sensing end time.""" - return self.start_time + dt.timedelta(minutes=REPEAT_CYCLE_DURATION) + if self.sensor == "seviri": + try: + delta = SEVIRI_REPEAT_CYCLE_DURATION_RSS if self._ssp_lon == 9.5 else SEVIRI_REPEAT_CYCLE_DURATION + return self.start_time + dt.timedelta(minutes=delta) + except AttributeError: + # If dataset and metadata (ssp_lon) have not yet been loaded, return None + return None + elif self.sensor == "fci": + return self.filename_info["end_time"] def get_area_def(self, dataset_id): """Return the area definition for a dataset.""" + # Compute the dictionary with the area extension + self._area_dict["column_step"] = dataset_id["resolution"] self._area_dict["line_step"] = dataset_id["resolution"] - area_extent = calculate_area_extent(self._area_dict) + if self.sensor == "seviri": + area_extent = seviri_calculate_area_extent(self._area_dict) + + elif self.sensor == "fci": + area_extent = fci_calculate_area_extent(self._area_dict) # Call the get_area_definition function to obtain the area area_def = get_area_definition(self._pdict, area_extent) @@ -173,19 +202,20 @@ def _get_proj_area(self, gid): """ # Get name of area definition area_naming_input_dict = {"platform_name": "msg", - "instrument_name": "seviri", + "instrument_name": self.sensor, "resolution": self._res, } area_naming = get_geos_area_naming({**area_naming_input_dict, - **get_service_mode("seviri", self._ssp_lon)}) + **get_service_mode(self.sensor, self._ssp_lon)}) # Read all projection and area parameters from the message earth_major_axis_in_meters = self._get_from_msg(gid, "earthMajorAxis") * 1000.0 # [m] earth_minor_axis_in_meters = self._get_from_msg(gid, "earthMinorAxis") * 1000.0 # [m] - earth_major_axis_in_meters = self._scale_earth_axis(earth_major_axis_in_meters) - earth_minor_axis_in_meters = self._scale_earth_axis(earth_minor_axis_in_meters) + if self.sensor == "seviri": + earth_major_axis_in_meters = self._scale_earth_axis(earth_major_axis_in_meters) + earth_minor_axis_in_meters = self._scale_earth_axis(earth_minor_axis_in_meters) nr_in_radius_of_earth = self._get_from_msg(gid, "NrInRadiusOfEarth") xp_in_grid_lengths = self._get_from_msg(gid, "XpInGridLengths") @@ -204,14 +234,21 @@ def _get_proj_area(self, gid): "p_id": "", } - # Compute the dictionary with the area extension - area_dict = { - "center_point": xp_in_grid_lengths, - "north": self._nrows, - "east": 1, - "west": 
self._ncols, - "south": 1, - } + if self.sensor == "seviri": + # Compute the dictionary with the area extension + area_dict = { + "center_point": xp_in_grid_lengths, + "north": self._nrows, + "east": 1, + "west": self._ncols, + "south": 1, + } + + elif self.sensor == "fci": + area_dict = { + "nlines": self._ncols, + "ncols": self._nrows, + } return pdict, area_dict @@ -219,10 +256,9 @@ def _get_proj_area(self, gid): def _scale_earth_axis(data): """Scale Earth axis data to make sure the value matches the expected unit [m]. - The earthMinorAxis value stored in the aerosol over sea product is scaled incorrectly by a factor of 1e8. This - method provides a flexible temporarily workaraound by making sure that all earth axis values are scaled such - that they are on the order of millions of meters as expected by the reader. As soon as the scaling issue has - been resolved by EUMETSAT this workaround can be removed. + The earthMinorAxis value stored in the MPEF aerosol over sea product prior to December 12, 2022 has the wrong + unit and this method provides a flexible work-around by making sure that all earth axis values are scaled such + that they are on the order of millions of meters as expected by the reader. scale_factor = 10 ** np.ceil(np.log10(1e6/data)) @@ -256,11 +292,9 @@ def _get_attributes(self): "projection_longitude": self._ssp_lon } - attributes = { - "orbital_parameters": orbital_parameters, - "sensor": "seviri", - "platform_name": PLATFORM_DICT[self.filename_info["spacecraft"]] - } + attributes = {"orbital_parameters": orbital_parameters, "sensor": self.sensor, + "platform_name": self.PLATFORM_NAME} + return attributes @staticmethod diff --git a/satpy/readers/fci_base.py b/satpy/readers/fci_base.py new file mode 100644 index 0000000000..c1f6fc2110 --- /dev/null +++ b/satpy/readers/fci_base.py @@ -0,0 +1,50 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2017-2018 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see <http://www.gnu.org/licenses/>. +"""Common functionality for FCI data readers.""" +from __future__ import annotations + + +def calculate_area_extent(area_dict): + """Calculate the area extent seen by MTG FCI instrument. + + Since the center of the FCI grids is located at the interface between the pixels, there are equally many + pixels (e.g. 5568/2 = 2784 for 2km grid) in each direction from the center points. Hence, the area extent + can be easily computed by simply adding and subtracting half the width and height from the centre point (=0). + + Args: + area_dict: A dictionary containing the required parameters + ncols: number of pixels in east-west direction + nlines: number of pixels in south-north direction + column_step: Pixel resolution in meters in east-west direction + line_step: Pixel resolution in meters in south-north direction + Returns: + tuple: An area extent for the scene defined by the lower left and + upper right corners + + """ + ncols = area_dict["ncols"] + nlines = area_dict["nlines"] + column_step = area_dict["column_step"] + line_step = area_dict["line_step"] + + ll_c = (0 - ncols / 2.) * column_step + ll_l = (0 + nlines / 2.) * line_step + ur_c = (0 + ncols / 2.) * column_step + ur_l = (0 - nlines / 2.) * line_step + + return (ll_c, ll_l, ur_c, ur_l)
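The arithmetic is easy to sanity-check against the 2 km grid named in the docstring (5568 pixels and 2000 m steps in both directions); the extent comes out symmetric about the centre point:

    from satpy.readers.fci_base import calculate_area_extent

    area_dict = {"ncols": 5568, "nlines": 5568,
                 "column_step": 2000, "line_step": 2000}
    print(calculate_area_extent(area_dict))
    # (-5568000.0, 5568000.0, 5568000.0, -5568000.0): half of the full
    # 11136 km width/height on each side of the centre (=0).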
diff --git a/satpy/readers/fci_l1c_nc.py b/satpy/readers/fci_l1c_nc.py index fc40916699..4b8efd81a4 100644 --- a/satpy/readers/fci_l1c_nc.py +++ b/satpy/readers/fci_l1c_nc.py @@ -127,6 +127,7 @@ from pyorbital.astronomy import sun_earth_distance_correction from pyresample import geometry +import satpy from satpy.readers._geos_area import get_geos_area_naming from satpy.readers.eum_base import get_service_mode @@ -208,7 +209,8 @@ class using the :mod:`~satpy.Scene.load` method with the reader "MTI3": "MTG-I3", "MTI4": "MTG-I4"} - def __init__(self, filename, filename_info, filetype_info): + def __init__(self, filename, filename_info, filetype_info, + clip_negative_radiances=None, **kwargs): """Initialize file handler.""" super().__init__(filename, filename_info, filetype_info, @@ -233,6 +235,9 @@ def __init__(self, filename, filename_info, filetype_info): else: self.is_iqt = False + if clip_negative_radiances is None: + clip_negative_radiances = satpy.config.get("readers.clip_negative_radiances") + self.clip_negative_radiances = clip_negative_radiances self._cache = {} @property @@ -661,6 +666,8 @@ def calibrate_counts_to_physical_quantity(self, data, key): def calibrate_counts_to_rad(self, data, key): """Calibrate counts to radiances.""" + if self.clip_negative_radiances: + data = self._clipneg(data) if key["name"] == "ir_38": data = xr.where(((2 ** 12 - 1 < data) & (data <= 2 ** 13 - 1)), (data * data.attrs.get("warm_scale_factor", 1) + @@ -677,6 +684,12 @@ def calibrate_counts_to_rad(self, data, key): self.get_and_cache_npxr(measured + "/radiance_unit_conversion_coefficient")}) return data + @staticmethod + def _clipneg(data): + """Clip counts to avoid negative radiances.""" + lo = -data.attrs.get("add_offset", 0) // data.attrs.get("scale_factor", 1) + 1 + return data.where((~data.notnull())|(data>=lo), lo) + def calibrate_rad_to_bt(self, radiance, key): """IR channel calibration.""" # using the method from PUG section Converting from Effective Radiance to Brightness Temperature for IR Channels diff --git a/satpy/readers/fci_l2_nc.py b/satpy/readers/fci_l2_nc.py index 8971eb4996..bf5c7a9bf6 100644 --- a/satpy/readers/fci_l2_nc.py +++ b/satpy/readers/fci_l2_nc.py @@ -60,7 +60,7 @@ def ssp_lon(self): f"of {SSP_DEFAULT} degrees east instead") return SSP_DEFAULT - def _get_global_attributes(self): + def _get_global_attributes(self, product_type="pixel"): """Create a dictionary of global attributes to be added to all datasets.
        Returns:
@@ -70,26 +70,36 @@ def _get_global_attributes(self):
             ssp_lon: longitude of subsatellite point
             sensor: name of sensor
             platform_name: name of the platform
+            Only for the AMV product:
+            channel: channel at which the AMVs have been retrieved
+
         """
         attributes = {
             "filename": self.filename,
             "spacecraft_name": self.spacecraft_name,
-            "ssp_lon": self.ssp_lon,
             "sensor": self.sensor_name,
             "platform_name": self.spacecraft_name,
+            "ssp_lon": self.ssp_lon,
         }
+
+        if product_type == "amv":
+            attributes["channel"] = self.filename_info["channel"]
+
         return attributes

-    def _set_attributes(self, variable, dataset_info, segmented=False):
+    def _set_attributes(self, variable, dataset_info, product_type="pixel"):
         """Set dataset attributes."""
-        if segmented:
-            xdim, ydim = "number_of_FoR_cols", "number_of_FoR_rows"
-        else:
-            xdim, ydim = "number_of_columns", "number_of_rows"
+        if product_type in ["pixel", "segmented"]:
+            if product_type == "pixel":
+                xdim, ydim = "number_of_columns", "number_of_rows"
+            elif product_type == "segmented":
+                xdim, ydim = "number_of_FoR_cols", "number_of_FoR_rows"

-        if dataset_info["nc_key"] not in ["product_quality", "product_completeness", "product_timeliness"]:
-            variable = variable.swap_dims({ydim: "y", xdim: "x"})
+            if dataset_info["nc_key"] not in ["product_quality",
+                                              "product_completeness",
+                                              "product_timeliness"]:
+                variable = variable.swap_dims({ydim: "y", xdim: "x"})

         variable.attrs.setdefault("units", None)
         if "unit" in variable.attrs:
@@ -98,7 +108,7 @@ def _set_attributes(self, variable, dataset_info, segmented=False):
             del variable.attrs["unit"]

         variable.attrs.update(dataset_info)
-        variable.attrs.update(self._get_global_attributes())
+        variable.attrs.update(self._get_global_attributes(product_type=product_type))

         import_enum_information = dataset_info.get("import_enum_information", False)
         if import_enum_information:
@@ -382,7 +392,7 @@ def get_dataset(self, dataset_id, dataset_info):
         if "fill_value" in dataset_info:
             variable = self._mask_data(variable, dataset_info["fill_value"])

-        variable = self._set_attributes(variable, dataset_info, segmented=True)
+        variable = self._set_attributes(variable, dataset_info, product_type="segmented")

         return variable

@@ -457,26 +467,6 @@ def nc(self):
             }
         )

-    def _get_global_attributes(self):
-        """Create a dictionary of global attributes to be added to all datasets.
-
-        Returns:
-            dict: A dictionary of global attributes.
-                filename: name of the product file
-                spacecraft_name: name of the spacecraft
-                sensor: name of sensor
-                platform_name: name of the platform
-
-        """
-        attributes = {
-            "filename": self.filename,
-            "spacecraft_name": self.spacecraft_name,
-            "sensor": self.sensor_name,
-            "platform_name": self.spacecraft_name,
-            "channel": self.filename_info["channel"]
-        }
-        return attributes
-
     def get_dataset(self, dataset_id, dataset_info):
         """Get dataset using the nc_key in dataset_info."""
         var_key = dataset_info["nc_key"]
@@ -489,7 +479,6 @@ def get_dataset(self, dataset_id, dataset_info):
             return None

         # Manage the attributes of the dataset
-        variable.attrs.update(dataset_info)
-        variable.attrs.update(self._get_global_attributes())
+        variable = self._set_attributes(variable, dataset_info, product_type="amv")

         return variable

diff --git a/satpy/readers/file_handlers.py b/satpy/readers/file_handlers.py
index b844732a2e..78622af780 100644
--- a/satpy/readers/file_handlers.py
+++ b/satpy/readers/file_handlers.py
@@ -251,10 +251,16 @@ def available_datasets(self, configured_datasets=None):
         Example 2 - Add dynamic datasets from the file::

             def available_datasets(self, configured_datasets=None):
-                "Add information to configured datasets."
+                "Add datasets dynamically determined from the file."
                 # pass along existing datasets
                 for is_avail, ds_info in (configured_datasets or []):
-                    yield is_avail, ds_info
+                    if is_avail is not None:
+                        # some other file handler said it has this dataset
+                        # we don't know any more information than the previous
+                        # file handler so let's yield early
+                        yield is_avail, ds_info
+                        continue
+                    yield self.file_type_matches(ds_info["file_type"]), ds_info

                 # get dynamic variables known to this file (that we created)
                 for var_name, val in self.dynamic_variables.items():
diff --git a/satpy/readers/generic_image.py b/satpy/readers/generic_image.py
index 5032b7bbb8..c0e334302f 100644
--- a/satpy/readers/generic_image.py
+++ b/satpy/readers/generic_image.py
@@ -147,7 +147,7 @@ def _mask_image_data(data, info):
     if not np.issubdtype(data.dtype, np.integer):
         raise ValueError("Only integer datatypes can be used as a mask.")
     mask = data.data[-1, :, :] == np.iinfo(data.dtype).min
-    data = data.astype(np.float64)
+    data = data.astype(np.float32)
     masked_data = da.stack([da.where(mask, np.nan, data.data[i, :, :])
                             for i in range(data.shape[0])])
     data.data = masked_data
diff --git a/satpy/readers/hdfeos_base.py b/satpy/readers/hdfeos_base.py
index 0f563efe2c..e1a76bec6b 100644
--- a/satpy/readers/hdfeos_base.py
+++ b/satpy/readers/hdfeos_base.py
@@ -238,7 +238,7 @@ def _chunks_for_variable(self, hdf_dataset):
         return normalize_low_res_chunks(
             (1,) * num_nonyx_dims + ("auto", -1),
             var_shape,
-            (1,) * num_nonyx_dims + (scan_length_250m, -1),
+            (1,) * num_nonyx_dims + (scan_length_250m, var_shape[-1]),
             (1,) * num_nonyx_dims + (res_multiplier, res_multiplier),
             np.float32,
         )
@@ -301,7 +301,7 @@ def _get_good_data_mask(self, data_arr, is_category=False):
     def _add_satpy_metadata(self, data_id: DataID, data_arr: xr.DataArray):
         """Add metadata that is specific to Satpy."""
         new_attrs = {
-            "platform_name": "EOS-" + self.metadata_platform_name,
+            "platform_name": self.metadata_platform_name,
             "sensor": "modis",
         }

@@ -333,6 +333,10 @@ class HDFEOSGeoReader(HDFEOSBaseFileReader):
         "satellite_zenith_angle": ("SensorZenith", "Sensor_Zenith"),
         "solar_azimuth_angle": ("SolarAzimuth", "SolarAzimuth"),
         "solar_zenith_angle": ("SolarZenith", "Solar_Zenith"),
+        "water_present": "WaterPresent",
+        "landsea_mask": "Land/SeaMask",
+        "height": "Height",
+        "range": "Range",
     }

     def __init__(self, filename, filename_info, filetype_info, **kwargs):
diff --git a/satpy/readers/hrit_jma.py b/satpy/readers/hrit_jma.py
index ac83776a6a..0266ce945f 100644
--- a/satpy/readers/hrit_jma.py
+++ b/satpy/readers/hrit_jma.py
@@ -345,8 +345,8 @@ def _get_line_offset(self):
         if self.is_segmented:
             # loff in the file specifies the offset of the full disk image
             # centre (1375/2750 for VIS/IR)
-            segment_number = self.mda["segment_sequence_number"] - 1
-            loff -= (self.mda["total_no_image_segm"] - segment_number - 1) * nlines
+            segment_number = int(self.mda["segment_sequence_number"]) - 1
+            loff -= (int(self.mda["total_no_image_segm"]) - segment_number - 1) * nlines
         elif self.area_id in (NORTH_HEMIS, SOUTH_HEMIS):
             # loff in the file specifies the start line of the half disk image
             # in the full disk image
diff --git a/satpy/readers/insat3d_img_l1b_h5.py b/satpy/readers/insat3d_img_l1b_h5.py
index 41ddee5df6..29f211f08f 100644
--- a/satpy/readers/insat3d_img_l1b_h5.py
+++ b/satpy/readers/insat3d_img_l1b_h5.py
@@ -7,11 +7,7 @@
 import dask.array as da
 import numpy as np
 import xarray as xr
-
-from satpy.utils import import_error_helper
-
-with import_error_helper("xarray-datatree"):
-    from datatree import DataTree
+from xarray.core.datatree import DataTree

 from satpy.readers.file_handlers import BaseFileHandler

@@ -183,7 +179,10 @@ def get_area_def(self, ds_id):
         #fov = self.datatree.attrs["Field_of_View(degrees)"]
         fov = 18
         cfac = 2 ** 16 / (fov / cols)
-        lfac = 2 ** 16 / (fov / lines)
+
+        # From reverse engineering metadata from a netcdf file, we discovered
+        # that lfac is actually the same as cfac, i.e. dependent on cols, not lines!
+        lfac = 2 ** 16 / (fov / cols)

         h = self.datatree.attrs["Observed_Altitude(km)"] * 1000
         # WGS 84
@@ -195,8 +194,8 @@ def get_area_def(self, ds_id):
         pdict = {
             "cfac": cfac,
             "lfac": lfac,
-            "coff": cols / 2,
-            "loff": lines / 2,
+            "coff": cols // 2 + 1,
+            "loff": lines // 2,
             "ncols": cols,
             "nlines": lines,
             "scandir": "N2S",
diff --git a/satpy/readers/li_base_nc.py b/satpy/readers/li_base_nc.py
index cefbcc7e55..a65129c904 100644
--- a/satpy/readers/li_base_nc.py
+++ b/satpy/readers/li_base_nc.py
@@ -742,7 +742,6 @@ def get_dataset(self, dataset_id, ds_info=None):
         # Retrieve default infos if missing:
         if ds_info is None:
             ds_info = self.get_dataset_infos(dataset_id["name"])
-
         # check for potential error:
         if ds_info is None:
             raise KeyError(f"No dataset registered for {dataset_id}")
diff --git a/satpy/readers/li_l2_nc.py b/satpy/readers/li_l2_nc.py
index 587039fa46..e69b6ffe5b 100644
--- a/satpy/readers/li_l2_nc.py
+++ b/satpy/readers/li_l2_nc.py
@@ -111,6 +111,10 @@ def get_dataset(self, dataset_id, ds_info=None):
         var_with_swath_coord = self.is_var_with_swath_coord(dataset_id)
         if var_with_swath_coord and self.with_area_def:
             data_array = self.get_array_on_fci_grid(data_array)
+        else:
+            if data_array is not None:
+                if not isinstance(data_array.data, da.Array):
+                    data_array.data = da.from_array(data_array.data)
         return data_array

     def get_area_def(self, dsid):
@@ -161,6 +165,7 @@ def get_array_on_fci_grid(self, data_array: xr.DataArray):

         data_2d = da.where(data_2d > 0, data_2d, np.nan)
         xarr = xr.DataArray(da.asarray(data_2d, CHUNK_SIZE), dims=("y", "x"))
+        xarr.attrs = attrs

         return xarr
diff --git a/satpy/readers/mersi_l1b.py b/satpy/readers/mersi_l1b.py
index 5b1a960031..a6db73bee5 100644
--- a/satpy/readers/mersi_l1b.py
+++ b/satpy/readers/mersi_l1b.py
@@ -182,7 +182,7 @@ def _mask_data(self, data, dataset_id, attrs):
             attrs["_FillValue"] = fill_value
             new_fill = data.dtype.type(fill_value)
         else:
-            new_fill = np.nan
+            new_fill = np.float32(np.nan)
         try:
             # Due to a bug in the valid_range upper limit in the 10.8(24) and 12.0(25)
             # in the HDF data, this is hardcoded here.
diff --git a/satpy/readers/modis_l2.py b/satpy/readers/modis_l2.py
index 2f2555692d..ddf627c940 100644
--- a/satpy/readers/modis_l2.py
+++ b/satpy/readers/modis_l2.py
@@ -28,7 +28,11 @@
 - m[o/y]d35_l2: cloud_mask dataset
 - some datasets in m[o/y]d06 files

-To get a list of the available datasets for a given file refer to the "Load data" section in :doc:`../reading`.
+Additionally, the reader tries to dynamically add 2D datasets that are not configured in the YAML file. As mentioned
+above, there are a lot of different level 2 datasets, so this might not work in every case (for example, bit-encoded
+datasets similar to the supported m[o/y]d35_l2 cloud mask are not decoded).
+
+To get a list of the available datasets for a given file refer to the :ref:`reading:available datasets` section.


 Geolocation files
@@ -145,6 +149,46 @@ def get_dataset(self, dataset_id, dataset_info):
         self._add_satpy_metadata(dataset_id, dataset)
         return dataset

+    def available_datasets(self, configured_datasets):
+        """Add dataset information from arbitrary level 2 files.
+
+        Adds dataset information that is not explicitly specified in the reader
+        YAML file from arbitrary MODIS level 2 product files to the available datasets.
+
+        Notes:
+            Currently only adds 2D datasets and does not decode bit encoded information.
+        """
+        # pass along yaml configured (handled) datasets and collect their file keys to check against dynamically
+        # collected variables later on.
+        handled = set()
+        for is_avail, ds_info in (configured_datasets or []):
+            file_key = ds_info.get("file_key", ds_info["name"])
+            handled.add(file_key)
+
+            if is_avail is not None:
+                yield is_avail, ds_info
+                continue
+            yield self.file_type_matches(ds_info["file_type"]), ds_info
+
+        res_dict = {5416: 250, 2708: 500, 1354: 1000, 270: 5000, 135: 10000}
+
+        # get variables from file dynamically and only add those which are not already configured in yaml
+        for var_name, val in self.sd.datasets().items():
+            if var_name in handled:
+                continue
+            if len(val[0]) != 2:
+                continue
+            resolution = res_dict.get(val[1][-1])
+            if resolution is not None:
+                ds_info = {
+                    "file_type": self.filetype_info["file_type"],
+                    "resolution": resolution,
+                    "name": var_name,
+                    "file_key": var_name,
+                    "coordinates": ["longitude", "latitude"]
+                }
+                yield True, ds_info
+
     def _extract_and_mask_category_dataset(self, dataset_id, dataset_info, var_name):
         # what dimension is per-byte
         byte_dimension = None if self.is_imapp_mask_byte1 else dataset_info["byte_dimension"]
diff --git a/satpy/readers/msi_safe.py b/satpy/readers/msi_safe.py
index b041436a74..ec6a39f084 100644
--- a/satpy/readers/msi_safe.py
+++ b/satpy/readers/msi_safe.py
@@ -15,7 +15,7 @@
 #
 # You should have received a copy of the GNU General Public License along with
 # satpy. If not, see .
-"""SAFE MSI L1C reader.
+"""SAFE MSI L1C/L2A reader.

 The MSI data has a special value for saturated pixels. By default, these
 pixels are set to np.inf, but for some applications it might be desirable
@@ -32,6 +32,10 @@

 https://sentinels.copernicus.eu/documents/247904/685211/S2-PDGS-TAS-DI-PSD-V14.9.pdf/3d3b6c9c-4334-dcc4-3aa7-f7c0deffbaf7?t=1643013091529

+NOTE: At present, L1B data is not supported. If the user needs radiance data instead of counts or reflectances, these
+are retrieved by first calculating the reflectance and then working back to the radiance. L1B radiance data support
+will be added once the data is published to the Copernicus data ecosystem.
+
 """

 import logging
@@ -59,13 +63,16 @@
 class SAFEMSIL1C(BaseFileHandler):
     """File handler for SAFE MSI files (jp2)."""

-    def __init__(self, filename, filename_info, filetype_info, mda, tile_mda, mask_saturated=True):
+    def __init__(self, filename, filename_info, filetype_info, mda, tile_mda,
+                 mask_saturated=True):
         """Initialize the reader."""
         super(SAFEMSIL1C, self).__init__(filename, filename_info, filetype_info)
         del mask_saturated
         self._channel = filename_info["band_name"]
         self.process_level = filename_info["process_level"]
+        if self.process_level not in ["L1C", "L2A"]:
+            raise ValueError(f"Unsupported process level: {self.process_level}")
         self._tile_mda = tile_mda
         self._mda = mda
         self.platform_name = PLATFORMS[filename_info["fmission_id"]]
@@ -83,7 +90,6 @@ def get_dataset(self, key, info):
         if proj is None:
             return
         proj.attrs = info.copy()
-        proj.attrs["units"] = "%"
         proj.attrs["platform_name"] = self.platform_name
         return proj

@@ -93,7 +99,21 @@ def _read_from_file(self, key):
         if key["calibration"] == "reflectance":
             return self._mda.calibrate_to_reflectances(proj, self._channel)
         if key["calibration"] == "radiance":
-            return self._mda.calibrate_to_radiances(proj, self._channel)
+            # The calibration procedure differs for L1B and L1C/L2A data!
+            if self.process_level in ["L1C", "L2A"]:
+                # For higher level data, radiances must be computed from the reflectance.
+                # By default, we use the mean solar angles so that the user does not need to resample,
+                # but the user can also choose to use the solar angles from the tile metadata.
+                # This is on a coarse grid, so for most bands it must be resampled before use.
+                dq = dict(name="solar_zenith_angle", resolution=key["resolution"])
+                zen = self._tile_mda.get_dataset(dq, dict(xml_tag="Sun_Angles_Grid/Zenith"))
+                tmp_refl = self._mda.calibrate_to_reflectances(proj, self._channel)
+                return self._mda.calibrate_to_radiances(tmp_refl, zen, self._channel)
+            else:
+                # For L1B the radiances can be directly computed from the digital counts.
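+                # (i.e. radiance = (counts + band_offset) / physical_gain; see
+                # calibrate_to_radiances_l1b below.)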
+                return self._mda.calibrate_to_radiances_l1b(proj, self._channel)
+
+
         if key["calibration"] == "counts":
             return self._mda._sanitize_data(proj)
         if key["calibration"] in ["aerosol_thickness", "water_vapor"]:
@@ -149,7 +169,7 @@ class SAFEMSIMDXML(SAFEMSIXMLMetadata):

     def calibrate_to_reflectances(self, data, band_name):
         """Calibrate *data* using the radiometric information for the metadata."""
-        quantification = int(self.root.find(".//QUANTIFICATION_VALUE").text) if self.process_level == "L1C" else \
+        quantification = int(self.root.find(".//QUANTIFICATION_VALUE").text) if self.process_level[:2] == "L1" else \
             int(self.root.find(".//BOA_QUANTIFICATION_VALUE").text)
         data = self._sanitize_data(data)
         return (data + self.band_offset(band_name)) / quantification * 100
@@ -157,7 +177,7 @@ def calibrate_to_reflectances(self, data, band_name):
     def calibrate_to_atmospheric(self, data, band_name):
         """Calibrate L2A AOT/WVP product."""
         atmospheric_bands = ["AOT", "WVP"]
-        if self.process_level == "L1C":
+        if self.process_level == "L1C" or self.process_level == "L1B":
             return
         elif self.process_level == "L2A" and band_name not in atmospheric_bands:
             return
@@ -194,7 +214,7 @@ def band_indices(self):
     @cached_property
     def band_offsets(self):
         """Get the band offsets from the metadata."""
-        offsets = self.root.find(".//Radiometric_Offset_List") if self.process_level == "L1C" else \
+        offsets = self.root.find(".//Radiometric_Offset_List") if self.process_level[:2] == "L1" else \
             self.root.find(".//BOA_ADD_OFFSET_VALUES_LIST")
         if offsets is not None:
             band_offsets = {int(off.attrib["band_id"]): float(off.text) for off in offsets}
@@ -202,6 +222,30 @@ def band_offsets(self):
             band_offsets = {}
         return band_offsets

+    def solar_irradiance(self, band_name):
+        """Get the solar irradiance for a given *band_name*."""
+        band_index = self._band_index(band_name)
+        return self.solar_irradiances[band_index]
+
+    @cached_property
+    def solar_irradiances(self):
+        """Get the TOA solar irradiance values from the metadata."""
+        irrads = self.root.find(".//Solar_Irradiance_List")
+
+        if irrads is not None:
+            solar_irrad = {int(irr.attrib["bandId"]): float(irr.text) for irr in irrads}
+            if len(solar_irrad) > 0:
+                return solar_irrad
+        raise ValueError("No solar irradiance values were found in the metadata.")
+
+    @cached_property
+    def sun_earth_dist(self):
+        """Get the sun-earth distance from the metadata."""
+        sed = self.root.find(".//U")
+        if sed is not None and sed.text is not None:
+            return float(sed.text)
+        raise ValueError("Sun-Earth distance in metadata is missing.")
+
     @cached_property
     def special_values(self):
         """Get the special values from the metadata."""
@@ -219,12 +263,21 @@ def saturated(self):
         """Get the saturated value from the metadata."""
         return self.special_values["SATURATED"]

-    def calibrate_to_radiances(self, data, band_name):
+    def calibrate_to_radiances_l1b(self, data, band_name):
         """Calibrate *data* to radiance using the radiometric information for the metadata."""
         physical_gain = self.physical_gain(band_name)
         data = self._sanitize_data(data)
         return (data + self.band_offset(band_name)) / physical_gain

+    def calibrate_to_radiances(self, data, solar_zenith, band_name):
+        """Calibrate *data* to radiance using the radiometric information from the metadata."""
+        sed = self.sun_earth_dist
+        solar_irrad_band = self.solar_irradiance(band_name)
+
+        solar_zenith = np.deg2rad(solar_zenith)
+
+        return (data / 100.) * solar_irrad_band * np.cos(solar_zenith) / (np.pi * sed * sed)
+
     def physical_gain(self, band_name):
         """Get the physical gain for a given *band_name*."""
         band_index = self._band_index(band_name)
diff --git a/satpy/readers/mviri_l1b_fiduceo_nc.py b/satpy/readers/mviri_l1b_fiduceo_nc.py
index fc5aea2c8e..56f15ca9d8 100644
--- a/satpy/readers/mviri_l1b_fiduceo_nc.py
+++ b/satpy/readers/mviri_l1b_fiduceo_nc.py
@@ -162,9 +162,9 @@

 from satpy.readers._geos_area import get_area_definition, get_area_extent, sampling_to_lfac_cfac
 from satpy.readers.file_handlers import BaseFileHandler
-from satpy.utils import get_legacy_chunk_size
+from satpy.utils import get_chunk_size_limit

-CHUNK_SIZE = get_legacy_chunk_size()
+CHUNK_SIZE = get_chunk_size_limit()
 EQUATOR_RADIUS = 6378140.0
 POLE_RADIUS = 6356755.0
 ALTITUDE = 42164000.0 - EQUATOR_RADIUS
@@ -186,6 +186,8 @@
 ]
 HIGH_RESOL = 2250

+warnings.filterwarnings("ignore", message="^.*We do not yet support duplicate dimension names, but "
+                                          "we do allow initial construction of the object.*$")

 class IRWVCalibrator:
     """Calibrate IR & WV channels."""
@@ -452,84 +454,165 @@ def is_high_resol(resolution):
     return resolution == HIGH_RESOL


-class DatasetWrapper:
-    """Helper class for accessing the dataset."""
+def preprocess_dataset(ds):
+    """Preprocess the given dataset.

-    def __init__(self, nc):
-        """Wrap the given dataset."""
-        self.nc = nc
+    Performs steps that can be done once, such as decoding
+    according to CF conventions.
+    """
+    preproc = _DatasetPreprocessor()
+    return preproc.preprocess(ds)

-    @property
-    def attrs(self):
-        """Exposes dataset attributes."""
-        return self.nc.attrs

-    def __getitem__(self, item):
-        """Get a variable from the dataset."""
-        ds = self.nc[item]
-        if self._should_dims_be_renamed(ds):
-            ds = self._rename_dims(ds)
-        elif self._coordinates_not_assigned(ds):
-            ds = self._reassign_coords(ds)
+class _DatasetPreprocessor:
+    """Helper class for preprocessing the dataset."""
+
+    def preprocess(self, ds):
+        """Preprocess the given dataset."""
+        ds = self._rename_vars(ds)
+        ds = self._decode_cf(ds)
+        ds = self._fix_duplicate_dimensions(ds)
+        self._reassign_coords(ds)
         self._cleanup_attrs(ds)
         return ds

-    def _should_dims_be_renamed(self, ds):
-        """Determine whether dataset dimensions need to be renamed."""
-        return "y_ir_wv" in ds.dims or "y_tie" in ds.dims
-
-    def _rename_dims(self, ds):
-        """Rename dataset dimensions to match satpy's expectations."""
+    def _rename_vars(self, ds):
+        """Rename variables to match satpy's expectations."""
         new_names = {
-            "y_ir_wv": "y",
-            "x_ir_wv": "x",
-            "y_tie": "y",
-            "x_tie": "x"
+            "time_ir_wv": "time",
+        }
+        new_names_avail = {
+            old: new
+            for old, new in new_names.items()
+            if old in ds
         }
-        for old_name, new_name in new_names.items():
-            if old_name in ds.dims:
-                ds = ds.rename({old_name: new_name})
+        return ds.rename(new_names_avail)
+
+    def _decode_cf(self, ds):
+        """Decode data according to CF conventions."""
+        # CF decoding fails because time coordinate contains fill values.
+        # Decode time separately, then decode rest using decode_cf().
+        time = self._decode_time(ds)
+        ds = ds.drop_vars(time.name)
+        ds = xr.decode_cf(ds)
+        ds[time.name] = (time.dims, time.values)
         return ds

-    def _coordinates_not_assigned(self, ds):
-        return "y" in ds.dims and "y" not in ds.coords
+    def _decode_time(self, ds):
+        """Decode time using fill value and offset.
+
+        Replace fill values with NaT.
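+        Decoded times are returned as datetime64[ns].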
+ """ + time = ds["time"] + time_dec = (time + time.attrs["add_offset"]).astype("datetime64[s]").astype("datetime64[ns]") + is_fill_value = time == time.attrs["_FillValue"] + return xr.where(is_fill_value, np.datetime64("NaT"), time_dec) + + def _fix_duplicate_dimensions(self, ds): + """Rename dimensions as duplicate dimensions names are not supported by xarray.""" + ds = ds.copy() + ds.variables["covariance_spectral_response_function_vis"].dims = ("srf_size_1", "srf_size_2") + ds.variables["channel_correlation_matrix_independent"].dims = ("channel_1", "channel_2") + ds.variables["channel_correlation_matrix_structured"].dims = ("channel_1", "channel_2") + return ds.drop_dims(["channel", "srf_size"]) def _reassign_coords(self, ds): """Re-assign coordinates. For some reason xarray doesn't assign coordinates to all high - resolution data variables. + resolution data variables. In that case ds["varname"] doesn't + have coords, but they're still in ds.coords. """ - return ds.assign_coords({"y": self.nc.coords["y"], - "x": self.nc.coords["x"]}) + for var_name, data_array in ds.data_vars.items(): + if self._coordinates_not_assigned(data_array): + ds[var_name] = data_array.assign_coords( + { + "y": ds.coords["y"], + "x": ds.coords["x"] + } + ) + + def _coordinates_not_assigned(self, data_array): + return "y" in data_array.dims and "y" not in data_array.coords def _cleanup_attrs(self, ds): """Cleanup dataset attributes.""" # Remove ancillary_variables attribute to avoid downstream # satpy warnings. - ds.attrs.pop("ancillary_variables", None) + for data_array in ds.data_vars.values(): + data_array.attrs.pop("ancillary_variables", None) - def get_time(self): - """Get time coordinate. - Variable is sometimes named "time" and sometimes "time_ir_wv". - """ - try: - return self["time_ir_wv"] - except KeyError: - return self["time"] +class DatasetAccessor: + """Helper class for accessing the dataset. + + Performs steps that need to be done each time a variable + is accessed, such as renaming "y_*" coordinates to "y". 
+ """ + + def __init__(self, ds): + """Wrap the given dataset.""" + self.ds = ds + + @property + def attrs(self): + """Exposes dataset attributes.""" + return self.ds.attrs + + def __getitem__(self, item): + """Get a variable from the dataset.""" + data_array = self.ds[item] + if self._should_dims_be_renamed(data_array): + return self._rename_dims(data_array) + return data_array + + def _should_dims_be_renamed(self, data_array): + """Determine whether dataset dimensions need to be renamed.""" + return "y_ir_wv" in data_array.dims or "y_tie" in data_array.dims + + def _rename_dims(self, data_array): + """Rename dataset dimensions to match satpy's expectations.""" + new_names = { + "y_ir_wv": "y", + "x_ir_wv": "x", + "y_tie": "y", + "x_tie": "x" + } + new_names_avail = { + old: new + for old, new in new_names.items() + if old in data_array.dims + } + return data_array.rename(new_names_avail) def get_xy_coords(self, resolution): """Get x and y coordinates for the given resolution.""" if is_high_resol(resolution): - return self.nc.coords["x"], self.nc.coords["y"] - return self.nc.coords["x_ir_wv"], self.nc.coords["x_ir_wv"] + return self.ds.coords["x"], self.ds.coords["y"] + return self.ds.coords["x_ir_wv"], self.ds.coords["x_ir_wv"] def get_image_size(self, resolution): """Get image size for the given resolution.""" if is_high_resol(resolution): - return self.nc.coords["y"].size - return self.nc.coords["y_ir_wv"].size + return self.ds.coords["y"].size + return self.ds.coords["y_ir_wv"].size + + +def open_dataset(filename): + """Load dataset from the given file.""" + nc_raw = xr.open_dataset( + filename, + chunks={"x": CHUNK_SIZE, + "y": CHUNK_SIZE, + "x_ir_wv": CHUNK_SIZE, + "y_ir_wv": CHUNK_SIZE}, + # see dataset preprocessor for why decoding is disabled + decode_cf=False, + decode_times=False, + mask_and_scale=False, + ) + nc_preproc = preprocess_dataset(nc_raw) + return DatasetAccessor(nc_preproc) class FiduceoMviriBase(BaseFileHandler): @@ -553,20 +636,9 @@ def __init__(self, filename, filename_info, filetype_info, # noqa: D417 super(FiduceoMviriBase, self).__init__( filename, filename_info, filetype_info) self.mask_bad_quality = mask_bad_quality - nc_raw = xr.open_dataset( - filename, - chunks={"x": CHUNK_SIZE, - "y": CHUNK_SIZE, - "x_ir_wv": CHUNK_SIZE, - "y_ir_wv": CHUNK_SIZE} - ) - self.nc = DatasetWrapper(nc_raw) - - # Projection longitude is not provided in the file, read it from the - # filename. - self.projection_longitude = float(filename_info["projection_longitude"]) + self.nc = open_dataset(filename) + self.projection_longitude = self._get_projection_longitude(filename_info) self.calib_coefs = self._get_calib_coefs() - self._get_angles = functools.lru_cache(maxsize=8)( self._get_angles_uncached ) @@ -574,6 +646,13 @@ def __init__(self, filename, filename_info, filetype_info, # noqa: D417 self._get_acq_time_uncached ) + def _get_projection_longitude(self, filename_info): + """Read projection longitude from filename as it is not provided in the file.""" + if "." in str(filename_info["projection_longitude"]): + return float(filename_info["projection_longitude"]) + + return float(filename_info["projection_longitude"]) / 100 + def get_dataset(self, dataset_id, dataset_info): """Get the dataset.""" name = dataset_id["name"] @@ -703,7 +782,7 @@ def _get_acq_time_uncached(self, resolution): Note that the acquisition time does not increase monotonically with the scanline number due to the scan pattern and rectification. 
""" - time2d = self.nc.get_time() + time2d = self.nc["time"] _, target_y = self.nc.get_xy_coords(resolution) return Interpolator.interp_acq_time(time2d, target_y=target_y.values) diff --git a/satpy/readers/mwr_l1b.py b/satpy/readers/mwr_l1b.py new file mode 100644 index 0000000000..173c729ed8 --- /dev/null +++ b/satpy/readers/mwr_l1b.py @@ -0,0 +1,219 @@ +# Copyright (c) 2023 - 2025 Pytroll Developers + +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. + +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +"""Reader for the level-1b data from the MWR sounder onboard AWS and EPS-STerna. + +AWS = Arctic Weather Satellite. MWR = Microwave Radiometer. + +AWS test data provided by ESA August 23, 2023. + +Sample data for five orbits in September 2024 provided by ESA to the Science +Advisory Group for MWS and AWS, November 26, 2024. + +Sample EPS-Sterna l1b format AWS data from 16 orbits the 9th of November 2024. + +Continous feed (though restricted to the SAG members and selected European +users/evaluators) in the EUMETSAT Data Store of global AWS data from January +9th, 2025. + +Example: +-------- +Here is an example how to read the data in satpy: + +.. code-block:: python + + from satpy import Scene + from glob import glob + + filenames = glob("data/W_NO-KSAT-Tromso,SAT,AWS1-MWR-1B-RAD_C_OHB__*_G_O_20250110114708*.nc" + scn = Scene(filenames=filenames, reader='aws1_mwr_l1b_nc') + + composites = ['mw183_humidity'] + dataset_names = composites + ['1'] + + scn.load(dataset_names) + print(scn['1']) + scn.show('mw183_humidity') + + +As the file format for the EPS Sterna Level-1b is slightly different from the +ESA format, reading the EPS Sterna level-1b data uses a different reader, named +`eps_sterna_mwr_l1b_nc`. So, if specifying the reader name as in the above code +example, please provide the actual name for that data: eps_sterna_mwr_l1b_nc. 
+
+
+"""
+
+import xarray as xr
+
+from .netcdf_utils import NetCDF4FileHandler
+
+MWR_CHANNEL_NAMES = [str(i) for i in range(1, 20)]
+
+NAVIGATION_DATASET_NAMES = ["satellite_zenith_horn1",
+                            "satellite_zenith_horn2",
+                            "satellite_zenith_horn3",
+                            "satellite_zenith_horn4",
+                            "solar_azimuth_horn1",
+                            "solar_azimuth_horn2",
+                            "solar_azimuth_horn3",
+                            "solar_azimuth_horn4",
+                            "solar_zenith_horn1",
+                            "solar_zenith_horn2",
+                            "solar_zenith_horn3",
+                            "solar_zenith_horn4",
+                            "satellite_azimuth_horn1",
+                            "satellite_azimuth_horn2",
+                            "satellite_azimuth_horn3",
+                            "satellite_azimuth_horn4",
+                            "longitude",
+                            "latitude"]
+
+class AWS_EPS_Sterna_BaseFileHandler(NetCDF4FileHandler):
+    """Base class implementing the AWS/EPS-Sterna MWR Level-1b&c Filehandlers."""
+
+    def __init__(self, filename, filename_info, filetype_info, auto_maskandscale=True):
+        """Initialize the handler."""
+        super().__init__(filename, filename_info, filetype_info,
+                         cache_var_size=10000,
+                         cache_handle=True)
+        self.filename_info = filename_info
+
+    @property
+    def start_time(self):
+        """Get the start time."""
+        return self.filename_info["start_time"]
+
+    @property
+    def end_time(self):
+        """Get the end time."""
+        return self.filename_info["end_time"]
+
+    @property
+    def sensor(self):
+        """Get the sensor name."""
+        # This should have been self["/attr/instrument"]
+        # But the sensor name is currently incorrect in the ESA level-1b files
+        return "mwr"
+
+    @property
+    def platform_name(self):
+        """Get the platform name."""
+        return self.filename_info["platform_name"]
+
+    @property
+    def orbit_start(self):
+        """Get the orbit number for the start of data."""
+        return int(self["/attr/orbit_start"])
+
+    @property
+    def orbit_end(self):
+        """Get the orbit number for the end of data."""
+        return int(self["/attr/orbit_end"])
+
+    def get_dataset(self, dataset_id, dataset_info):
+        """Get the data."""
+        raise NotImplementedError("This is not implemented in the Base class.")
+
+    def _get_channel_data(self, dataset_id, dataset_info):
+        channel_data = self[dataset_info["file_key"]]
+        channel_data.coords["n_channels"] = MWR_CHANNEL_NAMES
+        channel_data = channel_data.rename({"n_fovs": "x", "n_scans": "y"})
+        return channel_data.sel(n_channels=dataset_id["name"]).drop_vars("n_channels")
+
+
+class AWS_EPS_Sterna_MWR_L1BFile(AWS_EPS_Sterna_BaseFileHandler):
+    """Class implementing the AWS/EPS-Sterna MWR L1b Filehandler."""
+
+    def __init__(self, filename, filename_info, filetype_info, auto_maskandscale=True):
+        """Initialize the handler."""
+        super().__init__(filename, filename_info, filetype_info, auto_maskandscale)
+        self._feed_horn_group_name = filetype_info.get("feed_horn_group_name")
+
+    @property
+    def sub_satellite_longitude_start(self):
+        """Get the longitude of sub-satellite point at start of the product."""
+        return self["status/satellite/subsat_longitude_start"].data.item()
+
+    @property
+    def sub_satellite_latitude_start(self):
+        """Get the latitude of sub-satellite point at start of the product."""
+        return self["status/satellite/subsat_latitude_start"].data.item()
+
+    @property
+    def sub_satellite_longitude_end(self):
+        """Get the longitude of sub-satellite point at end of the product."""
+        return self["status/satellite/subsat_longitude_end"].data.item()
+
+    @property
+    def sub_satellite_latitude_end(self):
+        """Get the latitude of sub-satellite point at end of the product."""
+        return self["status/satellite/subsat_latitude_end"].data.item()
+
+    def get_dataset(self, dataset_id, dataset_info):
+        """Get the data."""
+        if dataset_id["name"] in MWR_CHANNEL_NAMES:
+            data_array = self._get_channel_data(dataset_id, dataset_info)
+        elif dataset_id["name"] in NAVIGATION_DATASET_NAMES:
+            data_array = self._get_navigation_data(dataset_id, dataset_info)
+        else:
+            raise NotImplementedError(f"Dataset {dataset_id['name']} not available or not supported yet!")
+
+        data_array = mask_and_scale(data_array)
+        if dataset_id["name"] == "longitude":
+            data_array = data_array.where(data_array <= 180, data_array - 360)
+
+        data_array.attrs.update(dataset_info)
+
+        data_array.attrs["orbital_parameters"] = {"sub_satellite_latitude_start": self.sub_satellite_latitude_start,
+                                                  "sub_satellite_longitude_start": self.sub_satellite_longitude_start,
+                                                  "sub_satellite_latitude_end": self.sub_satellite_latitude_end,
+                                                  "sub_satellite_longitude_end": self.sub_satellite_longitude_end}
+
+        data_array.attrs["platform_name"] = self.platform_name
+        data_array.attrs["sensor"] = self.sensor
+        data_array.attrs["orbit_number"] = self.orbit_start
+        return data_array
+
+    def _get_navigation_data(self, dataset_id, dataset_info):
+        """Get the navigation (geolocation) data for one feed horn."""
+        geo_data = self[dataset_info["file_key"]]
+        geo_data.coords[self._feed_horn_group_name] = ["1", "2", "3", "4"]
+        geo_data = geo_data.rename({"n_fovs": "x", "n_scans": "y"})
+        horn = dataset_id["horn"].name
+        _selection = {self._feed_horn_group_name: horn}
+        return geo_data.sel(_selection).drop_vars(self._feed_horn_group_name)
+
+
+def mask_and_scale(data_array):
+    """Mask then scale the data array."""
+    if "missing_value" in data_array.attrs:
+        with xr.set_options(keep_attrs=True):
+            data_array = data_array.where(data_array != data_array.attrs["missing_value"])
+            data_array.attrs.pop("missing_value")
+    if "valid_max" in data_array.attrs:
+        with xr.set_options(keep_attrs=True):
+            data_array = data_array.where(data_array <= data_array.attrs["valid_max"])
+            data_array.attrs.pop("valid_max")
+    if "valid_min" in data_array.attrs:
+        with xr.set_options(keep_attrs=True):
+            data_array = data_array.where(data_array >= data_array.attrs["valid_min"])
+            data_array.attrs.pop("valid_min")
+    if "scale_factor" in data_array.attrs and "add_offset" in data_array.attrs:
+        with xr.set_options(keep_attrs=True):
+            data_array = data_array * data_array.attrs["scale_factor"] + data_array.attrs["add_offset"]
+            data_array.attrs.pop("scale_factor")
+            data_array.attrs.pop("add_offset")
+    return data_array
diff --git a/satpy/readers/mwr_l1c.py b/satpy/readers/mwr_l1c.py
new file mode 100644
index 0000000000..3d429fd8f5
--- /dev/null
+++ b/satpy/readers/mwr_l1c.py
@@ -0,0 +1,95 @@
+# Copyright (c) 2024 - 2025 Pytroll Developers
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see .
+"""Reader for the Arctic Weather Satellite (AWS) MWR level-1c data.
+
+MWR = Microwave Radiometer, onboard AWS and EPS-Sterna
+
+Sample data provided by ESA on September 27, 2024.
+
+
+Example:
+--------
+Here is an example of how to read the data in satpy:
+
+.. code-block:: python
+
+    from satpy import Scene
+    from glob import glob
+
+    filenames = glob("data/W_XX-OHB-Stockholm,SAT,AWS1-MWR-1C-RAD_C_OHB_*20240913204851_*.nc")
+
+    scn = Scene(filenames=filenames, reader='aws1_mwr_l1c_nc')
+
+    composites = ['mw183_humidity']
+    dataset_names = composites + ['1']
+
+    scn.load(dataset_names)
+    print(scn['1'])
+    scn.show('mw183_humidity')
+
+"""
+
+
+from satpy.readers.mwr_l1b import MWR_CHANNEL_NAMES, AWS_EPS_Sterna_BaseFileHandler, mask_and_scale
+
+
+class AWS_MWR_L1CFile(AWS_EPS_Sterna_BaseFileHandler):
+    """Class implementing the AWS L1c Filehandler.
+
+    This class implements the ESA Arctic Weather Satellite (AWS) Level-1c
+    NetCDF reader. It is designed to be used through the :class:`~satpy.Scene`
+    class using the :meth:`~satpy.Scene.load` method with the reader
+    ``"aws1_mwr_l1c_nc"``.
+
+    """
+    def __init__(self, filename, filename_info, filetype_info, auto_maskandscale=True):
+        """Initialize the handler."""
+        super().__init__(filename, filename_info, filetype_info, auto_maskandscale)
+        self.filename_info = filename_info
+
+    @property
+    def sensor(self):
+        """Get the sensor name."""
+        # This should have been self["/attr/instrument"]
+        # But the sensor name is currently incorrect in the ESA level-1b files
+        return "mwr"
+
+    def get_dataset(self, dataset_id, dataset_info):
+        """Get the data."""
+        if dataset_id["name"] in MWR_CHANNEL_NAMES:
+            data_array = self._get_channel_data(dataset_id, dataset_info)
+        elif (dataset_id["name"] in ["longitude", "latitude",
+                                     "solar_azimuth_angle", "solar_zenith_angle",
+                                     "satellite_zenith_angle", "satellite_azimuth_angle"]):
+            data_array = self._get_navigation_data(dataset_id, dataset_info)
+        else:
+            raise NotImplementedError(f"Dataset {dataset_id['name']} not available or not supported yet!")
+
+        data_array = mask_and_scale(data_array)
+        if dataset_id["name"] == "longitude":
+            data_array = data_array.where(data_array <= 180, data_array - 360)
+
+        data_array.attrs.update(dataset_info)
+
+        data_array.attrs["platform_name"] = self.platform_name
+        data_array.attrs["sensor"] = self.sensor
+        return data_array
+
+
+    def _get_navigation_data(self, dataset_id, dataset_info):
+        """Get the navigation (geolocation) data."""
+        geo_data = self[dataset_info["file_key"]]
+        geo_data = geo_data.rename({"n_fovs": "x", "n_scans": "y"})
+        return geo_data
diff --git a/satpy/readers/oli_tirs_l1_tif.py b/satpy/readers/oli_tirs_l1_tif.py
new file mode 100644
index 0000000000..08d2240e88
--- /dev/null
+++ b/satpy/readers/oli_tirs_l1_tif.py
@@ -0,0 +1,309 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2024 Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see .
+"""Landsat OLI/TIRS Level 1 reader.
+
+Details of the data format can be found here:
+ https://d9-wret.s3.us-west-2.amazonaws.com/assets/palladium/production/s3fs-public/atoms/files/LSDS-1822_Landsat8-9-OLI-TIRS-C2-L1-DFCB-v6.pdf
+ https://www.usgs.gov/landsat-missions/using-usgs-landsat-level-1-data-product
+
+NOTE: The scene geometry data (SZA, VZA, SAA, VAA) is retrieved from the L1 TIFF files, which are derived from Band 04.
+The geometry differs between bands, so if you need precise geometry you should calculate this from the metadata instead.
+
+"""
+
+import logging
+from datetime import datetime, timezone
+
+import defusedxml.ElementTree as ET
+import numpy as np
+import xarray as xr
+
+from satpy.readers.file_handlers import BaseFileHandler
+
+logger = logging.getLogger(__name__)
+
+PLATFORMS = {"08": "Landsat-8",
+             "09": "Landsat-9"}
+
+OLI_BANDLIST = ["B1", "B2", "B3", "B4", "B5", "B6", "B7", "B8", "B9"]
+TIRS_BANDLIST = ["B10", "B11"]
+PAN_BANDLIST = ["B8"]
+ANGLIST = ["satellite_azimuth_angle",
+           "satellite_zenith_angle",
+           "solar_azimuth_angle",
+           "solar_zenith_angle"]
+
+ANGLIST_CHAN = ["sza", "saa", "vaa", "vza"]
+
+BANDLIST = OLI_BANDLIST + TIRS_BANDLIST
+
+
+class OLITIRSCHReader(BaseFileHandler):
+    """File handler for Landsat L1 files (tif)."""
+
+    @staticmethod
+    def get_btype(file_type):
+        """Return the band type from the file type."""
+        pos = file_type.rfind("_")
+        if pos == -1:
+            raise ValueError(f"Invalid file type: {file_type}")
+        else:
+            return file_type[pos+1:]
+
+    @property
+    def start_time(self):
+        """Return start time."""
+        return self._mda.start_time
+
+    @property
+    def end_time(self):
+        """Return end time."""
+        return self._mda.end_time
+
+    def __init__(self, filename, filename_info, filetype_info, mda, **kwargs):
+        """Initialize the reader."""
+        super().__init__(filename, filename_info, filetype_info)
+
+        # Check we have landsat data
+        if filename_info["platform_type"] != "L":
+            raise ValueError("This reader only supports Landsat data")
+
+        # Get the channel name
+        self.channel = self.get_btype(filetype_info["file_type"])
+
+        # Data can be VIS, TIR or Combined. This flag denotes what the granule contains (O, T or C respectively).
+        self.chan_selector = filename_info["data_type"]
+
+        self._obs_date = filename_info["observation_date"]
+        self._mda = mda
+
+        # Retrieve some per-band useful metadata
+        self.bsat = self._mda.band_saturation
+        self.calinfo = self._mda.band_calibration
+        self.platform_name = PLATFORMS[filename_info["spacecraft_id"]]
+
+    def get_dataset(self, key, info):
+        """Load a dataset."""
+        if self.channel != key["name"] and self.channel not in ANGLIST_CHAN:
+            raise ValueError(f"Requested channel {key['name']} does not match the reader channel {self.channel}")
+
+        if key["name"] in OLI_BANDLIST and self.chan_selector not in ["O", "C"]:
+            raise ValueError(f"Requested channel {key['name']} is not available in this granule")
+        if key["name"] in TIRS_BANDLIST and self.chan_selector not in ["T", "C"]:
+            raise ValueError(f"Requested channel {key['name']} is not available in this granule")
+
+        logger.debug("Reading %s.", key["name"])
+
+        data = xr.open_dataarray(self.filename, engine="rasterio",
+                                 chunks={"band": 1,
+                                         "y": "auto",
+                                         "x": "auto"},
+                                 mask_and_scale=False).squeeze()
+
+
+        # The fill value for Landsat is '0'; for calibration simplicity convert it to np.nan
+        data.data = xr.where(data.data == 0, np.float32(np.nan), data.data)
+
+        attrs = data.attrs.copy()
+        # Add useful metadata to the attributes.
+ """ + return datetime(self._obs_date.year, self._obs_date.month, self._obs_date.day, + self.center_time.hour, self.center_time.minute, self.center_time.second, + tzinfo=timezone.utc) + + @property + def cloud_cover(self): + """Return estimated granule cloud cover percentage.""" + return float(self.root.find(".//IMAGE_ATTRIBUTES/CLOUD_COVER").text) + + def _get_satflag(self, band): + """Return saturation flag for a band.""" + flag = self.root.find(f".//IMAGE_ATTRIBUTES/SATURATION_BAND_{band}").text + if flag == "Y": + return True + return False + + @property + def band_saturation(self): + """Return per-band saturation flag.""" + bdict = {} + for i in range(1, 10): + bdict[f"B{i:01d}"] = self._get_satflag(i) + + return bdict + + def _get_band_radcal(self, band): + """Get the radiance scale and offset values.""" + rad_gain = float(self.root.find(f".//LEVEL1_RADIOMETRIC_RESCALING/RADIANCE_MULT_BAND_{band}").text) + rad_add = float(self.root.find(f".//LEVEL1_RADIOMETRIC_RESCALING/RADIANCE_ADD_BAND_{band}").text) + return rad_gain, rad_add + + def _get_band_viscal(self, band): + """Return visible channel calibration info.""" + rad_gain, rad_add = self._get_band_radcal(band) + ref_gain = float(self.root.find(f".//LEVEL1_RADIOMETRIC_RESCALING/REFLECTANCE_MULT_BAND_{band}").text) + ref_add = float(self.root.find(f".//LEVEL1_RADIOMETRIC_RESCALING/REFLECTANCE_ADD_BAND_{band}").text) + return rad_gain, rad_add, ref_gain, ref_add + + def _get_band_tircal(self, band): + """Return thermal channel calibration info.""" + rad_gain, rad_add = self._get_band_radcal(band) + bt_k1 = float(self.root.find(f".//LEVEL1_THERMAL_CONSTANTS/K1_CONSTANT_BAND_{band}").text) + bt_k2 = float(self.root.find(f".//LEVEL1_THERMAL_CONSTANTS/K2_CONSTANT_BAND_{band}").text) + return rad_gain, rad_add, bt_k1, bt_k2 + + @property + def band_calibration(self): + """Return per-band saturation flag.""" + bdict = {} + for i in range(1, 10): + bdict[f"B{i:01d}"] = self._get_band_viscal(i) + for i in range(10, 12): + bdict[f"B{i:02d}"] = self._get_band_tircal(i) + + return bdict + + def earth_sun_distance(self): + """Return Earth-Sun distance.""" + return float(self.root.find(".//IMAGE_ATTRIBUTES/EARTH_SUN_DISTANCE").text) + + def build_area_def(self, bname): + """Build area definition from metadata.""" + from pyresample.geometry import AreaDefinition + + # Here we assume that the thermal bands have the same resolution as the reflective bands, + # with only the panchromatic band (b08) having a different resolution. + if bname in PAN_BANDLIST: + pixoff = float(self.root.find(".//PROJECTION_ATTRIBUTES/GRID_CELL_SIZE_PANCHROMATIC").text) / 2. + x_size = float(self.root.find(".//PROJECTION_ATTRIBUTES/PANCHROMATIC_SAMPLES").text) + y_size = float(self.root.find(".//PROJECTION_ATTRIBUTES/PANCHROMATIC_LINES").text) + else: + pixoff = float(self.root.find(".//PROJECTION_ATTRIBUTES/GRID_CELL_SIZE_REFLECTIVE").text) / 2. 
+ x_size = float(self.root.find(".//PROJECTION_ATTRIBUTES/REFLECTIVE_SAMPLES").text) + y_size = float(self.root.find(".//PROJECTION_ATTRIBUTES/REFLECTIVE_LINES").text) + + # Get remaining geoinfo from file + datum = self.root.find(".//PROJECTION_ATTRIBUTES/DATUM").text + utm_zone = int(self.root.find(".//PROJECTION_ATTRIBUTES/UTM_ZONE").text) + utm_str = f"{utm_zone}N" + + # We need to subtract / add half a pixel from the corner to get the correct extent (pixel centers) + ext_p1 = float(self.root.find(".//PROJECTION_ATTRIBUTES/CORNER_UL_PROJECTION_X_PRODUCT").text) - pixoff + ext_p2 = float(self.root.find(".//PROJECTION_ATTRIBUTES/CORNER_LR_PROJECTION_Y_PRODUCT").text) - pixoff + ext_p3 = float(self.root.find(".//PROJECTION_ATTRIBUTES/CORNER_LR_PROJECTION_X_PRODUCT").text) + pixoff + ext_p4 = float(self.root.find(".//PROJECTION_ATTRIBUTES/CORNER_UL_PROJECTION_Y_PRODUCT").text) + pixoff + + # Create area definition + pcs_id = f"{datum} / UTM zone {utm_str}" + proj4_dict = {"proj": "utm", "zone": utm_zone, "datum": datum, "units": "m", "no_defs": None, "type": "crs"} + area_extent = (ext_p1, ext_p2, ext_p3, ext_p4) + + # Return the area extent + return AreaDefinition("geotiff_area", pcs_id, pcs_id, proj4_dict, x_size, y_size, area_extent) diff --git a/satpy/readers/sar_c_safe.py b/satpy/readers/sar_c_safe.py index 986440759a..437d25929a 100644 --- a/satpy/readers/sar_c_safe.py +++ b/satpy/readers/sar_c_safe.py @@ -51,7 +51,7 @@ import xarray as xr from dask import array as da from geotiepoints.geointerpolator import lonlat2xyz, xyz2lonlat -from geotiepoints.interpolator import MultipleGridInterpolator +from geotiepoints.interpolator import MultipleSplineInterpolator from xarray import DataArray from satpy.dataset.data_dict import DatasetDict @@ -78,7 +78,7 @@ def _dictify(r): return int(r.text) except ValueError: try: - return float(r.text) + return np.float32(r.text) except ValueError: return r.text for x in r.findall("./*"): @@ -186,7 +186,7 @@ def get_dataset(self, key, info, chunks=None): def get_calibration_constant(self): """Load the calibration constant.""" - return float(self.root.find(".//absoluteCalibrationConstant").text) + return np.float32(self.root.find(".//absoluteCalibrationConstant").text) def _get_calibration_uncached(self, calibration, chunks=None): """Get the calibration array.""" @@ -341,7 +341,7 @@ def _get_array_pieces_for_current_line(self, current_line): current_blocks = self._find_blocks_covering_line(current_line) current_blocks.sort(key=(lambda x: x.coords["x"][0])) next_line = self._get_next_start_line(current_blocks, current_line) - current_y = np.arange(current_line, next_line) + current_y = np.arange(current_line, next_line, dtype=np.uint16) pieces = [arr.sel(y=current_y) for arr in current_blocks] return pieces @@ -389,7 +389,7 @@ def _get_padded_dask_pieces(self, pieces, chunks): @staticmethod def _fill_dask_pieces(dask_pieces, shape, chunks): if shape[1] > 0: - new_piece = da.full(shape, np.nan, chunks=chunks) + new_piece = da.full(shape, np.nan, chunks=chunks, dtype=np.float32) dask_pieces.append(new_piece) @@ -425,11 +425,10 @@ def expand(self, chunks): # corr = 1.5 data = self.lut * corr - x_coord = np.arange(self.first_pixel, self.last_pixel + 1) - y_coord = np.arange(self.first_line, self.last_line + 1) - - new_arr = (da.ones((len(y_coord), len(x_coord)), chunks=chunks) * - np.interp(y_coord, self.lines, data)[:, np.newaxis]) + x_coord = np.arange(self.first_pixel, self.last_pixel + 1, dtype=np.uint16) + y_coord = np.arange(self.first_line, 
self.last_line + 1, dtype=np.uint16) + new_arr = (da.ones((len(y_coord), len(x_coord)), dtype=np.float32, chunks=chunks) * + np.interp(y_coord, self.lines, data)[:, np.newaxis].astype(np.float32)) new_arr = xr.DataArray(new_arr, dims=["y", "x"], coords={"x": x_coord, @@ -438,29 +437,29 @@ def expand(self, chunks): @property def first_pixel(self): - return int(self.element.find("firstRangeSample").text) + return np.uint16(self.element.find("firstRangeSample").text) @property def last_pixel(self): - return int(self.element.find("lastRangeSample").text) + return np.uint16(self.element.find("lastRangeSample").text) @property def first_line(self): - return int(self.element.find("firstAzimuthLine").text) + return np.uint16(self.element.find("firstAzimuthLine").text) @property def last_line(self): - return int(self.element.find("lastAzimuthLine").text) + return np.uint16(self.element.find("lastAzimuthLine").text) @property def lines(self): lines = self.element.find("line").text.split() - return np.array(lines).astype(int) + return np.array(lines).astype(np.uint16) @property def lut(self): lut = self.element.find("noiseAzimuthLut").text.split() - return np.array(lut).astype(float) + return np.array(lut, dtype=np.float32) class XMLArray: @@ -487,7 +486,7 @@ def _read_xml_array(self): new_x = elt.find("pixel").text.split() y += [int(elt.find("line").text)] * len(new_x) x += [int(val) for val in new_x] - data += [float(val) + data += [np.float32(val) for val in elt.find(self.element_tag).text.split()] return np.asarray(data), (x, y) @@ -512,24 +511,23 @@ def intp(grid_x, grid_y, interpolator): def interpolate_xarray_linear(xpoints, ypoints, values, shape, chunks=CHUNK_SIZE): """Interpolate linearly, generating a dask array.""" - from scipy.interpolate.interpnd import LinearNDInterpolator, _ndim_coords_from_arrays + from scipy.interpolate.interpnd import LinearNDInterpolator if isinstance(chunks, (list, tuple)): vchunks, hchunks = chunks else: vchunks, hchunks = chunks, chunks - - points = _ndim_coords_from_arrays(np.vstack((np.asarray(ypoints), - np.asarray(xpoints))).T) + points = np.vstack((np.asarray(ypoints, dtype=np.uint16), + np.asarray(xpoints, dtype=np.uint16))).T interpolator = LinearNDInterpolator(points, values) - grid_x, grid_y = da.meshgrid(da.arange(shape[1], chunks=hchunks), - da.arange(shape[0], chunks=vchunks)) + grid_x, grid_y = da.meshgrid(da.arange(shape[1], chunks=hchunks, dtype=np.uint16), + da.arange(shape[0], chunks=vchunks, dtype=np.uint16)) # workaround for non-thread-safe first call of the interpolator: interpolator((0, 0)) - res = da.map_blocks(intp, grid_x, grid_y, interpolator=interpolator) + res = da.map_blocks(intp, grid_x, grid_y, interpolator=interpolator).astype(values.dtype) return DataArray(res, dims=("y", "x")) @@ -617,7 +615,7 @@ def _calibrate_and_denoise(self, data, key): def _get_digital_number(self, data): """Get the digital numbers (uncalibrated data).""" data = data.where(data > 0) - data = data.astype(np.float64) + data = data.astype(np.float32) dn = data * data return dn @@ -636,10 +634,13 @@ def _get_lonlatalts_uncached(self): fine_points = [np.arange(size) for size in shape] x, y, z = lonlat2xyz(gcp_lons, gcp_lats) - interpolator = MultipleGridInterpolator((ypoints, xpoints), x, y, z, gcp_alts) - hx, hy, hz, altitudes = interpolator.interpolate(fine_points, method="cubic", chunks=self.chunks) - longitudes, latitudes = xyz2lonlat(hx, hy, hz) + + interpolator = MultipleSplineInterpolator((ypoints, xpoints), x, y, z, gcp_alts, kx=2, ky=2) + hx, hy, hz, 
altitudes = interpolator.interpolate(fine_points, chunks=self.chunks) + + + longitudes, latitudes = xyz2lonlat(hx, hy, hz) altitudes = xr.DataArray(altitudes, dims=["y", "x"]) longitudes = xr.DataArray(longitudes, dims=["y", "x"]) latitudes = xr.DataArray(latitudes, dims=["y", "x"]) @@ -672,8 +673,8 @@ def get_gcps(self): for feature in gcps["features"]] gcp_array = np.array(gcp_list) - ypoints = np.unique(gcp_array[:, 0]) - xpoints = np.unique(gcp_array[:, 1]) + ypoints = np.unique(gcp_array[:, 0]).astype(np.uint16) + xpoints = np.unique(gcp_array[:, 1]).astype(np.uint16) gcp_lons = gcp_array[:, 2].reshape(ypoints.shape[0], xpoints.shape[0]) gcp_lats = gcp_array[:, 3].reshape(ypoints.shape[0], xpoints.shape[0]) @@ -683,6 +684,13 @@ def get_gcps(self): return (xpoints, ypoints), (gcp_lons, gcp_lats, gcp_alts), (rio_gcps, crs) + def get_bounding_box(self): + """Get the bounding box for the data coverage.""" + (xpoints, ypoints), (gcp_lons, gcp_lats, gcp_alts), (rio_gcps, crs) = self.get_gcps() + bblons = np.hstack((gcp_lons[0, :-1], gcp_lons[:-1, -1], gcp_lons[-1, :1:-1], gcp_lons[:1:-1, 0])) + bblats = np.hstack((gcp_lats[0, :-1], gcp_lats[:-1, -1], gcp_lats[-1, :1:-1], gcp_lats[:1:-1, 0])) + return bblons.tolist(), bblats.tolist() + @property def start_time(self): """Get the start time.""" @@ -730,7 +738,8 @@ def load(self, dataset_keys, **kwargs): gcps = get_gcps_from_array(val) from pyresample.future.geometry import SwathDefinition val.attrs["area"] = SwathDefinition(lonlats["longitude"], lonlats["latitude"], - attrs=dict(gcps=gcps)) + attrs=dict(gcps=gcps, + bounding_box=handler.get_bounding_box())) datasets[key] = val continue return datasets diff --git a/satpy/readers/satpy_cf_nc.py b/satpy/readers/satpy_cf_nc.py index 9f742272a1..b34e13d028 100644 --- a/satpy/readers/satpy_cf_nc.py +++ b/satpy/readers/satpy_cf_nc.py @@ -91,8 +91,8 @@ ''AVHRR-GAC_FDR_1C_{platform}_{start_time:%Y%m%dT%H%M%SZ}_{end_time:%Y%m%dT%H%M%SZ}_{processing_mode}_{disposition_mode}_{creation_time}_{version_int:04d}.nc' -Example: --------- +**Example**: + Here is an example how to read the data in satpy: .. code-block:: python diff --git a/satpy/readers/seviri_base.py b/satpy/readers/seviri_base.py index ace63e3f12..c9ad563899 100644 --- a/satpy/readers/seviri_base.py +++ b/satpy/readers/seviri_base.py @@ -153,9 +153,21 @@ scn['IR_108']['y'] = mi scn['IR_108'].sel(time=np.datetime64('2019-03-01T12:06:13.052000000')) -* Raw metadata from the file header can be included by setting the reader - argument ``include_raw_metadata=True`` (HRIT and Native format only). Note - that this comes with a performance penalty of up to 10% if raw metadata from +* HRIT and Native readers can add raw metadata from the file header, such + as calibration coefficients, to dataset attributes. Use the reader keyword + argument ``include_raw_metadata``. Here's an example for extracting + calibration coefficients from Native files. + + .. code-block:: python + + scene = satpy.Scene(filenames, + reader='seviri_l1b_native', + reader_kwargs={'include_raw_metadata': True}) + scene.load(["IR_108"]) + mda = scene["IR_108"].attrs["raw_metadata"] + coefs = mda["15_DATA_HEADER"]["RadiometricProcessing"]["Level15ImageCalibration"] + + Note that this comes with a performance penalty of up to 10% if raw metadata from multiple segments or scans need to be combined. By default, arrays with more than 100 elements are excluded to limit the performance penalty. 
This threshold can be adjusted using the ``mda_max_array_size`` reader keyword @@ -164,8 +176,8 @@ .. code-block:: python scene = satpy.Scene(filenames, - reader='seviri_l1b_hrit/native', - reader_kwargs={'include_raw_metadata': True, + reader='seviri_l1b_native', + reader_kwargs={'include_raw_metadata': True, 'mda_max_array_size': 1000}) References: @@ -213,6 +225,8 @@ REPEAT_CYCLE_DURATION = 15 +REPEAT_CYCLE_DURATION_RSS = 5 + C1 = 1.19104273e-5 C2 = 1.43877523 @@ -686,7 +700,7 @@ def calibrate(self, data, calibration): "brightness_temperature"]: gain, offset = self.get_gain_offset() res = self._algo.convert_to_radiance( - data.astype(np.float32), gain, offset + data.astype(np.float32), np.float32(gain), np.float32(offset) ) else: raise ValueError( diff --git a/satpy/readers/sgli_l1b.py b/satpy/readers/sgli_l1b.py index 079f93d2f3..f22f77b03a 100644 --- a/satpy/readers/sgli_l1b.py +++ b/satpy/readers/sgli_l1b.py @@ -175,7 +175,7 @@ def get_lon_lats(self, key): def interpolate_spherical(self, azimuthal_angle, polar_angle, resampling_interval): """Interpolate spherical coordinates.""" - from geotiepoints.geointerpolator import GeoGridInterpolator + from geotiepoints.geointerpolator import GeoSplineInterpolator full_shape = (self.h5file["Image_data"].attrs["Number_of_lines"], self.h5file["Image_data"].attrs["Number_of_pixels"]) @@ -183,7 +183,7 @@ def interpolate_spherical(self, azimuthal_angle, polar_angle, resampling_interva tie_lines = np.arange(0, polar_angle.shape[0] * resampling_interval, resampling_interval) tie_cols = np.arange(0, polar_angle.shape[1] * resampling_interval, resampling_interval) - interpolator = GeoGridInterpolator((tie_lines, tie_cols), azimuthal_angle, polar_angle, method="slinear") + interpolator = GeoSplineInterpolator((tie_lines, tie_cols), azimuthal_angle, polar_angle, kx=2, ky=2) new_azi, new_pol = interpolator.interpolate_to_shape(full_shape, chunks="auto") return new_azi, new_pol diff --git a/satpy/readers/viirs_edr.py b/satpy/readers/viirs_edr.py index 10cdbac043..43f7d06032 100644 --- a/satpy/readers/viirs_edr.py +++ b/satpy/readers/viirs_edr.py @@ -197,9 +197,12 @@ def platform_name(self): platform_path = self.filename_info["platform_shortname"] platform_dict = {"NPP": "Suomi-NPP", "JPSS-1": "NOAA-20", + "SNPP": "Suomi-NPP", "J01": "NOAA-20", + "N20": "NOAA-20", "JPSS-2": "NOAA-21", - "J02": "NOAA-21"} + "J02": "NOAA-21", + "N21": "NOAA-21"} return platform_dict[platform_path.upper()] def available_datasets(self, configured_datasets=None): diff --git a/satpy/readers/yaml_reader.py b/satpy/readers/yaml_reader.py index 5bbaba4a6c..0d0894fb77 100644 --- a/satpy/readers/yaml_reader.py +++ b/satpy/readers/yaml_reader.py @@ -636,8 +636,9 @@ def create_filehandlers(self, filenames, fh_kwargs=None): self.file_handlers.get(filetype, []) + filehandlers, key=lambda fhd: (fhd.start_time, fhd.filename)) - # load any additional dataset IDs determined dynamically from the file - # and update any missing metadata that only the file knows + # Update dataset IDs with IDs determined dynamically from the file + # and/or update any missing metadata that only the file knows. + # Check if the dataset ID is loadable from that file. 
self.update_ds_ids_from_file_handlers()
         return created_fhs
diff --git a/satpy/scene.py b/satpy/scene.py
index e340cee372..0e46ce1544 100644
--- a/satpy/scene.py
+++ b/satpy/scene.py
@@ -1079,12 +1079,16 @@ def to_hvplot(self, datasets=None, *args, **kwargs):
-    def to_xarray_dataset(self, datasets=None):
+    def to_xarray_dataset(self, datasets=None, compat="minimal"):
         """Merge all xr.DataArrays of a scene to a xr.DataSet.
 
         Parameters:
             datasets (list):
                 List of products to include in the :class:`xarray.Dataset`
+            compat (str):
+                How to compare variables with the same name for conflicts.
+                See :func:`xarray.merge` for possible options. Defaults to
+                "minimal" which drops conflicting variables.
 
         Returns: :class:`xarray.Dataset`
@@ -1100,7 +1104,7 @@ def to_xarray_dataset(self, datasets=None):
         mdata = combine_metadata(*tuple(i.attrs for i in dataarrays))
         if mdata.get("area") is None or not isinstance(mdata["area"], SwathDefinition):
             # either don't know what the area is or we have an AreaDefinition
-            ds = xr.merge(ds_dict.values())
+            ds = xr.merge(ds_dict.values(), compat=compat)
         else:
             # we have a swath definition and should use lon/lat values
             lons, lats = mdata["area"].get_lonlats()
diff --git a/satpy/tests/behave/features/image_comparison.feature b/satpy/tests/behave/features/image_comparison.feature
new file mode 100755
index 0000000000..686062462c
--- /dev/null
+++ b/satpy/tests/behave/features/image_comparison.feature
@@ -0,0 +1,16 @@
+Feature: Image Comparison
+
+  Scenario Outline: Compare generated image with reference image
+    Given I have a <composite> reference image file from <satellite> resampled to <area>
+    When I generate a new <composite> image file from <satellite> case <case> with <reader> for <area> with clipping <clip>
+    Then the generated image should be the same as the reference image
+
+  Examples:
+    |satellite   | case           | composite                         | reader     | area | clip  |
+    |Meteosat-12 | scan_night     | cloudtop                          | fci_l1c_nc | sve  | True  |
+    |Meteosat-12 | scan_night     | night_microphysics                | fci_l1c_nc | sve  | True  |
+    |Meteosat-12 | mali_day       | essl_colorized_low_level_moisture | fci_l1c_nc | mali | False |
+    |GOES17      | americas_night | airmass                           | abi_l1b    | null | null  |
+    |GOES16      | americas_night | airmass                           | abi_l1b    | null | null  |
+    |GOES16      | americas_night | ash                               | abi_l1b    | null | null  |
+    |GOES17      | americas_night | ash                               | abi_l1b    | null | null  |
diff --git a/satpy/tests/behave/features/steps/image_comparison.py b/satpy/tests/behave/features/steps/image_comparison.py
new file mode 100644
index 0000000000..5e7135bc53
--- /dev/null
+++ b/satpy/tests/behave/features/steps/image_comparison.py
@@ -0,0 +1,134 @@
+# Copyright (c) 2024 Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see <http://www.gnu.org/licenses/>.
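The ``compat`` keyword added to ``Scene.to_xarray_dataset`` above is passed straight through to :func:`xarray.merge`, so any of xarray's comparison modes can be used. A minimal usage sketch (``filenames`` stands in for real input files):

.. code-block:: python

    import satpy

    scn = satpy.Scene(filenames=filenames, reader="seviri_l1b_native")
    scn.load(["IR_108", "VIS006"])

    # Default "minimal": variables with conflicting coordinates are dropped.
    ds = scn.to_xarray_dataset()
    # Stricter alternative from xarray.merge: raise on conflicts instead.
    ds_strict = scn.to_xarray_dataset(compat="no_conflicts")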
+"""Image comparison tests.""" + +import hdf5plugin # noqa: F401 isort:skip +import os +import os.path +import warnings +from datetime import datetime +from glob import glob + +import cv2 +import dask +import numpy as np +from behave import given, then, when + +from satpy import Scene + +ext_data_path = "/app/ext_data" +threshold = 2000 + +def before_all(context): + """Define a before_all hook to create the timestamp and test results directory.""" + tm = datetime.now() + context.timestamp = tm.strftime("%Y-%m-%d-%H-%M-%S") + context.test_results_dir = f"{ext_data_path}/test_results/image_comparison/{context.timestamp}" + os.makedirs(os.path.join(context.test_results_dir, "generated"), exist_ok=True) + os.makedirs(os.path.join(context.test_results_dir, "difference"), exist_ok=True) + + # Write the timestamp to test_results.txt + results_file = os.path.join(context.test_results_dir, "test_results.txt") + with open(results_file, "a") as f: + f.write(f"Test executed at {context.timestamp}.\n\n") + +def setup_hooks(): + """Register the before_all hook.""" + from behave import use_fixture + from behave.runner import Context + + use_fixture(before_all, Context) + +setup_hooks() +@given("I have a {composite} reference image file from {satellite} resampled to {area}") +def step_given_reference_image(context, composite, satellite, area): + """Prepare a reference image.""" + reference_image = f"satpy-reference-image-{satellite}-{composite}-{area}.png" + context.reference_image = cv2.imread(f"{ext_data_path}/reference_images/{reference_image}") + context.satellite = satellite + context.composite = composite + context.area = area + + +@when("I generate a new {composite} image file from {satellite} case {case} " + "with {reader} for {area} with clipping {clip}") +def step_when_generate_image(context, composite, satellite, case, reader, area, clip): + """Generate test images.""" + os.environ["OMP_NUM_THREADS"] = os.environ["MKL_NUM_THREADS"] = "2" + os.environ["PYTROLL_CHUNK_SIZE"] = "1024" + warnings.simplefilter("ignore") + dask.config.set(scheduler="threads", num_workers=4) + + # Get the list of satellite files to open + filenames = glob(f"{ext_data_path}/satellite_data/{satellite}/{case}/*.nc") + + reader_kwargs = {} + if clip != "null": + reader_kwargs["clip_negative_radiances"] = clip + scn = Scene(reader=reader, filenames=filenames, reader_kwargs=reader_kwargs) + + scn.load([composite]) + + if area == "null": + ls = scn + else: + ls = scn.resample(area, resampler="gradient_search") + + # Save the generated image in the generated folder + generated_image_path = os.path.join(context.test_results_dir, "generated", + f"generated_{context.satellite}_{context.composite}_{context.area}.png") + ls.save_datasets(writer="simple_image", filename=generated_image_path) + + # Save the generated image in the context + context.generated_image = cv2.imread(generated_image_path) + + +@then("the generated image should be the same as the reference image") +def step_then_compare_images(context): + """Compare test image to reference image.""" + # Load the images + imageA = cv2.cvtColor(context.reference_image, cv2.COLOR_BGR2GRAY) + imageB = cv2.cvtColor(context.generated_image, cv2.COLOR_BGR2GRAY) + # Ensure both images have the same dimensions + if imageA.shape != imageB.shape: + raise ValueError("Both images must have the same dimensions") + array1 = np.array(imageA) + array2 = np.array(imageB) + # Perform pixel-wise comparison + result_matrix = (array1 != array2).astype(np.uint8) * 255 + + # Save the resulting numpy 
array as an image in the difference folder
+    diff_image_path = os.path.join(context.test_results_dir, "difference",
+                                   f"diff_{context.satellite}_{context.composite}.png")
+    cv2.imwrite(diff_image_path, result_matrix)
+
+    # Count non-zero pixels in the result matrix
+    non_zero_count = np.count_nonzero(result_matrix)
+
+    # Write the results to a file in the test results directory
+    results_file = os.path.join(context.test_results_dir, "test_results.txt")
+    with open(results_file, "a") as f:
+        f.write(f"Test for {context.satellite} - {context.composite}\n")
+        f.write(f"Non-zero pixel differences: {non_zero_count}\n")
+        if non_zero_count < threshold:
+            f.write(f"Result: Passed - {non_zero_count} pixel differences.\n\n")
+        else:
+            f.write(f"Result: Failed - {non_zero_count} pixel differences exceed the threshold of {threshold}.\n\n")
+
+    # Assert that the number of differences is below the threshold
+    assert non_zero_count < threshold, (f"Images are not similar enough. "
+                                        f"{non_zero_count} pixel differences exceed the threshold of "
+                                        f"{threshold}.")
diff --git a/satpy/tests/compositor_tests/test_lightning.py b/satpy/tests/compositor_tests/test_lightning.py
new file mode 100644
index 0000000000..52eea22f3b
--- /dev/null
+++ b/satpy/tests/compositor_tests/test_lightning.py
@@ -0,0 +1,118 @@
+"""Test the flash age compositor."""
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2019 Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see <http://www.gnu.org/licenses/>.
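For orientation, the pass/fail rule in the ``Then`` step above is an absolute count of differing grayscale pixels (``threshold = 2000`` pixels, not a percentage of the image). A self-contained sketch of that metric on toy arrays:

.. code-block:: python

    import numpy as np

    reference = np.zeros((100, 100), dtype=np.uint8)
    generated = reference.copy()
    generated[:10, :50] = 255  # pretend 500 pixels differ

    # Same pixel-wise comparison as the step implementation above
    result_matrix = (reference != generated).astype(np.uint8) * 255
    non_zero_count = np.count_nonzero(result_matrix)
    print(non_zero_count, non_zero_count < 2000)  # 500 True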
+
+
+import datetime
+import logging
+
+import dask.array as da
+import numpy as np
+import pytest
+import xarray as xr
+
+from satpy.composites.lightning import LightningTimeCompositor
+
+
+def test_flash_age_compositor():
+    """Test the flash_age compositor by comparing two xarray objects."""
+    comp = LightningTimeCompositor("flash_age",prerequisites=["flash_time"],
+                                   standard_name="lightning_time",
+                                   time_range=60,
+                                   reference_time="end_time")
+    attrs_flash_age = {"variable_name": "flash_time","name": "flash_time",
+                       "start_time": datetime.datetime(2024, 8, 1, 10, 50, 0),
+                       "end_time": datetime.datetime(2024, 8, 1, 11, 0, 0),"reader": "li_l2_nc"}
+    flash_age_value = da.array(["2024-08-01T09:00:00",
+                                "2024-08-01T10:00:00", "2024-08-01T10:30:00","2024-08-01T11:00:00"], dtype="datetime64[ns]")
+    flash_age = xr.DataArray(
+        flash_age_value,
+        dims=["y"],
+        coords={
+            "crs": "8B +proj=longlat +ellps=WGS84 +type=crs"
+        },attrs = attrs_flash_age,name="flash_time")
+    res = comp([flash_age])
+    expected_attrs = {"variable_name": "flash_time","name": "lightning_time",
+                      "start_time": datetime.datetime(2024, 8, 1, 10, 50, 0),
+                      "end_time": datetime.datetime(2024, 8, 1, 11, 0, 0),"reader": "li_l2_nc",
+                      "standard_name": "lightning_time"
+                      }
+    expected_array = xr.DataArray(
+        da.array([0.0,0.5,1.0]),
+        dims=["y"],
+        coords={
+            "crs": "8B +proj=longlat +ellps=WGS84 +type=crs"
+        },attrs = expected_attrs,name="flash_time")
+    xr.testing.assert_equal(res,expected_array)
+
+def test_empty_array_error(caplog):
+    """Test when the filtered array is empty."""
+    comp = LightningTimeCompositor("flash_age",prerequisites=["flash_time"],
+                                   standard_name="lightning_time",
+                                   time_range=60,
+                                   reference_time="end_time")
+    attrs_flash_age = {"variable_name": "flash_time","name": "flash_time",
+                       "start_time": np.datetime64(datetime.datetime(2024, 8, 1, 10, 0, 0)),
+                       "end_time": datetime.datetime(2024, 8, 1, 11, 0, 0),
+                       "reader": "li_l2_nc"}
+    flash_age_value = da.array(["2024-08-01T09:00:00"], dtype="datetime64[ns]")
+    flash_age = xr.DataArray(
+        flash_age_value,
+        dims=["y"],
+        coords={
+            "crs": "8B +proj=longlat +ellps=WGS84 +type=crs"
+        },attrs = attrs_flash_age,name="flash_time")
+    with caplog.at_level(logging.ERROR):
+        # Simulate the operation that raises the exception
+        with pytest.raises(ValueError, match="data size is zero") as excinfo:
+            _ = comp([flash_age])
+
+    # Assert the exception message
+    assert str(excinfo.value) == (
+        f"Invalid data: data size is zero. All flash_age events occurred before "
+        f"the specified start time ({attrs_flash_age['start_time']})."
+ ) + assert "All the flash_age events happened before 2024-08-01T10:00:00" in caplog.text + +def test_update_missing_metadata(): + """Test the _update_missing_metadata method.""" + existing_attrs = { + "standard_name": "lightning_event_time", + "time_range": 30 + } + + # New metadata to be merged + new_attrs = { + "standard_name": None, # Should not overwrite since it's None + "reference_time": "2023-09-20T00:00:00Z", # Should be added + "units": "seconds" # Should be added + } + + # Expected result after merging + expected_attrs = { + "standard_name": "lightning_event_time", # Should remain the same + "time_range": 30, # Should remain the same + "reference_time": "2023-09-20T00:00:00Z", # Should be added + "units": "seconds" # Should be added + } + + # Call the static method + LightningTimeCompositor._update_missing_metadata(existing_attrs, new_attrs) + + # Assert the final state of existing_attrs is as expected + assert existing_attrs == expected_attrs diff --git a/satpy/tests/enhancement_tests/test_atmosphere.py b/satpy/tests/enhancement_tests/test_atmosphere.py deleted file mode 100644 index 42e25af0c6..0000000000 --- a/satpy/tests/enhancement_tests/test_atmosphere.py +++ /dev/null @@ -1,61 +0,0 @@ -# Copyright (c) 2022- Satpy developers -# -# This file is part of satpy. -# -# satpy is free software: you can redistribute it and/or modify it under the -# terms of the GNU General Public License as published by the Free Software -# Foundation, either version 3 of the License, or (at your option) any later -# version. -# -# satpy is distributed in the hope that it will be useful, but WITHOUT ANY -# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR -# A PARTICULAR PURPOSE. See the GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along with -# satpy. If not, see . 
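The ``test_update_missing_metadata`` case above pins down the intended merge rule: keys already present are kept, and ``None`` values are never copied in. A minimal sketch of that assumed behaviour (not the compositor's actual implementation):

.. code-block:: python

    def update_missing_metadata(existing, new):
        """Add only missing keys, skipping explicit None values."""
        for key, value in new.items():
            if key not in existing and value is not None:
                existing[key] = value

    attrs = {"standard_name": "lightning_event_time", "time_range": 30}
    update_missing_metadata(attrs, {"standard_name": None, "units": "seconds"})
    print(attrs)
    # {'standard_name': 'lightning_event_time', 'time_range': 30, 'units': 'seconds'}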
-"""Tests for enhancements in enhancements/atmosphere.py.""" - -import datetime - -import dask.array as da -import numpy as np -import xarray as xr -from trollimage.xrimage import XRImage - - -def test_essl_moisture(): - """Test ESSL moisture compositor.""" - from satpy.enhancements.atmosphere import essl_moisture - - ratio = xr.DataArray( - da.linspace(1.0, 1.7, 25, chunks=5).reshape((5, 5)), - dims=("y", "x"), - attrs={"name": "ratio", - "calibration": "reflectance", - "units": "%", - "mode": "L"}) - im = XRImage(ratio) - - essl_moisture(im) - assert im.data.attrs["mode"] == "RGB" - np.testing.assert_array_equal(im.data["bands"], ["R", "G", "B"]) - assert im.data.sel(bands="R")[0, 0] == 1 - np.testing.assert_allclose(im.data.sel(bands="R")[2, 2], 0.04, rtol=1e-4) - np.testing.assert_allclose(im.data.sel(bands="G")[2, 2], 0.42857, rtol=1e-4) - np.testing.assert_allclose(im.data.sel(bands="B")[2, 2], 0.1875, rtol=1e-4) - - # test FCI test data correction - ratio = xr.DataArray( - da.linspace(1.0, 1.7, 25, chunks=5).reshape((5, 5)), - dims=("y", "x"), - attrs={"name": "ratio", - "calibration": "reflectance", - "units": "%", - "mode": "L", - "sensor": "fci", - "start_time": datetime.datetime(1999, 1, 1)}) - im = XRImage(ratio) - essl_moisture(im) - np.testing.assert_allclose(im.data.sel(bands="R")[3, 3], 0.7342, rtol=1e-4) - np.testing.assert_allclose(im.data.sel(bands="G")[3, 3], 0.7257, rtol=1e-4) - np.testing.assert_allclose(im.data.sel(bands="B")[3, 3], 0.39, rtol=1e-4) diff --git a/satpy/tests/enhancement_tests/test_enhancements.py b/satpy/tests/enhancement_tests/test_enhancements.py index b30a073968..6b797f0015 100644 --- a/satpy/tests/enhancement_tests/test_enhancements.py +++ b/satpy/tests/enhancement_tests/test_enhancements.py @@ -34,24 +34,55 @@ def run_and_check_enhancement(func, data, expected, **kwargs): """Perform basic checks that apply to multiple tests.""" + pre_attrs = data.attrs + img = _get_enhanced_image(func, data, **kwargs) + + _assert_image(img, pre_attrs, func.__name__, "palettes" in kwargs) + _assert_image_data(img, expected) + + +def _get_enhanced_image(func, data, **kwargs): from trollimage.xrimage import XRImage - pre_attrs = data.attrs img = XRImage(data) func(img, **kwargs) + return img + + +def _assert_image(img, pre_attrs, func_name, has_palette): + assert isinstance(img.data, xr.DataArray) assert isinstance(img.data.data, da.Array) + old_keys = set(pre_attrs.keys()) # It is OK to have "enhancement_history" added new_keys = set(img.data.attrs.keys()) - {"enhancement_history"} + # In case of palettes are used, _FillValue is added. 
+ # Colorize doesn't add the fill value, so ignore that + if has_palette and func_name != "colorize": + assert "_FillValue" in new_keys + # Remove it from further comparisons + new_keys = new_keys - {"_FillValue"} assert old_keys == new_keys - res_data_arr = img.data - assert isinstance(res_data_arr, xr.DataArray) - assert isinstance(res_data_arr.data, da.Array) - res_data = res_data_arr.data.compute() # mimics what xrimage geotiff writing does + +def _assert_image_data(img, expected, dtype=None): + # Compute the data to mimic what xrimage geotiff writing does + res_data = img.data.data.compute() assert not isinstance(res_data, da.Array) np.testing.assert_allclose(res_data, expected, atol=1.e-6, rtol=0) + if dtype: + assert img.data.dtype == dtype + assert res_data.dtype == dtype + + +def run_and_check_enhancement_with_dtype(func, data, expected, **kwargs): + """Perform basic checks that apply to multiple tests.""" + pre_attrs = data.attrs + img = _get_enhanced_image(func, data, **kwargs) + + _assert_image(img, pre_attrs, func.__name__, "palettes" in kwargs) + _assert_image_data(img, expected, dtype=data.dtype) def identical_decorator(func): @@ -103,14 +134,15 @@ def _calc_func(data): exp_data = exp_data[np.newaxis, :, :] run_and_check_enhancement(_enh_func, in_data, exp_data) - def test_cira_stretch(self): + @pytest.mark.parametrize("dtype", [np.float32, np.float64]) + def test_cira_stretch(self, dtype): """Test applying the cira_stretch.""" from satpy.enhancements import cira_stretch expected = np.array([[ [np.nan, -7.04045974, -7.04045974, 0.79630132, 0.95947296], - [1.05181359, 1.11651012, 1.16635571, 1.20691137, 1.24110186]]]) - run_and_check_enhancement(cira_stretch, self.ch1, expected) + [1.05181359, 1.11651012, 1.16635571, 1.20691137, 1.24110186]]], dtype=dtype) + run_and_check_enhancement_with_dtype(cira_stretch, self.ch1.astype(dtype), expected) def test_reinhard(self): """Test the reinhard algorithm.""" @@ -456,10 +488,10 @@ def test_cmap_list(self): """Test that colors can be a list/tuple.""" from satpy.enhancements import create_colormap colors = [ - [0, 0, 1], - [1, 0, 1], - [0, 1, 1], - [1, 1, 1], + [0., 0., 1.], + [1., 0., 1.], + [0., 1., 1.], + [1., 1., 1.], ] values = [2, 4, 6, 8] cmap = create_colormap({"colors": colors, "color_scale": 1}) diff --git a/satpy/tests/modifier_tests/test_parallax.py b/satpy/tests/modifier_tests/test_parallax.py index 276ff0ebd4..63ddbd8caf 100644 --- a/satpy/tests/modifier_tests/test_parallax.py +++ b/satpy/tests/modifier_tests/test_parallax.py @@ -18,6 +18,7 @@ import math import os import unittest.mock +import warnings import dask.array as da import dask.config @@ -368,13 +369,13 @@ def test_correct_area_clearsky_different_resolutions(self, res1, res2): resolution=res2, area_extent=[-1, -1, 1, 1]) - with pytest.warns(None) as record: + with warnings.catch_warnings(): + warnings.simplefilter("error") sc = make_fake_scene( {"CTH_clear": np.full(area1.shape, np.nan)}, daskify=False, area=area1, common_attrs=_get_attrs(0, 0, 35_000)) - assert len(record) == 0 corrector = ParallaxCorrection(area2) new_area = corrector(sc["CTH_clear"]) diff --git a/satpy/tests/reader_tests/conftest.py b/satpy/tests/reader_tests/conftest.py index 8f6f572494..00742574ef 100644 --- a/satpy/tests/reader_tests/conftest.py +++ b/satpy/tests/reader_tests/conftest.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -# Copyright (c) 2021 Satpy developers +# Copyright (c) 2021, 2024, 2025 Satpy developers # # This file is part of satpy. 
# @@ -15,4 +15,204 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . + """Setup and configuration for all reader tests.""" + +import datetime as dt +import os +from random import randrange + +import numpy as np +import pytest +import xarray as xr +from trollsift import compose, parse +from xarray import DataTree + +from satpy.readers.mwr_l1b import AWS_EPS_Sterna_MWR_L1BFile +from satpy.readers.mwr_l1c import AWS_MWR_L1CFile + +DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S.%f" + +platform_name = "AWS1" +# W_XX-EUMETSAT-Darmstadt,SAT,AWS1-MWR-1B-RAD_C_EUMT_20241121085911_G_D_20241109234502_20241110004559_T_N____.nc +file_pattern = "W_{country:2s}-{organisation:s}-{location:s},SAT,{platform_name}-MWR-{processing_level}-RAD_C_{originator:4s}_{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_T_B____.nc" # noqa + + +rng = np.random.default_rng() + +def random_date(start, end): + """Create a random datetime between two datetimes.""" + delta = end - start + int_delta = (delta.days * 24 * 60 * 60) + delta.seconds + random_second = randrange(int_delta) + return start + dt.timedelta(seconds=random_second) + + +@pytest.fixture(scope="module") +def fake_mwr_data_array(): + """Return a fake AWS/EPS-Sterna MWR l1b data array.""" + fake_data_np = rng.integers(0, 700000, size=10*145*19).reshape((10, 145, 19)) + fake_data_np[0, 0, 0] = -2147483648 + fake_data_np[1, 0, 0] = 700000 + 10 + fake_data_np[2, 0, 0] = -10 + array_dims = ["n_scans", "n_fovs", "n_channels"] + return xr.DataArray(fake_data_np, dims=array_dims) + + +def make_fake_angles(geo_size, geo_dims, shape): + """Return fake sun-satellite angle array.""" + maxval = 36000 + dummy_array = (np.arange(0, geo_size) * maxval/geo_size).astype("int32") + return xr.DataArray(dummy_array.reshape(shape), dims=geo_dims) + + +def make_fake_mwr_lonlats(geo_size, geo_dims, shape): + """Return fake geolocation data arrays for all 4 MWR horns.""" + maxval = 3600000 + dummy_array = (np.arange(0, geo_size) * maxval/geo_size).astype("int32") + fake_lon_data = xr.DataArray(dummy_array.reshape(shape), dims=geo_dims) + maxval = 1800000 + dummy_array = (np.arange(0, geo_size) * maxval/geo_size - maxval/2).astype("int32") + fake_lat_data = xr.DataArray(dummy_array.reshape(shape), dims=geo_dims) + return (fake_lon_data, fake_lat_data) + + +def make_fake_mwr_l1c_lonlats(geo_size, geo_dims): + """Return fake level-1c geolocation data arrays.""" + maxval = 3600000 + dummy_array = (np.arange(0, geo_size) * maxval/geo_size).astype("int32") + fake_lon_data = xr.DataArray(dummy_array.reshape((10, 145)), dims=geo_dims) + maxval = 1800000 + dummy_array = (np.arange(0, geo_size) * maxval/geo_size - maxval/2).astype("int32") + fake_lat_data = xr.DataArray(dummy_array.reshape((10, 145)), dims=geo_dims) + return (fake_lon_data, fake_lat_data) + + +def aws_eps_sterna_mwr_level1_file(fake_mwr_data_array, eps_sterna=True, l1b=True): + """Create an AWS and EPS-Sterna MWR l1b file.""" + if eps_sterna: + n_feedhorns="n_feedhorns" + prefix = "" + longitude_attr = "longitude" + latitude_attr = "latitude" + else: + n_feedhorns="n_geo_groups" + prefix = "aws_" + longitude_attr = "aws_lon" + latitude_attr = "aws_lat" + + if l1b: + geo_dims = ["n_scans", "n_fovs", n_feedhorns] + geo_size = 10 * 145 * 4 + shape = (10, 145, 4) + else: + geo_dims = ["n_scans", "n_fovs"] + geo_size = 10 * 145 + shape = (10, 145) + + ds = DataTree() + start_time = dt.datetime(2024, 9, 1, 12, 0) + ds.attrs["sensing_start_time_utc"] = 
start_time.strftime(DATETIME_FORMAT) + end_time = dt.datetime(2024, 9, 1, 12, 15) + ds.attrs["sensing_end_time_utc"] = end_time.strftime(DATETIME_FORMAT) + + ds.attrs["instrument"] = "MWR" + ds.attrs["orbit_start"] = 9991 + ds.attrs["orbit_end"] = 9992 + dset_name = f"data/calibration/{prefix}toa_brightness_temperature" + ds[dset_name] = fake_mwr_data_array + ds[dset_name].attrs["scale_factor"] = 0.001 + ds[dset_name].attrs["add_offset"] = 0.0 + ds[dset_name].attrs["missing_value"] = -2147483648 + ds[dset_name].attrs["valid_min"] = 0 + ds[dset_name].attrs["valid_max"] = 700000 + + fake_lon_data, fake_lat_data = make_fake_mwr_lonlats(geo_size, geo_dims, shape) + + ds[f"data/navigation/{longitude_attr}"] = fake_lon_data + ds[f"data/navigation/{longitude_attr}"].attrs["scale_factor"] = 1e-4 + ds[f"data/navigation/{longitude_attr}"].attrs["add_offset"] = 0.0 + ds[f"data/navigation/{latitude_attr}"] = fake_lat_data + ds[f"data/navigation/{prefix}solar_azimuth_angle"] = make_fake_angles(geo_size, geo_dims, shape) + ds[f"data/navigation/{prefix}solar_zenith_angle"] = make_fake_angles(geo_size, geo_dims, shape) + ds[f"data/navigation/{prefix}satellite_azimuth_angle"] = make_fake_angles(geo_size, geo_dims, shape) + ds[f"data/navigation/{prefix}satellite_zenith_angle"] = make_fake_angles(geo_size, geo_dims, shape) + if l1b: + ds["status/satellite/subsat_latitude_end"] = np.array(22.39) + ds["status/satellite/subsat_longitude_start"] = np.array(304.79) + ds["status/satellite/subsat_latitude_start"] = np.array(55.41) + ds["status/satellite/subsat_longitude_end"] = np.array(296.79) + + return ds + + +def create_mwr_file(tmpdir, data_array, eps_sterna=False, l1b=True): + """Create an AWS or EPS-Sterna MWR l1b (or level-1c) file.""" + ds = aws_eps_sterna_mwr_level1_file(data_array, eps_sterna=eps_sterna, l1b=l1b) + start_time = dt.datetime.fromisoformat(ds.attrs["sensing_start_time_utc"]) + end_time = dt.datetime.fromisoformat(ds.attrs["sensing_end_time_utc"]) + + platform_name = "ST01" if eps_sterna else "AWS1" + processing_level = "1B" if l1b else "1C" + + processing_time = random_date(dt.datetime(2024, 9, 1, 13), dt.datetime(2030, 6, 1)) + filename = tmpdir / compose(file_pattern, dict(country="XX", + organisation="EUMETSAT", + location="Darmstadt", + processing_level=processing_level, + originator="EUMT", + start_time=start_time, end_time=end_time, + processing_time=processing_time, + platform_name=platform_name)) + ds.to_netcdf(filename) + return filename + +@pytest.fixture(scope="module") +def eps_sterna_mwr_file(tmp_path_factory, fake_mwr_data_array): + """Create an EPS-Sterna MWR l1b file.""" + tmpdir = tmp_path_factory.mktemp("eps_sterna_mwr_l1b_tests") + return create_mwr_file(tmpdir, fake_mwr_data_array, eps_sterna=True) + + +@pytest.fixture(scope="module") +def aws_mwr_file(tmp_path_factory, fake_mwr_data_array): + """Create an AWS MWR l1b file.""" + tmpdir = tmp_path_factory.mktemp("aws_l1b_tests") + return create_mwr_file(tmpdir, fake_mwr_data_array, eps_sterna=False) + + +@pytest.fixture(scope="module") +def aws_mwr_l1c_file(tmp_path_factory, fake_mwr_data_array): + """Create an AWS MWR l1c file.""" + tmpdir = tmp_path_factory.mktemp("aws_l1c_tests") + return create_mwr_file(tmpdir, fake_mwr_data_array, eps_sterna=False, l1b=False) + + +@pytest.fixture(scope="module") +def eps_sterna_mwr_handler(eps_sterna_mwr_file): + """Create an EPS-Sterna MWR filehandler.""" + filename_info = parse(file_pattern, os.path.basename(eps_sterna_mwr_file)) + filetype_info = dict() + 
filetype_info["file_type"] = "eps_sterna_mwr_l1b" + filetype_info["feed_horn_group_name"] = "n_feedhorns" + return AWS_EPS_Sterna_MWR_L1BFile(eps_sterna_mwr_file, filename_info, filetype_info) + + +@pytest.fixture(scope="module") +def aws_mwr_handler(aws_mwr_file): + """Create an AWS MWR filehandler.""" + filename_info = parse(file_pattern, os.path.basename(aws_mwr_file)) + filetype_info = dict() + filetype_info["file_type"] = "aws1_mwr_l1b" + filetype_info["feed_horn_group_name"] = "n_geo_groups" + return AWS_EPS_Sterna_MWR_L1BFile(aws_mwr_file, filename_info, filetype_info) + + +@pytest.fixture(scope="module") +def aws_mwr_l1c_handler(aws_mwr_l1c_file): + """Create an AWS MWR level-1c filehandler.""" + filename_info = parse(file_pattern, os.path.basename(aws_mwr_l1c_file)) + filetype_info = dict() + filetype_info["file_type"] = "aws1_mwr_l1c" + filetype_info["feed_horn_group_name"] = None + return AWS_MWR_L1CFile(aws_mwr_l1c_file, filename_info, filetype_info) diff --git a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py index c90cc93123..a8ee45a052 100644 --- a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py +++ b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py @@ -21,7 +21,7 @@ from __future__ import annotations import datetime as dt -from typing import Optional +from typing import Literal, Optional import numpy as np import pytest @@ -496,11 +496,20 @@ def modis_l1b_nasa_1km_mod03_files(modis_l1b_nasa_mod021km_file, modis_l1b_nasa_ # Level 2 Fixtures -def _get_basic_variable_info(var_name: str, resolution: int) -> dict: +def _get_basic_variable_info(var_name: str, resolution: int, dim_size: Literal[2, 3]=2) -> dict: shape = _shape_for_resolution(resolution) - data = np.ones((shape[0], shape[1]), dtype=np.uint16) + row_dim_name = f"Cell_Along_Swath_{resolution}m:modl2" col_dim_name = f"Cell_Across_Swath_{resolution}m:modl2" + + if dim_size == 3: + data = np.ones((1, shape[0], shape[1]), dtype=np.uint16) + dim_labels = ["channel", row_dim_name, col_dim_name] + elif dim_size == 2: + data = np.ones((shape[0], shape[1]), dtype=np.uint16) + dim_labels = [row_dim_name, col_dim_name] + + return { var_name: { "data": data, @@ -508,8 +517,7 @@ def _get_basic_variable_info(var_name: str, resolution: int) -> dict: "fill_value": 0, "attrs": { # dim_labels are just unique dimension names, may not match exactly with real world files - "dim_labels": [row_dim_name, - col_dim_name], + "dim_labels": dim_labels, "valid_range": (0, 32767), "scale_factor": 2.0, "add_offset": -1.0, @@ -728,6 +736,8 @@ def modis_l2_nasa_mod06_file(tmpdir_factory) -> list[str]: full_path = str(tmpdir_factory.mktemp("modis_l2").join(filename)) variable_infos = _get_l1b_geo_variable_info(filename, 5000, include_angles=True) variable_infos.update(_get_basic_variable_info("Surface_Pressure", 5000)) + variable_infos.update(_get_basic_variable_info("non_yaml_configured_2D_var", 5000)) + variable_infos.update(_get_basic_variable_info("non_yaml_configured_3D_var", 5000, dim_size=3)) create_hdfeos_test_file(full_path, variable_infos, _create_struct_metadata(5000), @@ -735,6 +745,23 @@ def modis_l2_nasa_mod06_file(tmpdir_factory) -> list[str]: _create_header_metadata()) return [full_path] +@pytest.fixture(scope="session") +def modis_l2_nasa_mod99_file(tmpdir_factory) -> list[str]: + """Create an "artificial" MOD99 L2 HDF4 file with headers. + + There exists no MOD99 Level 2 product. This is just for testing available datasets + in arbitrary level 2 file. 
+ """ + filename = generate_nasa_l2_filename("MOD99") + full_path = str(tmpdir_factory.mktemp("modis_l2").join(filename)) + variable_infos = _get_l1b_geo_variable_info(filename, 5000, include_angles=True) + variable_infos.update(_get_basic_variable_info("non_yaml_configured_2D_var", 1000)) + create_hdfeos_test_file(full_path, + variable_infos, + _create_struct_metadata(5000), + _create_core_metadata("MOD99"), + _create_header_metadata()) + return [full_path] @pytest.fixture(scope="session") def modis_l2_imapp_snowmask_file(tmpdir_factory) -> list[str]: diff --git a/satpy/tests/reader_tests/modis_tests/conftest.py b/satpy/tests/reader_tests/modis_tests/conftest.py index 1b47007e63..8dd3bc230d 100644 --- a/satpy/tests/reader_tests/modis_tests/conftest.py +++ b/satpy/tests/reader_tests/modis_tests/conftest.py @@ -32,6 +32,7 @@ modis_l2_nasa_mod06_file, modis_l2_nasa_mod35_file, modis_l2_nasa_mod35_mod03_files, + modis_l2_nasa_mod99_file, modis_l3_nasa_mcd12q1_file, modis_l3_nasa_mod09_file, modis_l3_nasa_mod43_file, diff --git a/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py b/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py index 47f5f92c8e..94a21c2ef8 100644 --- a/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py +++ b/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py @@ -47,7 +47,7 @@ def _check_shared_metadata(data_arr): assert data_arr.attrs["sensor"] == "modis" - assert data_arr.attrs["platform_name"] == "EOS-Terra" + assert data_arr.attrs["platform_name"] == "Terra" assert "rows_per_scan" in data_arr.attrs assert isinstance(data_arr.attrs["rows_per_scan"], int) assert data_arr.attrs["reader"] == "modis_l1b" diff --git a/satpy/tests/reader_tests/modis_tests/test_modis_l2.py b/satpy/tests/reader_tests/modis_tests/test_modis_l2.py index a30bfc392d..d827e18215 100644 --- a/satpy/tests/reader_tests/modis_tests/test_modis_l2.py +++ b/satpy/tests/reader_tests/modis_tests/test_modis_l2.py @@ -38,12 +38,13 @@ # - modis_l2_imapp_snowmask_geo_files # - modis_l2_nasa_mod06_file # - modis_l2_nasa_mod35_file +# - modis_l2_nasa_mod99_file # - modis_l2_nasa_mod35_mod03_files def _check_shared_metadata(data_arr, expect_area=False): assert data_arr.attrs["sensor"] == "modis" - assert data_arr.attrs["platform_name"] == "EOS-Terra" + assert data_arr.attrs["platform_name"] == "Terra" assert "rows_per_scan" in data_arr.attrs assert isinstance(data_arr.attrs["rows_per_scan"], int) assert data_arr.attrs["reader"] == "modis_l2" @@ -162,7 +163,9 @@ def test_load_250m_cloud_mask_dataset(self, input_files, exp_area): @pytest.mark.parametrize( ("input_files", "loadables", "exp_resolution", "exp_area", "exp_value"), [ - (lazy_fixture("modis_l2_nasa_mod06_file"), ["surface_pressure"], 5000, True, 4.0), + (lazy_fixture("modis_l2_nasa_mod06_file"), ["surface_pressure", "non_yaml_configured_2D_var"], + 5000, True, 4.0), + (lazy_fixture("modis_l2_nasa_mod99_file"), ["non_yaml_configured_2D_var"], 1000, True, 4.0), # snow mask is considered a category product, factor/offset ignored (lazy_fixture("modis_l2_imapp_snowmask_file"), ["snow_mask"], 1000, False, 1.0), (lazy_fixture("modis_l2_imapp_snowmask_geo_files"), ["snow_mask"], 1000, True, 1.0), @@ -181,3 +184,17 @@ def test_load_l2_dataset(self, input_files, loadables, exp_resolution, exp_area, assert data_arr.shape == _shape_for_resolution(exp_resolution) assert data_arr.attrs.get("resolution") == exp_resolution _check_shared_metadata(data_arr, expect_area=exp_area) + + def test_scene_dynamic_available_datasets(self, modis_l2_nasa_mod06_file): + 
"""Test available datasets method to dynmically add non configured datasets.""" + import xarray as xr + scene = Scene(reader="modis_l2", filenames=modis_l2_nasa_mod06_file) + available_datasets = scene.all_dataset_names() + assert len(available_datasets) > 0 + assert "surface_pressure" in available_datasets + # make sure configured datasets are added again + assert available_datasets.count("surface_pressure") == 1 + assert "non_yaml_configured_2D_var" in available_datasets + file_ds = xr.open_dataset(modis_l2_nasa_mod06_file[0], engine="netcdf4") + assert "non_yaml_configured_3D_var" not in available_datasets and "non_yaml_configured_3D_var" in file_ds # noqa PT018 + assert "non_yaml_configured_3D_var" in file_ds diff --git a/satpy/tests/reader_tests/test_ahi_hrit.py b/satpy/tests/reader_tests/test_ahi_hrit.py index 24e01e6802..ea4afab7f1 100644 --- a/satpy/tests/reader_tests/test_ahi_hrit.py +++ b/satpy/tests/reader_tests/test_ahi_hrit.py @@ -75,8 +75,8 @@ def _get_mda(self, loff=5500.0, coff=5500.0, nlines=11000, ncols=11000, proj_h8 = b"GEOS(140.70) " proj_mtsat2 = b"GEOS(145.00) " proj_name = proj_h8 if platform == "Himawari-8" else proj_mtsat2 - return {"image_segm_seq_no": segno, - "total_no_image_segm": numseg, + return {"image_segm_seq_no": np.uint8(segno), + "total_no_image_segm": np.uint8(numseg), "projection_name": proj_name, "projection_parameters": { "a": 6378169.00, @@ -85,10 +85,10 @@ def _get_mda(self, loff=5500.0, coff=5500.0, nlines=11000, ncols=11000, }, "cfac": 10233128, "lfac": 10233128, - "coff": coff, - "loff": loff, - "number_of_columns": ncols, - "number_of_lines": nlines, + "coff": np.int32(coff), + "loff": np.int32(loff), + "number_of_columns": np.uint16(ncols), + "number_of_lines": np.uint16(nlines), "image_data_function": idf, "image_observation_time": self._get_acq_time(nlines)} diff --git a/satpy/tests/reader_tests/test_aws1_mwr_l1b.py b/satpy/tests/reader_tests/test_aws1_mwr_l1b.py new file mode 100644 index 0000000000..e388b6186e --- /dev/null +++ b/satpy/tests/reader_tests/test_aws1_mwr_l1b.py @@ -0,0 +1,120 @@ +"""Tests for aws l1b filehandlers.""" + +import datetime as dt +from enum import Enum + +import numpy as np +import pytest + +from satpy.tests.reader_tests.conftest import make_fake_angles, make_fake_mwr_lonlats + +PLATFORM_NAME = "AWS1" + + +geo_dims = ["n_scans", "n_fovs", "n_geo_groups"] +geo_size = 10*145*4 +shape = (10, 145, 4) +fake_lon_data, fake_lat_data = make_fake_mwr_lonlats(geo_size, geo_dims, shape) +fake_sun_azi_data = make_fake_angles(geo_size, geo_dims, shape) +fake_sun_zen_data = make_fake_angles(geo_size, geo_dims, shape) +fake_sat_azi_data = make_fake_angles(geo_size, geo_dims, shape) +fake_sat_zen_data = make_fake_angles(geo_size, geo_dims, shape) + + + +def test_start_end_time(aws_mwr_handler): + """Test that start and end times are read correctly.""" + assert aws_mwr_handler.start_time == dt.datetime(2024, 9, 1, 12, 0) + assert aws_mwr_handler.end_time == dt.datetime(2024, 9, 1, 12, 15) + + +def test_orbit_number_start_end(aws_mwr_handler): + """Test that start and end orbit number is read correctly.""" + assert aws_mwr_handler.orbit_start == 9991 + assert aws_mwr_handler.orbit_end == 9992 + + +def test_metadata(aws_mwr_handler): + """Test that the metadata is read correctly.""" + assert aws_mwr_handler.sensor == "mwr" + assert aws_mwr_handler.platform_name == PLATFORM_NAME + + +def test_get_channel_data(aws_mwr_handler, fake_mwr_data_array): + """Test retrieving the channel data.""" + did = dict(name="1") + dataset_info = 
dict(file_key="data/calibration/aws_toa_brightness_temperature") + expected = fake_mwr_data_array.isel(n_channels=0) + # mask no_data value + expected = expected.where(expected != -2147483648) + # mask outside the valid range + expected = expected.where(expected <= 700000) + expected = expected.where(expected >= 0) + # "calibrate" + expected = expected * 0.001 + res = aws_mwr_handler.get_dataset(did, dataset_info) + np.testing.assert_allclose(res, expected) + assert "x" in res.dims + assert "y" in res.dims + assert "orbital_parameters" in res.attrs + assert res.attrs["orbital_parameters"]["sub_satellite_longitude_end"] == 296.79 + assert res.dims == ("y", "x") + assert "n_channels" not in res.coords + assert res.attrs["sensor"] == "mwr" + assert res.attrs["platform_name"] == PLATFORM_NAME + + +@pytest.mark.parametrize(("id_name", "file_key", "fake_array"), + [("longitude", "data/navigation/aws_lon", fake_lon_data * 1e-4), + ("latitude", "data/navigation/aws_lat", fake_lat_data), + ]) +def test_get_navigation_data(aws_mwr_handler, id_name, file_key, fake_array): + """Test retrieving the geolocation (lon-lat) data.""" + Horn = Enum("Horn", ["1", "2", "3", "4"]) + did = dict(name=id_name, horn=Horn["1"]) + dataset_info = dict(file_key=file_key, standard_name=id_name) + res = aws_mwr_handler.get_dataset(did, dataset_info) + if id_name == "longitude": + fake_array = fake_array.where(fake_array <= 180, fake_array - 360) + + np.testing.assert_allclose(res, fake_array.isel(n_geo_groups=0)) + assert "x" in res.dims + assert "y" in res.dims + assert "orbital_parameters" in res.attrs + assert res.dims == ("y", "x") + assert "standard_name" in res.attrs + assert "n_geo_groups" not in res.coords + if id_name == "longitude": + assert res.max() <= 180 + + +@pytest.mark.parametrize(("id_name", "file_key", "fake_array"), + [("solar_azimuth_horn1", "data/navigation/aws_solar_azimuth_angle", fake_sun_azi_data), + ("solar_zenith_horn1", "data/navigation/aws_solar_zenith_angle", fake_sun_zen_data), + ("satellite_azimuth_horn1", "data/navigation/aws_satellite_azimuth_angle", fake_sat_azi_data), + ("satellite_zenith_horn1", "data/navigation/aws_satellite_zenith_angle", fake_sat_zen_data)]) +def test_get_viewing_geometry_data(aws_mwr_handler, id_name, file_key, fake_array): + """Test retrieving the angles_data.""" + Horn = Enum("Horn", ["1", "2", "3", "4"]) + dset_id = dict(name=id_name, horn=Horn["1"]) + + dataset_info = dict(file_key=file_key, standard_name=id_name) + res = aws_mwr_handler.get_dataset(dset_id, dataset_info) + + np.testing.assert_allclose(res, fake_array.isel(n_geo_groups=0)) + assert "x" in res.dims + assert "y" in res.dims + assert "orbital_parameters" in res.attrs + assert res.dims == ("y", "x") + assert "standard_name" in res.attrs + assert "n_geo_groups" not in res.coords + + +def test_try_get_data_not_in_file(aws_mwr_handler): + """Test retrieving a data field that is not available in the file.""" + did = dict(name="toa_brightness_temperature") + dataset_info = dict(file_key="data/calibration/toa_brightness_temperature") + + match_str = "Dataset toa_brightness_temperature not available or not supported yet!" 
+ with pytest.raises(NotImplementedError, match=match_str): + _ = aws_mwr_handler.get_dataset(did, dataset_info) diff --git a/satpy/tests/reader_tests/test_aws1_mwr_l1c.py b/satpy/tests/reader_tests/test_aws1_mwr_l1c.py new file mode 100644 index 0000000000..10c499838d --- /dev/null +++ b/satpy/tests/reader_tests/test_aws1_mwr_l1c.py @@ -0,0 +1,95 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# Copyright (c) 2024-2025 Satpy developers + +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. + +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . + +"""Tests for ESA Arctic Weather Satellite (AWS) level-1c file reading.""" + + +import numpy as np +import pytest + +from satpy.tests.reader_tests.conftest import make_fake_angles, make_fake_mwr_l1c_lonlats + +PLATFORM_NAME = "AWS1" + +geo_dims = ["n_scans", "n_fovs"] +geo_size = 10 * 145 +fake_lon_data, fake_lat_data = make_fake_mwr_l1c_lonlats(geo_size, geo_dims) +fake_sun_azi_data = make_fake_angles(geo_size, geo_dims, shape=(10, 145)) +fake_sun_zen_data = make_fake_angles(geo_size, geo_dims, shape=(10, 145)) +fake_sat_azi_data = make_fake_angles(geo_size, geo_dims, shape=(10, 145)) +fake_sat_zen_data = make_fake_angles(geo_size, geo_dims, shape=(10, 145)) + + +def test_get_channel_data(aws_mwr_l1c_handler, fake_mwr_data_array): + """Test retrieving the channel data.""" + did = dict(name="1") + dataset_info = dict(file_key="data/calibration/aws_toa_brightness_temperature") + expected = fake_mwr_data_array.isel(n_channels=0) + # mask no_data value + expected = expected.where(expected != -2147483648) + # mask outside the valid range + expected = expected.where(expected <= 700000) + expected = expected.where(expected >= 0) + # "calibrate" + expected = expected * 0.001 + res = aws_mwr_l1c_handler.get_dataset(did, dataset_info) + np.testing.assert_allclose(res, expected) + assert "x" in res.dims + assert "y" in res.dims + assert res.dims == ("y", "x") + assert "n_channels" not in res.coords + assert res.attrs["sensor"] == "mwr" + assert res.attrs["platform_name"] == PLATFORM_NAME + + +@pytest.mark.parametrize(("id_name", "file_key", "fake_array"), + [("longitude", "data/navigation/aws_lon", fake_lon_data * 1e-4), + ("latitude", "data/navigation/aws_lat", fake_lat_data), + ]) +def test_get_navigation_data(aws_mwr_l1c_handler, id_name, file_key, fake_array): + """Test retrieving the geolocation (lon, lat) data.""" + did = dict(name=id_name) + dataset_info = dict(file_key=file_key, standard_name=id_name) + res = aws_mwr_l1c_handler.get_dataset(did, dataset_info) + if id_name == "longitude": + fake_array = fake_array.where(fake_array <= 180, fake_array - 360) + + np.testing.assert_allclose(res, fake_array) + assert "x" in res.dims + assert "y" in res.dims + assert res.dims == ("y", "x") + assert "standard_name" in res.attrs + if id_name == "longitude": + assert res.max() <= 180 + + +@pytest.mark.parametrize(("id_name", "file_key", "fake_array"), + [("solar_azimuth_angle", "data/navigation/aws_solar_azimuth_angle", fake_sun_azi_data), + 
("solar_zenith_angle", "data/navigation/aws_solar_zenith_angle", fake_sun_zen_data), + ("satellite_azimuth_angle", "data/navigation/aws_satellite_azimuth_angle", fake_sat_azi_data), + ("satellite_zenith_angle", "data/navigation/aws_satellite_zenith_angle", fake_sat_zen_data)]) +def test_get_viewing_geometry_data(aws_mwr_l1c_handler, id_name, file_key, fake_array): + """Test retrieving the angles_data.""" + dset_id = dict(name=id_name) + dataset_info = dict(file_key=file_key, standard_name=id_name) + res = aws_mwr_l1c_handler.get_dataset(dset_id, dataset_info) + np.testing.assert_allclose(res, fake_array) + assert "x" in res.dims + assert "y" in res.dims + assert res.dims == ("y", "x") + assert "standard_name" in res.attrs diff --git a/satpy/tests/reader_tests/test_eps_sterna_mwr_l1b.py b/satpy/tests/reader_tests/test_eps_sterna_mwr_l1b.py new file mode 100644 index 0000000000..43abdf74d4 --- /dev/null +++ b/satpy/tests/reader_tests/test_eps_sterna_mwr_l1b.py @@ -0,0 +1,70 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# Copyright (c) 2024, 2025 Satpy developers + +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. + +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . + +"""Tests for the EPS-Sterna MWR l1b filehandlers.""" + +from enum import Enum + +import numpy as np +import pytest + +from satpy.tests.reader_tests.conftest import make_fake_mwr_lonlats + +geo_dims = ["n_scans", "n_fovs", "n_feedhorns"] +geo_size = 10*145*4 +shape = (10, 145, 4) +fake_lon_data, fake_lat_data = make_fake_mwr_lonlats(geo_size, geo_dims, shape) + + +@pytest.mark.parametrize(("id_name", "file_key", "fake_array"), + [("longitude", "data/navigation/longitude", fake_lon_data * 1e-4), + ("latitude", "data/navigation/latitude", fake_lat_data), + ]) +def test_get_navigation_data(eps_sterna_mwr_handler, id_name, file_key, fake_array): + """Test retrieving the geolocation (lon-lat) data.""" + Horn = Enum("Horn", ["1", "2", "3", "4"]) + did = dict(name=id_name, horn=Horn["1"]) + dataset_info = dict(file_key=file_key, standard_name=id_name) + res = eps_sterna_mwr_handler.get_dataset(did, dataset_info) + if id_name == "longitude": + fake_array = fake_array.where(fake_array <= 180, fake_array - 360) + + np.testing.assert_allclose(res, fake_array.isel(n_feedhorns=0)) + assert "x" in res.dims + assert "y" in res.dims + assert "orbital_parameters" in res.attrs + assert res.dims == ("y", "x") + assert "standard_name" in res.attrs + assert "n_feedhorns" not in res.coords + if id_name == "longitude": + assert res.max() <= 180 + + +def test_try_get_data_not_in_file(eps_sterna_mwr_handler): + """Test retrieving a data field that is not available in the file.""" + did = dict(name="aws_toa_brightness_temperature") + dataset_info = dict(file_key="data/calibration/aws_toa_brightness_temperature") + + match_str = "Dataset aws_toa_brightness_temperature not available or not supported yet!" 
+    with pytest.raises(NotImplementedError, match=match_str):
+        _ = eps_sterna_mwr_handler.get_dataset(did, dataset_info)
+
+def test_metadata(eps_sterna_mwr_handler):
+    """Test that the metadata is read correctly."""
+    assert eps_sterna_mwr_handler.sensor == "mwr"
+    assert eps_sterna_mwr_handler.platform_name == "ST01"
diff --git a/satpy/tests/reader_tests/test_eum_l2_grib.py b/satpy/tests/reader_tests/test_eum_l2_grib.py
new file mode 100644
index 0000000000..4172cf0ea0
--- /dev/null
+++ b/satpy/tests/reader_tests/test_eum_l2_grib.py
@@ -0,0 +1,306 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+#
+# Copyright (c) 2019 Satpy developers
+#
+# satpy is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# satpy is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with satpy. If not, see <http://www.gnu.org/licenses/>.
+
+"""EUM L2 GRIB-reader test package."""
+
+import datetime
+import sys
+from unittest import mock
+
+import numpy as np
+import pytest
+
+from satpy.tests.utils import make_dataid
+
+# Dictionary to be used as fake GRIB message
+FAKE_SEVIRI_MESSAGE = {
+    "longitudeOfSubSatellitePointInDegrees": 9.5,
+    "dataDate": 20191020,
+    "dataTime": 1745,
+    "Nx": 1000,
+    "Ny": 1200,
+    "earthMajorAxis": 6400.,
+    "earthMinorAxis": 6300.,
+    "NrInRadiusOfEarth": 6.,
+    "XpInGridLengths": 500,
+    "parameterNumber": 30,
+    "missingValue": 9999,
+}
+
+FAKE_FCI_MESSAGE = {
+    "longitudeOfSubSatellitePointInDegrees": 0.0,
+    "dataDate": 20191020,
+    "dataTime": 1745,
+    "Nx": 5568,
+    "Ny": 5568,
+    "earthMajorAxis": 6378140.,
+    "earthMinorAxis": 6356755.,
+    "NrInRadiusOfEarth": 6.6107,
+    "XpInGridLengths": 2784.0,
+    "parameterNumber": 30,
+    "missingValue": 9999,
+}
+
+# List to be used as fake GID source
+FAKE_GID = [0, 1, 2, 3, None]
+
+
+@pytest.fixture
+@mock.patch("satpy.readers.eum_l2_grib.ec")
+def setup_reader(ec_):
+    """Set up the test by creating a mocked eccodes library."""
+    fake_gid_generator = (i for i in FAKE_GID)
+    ec_.codes_grib_new_from_file.side_effect = lambda fh: next(fake_gid_generator)
+    return ec_
+
+
+def common_checks(ec_, reader, mock_file, dataset_id):
+    """Common checks for FCI and SEVIRI data."""
+    # Checks that the codes_grib_multi_support_on function has been called
+    ec_.codes_grib_multi_support_on.assert_called()
+
+    # Restarts the id generator and clears the call history
+    fake_gid_generator = (i for i in FAKE_GID)
+    ec_.codes_grib_new_from_file.side_effect = lambda fh: next(fake_gid_generator)
+    ec_.codes_grib_new_from_file.reset_mock()
+    ec_.codes_release.reset_mock()
+
+    # Checks the correct execution of the get_dataset function with a valid parameter_number
+    valid_dataset = reader.get_dataset(dataset_id, {"parameter_number": 30})
+    # Checks the correct file open call
+    mock_file.assert_called_with("test.grib", "rb")
+    # Checks that the dataset has been created as a DataArray object
+    assert valid_dataset._extract_mock_name() == "xr.DataArray()"
+    # Checks that codes_release has been called after each codes_grib_new_from_file call
+    # (except after the last one which has returned a None)
+    assert ec_.codes_grib_new_from_file.call_count ==
ec_.codes_release.call_count + 1 + + # Restarts the id generator and clears the call history + fake_gid_generator = (i for i in FAKE_GID) + ec_.codes_grib_new_from_file.side_effect = lambda fh: next(fake_gid_generator) + ec_.codes_grib_new_from_file.reset_mock() + ec_.codes_release.reset_mock() + + # Checks the correct execution of the get_dataset function with an invalid parameter_number + invalid_dataset = reader.get_dataset(dataset_id, {"parameter_number": 50}) + # Checks that the function returns None + assert invalid_dataset is None + # Checks that codes_release has been called after each codes_grib_new_from_file call + # (except after the last one which has returned a None) + assert ec_.codes_grib_new_from_file.call_count == ec_.codes_release.call_count + 1 + + +@pytest.mark.skipif(sys.platform.startswith("win"), reason="'eccodes' not supported on Windows") +@mock.patch("satpy.readers.eum_l2_grib.xr") +@mock.patch("satpy.readers.eum_l2_grib.da") +def test_seviri_data_reading(da_, xr_, setup_reader): + """Test the reading of data from the product.""" + from satpy.readers.eum_l2_grib import EUML2GribFileHandler + from satpy.utils import get_legacy_chunk_size + ec_ = setup_reader + chunk_size = get_legacy_chunk_size() + + with mock.patch("builtins.open", mock.mock_open()) as mock_file: + with mock.patch("satpy.readers.eum_l2_grib.ec", ec_): + ec_.codes_get_values.return_value = np.ones(1000 * 1200) + ec_.codes_get.side_effect = lambda gid, key: FAKE_SEVIRI_MESSAGE[key] + reader = EUML2GribFileHandler( + filename="test.grib", + filename_info={ + "spacecraft": "MET11", + "start_time": datetime.datetime(year=2020, month=10, day=20, + hour=19, minute=45, second=0) + }, + filetype_info={ + "file_type": "seviri" + } + ) + + dataset_id = make_dataid(name="dummmy", resolution=3000) + + # Check that end_time is None for SEVIRI before the dataset has been loaded + assert reader.end_time is None + + common_checks(ec_, reader, mock_file, dataset_id) + + # Check that end_time is now a valid datetime.datetime object after the dataset has been loaded + assert reader.end_time == datetime.datetime(year=2020, month=10, day=20, + hour=19, minute=50, second=0) + + + # Checks the correct execution of the _get_global_attributes and _get_metadata_from_msg functions + attributes = reader._get_attributes() + expected_attributes = { + "orbital_parameters": { + "projection_longitude": 9.5 + }, + "sensor": "seviri", + "platform_name": "Meteosat-11" + } + assert attributes == expected_attributes + + # Checks the reading of an array from the message + reader._get_xarray_from_msg(0) + + # Checks that dask.array has been called with the correct arguments + name, args, kwargs = da_.mock_calls[0] + assert np.all(args[0] == np.ones((1200, 1000))) + assert args[1] == chunk_size + + # Checks that xarray.DataArray has been called with the correct arguments + name, args, kwargs = xr_.mock_calls[0] + assert kwargs["dims"] == ("y", "x") + + # Checks the correct execution of the _get_proj_area function + pdict, area_dict = reader._get_proj_area(0) + + expected_pdict = { + "a": 6400000., + "b": 6300000., + "h": 32000000., + "ssp_lon": 9.5, + "nlines": 1000, + "ncols": 1200, + "a_name": "msg_seviri_rss_3km", + "a_desc": "MSG SEVIRI Rapid Scanning Service area definition with 3 km resolution", + "p_id": "", + } + assert pdict == expected_pdict + expected_area_dict = { + "center_point": 500, + "north": 1200, + "east": 1, + "west": 1000, + "south": 1, + } + assert area_dict == expected_area_dict + + # Checks the correct execution of 
the get_area_def function + with mock.patch("satpy.readers.eum_l2_grib.seviri_calculate_area_extent", + mock.Mock(name="seviri_calculate_area_extent")) as cae: + with mock.patch("satpy.readers.eum_l2_grib.get_area_definition", mock.Mock()) as gad: + dataset_id = make_dataid(name="dummmy", resolution=400.) + reader.get_area_def(dataset_id) + # Asserts that seviri_calculate_area_extent has been called with the correct arguments + expected_args = ({"center_point": 500, "east": 1, "west": 1000, "south": 1, "north": 1200, + "column_step": 400., "line_step": 400.},) + name, args, kwargs = cae.mock_calls[0] + assert args == expected_args + # Asserts that get_area_definition has been called with the correct arguments + name, args, kwargs = gad.mock_calls[0] + assert args[0] == expected_pdict + # The second argument must be the return result of seviri_calculate_area_extent + assert args[1]._extract_mock_name() == "seviri_calculate_area_extent()" + + +@pytest.mark.skipif(sys.platform.startswith("win"), reason="'eccodes' not supported on Windows") +@mock.patch("satpy.readers.eum_l2_grib.xr") +@mock.patch("satpy.readers.eum_l2_grib.da") +def test_fci_data_reading(da_, xr_, setup_reader): + """Test the reading of fci data from the product.""" + from satpy.readers.eum_l2_grib import EUML2GribFileHandler + from satpy.utils import get_legacy_chunk_size + ec_ = setup_reader + chunk_size = get_legacy_chunk_size() + + with mock.patch("builtins.open", mock.mock_open()) as mock_file: + with mock.patch("satpy.readers.eum_l2_grib.ec", ec_): + ec_.codes_get_values.return_value = np.ones(5568 * 5568) + ec_.codes_get.side_effect = lambda gid, key: FAKE_FCI_MESSAGE[key] + reader = EUML2GribFileHandler( + filename="test.grib", + filename_info={ + "spacecraft_id": "1", + "start_time": datetime.datetime(year=2020, month=10, day=20, + hour=19, minute=40, second=0), + "end_time": datetime.datetime(year=2020, month=10, day=20, + hour=19, minute=50, second=0) + }, + filetype_info={ + "file_type": "fci" + } + ) + + dataset_id = make_dataid(name="dummmy", resolution=2000) + + # Check end_time + assert reader.end_time == datetime.datetime(year=2020, month=10, day=20, + hour=19, minute=50, second=0) + + common_checks(ec_, reader, mock_file, dataset_id) + + # Checks the correct execution of the _get_global_attributes and _get_metadata_from_msg functions + attributes = reader._get_attributes() + expected_attributes = { + "orbital_parameters": { + "projection_longitude": 0.0 + }, + "sensor": "fci", + "platform_name": "MTG-i1" + } + assert attributes == expected_attributes + + # Checks the reading of an array from the message + reader._get_xarray_from_msg(0) + + # Checks that dask.array has been called with the correct arguments + name, args, kwargs = da_.mock_calls[0] + assert np.all(args[0] == np.ones((5568, 5568))) + assert args[1] == chunk_size + + # Checks that xarray.DataArray has been called with the correct arguments + name, args, kwargs = xr_.mock_calls[0] + assert kwargs["dims"] == ("y", "x") + + # Checks the correct execution of the _get_proj_area function + pdict, area_dict = reader._get_proj_area(0) + + expected_pdict = { + "a": 6378140000.0, + "b": 6356755000.0, + "h": 35785830098.0, + "ssp_lon": 0.0, + "nlines": 5568, + "ncols": 5568, + "a_name": "msg_fci_fdss_2km", + "a_desc": "MSG FCI Full Disk Scanning Service area definition with 2 km resolution", + "p_id": "" + } + assert pdict == expected_pdict + expected_area_dict = { + "nlines": 5568, + "ncols": 5568 + } + assert area_dict == expected_area_dict + + # Checks 
the correct execution of the get_area_def function
+            with mock.patch("satpy.readers.eum_l2_grib.fci_calculate_area_extent",
+                            mock.Mock(name="fci_calculate_area_extent")) as cae:
+                with mock.patch("satpy.readers.eum_l2_grib.get_area_definition", mock.Mock()) as gad:
+                    dataset_id = make_dataid(name="dummmy", resolution=2000.)
+                    reader.get_area_def(dataset_id)
+                    # Asserts that fci_calculate_area_extent has been called with the correct arguments
+                    expected_args = ({"nlines": 5568, "ncols": 5568,
+                                      "column_step": 2000., "line_step": 2000.},)
+                    name, args, kwargs = cae.mock_calls[0]
+                    assert args == expected_args
+                    # Asserts that get_area_definition has been called with the correct arguments
+                    name, args, kwargs = gad.mock_calls[0]
+                    assert args[0] == expected_pdict
+                    # The second argument must be the return result of fci_calculate_area_extent
+                    assert args[1]._extract_mock_name() == "fci_calculate_area_extent()"
diff --git a/satpy/tests/reader_tests/test_fci_base.py b/satpy/tests/reader_tests/test_fci_base.py
new file mode 100644
index 0000000000..eda7eee8a1
--- /dev/null
+++ b/satpy/tests/reader_tests/test_fci_base.py
@@ -0,0 +1,40 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (c) 2017 Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see <http://www.gnu.org/licenses/>.
+ +"""FCI base reader tests package.""" + +from satpy.readers.fci_base import calculate_area_extent +from satpy.tests.utils import make_dataid + + +def test_calculate_area_extent(): + """Test function for calculate_area_extent.""" + dataset_id = make_dataid(name="dummy", resolution=2000.0) + + area_dict = { + "nlines": 5568, + "ncols": 5568, + "line_step": dataset_id["resolution"], + "column_step": dataset_id["resolution"], + } + + area_extent = calculate_area_extent(area_dict) + + expected = (-5568000.0, 5568000.0, 5568000.0, -5568000.0) + + assert area_extent == expected diff --git a/satpy/tests/reader_tests/test_fci_l1c_nc.py b/satpy/tests/reader_tests/test_fci_l1c_nc.py index aa98990df3..93b6513108 100644 --- a/satpy/tests/reader_tests/test_fci_l1c_nc.py +++ b/satpy/tests/reader_tests/test_fci_l1c_nc.py @@ -121,6 +121,7 @@ DICT_CALIBRATION = {"radiance": {"dtype": np.float32, "value_1": 15, "value_0": 9700, + "value_2": -5, "attrs_dict": {"calibration": "radiance", "units": "mW m-2 sr-1 (cm-1)-1", "radiance_unit_conversion_coefficient": np.float32(1234.56) @@ -134,8 +135,9 @@ }, "counts": {"dtype": np.uint16, - "value_1": 1, + "value_1": 5, "value_0": 5000, + "value_2": 1, "attrs_dict": {"calibration": "counts", "units": "count", }, @@ -144,6 +146,7 @@ "brightness_temperature": {"dtype": np.float32, "value_1": np.float32(209.68275), "value_0": np.float32(1888.8513), + "value_2": np.float32("nan"), "attrs_dict": {"calibration": "brightness_temperature", "units": "K", }, @@ -293,16 +296,17 @@ def _get_test_image_data_for_channel(data, ch_str, n_rows_cols): common_attrs = { "scale_factor": 5, - "add_offset": 10, + "add_offset": -10, "long_name": "Effective Radiance", "units": "mW.m-2.sr-1.(cm-1)-1", "ancillary_variables": "pixel_quality" } if "38" in ch_path: fire_line = da.ones((1, n_rows_cols[1]), dtype="uint16", chunks=1024) * 5000 - data_without_fires = da.ones((n_rows_cols[0] - 1, n_rows_cols[1]), dtype="uint16", chunks=1024) + data_without_fires = da.full((n_rows_cols[0] - 2, n_rows_cols[1]), 5, dtype="uint16", chunks=1024) + neg_rad = da.ones((1, n_rows_cols[1]), dtype="uint16", chunks=1024) d = FakeH5Variable( - da.concatenate([fire_line, data_without_fires], axis=0), + da.concatenate([fire_line, data_without_fires, neg_rad], axis=0), dims=("y", "x"), attrs={ "valid_range": [0, 8191], @@ -313,7 +317,7 @@ def _get_test_image_data_for_channel(data, ch_str, n_rows_cols): ) else: d = FakeH5Variable( - da.ones(n_rows_cols, dtype="uint16", chunks=1024), + da.full(n_rows_cols, 5, dtype="uint16", chunks=1024), dims=("y", "x"), attrs={ "valid_range": [0, 4095], @@ -440,9 +444,14 @@ class FakeFCIFileHandlerBase(FakeNetCDF4FileHandler): """Class for faking the NetCDF4 Filehandler.""" cached_file_content: Dict[str, xr.DataArray] = {} - # overwritten by FDHSI and HRFI FIle Handlers + # overwritten by FDHSI and HRFI File Handlers chan_patterns: Dict[str, Dict[str, Union[List[int], str]]] = {} + def __init__(self, *args, **kwargs): + """Initiative fake file handler.""" + kwargs.pop("clip_negative_radiances", None) + super().__init__(*args, **kwargs) + def _get_test_content_all_channels(self): data = {} for pat in self.chan_patterns: @@ -542,11 +551,11 @@ def reader_configs(): os.path.join("readers", "fci_l1c_nc.yaml")) -def _get_reader_with_filehandlers(filenames, reader_configs): +def _get_reader_with_filehandlers(filenames, reader_configs, **reader_kwargs): from satpy.readers import load_reader reader = load_reader(reader_configs) loadables = reader.select_files_from_pathnames(filenames) - 
+    reader.create_filehandlers(loadables, fh_kwargs=reader_kwargs)
     clear_cache(reader)
     return reader
 
@@ -738,7 +747,8 @@ def _reflectance_test(tab, filenames):
 def _other_calibration_test(res, ch, dict_arg):
     """Test of other calibration test."""
     if ch == "ir_38":
-        numpy.testing.assert_array_equal(res[ch][-1], dict_arg["value_1"])
+        numpy.testing.assert_array_equal(res[ch][-1], dict_arg["value_2"])
+        numpy.testing.assert_array_equal(res[ch][-2], dict_arg["value_1"])
         numpy.testing.assert_array_equal(res[ch][0], dict_arg["value_0"])
     else:
         numpy.testing.assert_array_equal(res[ch], dict_arg["value_1"])
@@ -860,6 +870,26 @@ def test_load_calibration(self, reader_configs, fh_param,
             self._get_assert_load(res, ch, DICT_CALIBRATION[calibration],
                                   fh_param["filenames"][0])
 
+    @pytest.mark.parametrize("fh_param", [lazy_fixture("FakeFCIFileHandlerFDHSI_fixture")])
+    def test_load_calibration_negative_rad(self, reader_configs, fh_param):
+        """Test calibrating negative radiances.
+
+        See https://github.com/pytroll/satpy/issues/3009.
+        """
+        import satpy
+        reader = _get_reader_with_filehandlers(fh_param["filenames"],
+                                               reader_configs,
+                                               clip_negative_radiances=True)
+        did = make_dataid(name="ir_38", calibration="radiance")
+        res = reader.load([did], pad_data=False)
+        with satpy.config.set({"readers.clip_negative_radiances": True}):
+            reader2 = _get_reader_with_filehandlers(fh_param["filenames"],
+                                                    reader_configs)
+            res2 = reader2.load([did], pad_data=False)
+        numpy.testing.assert_array_equal(res["ir_38"][-1, :], 5)  # smallest positive radiance
+        numpy.testing.assert_array_equal(res2["ir_38"][-1, :], 5)  # smallest positive radiance
+        assert res["ir_38"].dtype == res2["ir_38"].dtype == np.dtype("float32")
+
     @pytest.mark.parametrize(("calibration", "channel", "resolution"), [
         (calibration, channel, resolution)
         for calibration in ["counts", "radiance", "brightness_temperature", "reflectance"]
diff --git a/satpy/tests/reader_tests/test_fci_l2_nc.py b/satpy/tests/reader_tests/test_fci_l2_nc.py
index 830f793d00..a3f7e6318c 100644
--- a/satpy/tests/reader_tests/test_fci_l2_nc.py
+++ b/satpy/tests/reader_tests/test_fci_l2_nc.py
@@ -616,13 +616,14 @@ def test_all_basic(self, amv_filehandler, amv_file):
         assert amv_filehandler.sensor_name == "test_data_source"
         assert amv_filehandler.ssp_lon == 0.0
 
-        global_attributes = amv_filehandler._get_global_attributes()
+        global_attributes = amv_filehandler._get_global_attributes(product_type="amv")
         expected_global_attributes = {
             "filename": amv_file,
             "spacecraft_name": "test_platform",
             "sensor": "test_data_source",
             "platform_name": "test_platform",
-            "channel": "test_channel"
+            "channel": "test_channel",
+            "ssp_lon": 0.0,
         }
         assert global_attributes == expected_global_attributes
 
diff --git a/satpy/tests/reader_tests/test_generic_image.py b/satpy/tests/reader_tests/test_generic_image.py
index 40d7611eb4..0d5d647420 100644
--- a/satpy/tests/reader_tests/test_generic_image.py
+++ b/satpy/tests/reader_tests/test_generic_image.py
@@ -16,273 +16,308 @@
 # satpy. If not, see <http://www.gnu.org/licenses/>.
"""Unittests for generic image reader.""" -import os -import unittest +import datetime as dt import dask.array as da import numpy as np import pytest import xarray as xr +from pyresample.geometry import AreaDefinition +from rasterio.errors import NotGeoreferencedWarning +from satpy import Scene +from satpy.readers.generic_image import GenericImageFileHandler from satpy.tests.utils import RANDOM_GEN, make_dataid +DATA_DATE = dt.datetime(2018, 1, 1) -class TestGenericImage(unittest.TestCase): - """Test generic image reader.""" - - def setUp(self): - """Create temporary images to test on.""" - import datetime as dt - import tempfile - - from pyresample.geometry import AreaDefinition - - from satpy.scene import Scene - - self.date = dt.datetime(2018, 1, 1) - - # Create area definition - pcs_id = "ETRS89 / LAEA Europe" - proj4_dict = "EPSG:3035" - self.x_size = 100 - self.y_size = 100 - area_extent = (2426378.0132, 1528101.2618, 6293974.6215, 5446513.5222) - self.area_def = AreaDefinition("geotiff_area", pcs_id, pcs_id, - proj4_dict, self.x_size, self.y_size, - area_extent) - - # Create datasets for L, LA, RGB and RGBA mode images - r__ = da.random.randint(0, 256, size=(self.y_size, self.x_size), - chunks=(50, 50)).astype(np.uint8) - g__ = da.random.randint(0, 256, size=(self.y_size, self.x_size), - chunks=(50, 50)).astype(np.uint8) - b__ = da.random.randint(0, 256, size=(self.y_size, self.x_size), - chunks=(50, 50)).astype(np.uint8) - a__ = 255 * np.ones((self.y_size, self.x_size), dtype=np.uint8) - a__[:10, :10] = 0 - a__ = da.from_array(a__, chunks=(50, 50)) - - r_nan__ = RANDOM_GEN.uniform(0., 1., size=(self.y_size, self.x_size)) - r_nan__[:10, :10] = np.nan - r_nan__ = da.from_array(r_nan__, chunks=(50, 50)) - - ds_l = xr.DataArray(da.stack([r__]), dims=("bands", "y", "x"), - attrs={"name": "test_l", - "start_time": self.date}) - ds_l["bands"] = ["L"] - ds_la = xr.DataArray(da.stack([r__, a__]), dims=("bands", "y", "x"), - attrs={"name": "test_la", - "start_time": self.date}) - ds_la["bands"] = ["L", "A"] - ds_rgb = xr.DataArray(da.stack([r__, g__, b__]), +X_SIZE = 100 +Y_SIZE = 100 +AREA_DEFINITION = AreaDefinition("geotiff_area", "ETRS89 / LAEA Europe", "ETRS89 / LAEA Europe", + "EPSG:3035", X_SIZE, Y_SIZE, + (2426378.0132, 1528101.2618, 6293974.6215, 5446513.5222)) + + +@pytest.fixture +def random_image_channel(): + """Create random data.""" + return da.random.randint(0, 256, size=(Y_SIZE, X_SIZE), chunks=(50, 50)).astype(np.uint8) + + +random_image_channel_l = random_image_channel +random_image_channel_r = random_image_channel +random_image_channel_g = random_image_channel +random_image_channel_b = random_image_channel + + +@pytest.fixture +def alpha_channel(): + """Create alpha channel with fully transparent and opaque areas.""" + a__ = 255 * np.ones((Y_SIZE, X_SIZE), dtype=np.uint8) + a__[:10, :10] = 0 + return da.from_array(a__, chunks=(50, 50)) + + +@pytest.fixture +def random_image_channel_with_nans(): + """Create random data and replace a portion of it with NaN values.""" + arr = RANDOM_GEN.uniform(0., 1., size=(Y_SIZE, X_SIZE)) + arr[:10, :10] = np.nan + return da.from_array(arr, chunks=(50, 50)) + + +@pytest.fixture +def test_image_l(tmp_path, random_image_channel_l): + """Create a test image with mode L.""" + dset = xr.DataArray(da.stack([random_image_channel_l]), dims=("bands", "y", "x"), + attrs={"name": "test_l", "start_time": DATA_DATE}) + dset["bands"] = ["L"] + fname = tmp_path / "test_l.png" + _save_image(dset, fname, "simple_image") + + return fname + + 
+@pytest.fixture +def test_image_l_nan(tmp_path, random_image_channel_with_nans): + """Create a test image with mode L where data has NaN values.""" + dset = xr.DataArray(da.stack([random_image_channel_with_nans]), dims=("bands", "y", "x"), + attrs={"name": "test_l_nan", "start_time": DATA_DATE}) + dset["bands"] = ["L"] + fname = tmp_path / "test_l_nan_nofillvalue.tif" + _save_image(dset, fname, "geotiff") + + return fname + + +@pytest.fixture +def test_image_l_nan_fill_value(tmp_path, random_image_channel_with_nans): + """Create a test image with mode L where data has NaN values and fill value is set.""" + dset = xr.DataArray(da.stack([random_image_channel_with_nans]), dims=("bands", "y", "x"), + attrs={"name": "test_l_nan", "start_time": DATA_DATE}) + dset["bands"] = ["L"] + fname = tmp_path / "test_l_nan_fillvalue.tif" + _save_image(dset, fname, "geotiff", fill_value=0) + + return fname + + +@pytest.fixture +def test_image_la(tmp_path, random_image_channel_l, alpha_channel): + """Create a test image with mode LA.""" + dset = xr.DataArray(da.stack([random_image_channel_l, alpha_channel]), + dims=("bands", "y", "x"), + attrs={"name": "test_la", "start_time": DATA_DATE}) + dset["bands"] = ["L", "A"] + fname = tmp_path / "20180101_0000_test_la.png" + _save_image(dset, fname, "simple_image") + + return fname + + +@pytest.fixture +def test_image_rgb(tmp_path, random_image_channel_r, random_image_channel_g, random_image_channel_b): + """Create a test image with mode RGB.""" + dset = xr.DataArray(da.stack([random_image_channel_r, random_image_channel_g, random_image_channel_b]), dims=("bands", "y", "x"), attrs={"name": "test_rgb", - "start_time": self.date}) - ds_rgb["bands"] = ["R", "G", "B"] - ds_rgba = xr.DataArray(da.stack([r__, g__, b__, a__]), - dims=("bands", "y", "x"), - attrs={"name": "test_rgba", - "start_time": self.date}) - ds_rgba["bands"] = ["R", "G", "B", "A"] - - ds_l_nan = xr.DataArray(da.stack([r_nan__]), - dims=("bands", "y", "x"), - attrs={"name": "test_l_nan", - "start_time": self.date}) - ds_l_nan["bands"] = ["L"] - - # Temp dir for the saved images - self.base_dir = tempfile.mkdtemp() - - # Put the datasets to Scene for easy saving - scn = Scene() - scn["l"] = ds_l - scn["l"].attrs["area"] = self.area_def - scn["la"] = ds_la - scn["la"].attrs["area"] = self.area_def - scn["rgb"] = ds_rgb - scn["rgb"].attrs["area"] = self.area_def - scn["rgba"] = ds_rgba - scn["rgba"].attrs["area"] = self.area_def - scn["l_nan"] = ds_l_nan - scn["l_nan"].attrs["area"] = self.area_def - - # Save the images. 
Two images in PNG and two in GeoTIFF - scn.save_dataset("l", os.path.join(self.base_dir, "test_l.png"), writer="simple_image") - scn.save_dataset("la", os.path.join(self.base_dir, "20180101_0000_test_la.png"), writer="simple_image") - scn.save_dataset("rgb", os.path.join(self.base_dir, "20180101_0000_test_rgb.tif"), writer="geotiff") - scn.save_dataset("rgba", os.path.join(self.base_dir, "test_rgba.tif"), writer="geotiff") - scn.save_dataset("l_nan", os.path.join(self.base_dir, "test_l_nan_fillvalue.tif"), - writer="geotiff", fill_value=0) - scn.save_dataset("l_nan", os.path.join(self.base_dir, "test_l_nan_nofillvalue.tif"), - writer="geotiff") - - self.scn = scn - - def tearDown(self): - """Remove the temporary directory created for a test.""" - try: - import shutil - shutil.rmtree(self.base_dir, ignore_errors=True) - except OSError: - pass - - def test_png_scene(self): - """Test reading PNG images via satpy.Scene().""" - from rasterio.errors import NotGeoreferencedWarning - - from satpy import Scene - - fname = os.path.join(self.base_dir, "test_l.png") - with pytest.warns(NotGeoreferencedWarning, match=r"Dataset has no geotransform"): - scn = Scene(reader="generic_image", filenames=[fname]) - scn.load(["image"]) - assert scn["image"].shape == (1, self.y_size, self.x_size) - assert scn.sensor_names == {"images"} - assert scn.start_time is None - assert scn.end_time is None - assert "area" not in scn["image"].attrs - - fname = os.path.join(self.base_dir, "20180101_0000_test_la.png") - with pytest.warns(NotGeoreferencedWarning, match=r"Dataset has no geotransform"): - scn = Scene(reader="generic_image", filenames=[fname]) - scn.load(["image"]) - data = da.compute(scn["image"].data) - assert scn["image"].shape == (1, self.y_size, self.x_size) - assert scn.sensor_names == {"images"} - assert scn.start_time == self.date - assert scn.end_time == self.date - assert "area" not in scn["image"].attrs - assert np.sum(np.isnan(data)) == 100 - - def test_geotiff_scene(self): - """Test reading TIFF images via satpy.Scene().""" - from satpy import Scene - - fname = os.path.join(self.base_dir, "20180101_0000_test_rgb.tif") - scn = Scene(reader="generic_image", filenames=[fname]) - scn.load(["image"]) - assert scn["image"].shape == (3, self.y_size, self.x_size) - assert scn.sensor_names == {"images"} - assert scn.start_time == self.date - assert scn.end_time == self.date - assert scn["image"].area == self.area_def - - fname = os.path.join(self.base_dir, "test_rgba.tif") - scn = Scene(reader="generic_image", filenames=[fname]) - scn.load(["image"]) - assert scn["image"].shape == (3, self.y_size, self.x_size) - assert scn.sensor_names == {"images"} - assert scn.start_time is None - assert scn.end_time is None - assert scn["image"].area == self.area_def - - def test_geotiff_scene_nan(self): - """Test reading TIFF images originally containing NaN values via satpy.Scene().""" - from satpy import Scene - - fname = os.path.join(self.base_dir, "test_l_nan_fillvalue.tif") - scn = Scene(reader="generic_image", filenames=[fname]) - scn.load(["image"]) - assert scn["image"].shape == (1, self.y_size, self.x_size) - assert np.sum(scn["image"].data[0][:10, :10].compute()) == 0 - - fname = os.path.join(self.base_dir, "test_l_nan_nofillvalue.tif") - scn = Scene(reader="generic_image", filenames=[fname]) - scn.load(["image"]) - assert scn["image"].shape == (1, self.y_size, self.x_size) - assert np.all(np.isnan(scn["image"].data[0][:10, :10].compute())) - - def test_GenericImageFileHandler(self): - """Test direct use of the 
reader.""" - from satpy.readers.generic_image import GenericImageFileHandler - - fname = os.path.join(self.base_dir, "test_rgba.tif") - fname_info = {"start_time": self.date} - ftype_info = {} - reader = GenericImageFileHandler(fname, fname_info, ftype_info) - - foo = make_dataid(name="image") - assert reader.file_content - assert reader.finfo["filename"] == fname - assert reader.finfo["start_time"] == self.date - assert reader.finfo["end_time"] == self.date - assert reader.area == self.area_def - assert reader.get_area_def(None) == self.area_def - assert reader.start_time == self.date - assert reader.end_time == self.date - - dataset = reader.get_dataset(foo, {}) - assert isinstance(dataset, xr.DataArray) - assert "spatial_ref" in dataset.coords - assert np.all(np.isnan(dataset.data[:, :10, :10].compute())) - - def test_GenericImageFileHandler_masking_only_integer(self): - """Test direct use of the reader.""" - from satpy.readers.generic_image import GenericImageFileHandler - - class FakeGenericImageFileHandler(GenericImageFileHandler): - - def __init__(self, filename, filename_info, filetype_info, file_content, **kwargs): - """Get fake file content from 'get_test_content'.""" - super(GenericImageFileHandler, self).__init__(filename, filename_info, filetype_info) - self.file_content = file_content - self.dataset_name = None - self.file_content.update(kwargs) - - data = self.scn["rgba"] - - # do nothing if not integer - float_data = data / 255. - reader = FakeGenericImageFileHandler("dummy", {}, {}, {"image": float_data}) - assert reader.get_dataset(make_dataid(name="image"), {}) is float_data - - # masking if integer - data = data.astype(np.uint32) - assert data.bands.size == 4 - reader = FakeGenericImageFileHandler("dummy", {}, {}, {"image": data}) - ret_data = reader.get_dataset(make_dataid(name="image"), {}) - assert ret_data.bands.size == 3 - - def test_GenericImageFileHandler_datasetid(self): - """Test direct use of the reader.""" - from satpy.readers.generic_image import GenericImageFileHandler - - fname = os.path.join(self.base_dir, "test_rgba.tif") - fname_info = {"start_time": self.date} - ftype_info = {} - reader = GenericImageFileHandler(fname, fname_info, ftype_info) - - foo = make_dataid(name="image-custom") - assert reader.file_content - dataset = reader.get_dataset(foo, {}) - assert isinstance(dataset, xr.DataArray) - - def test_GenericImageFileHandler_nodata(self): - """Test nodata handling with direct use of the reader.""" - from satpy.readers.generic_image import GenericImageFileHandler - - fname = os.path.join(self.base_dir, "test_l_nan_fillvalue.tif") - fname_info = {"start_time": self.date} - ftype_info = {} - reader = GenericImageFileHandler(fname, fname_info, ftype_info) - - foo = make_dataid(name="image-custom") - assert reader.file_content - info = {"nodata_handling": "nan_mask"} - dataset = reader.get_dataset(foo, info) - assert isinstance(dataset, xr.DataArray) - assert np.all(np.isnan(dataset.data[0][:10, :10].compute())) - assert np.isnan(dataset.attrs["_FillValue"]) - - info = {"nodata_handling": "fill_value"} - dataset = reader.get_dataset(foo, info) - assert isinstance(dataset, xr.DataArray) - assert np.sum(dataset.data[0][:10, :10].compute()) == 0 - assert dataset.attrs["_FillValue"] == 0 - - # default same as 'nodata_handling': 'fill_value' - dataset = reader.get_dataset(foo, {}) - assert isinstance(dataset, xr.DataArray) - assert np.sum(dataset.data[0][:10, :10].compute()) == 0 - assert dataset.attrs["_FillValue"] == 0 + "start_time": DATA_DATE}) + 
dset["bands"] = ["R", "G", "B"] + fname = tmp_path / "20180101_0000_test_rgb.tif" + _save_image(dset, fname, "geotiff") + + return fname + + +@pytest.fixture +def rgba_dset(random_image_channel_r, random_image_channel_g, random_image_channel_b, alpha_channel): + """Create an RGB dataset.""" + dset = xr.DataArray( + da.stack([random_image_channel_r, random_image_channel_g, random_image_channel_b, alpha_channel]), + dims=("bands", "y", "x"), + attrs={"name": "test_rgba", + "start_time": DATA_DATE}) + dset["bands"] = ["R", "G", "B", "A"] + return dset + + +@pytest.fixture +def test_image_rgba(tmp_path, rgba_dset): + """Create a test image with mode RGBA.""" + fname = tmp_path / "test_rgba.tif" + _save_image(rgba_dset, fname, "geotiff") + + return fname + + +def _save_image(dset, fname, writer, fill_value=None): + scn = Scene() + scn["data"] = dset + scn["data"].attrs["area"] = AREA_DEFINITION + scn.save_dataset("data", str(fname), writer=writer, fill_value=fill_value) + + +def test_png_scene_l_mode(test_image_l): + """Test reading a PNG image with L mode via satpy.Scene().""" + with pytest.warns(NotGeoreferencedWarning, match=r"Dataset has no geotransform"): + scn = Scene(reader="generic_image", filenames=[test_image_l]) + scn.load(["image"]) + _assert_image_common(scn, 1, None, None, np.float32) + assert "area" not in scn["image"].attrs + + +def _assert_image_common(scn, channels, start_time, end_time, dtype): + assert scn["image"].shape == (channels, Y_SIZE, X_SIZE) + assert scn.sensor_names == {"images"} + try: + assert scn.start_time is start_time + assert scn.end_time is end_time + except AssertionError: + assert scn.start_time == start_time + assert scn.end_time == end_time + assert scn["image"].dtype == dtype + + +def test_png_scene_la_mode(test_image_la): + """Test reading a PNG image with LA mode via satpy.Scene().""" + with pytest.warns(NotGeoreferencedWarning, match=r"Dataset has no geotransform"): + scn = Scene(reader="generic_image", filenames=[test_image_la]) + scn.load(["image"]) + data = da.compute(scn["image"].data) + assert np.sum(np.isnan(data)) == 100 + assert "area" not in scn["image"].attrs + _assert_image_common(scn, 1, DATA_DATE, DATA_DATE, np.float32) + + +def test_geotiff_scene_rgb(test_image_rgb): + """Test reading geotiff image in RGB mode via satpy.Scene().""" + scn = Scene(reader="generic_image", filenames=[test_image_rgb]) + scn.load(["image"]) + assert scn["image"].area == AREA_DEFINITION + _assert_image_common(scn, 3, DATA_DATE, DATA_DATE, np.float32) + + +def test_geotiff_scene_rgba(test_image_rgba): + """Test reading geotiff image in RGBA mode via satpy.Scene().""" + scn = Scene(reader="generic_image", filenames=[test_image_rgba]) + scn.load(["image"]) + _assert_image_common(scn, 3, None, None, np.float32) + assert scn["image"].area == AREA_DEFINITION + + +def test_geotiff_scene_nan_fill_value(test_image_l_nan_fill_value): + """Test reading geotiff image with fill value set via satpy.Scene().""" + scn = Scene(reader="generic_image", filenames=[test_image_l_nan_fill_value]) + scn.load(["image"]) + assert np.sum(scn["image"].data[0][:10, :10].compute()) == 0 + _assert_image_common(scn, 1, None, None, np.uint8) + +def test_geotiff_scene_nan(test_image_l_nan): + """Test reading geotiff image with NaN values in it via satpy.Scene().""" + scn = Scene(reader="generic_image", filenames=[test_image_l_nan]) + scn.load(["image"]) + assert np.all(np.isnan(scn["image"].data[0][:10, :10].compute())) + _assert_image_common(scn, 1, None, None, np.float32) + + +def 
+def test_GenericImageFileHandler(test_image_rgba):
+    """Test direct use of the reader."""
+    from satpy.readers.generic_image import GenericImageFileHandler
+
+    fname_info = {"start_time": DATA_DATE}
+    ftype_info = {}
+    reader = GenericImageFileHandler(test_image_rgba, fname_info, ftype_info)
+
+    data_id = make_dataid(name="image")
+    assert reader.file_content
+    assert reader.finfo["filename"] == test_image_rgba
+    assert reader.finfo["start_time"] == DATA_DATE
+    assert reader.finfo["end_time"] == DATA_DATE
+    assert reader.area == AREA_DEFINITION
+    assert reader.get_area_def(None) == AREA_DEFINITION
+    assert reader.start_time == DATA_DATE
+    assert reader.end_time == DATA_DATE
+
+    dataset = reader.get_dataset(data_id, {})
+    assert isinstance(dataset, xr.DataArray)
+    assert "spatial_ref" in dataset.coords
+    assert np.all(np.isnan(dataset.data[:, :10, :10].compute()))
+
+
+class FakeGenericImageFileHandler(GenericImageFileHandler):
+    """Fake file handler."""
+
+    def __init__(self, filename, filename_info, filetype_info, file_content, **kwargs):
+        """Get fake file content from 'get_test_content'."""
+        super(GenericImageFileHandler, self).__init__(filename, filename_info, filetype_info)
+        self.file_content = file_content
+        self.dataset_name = None
+        self.file_content.update(kwargs)
+
+
+def test_GenericImageFileHandler_no_masking_for_float(rgba_dset):
+    """Test direct use of the reader for float data."""
+    # do nothing if not integer
+    float_data = rgba_dset / 255.
+    reader = FakeGenericImageFileHandler("dummy", {}, {}, {"image": float_data})
+    assert reader.get_dataset(make_dataid(name="image"), {}) is float_data
+
+
+def test_GenericImageFileHandler_masking_for_integer(rgba_dset):
+    """Test direct use of the reader for integer data."""
+    # masking if integer
+    data = rgba_dset.astype(np.uint32)
+    assert data.bands.size == 4
+    reader = FakeGenericImageFileHandler("dummy", {}, {}, {"image": data})
+    ret_data = reader.get_dataset(make_dataid(name="image"), {})
+    assert ret_data.bands.size == 3
+
+
+def test_GenericImageFileHandler_datasetid(test_image_rgba):
+    """Test direct use of the reader."""
+    fname_info = {"start_time": DATA_DATE}
+    ftype_info = {}
+    reader = GenericImageFileHandler(test_image_rgba, fname_info, ftype_info)
+
+    data_id = make_dataid(name="image-custom")
+    assert reader.file_content
+    dataset = reader.get_dataset(data_id, {})
+    assert isinstance(dataset, xr.DataArray)
+
+
+@pytest.fixture
+def reader_l_nan_fill_value(test_image_l_nan_fill_value):
+    """Create GenericImageFileHandler."""
+    fname_info = {"start_time": DATA_DATE}
+    ftype_info = {}
+    return GenericImageFileHandler(test_image_l_nan_fill_value, fname_info, ftype_info)
+
+
+def test_GenericImageFileHandler_nodata_nan_mask(reader_l_nan_fill_value):
+    """Test nodata handling with direct use of the reader with nodata handling: nan_mask."""
+    data_id = make_dataid(name="image-custom")
+    assert reader_l_nan_fill_value.file_content
+    info = {"nodata_handling": "nan_mask"}
+    dataset = reader_l_nan_fill_value.get_dataset(data_id, info)
+    assert isinstance(dataset, xr.DataArray)
+    assert np.all(np.isnan(dataset.data[0][:10, :10].compute()))
+    assert np.isnan(dataset.attrs["_FillValue"])
+
+
+def test_GenericImageFileHandler_nodata_fill_value(reader_l_nan_fill_value):
+    """Test nodata handling with direct use of the reader with nodata handling: fill_value."""
+    info = {"nodata_handling": "fill_value"}
+    data_id = make_dataid(name="image-custom")
+    dataset = reader_l_nan_fill_value.get_dataset(data_id, info)
+    assert isinstance(dataset, xr.DataArray)
+    assert np.sum(dataset.data[0][:10, :10].compute()) == 0
+    assert dataset.attrs["_FillValue"] == 0
+
+
+def test_GenericImageFileHandler_nodata_fill_value_default(reader_l_nan_fill_value):
+    """Test nodata handling with direct use of the reader with default nodata handling (fill_value)."""
+    data_id = make_dataid(name="image-custom")
+    dataset = reader_l_nan_fill_value.get_dataset(data_id, {})
+    assert isinstance(dataset, xr.DataArray)
+    assert np.sum(dataset.data[0][:10, :10].compute()) == 0
+    assert dataset.attrs["_FillValue"] == 0
diff --git a/satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py b/satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py
index 7378078c2a..3b100415a4 100644
--- a/satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py
+++ b/satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py
@@ -280,6 +280,8 @@ def test_filehandler_returns_area(insat_filehandler):
     area_def = fh.get_area_def(ds_id)
     _ = area_def.get_lonlats(chunks=1000)
     assert subsatellite_longitude == area_def.crs.to_cf()["longitude_of_projection_origin"]
+    np.testing.assert_allclose(area_def.area_extent, [-5620072.101427, -5640108.009097,
+                                                      5620072.101427, 5644115.190631])
 
 
 def test_filehandler_has_start_and_end_time(insat_filehandler):
diff --git a/satpy/tests/reader_tests/test_li_l2_nc.py b/satpy/tests/reader_tests/test_li_l2_nc.py
index 36cc930683..13981a37f4 100644
--- a/satpy/tests/reader_tests/test_li_l2_nc.py
+++ b/satpy/tests/reader_tests/test_li_l2_nc.py
@@ -19,6 +19,7 @@
 import os
 from unittest import mock
 
+import dask.array as da
 import numpy as np
 import pytest
 import xarray as xr
@@ -128,6 +129,7 @@ def _test_dataset_variable(self, var_params, sname=""):
         res = self.get_variable_dataset(dataset_info, dname, handler)
         assert res.shape == shape
         assert res.dims[0] == "y"
+        assert isinstance(res.data, da.Array)
         # Should retrieve content with fullname key:
         full_name = self.create_fullname_key(desc, var_path, dname, sname=sname)
         # Note: 'content' is not recognized as a valid member of the class below
diff --git a/satpy/tests/reader_tests/test_mersi_l1b.py b/satpy/tests/reader_tests/test_mersi_l1b.py
index e9b8c45ae6..3bdcfdb66f 100644
--- a/satpy/tests/reader_tests/test_mersi_l1b.py
+++ b/satpy/tests/reader_tests/test_mersi_l1b.py
@@ -31,23 +31,27 @@ def _get_calibration(num_scans, ftype):
     calibration = {
         f"Calibration/{ftype}_Cal_Coeff": xr.DataArray(
-            da.ones((19, 3), chunks=1024),
-            attrs={"Slope": np.array([1.] * 19), "Intercept": np.array([0.] * 19)},
+            da.ones((19, 3), chunks=1024, dtype=np.float32),
+            attrs={"Slope": np.array([1.] * 19, dtype=np.float32),
+                   "Intercept": np.array([0.] * 19, dtype=np.float32)},
             dims=("_bands", "_coeffs")),
         "Calibration/Solar_Irradiance": xr.DataArray(
-            da.ones((19, ), chunks=1024),
-            attrs={"Slope": np.array([1.] * 19), "Intercept": np.array([0.] * 19)},
+            da.ones((19, ), chunks=1024, dtype=np.float32),
+            attrs={"Slope": np.array([1.] * 19, dtype=np.float32),
+                   "Intercept": np.array([0.] * 19, dtype=np.float32)},
             dims=("_bands")),
         "Calibration/Solar_Irradiance_LL": xr.DataArray(
-            da.ones((1, ), chunks=1024),
-            attrs={"Slope": np.array([1.]), "Intercept": np.array([0.])},
+            da.ones((1, ), chunks=1024, dtype=np.float32),
+            attrs={"Slope": np.array([1.], dtype=np.float32),
+                   "Intercept": np.array([0.], dtype=np.float32)},
             dims=("_bands")),
         "Calibration/IR_Cal_Coeff": xr.DataArray(
-            da.ones((6, 4, num_scans), chunks=1024),
-            attrs={"Slope": np.array([1.] * 6), "Intercept": np.array([0.]
* 6)}, + da.ones((6, 4, num_scans), chunks=1024, dtype=np.float32), + attrs={"Slope": np.array([1.] * 6, dtype=np.float32), + "Intercept": np.array([0.] * 6, dtype=np.float32)}, dims=("_bands", "_coeffs", "_scans")), } return calibration @@ -62,7 +66,7 @@ def _get_250m_data(num_scans, rows_per_scan, num_cols, filetype_info): def_attrs = {fill_value_name: 65535, "valid_range": [0, 4095], - "Slope": np.array([1.] * 1), "Intercept": np.array([0.] * 1) + "Slope": np.array([1.] * 1, dtype=np.float32), "Intercept": np.array([0.] * 1, dtype=np.float32) } nounits_attrs = {**def_attrs, **{"units": "NO"}} radunits_attrs = {**def_attrs, **{"units": "mW/ (m2 cm-1 sr)"}} @@ -116,7 +120,7 @@ def _get_500m_data(num_scans, rows_per_scan, num_cols): da.ones((5, num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs={ - "Slope": np.array([1.] * 5), "Intercept": np.array([0.] * 5), + "Slope": np.array([1.] * 5, dtype=np.float32), "Intercept": np.array([0.] * 5, dtype=np.float32), "FillValue": 65535, "units": "NO", "valid_range": [0, 4095], @@ -128,7 +132,7 @@ def _get_500m_data(num_scans, rows_per_scan, num_cols): da.ones((3, num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs={ - "Slope": np.array([1.] * 3), "Intercept": np.array([0.] * 3), + "Slope": np.array([1.] * 3, dtype=np.float32), "Intercept": np.array([0.] * 3, dtype=np.float32), "FillValue": 65535, "units": "mW/ (m2 cm-1 sr)", "valid_range": [0, 25000], @@ -150,7 +154,8 @@ def _get_1km_data(num_scans, rows_per_scan, num_cols, filetype_info): data = {"Data/EV_1KM_LL": xr.DataArray(da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), - attrs={"Slope": np.array([1.]), "Intercept": np.array([0.]), + attrs={"Slope": np.array([1.], dtype=np.float32), + "Intercept": np.array([0.], dtype=np.float32), "FillValue": 65535, "units": "NO", "valid_range": [0, 4095], @@ -158,7 +163,8 @@ def _get_1km_data(num_scans, rows_per_scan, num_cols, filetype_info): dims=("_rows", "_cols")), f"{key_prefix}EV_1KM_RefSB": xr.DataArray(da.ones((15, num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), - attrs={"Slope": np.array([1.] * 15), "Intercept": np.array([0.] * 15), + attrs={"Slope": np.array([1.] * 15, dtype=np.float32), + "Intercept": np.array([0.] * 15, dtype=np.float32), fill_value_name: 65535, "units": "NO", "valid_range": [0, 4095], @@ -166,7 +172,8 @@ def _get_1km_data(num_scans, rows_per_scan, num_cols, filetype_info): dims=("_ref_bands", "_rows", "_cols")), "Data/EV_1KM_Emissive": xr.DataArray(da.ones((4, num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), - attrs={"Slope": np.array([1.] * 4), "Intercept": np.array([0.] * 4), + attrs={"Slope": np.array([1.] * 4, dtype=np.float32), + "Intercept": np.array([0.] * 4, dtype=np.float32), "FillValue": 65535, "units": "mW/ (m2 cm-1 sr)", "valid_range": [0, 25000], @@ -174,7 +181,8 @@ def _get_1km_data(num_scans, rows_per_scan, num_cols, filetype_info): dims=("_ir_bands", "_rows", "_cols")), f"{key_prefix}EV_250_Aggr.1KM_RefSB": xr.DataArray(da.ones((4, num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), - attrs={"Slope": np.array([1.] * 4), "Intercept": np.array([0.] * 4), + attrs={"Slope": np.array([1.] * 4, dtype=np.float32), + "Intercept": np.array([0.] 
* 4, dtype=np.float32), fill_value_name: 65535, "units": "NO", "valid_range": [0, 4095], @@ -182,14 +190,16 @@ def _get_1km_data(num_scans, rows_per_scan, num_cols, filetype_info): dims=("_ref250_bands", "_rows", "_cols")), f"{key_prefix}EV_250_Aggr.1KM_Emissive": xr.DataArray(da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), - attrs={"Slope": np.array([1.]), "Intercept": np.array([0.]), + attrs={"Slope": np.array([1.], dtype=np.float32), + "Intercept": np.array([0.], dtype=np.float32), fill_value_name: 65535, "units": radunits, "valid_range": [0, 4095], "long_name": b"250m Emissive Bands Earth View Science Data Aggregated to 1 km"}, dims=("_rows", "_cols")) if is_mersi1 else xr.DataArray(da.ones((2, num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), - attrs={"Slope": np.array([1.] * 2), "Intercept": np.array([0.] * 2), + attrs={"Slope": np.array([1.] * 2, dtype=np.float32), + "Intercept": np.array([0.] * 2, dtype=np.float32), "FillValue": 65535, "units": "mW/ (m2 cm-1 sr)", "valid_range": [0, 4095], @@ -197,9 +207,10 @@ def _get_1km_data(num_scans, rows_per_scan, num_cols, filetype_info): dims=("_ir250_bands", "_rows", "_cols")), f"{key_prefix}SensorZenith": xr.DataArray( - da.ones((num_scans * rows_per_scan, num_cols), chunks=1024), + da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.int16), attrs={ - "Slope": np.array([.01] * 1), "Intercept": np.array([0.] * 1), + "Slope": np.array([.01] * 1, dtype=np.float32), + "Intercept": np.array([0.] * 1, dtype=np.float32), "units": "degree", "valid_range": [0, 28000], }, @@ -212,7 +223,7 @@ def _get_250m_ll_data(num_scans, rows_per_scan, num_cols): # Set some default attributes def_attrs = {"FillValue": 65535, "valid_range": [0, 4095], - "Slope": np.array([1.]), "Intercept": np.array([0.]), + "Slope": np.array([1.], dtype=np.float32), "Intercept": np.array([0.], dtype=np.float32), "long_name": b"250m Earth View Science Data", "units": "mW/ (m2 cm-1 sr)", } @@ -235,27 +246,27 @@ def _get_geo_data(num_scans, rows_per_scan, num_cols, prefix): geo = { prefix + "Longitude": xr.DataArray( - da.ones((num_scans * rows_per_scan, num_cols), chunks=1024), + da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.float64), attrs={ - "Slope": np.array([1.] * 1), "Intercept": np.array([0.] * 1), + "Slope": np.array([1.] * 1, dtype=np.float64), "Intercept": np.array([0.] * 1, dtype=np.float64), "units": "degree", "valid_range": [-90, 90], }, dims=("_rows", "_cols")), prefix + "Latitude": xr.DataArray( - da.ones((num_scans * rows_per_scan, num_cols), chunks=1024), + da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.float64), attrs={ - "Slope": np.array([1.] * 1), "Intercept": np.array([0.] * 1), + "Slope": np.array([1.] * 1, dtype=np.float64), "Intercept": np.array([0.] * 1, dtype=np.float64), "units": "degree", "valid_range": [-180, 180], }, dims=("_rows", "_cols")), prefix + "SensorZenith": xr.DataArray( - da.ones((num_scans * rows_per_scan, num_cols), chunks=1024), + da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.int16), attrs={ - "Slope": np.array([.01] * 1), "Intercept": np.array([0.] * 1), + "Slope": np.array([.01] * 1, dtype=np.float32), "Intercept": np.array([0.] 
* 1, dtype=np.float32), "units": "degree", "valid_range": [0, 28000], }, @@ -288,13 +299,13 @@ def get_test_content(self, filename, filename_info, filetype_info): "/attr/Observing Ending Time": "18:38:36.728", } fy3a_attrs = { - "/attr/VIR_Cal_Coeff": np.array([0.0, 1.0, 0.0] * 19), + "/attr/VIR_Cal_Coeff": np.array([0.0, 1.0, 0.0] * 19, dtype=np.float32), } fy3b_attrs = { - "/attr/VIS_Cal_Coeff": np.array([0.0, 1.0, 0.0] * 19), + "/attr/VIS_Cal_Coeff": np.array([0.0, 1.0, 0.0] * 19, dtype=np.float32), } fy3d_attrs = { - "/attr/Solar_Irradiance": np.array([1.0] * 19), + "/attr/Solar_Irradiance": np.array([1.0] * 19, dtype=np.float32), } global_attrs, ftype = self._set_sensor_attrs(global_attrs) @@ -384,11 +395,11 @@ def _add_tbb_coefficients(self, global_attrs): return if "_1000" in self.filetype_info["file_type"]: - global_attrs["/attr/TBB_Trans_Coefficient_A"] = np.array([1.0] * 6) - global_attrs["/attr/TBB_Trans_Coefficient_B"] = np.array([0.0] * 6) + global_attrs["/attr/TBB_Trans_Coefficient_A"] = np.array([1.0] * 6, dtype=np.float32) + global_attrs["/attr/TBB_Trans_Coefficient_B"] = np.array([0.0] * 6, dtype=np.float32) else: - global_attrs["/attr/TBB_Trans_Coefficient_A"] = np.array([0.0] * 6) - global_attrs["/attr/TBB_Trans_Coefficient_B"] = np.array([0.0] * 6) + global_attrs["/attr/TBB_Trans_Coefficient_A"] = np.array([0.0] * 6, dtype=np.float32) + global_attrs["/attr/TBB_Trans_Coefficient_B"] = np.array([0.0] * 6, dtype=np.float32) @property def _num_cols_for_file_type(self): @@ -512,6 +523,10 @@ def test_all_resolutions(self): _test_multi_resolutions(available_datasets, self.ir_250_bands, resolution, ir_num_results) res = reader.load(self.bands_1000 + self.bands_250) + for i in res: + assert res[i].dtype == np.float32 + assert res[i].values.dtype == np.float32 + if resolution != "250": assert len(res) == len(self.bands_1000 + self.bands_250) else: diff --git a/satpy/tests/reader_tests/test_msi_safe.py b/satpy/tests/reader_tests/test_msi_safe.py index 84828f4ecf..1f2e603ee2 100644 --- a/satpy/tests/reader_tests/test_msi_safe.py +++ b/satpy/tests/reader_tests/test_msi_safe.py @@ -1435,18 +1435,25 @@ def xml_builder(process_level, mask_saturated=True, band_name=None): return xml_fh, xml_tile_fh -def jp2_builder(process_level, band_name, mask_saturated=True): +def jp2_builder(process_level, band_name, mask_saturated=True, test_l1b=False): """Build fake SAFE jp2 image file.""" from satpy.readers.msi_safe import SAFEMSIL1C, SAFEMSITileMDXML filename_info = dict(observation_time=fname_dt, dtile_number=None, band_name=band_name, fmission_id="S2A", process_level=process_level.replace("old", "")) + if test_l1b: + filename_info["process_level"] = "L1B" + xml_fh = xml_builder(process_level, mask_saturated, band_name)[0] tile_xml_fh = mock.create_autospec(SAFEMSITileMDXML)(BytesIO(TILE_XMLS[PROCESS_LEVELS.index(process_level)]), - filename_info, mock.MagicMock()) + filename_info, mock.MagicMock()) tile_xml_fh.start_time.return_value = tilemd_dt + tile_xml_fh.get_dataset.return_value = xr.DataArray([[22.5, 23.8], + [22.5, 24.8]], + dims=["x", "y"]) jp2_fh = SAFEMSIL1C("somefile", filename_info, mock.MagicMock(), xml_fh, tile_xml_fh) return jp2_fh + def make_alt_dataid(**items): """Make a DataID with modified keys.""" from satpy.dataset.dataid import DataID, ModifierTuple, WavelengthRange @@ -1578,26 +1585,26 @@ def setup_method(self): [ ("L1C", True, "B01", ([[[np.nan, -9.99, -9.98, -9.97], [-9.96, 0, 645.34, np.inf]]], - [[[np.nan, -251.584265, -251.332429, -251.080593], - [-250.828757, 0., 
16251.99095, np.inf]]], + [[[0.0, 5.60879825, 11.2175965, 16.8263948,], + [22.435193, 5608.79825, 367566.985, 367572.593]]], [[[np.nan, 1, 2, 3], [4, 1000, 65534, np.inf]]])), ("L1C", False, "B10", ([[[np.nan, -19.99, -19.98, -19.97], [-19.96, -10, 635.34, 635.35]]], - [[[np.nan, -35.465976, -35.448234, -35.430493], - [-35.412751, -17.741859, 1127.211275, 1127.229017]]], + [[[0.0, 1.09348075, 2.1869615, 3.28044225], + [4.373923, 1093.48075, 71660.1675, 71661.2609]]], [[[np.nan, 1, 2, 3], [4, 1000, 65534, 65535]]])), ("oldL1C", True, "B01", ([[[np.nan, 0.01, 0.02, 0.03], [0.04, 10, 655.34, np.inf]]], - [[[np.nan, 0.251836101, 0.503672202, 0.755508303], - [1.00734440, 251.836101, 16503.8271, np.inf]]], + [[[0.0, 5.60879825, 11.2175965, 16.8263948,], + [22.435193, 5608.79825, 367566.985, 367572.593]]], [[[np.nan, 1, 2, 3], [4, 1000, 65534, np.inf]]])), ("L2A", False, "B03", ([[[np.nan, -9.99, -9.98, -9.97], [-9.96, 0, 645.34, 645.35]]], - [[[np.nan, -238.571863, -238.333052, -238.094241], - [-237.855431, 0, 15411.407995, 15411.646806]]], + [[[0.0, 5.25188783, 10.5037757, 15.7556635,], + [21.0075513, 5251.88783, 344177.217, 344182.469]]], [[[np.nan, 1, 2, 3], [4, 1000, 65534, 65535]]])), ]) @@ -1606,10 +1613,11 @@ def test_xml_calibration(self, process_level, mask_saturated, band_name, expecte xml_fh = xml_builder(process_level, mask_saturated)[0] res1 = xml_fh.calibrate_to_reflectances(self.fake_data, band_name) - res2 = xml_fh.calibrate_to_radiances(self.fake_data, band_name) + res2 = xml_fh.calibrate_to_radiances(self.fake_data, 25.6, band_name) res3 = xml_fh._sanitize_data(self.fake_data) results = (res1, res2, res3) + np.testing.assert_allclose(results, expected) @pytest.mark.parametrize(("process_level", "mask_saturated", "band_name", "expected"), @@ -1640,22 +1648,25 @@ class TestSAFEMSIL1C: def setup_method(self): """Set up the test.""" self.fake_data = xr.Dataset({"band_data": xr.DataArray([[[0, 1], [65534, 65535]]], dims=["band", "x", "y"])}) + self.fake_data_l1b = xr.Dataset({"band_data": xr.DataArray([[[1000, 1205.5], [3000.4, 2542.]]], + dims=["band", "x", "y"])}) - @pytest.mark.parametrize(("mask_saturated", "dataset_name", "calibration", "expected"), + @pytest.mark.parametrize(("process_level", "mask_saturated", "dataset_name", "calibration", "expected"), [ - (False, "B01", "reflectance", [[np.nan, -9.99], [645.34, 645.35]]), - (True, "B02", "radiance", [[np.nan, -265.970568], [17181.325973, np.inf]]), - (True, "B03", "counts", [[np.nan, 1], [65534, np.inf]]), - (False, "AOT", "aerosol_thickness", [[np.nan, 0.001], [65.534, 65.535]]), - (True, "WVP", "water_vapor", [[np.nan, 0.001], [65.534, np.inf]]), - (True, "SNOW", "water_vapor", None), + ("L2A", False, "B01", "reflectance", [[np.nan, -9.99], [645.34, 645.35]]), + ("L1C", True, "B02", "radiance", [[np.nan, -59.439197], [3877.121602, np.inf]]), + ("L2A", True, "B03", "counts", [[np.nan, 1], [65534, np.inf]]), + ("L2A", False, "AOT", "aerosol_thickness", [[np.nan, 0.001], [65.534, 65.535]]), + ("L2A", True, "WVP", "water_vapor", [[np.nan, 0.001], [65.534, np.inf]]), + ("L2A", True, "SNOW", "water_vapor", None), ]) - def test_calibration_and_masking(self, mask_saturated, dataset_name, calibration, expected): + def test_calibration_and_masking(self, process_level, mask_saturated, dataset_name, calibration, expected): """Test that saturated is masked with inf when requested and that calibration is performed.""" - jp2_fh = jp2_builder("L2A", dataset_name, mask_saturated) + jp2_fh = jp2_builder(process_level, dataset_name, 
mask_saturated) with mock.patch("xarray.open_dataset", return_value=self.fake_data): - res = jp2_fh.get_dataset(make_alt_dataid(name=dataset_name, calibration=calibration), info=dict()) + res = jp2_fh.get_dataset(make_alt_dataid(name=dataset_name, calibration=calibration, resolution="20"), + info=dict()) if res is not None: np.testing.assert_allclose(res, expected) else: @@ -1677,7 +1688,59 @@ def test_filename_dsname_mismatch(self, process_level, band_name, dataset_name): assert res1 is None assert res2 is None - def test_start_time(self): + def test_start_end_time(self): """Test that the correct start time is returned.""" jp2_fh = jp2_builder("L1C", "B01") assert tilemd_dt == jp2_fh.start_time + assert tilemd_dt == jp2_fh.end_time + + def test_l1b_error(self): + """We can't process L1B data yet, so check an error is raised.""" + with pytest.raises(ValueError, match="Unsupported process level: L1B"): + jp2_builder("L1C", "B01", test_l1b=True) + + + @pytest.mark.parametrize(("st_str", "en_str", "err_str"), + [ + ("", + "", + "Sun-Earth distance in metadata is missing."), + ("", + "", + "No solar irradiance values were found in the metadata."), + ]) + def test_missing_esd(self, st_str, en_str, err_str): + """Test that missing Earth-Sun distance in the metadata is handled correctly.""" + from satpy.readers.msi_safe import SAFEMSIMDXML + + tmp_xml = str(mtd_l1c_xml) + p1 = tmp_xml.find(st_str) + p2 = tmp_xml.find(en_str) + tmp_xml = tmp_xml[:p1+len(st_str)] + tmp_xml[p2:] + + filename_info = dict(observation_time=fname_dt, dtile_number=None, + band_name="B01", fmission_id="S2A", process_level="L1C") + + xml_fh = SAFEMSIMDXML(StringIO(tmp_xml), filename_info, mock.MagicMock()) + + if st_str == "": + with pytest.raises(ValueError, match=err_str): + xml_fh.sun_earth_dist + else: + with pytest.raises(ValueError, match=err_str): + xml_fh.solar_irradiances + + + def test_l1b_calib(self): + """Test that Level-1B calibration can be performed.""" + from satpy.readers.msi_safe import SAFEMSIMDXML + + filename_info = dict(observation_time=fname_dt, dtile_number=None, + band_name="B01", fmission_id="S2A", process_level="L1C") + + xml_fh = SAFEMSIMDXML(StringIO(mtd_l1c_xml), filename_info, mock.MagicMock()) + + res = xml_fh.calibrate_to_radiances_l1b(self.fake_data_l1b, "B01") + np.testing.assert_allclose(res.band_data.data.ravel(), + np.array((0.0, 51.752319, 503.77294, 388.33127)), + rtol=1e-4) diff --git a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py index 04694c145a..7e925d2b2c 100644 --- a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py +++ b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py @@ -33,9 +33,10 @@ ALTITUDE, EQUATOR_RADIUS, POLE_RADIUS, - DatasetWrapper, FiduceoMviriEasyFcdrFileHandler, FiduceoMviriFullFcdrFileHandler, + Interpolator, + preprocess_dataset, ) from satpy.tests.utils import make_dataid @@ -43,6 +44,8 @@ # The following fixtures are not defined in this file, but are used and injected by Pytest: # - request +fill_val = np.uint32(429496729) # FillValue lower than in dataset to be windows-compatible + attrs_exp: dict = { "platform": "MET7", "raw_metadata": {"foo": "bar"}, @@ -61,8 +64,8 @@ {"sun_earth_distance_correction_applied": True, "sun_earth_distance_correction_factor": 1.} ) -acq_time_vis_exp = [np.datetime64("1970-01-01 00:30").astype("datetime64[ns]"), - np.datetime64("1970-01-01 00:30").astype("datetime64[ns]"), +acq_time_vis_exp = [np.datetime64("NaT").astype("datetime64[ns]"), + 
np.datetime64("NaT").astype("datetime64[ns]"), np.datetime64("1970-01-01 02:30").astype("datetime64[ns]"), np.datetime64("1970-01-01 02:30").astype("datetime64[ns]")] vis_counts_exp = xr.DataArray( @@ -79,6 +82,7 @@ }, attrs=attrs_exp ) + vis_rad_exp = xr.DataArray( np.array( [[np.nan, 18.56, 38.28, 58.], @@ -124,7 +128,10 @@ }, attrs=attrs_exp ) -acq_time_ir_wv_exp = [np.datetime64("1970-01-01 00:30").astype("datetime64[ns]"), + +u_struct_refl_exp = u_vis_refl_exp.copy() + +acq_time_ir_wv_exp = [np.datetime64("NaT"), np.datetime64("1970-01-01 02:30").astype("datetime64[ns]")] wv_counts_exp = xr.DataArray( np.array( @@ -250,30 +257,38 @@ ) +@pytest.fixture(name="time_fake_dataset") +def fixture_time_fake_dataset(): + """Create time for fake dataset.""" + time = np.arange(4) * 60 * 60 + time[0] = fill_val + time[1] = fill_val + time = time.reshape(2, 2) + + return time + + @pytest.fixture(name="fake_dataset") -def fixture_fake_dataset(): +def fixture_fake_dataset(time_fake_dataset): """Create fake dataset.""" count_ir = da.linspace(0, 255, 4, dtype=np.uint8).reshape(2, 2) count_wv = da.linspace(0, 255, 4, dtype=np.uint8).reshape(2, 2) count_vis = da.linspace(0, 255, 16, dtype=np.uint8).reshape(4, 4) sza = da.from_array( np.array( - [[45, 90], - [0, 45]], + [[45, 90], [0, 45]], dtype=np.float32 ) ) mask = da.from_array( np.array( - [[0, 0, 0, 0], - [0, 0, 0, 0], - [0, 0, 1, 0], # 1 = "invalid" - [0, 0, 0, 0]], + [[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 1, 0], [0, 0, 0, 0]], # 1 = "invalid" dtype=np.uint8 ) ) - time = np.arange(4) * 60 * 60 * 1e9 - time = time.astype("datetime64[ns]").reshape(2, 2) + + cov = da.from_array([[1, 2], [3, 4]]) + ds = xr.Dataset( data_vars={ "count_vis": (("y", "x"), count_vis), @@ -281,9 +296,10 @@ def fixture_fake_dataset(): "count_ir": (("y_ir_wv", "x_ir_wv"), count_ir), "toa_bidirectional_reflectance_vis": vis_refl_exp / 100, "u_independent_toa_bidirectional_reflectance": u_vis_refl_exp / 100, + "u_structured_toa_bidirectional_reflectance": u_vis_refl_exp / 100, "quality_pixel_bitmask": (("y", "x"), mask), "solar_zenith_angle": (("y_tie", "x_tie"), sza), - "time_ir_wv": (("y_ir_wv", "x_ir_wv"), time), + "time_ir_wv": (("y_ir_wv", "x_ir_wv"), time_fake_dataset), "a_ir": -5.0, "b_ir": 1.0, "bt_a_ir": 10.0, @@ -303,6 +319,9 @@ def fixture_fake_dataset(): "sub_satellite_longitude_end": np.nan, "sub_satellite_latitude_start": np.nan, "sub_satellite_latitude_end": 0.1, + "covariance_spectral_response_function_vis": (("srf_size", "srf_size"), cov), + "channel_correlation_matrix_independent": (("channel", "channel"), cov), + "channel_correlation_matrix_structured": (("channel", "channel"), cov) }, coords={ "y": [1, 2, 3, 4], @@ -310,38 +329,53 @@ def fixture_fake_dataset(): "y_ir_wv": [1, 2], "x_ir_wv": [1, 2], "y_tie": [1, 2], - "x_tie": [1, 2] - + "x_tie": [1, 2], }, attrs={"foo": "bar"} ) ds["count_ir"].attrs["ancillary_variables"] = "a_ir b_ir" ds["count_wv"].attrs["ancillary_variables"] = "a_wv b_wv" + ds["quality_pixel_bitmask"].encoding["chunksizes"] = (2, 2) + ds["time_ir_wv"].attrs["_FillValue"] = fill_val + ds["time_ir_wv"].attrs["add_offset"] = 0 + return ds +@pytest.fixture(name="projection_longitude", params=["57.0"]) +def fixture_projection_longitude(request): + """Get projection longitude as string.""" + return request.param + + +@pytest.fixture(name="fake_file") +def fixture_fake_file(fake_dataset, tmp_path): + """Create test file.""" + filename = tmp_path / "test_mviri_fiduceo.nc" + fake_dataset.to_netcdf(filename) + return filename + + @pytest.fixture( 
name="file_handler", params=[FiduceoMviriEasyFcdrFileHandler, FiduceoMviriFullFcdrFileHandler] ) -def fixture_file_handler(fake_dataset, request): +def fixture_file_handler(fake_file, request, projection_longitude): """Create mocked file handler.""" marker = request.node.get_closest_marker("file_handler_data") mask_bad_quality = True if marker: mask_bad_quality = marker.kwargs["mask_bad_quality"] fh_class = request.param - with mock.patch("satpy.readers.mviri_l1b_fiduceo_nc.xr.open_dataset") as open_dataset: - open_dataset.return_value = fake_dataset - return fh_class( - filename="filename", - filename_info={"platform": "MET7", - "sensor": "MVIRI", - "projection_longitude": "57.0"}, - filetype_info={"foo": "bar"}, - mask_bad_quality=mask_bad_quality - ) + return fh_class( + filename=fake_file, + filename_info={"platform": "MET7", + "sensor": "MVIRI", + "projection_longitude": projection_longitude}, + filetype_info={"foo": "bar"}, + mask_bad_quality=mask_bad_quality + ) @pytest.fixture(name="reader") @@ -359,7 +393,8 @@ def fixture_reader(): class TestFiduceoMviriFileHandlers: """Unit tests for FIDUCEO MVIRI file handlers.""" - def test_init(self, file_handler): + @pytest.mark.parametrize("projection_longitude", ["57.0", "5700"], indirect=True) + def test_init(self, file_handler, projection_longitude): """Test file handler initialization.""" assert file_handler.projection_longitude == 57.0 assert file_handler.mask_bad_quality is True @@ -379,7 +414,8 @@ def test_init(self, file_handler): ("quality_pixel_bitmask", None, 2250, quality_pixel_bitmask_exp), ("solar_zenith_angle", None, 2250, sza_vis_exp), ("solar_zenith_angle", None, 4500, sza_ir_wv_exp), - ("u_independent_toa_bidirectional_reflectance", None, 4500, u_vis_refl_exp) + ("u_independent_toa_bidirectional_reflectance", None, 4500, u_vis_refl_exp), + ("u_structured_toa_bidirectional_reflectance", None, 4500, u_struct_refl_exp) ] ) def test_get_dataset(self, file_handler, name, calibration, resolution, @@ -406,11 +442,8 @@ def test_get_dataset(self, file_handler, name, calibration, resolution, def test_get_dataset_corrupt(self, file_handler): """Test getting datasets with known corruptions.""" - # Time may have different names and satellite position might be missing - file_handler.nc.nc = file_handler.nc.nc.rename( - {"time_ir_wv": "time"} - ) - file_handler.nc.nc = file_handler.nc.nc.drop_vars( + # Satellite position might be missing + file_handler.nc.ds = file_handler.nc.ds.drop_vars( ["sub_satellite_longitude_start"] ) @@ -535,7 +568,7 @@ def test_calib_exceptions(self, file_handler): @pytest.mark.file_handler_data(mask_bad_quality=False) def test_bad_quality_warning(self, file_handler): """Test warning about bad VIS quality.""" - file_handler.nc.nc["quality_pixel_bitmask"] = 2 + file_handler.nc.ds["quality_pixel_bitmask"] = 2 vis = make_dataid(name="VIS", resolution=2250, calibration="reflectance") with pytest.warns(UserWarning): @@ -547,47 +580,118 @@ def test_file_pattern(self, reader): "FIDUCEO_FCDR_L15_MVIRI_MET7-57.0_201701201000_201701201030_FULL_v2.6_fv3.1.nc", "FIDUCEO_FCDR_L15_MVIRI_MET7-57.0_201701201000_201701201030_EASY_v2.6_fv3.1.nc", "FIDUCEO_FCDR_L15_MVIRI_MET7-00.0_201701201000_201701201030_EASY_v2.6_fv3.1.nc", + "MVIRI_FCDR-EASY_L15_MET7-E0000_200607060600_200607060630_0200.nc", + "MVIRI_FCDR-EASY_L15_MET7-E5700_200607060600_200607060630_0200.nc", + "MVIRI_FCDR-FULL_L15_MET7-E0000_200607060600_200607060630_0200.nc", "abcde", ] files = reader.select_files_from_pathnames(filenames) - # only 3 out of 4 above should 
match
-        assert len(files) == 3
+        assert len(files) == 6


-class TestDatasetWrapper:
-    """Unit tests for DatasetWrapper class."""
+class TestDatasetPreprocessor:
+    """Test dataset preprocessing."""

-    def test_reassign_coords(self):
-        """Test reassigning of coordinates.
+    @pytest.fixture(name="dataset")
+    def fixture_dataset(self):
+        """Get dataset before preprocessing.

-        For some reason xarray does not always assign (y, x) coordinates to
-        the high resolution datasets, although they have dimensions (y, x) and
-        coordinates y and x exist. A dataset with these properties seems
-        impossible to create (neither dropping, resetting or deleting
-        coordinates seems to work). Instead use mock as a workaround.
+        - Encoded timestamps including fill values
+        - Duplicate dimension names
+        - x/y coordinates not assigned
         """
-        nc = mock.MagicMock(
-            coords={
-                "y": [.1, .2],
-                "x": [.3, .4]
-            },
-            dims=("y", "x")
-        )
-        nc.__getitem__.return_value = xr.DataArray(
-            [[1, 2],
-             [3, 4]],
-            dims=("y", "x")
+        time = 60*60
+        return xr.Dataset(
+            data_vars={
+                "covariance_spectral_response_function_vis": (("srf_size", "srf_size"), [[1, 2], [3, 4]]),
+                "channel_correlation_matrix_independent": (("channel", "channel"), [[1, 2], [3, 4]]),
+                "channel_correlation_matrix_structured": (("channel", "channel"), [[1, 2], [3, 4]]),
+                "time_ir_wv": (("y", "x"), [[time, fill_val], [time, time]],
+                               {"_FillValue": fill_val, "add_offset": 0})
+            }
         )
-        foo_exp = xr.DataArray(
-            [[1, 2],
-             [3, 4]],
-            dims=("y", "x"),
+
+    @pytest.fixture(name="dataset_exp")
+    def fixture_dataset_exp(self):
+        """Get expected dataset after preprocessing.
+
+        - Timestamps should have been converted to datetime64
+        - Time dimension should have been renamed
+        - Duplicate dimensions should have been removed
+        - x/y coordinates should have been assigned
+        """
+        time_exp = np.datetime64("1970-01-01 01:00").astype("datetime64[ns]")
+        return xr.Dataset(
+            data_vars={
+                "covariance_spectral_response_function_vis": (("srf_size_1", "srf_size_2"), [[1, 2], [3, 4]]),
+                "channel_correlation_matrix_independent": (("channel_1", "channel_2"), [[1, 2], [3, 4]]),
+                "channel_correlation_matrix_structured": (("channel_1", "channel_2"), [[1, 2], [3, 4]]),
+                "time": (("y", "x"), [[time_exp, np.datetime64("NaT")], [time_exp, time_exp]])
+            },
             coords={
-                "y": [.1, .2],
-                "x": [.3, .4]
+                "y": [0, 1],
+                "x": [0, 1]
             }
         )
-        ds = DatasetWrapper(nc)
-        foo = ds["foo"]
-        xr.testing.assert_equal(foo, foo_exp)
+
+    def test_preprocess(self, dataset, dataset_exp):
+        """Test dataset preprocessing."""
+        preprocessed = preprocess_dataset(dataset)
+        xr.testing.assert_allclose(preprocessed, dataset_exp)
+
+
+class TestInterpolator:
+    """Unit tests for Interpolator class."""
+
+    @pytest.fixture(name="time_ir_wv")
+    def fixture_time_ir_wv(self):
+        """Return the IR/WV timestamps to be interpolated."""
+        return xr.DataArray(
+            [
+                [np.datetime64("1970-01-01 01:00"), np.datetime64("1970-01-01 02:00")],
+                [np.datetime64("1970-01-01 03:00"), np.datetime64("1970-01-01 04:00")],
+                [np.datetime64("NaT"), np.datetime64("1970-01-01 06:00")],
+                [np.datetime64("NaT"), np.datetime64("NaT")],
+            ],
+            dims=("y", "x"),
+            coords={"y": [1, 3, 5, 7]}
+        )
+
+    @pytest.fixture(name="acq_time_exp")
+    def fixture_acq_time_exp(self):
+        """Return the expected acquisition times for the VIS and IR/WV grids."""
+        vis = xr.DataArray(
+            [
+                np.datetime64("1970-01-01 01:30"),
+                np.datetime64("1970-01-01 01:30"),
+                np.datetime64("1970-01-01 03:30"),
+                np.datetime64("1970-01-01 03:30"),
+                np.datetime64("1970-01-01 06:00"),
+                np.datetime64("1970-01-01 06:00"),
+                np.datetime64("NaT"),
+                np.datetime64("NaT")
+            ],
+            dims="y",
+            coords={"y": [1, 2, 3, 4, 5, 6, 7, 8]}
+        )
+
+        ir = xr.DataArray(
+            [
+                np.datetime64("1970-01-01 01:30"),
+                np.datetime64("1970-01-01 03:30"),
+                np.datetime64("1970-01-01 06:00"),
+                np.datetime64("NaT"),
+            ],
+            dims="y",
+            coords={"y": [1, 3, 5, 7]}
+        )
+
+        return vis, ir
+
+    def test_interp_acq_time(self, time_ir_wv, acq_time_exp):
+        """Test time interpolation."""
+        res_vis = Interpolator.interp_acq_time(time_ir_wv, target_y=acq_time_exp[0].coords["y"])
+        res_ir = Interpolator.interp_acq_time(time_ir_wv, target_y=acq_time_exp[1].coords["y"])
+
+        xr.testing.assert_allclose(res_vis, acq_time_exp[0])
+        xr.testing.assert_allclose(res_ir, acq_time_exp[1])
diff --git a/satpy/tests/reader_tests/test_oli_tirs_l1_tif.py b/satpy/tests/reader_tests/test_oli_tirs_l1_tif.py
new file mode 100644
index 0000000000..6790ed378f
--- /dev/null
+++ b/satpy/tests/reader_tests/test_oli_tirs_l1_tif.py
@@ -0,0 +1,605 @@
+#!/usr/bin/python
+# Copyright (c) 2018 Satpy developers
+#
+# This file is part of satpy.
+#
+# satpy is free software: you can redistribute it and/or modify it under the
+# terms of the GNU General Public License as published by the Free Software
+# Foundation, either version 3 of the License, or (at your option) any later
+# version.
+#
+# satpy is distributed in the hope that it will be useful, but WITHOUT ANY
+# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
+# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along with
+# satpy. If not, see <http://www.gnu.org/licenses/>.
+"""Unittests for the OLI/TIRS L1 reader."""
+
+import os
+from datetime import datetime, timezone
+
+import dask.array as da
+import numpy as np
+import pytest
+import xarray as xr
+from pyresample.geometry import AreaDefinition
+
+from satpy import Scene
+
+metadata_text = b"""
+
+
+  Image courtesy of the U.S. 
Geological Survey + https://doi.org/10.5066/P975CC9B + LC08_L1GT_026200_20240502_20240513_02_T2 + L1GT + 02 + T2 + GEOTIFF + LC08_L1GT_026200_20240502_20240513_02_T2_B1.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_B2.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_B3.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_B4.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_B5.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_B6.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_B7.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_B8.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_B9.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_B10.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_B11.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_QA_PIXEL.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_QA_RADSAT.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_ANG.txt + LC08_L1GT_026200_20240502_20240513_02_T2_VAA.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_VZA.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_SAA.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_SZA.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_MTL.txt + LC08_L1GT_026200_20240502_20240513_02_T2_MTL.xml + UINT16 + UINT16 + UINT16 + UINT16 + UINT16 + UINT16 + UINT16 + UINT16 + UINT16 + UINT16 + UINT16 + UINT16 + UINT16 + INT16 + INT16 + INT16 + INT16 + + + LANDSAT_8 + OLI_TIRS + 2 + 26 + 200 + NADIR + 26 + 200 + 2024-05-02 + 18:00:24.6148649Z + LGN + 0.85 + -1 + 9 + 9 + N + N + N + Y + N + N + N + N + N + -0.000 + -39.71362413 + -41.46228969 + 1.0079981 + UPPER + FINAL + ESTIMATED + + + UTM + WGS84 + WGS84 + 40 + 15.00 + 30.00 + 30.00 + 200 + 200 + 100 + 100 + 100 + 100 + NORTH_UP + 24.18941 + 58.17657 + 24.15493 + 60.44878 + 22.06522 + 58.15819 + 22.03410 + 60.39501 + 619500.000 + 2675700.000 + 850500.000 + 2675700.000 + 619500.000 + 2440500.000 + 850500.000 + 2440500.000 + + + Image courtesy of the U.S. 
Geological Survey + https://doi.org/10.5066/P975CC9B + 1885324_00001 + LC80262002024123LGN00 + LC08_L1GT_026200_20240502_20240513_02_T2 + L1GT + T2 + GEOTIFF + 2024-05-13T15:32:54Z + LPGS_16.4.0 + LC08_L1GT_026200_20240502_20240513_02_T2_B1.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_B2.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_B3.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_B4.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_B5.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_B6.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_B7.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_B8.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_B9.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_B10.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_B11.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_QA_PIXEL.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_QA_RADSAT.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_ANG.txt + LC08_L1GT_026200_20240502_20240513_02_T2_VAA.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_VZA.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_SAA.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_SZA.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_MTL.txt + LC08_L1GT_026200_20240502_20240513_02_T2_MTL.xml + LC08CPF_20240429_20240630_02.03 + LO8BPF20240502162846_20240502181430.01 + LT8BPF20240502144307_20240510102926.01 + LC08RLUT_20150303_20431231_02_01.h5 + TIRS + GLS2000 + + + 748.04883 + -61.77412 + 766.01111 + -63.25745 + 705.87274 + -58.29120 + 595.23163 + -49.15442 + 364.25208 + -30.08006 + 90.58618 + -7.48064 + 30.53239 + -2.52137 + 673.63843 + -55.62928 + 142.35797 + -11.75597 + 22.00180 + 0.10033 + 22.00180 + 0.10033 + + + 1.210700 + -0.099980 + 1.210700 + -0.099980 + 1.210700 + -0.099980 + 1.210700 + -0.099980 + 1.210700 + -0.099980 + 1.210700 + -0.099980 + 1.210700 + -0.099980 + 1.210700 + -0.099980 + 1.210700 + -0.099980 + + + 65535 + 1 + 65535 + 1 + 65535 + 1 + 65535 + 1 + 65535 + 1 + 65535 + 1 + 65535 + 1 + 65535 + 1 + 65535 + 1 + 65535 + 1 + 65535 + 1 + + + 1.2357E-02 + 1.2654E-02 + 1.1661E-02 + 9.8329E-03 + 6.0172E-03 + 1.4964E-03 + 5.0438E-04 + 1.1128E-02 + 2.3517E-03 + 3.3420E-04 + 3.3420E-04 + -61.78647 + -63.27010 + -58.30286 + -49.16426 + -30.08607 + -7.48213 + -2.52188 + -55.64041 + -11.75832 + 0.10000 + 0.10000 + 2.0000E-05 + 2.0000E-05 + 2.0000E-05 + 2.0000E-05 + 2.0000E-05 + 2.0000E-05 + 2.0000E-05 + 2.0000E-05 + 2.0000E-05 + -0.100000 + -0.100000 + -0.100000 + -0.100000 + -0.100000 + -0.100000 + -0.100000 + -0.100000 + -0.100000 + + + 774.8853 + 1321.0789 + 480.8883 + 1201.1442 + + + UTM + WGS84 + WGS84 + 40 + 15.00 + 30.00 + 30.00 + NORTH_UP + CUBIC_CONVOLUTION + + +""" + + +x_size = 100 +y_size = 100 +date = datetime(2024, 5, 12, tzinfo=timezone.utc) + + +@pytest.fixture(scope="session") +def l1_area(): + """Get the landsat 1 area def.""" + pcs_id = "WGS 84 / UTM zone 40N" + proj4_dict = {"proj": "utm", "zone": 40, "datum": "WGS84", "units": "m", "no_defs": None, "type": "crs"} + area_extent = (619485., 2440485., 850515., 2675715.) 
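+    # The extent is the pixel-corner bounding box implied by the corner-point
+    # coordinates in the metadata above: (619500, 2440500) to (850500, 2675700),
+    # grown outward by half of the 30 m grid cell size (15 m) on each side.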
+    return AreaDefinition("geotiff_area", pcs_id, pcs_id,
+                          proj4_dict, x_size, y_size,
+                          area_extent)
+
+
+@pytest.fixture(scope="session")
+def b4_data():
+    """Get the data for the b4 channel."""
+    return da.random.randint(12000, 16000,
+                             size=(y_size, x_size),
+                             chunks=(50, 50)).astype(np.uint16)
+
+
+@pytest.fixture(scope="session")
+def b11_data():
+    """Get the data for the b11 channel."""
+    return da.random.randint(8000, 14000,
+                             size=(y_size, x_size),
+                             chunks=(50, 50)).astype(np.uint16)
+
+
+@pytest.fixture(scope="session")
+def sza_data():
+    """Get the data for the sza."""
+    return da.random.randint(1, 10000,
+                             size=(y_size, x_size),
+                             chunks=(50, 50)).astype(np.uint16)
+
+
+def create_tif_file(data, name, area, filename):
+    """Create a tif file."""
+    data_array = xr.DataArray(data,
+                              dims=("y", "x"),
+                              attrs={"name": name,
+                                     "start_time": date})
+    scn = Scene()
+    scn["band_data"] = data_array
+    scn["band_data"].attrs["area"] = area
+    scn.save_dataset("band_data", writer="geotiff", enhance=False, fill_value=0,
+                     filename=os.fspath(filename))
+
+
+@pytest.fixture(scope="session")
+def l1_files_path(tmp_path_factory):
+    """Create the path for l1 files."""
+    return tmp_path_factory.mktemp("l1_files")
+
+
+@pytest.fixture(scope="session")
+def b4_file(l1_files_path, b4_data, l1_area):
+    """Create the file for the b4 channel."""
+    data = b4_data
+    filename = l1_files_path / "LC08_L1GT_026200_20240502_20240513_02_T2_B4.TIF"
+    name = "B4"
+    create_tif_file(data, name, l1_area, filename)
+    return os.fspath(filename)
+
+
+@pytest.fixture(scope="session")
+def b11_file(l1_files_path, b11_data, l1_area):
+    """Create the file for the b11 channel."""
+    data = b11_data
+    filename = l1_files_path / "LC08_L1GT_026200_20240502_20240513_02_T2_B11.TIF"
+    name = "B11"
+    create_tif_file(data, name, l1_area, filename)
+    return os.fspath(filename)
+
+
+@pytest.fixture(scope="session")
+def sza_file(l1_files_path, sza_data, l1_area):
+    """Create the file for the sza."""
+    data = sza_data
+    filename = l1_files_path / "LC08_L1GT_026200_20240502_20240513_02_T2_SZA.TIF"
+    name = "sza"
+    create_tif_file(data, name, l1_area, filename)
+    return os.fspath(filename)
+
+
+@pytest.fixture(scope="session")
+def mda_file(l1_files_path):
+    """Create the metadata xml file."""
+    filename = l1_files_path / "LC08_L1GT_026200_20240502_20240513_02_T2_MTL.xml"
+    with open(filename, "wb") as f:
+        f.write(metadata_text)
+    return os.fspath(filename)
+
+
+@pytest.fixture(scope="session")
+def all_files(b4_file, b11_file, mda_file, sza_file):
+    """Return all the files."""
+    return b4_file, b11_file, mda_file, sza_file
+
+
+class TestOLITIRSL1:
+    """Test the OLI/TIRS L1 reader."""
+
+    def setup_method(self):
+        """Set up the filename and filetype info dicts."""
+        self.filename_info = dict(observation_date=datetime(2024, 5, 3),
+                                  platform_type="L",
+                                  process_level_correction="L1TP",
+                                  spacecraft_id="08",
+                                  data_type="C")
+        self.ftype_info = {"file_type": "granule_B4"}
+
+    def test_basicload(self, l1_area, b4_file, b11_file, mda_file):
+        """Test loading a Landsat Scene."""
+        scn = Scene(reader="oli_tirs_l1_tif", filenames=[b4_file,
+                                                         b11_file,
+                                                         mda_file])
+        scn.load(["B4", "B11"])
+
+        # Check dataset is loaded correctly
+        assert scn["B4"].shape == (100, 100)
+        assert scn["B4"].attrs["area"] == l1_area
+        assert scn["B4"].attrs["saturated"]
+        assert scn["B11"].shape == (100, 100)
+        assert scn["B11"].attrs["area"] == l1_area
+        with pytest.raises(KeyError, match="saturated"):
+            assert not scn["B11"].attrs["saturated"]
+
+    def test_ch_startend(self, b4_file, sza_file, mda_file):
+        """Test correct retrieval of start/end times."""
+        scn = Scene(reader="oli_tirs_l1_tif", filenames=[b4_file, sza_file, mda_file])
+        bnds = scn.available_dataset_names()
+        assert bnds == ["B4", "solar_zenith_angle"]
+
+        scn.load(["B4"])
+        assert scn.start_time == datetime(2024, 5, 2, 18, 0, 24, tzinfo=timezone.utc)
+        assert scn.end_time == datetime(2024, 5, 2, 18, 0, 24, tzinfo=timezone.utc)
+
+    def test_loading_gd(self, mda_file, b4_file):
+        """Test loading a Landsat Scene with good channel requests."""
+        from satpy.readers.oli_tirs_l1_tif import OLITIRSCHReader, OLITIRSMDReader
+        good_mda = OLITIRSMDReader(mda_file, self.filename_info, {})
+        rdr = OLITIRSCHReader(b4_file, self.filename_info, self.ftype_info, good_mda)
+
+        # Check case with good file data and load request
+        rdr.get_dataset({"name": "B4", "calibration": "counts"}, {"standard_name": "test_data", "units": "test_units"})
+
+    def test_loading_badfil(self, mda_file, b4_file):
+        """Test loading a Landsat Scene with a channel request that does not match the filename."""
+        from satpy.readers.oli_tirs_l1_tif import OLITIRSCHReader, OLITIRSMDReader
+        good_mda = OLITIRSMDReader(mda_file, self.filename_info, {})
+        rdr = OLITIRSCHReader(b4_file, self.filename_info, self.ftype_info, good_mda)
+
+        ftype = {"standard_name": "test_data", "units": "test_units"}
+        # Check case with request to load channel not matching filename
+        with pytest.raises(ValueError, match="Requested channel B5 does not match the reader channel B4"):
+            rdr.get_dataset({"name": "B5", "calibration": "counts"}, ftype)
+
+    def test_loading_badchan(self, mda_file, b11_file):
+        """Test loading a Landsat Scene with channel requests not available in the granule."""
+        from satpy.readers.oli_tirs_l1_tif import OLITIRSCHReader, OLITIRSMDReader
+        good_mda = OLITIRSMDReader(mda_file, self.filename_info, {})
+        ftype = {"standard_name": "test_data", "units": "test_units"}
+        bad_finfo = self.filename_info.copy()
+        bad_finfo["data_type"] = "T"
+
+        # Check loading invalid channel for data type
+        rdr = OLITIRSCHReader(b11_file, bad_finfo, self.ftype_info, good_mda)
+        with pytest.raises(ValueError, match="Requested channel B4 is not available in this granule"):
+            rdr.get_dataset({"name": "B4", "calibration": "counts"}, ftype)
+
+        bad_finfo["data_type"] = "O"
+        ftype_b11 = self.ftype_info.copy()
+        ftype_b11["file_type"] = "granule_B11"
+        rdr = OLITIRSCHReader(b11_file, bad_finfo, ftype_b11, good_mda)
+        with pytest.raises(ValueError, match="Requested channel B11 is not available in this granule"):
+            rdr.get_dataset({"name": "B11", "calibration": "counts"}, ftype)
+
+    def test_badfiles(self, mda_file, b4_file):
+        """Test loading a Landsat Scene with bad data."""
+        from satpy.readers.oli_tirs_l1_tif import OLITIRSCHReader, OLITIRSMDReader
+        bad_fname_info = self.filename_info.copy()
+        bad_fname_info["platform_type"] = "B"
+
+        ftype = {"standard_name": "test_data", "units": "test_units"}
+
+        # Test that metadata reader initialises with correct filename
+        good_mda = OLITIRSMDReader(mda_file, self.filename_info, ftype)
+
+        # Check metadata reader fails if platform type is wrong
+        with pytest.raises(ValueError, match="This reader only supports Landsat data"):
+            OLITIRSMDReader(mda_file, bad_fname_info, ftype)
+
+        # Test that channel reader initialises with correct filename
+        OLITIRSCHReader(b4_file, self.filename_info, self.ftype_info, good_mda)
+
+        # Check channel reader fails if platform type is wrong
+        with pytest.raises(ValueError, match="This reader only supports Landsat data"):
+            OLITIRSCHReader(b4_file, bad_fname_info, self.ftype_info, good_mda)
+        bad_ftype_info = self.ftype_info.copy()
+        bad_ftype_info["file_type"] = "granule-b05"
+        with pytest.raises(ValueError, match="Invalid file type: granule-b05"):
+            OLITIRSCHReader(b4_file, self.filename_info, bad_ftype_info, good_mda)
+
+    def test_calibration_counts(self, all_files, b4_data, b11_data):
+        """Test counts calibration mode for the reader."""
+        from satpy import Scene
+
+        scn = Scene(reader="oli_tirs_l1_tif", filenames=all_files)
+        scn.load(["B4", "B11"], calibration="counts")
+        np.testing.assert_allclose(scn["B4"].values, b4_data)
+        np.testing.assert_allclose(scn["B11"].values, b11_data)
+        assert scn["B4"].attrs["units"] == "1"
+        assert scn["B11"].attrs["units"] == "1"
+        assert scn["B4"].attrs["standard_name"] == "counts"
+        assert scn["B11"].attrs["standard_name"] == "counts"
+
+    def test_calibration_radiance(self, all_files, b4_data, b11_data):
+        """Test radiance calibration mode for the reader."""
+        from satpy import Scene
+        exp_b04 = (b4_data * 0.0098329 - 49.16426).astype(np.float32)
+        exp_b11 = (b11_data * 0.0003342 + 0.100000).astype(np.float32)
+
+        scn = Scene(reader="oli_tirs_l1_tif", filenames=all_files)
+        scn.load(["B4", "B11"], calibration="radiance")
+        assert scn["B4"].attrs["units"] == "W m-2 um-1 sr-1"
+        assert scn["B11"].attrs["units"] == "W m-2 um-1 sr-1"
+        assert scn["B4"].attrs["standard_name"] == "toa_outgoing_radiance_per_unit_wavelength"
+        assert scn["B11"].attrs["standard_name"] == "toa_outgoing_radiance_per_unit_wavelength"
+        np.testing.assert_allclose(scn["B4"].values, exp_b04, rtol=1e-4)
+        np.testing.assert_allclose(scn["B11"].values, exp_b11, rtol=1e-4)
+
+    def test_calibration_highlevel(self, all_files, b4_data, b11_data):
+        """Test high level calibration modes for the reader."""
+        from satpy import Scene
+        exp_b04 = (b4_data * 2e-05 - 0.1).astype(np.float32) * 100
+        exp_b11 = (b11_data * 0.0003342 + 0.100000)
+        exp_b11 = (1201.1442 / np.log((480.8883 / exp_b11) + 1)).astype(np.float32)
+        scn = Scene(reader="oli_tirs_l1_tif", filenames=all_files)
+        scn.load(["B4", "B11"])
+
+        assert scn["B4"].attrs["units"] == "%"
+        assert scn["B11"].attrs["units"] == "K"
+        assert scn["B4"].attrs["standard_name"] == "toa_bidirectional_reflectance"
+        assert scn["B11"].attrs["standard_name"] == "brightness_temperature"
+        np.testing.assert_allclose(np.array(scn["B4"].values), np.array(exp_b04), rtol=1e-4)
+        np.testing.assert_allclose(scn["B11"].values, exp_b11, rtol=1e-6)
+
+    def test_angles(self, all_files, sza_data):
+        """Test that the solar zenith angle is read correctly."""
+        from satpy import Scene
+
+        # Check angles are read and rescaled correctly
+        scn = Scene(reader="oli_tirs_l1_tif", filenames=all_files)
+        scn.load(["solar_zenith_angle"])
+        assert scn["solar_zenith_angle"].attrs["units"] == "degrees"
+        assert scn["solar_zenith_angle"].attrs["standard_name"] == "solar_zenith_angle"
+        np.testing.assert_allclose(scn["solar_zenith_angle"].values * 100,
+                                   np.array(sza_data),
+                                   atol=0.01,
+                                   rtol=1e-3)
+
+    def test_metadata(self, mda_file):
+        """Check that metadata values are loaded correctly."""
+        from satpy.readers.oli_tirs_l1_tif import OLITIRSMDReader
+        mda = OLITIRSMDReader(mda_file, self.filename_info, {})
+
+        cal_test_dict = {"B1": (0.012357, -61.78647, 2e-05, -0.1),
+                         "B5": (0.0060172, -30.08607, 2e-05, -0.1),
+                         "B10": (0.0003342, 0.1, 774.8853, 1321.0789)}
+
+        assert mda.platform_name == "Landsat-8"
+        assert mda.earth_sun_distance() == 1.0079981
+        assert mda.band_calibration["B1"] == cal_test_dict["B1"]
+        assert
mda.band_calibration["B5"] == cal_test_dict["B5"] + assert mda.band_calibration["B10"] == cal_test_dict["B10"] + assert not mda.band_saturation["B1"] + assert mda.band_saturation["B4"] + assert not mda.band_saturation["B5"] + with pytest.raises(KeyError): + mda.band_saturation["B10"] + + def test_area_def(self, mda_file): + """Check we can get the area defs properly.""" + from satpy.readers.oli_tirs_l1_tif import OLITIRSMDReader + mda = OLITIRSMDReader(mda_file, self.filename_info, {}) + + standard_area = mda.build_area_def("B1") + pan_area = mda.build_area_def("B8") + + assert standard_area.area_extent == (619485.0, 2440485.0, 850515.0, 2675715.0) + assert pan_area.area_extent == (619492.5, 2440492.5, 850507.5, 2675707.5) diff --git a/satpy/tests/reader_tests/test_sar_c_safe.py b/satpy/tests/reader_tests/test_sar_c_safe.py index 9e24c00c4e..0d88190a5d 100644 --- a/satpy/tests/reader_tests/test_sar_c_safe.py +++ b/satpy/tests/reader_tests/test_sar_c_safe.py @@ -26,10 +26,12 @@ import pytest import yaml -from satpy._config import PACKAGE_CONFIG_PATH -from satpy.dataset import DataQuery -from satpy.dataset.dataid import DataID -from satpy.readers.sar_c_safe import Calibrator, Denoiser, SAFEXMLAnnotation +geotiepoints = pytest.importorskip("geotiepoints", "1.7.5") + +from satpy._config import PACKAGE_CONFIG_PATH # noqa: E402 +from satpy.dataset import DataQuery # noqa: E402 +from satpy.dataset.dataid import DataID # noqa: E402 +from satpy.readers.sar_c_safe import Calibrator, Denoiser, SAFEXMLAnnotation # noqa: E402 rasterio = pytest.importorskip("rasterio") @@ -174,102 +176,26 @@ def measurement_filehandler(measurement_file, noise_filehandler, calibration_fil -expected_longitudes = np.array([[3.79492915e-16, 5.91666667e-01, 9.09722222e-01, - 1.00000000e+00, 9.08333333e-01, 6.80555556e-01, - 3.62500000e-01, 8.32667268e-17, -3.61111111e-01, - -6.75000000e-01, -8.95833333e-01, -9.77777778e-01, - -8.75000000e-01, -5.41666667e-01, 6.80555556e-02, - 1.00000000e+00], - [1.19166667e+00, 1.32437500e+00, 1.36941964e+00, - 1.34166667e+00, 1.25598214e+00, 1.12723214e+00, - 9.70282738e-01, 8.00000000e-01, 6.31250000e-01, - 4.78898810e-01, 3.57812500e-01, 2.82857143e-01, - 2.68898810e-01, 3.30803571e-01, 4.83437500e-01, - 7.41666667e-01], - [1.82638889e+00, 1.77596726e+00, 1.72667765e+00, - 1.67757937e+00, 1.62773172e+00, 1.57619402e+00, - 1.52202558e+00, 1.46428571e+00, 1.40203373e+00, - 1.33432894e+00, 1.26023065e+00, 1.17879819e+00, - 1.08909084e+00, 9.90167942e-01, 8.81088790e-01, - 7.60912698e-01], - [2.00000000e+00, 1.99166667e+00, 1.99305556e+00, - 2.00000000e+00, 2.00833333e+00, 2.01388889e+00, - 2.01250000e+00, 2.00000000e+00, 1.97222222e+00, - 1.92500000e+00, 1.85416667e+00, 1.75555556e+00, - 1.62500000e+00, 1.45833333e+00, 1.25138889e+00, - 1.00000000e+00], - [1.80833333e+00, 2.01669643e+00, 2.18011267e+00, - 2.30119048e+00, 2.38253827e+00, 2.42676446e+00, - 2.43647747e+00, 2.41428571e+00, 2.36279762e+00, - 2.28462160e+00, 2.18236607e+00, 2.05863946e+00, - 1.91605017e+00, 1.75720663e+00, 1.58471726e+00, - 1.40119048e+00], - [1.34722222e+00, 1.89627976e+00, 2.29940830e+00, - 2.57341270e+00, 2.73509779e+00, 2.80126842e+00, - 2.78872945e+00, 2.71428571e+00, 2.59474206e+00, - 2.44690334e+00, 2.28757440e+00, 2.13356009e+00, - 2.00166525e+00, 1.90869473e+00, 1.87145337e+00, - 1.90674603e+00], - [7.12500000e-01, 1.67563988e+00, 2.36250177e+00, - 2.80892857e+00, 3.05076318e+00, 3.12384850e+00, - 3.06402742e+00, 2.90714286e+00, 2.68903770e+00, - 2.44555485e+00, 2.21253720e+00, 2.02582766e+00, - 
1.92126913e+00, 1.93470451e+00, 2.10197669e+00, - 2.45892857e+00], - [5.55111512e-16, 1.40000000e+00, 2.38095238e+00, - 3.00000000e+00, 3.31428571e+00, 3.38095238e+00, - 3.25714286e+00, 3.00000000e+00, 2.66666667e+00, - 2.31428571e+00, 2.00000000e+00, 1.78095238e+00, - 1.71428571e+00, 1.85714286e+00, 2.26666667e+00, - 3.00000000e+00], - [-6.94444444e-01, 1.11458333e+00, 2.36631944e+00, - 3.13888889e+00, 3.51041667e+00, 3.55902778e+00, - 3.36284722e+00, 3.00000000e+00, 2.54861111e+00, - 2.08680556e+00, 1.69270833e+00, 1.44444444e+00, - 1.42013889e+00, 1.69791667e+00, 2.35590278e+00, - 3.47222222e+00], - [-1.27500000e+00, 8.64613095e-01, 2.33016227e+00, - 3.21785714e+00, 3.62390731e+00, 3.64452239e+00, - 3.37591199e+00, 2.91428571e+00, 2.35585317e+00, - 1.79682398e+00, 1.33340774e+00, 1.06181406e+00, - 1.07825255e+00, 1.47893282e+00, 2.36006448e+00, - 3.81785714e+00], - [-1.64583333e+00, 6.95312500e-01, 2.28404018e+00, - 3.22916667e+00, 3.63950893e+00, 3.62388393e+00, - 3.29110863e+00, 2.75000000e+00, 2.10937500e+00, - 1.47805060e+00, 9.64843750e-01, 6.78571429e-01, - 7.28050595e-01, 1.22209821e+00, 2.26953125e+00, - 3.97916667e+00], - [-1.71111111e+00, 6.51904762e-01, 2.23951247e+00, - 3.16507937e+00, 3.54197279e+00, 3.48356009e+00, - 3.10320862e+00, 2.51428571e+00, 1.83015873e+00, - 1.16419501e+00, 6.29761905e-01, 3.40226757e-01, - 4.08956916e-01, 9.49319728e-01, 2.07468254e+00, - 3.89841270e+00], - [-1.37500000e+00, 7.79613095e-01, 2.20813846e+00, - 3.01785714e+00, 3.31605017e+00, 3.20999858e+00, - 2.80698342e+00, 2.21428571e+00, 1.53918651e+00, - 8.88966837e-01, 3.70907738e-01, 9.22902494e-02, - 1.60395408e-01, 6.82504252e-01, 1.76589782e+00, - 3.51785714e+00], - [-5.41666667e-01, 1.12366071e+00, 2.20147747e+00, - 2.77976190e+00, 2.94649235e+00, 2.78964711e+00, - 2.39720451e+00, 1.85714286e+00, 1.25744048e+00, - 6.86075680e-01, 2.31026786e-01, -1.97278912e-02, - 2.17899660e-02, 4.43558673e-01, 1.33355655e+00, - 2.77976190e+00], - [8.84722222e-01, 1.72927083e+00, 2.23108879e+00, - 2.44305556e+00, 2.41805060e+00, 2.20895337e+00, - 1.86864335e+00, 1.45000000e+00, 1.00590278e+00, - 5.89231151e-01, 2.52864583e-01, 4.96825397e-02, - 3.25644841e-02, 2.54389881e-01, 7.68038194e-01, - 1.62638889e+00], - [3.00000000e+00, 2.64166667e+00, 2.30853175e+00, - 2.00000000e+00, 1.71547619e+00, 1.45436508e+00, - 1.21607143e+00, 1.00000000e+00, 8.05555556e-01, - 6.32142857e-01, 4.79166667e-01, 3.46031746e-01, - 2.32142857e-01, 1.36904762e-01, 5.97222222e-02, - 0.00000000e+00]]) +expected_longitudes = np.array([[-0., 0.54230055, 0.87563228, 1., 0.91541479, + 0.62184442, 0.26733714, -0., -0.18015287, -0.27312165], + [1.0883956 , 1.25662247, 1.34380634, 1.34995884, 1.2750712 , + 1.11911385, 0.9390845 , 0.79202785, 0.67796547, 0.59691204], + [1.75505196, 1.74123364, 1.71731849, 1.68330292, 1.63918145, + 1.58494674, 1.52376394, 1.45880655, 1.39007883, 1.31758574], + [2., 1.99615628, 1.99615609, 2., 2.00768917, + 2.0192253 , 2.02115051, 2. 
, 1.95576762, 1.88845002], + [1.82332931, 2.02143515, 2.18032829, 2.30002491, 2.38053511, + 2.4218612 , 2.43113105, 2.41546985, 2.37487052, 2.3093278 ], + [1.22479001, 1.81701462, 2.26984318, 2.58335874, 2.75765719, + 2.79279164, 2.75366973, 2.70519769, 2.64737395, 2.58019762], + [0.51375081, 1.53781389, 2.3082042 , 2.82500549, 3.0885147 , + 3.09893859, 2.98922885, 2.89232293, 2.8082302 , 2.7369586 ], + [0., 1.33889733, 2.33891557, 3., 3.32266837, + 3.30731797, 3.1383157 , 3., 2.8923933 , 2.81551297], + [-0.31638932, 1.22031759, 2.36197571, 3.10836734, 3.46019271, + 3.41800603, 3.20098223, 3.02826595, 2.89989242, 2.81588745], + [-0.43541441, 1.18211505, 2.37738272, 3.1501186 , 3.50112948, + 3.43104055, 3.17724665, 2.97712796, 2.83072911, 2.73808164]]) class Calibration(Enum): @@ -289,23 +215,28 @@ def test_read_calibrated_natural(self, measurement_filehandler): calibration = Calibration.sigma_nought xarr = measurement_filehandler.get_dataset(DataQuery(name="measurement", polarization="vv", calibration=calibration, quantity="natural"), info=dict()) - expected = np.array([[np.nan, 0.02707529], [2.55858416, 3.27611055]]) + expected = np.array([[np.nan, 0.02707529], [2.55858416, 3.27611055]], dtype=np.float32) np.testing.assert_allclose(xarr.values[:2, :2], expected, rtol=2e-7) + assert xarr.dtype == np.float32 + assert xarr.compute().dtype == np.float32 def test_read_calibrated_dB(self, measurement_filehandler): """Test the calibration routines.""" calibration = Calibration.sigma_nought xarr = measurement_filehandler.get_dataset(DataQuery(name="measurement", polarization="vv", calibration=calibration, quantity="dB"), info=dict()) - expected = np.array([[np.nan, -15.674268], [4.079997, 5.153585]]) - np.testing.assert_allclose(xarr.values[:2, :2], expected) + expected = np.array([[np.nan, -15.674268], [4.079997, 5.153585]], dtype=np.float32) + np.testing.assert_allclose(xarr.values[:2, :2], expected, rtol=1e-6) + assert xarr.dtype == np.float32 + assert xarr.compute().dtype == np.float32 def test_read_lon_lats(self, measurement_filehandler): """Test reading lons and lats.""" query = DataQuery(name="longitude", polarization="vv") xarr = measurement_filehandler.get_dataset(query, info=dict()) - expected = expected_longitudes - np.testing.assert_allclose(xarr.values, expected[:10, :10], atol=1e-3) + np.testing.assert_allclose(xarr.values, expected_longitudes) + assert xarr.dtype == np.float64 + assert xarr.compute().dtype == np.float64 annotation_xml = b""" @@ -777,6 +708,8 @@ def test_get_noise_dataset(self, noise_filehandler): query = DataQuery(name="noise", polarization="vv") res = noise_filehandler.get_dataset(query, {}) np.testing.assert_allclose(res, self.expected_azimuth_noise * self.expected_range_noise) + assert res.dtype == np.float32 + assert res.compute().dtype == np.float32 def test_get_noise_dataset_has_right_chunk_size(self, noise_filehandler): """Test using get_dataset for the noise has right chunk size in result.""" @@ -799,12 +732,16 @@ def test_dn_calibration_array(self, calibration_filehandler): expected_dn = np.ones((10, 10)) * 1087 res = calibration_filehandler.get_calibration(Calibration.dn, chunks=5) np.testing.assert_allclose(res, expected_dn) + assert res.dtype == np.float32 + assert res.compute().dtype == np.float32 def test_beta_calibration_array(self, calibration_filehandler): """Test reading the beta calibration array.""" expected_beta = np.ones((10, 10)) * 1087 res = calibration_filehandler.get_calibration(Calibration.beta_nought, chunks=5) 
np.testing.assert_allclose(res, expected_beta) + assert res.dtype == np.float32 + assert res.compute().dtype == np.float32 def test_sigma_calibration_array(self, calibration_filehandler): """Test reading the sigma calibration array.""" @@ -812,18 +749,23 @@ def test_sigma_calibration_array(self, calibration_filehandler): 1277.968, 1277.968, 1277.968, 1277.968]]) * np.ones((10, 1)) res = calibration_filehandler.get_calibration(Calibration.sigma_nought, chunks=5) np.testing.assert_allclose(res, expected_sigma) - + assert res.dtype == np.float32 + assert res.compute().dtype == np.float32 def test_gamma_calibration_array(self, calibration_filehandler): """Test reading the gamma calibration array.""" res = calibration_filehandler.get_calibration(Calibration.gamma, chunks=5) np.testing.assert_allclose(res, self.expected_gamma) + assert res.dtype == np.float32 + assert res.compute().dtype == np.float32 def test_get_calibration_dataset(self, calibration_filehandler): """Test using get_dataset for the calibration.""" query = DataQuery(name="gamma", polarization="vv") res = calibration_filehandler.get_dataset(query, {}) np.testing.assert_allclose(res, self.expected_gamma) + assert res.dtype == np.float32 + assert res.compute().dtype == np.float32 def test_get_calibration_dataset_has_right_chunk_size(self, calibration_filehandler): """Test using get_dataset for the calibration yields array with right chunksize.""" @@ -837,6 +779,7 @@ def test_get_calibration_constant(self, calibration_filehandler): query = DataQuery(name="calibration_constant", polarization="vv") res = calibration_filehandler.get_dataset(query, {}) assert res == 1 + assert type(res) is np.float32 def test_incidence_angle(annotation_filehandler): @@ -844,6 +787,8 @@ def test_incidence_angle(annotation_filehandler): query = DataQuery(name="incidence_angle", polarization="vv") res = annotation_filehandler.get_dataset(query, {}) np.testing.assert_allclose(res, 19.18318046) + assert res.dtype == np.float32 + assert res.compute().dtype == np.float32 def test_reading_from_reader(measurement_file, calibration_file, noise_file, annotation_file): @@ -860,9 +805,11 @@ def test_reading_from_reader(measurement_file, calibration_file, noise_file, ann query = DataID(reader._id_keys, **query.to_dict()) dataset_dict = reader.load([query]) array = dataset_dict["measurement"] - np.testing.assert_allclose(array.attrs["area"].lons, expected_longitudes[:10, :10], atol=1e-3) + np.testing.assert_allclose(array.attrs["area"].lons, expected_longitudes) expected_db = np.array([[np.nan, -15.674268], [4.079997, 5.153585]]) - np.testing.assert_allclose(array.values[:2, :2], expected_db) + np.testing.assert_allclose(array.values[:2, :2], expected_db, rtol=1e-6) + assert array.dtype == np.float32 + assert array.compute().dtype == np.float32 def test_filename_filtering_from_reader(measurement_file, calibration_file, noise_file, annotation_file, tmp_path): @@ -889,7 +836,7 @@ def test_filename_filtering_from_reader(measurement_file, calibration_file, nois pytest.fail(str(err)) -def test_swath_def_contains_gcps(measurement_file, calibration_file, noise_file, annotation_file): +def test_swath_def_contains_gcps_and_bounding_box(measurement_file, calibration_file, noise_file, annotation_file): """Test reading using the reader defined in the config.""" with open(Path(PACKAGE_CONFIG_PATH) / "readers" / "sar-c_safe.yaml") as fd: config = yaml.load(fd, Loader=yaml.UnsafeLoader) @@ -904,3 +851,4 @@ def test_swath_def_contains_gcps(measurement_file, calibration_file, 
noise_file, dataset_dict = reader.load([query]) array = dataset_dict["measurement"] assert array.attrs["area"].attrs["gcps"] is not None + assert array.attrs["area"].attrs["bounding_box"] is not None diff --git a/satpy/tests/reader_tests/test_seviri_l1b_native.py b/satpy/tests/reader_tests/test_seviri_l1b_native.py index e7a5d0f5f3..5c6a86596e 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_native.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_native.py @@ -1314,6 +1314,8 @@ def test_read_physical_seviri_nat_file(full_path): with warnings.catch_warnings(): warnings.filterwarnings("ignore", category=UserWarning) scene.load(["VIS006"]) + assert scene["VIS006"].dtype == np.float32 + assert scene["VIS006"].values.dtype == np.float32 assert scene["VIS006"].shape == (3712, 3712) assert isinstance(scene["VIS006"], xr.core.dataarray.DataArray) diff --git a/satpy/tests/reader_tests/test_seviri_l2_grib.py b/satpy/tests/reader_tests/test_seviri_l2_grib.py deleted file mode 100644 index d3b40d6caa..0000000000 --- a/satpy/tests/reader_tests/test_seviri_l2_grib.py +++ /dev/null @@ -1,182 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2019 Satpy developers -# -# satpy is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# satpy is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with satpy. If not, see . 
- -"""SEVIRI L2 GRIB-reader test package.""" - -import datetime -import sys -import unittest -from unittest import mock - -import numpy as np - -from satpy.tests.utils import make_dataid - -# Dictionary to be used as fake GRIB message -FAKE_MESSAGE = { - "longitudeOfSubSatellitePointInDegrees": 9.5, - "dataDate": 20191020, - "dataTime": 1745, - "Nx": 1000, - "Ny": 1200, - "earthMajorAxis": 6400., - "earthMinorAxis": 6300., - "NrInRadiusOfEarth": 6., - "XpInGridLengths": 500, - "parameterNumber": 30, - "missingValue": 9999, -} - -# List to be used as fake GID source -FAKE_GID = [0, 1, 2, 3, None] - - -class Test_SeviriL2GribFileHandler(unittest.TestCase): - """Test the SeviriL2GribFileHandler reader.""" - - @mock.patch("satpy.readers.seviri_l2_grib.ec") - def setUp(self, ec_): - """Set up the test by creating a mocked eccodes library.""" - fake_gid_generator = (i for i in FAKE_GID) - ec_.codes_grib_new_from_file.side_effect = lambda fh: next(fake_gid_generator) - ec_.codes_get.side_effect = lambda gid, key: FAKE_MESSAGE[key] - ec_.codes_get_values.return_value = np.ones(1000*1200) - self.ec_ = ec_ - - @unittest.skipIf(sys.platform.startswith("win"), "'eccodes' not supported on Windows") - @mock.patch("satpy.readers.seviri_l2_grib.xr") - @mock.patch("satpy.readers.seviri_l2_grib.da") - def test_data_reading(self, da_, xr_): - """Test the reading of data from the product.""" - from satpy.readers.seviri_l2_grib import REPEAT_CYCLE_DURATION, SeviriL2GribFileHandler - from satpy.utils import get_legacy_chunk_size - CHUNK_SIZE = get_legacy_chunk_size() - - with mock.patch("builtins.open", mock.mock_open()) as mock_file: - with mock.patch("satpy.readers.seviri_l2_grib.ec", self.ec_): - self.reader = SeviriL2GribFileHandler( - filename="test.grib", - filename_info={ - "spacecraft": "MET11", - "start_time": datetime.datetime(year=2020, month=10, day=20, - hour=19, minute=45, second=0) - }, - filetype_info={} - ) - - dataset_id = make_dataid(name="dummmy", resolution=3000) - - # Checks that the codes_grib_multi_support_on function has been called - self.ec_.codes_grib_multi_support_on.assert_called() - - # Restarts the id generator and clears the call history - fake_gid_generator = (i for i in FAKE_GID) - self.ec_.codes_grib_new_from_file.side_effect = lambda fh: next(fake_gid_generator) - self.ec_.codes_grib_new_from_file.reset_mock() - self.ec_.codes_release.reset_mock() - - # Checks the correct execution of the get_dataset function with a valid parameter_number - valid_dataset = self.reader.get_dataset(dataset_id, {"parameter_number": 30}) - # Checks the correct file open call - mock_file.assert_called_with("test.grib", "rb") - # Checks that the dataset has been created as a DataArray object - assert valid_dataset._extract_mock_name() == "xr.DataArray()" - # Checks that codes_release has been called after each codes_grib_new_from_file call - # (except after the last one which has returned a None) - assert self.ec_.codes_grib_new_from_file.call_count == self.ec_.codes_release.call_count + 1 - - # Restarts the id generator and clears the call history - fake_gid_generator = (i for i in FAKE_GID) - self.ec_.codes_grib_new_from_file.side_effect = lambda fh: next(fake_gid_generator) - self.ec_.codes_grib_new_from_file.reset_mock() - self.ec_.codes_release.reset_mock() - - # Checks the correct execution of the get_dataset function with an invalid parameter_number - invalid_dataset = self.reader.get_dataset(dataset_id, {"parameter_number": 50}) - # Checks that the function returns None - assert 
invalid_dataset is None - # Checks that codes_release has been called after each codes_grib_new_from_file call - # (except after the last one which has returned a None) - assert self.ec_.codes_grib_new_from_file.call_count == self.ec_.codes_release.call_count + 1 - - # Checks the basic data reading - assert REPEAT_CYCLE_DURATION == 15 - - # Checks the correct execution of the _get_global_attributes and _get_metadata_from_msg functions - attributes = self.reader._get_attributes() - expected_attributes = { - "orbital_parameters": { - "projection_longitude": 9.5 - }, - "sensor": "seviri", - "platform_name": "Meteosat-11" - } - assert attributes == expected_attributes - - # Checks the reading of an array from the message - self.reader._get_xarray_from_msg(0) - - # Checks that dask.array has been called with the correct arguments - name, args, kwargs = da_.mock_calls[0] - assert np.all(args[0] == np.ones((1200, 1000))) - assert args[1] == CHUNK_SIZE - - # Checks that xarray.DataArray has been called with the correct arguments - name, args, kwargs = xr_.mock_calls[0] - assert kwargs["dims"] == ("y", "x") - - # Checks the correct execution of the _get_proj_area function - pdict, area_dict = self.reader._get_proj_area(0) - - expected_pdict = { - "a": 6400000., - "b": 6300000., - "h": 32000000., - "ssp_lon": 9.5, - "nlines": 1000, - "ncols": 1200, - "a_name": "msg_seviri_rss_3km", - "a_desc": "MSG SEVIRI Rapid Scanning Service area definition with 3 km resolution", - "p_id": "", - } - assert pdict == expected_pdict - expected_area_dict = { - "center_point": 500, - "north": 1200, - "east": 1, - "west": 1000, - "south": 1, - } - assert area_dict == expected_area_dict - - # Checks the correct execution of the get_area_def function - with mock.patch("satpy.readers.seviri_l2_grib.calculate_area_extent", - mock.Mock(name="calculate_area_extent")) as cae: - with mock.patch("satpy.readers.seviri_l2_grib.get_area_definition", mock.Mock()) as gad: - dataset_id = make_dataid(name="dummmy", resolution=400.) 
- self.reader.get_area_def(dataset_id) - # Asserts that calculate_area_extent has been called with the correct arguments - expected_args = ({"center_point": 500, "east": 1, "west": 1000, "south": 1, "north": 1200, - "column_step": 400., "line_step": 400.},) - name, args, kwargs = cae.mock_calls[0] - assert args == expected_args - # Asserts that get_area_definition has been called with the correct arguments - name, args, kwargs = gad.mock_calls[0] - assert args[0] == expected_pdict - # The second argument must be the return result of calculate_area_extent - assert args[1]._extract_mock_name() == "calculate_area_extent()" diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index 8e69df5313..447d43938d 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -468,7 +468,8 @@ def test_availability_veg_idx(self, data_file, exp_available): [ ("npp", "Suomi-NPP"), ("JPSS-1", "NOAA-20"), - ("J01", "NOAA-20") + ("J01", "NOAA-20"), + ("n21", "NOAA-21") ]) def test_get_platformname(self, surface_reflectance_file, filename_platform, exp_shortname): """Test finding start and end times of granules.""" diff --git a/satpy/tests/scene_tests/test_conversions.py b/satpy/tests/scene_tests/test_conversions.py index 11db6b1252..96a19b7ed7 100644 --- a/satpy/tests/scene_tests/test_conversions.py +++ b/satpy/tests/scene_tests/test_conversions.py @@ -17,7 +17,9 @@ """Unit tests for Scene conversion functionality.""" import datetime as dt +from datetime import datetime +import numpy as np import pytest import xarray as xr from dask import array as da @@ -51,14 +53,6 @@ def test_serialization_with_readers_and_data_arr(self): class TestSceneConversions: """Test Scene conversion to geoviews, xarray, etc.""" - def test_to_xarray_dataset_with_empty_scene(self): - """Test converting empty Scene to xarray dataset.""" - scn = Scene() - xrds = scn.to_xarray_dataset() - assert isinstance(xrds, xr.Dataset) - assert len(xrds.variables) == 0 - assert len(xrds.coords) == 0 - def test_geoviews_basic_with_area(self): """Test converting a Scene to geoviews with an AreaDefinition.""" from pyresample.geometry import AreaDefinition @@ -164,6 +158,43 @@ def single_area_scn(self): scn["var1"] = data_array return scn + def test_to_xarray_dataset_with_conflicting_variables(self): + """Test converting Scene with DataArrays with conflicting variables. + + E.g. 
"acq_time" in the seviri_l1b_nc reader + """ + from pyresample.geometry import AreaDefinition + area = AreaDefinition("test", "test", "test", + {"proj": "geos", "lon_0": -95.5, "h": 35786023.0}, + 2, 2, [-200, -200, 200, 200]) + scn = Scene() + + acq_time_1 = ("y", [np.datetime64("1958-01-02 00:00:01"), + np.datetime64("1958-01-02 00:00:02")]) + ds = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=("y", "x"), + attrs={"start_time": datetime(2018, 1, 1), "area": area}) + ds["acq_time"] = acq_time_1 + + scn["ds1"] = ds + + acq_time_2 = ("y", [np.datetime64("1958-02-02 00:00:01"), + np.datetime64("1958-02-02 00:00:02")]) + ds2 = ds.copy() + ds2["acq_time"] = acq_time_2 + + scn["ds2"] = ds2 + + # drop case (compat="minimal") + xrds = scn.to_xarray_dataset() + assert isinstance(xrds, xr.Dataset) + assert "acq_time" not in xrds.coords + + # override: pick variable from first dataset + xrds = scn.to_xarray_dataset(datasets=["ds1", "ds2"], compat="override") + assert isinstance(xrds, xr.Dataset) + assert "acq_time" in xrds.coords + xr.testing.assert_equal(xrds["acq_time"], ds["acq_time"]) + @pytest.fixture def multi_area_scn(self): """Define Scene with multiple area.""" diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index 2af010e9ac..eb3f90b715 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -257,20 +257,28 @@ def test_self_sharpened_no_high_res(self): with pytest.raises(ValueError, match="SelfSharpenedRGB requires at least one high resolution band, not 'None'"): comp((self.ds1, self.ds2, self.ds3)) - def test_basic_no_high_res(self): + @pytest.mark.parametrize("dtype", [np.float32, np.float64]) + def test_basic_no_high_res(self, dtype): """Test that three datasets can be passed without optional high res.""" from satpy.composites import RatioSharpenedRGB comp = RatioSharpenedRGB(name="true_color") - res = comp((self.ds1, self.ds2, self.ds3)) + res = comp((self.ds1.astype(dtype), self.ds2.astype(dtype), self.ds3.astype(dtype))) assert res.shape == (3, 2, 2) + assert res.dtype == dtype + assert res.values.dtype == dtype - def test_basic_no_sharpen(self): + @pytest.mark.parametrize("dtype", [np.float32, np.float64]) + def test_basic_no_sharpen(self, dtype): """Test that color None does no sharpening.""" from satpy.composites import RatioSharpenedRGB comp = RatioSharpenedRGB(name="true_color", high_resolution_band=None) - res = comp((self.ds1, self.ds2, self.ds3), optional_datasets=(self.ds4,)) + res = comp((self.ds1.astype(dtype), self.ds2.astype(dtype), self.ds3.astype(dtype)), + optional_datasets=(self.ds4.astype(dtype),)) assert res.shape == (3, 2, 2) + assert res.dtype == dtype + assert res.values.dtype == dtype + @pytest.mark.parametrize("dtype", [np.float32, np.float64]) @pytest.mark.parametrize( ("high_resolution_band", "neutral_resolution_band", "exp_r", "exp_g", "exp_b"), [ @@ -300,22 +308,26 @@ def test_basic_no_sharpen(self): np.array([[1.0, 1.0], [np.nan, 1.0]], dtype=np.float64)) ] ) - def test_ratio_sharpening(self, high_resolution_band, neutral_resolution_band, exp_r, exp_g, exp_b): + def test_ratio_sharpening(self, high_resolution_band, neutral_resolution_band, exp_r, exp_g, exp_b, dtype): """Test RatioSharpenedRGB by different groups of high_resolution_band and neutral_resolution_band.""" from satpy.composites import RatioSharpenedRGB comp = RatioSharpenedRGB(name="true_color", high_resolution_band=high_resolution_band, neutral_resolution_band=neutral_resolution_band) - res = comp((self.ds1, self.ds2, self.ds3), 
optional_datasets=(self.ds4,)) + res = comp((self.ds1.astype(dtype), self.ds2.astype(dtype), self.ds3.astype(dtype)), + optional_datasets=(self.ds4.astype(dtype),)) assert "units" not in res.attrs assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) + assert res.dtype == dtype data = res.values np.testing.assert_allclose(data[0], exp_r, rtol=1e-5) np.testing.assert_allclose(data[1], exp_g, rtol=1e-5) np.testing.assert_allclose(data[2], exp_b, rtol=1e-5) + assert res.dtype == dtype + @pytest.mark.parametrize("dtype", [np.float32, np.float64]) @pytest.mark.parametrize( ("exp_shape", "exp_r", "exp_g", "exp_b"), [ @@ -325,17 +337,19 @@ def test_ratio_sharpening(self, high_resolution_band, neutral_resolution_band, e np.array([[16 / 3, 16 / 3], [16 / 3, 0]], dtype=np.float64)) ] ) - def test_self_sharpened_basic(self, exp_shape, exp_r, exp_g, exp_b): + def test_self_sharpened_basic(self, exp_shape, exp_r, exp_g, exp_b, dtype): """Test that three datasets can be passed without optional high res.""" from satpy.composites import SelfSharpenedRGB comp = SelfSharpenedRGB(name="true_color") - res = comp((self.ds1, self.ds2, self.ds3)) - data = res.values + res = comp((self.ds1.astype(dtype), self.ds2.astype(dtype), self.ds3.astype(dtype))) + assert res.dtype == dtype + data = res.values assert data.shape == exp_shape np.testing.assert_allclose(data[0], exp_r, rtol=1e-5) np.testing.assert_allclose(data[1], exp_g, rtol=1e-5) np.testing.assert_allclose(data[2], exp_b, rtol=1e-5) + assert data.dtype == dtype class TestDifferenceCompositor(unittest.TestCase): @@ -1302,7 +1316,7 @@ def test_add_bands_l_rgb(self): from satpy.composites import add_bands # L + RGB -> RGB - data = xr.DataArray(da.ones((1, 3, 3)), dims=("bands", "y", "x"), + data = xr.DataArray(da.ones((1, 3, 3), dtype="float32"), dims=("bands", "y", "x"), coords={"bands": ["L"]}) new_bands = xr.DataArray(da.array(["R", "G", "B"]), dims=("bands"), coords={"bands": ["R", "G", "B"]}) @@ -1311,13 +1325,14 @@ def test_add_bands_l_rgb(self): assert res.attrs["mode"] == "".join(res_bands) np.testing.assert_array_equal(res.bands, res_bands) np.testing.assert_array_equal(res.coords["bands"], res_bands) + assert res.dtype == np.float32 def test_add_bands_l_rgba(self): """Test adding bands.""" from satpy.composites import add_bands # L + RGBA -> RGBA - data = xr.DataArray(da.ones((1, 3, 3)), dims=("bands", "y", "x"), + data = xr.DataArray(da.ones((1, 3, 3), dtype="float32"), dims=("bands", "y", "x"), coords={"bands": ["L"]}, attrs={"mode": "L"}) new_bands = xr.DataArray(da.array(["R", "G", "B", "A"]), dims=("bands"), coords={"bands": ["R", "G", "B", "A"]}) @@ -1326,13 +1341,14 @@ def test_add_bands_l_rgba(self): assert res.attrs["mode"] == "".join(res_bands) np.testing.assert_array_equal(res.bands, res_bands) np.testing.assert_array_equal(res.coords["bands"], res_bands) + assert res.dtype == np.float32 def test_add_bands_la_rgb(self): """Test adding bands.""" from satpy.composites import add_bands # LA + RGB -> RGBA - data = xr.DataArray(da.ones((2, 3, 3)), dims=("bands", "y", "x"), + data = xr.DataArray(da.ones((2, 3, 3), dtype="float32"), dims=("bands", "y", "x"), coords={"bands": ["L", "A"]}, attrs={"mode": "LA"}) new_bands = xr.DataArray(da.array(["R", "G", "B"]), dims=("bands"), coords={"bands": ["R", "G", "B"]}) @@ -1341,13 +1357,14 @@ def test_add_bands_la_rgb(self): assert res.attrs["mode"] == "".join(res_bands) np.testing.assert_array_equal(res.bands, res_bands) np.testing.assert_array_equal(res.coords["bands"], res_bands) + 
assert res.dtype == np.float32 def test_add_bands_rgb_rbga(self): """Test adding bands.""" from satpy.composites import add_bands # RGB + RGBA -> RGBA - data = xr.DataArray(da.ones((3, 3, 3)), dims=("bands", "y", "x"), + data = xr.DataArray(da.ones((3, 3, 3), dtype="float32"), dims=("bands", "y", "x"), coords={"bands": ["R", "G", "B"]}, attrs={"mode": "RGB"}) new_bands = xr.DataArray(da.array(["R", "G", "B", "A"]), dims=("bands"), @@ -1357,6 +1374,7 @@ def test_add_bands_rgb_rbga(self): assert res.attrs["mode"] == "".join(res_bands) np.testing.assert_array_equal(res.bands, res_bands) np.testing.assert_array_equal(res.coords["bands"], res_bands) + assert res.dtype == np.float32 def test_add_bands_p_l(self): """Test adding bands.""" diff --git a/satpy/tests/test_modifiers.py b/satpy/tests/test_modifiers.py index 7e28a7456b..a4aca52e64 100644 --- a/satpy/tests/test_modifiers.py +++ b/satpy/tests/test_modifiers.py @@ -135,29 +135,46 @@ def test_basic_default_not_provided(self, sunz_ds1, as_32bit): assert res.dtype == res_np.dtype assert "y" not in res.coords assert "x" not in res.coords + if as_32bit: + assert res.dtype == np.float32 - def test_basic_lims_not_provided(self, sunz_ds1): + @pytest.mark.parametrize("dtype", [np.float32, np.float64]) + def test_basic_lims_not_provided(self, sunz_ds1, dtype): """Test custom limits when SZA isn't provided.""" from satpy.modifiers.geometry import SunZenithCorrector comp = SunZenithCorrector(name="sza_test", modifiers=tuple(), correction_limit=90) - res = comp((sunz_ds1,), test_attr="test") - np.testing.assert_allclose(res.values, np.array([[66.853262, 68.168939], [66.30742, 67.601493]])) - + res = comp((sunz_ds1.astype(dtype),), test_attr="test") + expected = np.array([[66.853262, 68.168939], [66.30742, 67.601493]], dtype=dtype) + values = res.values + np.testing.assert_allclose(values, expected, rtol=1e-5) + assert res.dtype == dtype + assert values.dtype == dtype + + @pytest.mark.parametrize("dtype", [np.float32, np.float64]) @pytest.mark.parametrize("data_arr", [lazy_fixture("sunz_ds1"), lazy_fixture("sunz_ds1_stacked")]) - def test_basic_default_provided(self, data_arr, sunz_sza): + def test_basic_default_provided(self, data_arr, sunz_sza, dtype): """Test default limits when SZA is provided.""" from satpy.modifiers.geometry import SunZenithCorrector comp = SunZenithCorrector(name="sza_test", modifiers=tuple()) - res = comp((data_arr, sunz_sza), test_attr="test") - np.testing.assert_allclose(res.values, np.array([[22.401667, 22.31777], [22.437503, 22.353533]])) - + res = comp((data_arr.astype(dtype), sunz_sza.astype(dtype)), test_attr="test") + expected = np.array([[22.401667, 22.31777], [22.437503, 22.353533]], dtype=dtype) + values = res.values + np.testing.assert_allclose(values, expected) + assert res.dtype == dtype + assert values.dtype == dtype + + @pytest.mark.parametrize("dtype", [np.float32, np.float64]) @pytest.mark.parametrize("data_arr", [lazy_fixture("sunz_ds1"), lazy_fixture("sunz_ds1_stacked")]) - def test_basic_lims_provided(self, data_arr, sunz_sza): + def test_basic_lims_provided(self, data_arr, sunz_sza, dtype): """Test custom limits when SZA is provided.""" from satpy.modifiers.geometry import SunZenithCorrector comp = SunZenithCorrector(name="sza_test", modifiers=tuple(), correction_limit=90) - res = comp((data_arr, sunz_sza), test_attr="test") - np.testing.assert_allclose(res.values, np.array([[66.853262, 68.168939], [66.30742, 67.601493]])) + res = comp((data_arr.astype(dtype), sunz_sza.astype(dtype)), test_attr="test") + 
expected = np.array([[66.853262, 68.168939], [66.30742, 67.601493]], dtype=dtype) + values = res.values + np.testing.assert_allclose(values, expected, rtol=1e-5) + assert res.dtype == dtype + assert values.dtype == dtype def test_imcompatible_areas(self, sunz_ds2, sunz_sza): """Test sunz correction on incompatible areas.""" @@ -179,18 +196,28 @@ def setup_class(cls): cls.custom = SunZenithReducer(name="sza_reduction_test_custom", modifiers=tuple(), correction_limit=70, max_sza=95, strength=3.0) - def test_default_settings(self, sunz_ds1, sunz_sza): + @pytest.mark.parametrize("dtype", [np.float32, np.float64]) + def test_default_settings(self, sunz_ds1, sunz_sza, dtype): """Test default settings with sza data available.""" - res = self.default((sunz_ds1, sunz_sza), test_attr="test") - np.testing.assert_allclose(res.values, - np.array([[0.02916261, 0.02839063], [0.02949383, 0.02871911]]), - rtol=1e-5) - - def test_custom_settings(self, sunz_ds1, sunz_sza): + res = self.default((sunz_ds1.astype(dtype), sunz_sza.astype(dtype)), test_attr="test") + expected = np.array([[0.02916261, 0.02839063], [0.02949383, 0.02871911]], dtype=dtype) + assert res.dtype == dtype + values = res.values + assert values.dtype == dtype + np.testing.assert_allclose(values, + expected, + rtol=2e-5) + + @pytest.mark.parametrize("dtype", [np.float32, np.float64]) + def test_custom_settings(self, sunz_ds1, sunz_sza, dtype): """Test custom settings with sza data available.""" - res = self.custom((sunz_ds1, sunz_sza), test_attr="test") - np.testing.assert_allclose(res.values, - np.array([[0.01041319, 0.01030033], [0.01046164, 0.01034834]]), + res = self.custom((sunz_ds1.astype(dtype), sunz_sza.astype(dtype)), test_attr="test") + expected = np.array([[0.01041319, 0.01030033], [0.01046164, 0.01034834]], dtype=dtype) + assert res.dtype == dtype + values = res.values + assert values.dtype == dtype + np.testing.assert_allclose(values, + expected, rtol=1e-5) def test_invalid_max_sza(self, sunz_ds1, sunz_sza): @@ -502,6 +529,7 @@ def _create_test_data(self, name, wavelength, resolution): }) return input_band, red_band, angle1, angle1, angle1, angle1 + @pytest.mark.parametrize("dtype", [np.float32, np.float64]) @pytest.mark.parametrize( ("name", "wavelength", "resolution", "aerosol_type", "reduce_lim_low", "reduce_lim_high", "reduce_strength", "exp_mean", "exp_unique"), @@ -521,7 +549,7 @@ def _create_test_data(self, name, wavelength, resolution): ] ) def test_rayleigh_corrector(self, name, wavelength, resolution, aerosol_type, reduce_lim_low, reduce_lim_high, - reduce_strength, exp_mean, exp_unique): + reduce_strength, exp_mean, exp_unique, dtype): """Test PSPRayleighReflectance with fake data.""" from satpy.modifiers.atmosphere import PSPRayleighReflectance ray_cor = PSPRayleighReflectance(name=name, atmosphere="us-standard", aerosol_types=aerosol_type, @@ -535,42 +563,48 @@ def test_rayleigh_corrector(self, name, wavelength, resolution, aerosol_type, re assert ray_cor.attrs["reduce_strength"] == reduce_strength input_band, red_band, *_ = self._create_test_data(name, wavelength, resolution) - res = ray_cor([input_band, red_band]) + res = ray_cor([input_band.astype(dtype), red_band.astype(dtype)]) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) + assert res.dtype == dtype data = res.values unique = np.unique(data[~np.isnan(data)]) np.testing.assert_allclose(np.nanmean(data), exp_mean, rtol=1e-5) assert data.shape == (3, 5) np.testing.assert_allclose(unique, exp_unique, rtol=1e-5) + assert data.dtype == 
dtype

+    @pytest.mark.parametrize("dtype", [np.float32, np.float64])
     @pytest.mark.parametrize("as_optionals", [False, True])
-    def test_rayleigh_with_angles(self, as_optionals):
+    def test_rayleigh_with_angles(self, as_optionals, dtype):
         """Test PSPRayleighReflectance with angles provided."""
         from satpy.modifiers.atmosphere import PSPRayleighReflectance
         aerosol_type = "rayleigh_only"
         ray_cor = PSPRayleighReflectance(name="B01", atmosphere="us-standard", aerosol_types=aerosol_type)
-        prereqs, opt_prereqs = self._get_angles_prereqs_and_opts(as_optionals)
+        prereqs, opt_prereqs = self._get_angles_prereqs_and_opts(as_optionals, dtype)
         with mock.patch("satpy.modifiers.atmosphere.get_angles") as get_angles:
             res = ray_cor(prereqs, opt_prereqs)
             get_angles.assert_not_called()
         assert isinstance(res, xr.DataArray)
         assert isinstance(res.data, da.Array)
+        assert res.dtype == dtype
         data = res.values
         unique = np.unique(data[~np.isnan(data)])
         np.testing.assert_allclose(unique, np.array([-75.0, -37.71298492, 31.14350754]), rtol=1e-5)
         assert data.shape == (3, 5)
+        assert data.dtype == dtype

-    def _get_angles_prereqs_and_opts(self, as_optionals):
+    def _get_angles_prereqs_and_opts(self, as_optionals, dtype):
         wavelength = (0.45, 0.47, 0.49)
         resolution = 1000
         input_band, red_band, *angles = self._create_test_data("B01", wavelength, resolution)
-        prereqs = [input_band, red_band]
+        prereqs = [input_band.astype(dtype), red_band.astype(dtype)]
         opt_prereqs = []
+        angles = [a.astype(dtype) for a in angles]
         if as_optionals:
             opt_prereqs = angles
         else:
diff --git a/satpy/tests/test_readers.py b/satpy/tests/test_readers.py
index eb8983d3cf..f11d181833 100644
--- a/satpy/tests/test_readers.py
+++ b/satpy/tests/test_readers.py
@@ -258,6 +258,12 @@ class TestReaderLoader(unittest.TestCase):

     Assumes that the VIIRS SDR reader exists and works.
     """

+    @pytest.fixture(autouse=True)
+    def inject_fixtures(self, caplog, tmp_path):  # noqa: PT004
+        """Inject caplog and tmp_path into the test class."""
+        self._caplog = caplog
+        self._tmp_path = tmp_path
+
     def setUp(self):
         """Wrap HDF5 file handler with our own fake handler."""
         from satpy.readers.viirs_sdr import VIIRSSDRFileHandler
@@ -439,6 +445,31 @@ def test_almost_all_filtered(self):
         assert "abi_l1b" in readers
         assert len(list(readers["abi_l1b"].available_dataset_ids)) == 0

+    def test_yaml_error_message(self):
+        """Test that YAML errors are logged properly."""
+        import logging
+
+        import satpy
+        from satpy.readers import load_readers
+
+        reader_config = "reader:\n"
+        reader_config += "  name: nonreader\n"
+        reader_config += "  reader: !!python/name:notapackage.notareader.BadClass\n"
+
+        os.mkdir(self._tmp_path / "readers")
+        reader_fname = self._tmp_path / "readers" / "nonreader.yaml"
+        with open(reader_fname, "w") as fid:
+            fid.write(reader_config)
+
+        filenames = ["foo.bar"]
+        error_message = "No module named 'notapackage'"
+
+        with self._caplog.at_level(logging.ERROR):
+            with satpy.config.set({"config_path": [str(self._tmp_path)]}):
+                with pytest.raises(ValueError, match="No supported files found"):
+                    _ = load_readers(filenames=filenames, reader="nonreader")
+        assert error_message in self._caplog.text
+

 class TestFindFilesAndReaders:
     """Test the find_files_and_readers utility function."""
diff --git a/satpy/tests/test_utils.py b/satpy/tests/test_utils.py
index 61255c8006..ac3a2e921f 100644
--- a/satpy/tests/test_utils.py
+++ b/satpy/tests/test_utils.py
@@ -274,17 +274,44 @@ def test_basic_check_satpy(self):
         from satpy.utils import check_satpy
         check_satpy()

-    def test_specific_check_satpy(self):
+    def test_specific_check_satpy(self, capsys):
         """Test 'check_satpy' with specific features provided."""
         from satpy.utils import check_satpy
-        with mock.patch("satpy.utils.print") as print_mock:
-            check_satpy(readers=["viirs_sdr"], extras=("cartopy", "__fake"))
-        checked_fake = False
-        for call in print_mock.mock_calls:
-            if len(call[1]) > 0 and "__fake" in call[1][0]:
-                assert "ok" not in call[1][1]
-                checked_fake = True
-        assert checked_fake, "Did not find __fake module mentioned in checks"
+        check_satpy(readers=["viirs_sdr"], packages=("cartopy", "__fake"))
+        out, _ = capsys.readouterr()
+        checked_fake = "__fake: not installed" in out
+        checked_viirs_sdr = "Readers\n=======\nviirs_sdr" in out
+        assert checked_fake, "Did not find __fake package mentioned in checks"
+        assert checked_viirs_sdr, "Did not find viirs_sdr in readers mentioned in checks"
+
+
+class TestShowVersions:
+    """Test the 'show_versions' function."""
+
+    def test_basic_show_versions(self):
+        """Test 'show_versions' basic functionality."""
+        from satpy.utils import show_versions
+        show_versions()
+
+    def test_show_specific_version(self, capsys):
+        """Test 'show_versions' works with an installed package."""
+        from satpy.utils import show_versions
+        show_versions(packages=["pytest"])
+        out, _ = capsys.readouterr()
+
+        pytest_mentioned = "pytest:" in out
+        pytest_installed = "pytest: not installed" not in out
+        check_pytest = pytest_mentioned and pytest_installed
+        assert check_pytest, "pytest with package version not in print output"
+
+    def test_show_missing_specific_version(self, capsys):
+        """Test 'show_versions' works with a missing package."""
+        from satpy.utils import show_versions
+        show_versions(packages=["__fake"])
+        out, _ = capsys.readouterr()
+
+        check_fake = "__fake: not installed" in out
+        assert check_fake, "Did not find '__fake: not
 
 
 def test_debug_on(caplog):
@@ -294,12 +321,7 @@ def depwarn():
         logger = logging.getLogger("satpy.silly")
         logger.debug("But now it's just got SILLY.")
-        warnings.warn(
-            "Stop that! It's SILLY.",
-            DeprecationWarning,
-            stacklevel=2
-        )
-
+        warnings.warn("Stop that! It's SILLY.", DeprecationWarning, stacklevel=2)
     warnings.filterwarnings("ignore", category=DeprecationWarning)
     debug_on(False)
     filts_before = warnings.filters.copy()
diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py
index 1a72894108..ba305fbe58 100644
--- a/satpy/tests/writer_tests/test_cf.py
+++ b/satpy/tests/writer_tests/test_cf.py
@@ -528,12 +528,19 @@ def _assert_encoding_as_expected(self, filename, expected):
             assert f["test-array"].dtype == expected["dtype"]
             assert f["test-array"].encoding["complevel"] == expected["complevel"]
 
-    def test_warning_if_backends_dont_match(self, scene, filename, monkeypatch):
+    @pytest.mark.parametrize(
+        "versions",
+        [
+            {"netCDF4": "1.5.0", "libnetcdf": "4.9.1-development"},
+            {"netCDF4": "1.6.0", "libnetcdf": "invalid-version"}
+        ]
+    )
+    def test_warning_if_backends_dont_match(self, scene, filename, monkeypatch, versions):
         """Test warning if backends don't match."""
         import netCDF4
         with monkeypatch.context() as m:
-            m.setattr(netCDF4, "__version__", "1.5.0")
-            m.setattr(netCDF4, "__netcdf4libversion__", "4.9.1")
+            m.setattr(netCDF4, "__version__", versions["netCDF4"])
+            m.setattr(netCDF4, "__netcdf4libversion__", versions["libnetcdf"])
             with pytest.warns(UserWarning, match=r"Backend version mismatch"):
                 scene.save_datasets(filename=filename, writer="cf")
 
diff --git a/satpy/tests/writer_tests/test_ninjogeotiff.py b/satpy/tests/writer_tests/test_ninjogeotiff.py
index 31fcb25efc..a9b283cf11 100644
--- a/satpy/tests/writer_tests/test_ninjogeotiff.py
+++ b/satpy/tests/writer_tests/test_ninjogeotiff.py
@@ -20,6 +20,7 @@
 import datetime
 import logging
 import os
+from unittest.mock import Mock
 
 import dask.array as da
 import numpy as np
@@ -247,6 +248,12 @@ def test_image_small_arctic_P(test_area_tiny_stereographic_wgs84):
          "start_time": datetime.datetime(2027, 8, 2, 8, 20),
          "area": test_area_tiny_stereographic_wgs84,
          "mode": "P"})
+    # simulate an enhancement history such as palettize may add
+    arr.attrs["enhancement_history"] = [
+        {"scale": np.float64(0.01),
+         "offset": np.float64(0.0),
+         "colormap": Mock()}]
+
     return to_image(arr)
 
 
@@ -577,8 +584,8 @@ def test_write_and_read_file_P(test_image_small_arctic_P, tmp_path):
         test_image_small_arctic_P,
         filename=fn,
         fill_value=255,
-        PhysicUnit="N/A",
-        PhysicValue="N/A",
+        PhysicUnit="satdata",
+        PhysicValue="satdata",
         SatelliteNameID=6400014,
         ChannelID=900015,
         DataType="PPRN",
@@ -591,8 +598,8 @@ def test_write_and_read_file_P(test_image_small_arctic_P, tmp_path):
     tgs = src.tags()
     assert tgs["ninjo_FileName"] == fn
     assert tgs["ninjo_DataSource"] == "dowsing rod"
-    assert "ninjo_Gradient" not in tgs
-    assert "ninjo_AxisIntercept" not in tgs
+    assert tgs["ninjo_Gradient"] == "1.0"
+    assert tgs["ninjo_AxisIntercept"] == "0.0"
 
 
 def test_write_and_read_file_units(
diff --git a/satpy/utils.py b/satpy/utils.py
index 77645a476a..f4d456d4f6 100644
--- a/satpy/utils.py
+++ b/satpy/utils.py
@@ -20,9 +20,11 @@
 
 import contextlib
 import datetime
+import importlib.metadata
 import logging
 import os
 import pathlib
+import platform
 import warnings
 from contextlib import contextmanager
 from copy import deepcopy
@@ -476,30 +478,76 @@ def _check_yaml_configs(configs, key):
                 pass
     return diagnostic
 
 
+def _check_package_version(package_name: str) -> Optional[str]:
+    """Check the version of `package_name`.
 
-def _check_import(module_names):
-    """Import the specified modules and provide status."""
-    diagnostics = {}
-    for module_name in module_names:
-        try:
-            __import__(module_name)
-            res = "ok"
-        except ImportError as err:
-            res = str(err)
-        diagnostics[module_name] = res
-    return diagnostics
+
+    Args:
+        package_name (str): the distribution package name.
+
+    Returns:
+        The version number if available, else `None`.
+    """
+    try:
+        return importlib.metadata.version(package_name)
+    except importlib.metadata.PackageNotFoundError:
+        return None
+
+
+def show_versions(packages=None):
+    """Show versions of system, Python and common packages (if installed).
+
+    Args:
+        packages (list or None): Limit packages to those specified.
+
+    Returns:
+        None.
+
+    """
+    packages = (
+        (
+            "cartopy",
+            "geoviews",
+            "numpy",
+            "dask",
+            "xarray",
+            "gdal",
+            "rasterio",
+            "pyproj",
+            "netcdf4",
+            "h5py",
+            "pyhdf",
+            "h5netcdf",
+            "fsspec",
+        )
+        if packages is None
+        else packages
+    )
+
+    print("Versions")  # noqa: T201
+    print("========")  # noqa: T201
+    print(f"platform: {platform.platform()}")  # noqa: T201
+    print(f"python: {platform.python_version()}")  # noqa: T201
+    print()  # noqa: T201
+
+    for package_name in sorted(packages):
+        package_version = _check_package_version(package_name)
+        print(  # noqa: T201
+            f"{package_name}: {package_version if package_version else 'not installed'}"
+        )
+
+    print()  # noqa: T201
 
 
-def check_satpy(readers=None, writers=None, extras=None):
+def check_satpy(readers=None, writers=None, packages=None):
     """Check the satpy readers and writers for correct installation.
 
     Args:
         readers (list or None): Limit readers checked to those specified
         writers (list or None): Limit writers checked to those specified
-        extras (list or None): Limit extras checked to those specified
+        packages (list or None): Limit packages checked to those specified
 
-    Returns: bool
-        True if all specified features were successfully loaded.
+    Returns:
+        None
 
     """
     from satpy.readers import configs_for_reader
@@ -517,12 +565,7 @@ def check_satpy(readers=None, writers=None, extras=None):
         print(writer + ": ", res)  # noqa: T201
     print()  # noqa: T201
 
-    print("Extras")  # noqa: T201
-    print("======")  # noqa: T201
-    module_names = extras if extras is not None else ("cartopy", "geoviews")
-    for module_name, res in sorted(_check_import(module_names).items()):
-        print(module_name + ": ", res)  # noqa: T201
-    print()  # noqa: T201
+    show_versions(packages=packages)
 
 
 def unify_chunks(*data_arrays: xr.DataArray) -> tuple[xr.DataArray, ...]:
diff --git a/satpy/writers/cf_writer.py b/satpy/writers/cf_writer.py
index fdcdbd4e8c..c0b7fd827b 100644
--- a/satpy/writers/cf_writer.py
+++ b/satpy/writers/cf_writer.py
@@ -158,7 +158,7 @@
 import numpy as np
 import xarray as xr
-from packaging.version import Version
+from packaging.version import InvalidVersion, Version
 
 from satpy.cf.coords import EPOCH  # noqa: F401 (for backward compatibility)
 from satpy.writers import Writer
@@ -390,8 +390,26 @@ def _backend_versions_match():
 
 def _get_backend_versions():
     import netCDF4
+    libnetcdf_version = _parse_libnetcdf_version(
+        netCDF4.__netcdf4libversion__
+    )
     return {
         "netCDF4": Version(netCDF4.__version__),
-        "libnetcdf": Version(netCDF4.__netcdf4libversion__),
+        "libnetcdf": libnetcdf_version,
         "xarray": Version(xr.__version__)
     }
+
+
+def _parse_libnetcdf_version(version_str):
+    # Make libnetcdf development version compatible with PEP 440
+    version_str = version_str.replace("development", "dev")
+    try:
+        return Version(version_str)
+    except InvalidVersion:
+        warnings.warn(
+            f"Unable to parse netcdf-c version {version_str}, "
+            "using 0.0.0 as fallback",
+            UserWarning,
+            stacklevel=3
+        )
+        return Version("0.0.0")
diff --git a/satpy/writers/ninjogeotiff.py b/satpy/writers/ninjogeotiff.py
index 5f88cc52ed..1d5cfb69ac 100644
--- a/satpy/writers/ninjogeotiff.py
+++ b/satpy/writers/ninjogeotiff.py
@@ -74,11 +74,13 @@
 NinJo has a functionality to read the corresponding quantity (example:
 brightness temperature or reflectance).  To make this possible, the writer
 adds the tags ``Gradient`` and ``AxisIntercept``.  Those tags are added if
-and only if the image has mode ``L`` or ``LA`` and ``PhysicUnit`` is not set
+and only if the image has mode ``L``, ``P``, or ``LA`` and ``PhysicUnit`` is not set
 to ``"N/A"``.  In other words, to suppress those tags for images with mode
 ``L`` or ``LA`` (for example, for the composite ``vis_with_ir``, where the
 physical interpretation of individual pixels is lost), one should set
 ``PhysicUnit`` to ``"N/A"``, ``"n/a"``, ``"1"``, or ``""`` (empty string).
+If the image has mode ``P``, ``Gradient`` is set to ``1.0`` and ``AxisIntercept``
+to ``0.0`` (as expected by NinJo).
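+
+For example (an illustrative sketch mirroring the parameter values used in
+this PR's tests; the composite name and ``scn`` are hypothetical), saving a
+P-mode image so NinJo can read the quantity might look like::
+
+    scn.save_dataset(
+        "my_composite", writer="ninjogeotiff",
+        filename="out.tif", fill_value=255,
+        PhysicUnit="satdata", PhysicValue="satdata",
+        SatelliteNameID=6400014, ChannelID=900015, DataType="PPRN")
+
+The resulting GeoTIFF then carries ``ninjo_Gradient`` ``1.0`` and
+``ninjo_AxisIntercept`` ``0.0``.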
""" import copy @@ -204,11 +206,22 @@ def save_image( # noqa: D417 overviews_minsize=overviews_minsize, overviews_resampling=overviews_resampling, tags={**(tags or {}), **ninjo_tags}, - scale_offset_tags=(self.scale_offset_tag_names - if self._check_include_scale_offset(image, PhysicUnit) - else None), + scale_offset_tags=self._get_scale_offset_tags(image, PhysicUnit), **gdal_opts) + def _get_scale_offset_tags(self, image, unit): + """Get scale offset tags (tuple or dict).""" + if self._check_include_scale_offset(image, unit): + # image.mode cannot be trusted https://github.com/pytroll/satpy/issues/2300 + try: + mod = image.data.attrs["mode"] + except KeyError: + mod = image.mode + if mod == "P": + return dict(zip(self.scale_offset_tag_names, (1, 0))) + return self.scale_offset_tag_names + return None # explicit is better than implicit + def _fix_units(self, image, quantity, unit): """Adapt units between °C and K. @@ -236,7 +249,7 @@ def _fix_units(self, image, quantity, unit): def _check_include_scale_offset(self, image, unit): """Check if scale-offset tags should be included.""" - if image.mode.startswith("L") and unit.lower() not in ("n/a", "1", ""): + if image.mode[0] in "LP" and unit.lower() not in ("n/a", "1", ""): return True return False diff --git a/utils/create_reference.py b/utils/create_reference.py new file mode 100644 index 0000000000..04bffdd9a3 --- /dev/null +++ b/utils/create_reference.py @@ -0,0 +1,109 @@ +# Copyright (c) 2024-2025 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . + +"""Script to create image testing references. + +Script to create reference images for the automated image testing system. + +The input data directory must follow the data structure from the +image-comparison-tests repository with satellite_data/. + +This script is a work in progress and expected to change significantly. + +DO NOT USE FOR OPERATIONAL PRODUCTION! +""" + +import argparse +import os +import pathlib + +import hdf5plugin # noqa: F401 + +from satpy import Scene + + +def generate_images(props): + """Generate reference images for testing purposes. + + Args: + props (namespace): Object with attributes corresponding to command line + arguments as defined by :func:get_parser. 
+ """ + filenames = (props.basedir / "satellite_data" / props.satellite / + props.case).glob("*") + + scn = Scene(reader=props.reader, filenames=filenames) + + scn.load(props.composites) + if props.area == "native": + ls = scn.resample(resampler="native") + elif props.area is not None: + ls = scn.resample(props.area, resampler="gradient_search") + else: + ls = scn + + from dask.diagnostics import ProgressBar + with ProgressBar(): + ls.save_datasets( + writer="simple_image", + filename=os.fspath( + props.basedir / "reference_images" / + "satpy-reference-image-{platform_name}-{sensor}-" + "{start_time:%Y%m%d%H%M}-{area.area_id}-{name}.png")) + +def get_parser(): + """Return argument parser.""" + parser = argparse.ArgumentParser(description=__doc__) + + parser.add_argument( + "satellite", action="store", type=str, + help="Satellite name.") + + parser.add_argument( + "reader", action="store", type=str, + help="Reader name.") + + parser.add_argument( + "case", help="case to generate", type=str) + + parser.add_argument( + "-b", "--basedir", action="store", type=pathlib.Path, + default=pathlib.Path("."), + help="Base directory for reference data. " + "This must contain a subdirectories satellite_data and " + "reference_images. The directory satellite_data must contain " + "input data in a subdirectory for the satellite and case. Output images " + "will be written to the subdirectory reference_images.") + + parser.add_argument( + "-c", "--composites", nargs="+", help="composites to generate", + type=str, default=["ash", "airmass"]) + + parser.add_argument( + "-a", "--area", action="store", + default=None, + help="Area name, or 'native' (native resampling)") + + return parser + +def main(): + """Main function.""" + parsed = get_parser().parse_args() + + generate_images(parsed) + +if __name__ == "__main__": + main()