From 9173def69d6629a90849e50e4e632a7ba68f03a5 Mon Sep 17 00:00:00 2001 From: BENR0 Date: Tue, 24 Sep 2019 14:02:40 +0200 Subject: [PATCH 001/340] Add available datasets method template to modis_l2 reader --- satpy/readers/modis_l2.py | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/satpy/readers/modis_l2.py b/satpy/readers/modis_l2.py index ac91a7a6e0..a7b6cd4628 100644 --- a/satpy/readers/modis_l2.py +++ b/satpy/readers/modis_l2.py @@ -168,6 +168,23 @@ def get_dataset(self, dataset_id, dataset_info): return dataset + def available_datasets(self, configured_datasets=None): + "Add information to configured datasets." + # pass along existing datasets + for is_avail, ds_info in (configured_datasets or []): + yield is_avail, ds_info + + + + # get dynamic variables known to this file (that we created) + for var_name, val in self.dynamic_variables.items(): + ds_info = { + 'file_type': self.filetype_info['file_type'], + 'resolution': 1000, + 'name': var_name, + } + yield True, ds_info + def bits_strip(bit_start, bit_count, value): """Extract specified bit from bit representation of integer value. 
From 7ef3452c4667115a009762ead419d1ab014e817a Mon Sep 17 00:00:00 2001 From: BENR0 Date: Wed, 25 Sep 2019 10:36:09 +0200 Subject: [PATCH 002/340] Add add_offset to hdfeos dataset reading --- satpy/readers/hdfeos_base.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/satpy/readers/hdfeos_base.py b/satpy/readers/hdfeos_base.py index f715c2714b..3faac39582 100644 --- a/satpy/readers/hdfeos_base.py +++ b/satpy/readers/hdfeos_base.py @@ -164,8 +164,9 @@ def load_dataset(self, dataset_name): good_mask = data != fill_value scale_factor = data.attrs.get('scale_factor') + add_offset = data.attrs.get('add_offset', 0) if scale_factor is not None: - data = data * scale_factor + data = data * scale_factor + add_offset data = data.where(good_mask, new_fill) return data From 7f1c5723e7031852f4de0a8f5a0233c41ee7e4c3 Mon Sep 17 00:00:00 2001 From: BENR0 Date: Wed, 25 Sep 2019 10:36:44 +0200 Subject: [PATCH 003/340] Add available_datasets method to modis_l2 reader --- satpy/etc/readers/modis_l2.yaml | 8 ++++++-- satpy/readers/modis_l2.py | 31 +++++++++++++++++++++---------- 2 files changed, 27 insertions(+), 12 deletions(-) diff --git a/satpy/etc/readers/modis_l2.yaml b/satpy/etc/readers/modis_l2.yaml index 8f23d5c75b..0caa1ebdeb 100644 --- a/satpy/etc/readers/modis_l2.yaml +++ b/satpy/etc/readers/modis_l2.yaml @@ -10,6 +10,10 @@ file_types: file_patterns: - 'M{platform_indicator:1s}D35_L2.A{acquisition_time:%Y%j.%H%M}.{collection:03d}.{production_time:%Y%j%H%M%S}.hdf' file_reader: !!python/name:satpy.readers.modis_l2.ModisL2HDFFileHandler + modis_l2_product: + file_patterns: + - 'M{platform_indicator:1s}D{product:2s}_L2.A{acquisition_time:%Y%j.%H%M}.{collection:03d}.{production_time:%Y%j%H%M%S}.hdf' + file_reader: !!python/name:satpy.readers.modis_l2.ModisL2HDFFileHandler hdf_eos_geo: file_patterns: - 'M{platform_indicator:1s}D03_A{start_time:%y%j_%H%M%S}_{processing_time:%Y%j%H%M%S}.hdf' @@ -49,7 +53,7 @@ datasets: 5000: file_type: mod35_hdf 1000: - file_type: 
[hdf_eos_geo, mod35_hdf] + file_type: [hdf_eos_geo, mod35_hdf, modis_l2_product] 500: file_type: hdf_eos_geo 250: @@ -64,7 +68,7 @@ datasets: # For EUM reduced (thinned) files file_type: mod35_hdf 1000: - file_type: [hdf_eos_geo, mod35_hdf] + file_type: [hdf_eos_geo, mod35_hdf, modis_l2_product] 500: file_type: hdf_eos_geo 250: diff --git a/satpy/readers/modis_l2.py b/satpy/readers/modis_l2.py index a7b6cd4628..d379a209d6 100644 --- a/satpy/readers/modis_l2.py +++ b/satpy/readers/modis_l2.py @@ -168,22 +168,33 @@ def get_dataset(self, dataset_id, dataset_info): return dataset - def available_datasets(self, configured_datasets=None): - "Add information to configured datasets." + def available_datasets(self, configured_datasets): + """ + Adds dataset information not specifically specified in reader yaml file + from arbitrary modis level 2 product files to available datasets. + + Notes: + Currently only adds 2D datasets and does not decode bit encoded information. + """ # pass along existing datasets for is_avail, ds_info in (configured_datasets or []): yield is_avail, ds_info - + res_dict = {(8120, 5416): 250, (4060, 2708): 500, (2030, 1354): 1000, (406, 270): 5000, (203, 135): 10000} # get dynamic variables known to this file (that we created) - for var_name, val in self.dynamic_variables.items(): - ds_info = { - 'file_type': self.filetype_info['file_type'], - 'resolution': 1000, - 'name': var_name, - } - yield True, ds_info + for var_name, val in self.sd.datasets().items(): + if len(val[0]) == 2: + resolution = res_dict.get(val[1]) + if not resolution is None: + ds_info = { + 'file_type': self.filetype_info['file_type'], + 'resolution': resolution, + 'name': var_name, + 'file_key': var_name, + 'coordinates': ["longitude", "latitude"] + } + yield True, ds_info def bits_strip(bit_start, bit_count, value): From cb2272b87886db276974de6c185c05fed968d91a Mon Sep 17 00:00:00 2001 From: ro Date: Mon, 30 Sep 2019 10:55:44 +0200 Subject: [PATCH 004/340] Change resolution 
dict to use only columns --- satpy/readers/modis_l2.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/satpy/readers/modis_l2.py b/satpy/readers/modis_l2.py index d379a209d6..234611d676 100644 --- a/satpy/readers/modis_l2.py +++ b/satpy/readers/modis_l2.py @@ -180,13 +180,13 @@ def available_datasets(self, configured_datasets): for is_avail, ds_info in (configured_datasets or []): yield is_avail, ds_info - res_dict = {(8120, 5416): 250, (4060, 2708): 500, (2030, 1354): 1000, (406, 270): 5000, (203, 135): 10000} + res_dict = {5416: 250, 2708: 500, 1354: 1000, 270: 5000, 135: 10000} # get dynamic variables known to this file (that we created) for var_name, val in self.sd.datasets().items(): if len(val[0]) == 2: - resolution = res_dict.get(val[1]) - if not resolution is None: + resolution = res_dict.get(val[1][-1]) + if resolution is not None: ds_info = { 'file_type': self.filetype_info['file_type'], 'resolution': resolution, From 1cd358189477b5f0d47bf51c54eaa08c5c47e749 Mon Sep 17 00:00:00 2001 From: BENR0 Date: Wed, 28 Dec 2022 12:42:35 +0100 Subject: [PATCH 005/340] refactor: add compat keyword to to_xarray_dataset --- satpy/scene.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/satpy/scene.py b/satpy/scene.py index 92e9ff5615..cff5648849 100644 --- a/satpy/scene.py +++ b/satpy/scene.py @@ -1036,12 +1036,16 @@ def to_geoviews(self, gvtype=None, datasets=None, kdims=None, vdims=None, dynami return gview - def to_xarray_dataset(self, datasets=None): + def to_xarray_dataset(self, datasets=None, compat="minimal"): """Merge all xr.DataArrays of a scene to a xr.DataSet. Parameters: datasets (list): List of products to include in the :class:`xarray.Dataset` + compat (Optional[str]): + How to compare variables with the same name for conflicts. + See :func:`xarray.merge` for possible options. Defaults to + "minimal" which drops conflicting variables. 
Returns: :class:`xarray.Dataset` @@ -1055,7 +1059,7 @@ def to_xarray_dataset(self, datasets=None): mdata = combine_metadata(*tuple(i.attrs for i in dataarrays)) if mdata.get('area') is None or not isinstance(mdata['area'], SwathDefinition): # either don't know what the area is or we have an AreaDefinition - ds = xr.merge(ds_dict.values()) + ds = xr.merge(ds_dict.values(), compat=compat) else: # we have a swath definition and should use lon/lat values lons, lats = mdata['area'].get_lonlats() From 549051527a065eeeb8b6376c59ff2be4b16fdd41 Mon Sep 17 00:00:00 2001 From: BENR0 Date: Wed, 28 Dec 2022 12:51:13 +0100 Subject: [PATCH 006/340] tests: add tests --- satpy/tests/test_scene.py | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/satpy/tests/test_scene.py b/satpy/tests/test_scene.py index 1d9386d014..0050a783d3 100644 --- a/satpy/tests/test_scene.py +++ b/satpy/tests/test_scene.py @@ -2036,6 +2036,36 @@ def test_to_xarray_dataset_with_empty_scene(self): assert len(xrds.variables) == 0 assert len(xrds.coords) == 0 + def test_to_xarray_dataset_with_conflicting_variables(self): + """Test converting Scene with DataArrays with conflicting variables. + + E.g. 
"acq_time" in the seviri_l1b_nc reader + """ + scn = Scene() + + acq_time_1 = ('y', [np.datetime64('1958-01-02 00:00:01'), + np.datetime64('1958-01-02 00:00:02')]) + ds = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=('y', 'x'), + attrs={'start_time': datetime(2018, 1, 1)}) + ds['acq_time'] = acq_time_1 + + scn['ds1'] = ds + + acq_time_2 = ('y', [np.datetime64('1958-02-02 00:00:01'), + np.datetime64('1958-02-02 00:00:02')]) + ds['acq_time'] = acq_time_2 + + scn['ds2'] = ds + + xrds = scn.to_xarray_dataset() + assert isinstance(xrds, xr.Dataset) + assert 'acq_time' not in xrds.coords + + xrds = scn.to_xarray_dataset(compat='override') + assert isinstance(xrds, xr.Dataset) + assert 'acq_time' in xrds.coords + assert xrds['acq_time'] == acq_time_1 + def test_geoviews_basic_with_area(self): """Test converting a Scene to geoviews with an AreaDefinition.""" from pyresample.geometry import AreaDefinition From a35b1dec238970f6be8118febb763bc601098897 Mon Sep 17 00:00:00 2001 From: "Adam.Dybbroe" Date: Mon, 4 Sep 2023 23:41:36 +0200 Subject: [PATCH 007/340] First version of the l1b reader for the Arctic Weather Satellite Signed-off-by: Adam.Dybbroe --- satpy/etc/readers/aws_l1b_nc.yaml | 425 ++++++++++++++++++++++++++++++ satpy/readers/aws_l1b.py | 311 ++++++++++++++++++++++ 2 files changed, 736 insertions(+) create mode 100644 satpy/etc/readers/aws_l1b_nc.yaml create mode 100644 satpy/readers/aws_l1b.py diff --git a/satpy/etc/readers/aws_l1b_nc.yaml b/satpy/etc/readers/aws_l1b_nc.yaml new file mode 100644 index 0000000000..86ed11bc16 --- /dev/null +++ b/satpy/etc/readers/aws_l1b_nc.yaml @@ -0,0 +1,425 @@ +reader: + name: aws_l1b_nc + short_name: AWS L1B RAD NetCDF4 + long_name: AWS L1B Radiance (NetCDF4) + description: Reader for the ESA AWS (Arctic Weather Satellite) Sounder level-1b files in netCDF4. 
+ reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader + sensors: [aws,] + status: Beta + default_channels: [] + + data_identification_keys: + name: + required: true + frequency_double_sideband: + type: !!python/name:satpy.readers.pmw_channels_definitions.FrequencyDoubleSideBand + frequency_range: + type: !!python/name:satpy.readers.pmw_channels_definitions.FrequencyRange + resolution: + polarization: + enum: + - QH + - QV + calibration: + enum: + - brightness_temperature + transitive: true + modifiers: + required: true + default: [] + type: !!python/name:satpy.dataset.ModifierTuple + +datasets: + '1': + name: '1' + frequency_range: + central: 50.3 + bandwidth: 0.180 + unit: GHz + polarization: 'QH' + resolution: 40000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [lon_horn_1, lat_horn_1] + file_type: aws_l1b_nc + file_key: data/calibration/aws_toa_brightness_temperature + '2': + name: '2' + frequency_range: + central: 52.8 + bandwidth: 0.400 + unit: GHz + polarization: 'QH' + resolution: 40000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [lon_horn_1, lat_horn_1] + file_type: aws_l1b_nc + file_key: data/calibration/aws_toa_brightness_temperature + '3': + name: '3' + frequency_range: + central: 53.246 + bandwidth: 0.300 + unit: GHz + polarization: 'QH' + resolution: 40000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [lon_horn_1, lat_horn_1] + file_type: aws_l1b_nc + file_key: data/calibration/aws_toa_brightness_temperature + '4': + name: '4' + frequency_range: + central: 53.596 + bandwidth: 0.370 + unit: GHz + polarization: 'QH' + resolution: 40000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [lon_horn_1, lat_horn_1] + file_type: aws_l1b_nc + file_key: data/calibration/aws_toa_brightness_temperature + '5': + name: '5' + frequency_range: + 
central: 54.4 + bandwidth: 0.400 + unit: GHz + polarization: 'QH' + resolution: 40000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [lon_horn_1, lat_horn_1] + file_type: aws_l1b_nc + file_key: data/calibration/aws_toa_brightness_temperature + '6': + name: '6' + frequency_range: + central: 54.94 + bandwidth: 0.400 + unit: GHz + polarization: 'QH' + resolution: 40000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [lon_horn_1, lat_horn_1] + file_type: aws_l1b_nc + file_key: data/calibration/aws_toa_brightness_temperature + '7': + name: '7' + frequency_range: + central: 55.5 + bandwidth: 0.330 + unit: GHz + polarization: 'QH' + resolution: 40000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [lon_horn_1, lat_horn_1] + file_type: aws_l1b_nc + file_key: data/calibration/aws_toa_brightness_temperature + '8': + name: '8' + frequency_range: + central: 57.290344 + bandwidth: 0.330 + unit: GHz + polarization: 'QH' + resolution: 40000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [lon_horn_1, lat_horn_1] + file_type: aws_l1b_nc + file_key: data/calibration/aws_toa_brightness_temperature + '9': + name: '9' + frequency_range: + central: 89.0 + bandwidth: 4.0 + unit: GHz + polarization: 'QV' + resolution: 20000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [lon_horn_2, lat_horn_2] + file_type: aws_l1b_nc + file_key: data/calibration/aws_toa_brightness_temperature + '10': + name: '10' + frequency_range: + central: 165.5 + bandwidth: 2.700 + unit: GHz + polarization: 'QH' + resolution: 20000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [lon_horn_3, lat_horn_3] + file_type: aws_l1b_nc + file_key: data/calibration/aws_toa_brightness_temperature + '11': + name: '11' + 
frequency_range: + central: 176.311 + bandwidth: 2.0 + unit: GHz + polarization: 'QV' + resolution: 10000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [lon_horn_3, lat_horn_3] + file_type: aws_l1b_nc + file_key: data/calibration/aws_toa_brightness_temperature + '12': + name: '12' + frequency_range: + central: 178.811 + bandwidth: 2.0 + unit: GHz + polarization: 'QV' + resolution: 10000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [lon_horn_3, lat_horn_3] + file_type: aws_l1b_nc + file_key: data/calibration/aws_toa_brightness_temperature + '13': + name: '13' + frequency_range: + central: 180.311 + bandwidth: 1.0 + unit: GHz + polarization: 'QV' + resolution: 10000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [lon_horn_3, lat_horn_3] + file_type: aws_l1b_nc + file_key: data/calibration/aws_toa_brightness_temperature + '14': + name: '14' + frequency_range: + central: 181.511 + bandwidth: 1.0 + unit: GHz + polarization: 'QV' + resolution: 10000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [lon_horn_3, lat_horn_3] + file_type: aws_l1b_nc + file_key: data/calibration/aws_toa_brightness_temperature + '15': + name: '15' + frequency_range: + central: 182.311 + bandwidth: 0.5 + unit: GHz + polarization: 'QV' + resolution: 10000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [lon_horn_3, lat_horn_3] + file_type: aws_l1b_nc + file_key: data/calibration/aws_toa_brightness_temperature + '16': + name: '16' + frequency_double_sideband: + central: 325.15 + side: 1.2 + bandwidth: 0.8 + unit: GHz + polarization: 'QV' + resolution: 10000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [lon_horn_4, lat_horn_4] + file_type: aws_l1b_nc + file_key: 
data/calibration/aws_toa_brightness_temperature + '17': + name: '17' + frequency_double_sideband: + central: 325.15 + side: 2.4 + bandwidth: 1.2 + unit: GHz + polarization: 'QV' + resolution: 10000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [lon_horn_4, lat_horn_4] + file_type: aws_l1b_nc + file_key: data/calibration/aws_toa_brightness_temperature + '18': + name: '18' + frequency_double_sideband: + central: 325.15 + side: 4.1 + bandwidth: 1.8 + unit: GHz + polarization: 'QV' + resolution: 10000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [lon_horn_4, lat_horn_4] + file_type: aws_l1b_nc + file_key: data/calibration/aws_toa_brightness_temperature + '19': + name: '19' + frequency_double_sideband: + central: 325.15 + side: 6.6 + bandwidth: 2.8 + unit: GHz + polarization: 'QV' + resolution: 10000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [lon_horn_4, lat_horn_4] + file_type: aws_l1b_nc + file_key: data/calibration/aws_toa_brightness_temperature + +# --- Coordinates --- + + lon_horn_1: + name: lon_horn_1 + file_type: aws_l1b_nc + file_key: aws_lon + standard_name: longitude + units: degrees_east + n_horns: 0 + + lat_horn_1: + name: lat_horn_1 + file_type: aws_l1b_nc + file_key: aws_lat + standard_name: latitude + units: degrees_north + n_horns: 0 + + lon_horn_2: + name: lon_horn_2 + file_type: aws_l1b_nc + file_key: data/navigation/aws_lon + standard_name: longitude + units: degrees_east + n_horns: 1 + + lat_horn_2: + name: lat_horn_2 + file_type: aws_l1b_nc + file_key: data/navigation/aws_lat + standard_name: latitude + units: degrees_north + n_horns: 1 + + lon_horn_3: + name: lon_horn_3 + file_type: aws_l1b_nc + file_key: data/navigation/aws_lon + standard_name: longitude + units: degrees_east + n_horns: 2 + + lat_horn_3: + name: lat_horn_3 + file_type: aws_l1b_nc + file_key: data/navigation/aws_lat + 
standard_name: latitude + units: degrees_north + n_horns: 2 + + lon_horn_4: + name: lon_horn_4 + file_type: aws_l1b_nc + file_key: data/navigation/aws_lon + standard_name: longitude + units: degrees_east + n_horns: 3 + + lat_horn_4: + name: lat_horn_4 + file_type: aws_l1b_nc + file_key: data/navigation/aws_lat + standard_name: latitude + units: degrees_north + n_horns: 3 + + aws_lat: + name: aws_lat + file_type: aws_l1b_nc + file_key: data/navigation/aws_lat + standard_name: latitude + units: degrees_north + + aws_lon: + name: aws_lon + file_type: aws_l1b_nc + file_key: data/navigation/aws_lon + standard_name: longitude + units: degrees_east + +# --- Navigation data --- + + solar_azimuth: + name: solar_azimuth + standard_name: solar_azimuth_angle + file_type: aws_l1b_nc + file_key: data/navigation/aws_solar_azimuth_angle + coordinates: + - aws_lon + - aws_lat + solar_zenith: + name: solar_zenith + standard_name: solar_zenith_angle + file_type: aws_l1b_nc + file_key: data/navigation/aws_solar_zenith_angle + coordinates: + - aws_lon + - aws_lat + satellite_azimuth: + name: satellite_azimuth + standard_name: satellite_azimuth_angle + file_type: aws_l1b_nc + file_key: data/navigation/aws_satellite_azimuth_angle + coordinates: + - aws_lon + - aws_lat + satellite_zenith: + name: satellite_zenith + standard_name: satellite_zenith_angle + file_type: aws_l1b_nc + file_key: data/navigation/aws_satellite_zenith_angle + coordinates: + - aws_lon + - aws_lat + + +file_types: + aws_l1b_nc: + # W_XX-OHB-Unknown,SAT,1-AWS-1B-RAD_C_OHB_20230707124607_G_D_20220621090100_20220621090618_T_B____.nc + # W_XX-OHB-Stockholm,SAT,AWS1-MWR-1B-RAD_C_OHB_20230823161321_G_D_20240115111111_20240115125434_T_B____.nc + file_reader: !!python/name:satpy.readers.aws_l1b.AWSL1BFile + file_patterns: ['W_XX-OHB-Stockholm,SAT,{platform_name}-MWR-1B-RAD_C_OHB_{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_T_B____.nc'] diff --git a/satpy/readers/aws_l1b.py 
b/satpy/readers/aws_l1b.py new file mode 100644 index 0000000000..02df6734fc --- /dev/null +++ b/satpy/readers/aws_l1b.py @@ -0,0 +1,311 @@ +# Copyright (c) 2023 Pytroll Developers + +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. + +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +"""Reader for the Arctic Weather Satellite (AWS) Sounder level-1b data. + +Test data provided by ESA August 23, 2023. +""" + +import logging +from datetime import datetime + +import dask.array as da +import numpy as np +import xarray as xr +from netCDF4 import default_fillvals + +from .netcdf_utils import NetCDF4FileHandler + +logger = logging.getLogger(__name__) + +DUMMY_STARTTIME = datetime(2023, 7, 7, 12, 0) +DUMMY_ENDTIME = datetime(2023, 7, 7, 12, 10) +# dict containing all available auxiliary data parameters to be read using the index map. 
Keys are the +# parameter name and values are the paths to the variable inside the netcdf + +AUX_DATA = { + 'scantime_utc': 'data/navigation/aws_scantime_utc', + 'solar_azimuth': 'data/navigation/aws_solar_azimuth_angle', + 'solar_zenith': 'data/navigation/aws_solar_zenith_angle', + 'satellite_azimuth': 'data/navigation/aws_satellite_azimuth_angle', + 'satellite_zenith': 'data/navigation/aws_satellite_zenith_angle', + 'surface_type': 'data/navigation/aws_surface_type', + 'terrain_elevation': 'data/navigation/aws_terrain_elevation', + 'aws_lat': 'data/navigation/aws_lat', + 'aws_lon': 'data/navigation/aws_lon', +} + +AWS_CHANNEL_NAMES_TO_NUMBER = {'1': 1, '2': 2, '3': 3, '4': 4, + '5': 5, '6': 6, '7': 7, '8': 8, + '9': 9, '10': 10, '11': 11, '12': 12, + '13': 13, '14': 14, '15': 15, '16': 16, + '17': 17, '18': 18, '19': 19} + +AWS_CHANNEL_NAMES = list(AWS_CHANNEL_NAMES_TO_NUMBER.keys()) +AWS_CHANNELS = set(AWS_CHANNEL_NAMES) + + +def get_channel_index_from_name(chname): + """Get the AWS channel index from the channel name.""" + chindex = AWS_CHANNEL_NAMES_TO_NUMBER.get(chname, 0) - 1 + if 0 <= chindex < 19: + return chindex + raise AttributeError(f"Channel name {chname!r} not supported") + + +def _get_aux_data_name_from_dsname(dsname): + aux_data_name = [key for key in AUX_DATA.keys() if key in dsname] + if len(aux_data_name) > 0: + return aux_data_name[0] + + +class AWSL1BFile(NetCDF4FileHandler): + """Class implementing the AWS L1b Filehandler. + + This class implements the ESA Arctic Weather Satellite (AWS) Level-1b + NetCDF reader. It is designed to be used through the :class:`~satpy.Scene` + class using the :mod:`~satpy.Scene.load` method with the reader + ``"aws_l1b_nc"``. 
+ + """ + + _platform_name_translate = { + "": "AWS", + } + + def __init__(self, filename, filename_info, filetype_info): + """Initialize file handler.""" + xarray_kwargs = {'decode_times': False} + super().__init__(filename, filename_info, + filetype_info, + xarray_kwargs=xarray_kwargs, + cache_var_size=10000, + cache_handle=True) + logger.debug('Reading: {}'.format(self.filename)) + logger.debug('Start: {}'.format(self.start_time)) + logger.debug('End: {}'.format(self.end_time)) + + self._cache = {} + + self._channel_names = AWS_CHANNEL_NAMES + + @property + def start_time(self): + """Get start time.""" + try: + return datetime.strptime(self['/attr/sensing_start_time_utc'], + '%Y-%m-%d %H:%M:%S.%f') + except ValueError: + return DUMMY_STARTTIME + + @property + def end_time(self): + """Get end time.""" + try: + return datetime.strptime(self['/attr/sensing_end_time_utc'], + '%Y-%m-%d %H:%M:%S.%f') + except ValueError: + return DUMMY_ENDTIME + + @property + def sensor(self): + """Get the sensor name.""" + return self['/attr/instrument'] + + @property + def platform_name(self): + """Get the platform name.""" + return self._platform_name_translate.get(self['/attr/Spacecraft']) + + @property + def sub_satellite_longitude_start(self): + """Get the longitude of sub-satellite point at start of the product.""" + return self['status/satellite/subsat_longitude_start'].data.item() + + @property + def sub_satellite_latitude_start(self): + """Get the latitude of sub-satellite point at start of the product.""" + return self['status/satellite/subsat_latitude_start'].data.item() + + @property + def sub_satellite_longitude_end(self): + """Get the longitude of sub-satellite point at end of the product.""" + return self['status/satellite/subsat_longitude_end'].data.item() + + @property + def sub_satellite_latitude_end(self): + """Get the latitude of sub-satellite point at end of the product.""" + return self['status/satellite/subsat_latitude_end'].data.item() + + def 
get_dataset(self, dataset_id, dataset_info): + """Get dataset using file_key in dataset_info.""" + logger.debug('Reading {} from {}'.format(dataset_id['name'], self.filename)) + + var_key = dataset_info['file_key'] + # if _get_aux_data_name_from_dsname(dataset_id['name']) is not None: + if _get_aux_data_name_from_dsname(var_key) is not None: + nhorn = dataset_info['n_horns'] + variable = self._get_dataset_aux_data(var_key, nhorn) # (dataset_id['name']) + elif dataset_id['name'] in AWS_CHANNELS: + logger.debug(f'Reading in file to get dataset with key {var_key}.') + variable = self._get_dataset_channel(dataset_id, dataset_info) + else: + logger.warning(f'Could not find key {var_key} in NetCDF file, no valid Dataset created') # noqa: E501 + return None + + variable = self._manage_attributes(variable, dataset_info) + variable = self._drop_coords(variable) + variable = self._standardize_dims(variable) + + if dataset_info['standard_name'] in ['longitude', 'latitude']: + lon_or_lat = xr.DataArray( + variable.data[:, :], + attrs=variable.attrs, + dims=(variable.dims[0], variable.dims[1]) + ) + variable = lon_or_lat + + return variable + + @staticmethod + def _standardize_dims(variable): + """Standardize dims to y, x.""" + if 'n_scans' in variable.dims: + variable = variable.rename({'n_fovs': 'x', 'n_scans': 'y'}) + if variable.dims[0] == 'x': + variable = variable.transpose('y', 'x') + return variable + + @staticmethod + def _drop_coords(variable): + """Drop coords that are not in dims.""" + for coord in variable.coords: + if coord not in variable.dims: + variable = variable.drop_vars(coord) + return variable + + def _manage_attributes(self, variable, dataset_info): + """Manage attributes of the dataset.""" + variable.attrs.setdefault('units', None) + variable.attrs.update(dataset_info) + variable.attrs.update(self._get_global_attributes()) + return variable + + def _get_dataset_channel(self, key, dataset_info): + """Load dataset corresponding to channel measurement. 
+ + Load a dataset when the key refers to a measurand, whether uncalibrated + (counts) or calibrated in terms of brightness temperature or radiance. + + """ + # Get the dataset + # Get metadata for given dataset + grp_pth = dataset_info['file_key'] + channel_index = get_channel_index_from_name(key['name']) + + # data = self[grp_pth][:, :, channel_index] + data = self[grp_pth][channel_index, :, :] + data = data.transpose() + # This transposition should not be needed were the format following the EPS-SG format spec!! + attrs = data.attrs.copy() + + fv = attrs.pop( + "FillValue", + default_fillvals.get(data.dtype.str[1:], np.nan)) + vr = attrs.get("valid_range", [-np.inf, np.inf]) + + if key['calibration'] == "counts": + attrs["_FillValue"] = fv + nfv = fv + else: + nfv = np.nan + data = data.where(data >= vr[0], nfv) + data = data.where(data <= vr[1], nfv) + + # Manage the attributes of the dataset + data.attrs.setdefault('units', None) + data.attrs.update(dataset_info) + + dataset_attrs = getattr(data, 'attrs', {}) + dataset_attrs.update(dataset_info) + dataset_attrs.update({ + "platform_name": self.platform_name, + "sensor": self.sensor, + "orbital_parameters": {'sub_satellite_latitude_start': self.sub_satellite_latitude_start, + 'sub_satellite_longitude_start': self.sub_satellite_longitude_start, + 'sub_satellite_latitude_end': self.sub_satellite_latitude_end, + 'sub_satellite_longitude_end': self.sub_satellite_longitude_end}, + }) + + try: + dataset_attrs.update(key.to_dict()) + except AttributeError: + dataset_attrs.update(key) + + data.attrs.update(dataset_attrs) + return data + + def _get_dataset_aux_data(self, dsname, nhorn): + """Get the auxiliary data arrays using the index map.""" + # Geolocation and navigation data: + if dsname in ['aws_lat', 'aws_lon', + 'solar_azimuth', 'solar_zenith', + 'satellite_azimuth', 'satellite_zenith', + 'surface_type', 'terrain_elevation']: + var_key = AUX_DATA.get(dsname) + else: + raise NotImplementedError(f"Dataset 
{dsname!r} not supported!") + + try: + variable = self[var_key][nhorn, :, :] + except KeyError: + logger.exception("Could not find key %s in NetCDF file, no valid Dataset created", var_key) + raise + + # Scale the data: + if 'scale_factor' in variable.attrs and 'add_offset' in variable.attrs: + missing_value = variable.attrs['missing_value'] + variable.data = da.where(variable.data == missing_value, np.nan, + variable.data * variable.attrs['scale_factor'] + variable.attrs['add_offset']) + + return variable + + def _get_global_attributes(self): + """Create a dictionary of global attributes.""" + return { + 'filename': self.filename, + 'start_time': self.start_time, + 'end_time': self.end_time, + 'spacecraft_name': self.platform_name, + 'sensor': self.sensor, + 'filename_start_time': self.filename_info['start_time'], + 'filename_end_time': self.filename_info['end_time'], + 'platform_name': self.platform_name, + 'quality_group': self._get_quality_attributes(), + } + + def _get_quality_attributes(self): + """Get quality attributes.""" + quality_group = self['quality'] + quality_dict = {} + for key in quality_group: + # Add the values (as Numpy array) of each variable in the group + # where possible + try: + quality_dict[key] = quality_group[key].values + except ValueError: + quality_dict[key] = None + + quality_dict.update(quality_group.attrs) + return quality_dict From 4d6fee9f788a6d8b0bb5a2a36ae2171565d4ea22 Mon Sep 17 00:00:00 2001 From: "Adam.Dybbroe" Date: Fri, 22 Sep 2023 10:30:41 +0200 Subject: [PATCH 008/340] Fix longitudes to be between -180 and 180 and other fixes for geo-location and angles Signed-off-by: Adam.Dybbroe --- satpy/etc/readers/aws_l1b_nc.yaml | 125 ++++++++++++++++++++---------- satpy/readers/aws_l1b.py | 30 ++++--- 2 files changed, 106 insertions(+), 49 deletions(-) diff --git a/satpy/etc/readers/aws_l1b_nc.yaml b/satpy/etc/readers/aws_l1b_nc.yaml index 86ed11bc16..810ad3eb5a 100644 --- a/satpy/etc/readers/aws_l1b_nc.yaml +++
b/satpy/etc/readers/aws_l1b_nc.yaml @@ -322,7 +322,7 @@ datasets: lon_horn_2: name: lon_horn_2 file_type: aws_l1b_nc - file_key: data/navigation/aws_lon + file_key: aws_lon standard_name: longitude units: degrees_east n_horns: 1 @@ -330,7 +330,7 @@ datasets: lat_horn_2: name: lat_horn_2 file_type: aws_l1b_nc - file_key: data/navigation/aws_lat + file_key: aws_lat standard_name: latitude units: degrees_north n_horns: 1 @@ -338,7 +338,7 @@ datasets: lon_horn_3: name: lon_horn_3 file_type: aws_l1b_nc - file_key: data/navigation/aws_lon + file_key: aws_lon standard_name: longitude units: degrees_east n_horns: 2 @@ -346,7 +346,7 @@ datasets: lat_horn_3: name: lat_horn_3 file_type: aws_l1b_nc - file_key: data/navigation/aws_lat + file_key: aws_lat standard_name: latitude units: degrees_north n_horns: 2 @@ -354,7 +354,7 @@ datasets: lon_horn_4: name: lon_horn_4 file_type: aws_l1b_nc - file_key: data/navigation/aws_lon + file_key: aws_lon standard_name: longitude units: degrees_east n_horns: 3 @@ -362,64 +362,109 @@ datasets: lat_horn_4: name: lat_horn_4 file_type: aws_l1b_nc - file_key: data/navigation/aws_lat + file_key: aws_lat standard_name: latitude units: degrees_north n_horns: 3 - aws_lat: - name: aws_lat - file_type: aws_l1b_nc - file_key: data/navigation/aws_lat - standard_name: latitude - units: degrees_north + # aws_lat: + # name: aws_lat + # file_type: aws_l1b_nc + # file_key: data/navigation/aws_lat + # standard_name: latitude + # units: degrees_north - aws_lon: - name: aws_lon - file_type: aws_l1b_nc - file_key: data/navigation/aws_lon - standard_name: longitude - units: degrees_east + # aws_lon: + # name: aws_lon + # file_type: aws_l1b_nc + # file_key: data/navigation/aws_lon + # standard_name: longitude + # units: degrees_east # --- Navigation data --- - solar_azimuth: - name: solar_azimuth - standard_name: solar_azimuth_angle + solar_azimuth_horn_1: + name: solar_azimuth_horn_1 file_type: aws_l1b_nc file_key: data/navigation/aws_solar_azimuth_angle + 
standard_name: solar_azimuth_angle + n_horns: 0 coordinates: - - aws_lon - - aws_lat - solar_zenith: - name: solar_zenith - standard_name: solar_zenith_angle + - lon_horn_1 + - lat_horn_1 + + # solar_zenith: + # name: solar_zenith + # standard_name: solar_zenith_angle + # file_type: aws_l1b_nc + # file_key: data/navigation/aws_solar_zenith_angle + # coordinates: + # - aws_lon + # - aws_lat + # satellite_azimuth: + # name: satellite_azimuth + # standard_name: satellite_azimuth_angle + # file_type: aws_l1b_nc + # file_key: data/navigation/aws_satellite_azimuth_angle + # coordinates: + # - aws_lon + # - aws_lat + # satellite_zenith: + # name: satellite_zenith + # standard_name: satellite_zenith_angle + # file_type: aws_l1b_nc + # file_key: data/navigation/aws_satellite_zenith_angle + # coordinates: + # - aws_lon + # - aws_lat + + satellite_zenith_horn_1: + name: satellite_zenith_horn_1 file_type: aws_l1b_nc - file_key: data/navigation/aws_solar_zenith_angle + file_key: data/navigation/aws_satellite_zenith_angle + standard_name: satellite_zenith_angle + n_horns: 0 coordinates: - - aws_lon - - aws_lat - satellite_azimuth: - name: satellite_azimuth - standard_name: satellite_azimuth_angle + - lon_horn_1 + - lat_horn_1 + + satellite_zenith_horn_2: + name: satellite_zenith_horn_2 file_type: aws_l1b_nc - file_key: data/navigation/aws_satellite_azimuth_angle + file_key: data/navigation/aws_satellite_zenith_angle + standard_name: satellite_zenith_angle + n_horns: 1 coordinates: - - aws_lon - - aws_lat - satellite_zenith: - name: satellite_zenith + - lon_horn_2 + - lat_horn_2 + + satellite_zenith_horn_3: + name: satellite_zenith_horn_3 + file_type: aws_l1b_nc + file_key: data/navigation/aws_satellite_zenith_angle standard_name: satellite_zenith_angle + n_horns: 2 + coordinates: + - lon_horn_3 + - lat_horn_3 + + satellite_zenith_horn_4: + name: satellite_zenith_horn_4 file_type: aws_l1b_nc file_key: data/navigation/aws_satellite_zenith_angle + standard_name: 
satellite_zenith_angle + n_horns: 3 coordinates: - - aws_lon - - aws_lat + - lon_horn_4 + - lat_horn_4 file_types: aws_l1b_nc: # W_XX-OHB-Unknown,SAT,1-AWS-1B-RAD_C_OHB_20230707124607_G_D_20220621090100_20220621090618_T_B____.nc # W_XX-OHB-Stockholm,SAT,AWS1-MWR-1B-RAD_C_OHB_20230823161321_G_D_20240115111111_20240115125434_T_B____.nc + # W_XX-OHB-Stockholm,SAT,AWS1-MWR-1B-RAD_C_OHB_20230816120142_G_D_20240115111111_20240115125434_T_B____radsim.nc file_reader: !!python/name:satpy.readers.aws_l1b.AWSL1BFile - file_patterns: ['W_XX-OHB-Stockholm,SAT,{platform_name}-MWR-1B-RAD_C_OHB_{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_T_B____.nc'] + file_patterns: [ + 'W_XX-OHB-Stockholm,SAT,{platform_name}-MWR-1B-RAD_C_OHB_{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_T_B____.nc', + 'W_XX-OHB-Stockholm,SAT,{platform_name}-MWR-1B-RAD_C_OHB_{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_T_B____radsim.nc'] diff --git a/satpy/readers/aws_l1b.py b/satpy/readers/aws_l1b.py index 02df6734fc..aa34852f09 100644 --- a/satpy/readers/aws_l1b.py +++ b/satpy/readers/aws_l1b.py @@ -36,14 +36,16 @@ AUX_DATA = { 'scantime_utc': 'data/navigation/aws_scantime_utc', - 'solar_azimuth': 'data/navigation/aws_solar_azimuth_angle', - 'solar_zenith': 'data/navigation/aws_solar_zenith_angle', - 'satellite_azimuth': 'data/navigation/aws_satellite_azimuth_angle', - 'satellite_zenith': 'data/navigation/aws_satellite_zenith_angle', + 'solar_azimuth_angle': 'data/navigation/aws_solar_azimuth_angle', + 'solar_zenith_angle': 'data/navigation/aws_solar_zenith_angle', + 'satellite_azimuth_angle': 'data/navigation/aws_satellite_azimuth_angle', + 'satellite_zenith_angle': 'data/navigation/aws_satellite_zenith_angle', 'surface_type': 'data/navigation/aws_surface_type', 'terrain_elevation': 'data/navigation/aws_terrain_elevation', 'aws_lat': 'data/navigation/aws_lat', 'aws_lon': 
'data/navigation/aws_lon', + 'latitude': 'data/navigation/aws_lat', + 'longitude': 'data/navigation/aws_lon', } AWS_CHANNEL_NAMES_TO_NUMBER = {'1': 1, '2': 2, '3': 3, '4': 4, @@ -156,7 +158,10 @@ def get_dataset(self, dataset_id, dataset_info): # if _get_aux_data_name_from_dsname(dataset_id['name']) is not None: if _get_aux_data_name_from_dsname(var_key) is not None: nhorn = dataset_info['n_horns'] - variable = self._get_dataset_aux_data(var_key, nhorn) # (dataset_id['name']) + standard_name = dataset_info['standard_name'] + + # variable = self._get_dataset_aux_data(var_key, nhorn) # (dataset_id['name']) + variable = self._get_dataset_aux_data(standard_name, nhorn) # (dataset_id['name']) elif dataset_id['name'] in AWS_CHANNELS: logger.debug(f'Reading in file to get dataset with key {var_key}.') variable = self._get_dataset_channel(dataset_id, dataset_info) @@ -169,8 +174,11 @@ def get_dataset(self, dataset_id, dataset_info): variable = self._standardize_dims(variable) if dataset_info['standard_name'] in ['longitude', 'latitude']: + data = variable.data[:, :] + if dataset_info['standard_name'] in ['longitude']: + data = self._scale_lons(data) lon_or_lat = xr.DataArray( - variable.data[:, :], + data, attrs=variable.attrs, dims=(variable.dims[0], variable.dims[1]) ) @@ -178,6 +186,10 @@ def get_dataset(self, dataset_id, dataset_info): return variable + @staticmethod + def _scale_lons(lons): + return xr.where(lons > 180, lons - 360, lons) + @staticmethod def _standardize_dims(variable): """Standardize dims to y, x.""" @@ -259,9 +271,9 @@ def _get_dataset_channel(self, key, dataset_info): def _get_dataset_aux_data(self, dsname, nhorn): """Get the auxiliary data arrays using the index map.""" # Geolocation and navigation data: - if dsname in ['aws_lat', 'aws_lon', - 'solar_azimuth', 'solar_zenith', - 'satellite_azimuth', 'satellite_zenith', + if dsname in ['latitude', 'longitude', + 'solar_azimuth_angle', 'solar_zenith_angle', + 'satellite_azimuth_angle', 
'satellite_zenith_angle', 'surface_type', 'terrain_elevation']: var_key = AUX_DATA.get(dsname) else: From 5c3a7d3703d100cae41f51441861e4ce4924201d Mon Sep 17 00:00:00 2001 From: "Adam.Dybbroe" Date: Fri, 22 Sep 2023 10:33:06 +0200 Subject: [PATCH 009/340] Add some first basic humidity RGB recipes Signed-off-by: Adam.Dybbroe --- satpy/etc/composites/aws.yaml | 18 ++++++++++++++++++ satpy/etc/enhancements/aws.yaml | 29 +++++++++++++++++++++++++++++ 2 files changed, 47 insertions(+) create mode 100644 satpy/etc/composites/aws.yaml create mode 100644 satpy/etc/enhancements/aws.yaml diff --git a/satpy/etc/composites/aws.yaml b/satpy/etc/composites/aws.yaml new file mode 100644 index 0000000000..77d2014794 --- /dev/null +++ b/satpy/etc/composites/aws.yaml @@ -0,0 +1,18 @@ +sensor_name: aws + +composites: + mw183_humidity: + compositor: !!python/name:satpy.composites.RGBCompositor + prerequisites: + - name: '15' + - name: '13' + - name: '11' + standard_name: mw183_humidity + + mw183_humidity_surface: + compositor: !!python/name:satpy.composites.RGBCompositor + prerequisites: + - name: '9' + - name: '10' + - name: '15' + standard_name: mw183_humidity_surface diff --git a/satpy/etc/enhancements/aws.yaml b/satpy/etc/enhancements/aws.yaml new file mode 100644 index 0000000000..c997a11350 --- /dev/null +++ b/satpy/etc/enhancements/aws.yaml @@ -0,0 +1,29 @@ +enhancements: + + mw183_humidity: + standard_name: mw183_humidity + operations: + - name: inverse + method: !!python/name:satpy.enhancements.invert + args: + - [true, true, true] + - name: stretch + method: !!python/name:satpy.enhancements.stretch + kwargs: {stretch: linear} + - name: gamma + method: !!python/name:satpy.enhancements.gamma + kwargs: {gamma: 1.2} + + mw183_humidity_surface: + standard_name: mw183_humidity_surface + operations: + - name: inverse + method: !!python/name:satpy.enhancements.invert + args: + - [true, true, true] + - name: stretch + method: !!python/name:satpy.enhancements.stretch + kwargs: 
{stretch: linear} + - name: gamma + method: !!python/name:satpy.enhancements.gamma + kwargs: {gamma: 1.2} From 8f0d1bf006afdaf3a1ca29095078440b08544502 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Mon, 6 Nov 2023 20:56:26 +0100 Subject: [PATCH 010/340] Start working on tests for aws l1b --- satpy/etc/readers/aws_l1b_nc.yaml | 164 +++++-- satpy/readers/aws_l1b.py | 558 ++++++++++++----------- satpy/tests/reader_tests/test_aws_l1b.py | 128 ++++++ 3 files changed, 546 insertions(+), 304 deletions(-) create mode 100644 satpy/tests/reader_tests/test_aws_l1b.py diff --git a/satpy/etc/readers/aws_l1b_nc.yaml b/satpy/etc/readers/aws_l1b_nc.yaml index 810ad3eb5a..0989579a8f 100644 --- a/satpy/etc/readers/aws_l1b_nc.yaml +++ b/satpy/etc/readers/aws_l1b_nc.yaml @@ -306,80 +306,67 @@ datasets: lon_horn_1: name: lon_horn_1 file_type: aws_l1b_nc - file_key: aws_lon standard_name: longitude units: degrees_east n_horns: 0 + file_key: data/navigation/aws_lon + lat_horn_1: name: lat_horn_1 file_type: aws_l1b_nc - file_key: aws_lat standard_name: latitude units: degrees_north n_horns: 0 + file_key: data/navigation/aws_lat lon_horn_2: name: lon_horn_2 file_type: aws_l1b_nc - file_key: aws_lon standard_name: longitude units: degrees_east n_horns: 1 + file_key: data/navigation/aws_lon lat_horn_2: name: lat_horn_2 file_type: aws_l1b_nc - file_key: aws_lat standard_name: latitude units: degrees_north n_horns: 1 + file_key: data/navigation/aws_lat lon_horn_3: name: lon_horn_3 file_type: aws_l1b_nc - file_key: aws_lon standard_name: longitude units: degrees_east n_horns: 2 + file_key: data/navigation/aws_lon lat_horn_3: name: lat_horn_3 file_type: aws_l1b_nc - file_key: aws_lat standard_name: latitude units: degrees_north n_horns: 2 + file_key: data/navigation/aws_lat lon_horn_4: name: lon_horn_4 file_type: aws_l1b_nc - file_key: aws_lon standard_name: longitude units: degrees_east n_horns: 3 + file_key: data/navigation/aws_lon lat_horn_4: name: lat_horn_4 file_type: aws_l1b_nc - 
file_key: aws_lat standard_name: latitude units: degrees_north n_horns: 3 - - # aws_lat: - # name: aws_lat - # file_type: aws_l1b_nc - # file_key: data/navigation/aws_lat - # standard_name: latitude - # units: degrees_north - - # aws_lon: - # name: aws_lon - # file_type: aws_l1b_nc - # file_key: data/navigation/aws_lon - # standard_name: longitude - # units: degrees_east + file_key: data/navigation/aws_lat # --- Navigation data --- @@ -393,30 +380,115 @@ datasets: - lon_horn_1 - lat_horn_1 - # solar_zenith: - # name: solar_zenith - # standard_name: solar_zenith_angle - # file_type: aws_l1b_nc - # file_key: data/navigation/aws_solar_zenith_angle - # coordinates: - # - aws_lon - # - aws_lat - # satellite_azimuth: - # name: satellite_azimuth - # standard_name: satellite_azimuth_angle - # file_type: aws_l1b_nc - # file_key: data/navigation/aws_satellite_azimuth_angle - # coordinates: - # - aws_lon - # - aws_lat - # satellite_zenith: - # name: satellite_zenith - # standard_name: satellite_zenith_angle - # file_type: aws_l1b_nc - # file_key: data/navigation/aws_satellite_zenith_angle - # coordinates: - # - aws_lon - # - aws_lat + solar_azimuth_horn_2: + name: solar_azimuth_horn_2 + file_type: aws_l1b_nc + file_key: data/navigation/aws_solar_azimuth_angle + standard_name: solar_azimuth_angle + n_horns: 1 + coordinates: + - lon_horn_1 + - lat_horn_1 + + solar_azimuth_horn_3: + name: solar_azimuth_horn_3 + file_type: aws_l1b_nc + file_key: data/navigation/aws_solar_azimuth_angle + standard_name: solar_azimuth_angle + n_horns: 2 + coordinates: + - lon_horn_1 + - lat_horn_1 + + solar_azimuth_horn_4: + name: solar_azimuth_horn_4 + file_type: aws_l1b_nc + file_key: data/navigation/aws_solar_azimuth_angle + standard_name: solar_azimuth_angle + n_horns: 3 + coordinates: + - lon_horn_1 + - lat_horn_1 + + solar_zenith_horn_1: + name: solar_zenith_horn_1 + file_type: aws_l1b_nc + file_key: data/navigation/aws_solar_zenith_angle + standard_name: solar_zenith_angle + n_horns: 0 + 
coordinates: + - lon_horn_1 + - lat_horn_1 + + solar_zenith_horn_2: + name: solar_zenith_horn_2 + file_type: aws_l1b_nc + file_key: data/navigation/aws_solar_zenith_angle + standard_name: solar_zenith_angle + n_horns: 1 + coordinates: + - lon_horn_1 + - lat_horn_1 + + solar_zenith_horn_3: + name: solar_zenith_horn_3 + file_type: aws_l1b_nc + file_key: data/navigation/aws_solar_zenith_angle + standard_name: solar_zenith_angle + n_horns: 2 + coordinates: + - lon_horn_1 + - lat_horn_1 + + solar_zenith_horn_4: + name: solar_zenith_horn_4 + file_type: aws_l1b_nc + file_key: data/navigation/aws_solar_zenith_angle + standard_name: solar_zenith_angle + n_horns: 3 + coordinates: + - lon_horn_1 + - lat_horn_1 + + satellite_azimuth_horn_1: + name: satellite_azimuth_horn_1 + file_type: aws_l1b_nc + file_key: data/navigation/aws_satellite_azimuth_angle + standard_name: satellite_azimuth_angle + n_horns: 0 + coordinates: + - lon_horn_1 + - lat_horn_1 + + satellite_azimuth_horn_2: + name: satellite_azimuth_horn_2 + file_type: aws_l1b_nc + file_key: data/navigation/aws_satellite_azimuth_angle + standard_name: satellite_azimuth_angle + n_horns: 1 + coordinates: + - lon_horn_2 + - lat_horn_2 + + satellite_azimuth_horn_3: + name: satellite_azimuth_horn_3 + file_type: aws_l1b_nc + file_key: data/navigation/aws_satellite_azimuth_angle + standard_name: satellite_azimuth_angle + n_horns: 2 + coordinates: + - lon_horn_3 + - lat_horn_3 + + satellite_azimuth_horn_4: + name: satellite_azimuth_horn_4 + file_type: aws_l1b_nc + file_key: data/navigation/aws_satellite_azimuth_angle + standard_name: satellite_azimuth_angle + n_horns: 3 + coordinates: + - lon_horn_4 + - lat_horn_4 satellite_zenith_horn_1: name: satellite_zenith_horn_1 diff --git a/satpy/readers/aws_l1b.py b/satpy/readers/aws_l1b.py index aa34852f09..aef8df1a5e 100644 --- a/satpy/readers/aws_l1b.py +++ b/satpy/readers/aws_l1b.py @@ -18,36 +18,41 @@ """ import logging -from datetime import datetime -import dask.array as da -import 
numpy as np +# import dask.array as da +# import numpy as np import xarray as xr -from netCDF4 import default_fillvals from .netcdf_utils import NetCDF4FileHandler -logger = logging.getLogger(__name__) +# from datetime import datetime + +# from netCDF4 import default_fillvals -DUMMY_STARTTIME = datetime(2023, 7, 7, 12, 0) -DUMMY_ENDTIME = datetime(2023, 7, 7, 12, 10) -# dict containing all available auxiliary data parameters to be read using the index map. Keys are the -# parameter name and values are the paths to the variable inside the netcdf -AUX_DATA = { - 'scantime_utc': 'data/navigation/aws_scantime_utc', - 'solar_azimuth_angle': 'data/navigation/aws_solar_azimuth_angle', - 'solar_zenith_angle': 'data/navigation/aws_solar_zenith_angle', - 'satellite_azimuth_angle': 'data/navigation/aws_satellite_azimuth_angle', - 'satellite_zenith_angle': 'data/navigation/aws_satellite_zenith_angle', - 'surface_type': 'data/navigation/aws_surface_type', - 'terrain_elevation': 'data/navigation/aws_terrain_elevation', - 'aws_lat': 'data/navigation/aws_lat', - 'aws_lon': 'data/navigation/aws_lon', - 'latitude': 'data/navigation/aws_lat', - 'longitude': 'data/navigation/aws_lon', -} +logger = logging.getLogger(__name__) +DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S.%f" + +# DUMMY_STARTTIME = datetime(2023, 7, 7, 12, 0) +# DUMMY_ENDTIME = datetime(2023, 7, 7, 12, 10) +# # dict containing all available auxiliary data parameters to be read using the index map. 
Keys are the +# # parameter name and values are the paths to the variable inside the netcdf +# +# AUX_DATA = { +# 'scantime_utc': 'data/navigation/aws_scantime_utc', +# 'solar_azimuth_angle': 'data/navigation/aws_solar_azimuth_angle', +# 'solar_zenith_angle': 'data/navigation/aws_solar_zenith_angle', +# 'satellite_azimuth_angle': 'data/navigation/aws_satellite_azimuth_angle', +# 'satellite_zenith_angle': 'data/navigation/aws_satellite_zenith_angle', +# 'surface_type': 'data/navigation/aws_surface_type', +# 'terrain_elevation': 'data/navigation/aws_terrain_elevation', +# 'aws_lat': 'data/navigation/aws_lat', +# 'aws_lon': 'data/navigation/aws_lon', +# 'latitude': 'data/navigation/aws_lat', +# 'longitude': 'data/navigation/aws_lon', +# } +# AWS_CHANNEL_NAMES_TO_NUMBER = {'1': 1, '2': 2, '3': 3, '4': 4, '5': 5, '6': 6, '7': 7, '8': 8, '9': 9, '10': 10, '11': 11, '12': 12, @@ -55,21 +60,23 @@ '17': 17, '18': 18, '19': 19} AWS_CHANNEL_NAMES = list(AWS_CHANNEL_NAMES_TO_NUMBER.keys()) -AWS_CHANNELS = set(AWS_CHANNEL_NAMES) - - -def get_channel_index_from_name(chname): - """Get the AWS channel index from the channel name.""" - chindex = AWS_CHANNEL_NAMES_TO_NUMBER.get(chname, 0) - 1 - if 0 <= chindex < 19: - return chindex - raise AttributeError(f"Channel name {chname!r} not supported") - - -def _get_aux_data_name_from_dsname(dsname): - aux_data_name = [key for key in AUX_DATA.keys() if key in dsname] - if len(aux_data_name) > 0: - return aux_data_name[0] +# AWS_CHANNELS = set(AWS_CHANNEL_NAMES) +# +# +# def get_channel_index_from_name(chname): +# """Get the AWS channel index from the channel name.""" +# chindex = AWS_CHANNEL_NAMES_TO_NUMBER.get(chname, 0) - 1 +# if 0 <= chindex < 19: +# return chindex +# raise AttributeError(f"Channel name {chname!r} not supported") +# +# +# def _get_aux_data_name_from_dsname(dsname): +# aux_data_name = [key for key in AUX_DATA.keys() if key in dsname] +# if len(aux_data_name) > 0: +# return aux_data_name[0] +# +# class 
AWSL1BFile(NetCDF4FileHandler): @@ -82,43 +89,20 @@ class using the :mod:`~satpy.Scene.load` method with the reader """ - _platform_name_translate = { - "": "AWS", - } - - def __init__(self, filename, filename_info, filetype_info): - """Initialize file handler.""" - xarray_kwargs = {'decode_times': False} - super().__init__(filename, filename_info, - filetype_info, - xarray_kwargs=xarray_kwargs, - cache_var_size=10000, - cache_handle=True) - logger.debug('Reading: {}'.format(self.filename)) - logger.debug('Start: {}'.format(self.start_time)) - logger.debug('End: {}'.format(self.end_time)) - - self._cache = {} - - self._channel_names = AWS_CHANNEL_NAMES + def __init__(self, filename, filename_info, filetype_info, auto_maskandscale=True): + """Initialize the handler.""" + super().__init__(filename, filename_info, filetype_info) + self.filename_info = filename_info @property def start_time(self): - """Get start time.""" - try: - return datetime.strptime(self['/attr/sensing_start_time_utc'], - '%Y-%m-%d %H:%M:%S.%f') - except ValueError: - return DUMMY_STARTTIME + """Get the start time.""" + return self.filename_info["start_time"] @property def end_time(self): - """Get end time.""" - try: - return datetime.strptime(self['/attr/sensing_end_time_utc'], - '%Y-%m-%d %H:%M:%S.%f') - except ValueError: - return DUMMY_ENDTIME + """Get the end time.""" + return self.filename_info["end_time"] @property def sensor(self): @@ -128,196 +112,254 @@ def sensor(self): @property def platform_name(self): """Get the platform name.""" - return self._platform_name_translate.get(self['/attr/Spacecraft']) - - @property - def sub_satellite_longitude_start(self): - """Get the longitude of sub-satellite point at start of the product.""" - return self['status/satellite/subsat_longitude_start'].data.item() - - @property - def sub_satellite_latitude_start(self): - """Get the latitude of sub-satellite point at start of the product.""" - return 
self['status/satellite/subsat_latitude_start'].data.item() - - @property - def sub_satellite_longitude_end(self): - """Get the longitude of sub-satellite point at end of the product.""" - return self['status/satellite/subsat_longitude_end'].data.item() - - @property - def sub_satellite_latitude_end(self): - """Get the latitude of sub-satellite point at end of the product.""" - return self['status/satellite/subsat_latitude_end'].data.item() + return self.filename_info["platform_name"] def get_dataset(self, dataset_id, dataset_info): - """Get dataset using file_key in dataset_info.""" - logger.debug('Reading {} from {}'.format(dataset_id['name'], self.filename)) - - var_key = dataset_info['file_key'] - # if _get_aux_data_name_from_dsname(dataset_id['name']) is not None: - if _get_aux_data_name_from_dsname(var_key) is not None: - nhorn = dataset_info['n_horns'] - standard_name = dataset_info['standard_name'] - - # variable = self._get_dataset_aux_data(var_key, nhorn) # (dataset_id['name']) - variable = self._get_dataset_aux_data(standard_name, nhorn) # (dataset_id['name']) - elif dataset_id['name'] in AWS_CHANNELS: - logger.debug(f'Reading in file to get dataset with key {var_key}.') - variable = self._get_dataset_channel(dataset_id, dataset_info) - else: - logger.warning(f'Could not find key {var_key} in NetCDF file, no valid Dataset created') # noqa: E501 - return None - - variable = self._manage_attributes(variable, dataset_info) - variable = self._drop_coords(variable) - variable = self._standardize_dims(variable) - - if dataset_info['standard_name'] in ['longitude', 'latitude']: - data = variable.data[:, :] - if dataset_info['standard_name'] in ['longitude']: - data = self._scale_lons(data) - lon_or_lat = xr.DataArray( - data, - attrs=variable.attrs, - dims=(variable.dims[0], variable.dims[1]) - ) - variable = lon_or_lat - - return variable - - @staticmethod - def _scale_lons(lons): - return xr.where(lons > 180, lons - 360, lons) - - @staticmethod - def 
_standardize_dims(variable): - """Standardize dims to y, x.""" - if 'n_scans' in variable.dims: - variable = variable.rename({'n_fovs': 'x', 'n_scans': 'y'}) - if variable.dims[0] == 'x': - variable = variable.transpose('y', 'x') - return variable - - @staticmethod - def _drop_coords(variable): - """Drop coords that are not in dims.""" - for coord in variable.coords: - if coord not in variable.dims: - variable = variable.drop_vars(coord) - return variable - - def _manage_attributes(self, variable, dataset_info): - """Manage attributes of the dataset.""" - variable.attrs.setdefault('units', None) - variable.attrs.update(dataset_info) - variable.attrs.update(self._get_global_attributes()) - return variable - - def _get_dataset_channel(self, key, dataset_info): - """Load dataset corresponding to channel measurement. - - Load a dataset when the key refers to a measurand, whether uncalibrated - (counts) or calibrated in terms of brightness temperature or radiance. - - """ - # Get the dataset - # Get metadata for given dataset - grp_pth = dataset_info['file_key'] - channel_index = get_channel_index_from_name(key['name']) - - # data = self[grp_pth][:, :, channel_index] - data = self[grp_pth][channel_index, :, :] - data = data.transpose() - # This transposition should not be needed were the format following the EPS-SG format spec!! 
- attrs = data.attrs.copy() - - fv = attrs.pop( - "FillValue", - default_fillvals.get(data.dtype.str[1:], np.nan)) - vr = attrs.get("valid_range", [-np.inf, np.inf]) - - if key['calibration'] == "counts": - attrs["_FillValue"] = fv - nfv = fv + """Get the data.""" + if dataset_id["name"] in AWS_CHANNEL_NAMES: + channel_data = self[dataset_info["file_key"]] + channel_data.coords["n_channels"] = AWS_CHANNEL_NAMES + data_array = channel_data.sel(n_channels=dataset_id["name"]) + elif (dataset_id["name"].startswith("lon") or dataset_id["name"].startswith("lat") + or dataset_id["name"].startswith("solar_azimuth") + or dataset_id["name"].startswith("solar_zenith") + or dataset_id["name"].startswith("satellite_azimuth") + or dataset_id["name"].startswith("satellite_zenith")): + channel_data = self[dataset_info["file_key"]] + channel_data.coords["n_geo_groups"] = [0, 1, 2, 3] + n_horn = dataset_info["n_horns"] + data_array = channel_data.sel(n_geo_groups=n_horn) else: - nfv = np.nan - data = data.where(data >= vr[0], nfv) - data = data.where(data <= vr[1], nfv) - - # Manage the attributes of the dataset - data.attrs.setdefault('units', None) - data.attrs.update(dataset_info) - - dataset_attrs = getattr(data, 'attrs', {}) - dataset_attrs.update(dataset_info) - dataset_attrs.update({ - "platform_name": self.platform_name, - "sensor": self.sensor, - "orbital_parameters": {'sub_satellite_latitude_start': self.sub_satellite_latitude_start, - 'sub_satellite_longitude_start': self.sub_satellite_longitude_start, - 'sub_satellite_latitude_end': self.sub_satellite_latitude_end, - 'sub_satellite_longitude_end': self.sub_satellite_longitude_end}, - }) - - try: - dataset_attrs.update(key.to_dict()) - except AttributeError: - dataset_attrs.update(key) - - data.attrs.update(dataset_attrs) - return data - - def _get_dataset_aux_data(self, dsname, nhorn): - """Get the auxiliary data arrays using the index map.""" - # Geolocation and navigation data: - if dsname in ['latitude', 'longitude', 
- 'solar_azimuth_angle', 'solar_zenith_angle', - 'satellite_azimuth_angle', 'satellite_zenith_angle', - 'surface_type', 'terrain_elevation']: - var_key = AUX_DATA.get(dsname) - else: - raise NotImplementedError(f"Dataset {dsname!r} not supported!") - - try: - variable = self[var_key][nhorn, :, :] - except KeyError: - logger.exception("Could not find key %s in NetCDF file, no valid Dataset created", var_key) - raise - - # Scale the data: - if 'scale_factor' in variable.attrs and 'add_offset' in variable.attrs: - missing_value = variable.attrs['missing_value'] - variable.data = da.where(variable.data == missing_value, np.nan, - variable.data * variable.attrs['scale_factor'] + variable.attrs['add_offset']) - - return variable - - def _get_global_attributes(self): - """Create a dictionary of global attributes.""" - return { - 'filename': self.filename, - 'start_time': self.start_time, - 'end_time': self.end_time, - 'spacecraft_name': self.platform_name, - 'sensor': self.sensor, - 'filename_start_time': self.filename_info['start_time'], - 'filename_end_time': self.filename_info['end_time'], - 'platform_name': self.platform_name, - 'quality_group': self._get_quality_attributes(), - } - - def _get_quality_attributes(self): - """Get quality attributes.""" - quality_group = self['quality'] - quality_dict = {} - for key in quality_group: - # Add the values (as Numpy array) of each variable in the group - # where possible - try: - quality_dict[key] = quality_group[key].values - except ValueError: - quality_dict[key] = None - - quality_dict.update(quality_group.attrs) - return quality_dict + raise NotImplementedError + + if "scale_factor" in data_array.attrs and "add_offset" in data_array.attrs: + with xr.set_options(keep_attrs=True): + data_array = data_array * data_array.attrs["scale_factor"] + data_array.attrs["add_offset"] + data_array.attrs.pop("scale_factor") + data_array.attrs.pop("add_offset") + # if "missing_value" in data_array.attrs: + # with 
xr.set_options(keep_attrs=True): + # data_array = data_array.where(data_array != data_array.attrs["missing_value"]) + return data_array + + +# class AWSL1BFile(NetCDF4FileHandler): +# """Class implementing the AWS L1b Filehandler. +# +# This class implements the ESA Arctic Weather Satellite (AWS) Level-1b +# NetCDF reader. It is designed to be used through the :class:`~satpy.Scene` +# class using the :mod:`~satpy.Scene.load` method with the reader +# ``"aws_l1b_nc"``. +# +# """ +# +# _platform_name_translate = { +# "": "AWS", +# } +# +# def __init__(self, filename, filename_info, filetype_info): +# """Initialize file handler.""" +# xarray_kwargs = {'decode_times': False} +# super().__init__(filename, filename_info, +# filetype_info, +# xarray_kwargs=xarray_kwargs, +# cache_var_size=10000, +# cache_handle=True) +# logger.debug('Reading: {}'.format(self.filename)) +# logger.debug('Start: {}'.format(self.start_time)) +# logger.debug('End: {}'.format(self.end_time)) +## +# self._channel_names = AWS_CHANNEL_NAMES + +# +# @property +# def sub_satellite_longitude_start(self): +# """Get the longitude of sub-satellite point at start of the product.""" +# return self['status/satellite/subsat_longitude_start'].data.item() +# +# @property +# def sub_satellite_latitude_start(self): +# """Get the latitude of sub-satellite point at start of the product.""" +# return self['status/satellite/subsat_latitude_start'].data.item() +# +# @property +# def sub_satellite_longitude_end(self): +# """Get the longitude of sub-satellite point at end of the product.""" +# return self['status/satellite/subsat_longitude_end'].data.item() +# +# @property +# def sub_satellite_latitude_end(self): +# """Get the latitude of sub-satellite point at end of the product.""" +# return self['status/satellite/subsat_latitude_end'].data.item() +# +# def get_dataset(self, dataset_id, dataset_info): +# """Get dataset using file_key in dataset_info.""" +# logger.debug('Reading {} from {}'.format(dataset_id['name'], 
self.filename)) +# +# var_key = dataset_info['file_key'] +# # if _get_aux_data_name_from_dsname(dataset_id['name']) is not None: +# if _get_aux_data_name_from_dsname(var_key) is not None: +# nhorn = dataset_info['n_horns'] +# standard_name = dataset_info['standard_name'] +# +# # variable = self._get_dataset_aux_data(var_key, nhorn) # (dataset_id['name']) +# variable = self._get_dataset_aux_data(standard_name, nhorn) # (dataset_id['name']) +# elif dataset_id['name'] in AWS_CHANNELS: +# logger.debug(f'Reading in file to get dataset with key {var_key}.') +# variable = self._get_dataset_channel(dataset_id, dataset_info) +# else: +# logger.warning(f'Could not find key {var_key} in NetCDF file, no valid Dataset created') # noqa: E501 +# return None +# +# variable = self._manage_attributes(variable, dataset_info) +# variable = self._drop_coords(variable) +# variable = self._standardize_dims(variable) +# +# if dataset_info['standard_name'] in ['longitude', 'latitude']: +# data = variable.data[:, :] +# if dataset_info['standard_name'] in ['longitude']: +# data = self._scale_lons(data) +# lon_or_lat = xr.DataArray( +# data, +# attrs=variable.attrs, +# dims=(variable.dims[0], variable.dims[1]) +# ) +# variable = lon_or_lat +# +# return variable +# +# @staticmethod +# def _scale_lons(lons): +# return xr.where(lons > 180, lons - 360, lons) +# +# @staticmethod +# def _standardize_dims(variable): +# """Standardize dims to y, x.""" +# if 'n_scans' in variable.dims: +# variable = variable.rename({'n_fovs': 'x', 'n_scans': 'y'}) +# if variable.dims[0] == 'x': +# variable = variable.transpose('y', 'x') +# return variable +# +# @staticmethod +# def _drop_coords(variable): +# """Drop coords that are not in dims.""" +# for coord in variable.coords: +# if coord not in variable.dims: +# variable = variable.drop_vars(coord) +# return variable +# +# def _manage_attributes(self, variable, dataset_info): +# """Manage attributes of the dataset.""" +# variable.attrs.setdefault('units', None) +# 
variable.attrs.update(dataset_info) +# variable.attrs.update(self._get_global_attributes()) +# return variable +# +# def _get_dataset_channel(self, key, dataset_info): +# """Load dataset corresponding to channel measurement. +# +# Load a dataset when the key refers to a measurand, whether uncalibrated +# (counts) or calibrated in terms of brightness temperature or radiance. +# +# """ +# # Get the dataset +# # Get metadata for given dataset +# grp_pth = dataset_info['file_key'] +# channel_index = get_channel_index_from_name(key['name']) +# +# # data = self[grp_pth][:, :, channel_index] +# data = self[grp_pth][channel_index, :, :] +# data = data.transpose() +# # This transposition should not be needed were the format following the EPS-SG format spec!! +# attrs = data.attrs.copy() +# +# fv = attrs.pop( +# "FillValue", +# default_fillvals.get(data.dtype.str[1:], np.nan)) +# vr = attrs.get("valid_range", [-np.inf, np.inf]) +# +# if key['calibration'] == "counts": +# attrs["_FillValue"] = fv +# nfv = fv +# else: +# nfv = np.nan +# data = data.where(data >= vr[0], nfv) +# data = data.where(data <= vr[1], nfv) +# +# # Manage the attributes of the dataset +# data.attrs.setdefault('units', None) +# data.attrs.update(dataset_info) +# +# dataset_attrs = getattr(data, 'attrs', {}) +# dataset_attrs.update(dataset_info) +# dataset_attrs.update({ +# "platform_name": self.platform_name, +# "sensor": self.sensor, +# "orbital_parameters": {'sub_satellite_latitude_start': self.sub_satellite_latitude_start, +# 'sub_satellite_longitude_start': self.sub_satellite_longitude_start, +# 'sub_satellite_latitude_end': self.sub_satellite_latitude_end, +# 'sub_satellite_longitude_end': self.sub_satellite_longitude_end}, +# }) +# +# try: +# dataset_attrs.update(key.to_dict()) +# except AttributeError: +# dataset_attrs.update(key) +# +# data.attrs.update(dataset_attrs) +# return data +# +# def _get_dataset_aux_data(self, dsname, nhorn): +# """Get the auxiliary data arrays using the index map.""" 
+# # Geolocation and navigation data: +# if dsname in ['latitude', 'longitude', +# 'solar_azimuth_angle', 'solar_zenith_angle', +# 'satellite_azimuth_angle', 'satellite_zenith_angle', +# 'surface_type', 'terrain_elevation']: +# var_key = AUX_DATA.get(dsname) +# else: +# raise NotImplementedError(f"Dataset {dsname!r} not supported!") +# +# try: +# variable = self[var_key][nhorn, :, :] +# except KeyError: +# logger.exception("Could not find key %s in NetCDF file, no valid Dataset created", var_key) +# raise +# +# # Scale the data: +# if 'scale_factor' in variable.attrs and 'add_offset' in variable.attrs: +# missing_value = variable.attrs['missing_value'] +# variable.data = da.where(variable.data == missing_value, np.nan, +# variable.data * variable.attrs['scale_factor'] + variable.attrs['add_offset']) +# +# return variable +# +# def _get_global_attributes(self): +# """Create a dictionary of global attributes.""" +# return { +# 'filename': self.filename, +# 'start_time': self.start_time, +# 'end_time': self.end_time, +# 'spacecraft_name': self.platform_name, +# 'sensor': self.sensor, +# 'filename_start_time': self.filename_info['start_time'], +# 'filename_end_time': self.filename_info['end_time'], +# 'platform_name': self.platform_name, +# 'quality_group': self._get_quality_attributes(), +# } +# +# def _get_quality_attributes(self): +# """Get quality attributes.""" +# quality_group = self['quality'] +# quality_dict = {} +# for key in quality_group: +# # Add the values (as Numpy array) of each variable in the group +# # where possible +# try: +# quality_dict[key] = quality_group[key].values +# except ValueError: +# quality_dict[key] = None +# +# quality_dict.update(quality_group.attrs) +# return quality_dict diff --git a/satpy/tests/reader_tests/test_aws_l1b.py b/satpy/tests/reader_tests/test_aws_l1b.py new file mode 100644 index 0000000000..f3df0b077a --- /dev/null +++ b/satpy/tests/reader_tests/test_aws_l1b.py @@ -0,0 +1,128 @@ +"""Tests for aws l1b filehandlers.""" 
+ +import os +from datetime import datetime, timedelta +from random import randrange + +import numpy as np +import pytest +import xarray as xr +from datatree import DataTree +from trollsift import compose, parse + +from satpy.readers.aws_l1b import DATETIME_FORMAT, AWSL1BFile + +platform_name = "AWS1" +file_pattern = "W_XX-OHB-Stockholm,SAT,{platform_name}-MWR-1B-RAD_C_OHB_{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_T_B____.nc" # noqa +fake_data = xr.DataArray(np.random.randint(0, 700000, size=19*5*5).reshape((19, 5, 5)), + dims=["n_channels", "n_fovs", "n_scans"]) +fake_lon_data = xr.DataArray(np.random.randint(0, 3599999, size=25 * 4).reshape((4, 5, 5)), + dims=["n_geo_groups", "n_fovs", "n_scans"]) +fake_lat_data = xr.DataArray(np.random.randint(-900000, 900000, size=25 * 4).reshape((4, 5, 5)), + dims=["n_geo_groups", "n_fovs", "n_scans"]) +fake_sun_azi_data = xr.DataArray(np.random.randint(0, 36000, size=25 * 4).reshape((4, 5, 5)), + dims=["n_geo_groups", "n_fovs", "n_scans"]) +fake_sun_zen_data = xr.DataArray(np.random.randint(0, 36000, size=25 * 4).reshape((4, 5, 5)), + dims=["n_geo_groups", "n_fovs", "n_scans"]) +fake_sat_azi_data = xr.DataArray(np.random.randint(0, 36000, size=25 * 4).reshape((4, 5, 5)), + dims=["n_geo_groups", "n_fovs", "n_scans"]) +fake_sat_zen_data = xr.DataArray(np.random.randint(0, 36000, size=25 * 4).reshape((4, 5, 5)), + dims=["n_geo_groups", "n_fovs", "n_scans"]) + + +def random_date(start, end): + """Create a random datetime between two datetimes.""" + delta = end - start + int_delta = (delta.days * 24 * 60 * 60) + delta.seconds + random_second = randrange(int_delta) + return start + timedelta(seconds=random_second) + + +@pytest.fixture(scope="session") +def aws_file(tmp_path_factory): + """Create an AWS file.""" + ds = DataTree() + start_time = random_date(datetime(2024, 6, 1), datetime(2030, 6, 1)) + ds.attrs["sensing_start_time_utc"] = start_time.strftime(DATETIME_FORMAT) + end_time = 
random_date(datetime(2024, 6, 1), datetime(2030, 6, 1)) + ds.attrs["sensing_end_time_utc"] = end_time.strftime(DATETIME_FORMAT) + processing_time = random_date(datetime(2024, 6, 1), datetime(2030, 6, 1)) + + instrument = "AWS" + ds.attrs["instrument"] = instrument + ds["data/calibration/aws_toa_brightness_temperature"] = fake_data + ds["data/calibration/aws_toa_brightness_temperature"].attrs["scale_factor"] = 0.001 + ds["data/calibration/aws_toa_brightness_temperature"].attrs["add_offset"] = 0.0 + ds["data/navigation/aws_lon"] = fake_lon_data + ds["data/navigation/aws_lat"] = fake_lat_data + ds["data/navigation/aws_solar_azimuth_angle"] = fake_sun_azi_data + ds["data/navigation/aws_solar_zenith_angle"] = fake_sun_zen_data + ds["data/navigation/aws_satellite_azimuth_angle"] = fake_sat_azi_data + ds["data/navigation/aws_satellite_zenith_angle"] = fake_sat_zen_data + + tmp_dir = tmp_path_factory.mktemp("aws_l1b_tests") + filename = tmp_dir / compose(file_pattern, dict(start_time=start_time, end_time=end_time, + processing_time=processing_time, platform_name=platform_name)) + + ds.to_netcdf(filename) + return filename + + +@pytest.fixture +def aws_handler(aws_file): + """Create an aws filehandler.""" + filename_info = parse(file_pattern, os.path.basename(aws_file)) + return AWSL1BFile(aws_file, filename_info, dict()) + + +def test_start_end_time(aws_file): + """Test that start and end times are read correctly.""" + filename_info = parse(file_pattern, os.path.basename(aws_file)) + handler = AWSL1BFile(aws_file, filename_info, dict()) + + assert handler.start_time == filename_info["start_time"] + assert handler.end_time == filename_info["end_time"] + + +def test_metadata(aws_handler): + """Test that the metadata is read correctly.""" + assert aws_handler.sensor == "AWS" + assert aws_handler.platform_name == platform_name + + +def test_get_channel_data(aws_handler): + """Test retrieving the channel data.""" + did = dict(name="1") + dataset_info = 
dict(file_key="data/calibration/aws_toa_brightness_temperature") + np.testing.assert_allclose(aws_handler.get_dataset(did, dataset_info), fake_data.isel(n_channels=0) * 0.001) + + +@pytest.mark.parametrize(["id_name", "file_key", "fake_array"], + [("lon_horn_1", "data/navigation/aws_lon", fake_lon_data), + ("lat_horn_1", "data/navigation/aws_lat", fake_lat_data), + ("solar_azimuth_horn_1", "data/navigation/aws_solar_azimuth_angle", fake_sun_azi_data), + ("solar_zenith_horn_1", "data/navigation/aws_solar_zenith_angle", fake_sun_zen_data), + ("satellite_azimuth_horn_1", "data/navigation/aws_satellite_azimuth_angle", + fake_sat_azi_data), + ("satellite_zenith_horn_1", "data/navigation/aws_satellite_zenith_angle", + fake_sat_zen_data)]) +def test_get_navigation_data(aws_handler, id_name, file_key, fake_array): + """Test retrieving the angles_data.""" + did = dict(name=id_name) + dataset_info = dict(file_key=file_key, n_horns=0) + np.testing.assert_allclose(aws_handler.get_dataset(did, dataset_info), fake_array.isel(n_geo_groups=0)) + + +# def test_channel_is_masked_and_scaled(): +# pass + +# def test_navigation_is_scaled_and_scaled(): +# pass + + +# def test_orbital_parameters_are_provided(): +# pass + + +# def test_coords_contain_xy(): +# pass From 7191df150afadafd1b861b3ae2239103dd418232 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Tue, 7 Nov 2023 09:52:49 +0100 Subject: [PATCH 011/340] Finalize aws_l1b refactoring and tests --- satpy/etc/readers/aws_l1b_nc.yaml | 297 ++++++------------- satpy/readers/aws_l1b.py | 344 +++++------------------ satpy/tests/reader_tests/test_aws_l1b.py | 82 ++++-- 3 files changed, 214 insertions(+), 509 deletions(-) diff --git a/satpy/etc/readers/aws_l1b_nc.yaml b/satpy/etc/readers/aws_l1b_nc.yaml index 0989579a8f..adee00ed2a 100644 --- a/satpy/etc/readers/aws_l1b_nc.yaml +++ b/satpy/etc/readers/aws_l1b_nc.yaml @@ -20,6 +20,12 @@ reader: enum: - QH - QV + horn: + enum: + - "1" + - "2" + - "3" + - "4" calibration: enum: - 
brightness_temperature @@ -29,6 +35,21 @@ reader: default: [] type: !!python/name:satpy.dataset.ModifierTuple + coord_identification_keys: + name: + required: true + resolution: + polarization: + enum: + - QH + - QV + horn: + enum: + - "1" + - "2" + - "3" + - "4" + datasets: '1': name: '1' @@ -41,7 +62,8 @@ datasets: calibration: brightness_temperature: standard_name: toa_brightness_temperature - coordinates: [lon_horn_1, lat_horn_1] + horn: "1" + coordinates: [longitude, latitude] file_type: aws_l1b_nc file_key: data/calibration/aws_toa_brightness_temperature '2': @@ -55,7 +77,8 @@ datasets: calibration: brightness_temperature: standard_name: toa_brightness_temperature - coordinates: [lon_horn_1, lat_horn_1] + horn: "1" + coordinates: [longitude, latitude] file_type: aws_l1b_nc file_key: data/calibration/aws_toa_brightness_temperature '3': @@ -69,7 +92,8 @@ datasets: calibration: brightness_temperature: standard_name: toa_brightness_temperature - coordinates: [lon_horn_1, lat_horn_1] + horn: "1" + coordinates: [longitude, latitude] file_type: aws_l1b_nc file_key: data/calibration/aws_toa_brightness_temperature '4': @@ -83,7 +107,8 @@ datasets: calibration: brightness_temperature: standard_name: toa_brightness_temperature - coordinates: [lon_horn_1, lat_horn_1] + horn: "1" + coordinates: [longitude, latitude] file_type: aws_l1b_nc file_key: data/calibration/aws_toa_brightness_temperature '5': @@ -97,7 +122,8 @@ datasets: calibration: brightness_temperature: standard_name: toa_brightness_temperature - coordinates: [lon_horn_1, lat_horn_1] + horn: "1" + coordinates: [longitude, latitude] file_type: aws_l1b_nc file_key: data/calibration/aws_toa_brightness_temperature '6': @@ -111,7 +137,8 @@ datasets: calibration: brightness_temperature: standard_name: toa_brightness_temperature - coordinates: [lon_horn_1, lat_horn_1] + horn: "1" + coordinates: [longitude, latitude] file_type: aws_l1b_nc file_key: data/calibration/aws_toa_brightness_temperature '7': @@ -125,7 +152,8 
@@ datasets: calibration: brightness_temperature: standard_name: toa_brightness_temperature - coordinates: [lon_horn_1, lat_horn_1] + horn: "1" + coordinates: [longitude, latitude] file_type: aws_l1b_nc file_key: data/calibration/aws_toa_brightness_temperature '8': @@ -139,7 +167,8 @@ datasets: calibration: brightness_temperature: standard_name: toa_brightness_temperature - coordinates: [lon_horn_1, lat_horn_1] + horn: "1" + coordinates: [longitude, latitude] file_type: aws_l1b_nc file_key: data/calibration/aws_toa_brightness_temperature '9': @@ -153,7 +182,8 @@ datasets: calibration: brightness_temperature: standard_name: toa_brightness_temperature - coordinates: [lon_horn_2, lat_horn_2] + horn: "2" + coordinates: [longitude, latitude] file_type: aws_l1b_nc file_key: data/calibration/aws_toa_brightness_temperature '10': @@ -167,7 +197,8 @@ datasets: calibration: brightness_temperature: standard_name: toa_brightness_temperature - coordinates: [lon_horn_3, lat_horn_3] + horn: "3" + coordinates: [longitude, latitude] file_type: aws_l1b_nc file_key: data/calibration/aws_toa_brightness_temperature '11': @@ -181,7 +212,8 @@ datasets: calibration: brightness_temperature: standard_name: toa_brightness_temperature - coordinates: [lon_horn_3, lat_horn_3] + horn: "3" + coordinates: [longitude, latitude] file_type: aws_l1b_nc file_key: data/calibration/aws_toa_brightness_temperature '12': @@ -195,7 +227,8 @@ datasets: calibration: brightness_temperature: standard_name: toa_brightness_temperature - coordinates: [lon_horn_3, lat_horn_3] + horn: "3" + coordinates: [longitude, latitude] file_type: aws_l1b_nc file_key: data/calibration/aws_toa_brightness_temperature '13': @@ -209,7 +242,8 @@ datasets: calibration: brightness_temperature: standard_name: toa_brightness_temperature - coordinates: [lon_horn_3, lat_horn_3] + horn: "3" + coordinates: [longitude, latitude] file_type: aws_l1b_nc file_key: data/calibration/aws_toa_brightness_temperature '14': @@ -223,7 +257,8 @@ datasets: 
calibration: brightness_temperature: standard_name: toa_brightness_temperature - coordinates: [lon_horn_3, lat_horn_3] + horn: "3" + coordinates: [longitude, latitude] file_type: aws_l1b_nc file_key: data/calibration/aws_toa_brightness_temperature '15': @@ -237,7 +272,8 @@ datasets: calibration: brightness_temperature: standard_name: toa_brightness_temperature - coordinates: [lon_horn_3, lat_horn_3] + horn: "3" + coordinates: [longitude, latitude] file_type: aws_l1b_nc file_key: data/calibration/aws_toa_brightness_temperature '16': @@ -252,7 +288,8 @@ datasets: calibration: brightness_temperature: standard_name: toa_brightness_temperature - coordinates: [lon_horn_4, lat_horn_4] + horn: "4" + coordinates: [longitude, latitude] file_type: aws_l1b_nc file_key: data/calibration/aws_toa_brightness_temperature '17': @@ -267,7 +304,8 @@ datasets: calibration: brightness_temperature: standard_name: toa_brightness_temperature - coordinates: [lon_horn_4, lat_horn_4] + horn: "4" + coordinates: [longitude, latitude] file_type: aws_l1b_nc file_key: data/calibration/aws_toa_brightness_temperature '18': @@ -282,7 +320,8 @@ datasets: calibration: brightness_temperature: standard_name: toa_brightness_temperature - coordinates: [lon_horn_4, lat_horn_4] + horn: "4" + coordinates: [longitude, latitude] file_type: aws_l1b_nc file_key: data/calibration/aws_toa_brightness_temperature '19': @@ -297,238 +336,72 @@ datasets: calibration: brightness_temperature: standard_name: toa_brightness_temperature - coordinates: [lon_horn_4, lat_horn_4] + horn: "4" + coordinates: [longitude, latitude] file_type: aws_l1b_nc file_key: data/calibration/aws_toa_brightness_temperature # --- Coordinates --- - lon_horn_1: - name: lon_horn_1 + longitude: + name: longitude file_type: aws_l1b_nc standard_name: longitude units: degrees_east - n_horns: 0 + horn: ["1", "2", "3", "4"] file_key: data/navigation/aws_lon - lat_horn_1: - name: lat_horn_1 - file_type: aws_l1b_nc - standard_name: latitude - units: 
degrees_north - n_horns: 0 - file_key: data/navigation/aws_lat - - lon_horn_2: - name: lon_horn_2 - file_type: aws_l1b_nc - standard_name: longitude - units: degrees_east - n_horns: 1 - file_key: data/navigation/aws_lon - - lat_horn_2: - name: lat_horn_2 - file_type: aws_l1b_nc - standard_name: latitude - units: degrees_north - n_horns: 1 - file_key: data/navigation/aws_lat - - lon_horn_3: - name: lon_horn_3 - file_type: aws_l1b_nc - standard_name: longitude - units: degrees_east - n_horns: 2 - file_key: data/navigation/aws_lon - - lat_horn_3: - name: lat_horn_3 + latitude: + name: latitude file_type: aws_l1b_nc standard_name: latitude units: degrees_north - n_horns: 2 + horn: ["1", "2", "3", "4"] file_key: data/navigation/aws_lat - lon_horn_4: - name: lon_horn_4 - file_type: aws_l1b_nc - standard_name: longitude - units: degrees_east - n_horns: 3 - file_key: data/navigation/aws_lon - - lat_horn_4: - name: lat_horn_4 - file_type: aws_l1b_nc - standard_name: latitude - units: degrees_north - n_horns: 3 - file_key: data/navigation/aws_lat # --- Navigation data --- - solar_azimuth_horn_1: - name: solar_azimuth_horn_1 - file_type: aws_l1b_nc - file_key: data/navigation/aws_solar_azimuth_angle - standard_name: solar_azimuth_angle - n_horns: 0 - coordinates: - - lon_horn_1 - - lat_horn_1 - - solar_azimuth_horn_2: - name: solar_azimuth_horn_2 + solar_azimuth: + name: solar_azimuth file_type: aws_l1b_nc file_key: data/navigation/aws_solar_azimuth_angle standard_name: solar_azimuth_angle - n_horns: 1 + horn: ["1", "2", "3", "4"] coordinates: - - lon_horn_1 - - lat_horn_1 + - longitude + - latitude - solar_azimuth_horn_3: - name: solar_azimuth_horn_3 - file_type: aws_l1b_nc - file_key: data/navigation/aws_solar_azimuth_angle - standard_name: solar_azimuth_angle - n_horns: 2 - coordinates: - - lon_horn_1 - - lat_horn_1 - - solar_azimuth_horn_4: - name: solar_azimuth_horn_4 - file_type: aws_l1b_nc - file_key: data/navigation/aws_solar_azimuth_angle - standard_name: 
solar_azimuth_angle - n_horns: 3 - coordinates: - - lon_horn_1 - - lat_horn_1 - - solar_zenith_horn_1: - name: solar_zenith_horn_1 + solar_zenith: + name: solar_zenith file_type: aws_l1b_nc file_key: data/navigation/aws_solar_zenith_angle standard_name: solar_zenith_angle - n_horns: 0 + horn: ["1", "2", "3", "4"] coordinates: - - lon_horn_1 - - lat_horn_1 + - longitude + - latitude - solar_zenith_horn_2: - name: solar_zenith_horn_2 - file_type: aws_l1b_nc - file_key: data/navigation/aws_solar_zenith_angle - standard_name: solar_zenith_angle - n_horns: 1 - coordinates: - - lon_horn_1 - - lat_horn_1 - - solar_zenith_horn_3: - name: solar_zenith_horn_3 - file_type: aws_l1b_nc - file_key: data/navigation/aws_solar_zenith_angle - standard_name: solar_zenith_angle - n_horns: 2 - coordinates: - - lon_horn_1 - - lat_horn_1 - - solar_zenith_horn_4: - name: solar_zenith_horn_4 - file_type: aws_l1b_nc - file_key: data/navigation/aws_solar_zenith_angle - standard_name: solar_zenith_angle - n_horns: 3 - coordinates: - - lon_horn_1 - - lat_horn_1 - - satellite_azimuth_horn_1: - name: satellite_azimuth_horn_1 + satellite_azimuth: + name: satellite_azimuth file_type: aws_l1b_nc file_key: data/navigation/aws_satellite_azimuth_angle standard_name: satellite_azimuth_angle - n_horns: 0 - coordinates: - - lon_horn_1 - - lat_horn_1 - - satellite_azimuth_horn_2: - name: satellite_azimuth_horn_2 - file_type: aws_l1b_nc - file_key: data/navigation/aws_satellite_azimuth_angle - standard_name: satellite_azimuth_angle - n_horns: 1 - coordinates: - - lon_horn_2 - - lat_horn_2 - - satellite_azimuth_horn_3: - name: satellite_azimuth_horn_3 - file_type: aws_l1b_nc - file_key: data/navigation/aws_satellite_azimuth_angle - standard_name: satellite_azimuth_angle - n_horns: 2 - coordinates: - - lon_horn_3 - - lat_horn_3 - - satellite_azimuth_horn_4: - name: satellite_azimuth_horn_4 - file_type: aws_l1b_nc - file_key: data/navigation/aws_satellite_azimuth_angle - standard_name: satellite_azimuth_angle 
- n_horns: 3 - coordinates: - - lon_horn_4 - - lat_horn_4 - - satellite_zenith_horn_1: - name: satellite_zenith_horn_1 - file_type: aws_l1b_nc - file_key: data/navigation/aws_satellite_zenith_angle - standard_name: satellite_zenith_angle - n_horns: 0 - coordinates: - - lon_horn_1 - - lat_horn_1 - - satellite_zenith_horn_2: - name: satellite_zenith_horn_2 - file_type: aws_l1b_nc - file_key: data/navigation/aws_satellite_zenith_angle - standard_name: satellite_zenith_angle - n_horns: 1 - coordinates: - - lon_horn_2 - - lat_horn_2 - - satellite_zenith_horn_3: - name: satellite_zenith_horn_3 - file_type: aws_l1b_nc - file_key: data/navigation/aws_satellite_zenith_angle - standard_name: satellite_zenith_angle - n_horns: 2 + horn: ["1", "2", "3", "4"] coordinates: - - lon_horn_3 - - lat_horn_3 + - longitude + - latitude - satellite_zenith_horn_4: - name: satellite_zenith_horn_4 + satellite_zenith: + name: satellite_zenith file_type: aws_l1b_nc file_key: data/navigation/aws_satellite_zenith_angle standard_name: satellite_zenith_angle - n_horns: 3 + horn: ["1", "2", "3", "4"] coordinates: - - lon_horn_4 - - lat_horn_4 + - longitude + - latitude file_types: diff --git a/satpy/readers/aws_l1b.py b/satpy/readers/aws_l1b.py index aef8df1a5e..465c4b86a2 100644 --- a/satpy/readers/aws_l1b.py +++ b/satpy/readers/aws_l1b.py @@ -19,40 +19,14 @@ import logging -# import dask.array as da -# import numpy as np import xarray as xr from .netcdf_utils import NetCDF4FileHandler -# from datetime import datetime - -# from netCDF4 import default_fillvals - - logger = logging.getLogger(__name__) DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S.%f" -# DUMMY_STARTTIME = datetime(2023, 7, 7, 12, 0) -# DUMMY_ENDTIME = datetime(2023, 7, 7, 12, 10) -# # dict containing all available auxiliary data parameters to be read using the index map. 
Keys are the -# # parameter name and values are the paths to the variable inside the netcdf -# -# AUX_DATA = { -# 'scantime_utc': 'data/navigation/aws_scantime_utc', -# 'solar_azimuth_angle': 'data/navigation/aws_solar_azimuth_angle', -# 'solar_zenith_angle': 'data/navigation/aws_solar_zenith_angle', -# 'satellite_azimuth_angle': 'data/navigation/aws_satellite_azimuth_angle', -# 'satellite_zenith_angle': 'data/navigation/aws_satellite_zenith_angle', -# 'surface_type': 'data/navigation/aws_surface_type', -# 'terrain_elevation': 'data/navigation/aws_terrain_elevation', -# 'aws_lat': 'data/navigation/aws_lat', -# 'aws_lon': 'data/navigation/aws_lon', -# 'latitude': 'data/navigation/aws_lat', -# 'longitude': 'data/navigation/aws_lon', -# } -# AWS_CHANNEL_NAMES_TO_NUMBER = {'1': 1, '2': 2, '3': 3, '4': 4, '5': 5, '6': 6, '7': 7, '8': 8, '9': 9, '10': 10, '11': 11, '12': 12, @@ -60,23 +34,6 @@ '17': 17, '18': 18, '19': 19} AWS_CHANNEL_NAMES = list(AWS_CHANNEL_NAMES_TO_NUMBER.keys()) -# AWS_CHANNELS = set(AWS_CHANNEL_NAMES) -# -# -# def get_channel_index_from_name(chname): -# """Get the AWS channel index from the channel name.""" -# chindex = AWS_CHANNEL_NAMES_TO_NUMBER.get(chname, 0) - 1 -# if 0 <= chindex < 19: -# return chindex -# raise AttributeError(f"Channel name {chname!r} not supported") -# -# -# def _get_aux_data_name_from_dsname(dsname): -# aux_data_name = [key for key in AUX_DATA.keys() if key in dsname] -# if len(aux_data_name) > 0: -# return aux_data_name[0] -# -# class AWSL1BFile(NetCDF4FileHandler): @@ -91,7 +48,9 @@ class using the :mod:`~satpy.Scene.load` method with the reader def __init__(self, filename, filename_info, filetype_info, auto_maskandscale=True): """Initialize the handler.""" - super().__init__(filename, filename_info, filetype_info) + super().__init__(filename, filename_info, filetype_info, + cache_var_size=10000, + cache_handle=True) self.filename_info = filename_info @property @@ -114,240 +73,87 @@ def platform_name(self): """Get the 
platform name.""" return self.filename_info["platform_name"] + @property + def sub_satellite_longitude_start(self): + """Get the longitude of sub-satellite point at start of the product.""" + return self['status/satellite/subsat_longitude_start'].data.item() + + @property + def sub_satellite_latitude_start(self): + """Get the latitude of sub-satellite point at start of the product.""" + return self['status/satellite/subsat_latitude_start'].data.item() + + @property + def sub_satellite_longitude_end(self): + """Get the longitude of sub-satellite point at end of the product.""" + return self['status/satellite/subsat_longitude_end'].data.item() + + @property + def sub_satellite_latitude_end(self): + """Get the latitude of sub-satellite point at end of the product.""" + return self['status/satellite/subsat_latitude_end'].data.item() + def get_dataset(self, dataset_id, dataset_info): """Get the data.""" if dataset_id["name"] in AWS_CHANNEL_NAMES: - channel_data = self[dataset_info["file_key"]] - channel_data.coords["n_channels"] = AWS_CHANNEL_NAMES - data_array = channel_data.sel(n_channels=dataset_id["name"]) - elif (dataset_id["name"].startswith("lon") or dataset_id["name"].startswith("lat") - or dataset_id["name"].startswith("solar_azimuth") - or dataset_id["name"].startswith("solar_zenith") - or dataset_id["name"].startswith("satellite_azimuth") - or dataset_id["name"].startswith("satellite_zenith")): - channel_data = self[dataset_info["file_key"]] - channel_data.coords["n_geo_groups"] = [0, 1, 2, 3] - n_horn = dataset_info["n_horns"] - data_array = channel_data.sel(n_geo_groups=n_horn) + data_array = self._get_channel_data(dataset_id, dataset_info) + elif (dataset_id["name"] in ["longitude", "latitude", + "solar_azimuth", "solar_zenith", + "satellite_zenith", "satellite_azimuth"]): + data_array = self._get_navigation_data(dataset_id, dataset_info) else: raise NotImplementedError - if "scale_factor" in data_array.attrs and "add_offset" in data_array.attrs: - with 
xr.set_options(keep_attrs=True): - data_array = data_array * data_array.attrs["scale_factor"] + data_array.attrs["add_offset"] - data_array.attrs.pop("scale_factor") - data_array.attrs.pop("add_offset") - # if "missing_value" in data_array.attrs: - # with xr.set_options(keep_attrs=True): - # data_array = data_array.where(data_array != data_array.attrs["missing_value"]) - return data_array + data_array = mask_and_scale(data_array) + if dataset_id["name"] == "longitude": + data_array = data_array.where(data_array <= 180, data_array - 360) + data_array.attrs.update(dataset_info) -# class AWSL1BFile(NetCDF4FileHandler): -# """Class implementing the AWS L1b Filehandler. -# -# This class implements the ESA Arctic Weather Satellite (AWS) Level-1b -# NetCDF reader. It is designed to be used through the :class:`~satpy.Scene` -# class using the :mod:`~satpy.Scene.load` method with the reader -# ``"aws_l1b_nc"``. -# -# """ -# -# _platform_name_translate = { -# "": "AWS", -# } -# -# def __init__(self, filename, filename_info, filetype_info): -# """Initialize file handler.""" -# xarray_kwargs = {'decode_times': False} -# super().__init__(filename, filename_info, -# filetype_info, -# xarray_kwargs=xarray_kwargs, -# cache_var_size=10000, -# cache_handle=True) -# logger.debug('Reading: {}'.format(self.filename)) -# logger.debug('Start: {}'.format(self.start_time)) -# logger.debug('End: {}'.format(self.end_time)) -## -# self._channel_names = AWS_CHANNEL_NAMES + data_array.attrs["orbital_parameters"] = {'sub_satellite_latitude_start': self.sub_satellite_latitude_start, + 'sub_satellite_longitude_start': self.sub_satellite_longitude_start, + 'sub_satellite_latitude_end': self.sub_satellite_latitude_end, + 'sub_satellite_longitude_end': self.sub_satellite_longitude_end} + + data_array.attrs["platform_name"] = self.platform_name + data_array.attrs["sensor"] = self.sensor + return data_array + + def _get_channel_data(self, dataset_id, dataset_info): + channel_data = 
self[dataset_info["file_key"]] + channel_data.coords["n_channels"] = AWS_CHANNEL_NAMES + channel_data = channel_data.rename({'n_fovs': 'x', 'n_scans': 'y'}) + return channel_data.sel(n_channels=dataset_id["name"]).drop_vars("n_channels") + + def _get_navigation_data(self, dataset_id, dataset_info): + geo_data = self[dataset_info["file_key"]] + geo_data.coords["n_geo_groups"] = ["1", "2", "3", "4"] + geo_data = geo_data.rename({'n_fovs': 'x', 'n_scans': 'y'}) + horn = dataset_id["horn"].name + return geo_data.sel(n_geo_groups=horn).drop_vars("n_geo_groups") + + +def mask_and_scale(data_array): + """Mask then scale the data array.""" + if "missing_value" in data_array.attrs: + with xr.set_options(keep_attrs=True): + data_array = data_array.where(data_array != data_array.attrs["missing_value"]) + data_array.attrs.pop("missing_value") + if "valid_max" in data_array.attrs: + with xr.set_options(keep_attrs=True): + data_array = data_array.where(data_array <= data_array.attrs["valid_max"]) + data_array.attrs.pop("valid_max") + if "valid_min" in data_array.attrs: + with xr.set_options(keep_attrs=True): + data_array = data_array.where(data_array >= data_array.attrs["valid_min"]) + data_array.attrs.pop("valid_min") + if "scale_factor" in data_array.attrs and "add_offset" in data_array.attrs: + with xr.set_options(keep_attrs=True): + data_array = data_array * data_array.attrs["scale_factor"] + data_array.attrs["add_offset"] + data_array.attrs.pop("scale_factor") + data_array.attrs.pop("add_offset") + return data_array -# -# @property -# def sub_satellite_longitude_start(self): -# """Get the longitude of sub-satellite point at start of the product.""" -# return self['status/satellite/subsat_longitude_start'].data.item() -# -# @property -# def sub_satellite_latitude_start(self): -# """Get the latitude of sub-satellite point at start of the product.""" -# return self['status/satellite/subsat_latitude_start'].data.item() -# -# @property -# def sub_satellite_longitude_end(self): 
-# """Get the longitude of sub-satellite point at end of the product.""" -# return self['status/satellite/subsat_longitude_end'].data.item() -# -# @property -# def sub_satellite_latitude_end(self): -# """Get the latitude of sub-satellite point at end of the product.""" -# return self['status/satellite/subsat_latitude_end'].data.item() -# -# def get_dataset(self, dataset_id, dataset_info): -# """Get dataset using file_key in dataset_info.""" -# logger.debug('Reading {} from {}'.format(dataset_id['name'], self.filename)) -# -# var_key = dataset_info['file_key'] -# # if _get_aux_data_name_from_dsname(dataset_id['name']) is not None: -# if _get_aux_data_name_from_dsname(var_key) is not None: -# nhorn = dataset_info['n_horns'] -# standard_name = dataset_info['standard_name'] -# -# # variable = self._get_dataset_aux_data(var_key, nhorn) # (dataset_id['name']) -# variable = self._get_dataset_aux_data(standard_name, nhorn) # (dataset_id['name']) -# elif dataset_id['name'] in AWS_CHANNELS: -# logger.debug(f'Reading in file to get dataset with key {var_key}.') -# variable = self._get_dataset_channel(dataset_id, dataset_info) -# else: -# logger.warning(f'Could not find key {var_key} in NetCDF file, no valid Dataset created') # noqa: E501 -# return None -# -# variable = self._manage_attributes(variable, dataset_info) -# variable = self._drop_coords(variable) -# variable = self._standardize_dims(variable) -# -# if dataset_info['standard_name'] in ['longitude', 'latitude']: -# data = variable.data[:, :] -# if dataset_info['standard_name'] in ['longitude']: -# data = self._scale_lons(data) -# lon_or_lat = xr.DataArray( -# data, -# attrs=variable.attrs, -# dims=(variable.dims[0], variable.dims[1]) -# ) -# variable = lon_or_lat -# -# return variable -# -# @staticmethod -# def _scale_lons(lons): -# return xr.where(lons > 180, lons - 360, lons) -# -# @staticmethod -# def _standardize_dims(variable): -# """Standardize dims to y, x.""" -# if 'n_scans' in variable.dims: -# variable = 
variable.rename({'n_fovs': 'x', 'n_scans': 'y'}) -# if variable.dims[0] == 'x': -# variable = variable.transpose('y', 'x') -# return variable -# -# @staticmethod -# def _drop_coords(variable): -# """Drop coords that are not in dims.""" -# for coord in variable.coords: -# if coord not in variable.dims: -# variable = variable.drop_vars(coord) -# return variable -# -# def _manage_attributes(self, variable, dataset_info): -# """Manage attributes of the dataset.""" -# variable.attrs.setdefault('units', None) -# variable.attrs.update(dataset_info) -# variable.attrs.update(self._get_global_attributes()) -# return variable -# -# def _get_dataset_channel(self, key, dataset_info): -# """Load dataset corresponding to channel measurement. -# -# Load a dataset when the key refers to a measurand, whether uncalibrated -# (counts) or calibrated in terms of brightness temperature or radiance. -# -# """ -# # Get the dataset -# # Get metadata for given dataset -# grp_pth = dataset_info['file_key'] -# channel_index = get_channel_index_from_name(key['name']) -# -# # data = self[grp_pth][:, :, channel_index] -# data = self[grp_pth][channel_index, :, :] -# data = data.transpose() -# # This transposition should not be needed were the format following the EPS-SG format spec!! 
-# attrs = data.attrs.copy() -# -# fv = attrs.pop( -# "FillValue", -# default_fillvals.get(data.dtype.str[1:], np.nan)) -# vr = attrs.get("valid_range", [-np.inf, np.inf]) -# -# if key['calibration'] == "counts": -# attrs["_FillValue"] = fv -# nfv = fv -# else: -# nfv = np.nan -# data = data.where(data >= vr[0], nfv) -# data = data.where(data <= vr[1], nfv) -# -# # Manage the attributes of the dataset -# data.attrs.setdefault('units', None) -# data.attrs.update(dataset_info) -# -# dataset_attrs = getattr(data, 'attrs', {}) -# dataset_attrs.update(dataset_info) -# dataset_attrs.update({ -# "platform_name": self.platform_name, -# "sensor": self.sensor, -# "orbital_parameters": {'sub_satellite_latitude_start': self.sub_satellite_latitude_start, -# 'sub_satellite_longitude_start': self.sub_satellite_longitude_start, -# 'sub_satellite_latitude_end': self.sub_satellite_latitude_end, -# 'sub_satellite_longitude_end': self.sub_satellite_longitude_end}, -# }) -# -# try: -# dataset_attrs.update(key.to_dict()) -# except AttributeError: -# dataset_attrs.update(key) -# -# data.attrs.update(dataset_attrs) -# return data -# -# def _get_dataset_aux_data(self, dsname, nhorn): -# """Get the auxiliary data arrays using the index map.""" -# # Geolocation and navigation data: -# if dsname in ['latitude', 'longitude', -# 'solar_azimuth_angle', 'solar_zenith_angle', -# 'satellite_azimuth_angle', 'satellite_zenith_angle', -# 'surface_type', 'terrain_elevation']: -# var_key = AUX_DATA.get(dsname) -# else: -# raise NotImplementedError(f"Dataset {dsname!r} not supported!") -# -# try: -# variable = self[var_key][nhorn, :, :] -# except KeyError: -# logger.exception("Could not find key %s in NetCDF file, no valid Dataset created", var_key) -# raise -# -# # Scale the data: -# if 'scale_factor' in variable.attrs and 'add_offset' in variable.attrs: -# missing_value = variable.attrs['missing_value'] -# variable.data = da.where(variable.data == missing_value, np.nan, -# variable.data * 
variable.attrs['scale_factor'] + variable.attrs['add_offset']) -# -# return variable -# -# def _get_global_attributes(self): -# """Create a dictionary of global attributes.""" -# return { -# 'filename': self.filename, -# 'start_time': self.start_time, -# 'end_time': self.end_time, -# 'spacecraft_name': self.platform_name, -# 'sensor': self.sensor, -# 'filename_start_time': self.filename_info['start_time'], -# 'filename_end_time': self.filename_info['end_time'], -# 'platform_name': self.platform_name, -# 'quality_group': self._get_quality_attributes(), -# } # # def _get_quality_attributes(self): # """Get quality attributes.""" diff --git a/satpy/tests/reader_tests/test_aws_l1b.py b/satpy/tests/reader_tests/test_aws_l1b.py index f3df0b077a..7ab910d194 100644 --- a/satpy/tests/reader_tests/test_aws_l1b.py +++ b/satpy/tests/reader_tests/test_aws_l1b.py @@ -2,6 +2,7 @@ import os from datetime import datetime, timedelta +from enum import Enum from random import randrange import numpy as np @@ -14,7 +15,12 @@ platform_name = "AWS1" file_pattern = "W_XX-OHB-Stockholm,SAT,{platform_name}-MWR-1B-RAD_C_OHB_{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_T_B____.nc" # noqa -fake_data = xr.DataArray(np.random.randint(0, 700000, size=19*5*5).reshape((19, 5, 5)), +fake_data_np = np.random.randint(0, 700000, size=19*5*5).reshape((19, 5, 5)) +fake_data_np[0, 0, 0] = -2147483648 +fake_data_np[0, 0, 1] = 700000 + 10 +fake_data_np[0, 0, 2] = -10 + +fake_data = xr.DataArray(fake_data_np, dims=["n_channels", "n_fovs", "n_scans"]) fake_lon_data = xr.DataArray(np.random.randint(0, 3599999, size=25 * 4).reshape((4, 5, 5)), dims=["n_geo_groups", "n_fovs", "n_scans"]) @@ -53,12 +59,22 @@ def aws_file(tmp_path_factory): ds["data/calibration/aws_toa_brightness_temperature"] = fake_data ds["data/calibration/aws_toa_brightness_temperature"].attrs["scale_factor"] = 0.001 ds["data/calibration/aws_toa_brightness_temperature"].attrs["add_offset"] = 0.0 + 
ds["data/calibration/aws_toa_brightness_temperature"].attrs["missing_value"] = -2147483648 + ds["data/calibration/aws_toa_brightness_temperature"].attrs["valid_min"] = 0 + ds["data/calibration/aws_toa_brightness_temperature"].attrs["valid_max"] = 700000 + ds["data/navigation/aws_lon"] = fake_lon_data + ds["data/navigation/aws_lon"].attrs["scale_factor"] = 1e-4 + ds["data/navigation/aws_lon"].attrs["add_offset"] = 0.0 ds["data/navigation/aws_lat"] = fake_lat_data ds["data/navigation/aws_solar_azimuth_angle"] = fake_sun_azi_data ds["data/navigation/aws_solar_zenith_angle"] = fake_sun_zen_data ds["data/navigation/aws_satellite_azimuth_angle"] = fake_sat_azi_data ds["data/navigation/aws_satellite_zenith_angle"] = fake_sat_zen_data + ds['status/satellite/subsat_latitude_end'] = np.array(22.39) + ds['status/satellite/subsat_longitude_start'] = np.array(304.79) + ds['status/satellite/subsat_latitude_start'] = np.array(55.41) + ds['status/satellite/subsat_longitude_end'] = np.array(296.79) tmp_dir = tmp_path_factory.mktemp("aws_l1b_tests") filename = tmp_dir / compose(file_pattern, dict(start_time=start_time, end_time=end_time, @@ -94,35 +110,45 @@ def test_get_channel_data(aws_handler): """Test retrieving the channel data.""" did = dict(name="1") dataset_info = dict(file_key="data/calibration/aws_toa_brightness_temperature") - np.testing.assert_allclose(aws_handler.get_dataset(did, dataset_info), fake_data.isel(n_channels=0) * 0.001) + expected = fake_data.isel(n_channels=0) + expected = expected.where(expected != -2147483648) + expected = expected.where(expected <= 700000) + expected = expected.where(expected >= 0) + expected = expected * 0.001 + res = aws_handler.get_dataset(did, dataset_info) + np.testing.assert_allclose(res, expected) + assert "x" in res.dims + assert "y" in res.dims + assert "orbital_parameters" in res.attrs + assert res.attrs["orbital_parameters"]["sub_satellite_longitude_end"] == 296.79 + assert res.dims == ("x", "y") + assert "n_channels" not in 
res.coords + assert res.attrs["sensor"] == "AWS" + assert res.attrs["platform_name"] == "AWS1" @pytest.mark.parametrize(["id_name", "file_key", "fake_array"], - [("lon_horn_1", "data/navigation/aws_lon", fake_lon_data), - ("lat_horn_1", "data/navigation/aws_lat", fake_lat_data), - ("solar_azimuth_horn_1", "data/navigation/aws_solar_azimuth_angle", fake_sun_azi_data), - ("solar_zenith_horn_1", "data/navigation/aws_solar_zenith_angle", fake_sun_zen_data), - ("satellite_azimuth_horn_1", "data/navigation/aws_satellite_azimuth_angle", - fake_sat_azi_data), - ("satellite_zenith_horn_1", "data/navigation/aws_satellite_zenith_angle", - fake_sat_zen_data)]) + [("longitude", "data/navigation/aws_lon", fake_lon_data * 1e-4), + ("latitude", "data/navigation/aws_lat", fake_lat_data), + ("solar_azimuth", "data/navigation/aws_solar_azimuth_angle", fake_sun_azi_data), + ("solar_zenith", "data/navigation/aws_solar_zenith_angle", fake_sun_zen_data), + ("satellite_azimuth", "data/navigation/aws_satellite_azimuth_angle", fake_sat_azi_data), + ("satellite_zenith", "data/navigation/aws_satellite_zenith_angle", fake_sat_zen_data)]) def test_get_navigation_data(aws_handler, id_name, file_key, fake_array): """Test retrieving the angles_data.""" - did = dict(name=id_name) - dataset_info = dict(file_key=file_key, n_horns=0) - np.testing.assert_allclose(aws_handler.get_dataset(did, dataset_info), fake_array.isel(n_geo_groups=0)) - - -# def test_channel_is_masked_and_scaled(): -# pass - -# def test_navigation_is_scaled_and_scaled(): -# pass - - -# def test_orbital_parameters_are_provided(): -# pass - - -# def test_coords_contain_xy(): -# pass + Horn = Enum("Horn", ["1", "2", "3", "4"]) + did = dict(name=id_name, horn=Horn["1"]) + dataset_info = dict(file_key=file_key, standard_name=id_name) + res = aws_handler.get_dataset(did, dataset_info) + if id_name == "longitude": + fake_array = fake_array.where(fake_array <= 180, fake_array - 360) + + np.testing.assert_allclose(res, 
fake_array.isel(n_geo_groups=0)) + assert "x" in res.dims + assert "y" in res.dims + assert "orbital_parameters" in res.attrs + assert res.dims == ("x", "y") + assert "standard_name" in res.attrs + assert "n_geo_groups" not in res.coords + if id_name == "longitude": + assert res.max() <= 180 From e526ff5f1f6716dc68063638fe4fa7acc03139b6 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Tue, 7 Nov 2023 09:57:58 +0100 Subject: [PATCH 012/340] Replace single quotes with double quotes --- satpy/readers/aws_l1b.py | 32 ++++++++++++------------ satpy/tests/reader_tests/test_aws_l1b.py | 12 ++++----- 2 files changed, 22 insertions(+), 22 deletions(-) diff --git a/satpy/readers/aws_l1b.py b/satpy/readers/aws_l1b.py index 465c4b86a2..afeae3b80e 100644 --- a/satpy/readers/aws_l1b.py +++ b/satpy/readers/aws_l1b.py @@ -27,11 +27,11 @@ DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S.%f" -AWS_CHANNEL_NAMES_TO_NUMBER = {'1': 1, '2': 2, '3': 3, '4': 4, - '5': 5, '6': 6, '7': 7, '8': 8, - '9': 9, '10': 10, '11': 11, '12': 12, - '13': 13, '14': 14, '15': 15, '16': 16, - '17': 17, '18': 18, '19': 19} +AWS_CHANNEL_NAMES_TO_NUMBER = {"1": 1, "2": 2, "3": 3, "4": 4, + "5": 5, "6": 6, "7": 7, "8": 8, + "9": 9, "10": 10, "11": 11, "12": 12, + "13": 13, "14": 14, "15": 15, "16": 16, + "17": 17, "18": 18, "19": 19} AWS_CHANNEL_NAMES = list(AWS_CHANNEL_NAMES_TO_NUMBER.keys()) @@ -66,7 +66,7 @@ def end_time(self): @property def sensor(self): """Get the sensor name.""" - return self['/attr/instrument'] + return self["/attr/instrument"] @property def platform_name(self): @@ -76,22 +76,22 @@ def platform_name(self): @property def sub_satellite_longitude_start(self): """Get the longitude of sub-satellite point at start of the product.""" - return self['status/satellite/subsat_longitude_start'].data.item() + return self["status/satellite/subsat_longitude_start"].data.item() @property def sub_satellite_latitude_start(self): """Get the latitude of sub-satellite point at start of the product.""" - return 
self['status/satellite/subsat_latitude_start'].data.item() + return self["status/satellite/subsat_latitude_start"].data.item() @property def sub_satellite_longitude_end(self): """Get the longitude of sub-satellite point at end of the product.""" - return self['status/satellite/subsat_longitude_end'].data.item() + return self["status/satellite/subsat_longitude_end"].data.item() @property def sub_satellite_latitude_end(self): """Get the latitude of sub-satellite point at end of the product.""" - return self['status/satellite/subsat_latitude_end'].data.item() + return self["status/satellite/subsat_latitude_end"].data.item() def get_dataset(self, dataset_id, dataset_info): """Get the data.""" @@ -110,10 +110,10 @@ def get_dataset(self, dataset_id, dataset_info): data_array.attrs.update(dataset_info) - data_array.attrs["orbital_parameters"] = {'sub_satellite_latitude_start': self.sub_satellite_latitude_start, - 'sub_satellite_longitude_start': self.sub_satellite_longitude_start, - 'sub_satellite_latitude_end': self.sub_satellite_latitude_end, - 'sub_satellite_longitude_end': self.sub_satellite_longitude_end} + data_array.attrs["orbital_parameters"] = {"sub_satellite_latitude_start": self.sub_satellite_latitude_start, + "sub_satellite_longitude_start": self.sub_satellite_longitude_start, + "sub_satellite_latitude_end": self.sub_satellite_latitude_end, + "sub_satellite_longitude_end": self.sub_satellite_longitude_end} data_array.attrs["platform_name"] = self.platform_name data_array.attrs["sensor"] = self.sensor @@ -122,13 +122,13 @@ def get_dataset(self, dataset_id, dataset_info): def _get_channel_data(self, dataset_id, dataset_info): channel_data = self[dataset_info["file_key"]] channel_data.coords["n_channels"] = AWS_CHANNEL_NAMES - channel_data = channel_data.rename({'n_fovs': 'x', 'n_scans': 'y'}) + channel_data = channel_data.rename({"n_fovs": "x", "n_scans": "y"}) return channel_data.sel(n_channels=dataset_id["name"]).drop_vars("n_channels") def 
_get_navigation_data(self, dataset_id, dataset_info): geo_data = self[dataset_info["file_key"]] geo_data.coords["n_geo_groups"] = ["1", "2", "3", "4"] - geo_data = geo_data.rename({'n_fovs': 'x', 'n_scans': 'y'}) + geo_data = geo_data.rename({"n_fovs": "x", "n_scans": "y"}) horn = dataset_id["horn"].name return geo_data.sel(n_geo_groups=horn).drop_vars("n_geo_groups") diff --git a/satpy/tests/reader_tests/test_aws_l1b.py b/satpy/tests/reader_tests/test_aws_l1b.py index 7ab910d194..480c692b57 100644 --- a/satpy/tests/reader_tests/test_aws_l1b.py +++ b/satpy/tests/reader_tests/test_aws_l1b.py @@ -71,10 +71,10 @@ def aws_file(tmp_path_factory): ds["data/navigation/aws_solar_zenith_angle"] = fake_sun_zen_data ds["data/navigation/aws_satellite_azimuth_angle"] = fake_sat_azi_data ds["data/navigation/aws_satellite_zenith_angle"] = fake_sat_zen_data - ds['status/satellite/subsat_latitude_end'] = np.array(22.39) - ds['status/satellite/subsat_longitude_start'] = np.array(304.79) - ds['status/satellite/subsat_latitude_start'] = np.array(55.41) - ds['status/satellite/subsat_longitude_end'] = np.array(296.79) + ds["status/satellite/subsat_latitude_end"] = np.array(22.39) + ds["status/satellite/subsat_longitude_start"] = np.array(304.79) + ds["status/satellite/subsat_latitude_start"] = np.array(55.41) + ds["status/satellite/subsat_longitude_end"] = np.array(296.79) tmp_dir = tmp_path_factory.mktemp("aws_l1b_tests") filename = tmp_dir / compose(file_pattern, dict(start_time=start_time, end_time=end_time, @@ -84,7 +84,7 @@ def aws_file(tmp_path_factory): return filename -@pytest.fixture +@pytest.fixture() def aws_handler(aws_file): """Create an aws filehandler.""" filename_info = parse(file_pattern, os.path.basename(aws_file)) @@ -127,7 +127,7 @@ def test_get_channel_data(aws_handler): assert res.attrs["platform_name"] == "AWS1" -@pytest.mark.parametrize(["id_name", "file_key", "fake_array"], +@pytest.mark.parametrize(("id_name", "file_key", "fake_array"), [("longitude", 
"data/navigation/aws_lon", fake_lon_data * 1e-4), ("latitude", "data/navigation/aws_lat", fake_lat_data), ("solar_azimuth", "data/navigation/aws_solar_azimuth_angle", fake_sun_azi_data), From f03971533795e871342299e33fa5536dd88380b1 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Wed, 8 Nov 2023 09:32:35 +0100 Subject: [PATCH 013/340] Fix aws yaml file --- satpy/etc/readers/aws_l1b_nc.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/etc/readers/aws_l1b_nc.yaml b/satpy/etc/readers/aws_l1b_nc.yaml index adee00ed2a..d1d5ea8c5f 100644 --- a/satpy/etc/readers/aws_l1b_nc.yaml +++ b/satpy/etc/readers/aws_l1b_nc.yaml @@ -6,7 +6,7 @@ reader: reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [aws,] status: Beta - default_channels: [] + supports_fsspec: false data_identification_keys: name: From 9d43755504de9dc4f9f8188791db957473e81c00 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Benjamin=20R=C3=B6sner?= Date: Thu, 30 Nov 2023 08:34:42 +0100 Subject: [PATCH 014/340] fix: dynamic datasets overwriting configured datasets. --- satpy/readers/modis_l2.py | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/satpy/readers/modis_l2.py b/satpy/readers/modis_l2.py index 3dd0a8dcb4..6a4d69370b 100644 --- a/satpy/readers/modis_l2.py +++ b/satpy/readers/modis_l2.py @@ -155,13 +155,22 @@ def available_datasets(self, configured_datasets): Currently only adds 2D datasets and does not decode bit encoded information. 
""" # pass along existing datasets + handled = set() for is_avail, ds_info in (configured_datasets or []): - yield is_avail, ds_info + file_key = ds_info.get("file_key", ds_info["name"]) + handled.add(file_key) + + if is_avail is not None: + yield is_avail, ds_info + continue + yield self.file_type_matches(ds_info["file_type"]), ds_info res_dict = {5416: 250, 2708: 500, 1354: 1000, 270: 5000, 135: 10000} # get dynamic variables known to this file (that we created) for var_name, val in self.sd.datasets().items(): + if var_name in handled: + continue if len(val[0]) == 2: resolution = res_dict.get(val[1][-1]) if resolution is not None: From d7eb4d2e922c7cc0754fc3c6e99a0bf96f7fccd6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Benjamin=20R=C3=B6sner?= Date: Thu, 30 Nov 2023 11:13:15 +0100 Subject: [PATCH 015/340] doc: update docstrings and comments. --- satpy/readers/file_handlers.py | 10 ++++++++-- satpy/readers/modis_l2.py | 5 +++-- satpy/readers/yaml_reader.py | 5 +++-- 3 files changed, 14 insertions(+), 6 deletions(-) diff --git a/satpy/readers/file_handlers.py b/satpy/readers/file_handlers.py index 3fdeed1edc..02090d0827 100644 --- a/satpy/readers/file_handlers.py +++ b/satpy/readers/file_handlers.py @@ -273,10 +273,16 @@ def available_datasets(self, configured_datasets=None): Example 2 - Add dynamic datasets from the file:: def available_datasets(self, configured_datasets=None): - "Add information to configured datasets." + "Add datasets dynamically determined from the file." 
# pass along existing datasets for is_avail, ds_info in (configured_datasets or []): - yield is_avail, ds_info + if is_avail is not None: + # some other file handler said it has this dataset + # we don't know any more information than the previous + # file handler so let's yield early + yield is_avail, ds_info + continue + yield self.file_type_matches(ds_info["file_type"]), ds_info # get dynamic variables known to this file (that we created) for var_name, val in self.dynamic_variables.items(): diff --git a/satpy/readers/modis_l2.py b/satpy/readers/modis_l2.py index 6a4d69370b..1ed853d4f9 100644 --- a/satpy/readers/modis_l2.py +++ b/satpy/readers/modis_l2.py @@ -154,7 +154,8 @@ def available_datasets(self, configured_datasets): Notes: Currently only adds 2D datasets and does not decode bit encoded information. """ - # pass along existing datasets + # pass along yaml configured (handled) datasets and collect their file keys to check against dynamically + # collected variables later on. handled = set() for is_avail, ds_info in (configured_datasets or []): file_key = ds_info.get("file_key", ds_info["name"]) @@ -167,7 +168,7 @@ def available_datasets(self, configured_datasets): res_dict = {5416: 250, 2708: 500, 1354: 1000, 270: 5000, 135: 10000} - # get dynamic variables known to this file (that we created) + # get variables from file dynamically and only add those which are not already configured in yaml for var_name, val in self.sd.datasets().items(): if var_name in handled: continue diff --git a/satpy/readers/yaml_reader.py b/satpy/readers/yaml_reader.py index ff3599052a..d08075e5af 100644 --- a/satpy/readers/yaml_reader.py +++ b/satpy/readers/yaml_reader.py @@ -623,8 +623,9 @@ def create_filehandlers(self, filenames, fh_kwargs=None): self.file_handlers.get(filetype, []) + filehandlers, key=lambda fhd: (fhd.start_time, fhd.filename)) - # load any additional dataset IDs determined dynamically from the file - # and update any missing metadata that only the file knows + 
# Update dataset IDs with IDs determined dynamically from the file + # and/or update any missing metadata that only the file knows. + # Check if the dataset ID is loadable from that file. self.update_ds_ids_from_file_handlers() return created_fhs From 47821d6b5bbe509fc5e044dcd8e48a1233a04c56 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Thu, 30 Nov 2023 15:17:02 +0100 Subject: [PATCH 016/340] Fix aws reader according to review comments --- satpy/readers/aws_l1b.py | 24 +----------------------- satpy/tests/reader_tests/test_aws_l1b.py | 3 +++ 2 files changed, 4 insertions(+), 23 deletions(-) diff --git a/satpy/readers/aws_l1b.py b/satpy/readers/aws_l1b.py index afeae3b80e..b23dd4119b 100644 --- a/satpy/readers/aws_l1b.py +++ b/satpy/readers/aws_l1b.py @@ -27,13 +27,7 @@ DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S.%f" -AWS_CHANNEL_NAMES_TO_NUMBER = {"1": 1, "2": 2, "3": 3, "4": 4, - "5": 5, "6": 6, "7": 7, "8": 8, - "9": 9, "10": 10, "11": 11, "12": 12, - "13": 13, "14": 14, "15": 15, "16": 16, - "17": 17, "18": 18, "19": 19} - -AWS_CHANNEL_NAMES = list(AWS_CHANNEL_NAMES_TO_NUMBER.keys()) +AWS_CHANNEL_NAMES = list(str(i) for i in range(1, 20)) class AWSL1BFile(NetCDF4FileHandler): @@ -153,19 +147,3 @@ def mask_and_scale(data_array): data_array.attrs.pop("scale_factor") data_array.attrs.pop("add_offset") return data_array - -# -# def _get_quality_attributes(self): -# """Get quality attributes.""" -# quality_group = self['quality'] -# quality_dict = {} -# for key in quality_group: -# # Add the values (as Numpy array) of each variable in the group -# # where possible -# try: -# quality_dict[key] = quality_group[key].values -# except ValueError: -# quality_dict[key] = None -# -# quality_dict.update(quality_group.attrs) -# return quality_dict diff --git a/satpy/tests/reader_tests/test_aws_l1b.py b/satpy/tests/reader_tests/test_aws_l1b.py index 480c692b57..abbf517ab8 100644 --- a/satpy/tests/reader_tests/test_aws_l1b.py +++ b/satpy/tests/reader_tests/test_aws_l1b.py @@ 
-111,9 +111,12 @@ def test_get_channel_data(aws_handler): did = dict(name="1") dataset_info = dict(file_key="data/calibration/aws_toa_brightness_temperature") expected = fake_data.isel(n_channels=0) + # mask no_data value expected = expected.where(expected != -2147483648) + # mask outside the valid range expected = expected.where(expected <= 700000) expected = expected.where(expected >= 0) + # "calibrate" expected = expected * 0.001 res = aws_handler.get_dataset(did, dataset_info) np.testing.assert_allclose(res, expected) From 8438837a839ba7ea64f0c38ed960be9d5b68db58 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Fri, 15 Dec 2023 13:29:25 +0100 Subject: [PATCH 017/340] Update satpy/readers/modis_l2.py Co-authored-by: Panu Lahtinen --- satpy/readers/modis_l2.py | 23 ++++++++++++----------- 1 file changed, 12 insertions(+), 11 deletions(-) diff --git a/satpy/readers/modis_l2.py b/satpy/readers/modis_l2.py index 1ed853d4f9..4d204064ed 100644 --- a/satpy/readers/modis_l2.py +++ b/satpy/readers/modis_l2.py @@ -172,17 +172,18 @@ def available_datasets(self, configured_datasets): for var_name, val in self.sd.datasets().items(): if var_name in handled: continue - if len(val[0]) == 2: - resolution = res_dict.get(val[1][-1]) - if resolution is not None: - ds_info = { - "file_type": self.filetype_info["file_type"], - "resolution": resolution, - "name": var_name, - "file_key": var_name, - "coordinates": ["longitude", "latitude"] - } - yield True, ds_info + if len(val[0]) != 2: + continue + resolution = res_dict.get(val[1][-1]) + if resolution is not None: + ds_info = { + "file_type": self.filetype_info["file_type"], + "resolution": resolution, + "name": var_name, + "file_key": var_name, + "coordinates": ["longitude", "latitude"] + } + yield True, ds_info def _extract_and_mask_category_dataset(self, dataset_id, dataset_info, var_name): # what dimension is per-byte From 0cc050feba14703388552ed9cea2a1b6b6344413 Mon Sep 17 00:00:00 2001 From: 
=?UTF-8?q?Benjamin=20R=C3=B6sner?= Date: Mon, 18 Dec 2023 14:01:27 +0100 Subject: [PATCH 018/340] test: add tests. --- satpy/etc/readers/modis_l2.yaml | 6 +-- .../modis_tests/_modis_fixtures.py | 37 ++++++++++++++++--- .../reader_tests/modis_tests/conftest.py | 1 + .../reader_tests/modis_tests/test_modis_l2.py | 19 +++++++++- 4 files changed, 54 insertions(+), 9 deletions(-) diff --git a/satpy/etc/readers/modis_l2.yaml b/satpy/etc/readers/modis_l2.yaml index eb9ad887d6..7d53a2f893 100644 --- a/satpy/etc/readers/modis_l2.yaml +++ b/satpy/etc/readers/modis_l2.yaml @@ -1,7 +1,7 @@ reader: name: modis_l2 short_name: MODIS l2 - long_name: MODIS Level 2 (mod35) data in HDF-EOS format + long_name: Terra and Aqua MODIS Level 2 (mod35) data in HDF-EOS format description: MODIS HDF-EOS L2 Reader status: Beta supports_fsspec: false @@ -83,7 +83,7 @@ datasets: name: longitude resolution: 5000: - file_type: [mod35_hdf, mod06_hdf, mod06ct_hdf, mod07_hdf, mod05_hdf] + file_type: [mod35_hdf, mod06_hdf, mod06ct_hdf, mod07_hdf, mod05_hdf, modis_l2_product] 1000: file_type: [hdf_eos_geo, mod35_hdf, mod06_hdf, mod05_hdf, modis_l2_product] 500: @@ -98,7 +98,7 @@ datasets: resolution: 5000: # For EUM reduced (thinned) files - file_type: [mod35_hdf, mod06_hdf, mod06ct_hdf, mod07_hdf, mod05_hdf] + file_type: [mod35_hdf, mod06_hdf, mod06ct_hdf, mod07_hdf, mod05_hdf, modis_l2_product] 1000: file_type: [hdf_eos_geo, mod35_hdf, mod06_hdf, mod05_hdf, modis_l2_product] 500: diff --git a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py index 6dc4bf2d05..d6faa20f07 100644 --- a/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py +++ b/satpy/tests/reader_tests/modis_tests/_modis_fixtures.py @@ -19,7 +19,7 @@ from __future__ import annotations from datetime import datetime, timedelta -from typing import Optional +from typing import Literal, Optional import numpy as np import pytest @@ -459,11 +459,20 @@ def 
modis_l1b_nasa_1km_mod03_files(modis_l1b_nasa_mod021km_file, modis_l1b_nasa_ # Level 2 Fixtures -def _get_basic_variable_info(var_name: str, resolution: int) -> dict: +def _get_basic_variable_info(var_name: str, resolution: int, dim_size: Literal[2, 3]=2) -> dict: shape = _shape_for_resolution(resolution) - data = np.ones((shape[0], shape[1]), dtype=np.uint16) + row_dim_name = f"Cell_Along_Swath_{resolution}m:modl2" col_dim_name = f"Cell_Across_Swath_{resolution}m:modl2" + + if dim_size == 3: + data = np.ones((1, shape[0], shape[1]), dtype=np.uint16) + dim_labels = ["channel", row_dim_name, col_dim_name] + elif dim_size == 2: + data = np.ones((shape[0], shape[1]), dtype=np.uint16) + dim_labels = [row_dim_name, col_dim_name] + + return { var_name: { "data": data, @@ -471,8 +480,7 @@ def _get_basic_variable_info(var_name: str, resolution: int) -> dict: "fill_value": 0, "attrs": { # dim_labels are just unique dimension names, may not match exactly with real world files - "dim_labels": [row_dim_name, - col_dim_name], + "dim_labels": dim_labels, "valid_range": (0, 32767), "scale_factor": 2.0, "add_offset": -1.0, @@ -662,6 +670,8 @@ def modis_l2_nasa_mod06_file(tmpdir_factory) -> list[str]: full_path = str(tmpdir_factory.mktemp("modis_l2").join(filename)) variable_infos = _get_l1b_geo_variable_info(filename, 5000, include_angles=True) variable_infos.update(_get_basic_variable_info("Surface_Pressure", 5000)) + variable_infos.update(_get_basic_variable_info("non_yaml_configured_2D_var", 5000)) + variable_infos.update(_get_basic_variable_info("non_yaml_configured_3D_var", 5000, dim_size=3)) create_hdfeos_test_file(full_path, variable_infos, _create_struct_metadata(5000), @@ -669,6 +679,23 @@ def modis_l2_nasa_mod06_file(tmpdir_factory) -> list[str]: _create_header_metadata()) return [full_path] +@pytest.fixture(scope="session") +def modis_l2_nasa_mod99_file(tmpdir_factory) -> list[str]: + """Create an "artificial" MOD99 L2 HDF4 file with headers. 
+ + There exists no MOD99 Level 2 product. This is just for testing available datasets + in arbitrary level 2 file. + """ + filename = generate_nasa_l2_filename("MOD99") + full_path = str(tmpdir_factory.mktemp("modis_l2").join(filename)) + variable_infos = _get_l1b_geo_variable_info(filename, 5000, include_angles=True) + variable_infos.update(_get_basic_variable_info("non_yaml_configured_2D_var", 1000)) + create_hdfeos_test_file(full_path, + variable_infos, + _create_struct_metadata(5000), + _create_core_metadata("MOD99"), + _create_header_metadata()) + return [full_path] @pytest.fixture(scope="session") def modis_l2_imapp_snowmask_file(tmpdir_factory) -> list[str]: diff --git a/satpy/tests/reader_tests/modis_tests/conftest.py b/satpy/tests/reader_tests/modis_tests/conftest.py index 309b16321f..42e2a0dfa8 100644 --- a/satpy/tests/reader_tests/modis_tests/conftest.py +++ b/satpy/tests/reader_tests/modis_tests/conftest.py @@ -32,6 +32,7 @@ modis_l2_nasa_mod06_file, modis_l2_nasa_mod35_file, modis_l2_nasa_mod35_mod03_files, + modis_l2_nasa_mod99_file, modis_l3_nasa_mod09_file, modis_l3_nasa_mod43_file, ) diff --git a/satpy/tests/reader_tests/modis_tests/test_modis_l2.py b/satpy/tests/reader_tests/modis_tests/test_modis_l2.py index 8876decb59..18e34814f1 100644 --- a/satpy/tests/reader_tests/modis_tests/test_modis_l2.py +++ b/satpy/tests/reader_tests/modis_tests/test_modis_l2.py @@ -38,6 +38,7 @@ # - modis_l2_imapp_snowmask_geo_files # - modis_l2_nasa_mod06_file # - modis_l2_nasa_mod35_file +# - modis_l2_nasa_mod99_file # - modis_l2_nasa_mod35_mod03_files @@ -162,7 +163,9 @@ def test_load_250m_cloud_mask_dataset(self, input_files, exp_area): @pytest.mark.parametrize( ("input_files", "loadables", "exp_resolution", "exp_area", "exp_value"), [ - (lazy_fixture("modis_l2_nasa_mod06_file"), ["surface_pressure"], 5000, True, 4.0), + (lazy_fixture("modis_l2_nasa_mod06_file"), ["surface_pressure", "non_yaml_configured_2D_var"], + 5000, True, 4.0), + 
(lazy_fixture("modis_l2_nasa_mod99_file"), ["non_yaml_configured_2D_var"], 1000, True, 4.0), # snow mask is considered a category product, factor/offset ignored (lazy_fixture("modis_l2_imapp_snowmask_file"), ["snow_mask"], 1000, False, 1.0), (lazy_fixture("modis_l2_imapp_snowmask_geo_files"), ["snow_mask"], 1000, True, 1.0), @@ -181,3 +184,17 @@ def test_load_l2_dataset(self, input_files, loadables, exp_resolution, exp_area, assert data_arr.shape == _shape_for_resolution(exp_resolution) assert data_arr.attrs.get("resolution") == exp_resolution _check_shared_metadata(data_arr, expect_area=exp_area) + + def test_scene_dynamic_available_datasets(self, modis_l2_nasa_mod06_file): + """Test available datasets method to dynmically add non configured datasets.""" + import xarray as xr + scene = Scene(reader="modis_l2", filenames=modis_l2_nasa_mod06_file) + available_datasets = scene.all_dataset_names() + assert len(available_datasets) > 0 + assert "surface_pressure" in available_datasets + # make sure configured datasets are added again + assert available_datasets.count("surface_pressure") == 1 + assert "non_yaml_configured_2D_var" in available_datasets + file_ds = xr.open_dataset(modis_l2_nasa_mod06_file[0], engine="netcdf4") + assert "non_yaml_configured_3D_var" not in available_datasets and "non_yaml_configured_3D_var" in file_ds # noqa PT018 + assert "non_yaml_configured_3D_var" in file_ds From afe779ffa3de6dc01c67a54ad0ae662ae5e384db Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Benjamin=20R=C3=B6sner?= Date: Mon, 18 Dec 2023 14:01:49 +0100 Subject: [PATCH 019/340] doc: update documentation. 
--- doc/source/conf.py | 8 ++++++-- doc/source/reading.rst | 3 +++ satpy/readers/modis_l2.py | 6 +++++- 3 files changed, 14 insertions(+), 3 deletions(-) diff --git a/doc/source/conf.py b/doc/source/conf.py index 3aa810420e..33c60272d2 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -85,10 +85,14 @@ def __getattr__(cls, name): # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = ["sphinx.ext.autodoc", "sphinx.ext.intersphinx", "sphinx.ext.todo", "sphinx.ext.coverage", - "sphinx.ext.doctest", "sphinx.ext.napoleon", "sphinx.ext.autosummary", "doi_role", - "sphinx.ext.viewcode", "sphinxcontrib.apidoc", + "sphinx.ext.doctest", "sphinx.ext.napoleon", "sphinx.ext.autosummary", "sphinx.ext.autosectionlabel", + "doi_role", "sphinx.ext.viewcode", "sphinxcontrib.apidoc", "sphinx.ext.mathjax"] +# Autosectionlabel +# Make sure target is unique +autosectionlabel_prefix_document = True + # API docs apidoc_module_dir = "../../satpy" apidoc_output_dir = "api" diff --git a/doc/source/reading.rst b/doc/source/reading.rst index b7264eeb6e..c9b23a0381 100644 --- a/doc/source/reading.rst +++ b/doc/source/reading.rst @@ -98,6 +98,9 @@ load the datasets using e.g.:: :meth:`scn.missing_datasets ` property for any ``DataID`` that could not be loaded. +Available datasets +------------------ + To find out what datasets are available from a reader from the files that were provided to the ``Scene`` use :meth:`~satpy.scene.Scene.available_dataset_ids`:: diff --git a/satpy/readers/modis_l2.py b/satpy/readers/modis_l2.py index 4d204064ed..f8f39b957d 100644 --- a/satpy/readers/modis_l2.py +++ b/satpy/readers/modis_l2.py @@ -28,7 +28,11 @@ - m[o/y]d35_l2: cloud_mask dataset - some datasets in m[o/y]d06 files -To get a list of the available datasets for a given file refer to the "Load data" section in :doc:`../reading`. 
+Additionally the reader tries to add non yaml configured 2D datasets dynamically. As mentioned above there are a lot +of different level 2 datasets so this might not work in every case (for example bit encoded datasets similar to the +supported m[0/y]d35_l2 cloud mask are not decoded). + +To get a list of the available datasets for a given file refer to the :ref:`reading:available datasets` section. Geolocation files From 28f44c7ee01f6d1abe48ebf233076cb39f1993ea Mon Sep 17 00:00:00 2001 From: David Navia Date: Thu, 11 Jan 2024 09:57:57 +0100 Subject: [PATCH 020/340] Add common functionality for FCI data readers --- satpy/readers/fci_base.py | 50 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 50 insertions(+) create mode 100644 satpy/readers/fci_base.py diff --git a/satpy/readers/fci_base.py b/satpy/readers/fci_base.py new file mode 100644 index 0000000000..c4a3714291 --- /dev/null +++ b/satpy/readers/fci_base.py @@ -0,0 +1,50 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2017-2018 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . +"""Common functionality for FCI data readers.""" +from __future__ import annotations + + +def calculate_area_extent(area_dict): + """Calculate the area extent seen by MTG FCI instrument. + + Since the center of the FCI L2 grid is located at the interface between the pixels, there are equally many + pixels (e.g. 
5568/2 = 2784 for 2km grid) in each direction from the center points. Hence, the area extent + can be easily computed by simply adding and subtracting half the width and height from the centre point (=0). + + Args: + area_dict: A dictionary containing the required parameters + ncols: number of pixels in east-west direction + nlines: number of pixels in south-north direction + column_step: Pixel resolution in meters in east-west direction + line_step: Pixel resolution in meters in south-north direction + Returns: + tuple: An area extent for the scene defined by the lower left and + upper right corners + + """ + ncols = area_dict["ncols"] + nlines = area_dict["nlines"] + column_step = area_dict["column_step"] + line_step = area_dict["line_step"] + + ll_c = (0 - ncols / 2.) * column_step + ll_l = (0 + nlines / 2.) * line_step + ur_c = (0 + ncols / 2.) * column_step + ur_l = (0 - nlines / 2.) * line_step + + return (ll_c, ll_l, ur_c, ur_l) From d3ccedc745abbb0b523e04d16a84283d58deb827 Mon Sep 17 00:00:00 2001 From: David Navia Date: Thu, 11 Jan 2024 13:07:19 +0100 Subject: [PATCH 021/340] Add reader for both SEVIRI and FCI L2 products in GRIB2 format --- satpy/etc/readers/eum_l2_grib.yaml | 387 +++++++++++++++++++++++++++++ satpy/readers/eum_l2_grib.py | 315 +++++++++++++++++++++++ 2 files changed, 702 insertions(+) create mode 100644 satpy/etc/readers/eum_l2_grib.yaml create mode 100644 satpy/readers/eum_l2_grib.py diff --git a/satpy/etc/readers/eum_l2_grib.yaml b/satpy/etc/readers/eum_l2_grib.yaml new file mode 100644 index 0000000000..80edd3b2e5 --- /dev/null +++ b/satpy/etc/readers/eum_l2_grib.yaml @@ -0,0 +1,387 @@ +reader: + name: eum_l2_grib + short_name: EUM L2 GRIB + long_name: MSG (Meteosat 8 to 11) SEVIRI Level products 2 and FCI L2 products in GRIB2 format + description: Reader for EUMETSAT MSG SEVIRI L2 files and FCI L2 files in GRIB format. 
+ status: Alpha + supports_fsspec: false + sensors: [seviri,fci] + reader: !!python/name:satpy.readers.yaml_reader.GEOFlippableFileYAMLReader + + +file_types: + + # EUMETSAT MSG SEVIRI L2 Aerosol Properties over Sea product + # https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:AES + grib_seviri_aes: + file_reader: !!python/name:satpy.readers.eum_l2_grib.EUML2GribFileHandler + file_patterns: + - 'AESGRIBProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' + - '{spacecraft:4s}-SEVI-MSGAESE-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' + - '{spacecraft:4s}-SEVI-MSGAESE-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}.grb' + - '{spacecraft:4s}-SEVI-MSGAESE-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-NA.grb' + + # EUMETSAT MSG SEVIRI L2 Cloud Mask product + # https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:CLM + grib_seviri_clm: + file_reader: !!python/name:satpy.readers.eum_l2_grib.EUML2GribFileHandler + file_patterns: + - 'CLMEncProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' + - '{spacecraft:4s}-SEVI-MSGCLMK-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' + - '{spacecraft:4s}-SEVI-MSGCLMK-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}.grb' + - '{spacecraft:4s}-SEVI-MSGCLMK-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-NA.grb' + + # EUMETSAT MSG SEVIRI L2 Cloud Top Height product + # https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:CTH + grib_seviri_cth: + file_reader: !!python/name:satpy.readers.eum_l2_grib.EUML2GribFileHandler + file_patterns: + - 'CTHEncProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' + - 
'{spacecraft:4s}-SEVI-MSGCLTH-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' + - '{spacecraft:4s}-SEVI-MSGCLTH-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}.grb' + - '{spacecraft:4s}-SEVI-MSGCLTH-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-NA.grb' + + # EUMETSAT MSG SEVIRI L2 Clear-Sky Reflectance Map product + # https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:CRM + grib_seviri_crm: + file_reader: !!python/name:satpy.readers.eum_l2_grib.EUML2GribFileHandler + file_patterns: + - 'CRMEncProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' + - '{spacecraft:4s}-SEVI-MSGCRMN-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' + - '{spacecraft:4s}-SEVI-MSGCRMN-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}.grb' + - '{spacecraft:4s}-SEVI-MSGCRMN-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-NA.grb' + + # EUMETSAT MSG SEVIRI L2 Active Fire Monitoring product + # https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:FIR + grib_seviri_fir: + file_reader: !!python/name:satpy.readers.eum_l2_grib.EUML2GribFileHandler + file_patterns: + - 'FIREncProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' + - '{spacecraft:4s}-SEVI-MSGFIRG-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' + - '{spacecraft:4s}-SEVI-MSGFIRG-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}.grb' + - '{spacecraft:4s}-SEVI-MSGFIRG-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-NA.grb' + + # EUMETSAT MSG SEVIRI L2 Multi-Sensor Precipitation Estimate product + # https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:MPE-GRIB + grib_seviri_mpe: + file_reader: 
!!python/name:satpy.readers.eum_l2_grib.EUML2GribFileHandler + file_patterns: + - 'MPEGRIBProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' + - '{spacecraft:4s}-SEVI-MSGMPEG-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' + - '{spacecraft:4s}-SEVI-MSGMPEG-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}.grb' + - '{spacecraft:4s}-SEVI-MSGMPEG-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-NA.grb' + + # EUMETSAT MSG SEVIRI L2 Optimal Cloud Analysis product + # https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:OCA + grib_seviri_oca: + file_reader: !!python/name:satpy.readers.eum_l2_grib.EUML2GribFileHandler + file_patterns: + - 'OCAEncProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' + - '{spacecraft:4s}-SEVI-MSGOCAE-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' + - '{spacecraft:4s}-SEVI-MSGOCAE-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}.grb' + - '{spacecraft:4s}-SEVI-MSGOCAE-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-NA.grb' + + grib_fci_clm: + file_reader: !!python/name:satpy.readers.eum_l2_grib.EUML2GribFileHandler + file_patterns: + - '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+FCI-2-CLM-{subtype}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-GRIB2_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.bin' + +datasets: + + # EUMETSAT MSG SEVIRI L2 Aerosol Properties over Sea product + aerosol_optical_thickness_vis06: + name: aerosol_optical_thickness_vis06 + long_name: Aerosol 
optical Thickness at 0.6um + standard_name: atmosphere_absorption_optical_thickness_due_to_ambient_aerosol + resolution: 9001.209497451 + file_type: grib_seviri_aes + parameter_number: 20 + units: "1" + + aerosol_optical_thickness_vis08: + name: aerosol_optical_thickness_vis08 + long_name: Aerosol optical Thickness at 0.8um + standard_name: atmosphere_absorption_optical_thickness_due_to_ambient_aerosol + resolution: 9001.209497451 + file_type: grib_seviri_aes + parameter_number: 21 + units: "1" + + aerosol_optical_thickness_vis16: + name: aerosol_optical_thickness_vis16 + long_name: Aerosol optical Thickness at 1.6um + standard_name: atmosphere_absorption_optical_thickness_due_to_ambient_aerosol + resolution: 9001.209497451 + file_type: grib_seviri_aes + parameter_number: 22 + units: "1" + + angstroem_coefficient: + name: angstroem_coefficient + long_name: Angstroem Coefficient + standard_name: aerosol_angstrom_exponent + resolution: 9001.209497451 + file_type: grib_seviri_aes + parameter_number: 23 + units: "1" + + aes_quality: + name: aes_quality + long_name: AES Product Quality Flag + standard_name: quality_flag + resolution: 9001.209497451 + file_type: grib_seviri_aes + parameter_number: 192 + units: "1" + flag_values: [0, 1, 2, 3] + flag_meanings: ['clear sky over water','clear sky over land', 'cloudy', 'no data' ] + + + # EUMETSAT MSG SEVIRI L2 Cloud Mask product + cloud_mask: + name: cloud_mask + long_name: Cloud Classification + standard_name: cloud_classification + resolution: + 3000.403165817: {file_type: grib_seviri_clm} + 2000: {file_type: grib_fci_clm} + parameter_number: 7 + units: "1" + flag_values: [0, 1, 2, 3] + flag_meanings: ['clear sky over water','clear sky over land', 'cloudy', 'no data' ] + + + # EUMETSAT MSG SEVIRI L2 Cloud Top Height product + cloud_top_height: + name: cloud_top_height + long_name: Cloud Top Height + standard_name: height_at_cloud_top + resolution: 9001.209497451 + file_type: grib_seviri_cth + parameter_number: 2 + units: m 
+ + cloud_top_quality: + name: cloud_top_quality + long_name: CTH Product Quality Flag + standard_name: height_at_cloud_top quality_flag + resolution: 9001.209497451 + file_type: grib_seviri_cth + parameter_number: 3 + units: "1" + flag_values: [0, 1] + flag_meanings: ['good quality retrieval','poor quality retrieval' ] + + + # EUMETSAT MSG SEVIRI L2 Clear-Sky Reflectance Map product + vis_refl_06: + name: vis_refl_06 + long_name: TOA Bidirectional Reflectance at 0.6um (7 days average) + standard_name: toa_bidirectional_reflectance + resolution: 3000.403165817 + wavelength: [0.56, 0.635, 0.71] + file_type: grib_seviri_crm + parameter_number: 9 + units: "%" + + vis_refl_08: + name: vis_refl_08 + long_name: TOA Bidirectional Reflectance at 0.8um (7 days average) + standard_name: toa_bidirectional_reflectance + resolution: 3000.403165817 + wavelength: [0.74, 0.81, 0.88] + file_type: grib_seviri_crm + parameter_number: 10 + units: "%" + + vis_refl_16: + name: vis_refl_16 + long_name: TOA Bidirectional Reflectance at 1.6um (7 days average) + standard_name: toa_bidirectional_reflectance + resolution: 3000.403165817 + wavelength: [1.5, 1.64, 1.78] + file_type: grib_seviri_crm + parameter_number: 11 + units: "%" + + nir_refl_39: + name: nir_refl_39 + long_name: TOA Bidirectional Reflectance at 3.9um (7 days average) + standard_name: toa_bidirectional_reflectance + resolution: 3000.403165817 + wavelength: [3.48, 3.92, 4.36] + file_type: grib_seviri_crm + parameter_number: 12 + units: "%" + + num_accumulations: + name: num_accumulations + long_name: CRM Product Number of Accumulations + standard_name: number_of_accumulations + resolution: 3000.403165817 + file_type: grib_seviri_crm + parameter_number: 6 + units: "1" + + solar_zenith_angle: + name: solar_zenith_angle + long_name: Solar Zenith Angle (7 days average) + standard_name: solar_zenith_angle + resolution: 3000.403165817 + file_type: grib_seviri_crm + parameter_number: 7 + units: degrees + + relative_azimuth_angle: + 
name: relative_azimuth_angle + long_name: Relative Azimuth Angle (7 days average) + standard_name: relative_sensor_azimuth_angle + resolution: 3000.403165817 + file_type: grib_seviri_crm + parameter_number: 8 + units: degrees + + + # EUMETSAT MSG SEVIRI L2 Active Fire Monitoring product + active_fires: + name: active_fires + long_name: Active Fire Classification + standard_name: active_fire_classification + resolution: 3000.403165817 + file_type: grib_seviri_fir + parameter_number: 9 + units: "1" + flag_values: [0, 1, 2, 3] + flag_meanings: ['no fire','possible fire', 'probable fire', 'missing' ] + + fire_probability: + name: fire_probability + long_name: Fire Probability + standard_name: fire_probability + resolution: 3000.403165817 + file_type: grib_seviri_fir + parameter_number: 192 + units: "%" + + + # EUMETSAT MSG SEVIRI L2 Multi-Sensor Precipitation Estimate product + instantaneous_rain_rate: + name: instantaneous_rain_rate + long_name: MPE Product Instantaneous Rain Rate + standard_name: rainfall_rate + resolution: 3000.403165817 + file_type: grib_seviri_mpe + parameter_number: 1 + units: "kg m-2 s-1" + + + # EUMETSAT MSG SEVIRI L2 Optimal Cloud Analysis product + pixel_scene_type: + name: pixel_scene_type + long_name: Cloud Type + standard_name: scene_classification + resolution: 3000.403165817 + file_type: grib_seviri_oca + parameter_number: 8 + units: "1" + flag_values: [24,111,112] + flag_meanings: ['multi-layered cloud','water cloud','ice cloud'] + + measurement_cost: + name: measurement_cost + long_name: OCA Cost Function - Measurement part + standard_name: cost_function + resolution: 3000.403165817 + file_type: grib_seviri_oca + parameter_number: 30 + units: "1" + + upper_layer_cloud_optical_depth: + name: upper_layer_cloud_optical_depth + long_name: Upper Cloud Layer Optical Depth + standard_name: atmosphere_optical_thickness_due_to_cloud + resolution: 3000.403165817 + file_type: grib_seviri_oca + parameter_number: 31 + units: "1" + + 
upper_layer_cloud_top_pressure: + name: upper_layer_cloud_top_pressure + long_name: Upper Cloud Top Pressure + standard_name: air_pressure_at_cloud_top + resolution: 3000.403165817 + file_type: grib_seviri_oca + parameter_number: 32 + units: Pa + + upper_layer_cloud_effective_radius: + name: upper_layer_cloud_effective_radius + long_name: Upper Cloud Particle Effective Radius + standard_name: effective_radius_of_cloud_condensed_water_particles_at_cloud_top + resolution: 3000.403165817 + file_type: grib_seviri_oca + parameter_number: 33 + units: m + + error_in_upper_layer_cloud_optical_depth: + name: error_in_upper_layer_cloud_optical_depth + long_name: Upper Cloud Optical Depth Error Estimate + standard_name: atmosphere_optical_thickness_due_to_cloud standard_error + resolution: 3000.403165817 + file_type: grib_seviri_oca + parameter_number: 34 + units: "1" + + error_in_upper_layer_cloud_top_pressure: + name: error_in_upper_layer_cloud_top_pressure + long_name: Upper Cloud Top Pressure Error Estimate + standard_name: air_pressure_at_cloud_top standard_error + resolution: 3000.403165817 + file_type: grib_seviri_oca + parameter_number: 35 + units: Pa + + error_in_upper_layer_cloud_effective_radius: + name: error_in_upper_layer_cloud_effective_radius + long_name: Upper Cloud Particle Effective Radius Error Estimate + standard_name: effective_radius_of_cloud_condensed_water_particles_at_cloud_top standard_error + resolution: 3000.403165817 + file_type: grib_seviri_oca + parameter_number: 36 + units: m + + lower_layer_cloud_optical_depth: + name: lower_layer_cloud_optical_depth + long_name: Lower Cloud Optical Depth + standard_name: atmosphere_optical_thickness_due_to_cloud_in_lower_atmosphere_layer + resolution: 3000.403165817 + file_type: grib_seviri_oca + parameter_number: 37 + units: "1" + + lower_layer_cloud_top_pressure: + name: lower_layer_cloud_top_pressure + long_name: Lower Cloud Top Pressure + standard_name: air_pressure_at_cloud_top_in_lower_atmosphere_layer 
+ resolution: 3000.403165817 + file_type: grib_seviri_oca + parameter_number: 38 + units: Pa + + error_in_lower_layer_cloud_optical_depth: + name: error_in_lower_layer_cloud_optical_depth + long_name: Lower Cloud Optical Depth Error Estimate + standard_name: atmosphere_optical_thickness_due_to_cloud_in_lower_atmosphere_layer standard_error + resolution: 3000.403165817 + file_type: grib_seviri_oca + parameter_number: 39 + units: "1" + + error_in_lower_layer_cloud_top_pressure: + name: error_in_lower_layer_cloud_top_pressure + long_name: Lower Cloud Top Pressure Error Estimate + standard_name: air_pressure_at_cloud_top_in_lower_atmosphere_layer standard_error + resolution: 3000.403165817 + file_type: grib_seviri_oca + parameter_number: 40 + units: Pa diff --git a/satpy/readers/eum_l2_grib.py b/satpy/readers/eum_l2_grib.py new file mode 100644 index 0000000000..47cf9a0ba9 --- /dev/null +++ b/satpy/readers/eum_l2_grib.py @@ -0,0 +1,315 @@ +# Copyright (c) 2019-2023 Satpy developers +# +# satpy is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# satpy is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with satpy. If not, see . + +"""Reader for both SEVIRI and FCI L2 products in GRIB2 format. 
+ +References: + FM 92 GRIB Edition 2 + https://www.wmo.int/pages/prog/www/WMOCodes/Guides/GRIB/GRIB2_062006.pdf + EUMETSAT Product Navigator + https://navigator.eumetsat.int/ +""" + +import logging +from datetime import timedelta + +import dask.array as da +import numpy as np +import xarray as xr + +from satpy.readers._geos_area import get_area_definition, get_geos_area_naming +from satpy.readers.eum_base import get_service_mode +from satpy.readers.fci_base import calculate_area_extent as fci_calculate_area_extent +from satpy.readers.file_handlers import BaseFileHandler +from satpy.readers.seviri_base import PLATFORM_DICT, REPEAT_CYCLE_DURATION +from satpy.readers.seviri_base import calculate_area_extent as seviri_calculate_area_extent +from satpy.utils import get_legacy_chunk_size + +CHUNK_SIZE = get_legacy_chunk_size() + +try: + import eccodes as ec +except ImportError: + raise ImportError( + "Missing eccodes-python and/or eccodes C-library installation. Use conda to install eccodes") + +logger = logging.getLogger(__name__) + + +class EUML2GribFileHandler(BaseFileHandler): + """Reader class for EUM L2 products in GRIB format.""" + + calculate_area_extent = None + + def __init__(self, filename, filename_info, filetype_info): + """Read the global attributes and prepare for dataset reading.""" + super().__init__(filename, filename_info, filetype_info) + # Turn on support for multiple fields in single GRIB messages (required for SEVIRI L2 files) + ec.codes_grib_multi_support_on() + + if "seviri" in self.filetype_info["file_type"]: + self.sensor = "seviri" + self.PLATFORM_NAME = PLATFORM_DICT[self.filename_info["spacecraft"]] + elif "fci" in self.filetype_info["file_type"]: + self.sensor = "fci" + self.PLATFORM_NAME = f"MTG-i{self.filename_info['spacecraft_id']}" + pass + + @property + def start_time(self): + """Return the sensing start time.""" + return self.filename_info["start_time"] + + @property + def end_time(self): + """Return the sensing end time.""" + if 
self.sensor == "seviri": + return self.start_time + timedelta(minutes=REPEAT_CYCLE_DURATION) + elif self.sensor == "fci": + return self.filename_info["end_time"] + + def get_area_def(self, dataset_id): + """Return the area definition for a dataset.""" + # Compute the dictionary with the area extension + + self._area_dict["column_step"] = dataset_id["resolution"] + self._area_dict["line_step"] = dataset_id["resolution"] + + if self.sensor == "seviri": + area_extent = seviri_calculate_area_extent(self._area_dict) + + elif self.sensor == "fci": + area_extent = fci_calculate_area_extent(self._area_dict) + + # Call the get_area_definition function to obtain the area + area_def = get_area_definition(self._pdict, area_extent) + + return area_def + + def get_dataset(self, dataset_id, dataset_info): + """Get dataset using the parameter_number key in dataset_info. + + In a previous version of the reader, the attributes (nrows, ncols, ssp_lon) and projection information + (pdict and area_dict) were computed while initializing the file handler. Also the code would break out from + the While-loop below as soon as the correct parameter_number was found. This has now been revised because the + reader would sometimes give corrupt information about the number of messages in the file and the dataset + dimensions within a given message if the file was only partly read (not looping over all messages) in an earlier + instance. 
+ """ + logger.debug("Reading in file to get dataset with parameter number %d.", + dataset_info["parameter_number"]) + + xarr = None + message_found = False + with open(self.filename, "rb") as fh: + + # Iterate over all messages and fetch data when the correct parameter number is found + while True: + gid = ec.codes_grib_new_from_file(fh) + + if gid is None: + if not message_found: + # Could not obtain a valid message ID from the grib file + logger.warning("Could not find parameter_number %d in GRIB file, no valid Dataset created", + dataset_info["parameter_number"]) + break + + # Check if the parameter number in the GRIB message corresponds to the required key + parameter_number = self._get_from_msg(gid, "parameterNumber") + + if parameter_number == dataset_info["parameter_number"]: + + self._res = dataset_id["resolution"] + self._read_attributes(gid) + + # Read the missing value + missing_value = self._get_from_msg(gid, "missingValue") + + # Retrieve values and metadata from the GRIB message, masking the values equal to missing_value + xarr = self._get_xarray_from_msg(gid) + + xarr.data = da.where(xarr.data == missing_value, np.nan, xarr.data) + + ec.codes_release(gid) + + # Combine all metadata into the dataset attributes and break out of the loop + xarr.attrs.update(dataset_info) + xarr.attrs.update(self._get_attributes()) + + message_found = True + + else: + # The parameter number is not the correct one, release gid and skip to next message + ec.codes_release(gid) + + return xarr + + def _read_attributes(self, gid): + """Read the parameter attributes from the message and create the projection and area dictionaries.""" + # Read SSP and date/time + self._ssp_lon = self._get_from_msg(gid, "longitudeOfSubSatellitePointInDegrees") + + # Read number of points on the x and y axes + self._nrows = self._get_from_msg(gid, "Ny") + self._ncols = self._get_from_msg(gid, "Nx") + + # Creates the projection and area dictionaries + self._pdict, self._area_dict = 
self._get_proj_area(gid) + + def _get_proj_area(self, gid): + """Compute the dictionary with the projection and area definition from a GRIB message. + + Args: + gid: The ID of the GRIB message. + + Returns: + tuple: A tuple of two dictionaries for the projection and the area definition. + pdict: + a: Earth major axis [m] + b: Earth minor axis [m] + h: Height over surface [m] + ssp_lon: longitude of subsatellite point [deg] + nlines: number of lines + ncols: number of columns + a_name: name of the area + a_desc: description of the area + p_id: id of the projection + area_dict: + center_point: coordinate of the center point + north: coordinate of the north limit + east: coordinate of the east limit + west: coordinate of the west limit + south: coordinate of the south limit + """ + # Get name of area definition + area_naming_input_dict = {"platform_name": "msg", + "instrument_name": self.sensor, + "resolution": self._res, + } + + area_naming = get_geos_area_naming({**area_naming_input_dict, + **get_service_mode(self.sensor, self._ssp_lon)}) + + # Read all projection and area parameters from the message + earth_major_axis_in_meters = self._get_from_msg(gid, "earthMajorAxis") * 1000.0 # [m] + earth_minor_axis_in_meters = self._get_from_msg(gid, "earthMinorAxis") * 1000.0 # [m] + + if self.sensor == "seviri": + earth_major_axis_in_meters = self._scale_earth_axis(earth_major_axis_in_meters) + earth_minor_axis_in_meters = self._scale_earth_axis(earth_minor_axis_in_meters) + + nr_in_radius_of_earth = self._get_from_msg(gid, "NrInRadiusOfEarth") + xp_in_grid_lengths = self._get_from_msg(gid, "XpInGridLengths") + h_in_meters = earth_major_axis_in_meters * (nr_in_radius_of_earth - 1.0) # [m] + + # Create the dictionary with the projection data + pdict = { + "a": earth_major_axis_in_meters, + "b": earth_minor_axis_in_meters, + "h": h_in_meters, + "ssp_lon": self._ssp_lon, + "nlines": self._ncols, + "ncols": self._nrows, + "a_name": area_naming["area_id"], + "a_desc": 
area_naming["description"], + "p_id": "", + } + + if self.sensor == "seviri": + # Compute the dictionary with the area extension + area_dict = { + "center_point": xp_in_grid_lengths, + "north": self._nrows, + "east": 1, + "west": self._ncols, + "south": 1, + } + + elif self.sensor == "fci": + area_dict = { + "nlines": self._ncols, + "ncols": self._nrows, + } + + return pdict, area_dict + + @staticmethod + def _scale_earth_axis(data): + """Scale Earth axis data to make sure the value matches the expected unit [m]. + + The earthMinorAxis value stored in the aerosol over sea product is scaled incorrectly by a factor of 1e8. This + method provides a flexible temporary workaround by making sure that all earth axis values are scaled such + that they are on the order of millions of meters as expected by the reader. As soon as the scaling issue has + been resolved by EUMETSAT this workaround can be removed. + + """ + scale_factor = 10 ** np.ceil(np.log10(1e6/data)) + return data * scale_factor + + def _get_xarray_from_msg(self, gid): + """Read the values from the GRIB message and return a DataArray object. + + Args: + gid: The ID of the GRIB message. + + Returns: + DataArray: The array containing the retrieved values. + """ + # Data from GRIB message are read into an Xarray... + xarr = xr.DataArray(da.from_array(ec.codes_get_values( + gid).reshape(self._nrows, self._ncols), CHUNK_SIZE), dims=("y", "x")) + + return xarr + + def _get_attributes(self): + """Create a dictionary of attributes to be added to the dataset. + + Returns: + dict: A dictionary of parameter attributes. 
+ ssp_lon: longitude of subsatellite point + sensor: name of sensor + platform_name: name of the platform + """ + orbital_parameters = { + "projection_longitude": self._ssp_lon + } + + attributes = { + "orbital_parameters": orbital_parameters, + "sensor": self.sensor + } + + + attributes["platform_name"] = self.PLATFORM_NAME + + return attributes + + @staticmethod + def _get_from_msg(gid, key): + """Get a value from the GRIB message based on the key, return None if missing. + + Args: + gid: The ID of the GRIB message. + key: The key of the required attribute. + + Returns: + The retrieved attribute or None if the key is missing. + """ + try: + attr = ec.codes_get(gid, key) + except ec.KeyValueNotFoundError: + logger.warning("Key %s not found in GRIB message", key) + attr = None + return attr From de65b636e5ccd21cca449b93afee3b477c75b1b8 Mon Sep 17 00:00:00 2001 From: David Navia Date: Thu, 11 Jan 2024 13:11:24 +0100 Subject: [PATCH 022/340] Add EUM L2 GRIB-reader test package --- satpy/tests/reader_tests/test_eum_l2_grib.py | 319 +++++++++++++++++++ 1 file changed, 319 insertions(+) create mode 100644 satpy/tests/reader_tests/test_eum_l2_grib.py diff --git a/satpy/tests/reader_tests/test_eum_l2_grib.py b/satpy/tests/reader_tests/test_eum_l2_grib.py new file mode 100644 index 0000000000..3e4dee87a8 --- /dev/null +++ b/satpy/tests/reader_tests/test_eum_l2_grib.py @@ -0,0 +1,319 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright (c) 2019 Satpy developers +# +# satpy is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# satpy is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with satpy. If not, see . + +"""EUM L2 GRIB-reader test package.""" + +import datetime +import sys +import unittest +from unittest import mock + +import numpy as np + +from satpy.tests.utils import make_dataid + +# Dictionary to be used as fake GRIB message +FAKE_SEVIRI_MESSAGE = { + "longitudeOfSubSatellitePointInDegrees": 9.5, + "dataDate": 20191020, + "dataTime": 1745, + "Nx": 1000, + "Ny": 1200, + "earthMajorAxis": 6400., + "earthMinorAxis": 6300., + "NrInRadiusOfEarth": 6., + "XpInGridLengths": 500, + "parameterNumber": 30, + "missingValue": 9999, +} + +FAKE_FCI_MESSAGE = { + "longitudeOfSubSatellitePointInDegrees": 0.0, + "dataDate": 20191020, + "dataTime": 1745, + "Nx": 5568, + "Ny": 5568, + "earthMajorAxis": 6378140., + "earthMinorAxis": 6356755., + "NrInRadiusOfEarth": 6.6107, + "XpInGridLengths": 2784.0, + "parameterNumber": 30, + "missingValue": 9999, +} + +# List to be used as fake GID source +FAKE_GID = [0, 1, 2, 3, None] + + +class Test_EUML2GribFileHandler(unittest.TestCase): + """Test the EUML2GribFileHandler reader.""" + + @mock.patch("satpy.readers.eum_l2_grib.ec") + def setUp(self, ec_): + """Set up the test by creating a mocked eccodes library.""" + fake_gid_generator = (i for i in FAKE_GID) + ec_.codes_grib_new_from_file.side_effect = lambda fh: next(fake_gid_generator) + self.ec_ = ec_ + + @unittest.skipIf(sys.platform.startswith("win"), "'eccodes' not supported on Windows") + @mock.patch("satpy.readers.eum_l2_grib.xr") + @mock.patch("satpy.readers.eum_l2_grib.da") + def test_seviri_data_reading(self, da_, xr_): + """Test the reading of data from the product.""" + from satpy.readers.eum_l2_grib import REPEAT_CYCLE_DURATION, EUML2GribFileHandler + from satpy.utils import get_legacy_chunk_size + CHUNK_SIZE = get_legacy_chunk_size() + + with mock.patch("builtins.open", mock.mock_open()) as mock_file: + with mock.patch("satpy.readers.eum_l2_grib.ec", self.ec_): + 
self.ec_.codes_get_values.return_value = np.ones(1000*1200) + self.ec_.codes_get.side_effect = lambda gid, key: FAKE_SEVIRI_MESSAGE[key] + self.reader = EUML2GribFileHandler( + filename="test.grib", + filename_info={ + "spacecraft": "MET11", + "start_time": datetime.datetime(year=2020, month=10, day=20, + hour=19, minute=45, second=0) + }, + filetype_info={ + "file_type" : "seviri" + } + ) + + dataset_id = make_dataid(name="dummmy", resolution=3000) + + # Checks that the codes_grib_multi_support_on function has been called + self.ec_.codes_grib_multi_support_on.assert_called() + + # Restarts the id generator and clears the call history + fake_gid_generator = (i for i in FAKE_GID) + self.ec_.codes_grib_new_from_file.side_effect = lambda fh: next(fake_gid_generator) + self.ec_.codes_grib_new_from_file.reset_mock() + self.ec_.codes_release.reset_mock() + + # Checks the correct execution of the get_dataset function with a valid parameter_number + valid_dataset = self.reader.get_dataset(dataset_id, {"parameter_number": 30}) + # Checks the correct file open call + mock_file.assert_called_with("test.grib", "rb") + # Checks that the dataset has been created as a DataArray object + assert valid_dataset._extract_mock_name() == "xr.DataArray()" + # Checks that codes_release has been called after each codes_grib_new_from_file call + # (except after the last one which has returned a None) + assert self.ec_.codes_grib_new_from_file.call_count == self.ec_.codes_release.call_count + 1 + + # Restarts the id generator and clears the call history + fake_gid_generator = (i for i in FAKE_GID) + self.ec_.codes_grib_new_from_file.side_effect = lambda fh: next(fake_gid_generator) + self.ec_.codes_grib_new_from_file.reset_mock() + self.ec_.codes_release.reset_mock() + + # Checks the correct execution of the get_dataset function with an invalid parameter_number + invalid_dataset = self.reader.get_dataset(dataset_id, {"parameter_number": 50}) + # Checks that the function returns None + 
assert invalid_dataset is None + # Checks that codes_release has been called after each codes_grib_new_from_file call + # (except after the last one which has returned a None) + assert self.ec_.codes_grib_new_from_file.call_count == self.ec_.codes_release.call_count + 1 + + # Checks the basic data reading + assert REPEAT_CYCLE_DURATION == 15 + + # Checks the correct execution of the _get_global_attributes and _get_metadata_from_msg functions + attributes = self.reader._get_attributes() + expected_attributes = { + "orbital_parameters": { + "projection_longitude": 9.5 + }, + "sensor": "seviri", + "platform_name": "Meteosat-11" + } + assert attributes == expected_attributes + + # Checks the reading of an array from the message + self.reader._get_xarray_from_msg(0) + + # Checks that dask.array has been called with the correct arguments + name, args, kwargs = da_.mock_calls[0] + assert np.all(args[0] == np.ones((1200, 1000))) + assert args[1] == CHUNK_SIZE + + # Checks that xarray.DataArray has been called with the correct arguments + name, args, kwargs = xr_.mock_calls[0] + assert kwargs["dims"] == ("y", "x") + + # Checks the correct execution of the _get_proj_area function + pdict, area_dict = self.reader._get_proj_area(0) + + expected_pdict = { + "a": 6400000., + "b": 6300000., + "h": 32000000., + "ssp_lon": 9.5, + "nlines": 1000, + "ncols": 1200, + "a_name": "msg_seviri_rss_3km", + "a_desc": "MSG SEVIRI Rapid Scanning Service area definition with 3 km resolution", + "p_id": "", + } + assert pdict == expected_pdict + expected_area_dict = { + "center_point": 500, + "north": 1200, + "east": 1, + "west": 1000, + "south": 1, + } + assert area_dict == expected_area_dict + + # Checks the correct execution of the get_area_def function + with mock.patch("satpy.readers.eum_l2_grib.seviri_calculate_area_extent", + mock.Mock(name="seviri_calculate_area_extent")) as cae: + with mock.patch("satpy.readers.eum_l2_grib.get_area_definition", mock.Mock()) as gad: + dataset_id = 
make_dataid(name="dummmy", resolution=400.) + self.reader.get_area_def(dataset_id) + # Asserts that seviri_calculate_area_extent has been called with the correct arguments + expected_args = ({"center_point": 500, "east": 1, "west": 1000, "south": 1, "north": 1200, + "column_step": 400., "line_step": 400.},) + name, args, kwargs = cae.mock_calls[0] + assert args == expected_args + # Asserts that get_area_definition has been called with the correct arguments + name, args, kwargs = gad.mock_calls[0] + assert args[0] == expected_pdict + # The second argument must be the return result of seviri_calculate_area_extent + assert args[1]._extract_mock_name() == "seviri_calculate_area_extent()" + + @unittest.skipIf(sys.platform.startswith("win"), "'eccodes' not supported on Windows") + @mock.patch("satpy.readers.eum_l2_grib.xr") + @mock.patch("satpy.readers.eum_l2_grib.da") + def test_fci_data_reading(self, da_, xr_): + """Test the reading of fci data from the product.""" + from satpy.readers.eum_l2_grib import EUML2GribFileHandler + from satpy.utils import get_legacy_chunk_size + CHUNK_SIZE = get_legacy_chunk_size() + + with mock.patch("builtins.open", mock.mock_open()) as mock_file: + with mock.patch("satpy.readers.eum_l2_grib.ec", self.ec_): + self.ec_.codes_get_values.return_value = np.ones(5568*5568) + self.ec_.codes_get.side_effect = lambda gid, key: FAKE_FCI_MESSAGE[key] + self.reader = EUML2GribFileHandler( + filename="test.grib", + filename_info={ + "spacecraft_id": "1", + "start_time": datetime.datetime(year=2020, month=10, day=20, + hour=19, minute=45, second=0) + }, + filetype_info={ + "file_type" : "fci" + } + ) + + dataset_id = make_dataid(name="dummmy", resolution=2000) + + # Checks that the codes_grib_multi_support_on function has been called + self.ec_.codes_grib_multi_support_on.assert_called() + + # Restarts the id generator and clears the call history + fake_gid_generator = (i for i in FAKE_GID) + self.ec_.codes_grib_new_from_file.side_effect = lambda fh: 
next(fake_gid_generator) + self.ec_.codes_grib_new_from_file.reset_mock() + self.ec_.codes_release.reset_mock() + + # Checks the correct execution of the get_dataset function with a valid parameter_number + valid_dataset = self.reader.get_dataset(dataset_id, {"parameter_number": 30}) + # Checks the correct file open call + mock_file.assert_called_with("test.grib", "rb") + # Checks that the dataset has been created as a DataArray object + assert valid_dataset._extract_mock_name() == "xr.DataArray()" + # Checks that codes_release has been called after each codes_grib_new_from_file call + # (except after the last one which has returned a None) + assert self.ec_.codes_grib_new_from_file.call_count == self.ec_.codes_release.call_count + 1 + + # Restarts the id generator and clears the call history + fake_gid_generator = (i for i in FAKE_GID) + self.ec_.codes_grib_new_from_file.side_effect = lambda fh: next(fake_gid_generator) + self.ec_.codes_grib_new_from_file.reset_mock() + self.ec_.codes_release.reset_mock() + + # Checks the correct execution of the get_dataset function with an invalid parameter_number + invalid_dataset = self.reader.get_dataset(dataset_id, {"parameter_number": 50}) + # Checks that the function returns None + assert invalid_dataset is None + # Checks that codes_release has been called after each codes_grib_new_from_file call + # (except after the last one which has returned a None) + assert self.ec_.codes_grib_new_from_file.call_count == self.ec_.codes_release.call_count + 1 + + # Checks the correct execution of the _get_global_attributes and _get_metadata_from_msg functions + attributes = self.reader._get_attributes() + expected_attributes = { + "orbital_parameters": { + "projection_longitude": 0.0 + }, + "sensor": "fci", + "platform_name": "MTG-i1" + } + assert attributes == expected_attributes + + # Checks the reading of an array from the message + self.reader._get_xarray_from_msg(0) + + # Checks that dask.array has been called with the correct 
arguments + name, args, kwargs = da_.mock_calls[0] + assert np.all(args[0] == np.ones((5568, 5568))) + assert args[1] == CHUNK_SIZE + + # Checks that xarray.DataArray has been called with the correct arguments + name, args, kwargs = xr_.mock_calls[0] + assert kwargs["dims"] == ("y", "x") + + # Checks the correct execution of the _get_proj_area function + pdict, area_dict = self.reader._get_proj_area(0) + + expected_pdict = { + "a": 6378140000.0, + "b": 6356755000.0, + "h": 35785830098.0, + "ssp_lon": 0.0, + "nlines": 5568, + "ncols": 5568, + "a_name": "msg_fci_fdss_2km", + "a_desc": "MSG FCI Full Disk Scanning Service area definition with 2 km resolution", + "p_id": "" + } + assert pdict == expected_pdict + expected_area_dict = { + "nlines": 5568, + "ncols": 5568 + } + assert area_dict == expected_area_dict + + # Checks the correct execution of the get_area_def function + with mock.patch("satpy.readers.eum_l2_grib.fci_calculate_area_extent", + mock.Mock(name="fci_calculate_area_extent")) as cae: + with mock.patch("satpy.readers.eum_l2_grib.get_area_definition", mock.Mock()) as gad: + dataset_id = make_dataid(name="dummmy", resolution=2000.) 
+ self.reader.get_area_def(dataset_id) + # Asserts that seviri_calculate_area_extent has been called with the correct arguments + expected_args = ({"nlines": 5568, "ncols": 5568, + "column_step": 2000., "line_step": 2000.},) + name, args, kwargs = cae.mock_calls[0] + assert args == expected_args + # Asserts that get_area_definition has been called with the correct arguments + name, args, kwargs = gad.mock_calls[0] + assert args[0] == expected_pdict + # The second argument must be the return result of seviri_calculate_area_extent + assert args[1]._extract_mock_name() == "fci_calculate_area_extent()" From 0474fd083636a2d335b52ead6dd1ef3e8330bdfc Mon Sep 17 00:00:00 2001 From: David Navia Date: Thu, 11 Jan 2024 13:39:30 +0100 Subject: [PATCH 023/340] Add my name to AUTHORS.md --- AUTHORS.md | 1 + 1 file changed, 1 insertion(+) diff --git a/AUTHORS.md b/AUTHORS.md index 9078e441b4..1d98572541 100644 --- a/AUTHORS.md +++ b/AUTHORS.md @@ -87,3 +87,4 @@ The following people have made contributions to this project: - [Xin Zhang (zxdawn)](https://github.com/zxdawn) - [Yufei Zhu (yufeizhu600)](https://github.com/yufeizhu600) - [Youva Aoun (YouvaEUMex)](https://github.com/YouvaEUMex) +- [David Navia (dnaviap)](https://github.com/dnaviap) From 767aeabf656ab0ffbfa6c68a66bde7372fc37a2e Mon Sep 17 00:00:00 2001 From: David Navia Date: Fri, 12 Jan 2024 16:33:31 +0100 Subject: [PATCH 024/340] Delete eum_l2_grib.yaml and update seviri_l2_grib.yaml to avoid changing the user interface to the reader --- satpy/etc/readers/eum_l2_grib.yaml | 387 -------------------------- satpy/etc/readers/seviri_l2_grib.yaml | 14 +- 2 files changed, 7 insertions(+), 394 deletions(-) delete mode 100644 satpy/etc/readers/eum_l2_grib.yaml diff --git a/satpy/etc/readers/eum_l2_grib.yaml b/satpy/etc/readers/eum_l2_grib.yaml deleted file mode 100644 index 80edd3b2e5..0000000000 --- a/satpy/etc/readers/eum_l2_grib.yaml +++ /dev/null @@ -1,387 +0,0 @@ -reader: - name: eum_l2_grib - short_name: EUM L2 GRIB - 
long_name: MSG (Meteosat 8 to 11) SEVIRI Level products 2 and FCI L2 products in GRIB2 format - description: Reader for EUMETSAT MSG SEVIRI L2 files and FCI L2 files in GRIB format. - status: Alpha - supports_fsspec: false - sensors: [seviri,fci] - reader: !!python/name:satpy.readers.yaml_reader.GEOFlippableFileYAMLReader - - -file_types: - - # EUMETSAT MSG SEVIRI L2 Aerosol Properties over Sea product - # https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:AES - grib_seviri_aes: - file_reader: !!python/name:satpy.readers.eum_l2_grib.EUML2GribFileHandler - file_patterns: - - 'AESGRIBProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' - - '{spacecraft:4s}-SEVI-MSGAESE-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' - - '{spacecraft:4s}-SEVI-MSGAESE-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}.grb' - - '{spacecraft:4s}-SEVI-MSGAESE-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-NA.grb' - - # EUMETSAT MSG SEVIRI L2 Cloud Mask product - # https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:CLM - grib_seviri_clm: - file_reader: !!python/name:satpy.readers.eum_l2_grib.EUML2GribFileHandler - file_patterns: - - 'CLMEncProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' - - '{spacecraft:4s}-SEVI-MSGCLMK-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' - - '{spacecraft:4s}-SEVI-MSGCLMK-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}.grb' - - '{spacecraft:4s}-SEVI-MSGCLMK-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-NA.grb' - - # EUMETSAT MSG SEVIRI L2 Cloud Top Height product - # https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:CTH - grib_seviri_cth: - file_reader: !!python/name:satpy.readers.eum_l2_grib.EUML2GribFileHandler - file_patterns: 
- - 'CTHEncProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' - - '{spacecraft:4s}-SEVI-MSGCLTH-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' - - '{spacecraft:4s}-SEVI-MSGCLTH-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}.grb' - - '{spacecraft:4s}-SEVI-MSGCLTH-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-NA.grb' - - # EUMETSAT MSG SEVIRI L2 Clear-Sky Reflectance Map product - # https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:CRM - grib_seviri_crm: - file_reader: !!python/name:satpy.readers.eum_l2_grib.EUML2GribFileHandler - file_patterns: - - 'CRMEncProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' - - '{spacecraft:4s}-SEVI-MSGCRMN-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' - - '{spacecraft:4s}-SEVI-MSGCRMN-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}.grb' - - '{spacecraft:4s}-SEVI-MSGCRMN-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-NA.grb' - - # EUMETSAT MSG SEVIRI L2 Active Fire Monitoring product - # https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:FIR - grib_seviri_fir: - file_reader: !!python/name:satpy.readers.eum_l2_grib.EUML2GribFileHandler - file_patterns: - - 'FIREncProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' - - '{spacecraft:4s}-SEVI-MSGFIRG-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' - - '{spacecraft:4s}-SEVI-MSGFIRG-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}.grb' - - '{spacecraft:4s}-SEVI-MSGFIRG-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-NA.grb' - - # EUMETSAT MSG SEVIRI L2 Multi-Sensor Precipitation Estimate product - # 
https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:MPE-GRIB - grib_seviri_mpe: - file_reader: !!python/name:satpy.readers.eum_l2_grib.EUML2GribFileHandler - file_patterns: - - 'MPEGRIBProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' - - '{spacecraft:4s}-SEVI-MSGMPEG-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' - - '{spacecraft:4s}-SEVI-MSGMPEG-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}.grb' - - '{spacecraft:4s}-SEVI-MSGMPEG-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-NA.grb' - - # EUMETSAT MSG SEVIRI L2 Optimal Cloud Analysis product - # https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:OCA - grib_seviri_oca: - file_reader: !!python/name:satpy.readers.eum_l2_grib.EUML2GribFileHandler - file_patterns: - - 'OCAEncProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' - - '{spacecraft:4s}-SEVI-MSGOCAE-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' - - '{spacecraft:4s}-SEVI-MSGOCAE-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}.grb' - - '{spacecraft:4s}-SEVI-MSGOCAE-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-NA.grb' - - grib_fci_clm: - file_reader: !!python/name:satpy.readers.eum_l2_grib.EUML2GribFileHandler - file_patterns: - - '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+FCI-2-CLM-{subtype}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-GRIB2_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.bin' - -datasets: - - # EUMETSAT MSG SEVIRI L2 Aerosol Properties over Sea product - 
aerosol_optical_thickness_vis06: - name: aerosol_optical_thickness_vis06 - long_name: Aerosol optical Thickness at 0.6um - standard_name: atmosphere_absorption_optical_thickness_due_to_ambient_aerosol - resolution: 9001.209497451 - file_type: grib_seviri_aes - parameter_number: 20 - units: "1" - - aerosol_optical_thickness_vis08: - name: aerosol_optical_thickness_vis08 - long_name: Aerosol optical Thickness at 0.8um - standard_name: atmosphere_absorption_optical_thickness_due_to_ambient_aerosol - resolution: 9001.209497451 - file_type: grib_seviri_aes - parameter_number: 21 - units: "1" - - aerosol_optical_thickness_vis16: - name: aerosol_optical_thickness_vis16 - long_name: Aerosol optical Thickness at 1.6um - standard_name: atmosphere_absorption_optical_thickness_due_to_ambient_aerosol - resolution: 9001.209497451 - file_type: grib_seviri_aes - parameter_number: 22 - units: "1" - - angstroem_coefficient: - name: angstroem_coefficient - long_name: Angstroem Coefficient - standard_name: aerosol_angstrom_exponent - resolution: 9001.209497451 - file_type: grib_seviri_aes - parameter_number: 23 - units: "1" - - aes_quality: - name: aes_quality - long_name: AES Product Quality Flag - standard_name: quality_flag - resolution: 9001.209497451 - file_type: grib_seviri_aes - parameter_number: 192 - units: "1" - flag_values: [0, 1, 2, 3] - flag_meanings: ['clear sky over water','clear sky over land', 'cloudy', 'no data' ] - - - # EUMETSAT MSG SEVIRI L2 Cloud Mask product - cloud_mask: - name: cloud_mask - long_name: Cloud Classification - standard_name: cloud_classification - resolution: - 3000.403165817: {file_type: grib_seviri_clm} - 2000: {file_type: grib_fci_clm} - parameter_number: 7 - units: "1" - flag_values: [0, 1, 2, 3] - flag_meanings: ['clear sky over water','clear sky over land', 'cloudy', 'no data' ] - - - # EUMETSAT MSG SEVIRI L2 Cloud Top Height product - cloud_top_height: - name: cloud_top_height - long_name: Cloud Top Height - standard_name: 
height_at_cloud_top - resolution: 9001.209497451 - file_type: grib_seviri_cth - parameter_number: 2 - units: m - - cloud_top_quality: - name: cloud_top_quality - long_name: CTH Product Quality Flag - standard_name: height_at_cloud_top quality_flag - resolution: 9001.209497451 - file_type: grib_seviri_cth - parameter_number: 3 - units: "1" - flag_values: [0, 1] - flag_meanings: ['good quality retrieval','poor quality retrieval' ] - - - # EUMETSAT MSG SEVIRI L2 Clear-Sky Reflectance Map product - vis_refl_06: - name: vis_refl_06 - long_name: TOA Bidirectional Reflectance at 0.6um (7 days average) - standard_name: toa_bidirectional_reflectance - resolution: 3000.403165817 - wavelength: [0.56, 0.635, 0.71] - file_type: grib_seviri_crm - parameter_number: 9 - units: "%" - - vis_refl_08: - name: vis_refl_08 - long_name: TOA Bidirectional Reflectance at 0.8um (7 days average) - standard_name: toa_bidirectional_reflectance - resolution: 3000.403165817 - wavelength: [0.74, 0.81, 0.88] - file_type: grib_seviri_crm - parameter_number: 10 - units: "%" - - vis_refl_16: - name: vis_refl_16 - long_name: TOA Bidirectional Reflectance at 1.6um (7 days average) - standard_name: toa_bidirectional_reflectance - resolution: 3000.403165817 - wavelength: [1.5, 1.64, 1.78] - file_type: grib_seviri_crm - parameter_number: 11 - units: "%" - - nir_refl_39: - name: nir_refl_39 - long_name: TOA Bidirectional Reflectance at 3.9um (7 days average) - standard_name: toa_bidirectional_reflectance - resolution: 3000.403165817 - wavelength: [3.48, 3.92, 4.36] - file_type: grib_seviri_crm - parameter_number: 12 - units: "%" - - num_accumulations: - name: num_accumulations - long_name: CRM Product Number of Accumulations - standard_name: number_of_accumulations - resolution: 3000.403165817 - file_type: grib_seviri_crm - parameter_number: 6 - units: "1" - - solar_zenith_angle: - name: solar_zenith_angle - long_name: Solar Zenith Angle (7 days average) - standard_name: solar_zenith_angle - resolution: 
3000.403165817 - file_type: grib_seviri_crm - parameter_number: 7 - units: degrees - - relative_azimuth_angle: - name: relative_azimuth_angle - long_name: Relative Azimuth Angle (7 days average) - standard_name: relative_sensor_azimuth_angle - resolution: 3000.403165817 - file_type: grib_seviri_crm - parameter_number: 8 - units: degrees - - - # EUMETSAT MSG SEVIRI L2 Active Fire Monitoring product - active_fires: - name: active_fires - long_name: Active Fire Classification - standard_name: active_fire_classification - resolution: 3000.403165817 - file_type: grib_seviri_fir - parameter_number: 9 - units: "1" - flag_values: [0, 1, 2, 3] - flag_meanings: ['no fire','possible fire', 'probable fire', 'missing' ] - - fire_probability: - name: fire_probability - long_name: Fire Probability - standard_name: fire_probability - resolution: 3000.403165817 - file_type: grib_seviri_fir - parameter_number: 192 - units: "%" - - - # EUMETSAT MSG SEVIRI L2 Multi-Sensor Precipitation Estimate product - instantaneous_rain_rate: - name: instantaneous_rain_rate - long_name: MPE Product Instantaneous Rain Rate - standard_name: rainfall_rate - resolution: 3000.403165817 - file_type: grib_seviri_mpe - parameter_number: 1 - units: "kg m-2 s-1" - - - # EUMETSAT MSG SEVIRI L2 Optimal Cloud Analysis product - pixel_scene_type: - name: pixel_scene_type - long_name: Cloud Type - standard_name: scene_classification - resolution: 3000.403165817 - file_type: grib_seviri_oca - parameter_number: 8 - units: "1" - flag_values: [24,111,112] - flag_meanings: ['multi-layered cloud','water cloud','ice cloud'] - - measurement_cost: - name: measurement_cost - long_name: OCA Cost Function - Measurement part - standard_name: cost_function - resolution: 3000.403165817 - file_type: grib_seviri_oca - parameter_number: 30 - units: "1" - - upper_layer_cloud_optical_depth: - name: upper_layer_cloud_optical_depth - long_name: Upper Cloud Layer Optical Depth - standard_name: atmosphere_optical_thickness_due_to_cloud 
- resolution: 3000.403165817 - file_type: grib_seviri_oca - parameter_number: 31 - units: "1" - - upper_layer_cloud_top_pressure: - name: upper_layer_cloud_top_pressure - long_name: Upper Cloud Top Pressure - standard_name: air_pressure_at_cloud_top - resolution: 3000.403165817 - file_type: grib_seviri_oca - parameter_number: 32 - units: Pa - - upper_layer_cloud_effective_radius: - name: upper_layer_cloud_effective_radius - long_name: Upper Cloud Particle Effective Radius - standard_name: effective_radius_of_cloud_condensed_water_particles_at_cloud_top - resolution: 3000.403165817 - file_type: grib_seviri_oca - parameter_number: 33 - units: m - - error_in_upper_layer_cloud_optical_depth: - name: error_in_upper_layer_cloud_optical_depth - long_name: Upper Cloud Optical Depth Error Estimate - standard_name: atmosphere_optical_thickness_due_to_cloud standard_error - resolution: 3000.403165817 - file_type: grib_seviri_oca - parameter_number: 34 - units: "1" - - error_in_upper_layer_cloud_top_pressure: - name: error_in_upper_layer_cloud_top_pressure - long_name: Upper Cloud Top Pressure Error Estimate - standard_name: air_pressure_at_cloud_top standard_error - resolution: 3000.403165817 - file_type: grib_seviri_oca - parameter_number: 35 - units: Pa - - error_in_upper_layer_cloud_effective_radius: - name: error_in_upper_layer_cloud_effective_radius - long_name: Upper Cloud Particle Effective Radius Error Estimate - standard_name: effective_radius_of_cloud_condensed_water_particles_at_cloud_top standard_error - resolution: 3000.403165817 - file_type: grib_seviri_oca - parameter_number: 36 - units: m - - lower_layer_cloud_optical_depth: - name: lower_layer_cloud_optical_depth - long_name: Lower Cloud Optical Depth - standard_name: atmosphere_optical_thickness_due_to_cloud_in_lower_atmosphere_layer - resolution: 3000.403165817 - file_type: grib_seviri_oca - parameter_number: 37 - units: "1" - - lower_layer_cloud_top_pressure: - name: lower_layer_cloud_top_pressure - 
long_name: Lower Cloud Top Pressure - standard_name: air_pressure_at_cloud_top_in_lower_atmosphere_layer - resolution: 3000.403165817 - file_type: grib_seviri_oca - parameter_number: 38 - units: Pa - - error_in_lower_layer_cloud_optical_depth: - name: error_in_lower_layer_cloud_optical_depth - long_name: Lower Cloud Optical Depth Error Estimate - standard_name: atmosphere_optical_thickness_due_to_cloud_in_lower_atmosphere_layer standard_error - resolution: 3000.403165817 - file_type: grib_seviri_oca - parameter_number: 39 - units: "1" - - error_in_lower_layer_cloud_top_pressure: - name: error_in_lower_layer_cloud_top_pressure - long_name: Lower Cloud Top Pressure Error Estimate - standard_name: air_pressure_at_cloud_top_in_lower_atmosphere_layer standard_error - resolution: 3000.403165817 - file_type: grib_seviri_oca - parameter_number: 40 - units: Pa diff --git a/satpy/etc/readers/seviri_l2_grib.yaml b/satpy/etc/readers/seviri_l2_grib.yaml index cbe6c81f09..e9a8cc5231 100644 --- a/satpy/etc/readers/seviri_l2_grib.yaml +++ b/satpy/etc/readers/seviri_l2_grib.yaml @@ -14,7 +14,7 @@ file_types: # EUMETSAT MSG SEVIRI L2 Aerosol Properties over Sea product # https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:AES grib_seviri_aes: - file_reader: !!python/name:satpy.readers.seviri_l2_grib.SeviriL2GribFileHandler + file_reader: !!python/name:satpy.readers.eum_l2_grib.EUML2GribFileHandler file_patterns: - 'AESGRIBProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' - '{spacecraft:4s}-SEVI-MSGAESE-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' @@ -24,7 +24,7 @@ file_types: # EUMETSAT MSG SEVIRI L2 Cloud Mask product # https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:CLM grib_seviri_clm: - file_reader: !!python/name:satpy.readers.seviri_l2_grib.SeviriL2GribFileHandler + file_reader: !!python/name:satpy.readers.eum_l2_grib.EUML2GribFileHandler file_patterns: - 
'CLMEncProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' - '{spacecraft:4s}-SEVI-MSGCLMK-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' @@ -34,7 +34,7 @@ file_types: # EUMETSAT MSG SEVIRI L2 Cloud Top Height product # https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:CTH grib_seviri_cth: - file_reader: !!python/name:satpy.readers.seviri_l2_grib.SeviriL2GribFileHandler + file_reader: !!python/name:satpy.readers.eum_l2_grib.EUML2GribFileHandler file_patterns: - 'CTHEncProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' - '{spacecraft:4s}-SEVI-MSGCLTH-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' @@ -44,7 +44,7 @@ file_types: # EUMETSAT MSG SEVIRI L2 Clear-Sky Reflectance Map product # https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:CRM grib_seviri_crm: - file_reader: !!python/name:satpy.readers.seviri_l2_grib.SeviriL2GribFileHandler + file_reader: !!python/name:satpy.readers.eum_l2_grib.EUML2GribFileHandler file_patterns: - 'CRMEncProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' - '{spacecraft:4s}-SEVI-MSGCRMN-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' @@ -54,7 +54,7 @@ file_types: # EUMETSAT MSG SEVIRI L2 Active Fire Monitoring product # https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:FIR grib_seviri_fir: - file_reader: !!python/name:satpy.readers.seviri_l2_grib.SeviriL2GribFileHandler + file_reader: !!python/name:satpy.readers.eum_l2_grib.EUML2GribFileHandler file_patterns: - 'FIREncProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' - '{spacecraft:4s}-SEVI-MSGFIRG-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' @@ -64,7 +64,7 @@ file_types: # EUMETSAT 
MSG SEVIRI L2 Multi-Sensor Precipitation Estimate product # https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:MPE-GRIB grib_seviri_mpe: - file_reader: !!python/name:satpy.readers.seviri_l2_grib.SeviriL2GribFileHandler + file_reader: !!python/name:satpy.readers.eum_l2_grib.EUML2GribFileHandler file_patterns: - 'MPEGRIBProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' - '{spacecraft:4s}-SEVI-MSGMPEG-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' @@ -74,7 +74,7 @@ file_types: # EUMETSAT MSG SEVIRI L2 Optimal Cloud Analysis product # https://navigator.eumetsat.int/product/EO:EUM:DAT:MSG:OCA grib_seviri_oca: - file_reader: !!python/name:satpy.readers.seviri_l2_grib.SeviriL2GribFileHandler + file_reader: !!python/name:satpy.readers.eum_l2_grib.EUML2GribFileHandler file_patterns: - 'OCAEncProd_{start_time:%Y%m%d%H%M%S}Z_00_{server:8s}_{spacecraft:5s}_{scan_mode:3s}_{sub_sat:5s}' - '{spacecraft:4s}-SEVI-MSGOCAE-{id1:4s}-{id2:4s}-{start_time:%Y%m%d%H%M%S}.000000000Z-{product_creation_time:%Y%m%d%H%M%S}-{ord_num:7s}' From a53ddead94ec05d0117eba1c3e971fda11d17980 Mon Sep 17 00:00:00 2001 From: David Navia Date: Fri, 12 Jan 2024 16:37:12 +0100 Subject: [PATCH 025/340] Add fci_l2_grib.yaml reader --- satpy/etc/readers/fci_l2_grib.yaml | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) create mode 100644 satpy/etc/readers/fci_l2_grib.yaml diff --git a/satpy/etc/readers/fci_l2_grib.yaml b/satpy/etc/readers/fci_l2_grib.yaml new file mode 100644 index 0000000000..cc16c77081 --- /dev/null +++ b/satpy/etc/readers/fci_l2_grib.yaml @@ -0,0 +1,28 @@ +reader: + name: fci_l2_grib + short_name: FCI L2 GRIB2 + long_name: MTG FCI L2 data in GRIB2 format + description: Reader for EUMETSAT MTG FCI L2 files in GRIB2 format. 
+ status: Nominal + supports_fsspec: false + sensors: [fci] + reader: !!python/name:satpy.readers.yaml_reader.GEOFlippableFileYAMLReader + +file_types: + grib_fci_clm: + file_reader: !!python/name:satpy.readers.eum_l2_grib.EUML2GribFileHandler + file_patterns: + - '{pflag}_{location_indicator},{data_designator},MTI{spacecraft_id:1d}+FCI-2-CLM-{subtype}-{coverage}-{subsetting}-{component1}-{component2}-{component3}-{purpose}-GRIB2_{oflag}_{originator}_{processing_time:%Y%m%d%H%M%S}_{facility_or_tool}_{environment}_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{processing_mode}_{special_compression}_{disposition_mode}_{repeat_cycle_in_day:>04d}_{count_in_repeat_cycle:>04d}.bin' + + +datasets: + cloud_mask: + name: cloud_mask + long_name: Cloud Classification + standard_name: cloud_classification + resolution: 2000 + file_type: grib_fci_clm + parameter_number: 7 + units: "1" + flag_values: [0, 1, 2, 3] + flag_meanings: ['clear sky over water','clear sky over land', 'cloudy', 'undefined' ] From f94c4f721445de5e873e1077b5d44a6b852e1aa6 Mon Sep 17 00:00:00 2001 From: David Navia Date: Fri, 12 Jan 2024 16:39:40 +0100 Subject: [PATCH 026/340] Delete seviri_l2_grib.py since eum_l2_grib.py is compatible with FCI and SEVIRI data --- satpy/readers/seviri_l2_grib.py | 282 -------------------------------- 1 file changed, 282 deletions(-) delete mode 100644 satpy/readers/seviri_l2_grib.py diff --git a/satpy/readers/seviri_l2_grib.py b/satpy/readers/seviri_l2_grib.py deleted file mode 100644 index b69c60e7ac..0000000000 --- a/satpy/readers/seviri_l2_grib.py +++ /dev/null @@ -1,282 +0,0 @@ -# Copyright (c) 2019-2023 Satpy developers -# -# satpy is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. 
-# -# satpy is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with satpy. If not, see . - -"""Reader for the SEVIRI L2 products in GRIB2 format. - -References: - FM 92 GRIB Edition 2 - https://www.wmo.int/pages/prog/www/WMOCodes/Guides/GRIB/GRIB2_062006.pdf - EUMETSAT Product Navigator - https://navigator.eumetsat.int/ -""" - -import logging -from datetime import timedelta - -import dask.array as da -import numpy as np -import xarray as xr - -from satpy.readers._geos_area import get_area_definition, get_geos_area_naming -from satpy.readers.eum_base import get_service_mode -from satpy.readers.file_handlers import BaseFileHandler -from satpy.readers.seviri_base import PLATFORM_DICT, REPEAT_CYCLE_DURATION, calculate_area_extent -from satpy.utils import get_legacy_chunk_size - -try: - import eccodes as ec -except ImportError: - raise ImportError( - "Missing eccodes-python and/or eccodes C-library installation. 
Use conda to install eccodes") - -CHUNK_SIZE = get_legacy_chunk_size() -logger = logging.getLogger(__name__) - - -class SeviriL2GribFileHandler(BaseFileHandler): - """Reader class for SEVIRI L2 products in GRIB format.""" - - def __init__(self, filename, filename_info, filetype_info): - """Read the global attributes and prepare for dataset reading.""" - super().__init__(filename, filename_info, filetype_info) - # Turn on support for multiple fields in single GRIB messages (required for SEVIRI L2 files) - ec.codes_grib_multi_support_on() - - @property - def start_time(self): - """Return the sensing start time.""" - return self.filename_info["start_time"] - - @property - def end_time(self): - """Return the sensing end time.""" - return self.start_time + timedelta(minutes=REPEAT_CYCLE_DURATION) - - def get_area_def(self, dataset_id): - """Return the area definition for a dataset.""" - self._area_dict["column_step"] = dataset_id["resolution"] - self._area_dict["line_step"] = dataset_id["resolution"] - - area_extent = calculate_area_extent(self._area_dict) - - # Call the get_area_definition function to obtain the area - area_def = get_area_definition(self._pdict, area_extent) - - return area_def - - def get_dataset(self, dataset_id, dataset_info): - """Get dataset using the parameter_number key in dataset_info. - - In a previous version of the reader, the attributes (nrows, ncols, ssp_lon) and projection information - (pdict and area_dict) were computed while initializing the file handler. Also the code would break out from - the While-loop below as soon as the correct parameter_number was found. This has now been revised becasue the - reader would sometimes give corrupt information about the number of messages in the file and the dataset - dimensions within a given message if the file was only partly read (not looping over all messages) in an earlier - instance. 
- """ - logger.debug("Reading in file to get dataset with parameter number %d.", - dataset_info["parameter_number"]) - - xarr = None - message_found = False - with open(self.filename, "rb") as fh: - - # Iterate over all messages and fetch data when the correct parameter number is found - while True: - gid = ec.codes_grib_new_from_file(fh) - - if gid is None: - if not message_found: - # Could not obtain a valid message ID from the grib file - logger.warning("Could not find parameter_number %d in GRIB file, no valid Dataset created", - dataset_info["parameter_number"]) - break - - # Check if the parameter number in the GRIB message corresponds to the required key - parameter_number = self._get_from_msg(gid, "parameterNumber") - - if parameter_number == dataset_info["parameter_number"]: - - self._res = dataset_id["resolution"] - self._read_attributes(gid) - - # Read the missing value - missing_value = self._get_from_msg(gid, "missingValue") - - # Retrieve values and metadata from the GRIB message, masking the values equal to missing_value - xarr = self._get_xarray_from_msg(gid) - - xarr.data = da.where(xarr.data == missing_value, np.nan, xarr.data) - - ec.codes_release(gid) - - # Combine all metadata into the dataset attributes and break out of the loop - xarr.attrs.update(dataset_info) - xarr.attrs.update(self._get_attributes()) - - message_found = True - - else: - # The parameter number is not the correct one, release gid and skip to next message - ec.codes_release(gid) - - return xarr - - def _read_attributes(self, gid): - """Read the parameter attributes from the message and create the projection and area dictionaries.""" - # Read SSP and date/time - self._ssp_lon = self._get_from_msg(gid, "longitudeOfSubSatellitePointInDegrees") - - # Read number of points on the x and y axes - self._nrows = self._get_from_msg(gid, "Ny") - self._ncols = self._get_from_msg(gid, "Nx") - - # Creates the projection and area dictionaries - self._pdict, self._area_dict = 
self._get_proj_area(gid) - - def _get_proj_area(self, gid): - """Compute the dictionary with the projection and area definition from a GRIB message. - - Args: - gid: The ID of the GRIB message. - - Returns: - tuple: A tuple of two dictionaries for the projection and the area definition. - pdict: - a: Earth major axis [m] - b: Earth minor axis [m] - h: Height over surface [m] - ssp_lon: longitude of subsatellite point [deg] - nlines: number of lines - ncols: number of columns - a_name: name of the area - a_desc: description of the area - p_id: id of the projection - area_dict: - center_point: coordinate of the center point - north: coodinate of the north limit - east: coodinate of the east limit - west: coodinate of the west limit - south: coodinate of the south limit - """ - # Get name of area definition - area_naming_input_dict = {"platform_name": "msg", - "instrument_name": "seviri", - "resolution": self._res, - } - - area_naming = get_geos_area_naming({**area_naming_input_dict, - **get_service_mode("seviri", self._ssp_lon)}) - - # Read all projection and area parameters from the message - earth_major_axis_in_meters = self._get_from_msg(gid, "earthMajorAxis") * 1000.0 # [m] - earth_minor_axis_in_meters = self._get_from_msg(gid, "earthMinorAxis") * 1000.0 # [m] - - earth_major_axis_in_meters = self._scale_earth_axis(earth_major_axis_in_meters) - earth_minor_axis_in_meters = self._scale_earth_axis(earth_minor_axis_in_meters) - - nr_in_radius_of_earth = self._get_from_msg(gid, "NrInRadiusOfEarth") - xp_in_grid_lengths = self._get_from_msg(gid, "XpInGridLengths") - h_in_meters = earth_major_axis_in_meters * (nr_in_radius_of_earth - 1.0) # [m] - - # Create the dictionary with the projection data - pdict = { - "a": earth_major_axis_in_meters, - "b": earth_minor_axis_in_meters, - "h": h_in_meters, - "ssp_lon": self._ssp_lon, - "nlines": self._ncols, - "ncols": self._nrows, - "a_name": area_naming["area_id"], - "a_desc": area_naming["description"], - "p_id": "", - } - - 
# Compute the dictionary with the area extension - area_dict = { - "center_point": xp_in_grid_lengths, - "north": self._nrows, - "east": 1, - "west": self._ncols, - "south": 1, - } - - return pdict, area_dict - - @staticmethod - def _scale_earth_axis(data): - """Scale Earth axis data to make sure the value matched the expected unit [m]. - - The earthMinorAxis value stored in the aerosol over sea product is scaled incorrectly by a factor of 1e8. This - method provides a flexible temporarily workaraound by making sure that all earth axis values are scaled such - that they are on the order of millions of meters as expected by the reader. As soon as the scaling issue has - been resolved by EUMETSAT this workaround can be removed. - - """ - scale_factor = 10 ** np.ceil(np.log10(1e6/data)) - return data * scale_factor - - def _get_xarray_from_msg(self, gid): - """Read the values from the GRIB message and return a DataArray object. - - Args: - gid: The ID of the GRIB message. - - Returns: - DataArray: The array containing the retrieved values. - """ - # Data from GRIB message are read into an Xarray... - xarr = xr.DataArray(da.from_array(ec.codes_get_values( - gid).reshape(self._nrows, self._ncols), CHUNK_SIZE), dims=("y", "x")) - - return xarr - - def _get_attributes(self): - """Create a dictionary of attributes to be added to the dataset. - - Returns: - dict: A dictionary of parameter attributes. - ssp_lon: longitude of subsatellite point - sensor: name of sensor - platform_name: name of the platform - """ - orbital_parameters = { - "projection_longitude": self._ssp_lon - } - - attributes = { - "orbital_parameters": orbital_parameters, - "sensor": "seviri", - "platform_name": PLATFORM_DICT[self.filename_info["spacecraft"]] - } - return attributes - - @staticmethod - def _get_from_msg(gid, key): - """Get a value from the GRIB message based on the key, return None if missing. - - Args: - gid: The ID of the GRIB message. - key: The key of the required attribute. 
- - Returns: - The retrieved attribute or None if the key is missing. - """ - try: - attr = ec.codes_get(gid, key) - except ec.KeyValueNotFoundError: - logger.warning("Key %s not found in GRIB message", key) - attr = None - return attr From 66946ad5abccdcbee94654d366c31d0402f48fb3 Mon Sep 17 00:00:00 2001 From: David Navia Date: Fri, 12 Jan 2024 16:46:12 +0100 Subject: [PATCH 027/340] Delete obsolete test_seviri_l2_grib.py --- .../tests/reader_tests/test_seviri_l2_grib.py | 182 ------------------ 1 file changed, 182 deletions(-) delete mode 100644 satpy/tests/reader_tests/test_seviri_l2_grib.py diff --git a/satpy/tests/reader_tests/test_seviri_l2_grib.py b/satpy/tests/reader_tests/test_seviri_l2_grib.py deleted file mode 100644 index d3b40d6caa..0000000000 --- a/satpy/tests/reader_tests/test_seviri_l2_grib.py +++ /dev/null @@ -1,182 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# Copyright (c) 2019 Satpy developers -# -# satpy is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# satpy is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with satpy. If not, see . 
- -"""SEVIRI L2 GRIB-reader test package.""" - -import datetime -import sys -import unittest -from unittest import mock - -import numpy as np - -from satpy.tests.utils import make_dataid - -# Dictionary to be used as fake GRIB message -FAKE_MESSAGE = { - "longitudeOfSubSatellitePointInDegrees": 9.5, - "dataDate": 20191020, - "dataTime": 1745, - "Nx": 1000, - "Ny": 1200, - "earthMajorAxis": 6400., - "earthMinorAxis": 6300., - "NrInRadiusOfEarth": 6., - "XpInGridLengths": 500, - "parameterNumber": 30, - "missingValue": 9999, -} - -# List to be used as fake GID source -FAKE_GID = [0, 1, 2, 3, None] - - -class Test_SeviriL2GribFileHandler(unittest.TestCase): - """Test the SeviriL2GribFileHandler reader.""" - - @mock.patch("satpy.readers.seviri_l2_grib.ec") - def setUp(self, ec_): - """Set up the test by creating a mocked eccodes library.""" - fake_gid_generator = (i for i in FAKE_GID) - ec_.codes_grib_new_from_file.side_effect = lambda fh: next(fake_gid_generator) - ec_.codes_get.side_effect = lambda gid, key: FAKE_MESSAGE[key] - ec_.codes_get_values.return_value = np.ones(1000*1200) - self.ec_ = ec_ - - @unittest.skipIf(sys.platform.startswith("win"), "'eccodes' not supported on Windows") - @mock.patch("satpy.readers.seviri_l2_grib.xr") - @mock.patch("satpy.readers.seviri_l2_grib.da") - def test_data_reading(self, da_, xr_): - """Test the reading of data from the product.""" - from satpy.readers.seviri_l2_grib import REPEAT_CYCLE_DURATION, SeviriL2GribFileHandler - from satpy.utils import get_legacy_chunk_size - CHUNK_SIZE = get_legacy_chunk_size() - - with mock.patch("builtins.open", mock.mock_open()) as mock_file: - with mock.patch("satpy.readers.seviri_l2_grib.ec", self.ec_): - self.reader = SeviriL2GribFileHandler( - filename="test.grib", - filename_info={ - "spacecraft": "MET11", - "start_time": datetime.datetime(year=2020, month=10, day=20, - hour=19, minute=45, second=0) - }, - filetype_info={} - ) - - dataset_id = make_dataid(name="dummmy", resolution=3000) - 
- # Checks that the codes_grib_multi_support_on function has been called - self.ec_.codes_grib_multi_support_on.assert_called() - - # Restarts the id generator and clears the call history - fake_gid_generator = (i for i in FAKE_GID) - self.ec_.codes_grib_new_from_file.side_effect = lambda fh: next(fake_gid_generator) - self.ec_.codes_grib_new_from_file.reset_mock() - self.ec_.codes_release.reset_mock() - - # Checks the correct execution of the get_dataset function with a valid parameter_number - valid_dataset = self.reader.get_dataset(dataset_id, {"parameter_number": 30}) - # Checks the correct file open call - mock_file.assert_called_with("test.grib", "rb") - # Checks that the dataset has been created as a DataArray object - assert valid_dataset._extract_mock_name() == "xr.DataArray()" - # Checks that codes_release has been called after each codes_grib_new_from_file call - # (except after the last one which has returned a None) - assert self.ec_.codes_grib_new_from_file.call_count == self.ec_.codes_release.call_count + 1 - - # Restarts the id generator and clears the call history - fake_gid_generator = (i for i in FAKE_GID) - self.ec_.codes_grib_new_from_file.side_effect = lambda fh: next(fake_gid_generator) - self.ec_.codes_grib_new_from_file.reset_mock() - self.ec_.codes_release.reset_mock() - - # Checks the correct execution of the get_dataset function with an invalid parameter_number - invalid_dataset = self.reader.get_dataset(dataset_id, {"parameter_number": 50}) - # Checks that the function returns None - assert invalid_dataset is None - # Checks that codes_release has been called after each codes_grib_new_from_file call - # (except after the last one which has returned a None) - assert self.ec_.codes_grib_new_from_file.call_count == self.ec_.codes_release.call_count + 1 - - # Checks the basic data reading - assert REPEAT_CYCLE_DURATION == 15 - - # Checks the correct execution of the _get_global_attributes and _get_metadata_from_msg functions - attributes = 
self.reader._get_attributes() - expected_attributes = { - "orbital_parameters": { - "projection_longitude": 9.5 - }, - "sensor": "seviri", - "platform_name": "Meteosat-11" - } - assert attributes == expected_attributes - - # Checks the reading of an array from the message - self.reader._get_xarray_from_msg(0) - - # Checks that dask.array has been called with the correct arguments - name, args, kwargs = da_.mock_calls[0] - assert np.all(args[0] == np.ones((1200, 1000))) - assert args[1] == CHUNK_SIZE - - # Checks that xarray.DataArray has been called with the correct arguments - name, args, kwargs = xr_.mock_calls[0] - assert kwargs["dims"] == ("y", "x") - - # Checks the correct execution of the _get_proj_area function - pdict, area_dict = self.reader._get_proj_area(0) - - expected_pdict = { - "a": 6400000., - "b": 6300000., - "h": 32000000., - "ssp_lon": 9.5, - "nlines": 1000, - "ncols": 1200, - "a_name": "msg_seviri_rss_3km", - "a_desc": "MSG SEVIRI Rapid Scanning Service area definition with 3 km resolution", - "p_id": "", - } - assert pdict == expected_pdict - expected_area_dict = { - "center_point": 500, - "north": 1200, - "east": 1, - "west": 1000, - "south": 1, - } - assert area_dict == expected_area_dict - - # Checks the correct execution of the get_area_def function - with mock.patch("satpy.readers.seviri_l2_grib.calculate_area_extent", - mock.Mock(name="calculate_area_extent")) as cae: - with mock.patch("satpy.readers.seviri_l2_grib.get_area_definition", mock.Mock()) as gad: - dataset_id = make_dataid(name="dummmy", resolution=400.) 
- self.reader.get_area_def(dataset_id) - # Asserts that calculate_area_extent has been called with the correct arguments - expected_args = ({"center_point": 500, "east": 1, "west": 1000, "south": 1, "north": 1200, - "column_step": 400., "line_step": 400.},) - name, args, kwargs = cae.mock_calls[0] - assert args == expected_args - # Asserts that get_area_definition has been called with the correct arguments - name, args, kwargs = gad.mock_calls[0] - assert args[0] == expected_pdict - # The second argument must be the return result of calculate_area_extent - assert args[1]._extract_mock_name() == "calculate_area_extent()" From 5bbb4219ace06aef45b0ca4268b01198a159b2dc Mon Sep 17 00:00:00 2001 From: David Navia Date: Wed, 8 May 2024 15:38:24 +0200 Subject: [PATCH 028/340] Refactor duplicate code in tests --- satpy/tests/reader_tests/test_eum_l2_grib.py | 101 +++++++------------ 1 file changed, 37 insertions(+), 64 deletions(-) diff --git a/satpy/tests/reader_tests/test_eum_l2_grib.py b/satpy/tests/reader_tests/test_eum_l2_grib.py index 3e4dee87a8..a7846be706 100644 --- a/satpy/tests/reader_tests/test_eum_l2_grib.py +++ b/satpy/tests/reader_tests/test_eum_l2_grib.py @@ -70,6 +70,41 @@ def setUp(self, ec_): ec_.codes_grib_new_from_file.side_effect = lambda fh: next(fake_gid_generator) self.ec_ = ec_ + def common_checks(self, mock_file, dataset_id): + """Commmon checks for fci and seviri data.""" + # Checks that the codes_grib_multi_support_on function has been called + self.ec_.codes_grib_multi_support_on.assert_called() + + # Restarts the id generator and clears the call history + fake_gid_generator = (i for i in FAKE_GID) + self.ec_.codes_grib_new_from_file.side_effect = lambda fh: next(fake_gid_generator) + self.ec_.codes_grib_new_from_file.reset_mock() + self.ec_.codes_release.reset_mock() + + # Checks the correct execution of the get_dataset function with a valid parameter_number + valid_dataset = self.reader.get_dataset(dataset_id, {"parameter_number": 30}) + # 
Checks the correct file open call + mock_file.assert_called_with("test.grib", "rb") + # Checks that the dataset has been created as a DataArray object + assert valid_dataset._extract_mock_name() == "xr.DataArray()" + # Checks that codes_release has been called after each codes_grib_new_from_file call + # (except after the last one which has returned a None) + assert self.ec_.codes_grib_new_from_file.call_count == self.ec_.codes_release.call_count + 1 + + # Restarts the id generator and clears the call history + fake_gid_generator = (i for i in FAKE_GID) + self.ec_.codes_grib_new_from_file.side_effect = lambda fh: next(fake_gid_generator) + self.ec_.codes_grib_new_from_file.reset_mock() + self.ec_.codes_release.reset_mock() + + # Checks the correct execution of the get_dataset function with an invalid parameter_number + invalid_dataset = self.reader.get_dataset(dataset_id, {"parameter_number": 50}) + # Checks that the function returns None + assert invalid_dataset is None + # Checks that codes_release has been called after each codes_grib_new_from_file call + # (except after the last one which has returned a None) + assert self.ec_.codes_grib_new_from_file.call_count == self.ec_.codes_release.call_count + 1 + @unittest.skipIf(sys.platform.startswith("win"), "'eccodes' not supported on Windows") @mock.patch("satpy.readers.eum_l2_grib.xr") @mock.patch("satpy.readers.eum_l2_grib.da") @@ -97,38 +132,7 @@ def test_seviri_data_reading(self, da_, xr_): dataset_id = make_dataid(name="dummmy", resolution=3000) - # Checks that the codes_grib_multi_support_on function has been called - self.ec_.codes_grib_multi_support_on.assert_called() - - # Restarts the id generator and clears the call history - fake_gid_generator = (i for i in FAKE_GID) - self.ec_.codes_grib_new_from_file.side_effect = lambda fh: next(fake_gid_generator) - self.ec_.codes_grib_new_from_file.reset_mock() - self.ec_.codes_release.reset_mock() - - # Checks the correct execution of the get_dataset function with 
a valid parameter_number - valid_dataset = self.reader.get_dataset(dataset_id, {"parameter_number": 30}) - # Checks the correct file open call - mock_file.assert_called_with("test.grib", "rb") - # Checks that the dataset has been created as a DataArray object - assert valid_dataset._extract_mock_name() == "xr.DataArray()" - # Checks that codes_release has been called after each codes_grib_new_from_file call - # (except after the last one which has returned a None) - assert self.ec_.codes_grib_new_from_file.call_count == self.ec_.codes_release.call_count + 1 - - # Restarts the id generator and clears the call history - fake_gid_generator = (i for i in FAKE_GID) - self.ec_.codes_grib_new_from_file.side_effect = lambda fh: next(fake_gid_generator) - self.ec_.codes_grib_new_from_file.reset_mock() - self.ec_.codes_release.reset_mock() - - # Checks the correct execution of the get_dataset function with an invalid parameter_number - invalid_dataset = self.reader.get_dataset(dataset_id, {"parameter_number": 50}) - # Checks that the function returns None - assert invalid_dataset is None - # Checks that codes_release has been called after each codes_grib_new_from_file call - # (except after the last one which has returned a None) - assert self.ec_.codes_grib_new_from_file.call_count == self.ec_.codes_release.call_count + 1 + self.common_checks(mock_file, dataset_id) # Checks the basic data reading assert REPEAT_CYCLE_DURATION == 15 @@ -224,38 +228,7 @@ def test_fci_data_reading(self, da_, xr_): dataset_id = make_dataid(name="dummmy", resolution=2000) - # Checks that the codes_grib_multi_support_on function has been called - self.ec_.codes_grib_multi_support_on.assert_called() - - # Restarts the id generator and clears the call history - fake_gid_generator = (i for i in FAKE_GID) - self.ec_.codes_grib_new_from_file.side_effect = lambda fh: next(fake_gid_generator) - self.ec_.codes_grib_new_from_file.reset_mock() - self.ec_.codes_release.reset_mock() - - # Checks the correct 
execution of the get_dataset function with a valid parameter_number - valid_dataset = self.reader.get_dataset(dataset_id, {"parameter_number": 30}) - # Checks the correct file open call - mock_file.assert_called_with("test.grib", "rb") - # Checks that the dataset has been created as a DataArray object - assert valid_dataset._extract_mock_name() == "xr.DataArray()" - # Checks that codes_release has been called after each codes_grib_new_from_file call - # (except after the last one which has returned a None) - assert self.ec_.codes_grib_new_from_file.call_count == self.ec_.codes_release.call_count + 1 - - # Restarts the id generator and clears the call history - fake_gid_generator = (i for i in FAKE_GID) - self.ec_.codes_grib_new_from_file.side_effect = lambda fh: next(fake_gid_generator) - self.ec_.codes_grib_new_from_file.reset_mock() - self.ec_.codes_release.reset_mock() - - # Checks the correct execution of the get_dataset function with an invalid parameter_number - invalid_dataset = self.reader.get_dataset(dataset_id, {"parameter_number": 50}) - # Checks that the function returns None - assert invalid_dataset is None - # Checks that codes_release has been called after each codes_grib_new_from_file call - # (except after the last one which has returned a None) - assert self.ec_.codes_grib_new_from_file.call_count == self.ec_.codes_release.call_count + 1 + self.common_checks(mock_file, dataset_id) # Checks the correct execution of the _get_global_attributes and _get_metadata_from_msg functions attributes = self.reader._get_attributes() From 486b3a6e77fbebb0b17e1c4915b5d1abceab7e87 Mon Sep 17 00:00:00 2001 From: David Navia Date: Wed, 8 May 2024 15:50:13 +0200 Subject: [PATCH 029/340] Correct for RSS data --- satpy/readers/eum_l2_grib.py | 16 ++++++---------- satpy/readers/seviri_base.py | 2 ++ 2 files changed, 8 insertions(+), 10 deletions(-) diff --git a/satpy/readers/eum_l2_grib.py b/satpy/readers/eum_l2_grib.py index 47cf9a0ba9..c3cc7e61c4 100644 --- 
a/satpy/readers/eum_l2_grib.py +++ b/satpy/readers/eum_l2_grib.py @@ -23,7 +23,6 @@ """ import logging -from datetime import timedelta import dask.array as da import numpy as np @@ -33,7 +32,7 @@ from satpy.readers.eum_base import get_service_mode from satpy.readers.fci_base import calculate_area_extent as fci_calculate_area_extent from satpy.readers.file_handlers import BaseFileHandler -from satpy.readers.seviri_base import PLATFORM_DICT, REPEAT_CYCLE_DURATION +from satpy.readers.seviri_base import PLATFORM_DICT, REPEAT_CYCLE_DURATION, REPEAT_CYCLE_DURATION_RSS from satpy.readers.seviri_base import calculate_area_extent as seviri_calculate_area_extent from satpy.utils import get_legacy_chunk_size @@ -75,10 +74,8 @@ def start_time(self): @property def end_time(self): """Return the sensing end time.""" - if self.sensor == "seviri": - return self.start_time + timedelta(minutes=REPEAT_CYCLE_DURATION) - elif self.sensor == "fci": - return self.filename_info["end_time"] + delta = REPEAT_CYCLE_DURATION_RSS if self._ssp_lon == 9.5 else REPEAT_CYCLE_DURATION + return self.start_time + delta def get_area_def(self, dataset_id): """Return the area definition for a dataset.""" @@ -249,10 +246,9 @@ def _get_proj_area(self, gid): def _scale_earth_axis(data): """Scale Earth axis data to make sure the value matched the expected unit [m]. - The earthMinorAxis value stored in the aerosol over sea product is scaled incorrectly by a factor of 1e8. This - method provides a flexible temporarily workaraound by making sure that all earth axis values are scaled such - that they are on the order of millions of meters as expected by the reader. As soon as the scaling issue has - been resolved by EUMETSAT this workaround can be removed. 
+ The earthMinorAxis value stored in the MPEF aerosol over sea product prior to December 12, 2022 has the wrong + unit and this method provides a flexible work-around by making sure that all earth axis values are scaled such + that they are on the order of millions of meters as expected by the reader. """ scale_factor = 10 ** np.ceil(np.log10(1e6/data)) diff --git a/satpy/readers/seviri_base.py b/satpy/readers/seviri_base.py index 5b19e56833..d2ed5c3847 100644 --- a/satpy/readers/seviri_base.py +++ b/satpy/readers/seviri_base.py @@ -212,6 +212,8 @@ REPEAT_CYCLE_DURATION = 15 +REPEAT_CYCLE_DURATION_RSS = 5 + C1 = 1.19104273e-5 C2 = 1.43877523 From c6322faec97301b7f2578f215c1b3044b4cef4d0 Mon Sep 17 00:00:00 2001 From: David Navia Date: Wed, 8 May 2024 16:02:35 +0200 Subject: [PATCH 030/340] Modify fci_base doc-string --- satpy/readers/fci_base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/fci_base.py b/satpy/readers/fci_base.py index c4a3714291..c1f6fc2110 100644 --- a/satpy/readers/fci_base.py +++ b/satpy/readers/fci_base.py @@ -22,7 +22,7 @@ def calculate_area_extent(area_dict): """Calculate the area extent seen by MTG FCI instrument. - Since the center of the FCI L2 grid is located at the interface between the pixels, there are equally many + Since the center of the FCI grids is located at the interface between the pixels, there are equally many pixels (e.g. 5568/2 = 2784 for 2km grid) in each direction from the center points. Hence, the area extent can be easily computed by simply adding and subtracting half the width and height from teh centre point (=0). 
From f27a91d59ff86633ac5578e9e6b9f27fcff80574 Mon Sep 17 00:00:00 2001 From: bkremmli Date: Fri, 17 May 2024 07:02:34 +0000 Subject: [PATCH 031/340] fix for file reading; includes removing chunk reading and decoding times after import --- satpy/readers/mviri_l1b_fiduceo_nc.py | 26 ++++++++++++++----- .../reader_tests/test_mviri_l1b_fiduceo_nc.py | 20 ++++++++++---- 2 files changed, 35 insertions(+), 11 deletions(-) diff --git a/satpy/readers/mviri_l1b_fiduceo_nc.py b/satpy/readers/mviri_l1b_fiduceo_nc.py index fc5aea2c8e..0bbc285917 100644 --- a/satpy/readers/mviri_l1b_fiduceo_nc.py +++ b/satpy/readers/mviri_l1b_fiduceo_nc.py @@ -153,10 +153,12 @@ """ import abc +import datetime import functools import warnings import dask.array as da +import numpy import numpy as np import xarray as xr @@ -513,11 +515,20 @@ def get_time(self): """Get time coordinate. Variable is sometimes named "time" and sometimes "time_ir_wv". + FillValues in time are set to NaT. """ + try: - return self["time_ir_wv"] + time = self["time_ir_wv"] except KeyError: - return self["time"] + time = self["time"] + + timeOffset = time.attrs["add_offset"] + condition = time == time.attrs["_FillValue"] + + time = xr.where(condition, numpy.nan, time + timeOffset) + time = (time*1e9).astype("datetime64[ns]") + return time def get_xy_coords(self, resolution): """Get x and y coordinates for the given resolution.""" @@ -555,11 +566,14 @@ def __init__(self, filename, filename_info, filetype_info, # noqa: D417 self.mask_bad_quality = mask_bad_quality nc_raw = xr.open_dataset( filename, - chunks={"x": CHUNK_SIZE, - "y": CHUNK_SIZE, - "x_ir_wv": CHUNK_SIZE, - "y_ir_wv": CHUNK_SIZE} + # chunks={"x": CHUNK_SIZE, + # "y": CHUNK_SIZE, + # "x_ir_wv": CHUNK_SIZE, + # "y_ir_wv": CHUNK_SIZE}, + decode_times=False, + decode_cf=False ) + self.nc = DatasetWrapper(nc_raw) # Projection longitude is not provided in the file, read it from the diff --git a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py 
b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py index 04694c145a..3c50ebcd43 100644 --- a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py +++ b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py @@ -19,6 +19,7 @@ from __future__ import annotations +import datetime import os from unittest import mock @@ -61,8 +62,8 @@ {"sun_earth_distance_correction_applied": True, "sun_earth_distance_correction_factor": 1.} ) -acq_time_vis_exp = [np.datetime64("1970-01-01 00:30").astype("datetime64[ns]"), - np.datetime64("1970-01-01 00:30").astype("datetime64[ns]"), +acq_time_vis_exp = [np.datetime64("NaT").astype("datetime64[ns]"), + np.datetime64("NaT").astype("datetime64[ns]"), np.datetime64("1970-01-01 02:30").astype("datetime64[ns]"), np.datetime64("1970-01-01 02:30").astype("datetime64[ns]")] vis_counts_exp = xr.DataArray( @@ -124,7 +125,7 @@ }, attrs=attrs_exp ) -acq_time_ir_wv_exp = [np.datetime64("1970-01-01 00:30").astype("datetime64[ns]"), +acq_time_ir_wv_exp = [np.datetime64("NaT").astype("datetime64[ns]"), np.datetime64("1970-01-01 02:30").astype("datetime64[ns]")] wv_counts_exp = xr.DataArray( np.array( @@ -272,8 +273,13 @@ def fixture_fake_dataset(): dtype=np.uint8 ) ) - time = np.arange(4) * 60 * 60 * 1e9 - time = time.astype("datetime64[ns]").reshape(2, 2) + time = np.arange(4) * 60 * 60 + timeFillValue=4294967295 + timeAddOffset=0 + time[0] = timeFillValue + time[1] = timeFillValue + time = time.reshape(2,2) + ds = xr.Dataset( data_vars={ "count_vis": (("y", "x"), count_vis), @@ -317,6 +323,10 @@ def fixture_fake_dataset(): ) ds["count_ir"].attrs["ancillary_variables"] = "a_ir b_ir" ds["count_wv"].attrs["ancillary_variables"] = "a_wv b_wv" + + ds["time_ir_wv"].attrs["_FillValue"] = timeFillValue + ds["time_ir_wv"].attrs["add_offset"] = timeAddOffset + return ds From 2dbae1af4d8d48638b7c0c7683c356aee071073e Mon Sep 17 00:00:00 2001 From: bkremmli Date: Fri, 17 May 2024 08:23:37 +0000 Subject: [PATCH 032/340] remove import datetime --- 
satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py | 1 - 1 file changed, 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py index 3c50ebcd43..3cb6070fed 100644 --- a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py +++ b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py @@ -19,7 +19,6 @@ from __future__ import annotations -import datetime import os from unittest import mock From eb0382a77471505a1170bf6b3c1471edd59ebeb7 Mon Sep 17 00:00:00 2001 From: bkremmli Date: Fri, 17 May 2024 09:04:13 +0000 Subject: [PATCH 033/340] add bkremmli to AUTHORS.md --- AUTHORS.md | 1 + 1 file changed, 1 insertion(+) diff --git a/AUTHORS.md b/AUTHORS.md index fb43d0168d..ec40df4309 100644 --- a/AUTHORS.md +++ b/AUTHORS.md @@ -44,6 +44,7 @@ The following people have made contributions to this project: - [Johannes Johansson (JohannesSMHI)](https://github.com/JohannesSMHI) - [Sauli Joro (sjoro)](https://github.com/sjoro) - [Janne Kotro (jkotro)](https://github.com/jkotro) +- [Beke Kremmling (bkremmli)](https://github.com/bkremmli) - Deutscher Wetterdienst - [Ralph Kuehn (ralphk11)](https://github.com/ralphk11) - [Panu Lahtinen (pnuu)](https://github.com/pnuu) - [Jussi Leinonen (jleinonen)](https://github.com/jleinonen) - meteoswiss From 7d6608a34d1300e845f9e7936c085e638b5966f1 Mon Sep 17 00:00:00 2001 From: bkremmli Date: Fri, 17 May 2024 09:11:59 +0000 Subject: [PATCH 034/340] correct for failures from hook id ruff --- satpy/readers/mviri_l1b_fiduceo_nc.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/satpy/readers/mviri_l1b_fiduceo_nc.py b/satpy/readers/mviri_l1b_fiduceo_nc.py index 0bbc285917..fd86273e62 100644 --- a/satpy/readers/mviri_l1b_fiduceo_nc.py +++ b/satpy/readers/mviri_l1b_fiduceo_nc.py @@ -153,7 +153,6 @@ """ import abc -import datetime import functools import warnings @@ -517,7 +516,6 @@ def get_time(self): Variable is sometimes named "time" and sometimes "time_ir_wv". 
FillValues in time are set to NaT. """ - try: time = self["time_ir_wv"] except KeyError: From ec22136ae33d6f05d7f87e55fcaee09312b29233 Mon Sep 17 00:00:00 2001 From: bkremmli Date: Fri, 17 May 2024 10:18:25 +0000 Subject: [PATCH 035/340] minor adaptations from PR comments --- satpy/readers/mviri_l1b_fiduceo_nc.py | 5 ++--- .../reader_tests/test_mviri_l1b_fiduceo_nc.py | 14 +++++++------- 2 files changed, 9 insertions(+), 10 deletions(-) diff --git a/satpy/readers/mviri_l1b_fiduceo_nc.py b/satpy/readers/mviri_l1b_fiduceo_nc.py index fd86273e62..b17f75fef0 100644 --- a/satpy/readers/mviri_l1b_fiduceo_nc.py +++ b/satpy/readers/mviri_l1b_fiduceo_nc.py @@ -157,7 +157,6 @@ import warnings import dask.array as da -import numpy import numpy as np import xarray as xr @@ -521,10 +520,10 @@ def get_time(self): except KeyError: time = self["time"] - timeOffset = time.attrs["add_offset"] + time_offset = time.attrs["add_offset"] condition = time == time.attrs["_FillValue"] - time = xr.where(condition, numpy.nan, time + timeOffset) + time = xr.where(condition, np.nan, time + time_offset) time = (time*1e9).astype("datetime64[ns]") return time diff --git a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py index 3cb6070fed..cf9573737c 100644 --- a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py +++ b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py @@ -273,11 +273,11 @@ def fixture_fake_dataset(): ) ) time = np.arange(4) * 60 * 60 - timeFillValue=4294967295 - timeAddOffset=0 - time[0] = timeFillValue - time[1] = timeFillValue - time = time.reshape(2,2) + time_fill_value = 4294967295 + time_add_offset = 0 + time[0] = time_fill_value + time[1] = time_fill_value + time = time.reshape(2, 2) ds = xr.Dataset( data_vars={ @@ -323,8 +323,8 @@ def fixture_fake_dataset(): ds["count_ir"].attrs["ancillary_variables"] = "a_ir b_ir" ds["count_wv"].attrs["ancillary_variables"] = "a_wv b_wv" - 
ds["time_ir_wv"].attrs["_FillValue"] = timeFillValue - ds["time_ir_wv"].attrs["add_offset"] = timeAddOffset + ds["time_ir_wv"].attrs["_FillValue"] = time_fill_value + ds["time_ir_wv"].attrs["add_offset"] = time_add_offset return ds From 5beedea8e90c30f133eccf7ec12c972f2abae3bf Mon Sep 17 00:00:00 2001 From: bkremmli <157395108+bkremmli@users.noreply.github.com> Date: Fri, 17 May 2024 12:20:19 +0200 Subject: [PATCH 036/340] Update satpy/readers/mviri_l1b_fiduceo_nc.py Co-authored-by: Martin Raspaud --- satpy/readers/mviri_l1b_fiduceo_nc.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/mviri_l1b_fiduceo_nc.py b/satpy/readers/mviri_l1b_fiduceo_nc.py index b17f75fef0..b2ad3fd22a 100644 --- a/satpy/readers/mviri_l1b_fiduceo_nc.py +++ b/satpy/readers/mviri_l1b_fiduceo_nc.py @@ -524,7 +524,7 @@ def get_time(self): condition = time == time.attrs["_FillValue"] time = xr.where(condition, np.nan, time + time_offset) - time = (time*1e9).astype("datetime64[ns]") + time = time.astype("datetime64[s]").astype("datetime64[ns]") return time def get_xy_coords(self, resolution): From 21679c61f9f9c26e8ce4b6f756e735f9fc52fd7b Mon Sep 17 00:00:00 2001 From: bkremmli Date: Fri, 24 May 2024 08:02:54 +0000 Subject: [PATCH 037/340] perform chunking after open_dataset and use decode_cf = False --- satpy/readers/mviri_l1b_fiduceo_nc.py | 30 +++++++++++-------- .../reader_tests/test_mviri_l1b_fiduceo_nc.py | 16 +++++----- 2 files changed, 26 insertions(+), 20 deletions(-) diff --git a/satpy/readers/mviri_l1b_fiduceo_nc.py b/satpy/readers/mviri_l1b_fiduceo_nc.py index b17f75fef0..76c3a924c4 100644 --- a/satpy/readers/mviri_l1b_fiduceo_nc.py +++ b/satpy/readers/mviri_l1b_fiduceo_nc.py @@ -162,9 +162,7 @@ from satpy.readers._geos_area import get_area_definition, get_area_extent, sampling_to_lfac_cfac from satpy.readers.file_handlers import BaseFileHandler -from satpy.utils import get_legacy_chunk_size -CHUNK_SIZE = get_legacy_chunk_size() EQUATOR_RADIUS = 
6378140.0 POLE_RADIUS = 6356755.0 ALTITUDE = 42164000.0 - EQUATOR_RADIUS @@ -458,6 +456,22 @@ class DatasetWrapper: def __init__(self, nc): """Wrap the given dataset.""" self.nc = nc + self._fix_duplicate_dimensions(nc) + self.nc = self._chunk(nc) + + def _fix_duplicate_dimensions(self, nc): + nc.variables["covariance_spectral_response_function_vis"].dims = ("srf_size_1", "srf_size_2") + + def _chunk(self, nc): + + (CHUNK_SIZE, CHUNK_SIZE) = nc.variables["quality_pixel_bitmask"].encoding["chunksizes"] + chunks = { + "x": CHUNK_SIZE, + "y": CHUNK_SIZE, + "x_ir_wv": CHUNK_SIZE, + "y_ir_wv": CHUNK_SIZE + } + return nc.chunk(chunks) @property def attrs(self): @@ -520,10 +534,6 @@ def get_time(self): except KeyError: time = self["time"] - time_offset = time.attrs["add_offset"] - condition = time == time.attrs["_FillValue"] - - time = xr.where(condition, np.nan, time + time_offset) time = (time*1e9).astype("datetime64[ns]") return time @@ -563,12 +573,8 @@ def __init__(self, filename, filename_info, filetype_info, # noqa: D417 self.mask_bad_quality = mask_bad_quality nc_raw = xr.open_dataset( filename, - # chunks={"x": CHUNK_SIZE, - # "y": CHUNK_SIZE, - # "x_ir_wv": CHUNK_SIZE, - # "y_ir_wv": CHUNK_SIZE}, - decode_times=False, - decode_cf=False + decode_cf=True, + decode_times=False ) self.nc = DatasetWrapper(nc_raw) diff --git a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py index cf9573737c..7a54a8d7ab 100644 --- a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py +++ b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py @@ -79,6 +79,7 @@ }, attrs=attrs_exp ) + vis_rad_exp = xr.DataArray( np.array( [[np.nan, 18.56, 38.28, 58.], @@ -272,11 +273,11 @@ def fixture_fake_dataset(): dtype=np.uint8 ) ) - time = np.arange(4) * 60 * 60 - time_fill_value = 4294967295 - time_add_offset = 0 - time[0] = time_fill_value - time[1] = time_fill_value + + cov = da.from_array([[1, 2], [3, 4]]) + time = np.arange(4) * 60 
* 60. + time[0] = np.nan + time[1] = np.nan time = time.reshape(2, 2) ds = xr.Dataset( @@ -308,6 +309,7 @@ def fixture_fake_dataset(): "sub_satellite_longitude_end": np.nan, "sub_satellite_latitude_start": np.nan, "sub_satellite_latitude_end": 0.1, + "covariance_spectral_response_function_vis": (("srf_size", "srf_size"), cov), }, coords={ "y": [1, 2, 3, 4], @@ -322,9 +324,7 @@ def fixture_fake_dataset(): ) ds["count_ir"].attrs["ancillary_variables"] = "a_ir b_ir" ds["count_wv"].attrs["ancillary_variables"] = "a_wv b_wv" - - ds["time_ir_wv"].attrs["_FillValue"] = time_fill_value - ds["time_ir_wv"].attrs["add_offset"] = time_add_offset + ds["quality_pixel_bitmask"].encoding["chunksizes"] = (2, 2) return ds From 59880ced84accdfe8a3996419d57ff2befa9b060 Mon Sep 17 00:00:00 2001 From: bkremmli Date: Tue, 28 May 2024 12:15:40 +0000 Subject: [PATCH 038/340] decode times separatly from other variables, adds TestInterpolator --- satpy/readers/mviri_l1b_fiduceo_nc.py | 32 +++++--- .../reader_tests/test_mviri_l1b_fiduceo_nc.py | 73 ++++++++++++++++++- 2 files changed, 90 insertions(+), 15 deletions(-) diff --git a/satpy/readers/mviri_l1b_fiduceo_nc.py b/satpy/readers/mviri_l1b_fiduceo_nc.py index 76c3a924c4..fcf6c67c77 100644 --- a/satpy/readers/mviri_l1b_fiduceo_nc.py +++ b/satpy/readers/mviri_l1b_fiduceo_nc.py @@ -456,20 +456,27 @@ class DatasetWrapper: def __init__(self, nc): """Wrap the given dataset.""" self.nc = nc - self._fix_duplicate_dimensions(nc) - self.nc = self._chunk(nc) + + # remove time before decoding and add again. 
+ raw_time = nc["time_ir_wv"] + self.nc = self.nc.drop_vars(["time_ir_wv"]) + self.nc = xr.decode_cf(self.nc) + self.nc["time_ir_wv"] = raw_time + + self._fix_duplicate_dimensions(self.nc) + self.nc = self._chunk(self.nc) def _fix_duplicate_dimensions(self, nc): nc.variables["covariance_spectral_response_function_vis"].dims = ("srf_size_1", "srf_size_2") def _chunk(self, nc): - (CHUNK_SIZE, CHUNK_SIZE) = nc.variables["quality_pixel_bitmask"].encoding["chunksizes"] + (chunk_size, chunk_size) = nc.variables["quality_pixel_bitmask"].encoding["chunksizes"] chunks = { - "x": CHUNK_SIZE, - "y": CHUNK_SIZE, - "x_ir_wv": CHUNK_SIZE, - "y_ir_wv": CHUNK_SIZE + "x": chunk_size, + "y": chunk_size, + "x_ir_wv": chunk_size, + "y_ir_wv": chunk_size } return nc.chunk(chunks) @@ -527,14 +534,16 @@ def get_time(self): """Get time coordinate. Variable is sometimes named "time" and sometimes "time_ir_wv". - FillValues in time are set to NaT. + FillValues in time are set to NaT, others converted to datetime64. 
""" try: time = self["time_ir_wv"] except KeyError: time = self["time"] - time = (time*1e9).astype("datetime64[ns]") + time = xr.where(time == time.attrs["_FillValue"], np.datetime64("NaT"), + (time + time.attrs["add_offset"]).astype("datetime64[s]").astype("datetime64[ns]")) + return time def get_xy_coords(self, resolution): @@ -573,8 +582,9 @@ def __init__(self, filename, filename_info, filetype_info, # noqa: D417 self.mask_bad_quality = mask_bad_quality nc_raw = xr.open_dataset( filename, - decode_cf=True, - decode_times=False + decode_cf=False, + decode_times=False, + mask_and_scale=False ) self.nc = DatasetWrapper(nc_raw) diff --git a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py index 7a54a8d7ab..470e2d4b6f 100644 --- a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py +++ b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py @@ -28,6 +28,7 @@ import xarray as xr from pyproj import CRS from pyresample.geometry import AreaDefinition +from pytest_lazyfixture import lazy_fixture from satpy.readers.mviri_l1b_fiduceo_nc import ( ALTITUDE, @@ -36,6 +37,7 @@ DatasetWrapper, FiduceoMviriEasyFcdrFileHandler, FiduceoMviriFullFcdrFileHandler, + Interpolator, ) from satpy.tests.utils import make_dataid @@ -125,7 +127,7 @@ }, attrs=attrs_exp ) -acq_time_ir_wv_exp = [np.datetime64("NaT").astype("datetime64[ns]"), +acq_time_ir_wv_exp = [np.datetime64("NaT"), np.datetime64("1970-01-01 02:30").astype("datetime64[ns]")] wv_counts_exp = xr.DataArray( np.array( @@ -275,9 +277,9 @@ def fixture_fake_dataset(): ) cov = da.from_array([[1, 2], [3, 4]]) - time = np.arange(4) * 60 * 60. 
- time[0] = np.nan - time[1] = np.nan + time = np.arange(4) * 60 * 60 + time[0] = 4294967295 + time[1] = 4294967295 time = time.reshape(2, 2) ds = xr.Dataset( @@ -325,6 +327,8 @@ def fixture_fake_dataset(): ds["count_ir"].attrs["ancillary_variables"] = "a_ir b_ir" ds["count_wv"].attrs["ancillary_variables"] = "a_wv b_wv" ds["quality_pixel_bitmask"].encoding["chunksizes"] = (2, 2) + ds["time_ir_wv"].attrs["_FillValue"] = 4294967295 + ds["time_ir_wv"].attrs["add_offset"] = 0 return ds @@ -412,6 +416,7 @@ def test_get_dataset(self, file_handler, name, calibration, resolution, xr.testing.assert_allclose(ds, expected) assert ds.dtype == expected.dtype assert ds.attrs == expected.attrs + assert True def test_get_dataset_corrupt(self, file_handler): """Test getting datasets with known corruptions.""" @@ -600,3 +605,63 @@ def test_reassign_coords(self): ds = DatasetWrapper(nc) foo = ds["foo"] xr.testing.assert_equal(foo, foo_exp) + +class TestInterpolator: + """Unit tests for Interpolator class.""" + @pytest.fixture(name="time_ir_wv") + def fixture_time_ir_wv(self): + """Returns time_ir_wv.""" + return xr.DataArray( + [ + [np.datetime64("1970-01-01 01:00"), np.datetime64("1970-01-01 02:00")], + [np.datetime64("1970-01-01 03:00"), np.datetime64("1970-01-01 04:00")], + [np.datetime64("NaT"), np.datetime64("1970-01-01 06:00")], + [np.datetime64("NaT"), np.datetime64("NaT")], + ], + dims=("y", "x"), + coords={"y": [1, 3, 5, 7]} + ) + + @pytest.fixture(name="acq_time_vis_exp") + def fixture_acq_time_vis_exp(self): + """Returns acq_time_vis_exp.""" + return xr.DataArray( + [ + np.datetime64("1970-01-01 01:30"), + np.datetime64("1970-01-01 01:30"), + np.datetime64("1970-01-01 03:30"), + np.datetime64("1970-01-01 03:30"), + np.datetime64("1970-01-01 06:00"), + np.datetime64("1970-01-01 06:00"), + np.datetime64("NaT"), + np.datetime64("NaT") + ], + dims="y", + coords={"y": [1, 2, 3, 4, 5, 6, 7, 8]} + ) + + @pytest.fixture(name="acq_time_ir_exp") + def fixture_acq_time_ir_exp(self): 
+ """Returns acq_time_ir_exp.""" + return xr.DataArray( + [ + np.datetime64("1970-01-01 01:30"), + np.datetime64("1970-01-01 03:30"), + np.datetime64("1970-01-01 06:00"), + np.datetime64("NaT"), + ], + dims="y", + coords={"y": [1, 3, 5, 7]} + ) + + @pytest.mark.parametrize( + "acq_time_exp", + [ + lazy_fixture("acq_time_ir_exp"), + lazy_fixture("acq_time_vis_exp") + ] + ) + def test_interp_acq_time(self, time_ir_wv, acq_time_exp): + """Tests time interpolation.""" + res = Interpolator.interp_acq_time(time_ir_wv, target_y=acq_time_exp.coords["y"]) + xr.testing.assert_allclose(res, acq_time_exp) From fb93f00b78adefed918ce122d78e43ff837b99a4 Mon Sep 17 00:00:00 2001 From: bkremmli Date: Tue, 4 Jun 2024 13:23:57 +0000 Subject: [PATCH 039/340] fixes _decode_cf() and tests --- satpy/readers/mviri_l1b_fiduceo_nc.py | 41 +++++++++---------- .../reader_tests/test_mviri_l1b_fiduceo_nc.py | 6 +-- 2 files changed, 22 insertions(+), 25 deletions(-) diff --git a/satpy/readers/mviri_l1b_fiduceo_nc.py b/satpy/readers/mviri_l1b_fiduceo_nc.py index fcf6c67c77..4ef225ecc2 100644 --- a/satpy/readers/mviri_l1b_fiduceo_nc.py +++ b/satpy/readers/mviri_l1b_fiduceo_nc.py @@ -405,7 +405,6 @@ def interp_acq_time(time2d, target_y): """ # Compute mean timestamp per scanline time = time2d.mean(dim="x") - # If required, repeat timestamps in y-direction to obtain higher # resolution y = time.coords["y"].values @@ -453,30 +452,36 @@ def is_high_resol(resolution): class DatasetWrapper: """Helper class for accessing the dataset.""" - def __init__(self, nc): + def __init__(self, nc, decode_nc=True): """Wrap the given dataset.""" self.nc = nc + if decode_nc is True: + self._decode_cf() + self._fix_duplicate_dimensions(self.nc) + self.nc = self._chunk(self.nc) + + def _decode_cf(self): # remove time before decoding and add again. 
- raw_time = nc["time_ir_wv"] - self.nc = self.nc.drop_vars(["time_ir_wv"]) + time = self.get_time() + time_dims = self.nc["time_ir_wv"].dims + time = xr.where(time == time.attrs["_FillValue"], np.datetime64("NaT"), + (time + time.attrs["add_offset"]).astype("datetime64[s]").astype("datetime64[ns]")) + self.nc = self.nc.drop_vars(time.name) self.nc = xr.decode_cf(self.nc) - self.nc["time_ir_wv"] = raw_time - - self._fix_duplicate_dimensions(self.nc) - self.nc = self._chunk(self.nc) + self.nc[time.name] = (time_dims, time.values) def _fix_duplicate_dimensions(self, nc): nc.variables["covariance_spectral_response_function_vis"].dims = ("srf_size_1", "srf_size_2") def _chunk(self, nc): - (chunk_size, chunk_size) = nc.variables["quality_pixel_bitmask"].encoding["chunksizes"] + (chunk_size_y, chunk_size_x) = nc.variables["quality_pixel_bitmask"].encoding["chunksizes"] chunks = { - "x": chunk_size, - "y": chunk_size, - "x_ir_wv": chunk_size, - "y_ir_wv": chunk_size + "x": chunk_size_x, + "y": chunk_size_y, + "x_ir_wv": chunk_size_x, + "y_ir_wv": chunk_size_y } return nc.chunk(chunks) @@ -534,17 +539,11 @@ def get_time(self): """Get time coordinate. Variable is sometimes named "time" and sometimes "time_ir_wv". - FillValues in time are set to NaT, others converted to datetime64. 
""" try: - time = self["time_ir_wv"] + return self["time_ir_wv"] except KeyError: - time = self["time"] - - time = xr.where(time == time.attrs["_FillValue"], np.datetime64("NaT"), - (time + time.attrs["add_offset"]).astype("datetime64[s]").astype("datetime64[ns]")) - - return time + return self["time"] def get_xy_coords(self, resolution): """Get x and y coordinates for the given resolution.""" diff --git a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py index 470e2d4b6f..6a3d5525a8 100644 --- a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py +++ b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py @@ -28,7 +28,7 @@ import xarray as xr from pyproj import CRS from pyresample.geometry import AreaDefinition -from pytest_lazyfixture import lazy_fixture +from pytest_lazy_fixtures import lf as lazy_fixture from satpy.readers.mviri_l1b_fiduceo_nc import ( ALTITUDE, @@ -329,7 +329,6 @@ def fixture_fake_dataset(): ds["quality_pixel_bitmask"].encoding["chunksizes"] = (2, 2) ds["time_ir_wv"].attrs["_FillValue"] = 4294967295 ds["time_ir_wv"].attrs["add_offset"] = 0 - return ds @@ -416,7 +415,6 @@ def test_get_dataset(self, file_handler, name, calibration, resolution, xr.testing.assert_allclose(ds, expected) assert ds.dtype == expected.dtype assert ds.attrs == expected.attrs - assert True def test_get_dataset_corrupt(self, file_handler): """Test getting datasets with known corruptions.""" @@ -602,7 +600,7 @@ def test_reassign_coords(self): "x": [.3, .4] } ) - ds = DatasetWrapper(nc) + ds = DatasetWrapper(nc, decode_nc=False) foo = ds["foo"] xr.testing.assert_equal(foo, foo_exp) From 951c9b0cb4dcdc143250c4d1a7ab8ead83574252 Mon Sep 17 00:00:00 2001 From: bkremmli Date: Wed, 5 Jun 2024 13:37:26 +0000 Subject: [PATCH 040/340] adds test_fix_duplicate_dimensions and removes leftover dimensions "srf_size" --- satpy/readers/mviri_l1b_fiduceo_nc.py | 3 ++- .../reader_tests/test_mviri_l1b_fiduceo_nc.py | 25 
+++++++++++++++++++ 2 files changed, 27 insertions(+), 1 deletion(-) diff --git a/satpy/readers/mviri_l1b_fiduceo_nc.py b/satpy/readers/mviri_l1b_fiduceo_nc.py index 4ef225ecc2..c170f87bd0 100644 --- a/satpy/readers/mviri_l1b_fiduceo_nc.py +++ b/satpy/readers/mviri_l1b_fiduceo_nc.py @@ -473,6 +473,7 @@ def _decode_cf(self): def _fix_duplicate_dimensions(self, nc): nc.variables["covariance_spectral_response_function_vis"].dims = ("srf_size_1", "srf_size_2") + self.nc = nc.drop_dims("srf_size") def _chunk(self, nc): @@ -583,7 +584,7 @@ def __init__(self, filename, filename_info, filetype_info, # noqa: D417 filename, decode_cf=False, decode_times=False, - mask_and_scale=False + mask_and_scale=False, ) self.nc = DatasetWrapper(nc_raw) diff --git a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py index 6a3d5525a8..2cc7fdce61 100644 --- a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py +++ b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py @@ -329,6 +329,7 @@ def fixture_fake_dataset(): ds["quality_pixel_bitmask"].encoding["chunksizes"] = (2, 2) ds["time_ir_wv"].attrs["_FillValue"] = 4294967295 ds["time_ir_wv"].attrs["add_offset"] = 0 + return ds @@ -570,6 +571,30 @@ def test_file_pattern(self, reader): class TestDatasetWrapper: """Unit tests for DatasetWrapper class.""" + def test_fix_duplicate_dimensions(self): + """Test the renaming of duplicate dimensions. + + If duplicate dimensions are within the Dataset, opening the datasets with chunks throws an error. + Thus, the chunking needs to be done after opening the dataset and after the dimensions are renamed. 
+ """ + foo = xr.Dataset( + data_vars={"covariance_spectral_response_function_vis": + (("srf_size", "srf_size"), [[1, 2], [3, 4]])} + ) + foo_ds = DatasetWrapper(foo, decode_nc=False) + foo_ds._fix_duplicate_dimensions(foo_ds.nc) + + foo_exp = xr.Dataset( + data_vars={"covariance_spectral_response_function_vis": + (("srf_size_1", "srf_size_2"), [[1, 2], [3, 4]])} + ) + + try: + foo_ds.nc.chunk("auto") + xr.testing.assert_allclose(foo_ds.nc, foo_exp) + except ValueError: + pytest.fail("Chunking failed.") + def test_reassign_coords(self): """Test reassigning of coordinates. From a379a086fbddf1de1c091591f3e621f7d1849ac3 Mon Sep 17 00:00:00 2001 From: bkremmli <157395108+bkremmli@users.noreply.github.com> Date: Wed, 5 Jun 2024 16:27:02 +0200 Subject: [PATCH 041/340] Update mviri_l1b_fiduceo_nc.py small fix to DataWrapper._decode_cf() --- satpy/readers/mviri_l1b_fiduceo_nc.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/mviri_l1b_fiduceo_nc.py b/satpy/readers/mviri_l1b_fiduceo_nc.py index c170f87bd0..e9cd4eae28 100644 --- a/satpy/readers/mviri_l1b_fiduceo_nc.py +++ b/satpy/readers/mviri_l1b_fiduceo_nc.py @@ -464,7 +464,7 @@ def __init__(self, nc, decode_nc=True): def _decode_cf(self): # remove time before decoding and add again. 
time = self.get_time() - time_dims = self.nc["time_ir_wv"].dims + time_dims = self.nc[time.name].dims time = xr.where(time == time.attrs["_FillValue"], np.datetime64("NaT"), (time + time.attrs["add_offset"]).astype("datetime64[s]").astype("datetime64[ns]")) self.nc = self.nc.drop_vars(time.name) From 5ec0cf9eaa357f34f4362c06e3a86de26f46e5e3 Mon Sep 17 00:00:00 2001 From: bkremmli Date: Thu, 6 Jun 2024 10:31:01 +0000 Subject: [PATCH 042/340] adds support for filenames of MVIRI FCDR L1.5 release 2 --- satpy/etc/readers/mviri_l1b_fiduceo_nc.yaml | 8 ++++++-- satpy/readers/mviri_l1b_fiduceo_nc.py | 9 +++++++-- satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py | 5 ++++- 3 files changed, 17 insertions(+), 5 deletions(-) diff --git a/satpy/etc/readers/mviri_l1b_fiduceo_nc.yaml b/satpy/etc/readers/mviri_l1b_fiduceo_nc.yaml index ec3c5cab77..da30cb2545 100644 --- a/satpy/etc/readers/mviri_l1b_fiduceo_nc.yaml +++ b/satpy/etc/readers/mviri_l1b_fiduceo_nc.yaml @@ -20,14 +20,18 @@ file_types: nc_easy: file_reader: !!python/name:satpy.readers.mviri_l1b_fiduceo_nc.FiduceoMviriEasyFcdrFileHandler file_patterns: [ - 'FIDUCEO_FCDR_{level}_{sensor}_{platform}-{projection_longitude:f}_{start_time:%Y%m%d%H%M}_{end_time:%Y%m%d%H%M}_EASY_{processor_version}_{format_version}.nc' + 'FIDUCEO_FCDR_{level}_{sensor}_{platform}-{projection_longitude:f}_{start_time:%Y%m%d%H%M}_{end_time:%Y%m%d%H%M}_EASY_{processor_version}_{format_version}.nc', # Example: FIDUCEO_FCDR_L15_MVIRI_MET7-57.0_201701201000_201701201030_EASY_v2.6_fv3.1.nc + '{sensor}_FCDR-EASY_{level}_{platform}-E{projection_longitude:s}_{start_time:%Y%m%d%H%M}_{end_time:%Y%m%d%H%M}_{release}.nc' + # Example: MVIRI_FCDR-EASY_L15_MET7-E0000_200607060600_200607060630_0200.nc ] nc_full: file_reader: !!python/name:satpy.readers.mviri_l1b_fiduceo_nc.FiduceoMviriFullFcdrFileHandler file_patterns: [ - 
'FIDUCEO_FCDR_{level}_{sensor}_{platform}-{projection_longitude:f}_{start_time:%Y%m%d%H%M}_{end_time:%Y%m%d%H%M}_FULL_{processor_version}_{format_version}.nc' + 'FIDUCEO_FCDR_{level}_{sensor}_{platform}-{projection_longitude:f}_{start_time:%Y%m%d%H%M}_{end_time:%Y%m%d%H%M}_FULL_{processor_version}_{format_version}.nc', # Example: FIDUCEO_FCDR_L15_MVIRI_MET7-57.0_201701201000_201701201030_FULL_v2.6_fv3.1.nc + '{sensor}_FCDR-FULL_{level}_{platform}-E{projection_longitude:s}_{start_time:%Y%m%d%H%M}_{end_time:%Y%m%d%H%M}_{release}.nc' + # Example: MVIRI_FCDR-FULL_L15_MET7-E0000_200607060600_200607060630_0200.nc ] datasets: diff --git a/satpy/readers/mviri_l1b_fiduceo_nc.py b/satpy/readers/mviri_l1b_fiduceo_nc.py index c170f87bd0..942d01fe71 100644 --- a/satpy/readers/mviri_l1b_fiduceo_nc.py +++ b/satpy/readers/mviri_l1b_fiduceo_nc.py @@ -464,7 +464,7 @@ def __init__(self, nc, decode_nc=True): def _decode_cf(self): # remove time before decoding and add again. time = self.get_time() - time_dims = self.nc["time_ir_wv"].dims + time_dims = self.nc[time.name].dims time = xr.where(time == time.attrs["_FillValue"], np.datetime64("NaT"), (time + time.attrs["add_offset"]).astype("datetime64[s]").astype("datetime64[ns]")) self.nc = self.nc.drop_vars(time.name) @@ -591,7 +591,12 @@ def __init__(self, filename, filename_info, filetype_info, # noqa: D417 # Projection longitude is not provided in the file, read it from the # filename. - self.projection_longitude = float(filename_info["projection_longitude"]) + if "." in str(filename_info["projection_longitude"]): + self.projection_longitude = float(filename_info["projection_longitude"]) + else: + self.projection_longitude = ( + float(filename_info["projection_longitude"][:2] + "." 
+ filename_info["projection_longitude"][2:]) + ) self.calib_coefs = self._get_calib_coefs() self._get_angles = functools.lru_cache(maxsize=8)( diff --git a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py index 2cc7fdce61..b07b79740c 100644 --- a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py +++ b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py @@ -560,12 +560,15 @@ def test_file_pattern(self, reader): "FIDUCEO_FCDR_L15_MVIRI_MET7-57.0_201701201000_201701201030_FULL_v2.6_fv3.1.nc", "FIDUCEO_FCDR_L15_MVIRI_MET7-57.0_201701201000_201701201030_EASY_v2.6_fv3.1.nc", "FIDUCEO_FCDR_L15_MVIRI_MET7-00.0_201701201000_201701201030_EASY_v2.6_fv3.1.nc", + "MVIRI_FCDR-EASY_L15_MET7-E0000_200607060600_200607060630_0200.nc", + "MVIRI_FCDR-EASY_L15_MET7-E5700_200607060600_200607060630_0200.nc", + "MVIRI_FCDR-FULL_L15_MET7-E0000_200607060600_200607060630_0200.nc", "abcde", ] files = reader.select_files_from_pathnames(filenames) # only 3 out of 4 above should match - assert len(files) == 3 + assert len(files) == 6 class TestDatasetWrapper: From ed5213bd2e0b8e37a5e1f45b2b3d06d1948ff7f4 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Thu, 13 Jun 2024 14:05:34 +0000 Subject: [PATCH 043/340] Fix end_time computation, optimize SEVIRI imports and fix code style issues. 
--- satpy/readers/eum_l2_grib.py | 23 ++++++++++---------- satpy/tests/reader_tests/test_eum_l2_grib.py | 20 ++++++++--------- 2 files changed, 22 insertions(+), 21 deletions(-) diff --git a/satpy/readers/eum_l2_grib.py b/satpy/readers/eum_l2_grib.py index c3cc7e61c4..543aa71c30 100644 --- a/satpy/readers/eum_l2_grib.py +++ b/satpy/readers/eum_l2_grib.py @@ -22,6 +22,7 @@ https://navigator.eumetsat.int/ """ +import datetime as dt import logging import dask.array as da @@ -32,7 +33,9 @@ from satpy.readers.eum_base import get_service_mode from satpy.readers.fci_base import calculate_area_extent as fci_calculate_area_extent from satpy.readers.file_handlers import BaseFileHandler -from satpy.readers.seviri_base import PLATFORM_DICT, REPEAT_CYCLE_DURATION, REPEAT_CYCLE_DURATION_RSS +from satpy.readers.seviri_base import PLATFORM_DICT as SEVIRI_PLATFORM_DICT +from satpy.readers.seviri_base import REPEAT_CYCLE_DURATION as SEVIRI_REPEAT_CYCLE_DURATION +from satpy.readers.seviri_base import REPEAT_CYCLE_DURATION_RSS as SEVIRI_REPEAT_CYCLE_DURATION_RSS from satpy.readers.seviri_base import calculate_area_extent as seviri_calculate_area_extent from satpy.utils import get_legacy_chunk_size @@ -60,7 +63,7 @@ def __init__(self, filename, filename_info, filetype_info): if "seviri" in self.filetype_info["file_type"]: self.sensor = "seviri" - self.PLATFORM_NAME = PLATFORM_DICT[self.filename_info["spacecraft"]] + self.PLATFORM_NAME = SEVIRI_PLATFORM_DICT[self.filename_info["spacecraft"]] elif "fci" in self.filetype_info["file_type"]: self.sensor = "fci" self.PLATFORM_NAME = f"MTG-i{self.filename_info['spacecraft_id']}" @@ -74,8 +77,11 @@ def start_time(self): @property def end_time(self): """Return the sensing end time.""" - delta = REPEAT_CYCLE_DURATION_RSS if self._ssp_lon == 9.5 else REPEAT_CYCLE_DURATION - return self.start_time + delta + if self.sensor == "seviri": + delta = SEVIRI_REPEAT_CYCLE_DURATION_RSS if self._ssp_lon == 9.5 else SEVIRI_REPEAT_CYCLE_DURATION + return 
self.start_time + dt.timedelta(minutes=delta) + elif self.sensor == "fci": + return self.filename_info["end_time"] def get_area_def(self, dataset_id): """Return the area definition for a dataset.""" @@ -282,13 +288,8 @@ def _get_attributes(self): "projection_longitude": self._ssp_lon } - attributes = { - "orbital_parameters": orbital_parameters, - "sensor": self.sensor - } - - - attributes["platform_name"] = self.PLATFORM_NAME + attributes = {"orbital_parameters": orbital_parameters, "sensor": self.sensor, + "platform_name": self.PLATFORM_NAME} return attributes diff --git a/satpy/tests/reader_tests/test_eum_l2_grib.py b/satpy/tests/reader_tests/test_eum_l2_grib.py index a7846be706..8745fc33d2 100644 --- a/satpy/tests/reader_tests/test_eum_l2_grib.py +++ b/satpy/tests/reader_tests/test_eum_l2_grib.py @@ -60,7 +60,7 @@ FAKE_GID = [0, 1, 2, 3, None] -class Test_EUML2GribFileHandler(unittest.TestCase): +class TestEUML2GribFileHandler(unittest.TestCase): """Test the EUML2GribFileHandler reader.""" @mock.patch("satpy.readers.eum_l2_grib.ec") @@ -72,7 +72,7 @@ def setUp(self, ec_): def common_checks(self, mock_file, dataset_id): """Commmon checks for fci and seviri data.""" - # Checks that the codes_grib_multi_support_on function has been called + # Checks that the codes_grib_multi_support_on function has been called self.ec_.codes_grib_multi_support_on.assert_called() # Restarts the id generator and clears the call history @@ -110,9 +110,9 @@ def common_checks(self, mock_file, dataset_id): @mock.patch("satpy.readers.eum_l2_grib.da") def test_seviri_data_reading(self, da_, xr_): """Test the reading of data from the product.""" - from satpy.readers.eum_l2_grib import REPEAT_CYCLE_DURATION, EUML2GribFileHandler + from satpy.readers.eum_l2_grib import SEVIRI_REPEAT_CYCLE_DURATION, EUML2GribFileHandler from satpy.utils import get_legacy_chunk_size - CHUNK_SIZE = get_legacy_chunk_size() + chunk_size = get_legacy_chunk_size() with mock.patch("builtins.open", mock.mock_open()) 
as mock_file: with mock.patch("satpy.readers.eum_l2_grib.ec", self.ec_): @@ -126,7 +126,7 @@ def test_seviri_data_reading(self, da_, xr_): hour=19, minute=45, second=0) }, filetype_info={ - "file_type" : "seviri" + "file_type": "seviri" } ) @@ -135,7 +135,7 @@ def test_seviri_data_reading(self, da_, xr_): self.common_checks(mock_file, dataset_id) # Checks the basic data reading - assert REPEAT_CYCLE_DURATION == 15 + assert SEVIRI_REPEAT_CYCLE_DURATION == 15 # Checks the correct execution of the _get_global_attributes and _get_metadata_from_msg functions attributes = self.reader._get_attributes() @@ -154,7 +154,7 @@ def test_seviri_data_reading(self, da_, xr_): # Checks that dask.array has been called with the correct arguments name, args, kwargs = da_.mock_calls[0] assert np.all(args[0] == np.ones((1200, 1000))) - assert args[1] == CHUNK_SIZE + assert args[1] == chunk_size # Checks that xarray.DataArray has been called with the correct arguments name, args, kwargs = xr_.mock_calls[0] @@ -208,7 +208,7 @@ def test_fci_data_reading(self, da_, xr_): """Test the reading of fci data from the product.""" from satpy.readers.eum_l2_grib import EUML2GribFileHandler from satpy.utils import get_legacy_chunk_size - CHUNK_SIZE = get_legacy_chunk_size() + chunk_size = get_legacy_chunk_size() with mock.patch("builtins.open", mock.mock_open()) as mock_file: with mock.patch("satpy.readers.eum_l2_grib.ec", self.ec_): @@ -222,7 +222,7 @@ def test_fci_data_reading(self, da_, xr_): hour=19, minute=45, second=0) }, filetype_info={ - "file_type" : "fci" + "file_type": "fci" } ) @@ -247,7 +247,7 @@ def test_fci_data_reading(self, da_, xr_): # Checks that dask.array has been called with the correct arguments name, args, kwargs = da_.mock_calls[0] assert np.all(args[0] == np.ones((5568, 5568))) - assert args[1] == CHUNK_SIZE + assert args[1] == chunk_size # Checks that xarray.DataArray has been called with the correct arguments name, args, kwargs = xr_.mock_calls[0] From 
164bcba5b8cb51081823e436a9861a53d6463259 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Thu, 13 Jun 2024 16:08:36 +0000 Subject: [PATCH 044/340] Add tests for end_time. --- satpy/tests/reader_tests/test_eum_l2_grib.py | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/satpy/tests/reader_tests/test_eum_l2_grib.py b/satpy/tests/reader_tests/test_eum_l2_grib.py index 8745fc33d2..593eb2f5af 100644 --- a/satpy/tests/reader_tests/test_eum_l2_grib.py +++ b/satpy/tests/reader_tests/test_eum_l2_grib.py @@ -110,7 +110,7 @@ def common_checks(self, mock_file, dataset_id): @mock.patch("satpy.readers.eum_l2_grib.da") def test_seviri_data_reading(self, da_, xr_): """Test the reading of data from the product.""" - from satpy.readers.eum_l2_grib import SEVIRI_REPEAT_CYCLE_DURATION, EUML2GribFileHandler + from satpy.readers.eum_l2_grib import EUML2GribFileHandler from satpy.utils import get_legacy_chunk_size chunk_size = get_legacy_chunk_size() @@ -134,8 +134,9 @@ def test_seviri_data_reading(self, da_, xr_): self.common_checks(mock_file, dataset_id) - # Checks the basic data reading - assert SEVIRI_REPEAT_CYCLE_DURATION == 15 + # Check end_time + assert self.reader.end_time == datetime.datetime(year=2020, month=10, day=20, + hour=19, minute=50, second=0) # Checks the correct execution of the _get_global_attributes and _get_metadata_from_msg functions attributes = self.reader._get_attributes() @@ -219,7 +220,9 @@ def test_fci_data_reading(self, da_, xr_): filename_info={ "spacecraft_id": "1", "start_time": datetime.datetime(year=2020, month=10, day=20, - hour=19, minute=45, second=0) + hour=19, minute=40, second=0), + "end_time": datetime.datetime(year=2020, month=10, day=20, + hour=19, minute=50, second=0) }, filetype_info={ "file_type": "fci" @@ -230,6 +233,10 @@ def test_fci_data_reading(self, da_, xr_): self.common_checks(mock_file, dataset_id) + # Check end_time + assert self.reader.end_time == datetime.datetime(year=2020, month=10, 
day=20, + hour=19, minute=50, second=0) + # Checks the correct execution of the _get_global_attributes and _get_metadata_from_msg functions attributes = self.reader._get_attributes() expected_attributes = { From 904270cb72045552fa877b7ac5f1b24379ecbc28 Mon Sep 17 00:00:00 2001 From: BENR0 Date: Fri, 14 Jun 2024 14:05:01 +0200 Subject: [PATCH 045/340] fix: compat=override test --- satpy/tests/scene_tests/test_conversions.py | 46 +++++++++++++++++---- 1 file changed, 38 insertions(+), 8 deletions(-) diff --git a/satpy/tests/scene_tests/test_conversions.py b/satpy/tests/scene_tests/test_conversions.py index 4490903880..50ea1a46d8 100644 --- a/satpy/tests/scene_tests/test_conversions.py +++ b/satpy/tests/scene_tests/test_conversions.py @@ -17,7 +17,9 @@ """Unit tests for Scene conversion functionality.""" import datetime as dt +from datetime import datetime +import numpy as np import pytest import xarray as xr from dask import array as da @@ -47,14 +49,6 @@ def test_serialization_with_readers_and_data_arr(self): class TestSceneConversions: """Test Scene conversion to geoviews, xarray, etc.""" - def test_to_xarray_dataset_with_empty_scene(self): - """Test converting empty Scene to xarray dataset.""" - scn = Scene() - xrds = scn.to_xarray_dataset() - assert isinstance(xrds, xr.Dataset) - assert len(xrds.variables) == 0 - assert len(xrds.coords) == 0 - def test_geoviews_basic_with_area(self): """Test converting a Scene to geoviews with an AreaDefinition.""" from pyresample.geometry import AreaDefinition @@ -157,6 +151,42 @@ def single_area_scn(self): scn["var1"] = data_array return scn + def test_to_xarray_dataset_with_conflicting_variables(self): + """Test converting Scene with DataArrays with conflicting variables. + + E.g. 
"acq_time" in the seviri_l1b_nc reader + """ + from pyresample.geometry import AreaDefinition + area = AreaDefinition("test", "test", "test", + {"proj": "geos", "lon_0": -95.5, "h": 35786023.0}, + 2, 2, [-200, -200, 200, 200]) + scn = Scene() + + acq_time_1 = ("y", [np.datetime64("1958-01-02 00:00:01"), + np.datetime64("1958-01-02 00:00:02")]) + ds = xr.DataArray(da.zeros((2, 2), chunks=-1), dims=("y", "x"), + attrs={"start_time": datetime(2018, 1, 1), "area": area}) + ds["acq_time"] = acq_time_1 + + scn["ds1"] = ds + + acq_time_2 = ("y", [np.datetime64("1958-02-02 00:00:01"), + np.datetime64("1958-02-02 00:00:02")]) + ds2 = ds.copy() + ds2["acq_time"] = acq_time_2 + + scn["ds2"] = ds2 + + # drop case (compat="minimal") + xrds = scn.to_xarray_dataset() + assert isinstance(xrds, xr.Dataset) + assert "acq_time" not in xrds.coords + + xrds = scn.to_xarray_dataset(compat="override") + assert isinstance(xrds, xr.Dataset) + assert "acq_time" in xrds.coords + np.testing.assert_array_equal(xrds["acq_time"].values, acq_time_1[1]) + @pytest.fixture() def multi_area_scn(self): """Define Scene with multiple area.""" From aff7b96c114fe0d9e1283b03d7afa987cc3b08d5 Mon Sep 17 00:00:00 2001 From: BENR0 Date: Fri, 14 Jun 2024 14:47:55 +0200 Subject: [PATCH 046/340] fix: testing of datasets euqal --- satpy/tests/scene_tests/test_conversions.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/scene_tests/test_conversions.py b/satpy/tests/scene_tests/test_conversions.py index 50ea1a46d8..f8d4c0a0cb 100644 --- a/satpy/tests/scene_tests/test_conversions.py +++ b/satpy/tests/scene_tests/test_conversions.py @@ -185,7 +185,7 @@ def test_to_xarray_dataset_with_conflicting_variables(self): xrds = scn.to_xarray_dataset(compat="override") assert isinstance(xrds, xr.Dataset) assert "acq_time" in xrds.coords - np.testing.assert_array_equal(xrds["acq_time"].values, acq_time_1[1]) + xr.testing.assert_equal(xrds["acq_time"], ds["acq_time"]) @pytest.fixture() def 
multi_area_scn(self): From c3efc55d6bbb2377d89cd81bc691e4f8910dfe11 Mon Sep 17 00:00:00 2001 From: David Navia Date: Mon, 24 Jun 2024 17:38:31 +0200 Subject: [PATCH 047/340] Add fci base test --- satpy/tests/reader_tests/test_fci_base.py | 46 +++++++++++++++++++++++ 1 file changed, 46 insertions(+) create mode 100644 satpy/tests/reader_tests/test_fci_base.py diff --git a/satpy/tests/reader_tests/test_fci_base.py b/satpy/tests/reader_tests/test_fci_base.py new file mode 100644 index 0000000000..41ac956b67 --- /dev/null +++ b/satpy/tests/reader_tests/test_fci_base.py @@ -0,0 +1,46 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2017 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . + +"""FCI base reader tests package.""" + +# import datetime as dt +import unittest + +from satpy.readers.fci_base import calculate_area_extent +from satpy.tests.utils import make_dataid + + +class TestCalculateAreaExtent(unittest.TestCase): + """Test TestCalculateAreaExtent.""" + + def test_fun(self): + """Test function for TestCalculateAreaExtent.""" + dataset_id = make_dataid(name="dummmy", resolution=2000.) 
+ + area_dict = { + "nlines": 5568, + "ncols": 5568, + "line_step": dataset_id["resolution"], + "column_step": dataset_id["resolution"], + } + + area_extent = calculate_area_extent(area_dict) + + expected = (-5568000.0, 5568000.0, 5568000.0, -5568000.0) + + assert area_extent == expected From d8905baf48244b5ace4ac3a787f41bd6a9a831d7 Mon Sep 17 00:00:00 2001 From: BENR0 Date: Wed, 17 Jul 2024 12:26:21 +0200 Subject: [PATCH 048/340] refactor: remove Optional type string --- satpy/scene.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/scene.py b/satpy/scene.py index d2a8b0d002..03bf09ce56 100644 --- a/satpy/scene.py +++ b/satpy/scene.py @@ -1082,7 +1082,7 @@ def to_xarray_dataset(self, datasets=None, compat="minimal"): Parameters: datasets (list): List of products to include in the :class:`xarray.Dataset` - compat (Optional[str]): + compat (str): How to compare variables with the same name for conflicts. See :func:`xarray.merge` for possible options. Defaults to "minimal" which drops conflicting variables. 
From 3da840dc6e9a8366bd344dc9d97009d5d229435b Mon Sep 17 00:00:00 2001 From: BENR0 Date: Wed, 17 Jul 2024 13:07:03 +0200 Subject: [PATCH 049/340] fix: order of dataarrays --- satpy/tests/scene_tests/test_conversions.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/scene_tests/test_conversions.py b/satpy/tests/scene_tests/test_conversions.py index f8d4c0a0cb..2ee8fb8f73 100644 --- a/satpy/tests/scene_tests/test_conversions.py +++ b/satpy/tests/scene_tests/test_conversions.py @@ -185,7 +185,7 @@ def test_to_xarray_dataset_with_conflicting_variables(self): xrds = scn.to_xarray_dataset(compat="override") assert isinstance(xrds, xr.Dataset) assert "acq_time" in xrds.coords - xr.testing.assert_equal(xrds["acq_time"], ds["acq_time"]) + xr.testing.assert_equal(xrds["acq_time"], ds2["acq_time"]) @pytest.fixture() def multi_area_scn(self): From c924d9db7834e0bb3017faf11e31f0796ed8b0a3 Mon Sep 17 00:00:00 2001 From: Gerrit Holl Date: Mon, 5 Aug 2024 17:17:43 +0200 Subject: [PATCH 050/340] Include gradient/axisintercept for mode p NinJo wants that gradient=1 and axisintercept=0 for images of mode p. It shall be so. 
--- satpy/tests/writer_tests/test_ninjogeotiff.py | 8 ++++---- satpy/writers/ninjogeotiff.py | 6 ++++-- 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/satpy/tests/writer_tests/test_ninjogeotiff.py b/satpy/tests/writer_tests/test_ninjogeotiff.py index e05150a571..38f6a83ff3 100644 --- a/satpy/tests/writer_tests/test_ninjogeotiff.py +++ b/satpy/tests/writer_tests/test_ninjogeotiff.py @@ -577,8 +577,8 @@ def test_write_and_read_file_P(test_image_small_arctic_P, tmp_path): test_image_small_arctic_P, filename=fn, fill_value=255, - PhysicUnit="N/A", - PhysicValue="N/A", + PhysicUnit="satdata", + PhysicValue="satdata", SatelliteNameID=6400014, ChannelID=900015, DataType="PPRN", @@ -591,8 +591,8 @@ def test_write_and_read_file_P(test_image_small_arctic_P, tmp_path): tgs = src.tags() assert tgs["ninjo_FileName"] == fn assert tgs["ninjo_DataSource"] == "dowsing rod" - assert "ninjo_Gradient" not in tgs - assert "ninjo_AxisIntercept" not in tgs + assert tgs["ninjo_Gradient"] == "1.0" + assert tgs["ninjo_AxisIntercept"] == "0.0" def test_write_and_read_file_units( diff --git a/satpy/writers/ninjogeotiff.py b/satpy/writers/ninjogeotiff.py index 5f88cc52ed..cf7174c51c 100644 --- a/satpy/writers/ninjogeotiff.py +++ b/satpy/writers/ninjogeotiff.py @@ -74,11 +74,13 @@ NinJo has a functionality to read the corresponding quantity (example: brightness temperature or reflectance). To make this possible, the writer adds the tags ``Gradient`` and ``AxisIntercept``. Those tags are added if -and only if the image has mode ``L`` or ``LA`` and ``PhysicUnit`` is not set +and only if the image has mode ``L``, ``P``, or ``LA`` and ``PhysicUnit`` is not set to ``"N/A"``. In other words, to suppress those tags for images with mode ``L`` or ``LA`` (for example, for the composite ``vis_with_ir``, where the physical interpretation of individual pixels is lost), one should set ``PhysicUnit`` to ``"N/A"``, ``"n/a"``, ``"1"``, or ``""`` (empty string). 
+If the image has mode ``P``, ``Gradient`` is set to ``1.0`` and ``AxisIntercept`` +to ``0.0`` (as expected by NinJo). """ import copy @@ -236,7 +238,7 @@ def _fix_units(self, image, quantity, unit): def _check_include_scale_offset(self, image, unit): """Check if scale-offset tags should be included.""" - if image.mode.startswith("L") and unit.lower() not in ("n/a", "1", ""): + if image.mode[0] in "LP" and unit.lower() not in ("n/a", "1", ""): return True return False From 3181a80dd74f0f0dbd2801f690355b601be4c622 Mon Sep 17 00:00:00 2001 From: Gerrit Holl Date: Thu, 8 Aug 2024 16:14:29 +0200 Subject: [PATCH 051/340] Simulate palettized-p-mode in test In the ninjogeotiff writer, when testing p-mode images, add an enhancement history such as palettize would add, to get a more realistic test. --- satpy/tests/writer_tests/test_ninjogeotiff.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/satpy/tests/writer_tests/test_ninjogeotiff.py b/satpy/tests/writer_tests/test_ninjogeotiff.py index 38f6a83ff3..e937a682b5 100644 --- a/satpy/tests/writer_tests/test_ninjogeotiff.py +++ b/satpy/tests/writer_tests/test_ninjogeotiff.py @@ -20,6 +20,7 @@ import datetime import logging import os +from unittest.mock import Mock import dask.array as da import numpy as np @@ -247,6 +248,12 @@ def test_image_small_arctic_P(test_area_tiny_stereographic_wgs84): "start_time": datetime.datetime(2027, 8, 2, 8, 20), "area": test_area_tiny_stereographic_wgs84, "mode": "P"}) + # simulate an enhancement history such as palettize may add + arr.attrs["enhancement_history"] = [ + {"scale": np.float64(0.01), + "offset": np.float64(0.0), + "colormap": Mock()}] + return to_image(arr) From cb4f64639fd06b9507425dba8e8a9f8047fa9100 Mon Sep 17 00:00:00 2001 From: Gerrit Holl Date: Thu, 8 Aug 2024 16:31:40 +0200 Subject: [PATCH 052/340] Force scale/offse to 1/0 for ninjogeotiff mode p For ninjogeotiff mode p, force scale/offset to be 1 and 0, respectively. 
As a workaround for https://github.com/pytroll/satpy/issues/2300, do not check image.mode, but use image.data.attrs["mode"] instead. --- satpy/writers/ninjogeotiff.py | 26 +++++++++++++++++--------- 1 file changed, 17 insertions(+), 9 deletions(-) diff --git a/satpy/writers/ninjogeotiff.py b/satpy/writers/ninjogeotiff.py index cf7174c51c..2aadb0a367 100644 --- a/satpy/writers/ninjogeotiff.py +++ b/satpy/writers/ninjogeotiff.py @@ -206,11 +206,18 @@ def save_image( # noqa: D417 overviews_minsize=overviews_minsize, overviews_resampling=overviews_resampling, tags={**(tags or {}), **ninjo_tags}, - scale_offset_tags=(self.scale_offset_tag_names - if self._check_include_scale_offset(image, PhysicUnit) - else None), + scale_offset_tags=self._get_scale_offset_tags(image, PhysicUnit), **gdal_opts) + def _get_scale_offset_tags(self, image, unit): + """Get scale offset tags (tuple or dict).""" + if self._check_include_scale_offset(image, unit): + # image.mode cannot be trusted https://github.com/pytroll/satpy/issues/2300 + if image.data.attrs["mode"][0] == "P": + return dict(zip(self.scale_offset_tag_names, (1, 0))) + return self.scale_offset_tag_names + return None # explicit is better than implicit + def _fix_units(self, image, quantity, unit): """Adapt units between °C and K. 
@@ -238,7 +245,7 @@ def _fix_units(self, image, quantity, unit): def _check_include_scale_offset(self, image, unit): """Check if scale-offset tags should be included.""" - if image.mode[0] in "LP" and unit.lower() not in ("n/a", "1", ""): + if image.data.attrs["mode"][0] in "LP" and unit.lower() not in ("n/a", "1", ""): return True return False @@ -380,16 +387,17 @@ def get_central_meridian(self): def get_color_depth(self): """Return the color depth.""" - if self.image.mode in ("L", "P"): + # image.mode cannot be trusted https://github.com/pytroll/satpy/issues/2300 + if self.image.data.attrs["mode"] in ("L", "P"): return 8 - if self.image.mode in ("LA", "PA"): + if self.image.data.attrs["mode"] in ("LA", "PA"): return 16 - if self.image.mode == "RGB": + if self.image.data.attrs["mode"] == "RGB": return 24 - if self.image.mode == "RGBA": + if self.image.data.attrs["mode"] == "RGBA": return 32 raise ValueError( - f"Unsupported image mode: {self.image.mode:s}") + f"Unsupported image mode: {self.image.data.attrs['mode']:s}") # Set unix epoch here explicitly, because datetime.timestamp() is # apparently not supported on Windows. From 2440dc0a9ca253aa7e3468ac22473b0c86449461 Mon Sep 17 00:00:00 2001 From: Gerrit Holl Date: Thu, 8 Aug 2024 18:01:23 +0200 Subject: [PATCH 053/340] Use data attribute fallback conditionally only The fallback/workaround to use image.data.attrs["mode"] instead of image.mode can and should only be used after to_image has been called. Before to_image has been called, the former is unavailable and the latter is reliable, I think. 
--- satpy/writers/ninjogeotiff.py | 19 +++++++++++-------- 1 file changed, 11 insertions(+), 8 deletions(-) diff --git a/satpy/writers/ninjogeotiff.py b/satpy/writers/ninjogeotiff.py index 2aadb0a367..1d5cfb69ac 100644 --- a/satpy/writers/ninjogeotiff.py +++ b/satpy/writers/ninjogeotiff.py @@ -213,7 +213,11 @@ def _get_scale_offset_tags(self, image, unit): """Get scale offset tags (tuple or dict).""" if self._check_include_scale_offset(image, unit): # image.mode cannot be trusted https://github.com/pytroll/satpy/issues/2300 - if image.data.attrs["mode"][0] == "P": + try: + mod = image.data.attrs["mode"] + except KeyError: + mod = image.mode + if mod == "P": return dict(zip(self.scale_offset_tag_names, (1, 0))) return self.scale_offset_tag_names return None # explicit is better than implicit @@ -245,7 +249,7 @@ def _fix_units(self, image, quantity, unit): def _check_include_scale_offset(self, image, unit): """Check if scale-offset tags should be included.""" - if image.data.attrs["mode"][0] in "LP" and unit.lower() not in ("n/a", "1", ""): + if image.mode[0] in "LP" and unit.lower() not in ("n/a", "1", ""): return True return False @@ -387,17 +391,16 @@ def get_central_meridian(self): def get_color_depth(self): """Return the color depth.""" - # image.mode cannot be trusted https://github.com/pytroll/satpy/issues/2300 - if self.image.data.attrs["mode"] in ("L", "P"): + if self.image.mode in ("L", "P"): return 8 - if self.image.data.attrs["mode"] in ("LA", "PA"): + if self.image.mode in ("LA", "PA"): return 16 - if self.image.data.attrs["mode"] == "RGB": + if self.image.mode == "RGB": return 24 - if self.image.data.attrs["mode"] == "RGBA": + if self.image.mode == "RGBA": return 32 raise ValueError( - f"Unsupported image mode: {self.image.data.attrs['mode']:s}") + f"Unsupported image mode: {self.image.mode:s}") # Set unix epoch here explicitly, because datetime.timestamp() is # apparently not supported on Windows. 
From c1cd334b345fdf1d5f0c123fcf215ded16f421ef Mon Sep 17 00:00:00 2001 From: bkremmli Date: Tue, 3 Sep 2024 07:46:09 +0000 Subject: [PATCH 054/340] sync/merge with fork diffs --- satpy/readers/mviri_l1b_fiduceo_nc.py | 20 +++++++++++-------- .../reader_tests/test_mviri_l1b_fiduceo_nc.py | 4 ++-- 2 files changed, 14 insertions(+), 10 deletions(-) diff --git a/satpy/readers/mviri_l1b_fiduceo_nc.py b/satpy/readers/mviri_l1b_fiduceo_nc.py index 942d01fe71..1e096d6d67 100644 --- a/satpy/readers/mviri_l1b_fiduceo_nc.py +++ b/satpy/readers/mviri_l1b_fiduceo_nc.py @@ -452,24 +452,28 @@ def is_high_resol(resolution): class DatasetWrapper: """Helper class for accessing the dataset.""" - def __init__(self, nc, decode_nc=True): + def __init__(self, nc): """Wrap the given dataset.""" self.nc = nc - if decode_nc is True: - self._decode_cf() - self._fix_duplicate_dimensions(self.nc) - self.nc = self._chunk(self.nc) + self._decode_cf() + self._fix_duplicate_dimensions(self.nc) + self.nc = self._chunk(self.nc) def _decode_cf(self): # remove time before decoding and add again. 
+ time_dims, time = self._decode_time() + self.nc = self.nc.drop_vars(time.name) + self.nc = xr.decode_cf(self.nc) + self.nc[time.name] = (time_dims, time.values) + + def _decode_time(self): time = self.get_time() time_dims = self.nc[time.name].dims time = xr.where(time == time.attrs["_FillValue"], np.datetime64("NaT"), (time + time.attrs["add_offset"]).astype("datetime64[s]").astype("datetime64[ns]")) - self.nc = self.nc.drop_vars(time.name) - self.nc = xr.decode_cf(self.nc) - self.nc[time.name] = (time_dims, time.values) + + return (time_dims, time) def _fix_duplicate_dimensions(self, nc): nc.variables["covariance_spectral_response_function_vis"].dims = ("srf_size_1", "srf_size_2") diff --git a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py index b07b79740c..573cfa4e07 100644 --- a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py +++ b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py @@ -584,7 +584,7 @@ def test_fix_duplicate_dimensions(self): data_vars={"covariance_spectral_response_function_vis": (("srf_size", "srf_size"), [[1, 2], [3, 4]])} ) - foo_ds = DatasetWrapper(foo, decode_nc=False) + foo_ds = DatasetWrapper(foo) foo_ds._fix_duplicate_dimensions(foo_ds.nc) foo_exp = xr.Dataset( @@ -628,7 +628,7 @@ def test_reassign_coords(self): "x": [.3, .4] } ) - ds = DatasetWrapper(nc, decode_nc=False) + ds = DatasetWrapper(nc) foo = ds["foo"] xr.testing.assert_equal(foo, foo_exp) From abf916ed312df5e149c356be1e68bd3c0a3d4441 Mon Sep 17 00:00:00 2001 From: bkremmli Date: Tue, 3 Sep 2024 09:29:16 +0000 Subject: [PATCH 055/340] adapt changes after xarray release 2024.7.0: include chunks with opening dataset; also rename dimension "channel" --- satpy/readers/mviri_l1b_fiduceo_nc.py | 23 +++++++++++------------ 1 file changed, 11 insertions(+), 12 deletions(-) diff --git a/satpy/readers/mviri_l1b_fiduceo_nc.py b/satpy/readers/mviri_l1b_fiduceo_nc.py index 1e096d6d67..16e300729b 100644 --- 
a/satpy/readers/mviri_l1b_fiduceo_nc.py +++ b/satpy/readers/mviri_l1b_fiduceo_nc.py @@ -162,7 +162,9 @@ from satpy.readers._geos_area import get_area_definition, get_area_extent, sampling_to_lfac_cfac from satpy.readers.file_handlers import BaseFileHandler +from satpy.utils import get_chunk_size_limit +CHUNK_SIZE = get_chunk_size_limit() EQUATOR_RADIUS = 6378140.0 POLE_RADIUS = 6356755.0 ALTITUDE = 42164000.0 - EQUATOR_RADIUS @@ -405,6 +407,7 @@ def interp_acq_time(time2d, target_y): """ # Compute mean timestamp per scanline time = time2d.mean(dim="x") + # If required, repeat timestamps in y-direction to obtain higher # resolution y = time.coords["y"].values @@ -458,7 +461,7 @@ def __init__(self, nc): self._decode_cf() self._fix_duplicate_dimensions(self.nc) - self.nc = self._chunk(self.nc) + def _decode_cf(self): # remove time before decoding and add again. @@ -478,17 +481,9 @@ def _decode_time(self): def _fix_duplicate_dimensions(self, nc): nc.variables["covariance_spectral_response_function_vis"].dims = ("srf_size_1", "srf_size_2") self.nc = nc.drop_dims("srf_size") - - def _chunk(self, nc): - - (chunk_size_y, chunk_size_x) = nc.variables["quality_pixel_bitmask"].encoding["chunksizes"] - chunks = { - "x": chunk_size_x, - "y": chunk_size_y, - "x_ir_wv": chunk_size_x, - "y_ir_wv": chunk_size_y - } - return nc.chunk(chunks) + nc.variables["channel_correlation_matrix_independent"].dims = ("channel_1", "channel_2") + nc.variables["channel_correlation_matrix_structured"].dims = ("channel_1", "channel_2") + self.nc = nc.drop_dims("channel") @property def attrs(self): @@ -586,6 +581,10 @@ def __init__(self, filename, filename_info, filetype_info, # noqa: D417 self.mask_bad_quality = mask_bad_quality nc_raw = xr.open_dataset( filename, + chunks={"x": CHUNK_SIZE, + "y": CHUNK_SIZE, + "x_ir_wv": CHUNK_SIZE, + "y_ir_wv": CHUNK_SIZE}, decode_cf=False, decode_times=False, mask_and_scale=False, From 5822e5045612334f4cf49882ebead6218e66f9b0 Mon Sep 17 00:00:00 2001 From: 
bkremmli Date: Tue, 3 Sep 2024 13:39:56 +0000 Subject: [PATCH 056/340] removed chunks part from test_fix_duplicate_dimensions; adapted tests; moved decoding into separate method of DatasetWrapper --- satpy/readers/mviri_l1b_fiduceo_nc.py | 10 +++++-- .../reader_tests/test_mviri_l1b_fiduceo_nc.py | 29 ++++++++++--------- 2 files changed, 23 insertions(+), 16 deletions(-) diff --git a/satpy/readers/mviri_l1b_fiduceo_nc.py b/satpy/readers/mviri_l1b_fiduceo_nc.py index 16e300729b..e989abe185 100644 --- a/satpy/readers/mviri_l1b_fiduceo_nc.py +++ b/satpy/readers/mviri_l1b_fiduceo_nc.py @@ -459,9 +459,6 @@ def __init__(self, nc): """Wrap the given dataset.""" self.nc = nc - self._decode_cf() - self._fix_duplicate_dimensions(self.nc) - def _decode_cf(self): # remove time before decoding and add again. @@ -535,6 +532,11 @@ def _cleanup_attrs(self, ds): # satpy warnings. ds.attrs.pop("ancillary_variables", None) + def prepare_input(self): + """Decode data and rename duplicate dimensions.""" + self._decode_cf() + self._fix_duplicate_dimensions(self.nc) + def get_time(self): """Get time coordinate. @@ -591,6 +593,8 @@ def __init__(self, filename, filename_info, filetype_info, # noqa: D417 ) self.nc = DatasetWrapper(nc_raw) + # decode times and fix duplicate dimensions + self.nc.prepare_input() # Projection longitude is not provided in the file, read it from the # filename. 
diff --git a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py index 573cfa4e07..ae7dd1e3f5 100644 --- a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py +++ b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py @@ -312,6 +312,8 @@ def fixture_fake_dataset(): "sub_satellite_latitude_start": np.nan, "sub_satellite_latitude_end": 0.1, "covariance_spectral_response_function_vis": (("srf_size", "srf_size"), cov), + "channel_correlation_matrix_independent": (("channel", "channel"), cov), + "channel_correlation_matrix_structured": (("channel", "channel"), cov) }, coords={ "y": [1, 2, 3, 4], @@ -319,8 +321,7 @@ def fixture_fake_dataset(): "y_ir_wv": [1, 2], "x_ir_wv": [1, 2], "y_tie": [1, 2], - "x_tie": [1, 2] - + "x_tie": [1, 2], }, attrs={"foo": "bar"} ) @@ -577,26 +578,28 @@ class TestDatasetWrapper: def test_fix_duplicate_dimensions(self): """Test the renaming of duplicate dimensions. - If duplicate dimensions are within the Dataset, opening the datasets with chunks throws an error. - Thus, the chunking needs to be done after opening the dataset and after the dimensions are renamed. + If duplicate dimensions are within the Dataset, opening the datasets with chunks throws a warning. + The dimensions need to be renamed. 
""" foo = xr.Dataset( - data_vars={"covariance_spectral_response_function_vis": - (("srf_size", "srf_size"), [[1, 2], [3, 4]])} + data_vars={ + "covariance_spectral_response_function_vis": (("srf_size", "srf_size"), [[1, 2], [3, 4]]), + "channel_correlation_matrix_independent": (("channel", "channel"), [[1, 2], [3, 4]]), + "channel_correlation_matrix_structured": (("channel", "channel"), [[1, 2], [3, 4]]) + } ) foo_ds = DatasetWrapper(foo) foo_ds._fix_duplicate_dimensions(foo_ds.nc) foo_exp = xr.Dataset( - data_vars={"covariance_spectral_response_function_vis": - (("srf_size_1", "srf_size_2"), [[1, 2], [3, 4]])} + data_vars={ + "covariance_spectral_response_function_vis": (("srf_size_1", "srf_size_2"), [[1, 2], [3, 4]]), + "channel_correlation_matrix_independent": (("channel_1", "channel_2"), [[1, 2], [3, 4]]), + "channel_correlation_matrix_structured": (("channel_1", "channel_2"), [[1, 2], [3, 4]]) + } ) - try: - foo_ds.nc.chunk("auto") - xr.testing.assert_allclose(foo_ds.nc, foo_exp) - except ValueError: - pytest.fail("Chunking failed.") + xr.testing.assert_allclose(foo_ds.nc, foo_exp) def test_reassign_coords(self): """Test reassigning of coordinates. 
From ee0b31fa48827923eecb5d6ec546c63081d999e2 Mon Sep 17 00:00:00 2001 From: clement laplace Date: Tue, 3 Sep 2024 14:34:09 +0000 Subject: [PATCH 057/340] feature : Add the flash_age compositor into the /etc/composites/li.yaml files --- satpy/etc/composites/li.yaml | 31 ++++++++++++++++++++----------- 1 file changed, 20 insertions(+), 11 deletions(-) diff --git a/satpy/etc/composites/li.yaml b/satpy/etc/composites/li.yaml index 4d3cc88e95..19e879590c 100644 --- a/satpy/etc/composites/li.yaml +++ b/satpy/etc/composites/li.yaml @@ -10,69 +10,78 @@ composites: compositor: !!python/name:satpy.composites.SingleBandCompositor standard_name: acc_flash prerequisites: - - flash_accumulation + - flash_accumulation acc_flash_alpha: description: Composite to colorise the AF product using the flash accumulation with transparency compositor: !!python/name:satpy.composites.SingleBandCompositor standard_name: acc_flash_alpha prerequisites: - - flash_accumulation + - flash_accumulation acc_flash_area: description: Composite to colorise the AFA product using the flash area compositor: !!python/name:satpy.composites.SingleBandCompositor standard_name: acc_flash_area prerequisites: - - accumulated_flash_area + - accumulated_flash_area acc_flash_area_alpha: description: Composite to colorise the AFA product using the flash area with transparency compositor: !!python/name:satpy.composites.SingleBandCompositor standard_name: acc_flash_area_alpha prerequisites: - - accumulated_flash_area + - accumulated_flash_area acc_flash_radiance: description: Composite to colorise the AFR product using the flash radiance compositor: !!python/name:satpy.composites.SingleBandCompositor standard_name: lightning_radiance prerequisites: - - flash_radiance + - flash_radiance acc_flash_radiance_alpha: description: Composite to colorise the AFR product using the flash radiance with transparency compositor: !!python/name:satpy.composites.SingleBandCompositor standard_name: lightning_radiance_alpha 
prerequisites: - - flash_radiance + - flash_radiance flash_radiance: description: Composite to colorise the LFL product using the flash radiance compositor: !!python/name:satpy.composites.SingleBandCompositor standard_name: lightning_radiance prerequisites: - - radiance + - radiance flash_radiance_alpha: description: Composite to colorise the LFL product using the flash radiance with transparency compositor: !!python/name:satpy.composites.SingleBandCompositor standard_name: lightning_radiance_alpha prerequisites: - - radiance + - radiance group_radiance: description: Composite to colorise the LGR product using the flash radiance compositor: !!python/name:satpy.composites.SingleBandCompositor standard_name: lightning_radiance prerequisites: - - radiance + - radiance group_radiance_alpha: description: Composite to colorise the LGR product using the flash radiance with transparency compositor: !!python/name:satpy.composites.SingleBandCompositor standard_name: lightning_radiance_alpha prerequisites: - - radiance + - radiance # DEPRECATED, USE acc_flash_area INSTEAD flash_area: compositor: !!python/name:satpy.composites.SingleBandCompositor standard_name: acc_flash_area prerequisites: - - accumulated_flash_area + - accumulated_flash_area + + flash_age: + description: Composite to colorise the LFL product using the flash time + compositor: !!python/name:satpy.composites.lightning.LightningTimeCompositor + standard_name: lightning_time + time_range: 60 # range for colormap in minutes + reference_time: end_time + prerequisites: + - flash_time From f41e06864bd6e5b4661b41a43a10804dffb6ae60 Mon Sep 17 00:00:00 2001 From: bkremmli Date: Wed, 4 Sep 2024 08:45:03 +0000 Subject: [PATCH 058/340] moved code _get_projection_longitude() --- satpy/readers/mviri_l1b_fiduceo_nc.py | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/satpy/readers/mviri_l1b_fiduceo_nc.py b/satpy/readers/mviri_l1b_fiduceo_nc.py index e989abe185..a313c92a6e 100644 --- 
a/satpy/readers/mviri_l1b_fiduceo_nc.py +++ b/satpy/readers/mviri_l1b_fiduceo_nc.py @@ -596,14 +596,8 @@ def __init__(self, filename, filename_info, filetype_info, # noqa: D417 # decode times and fix duplicate dimensions self.nc.prepare_input() - # Projection longitude is not provided in the file, read it from the - # filename. - if "." in str(filename_info["projection_longitude"]): - self.projection_longitude = float(filename_info["projection_longitude"]) - else: - self.projection_longitude = ( - float(filename_info["projection_longitude"][:2] + "." + filename_info["projection_longitude"][2:]) - ) + self.projection_longitude = self._get_projection_longitude(filename_info) + self.calib_coefs = self._get_calib_coefs() self._get_angles = functools.lru_cache(maxsize=8)( @@ -613,6 +607,13 @@ def __init__(self, filename, filename_info, filetype_info, # noqa: D417 self._get_acq_time_uncached ) + def _get_projection_longitude(self, filename_info): + """Read projection longitude from filename as it is not provided in the file.""" + if "." 
in str(filename_info["projection_longitude"]): + return float(filename_info["projection_longitude"]) + else: + return float(filename_info["projection_longitude"]) / 100 + def get_dataset(self, dataset_id, dataset_info): """Get the dataset.""" name = dataset_id["name"] From 2ffffc31d0ab0645ab4f5d40a4e7305f51d08126 Mon Sep 17 00:00:00 2001 From: clement laplace Date: Wed, 4 Sep 2024 08:45:32 +0000 Subject: [PATCH 059/340] feature : Add the flash_age composite for li datas --- satpy/etc/enhancements/li.yaml | 94 +++++++++++++++++++++------------- 1 file changed, 59 insertions(+), 35 deletions(-) diff --git a/satpy/etc/enhancements/li.yaml b/satpy/etc/enhancements/li.yaml index 49009808eb..9aaa5c4a0b 100644 --- a/satpy/etc/enhancements/li.yaml +++ b/satpy/etc/enhancements/li.yaml @@ -1,60 +1,84 @@ enhancements: -# note that the colormap parameters are tuned for 5 minutes of files accumulation -# these are tentative recipes that will need to be further tuned as we gain experience with LI data + # note that the colormap parameters are tuned for 5 minutes of files accumulation + # these are tentative recipes that will need to be further tuned as we gain experience with LI data acc_flash: standard_name: acc_flash operations: - - name: colorize - method: !!python/name:satpy.enhancements.colorize - kwargs: - palettes: - - {colors: ylorrd, min_value: 0, max_value: 5} + - name: colorize + method: !!python/name:satpy.enhancements.colorize + kwargs: + palettes: + - { colors: ylorrd, min_value: 0, max_value: 5 } acc_flash_alpha: standard_name: acc_flash_alpha operations: - - name: colorize - method: !!python/name:satpy.enhancements.colorize - kwargs: - palettes: - - {colors: ylorrd, min_value: 0, max_value: 5, - min_alpha: 120, max_alpha: 180} + - name: colorize + method: !!python/name:satpy.enhancements.colorize + kwargs: + palettes: + - { + colors: ylorrd, + min_value: 0, + max_value: 5, + min_alpha: 120, + max_alpha: 180, + } acc_flash_area: standard_name: acc_flash_area 
operations: - - name: colorize - method: !!python/name:satpy.enhancements.colorize - kwargs: - palettes: - - {colors: ylorrd, min_value: 0, max_value: 20} + - name: colorize + method: !!python/name:satpy.enhancements.colorize + kwargs: + palettes: + - { colors: ylorrd, min_value: 0, max_value: 20 } acc_flash_area_alpha: standard_name: acc_flash_area_alpha operations: - - name: colorize - method: !!python/name:satpy.enhancements.colorize - kwargs: - palettes: - - {colors: ylorrd, min_value: 0, max_value: 20, - min_alpha: 120, max_alpha: 180} + - name: colorize + method: !!python/name:satpy.enhancements.colorize + kwargs: + palettes: + - { + colors: ylorrd, + min_value: 0, + max_value: 20, + min_alpha: 120, + max_alpha: 180, + } lightning_radiance: standard_name: lightning_radiance operations: - - name: colorize - method: !!python/name:satpy.enhancements.colorize - kwargs: - palettes: - - {colors: ylorrd, min_value: 0, max_value: 1000} + - name: colorize + method: !!python/name:satpy.enhancements.colorize + kwargs: + palettes: + - { colors: ylorrd, min_value: 0, max_value: 1000 } lightning_radiance_alpha: standard_name: lightning_radiance_alpha operations: - - name: colorize - method: !!python/name:satpy.enhancements.colorize - kwargs: - palettes: - - {colors: ylorrd, min_value: 0, max_value: 1000, - min_alpha: 120, max_alpha: 180} + - name: colorize + method: !!python/name:satpy.enhancements.colorize + kwargs: + palettes: + - { + colors: ylorrd, + min_value: 0, + max_value: 1000, + min_alpha: 120, + max_alpha: 180, + } + + lightning_time: + standard_name: lightning_time + operations: + - name: colorize + method: !!python/name:satpy.enhancements.colorize + kwargs: + palettes: + - { colors: ylorrd, min_value: 0, max_value: 1 } From 1e7db424bbea71b865575c470637141251d4c337 Mon Sep 17 00:00:00 2001 From: clement laplace Date: Wed, 4 Sep 2024 09:02:42 +0000 Subject: [PATCH 060/340] feature : Add the satpy/composites/lightning.py that enables flash_age feature --- 
satpy/composites/lightning.py | 99 +++++++++++++++++++++++++++++++++++ 1 file changed, 99 insertions(+) create mode 100644 satpy/composites/lightning.py diff --git a/satpy/composites/lightning.py b/satpy/composites/lightning.py new file mode 100644 index 0000000000..d9c63bbdae --- /dev/null +++ b/satpy/composites/lightning.py @@ -0,0 +1,99 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2019 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . +"""Composite classes for the LI instrument.""" + +import logging + +import numpy as np +import xarray as xr + +from satpy.composites import CompositeBase + +LOG = logging.getLogger(__name__) + + +class LightningTimeCompositor(CompositeBase): + """Class used to create the flash_age compositor useful for lightning event visualisation. + + The data used are dates related to the lightning event that should be normalised between + 0 and 1. The value 1 corresponds to the latest lightning event and the value 0 corresponds + to the latest lightning event - time_range. The time_range is defined in the satpy/etc/composites/li.yaml + and is in minutes. 
+ """ + def __init__(self, name, prerequisites=None, optional_prerequisites=None, **kwargs): + """Initialisation of the class.""" + self.name = name + super().__init__(name, prerequisites, optional_prerequisites, **kwargs) + # Get the time_range which is in minute + self.time_range = self.attrs["time_range"] + self.standard_name = self.attrs["standard_name"] + + def _normalize_time(self,data:xr.DataArray,attrs:dict)->xr.DataArray: + """Normalised the time in the range between [end_time,end_time - time_range]. + + The range of the normalised data is between 0 and 1 where 0 corresponds to the date end_time - time_range + and 1 to the end_time. Where end_times represent the latest lightning event and time_range is the range of + time you want to see the event.The dates that are earlier to end_time - time_range are removed. + + Args: + data (xr.DataArray): datas containing dates to be normalised + attrs (dict): Attributes suited to the flash_age composite + + Returns: + xr.DataArray: Normalised time + """ + # Compute the maximum time value + end_time = data.max().values + # Compute the minimum time value based on the time range + begin_time = end_time - np.timedelta64(self.time_range, "m") + # Drop values that are bellow begin_time + data = data.where(data > begin_time, drop=True) + # Normalize the time values + normalized_data = (data - begin_time) / (end_time - begin_time) + # Ensure the result is still an xarray.DataArray + return xr.DataArray(normalized_data, dims=data.dims, coords=data.coords,attrs=attrs) + + + @staticmethod + def _update_missing_metadata(existing_attrs, new_attrs): + for key, val in new_attrs.items(): + if key not in existing_attrs and val is not None: + existing_attrs[key] = val + + def _redefine_metadata(self,attrs:dict)->dict: + """Modify the standard_name and name metadatas. 
+
+ Args:
+ attrs (dict): data's attributes
+
+ Returns:
+ dict: updated attributes
+ """
+ attrs["name"] = self.standard_name
+ attrs["standard_name"] =self.standard_name
+ # Attributes to describe the values range
+ return attrs
+
+
+ def __call__(self,projectables, nonprojectables=None, **attrs):
+ """Normalise the dates."""
+ data = projectables[0]
+ new_attrs = data.attrs.copy()
+ self._update_missing_metadata(new_attrs, attrs)
+ new_attrs = self._redefine_metadata(new_attrs)
+ return self._normalize_time(data,new_attrs) From 6b079b1707b7ae242259f58bb6eacca14acf8208 Mon Sep 17 00:00:00 2001 From: bkremmli Date: Wed, 4 Sep 2024 09:36:33 +0000 Subject: [PATCH 061/340] fix test_fix_duplicate_dimensions --- satpy/readers/mviri_l1b_fiduceo_nc.py | 52 +++++++++---------- .../reader_tests/test_mviri_l1b_fiduceo_nc.py | 13 +++-- 2 files changed, 34 insertions(+), 31 deletions(-) diff --git a/satpy/readers/mviri_l1b_fiduceo_nc.py b/satpy/readers/mviri_l1b_fiduceo_nc.py index a313c92a6e..e0297f7ec3 100644 --- a/satpy/readers/mviri_l1b_fiduceo_nc.py +++ b/satpy/readers/mviri_l1b_fiduceo_nc.py @@ -459,28 +459,11 @@ def __init__(self, nc): """Wrap the given dataset.""" self.nc = nc + # decode data + self._decode_cf() + # rename duplicate dimensions + self._fix_duplicate_dimensions(self.nc) - def _decode_cf(self): - # remove time before decoding and add again. 
- time_dims, time = self._decode_time() - self.nc = self.nc.drop_vars(time.name) - self.nc = xr.decode_cf(self.nc) - self.nc[time.name] = (time_dims, time.values) - - def _decode_time(self): - time = self.get_time() - time_dims = self.nc[time.name].dims - time = xr.where(time == time.attrs["_FillValue"], np.datetime64("NaT"), - (time + time.attrs["add_offset"]).astype("datetime64[s]").astype("datetime64[ns]")) - - return (time_dims, time) - - def _fix_duplicate_dimensions(self, nc): - nc.variables["covariance_spectral_response_function_vis"].dims = ("srf_size_1", "srf_size_2") - self.nc = nc.drop_dims("srf_size") - nc.variables["channel_correlation_matrix_independent"].dims = ("channel_1", "channel_2") - nc.variables["channel_correlation_matrix_structured"].dims = ("channel_1", "channel_2") - self.nc = nc.drop_dims("channel") @property def attrs(self): @@ -532,10 +515,27 @@ def _cleanup_attrs(self, ds): # satpy warnings. ds.attrs.pop("ancillary_variables", None) - def prepare_input(self): - """Decode data and rename duplicate dimensions.""" - self._decode_cf() - self._fix_duplicate_dimensions(self.nc) + def _decode_cf(self): + # remove time before decoding and add again. 
+ time_dims, time = self._decode_time() + self.nc = self.nc.drop_vars(time.name) + self.nc = xr.decode_cf(self.nc) + self.nc[time.name] = (time_dims, time.values) + + def _decode_time(self): + time = self.get_time() + time_dims = self.nc[time.name].dims + time = xr.where(time == time.attrs["_FillValue"], np.datetime64("NaT"), + (time + time.attrs["add_offset"]).astype("datetime64[s]").astype("datetime64[ns]")) + + return (time_dims, time) + + def _fix_duplicate_dimensions(self, nc): + nc.variables["covariance_spectral_response_function_vis"].dims = ("srf_size_1", "srf_size_2") + self.nc = nc.drop_dims("srf_size") + nc.variables["channel_correlation_matrix_independent"].dims = ("channel_1", "channel_2") + nc.variables["channel_correlation_matrix_structured"].dims = ("channel_1", "channel_2") + self.nc = nc.drop_dims("channel") def get_time(self): """Get time coordinate. @@ -593,8 +593,6 @@ def __init__(self, filename, filename_info, filetype_info, # noqa: D417 ) self.nc = DatasetWrapper(nc_raw) - # decode times and fix duplicate dimensions - self.nc.prepare_input() self.projection_longitude = self._get_projection_longitude(filename_info) diff --git a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py index ae7dd1e3f5..9ced865eed 100644 --- a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py +++ b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py @@ -581,21 +581,26 @@ def test_fix_duplicate_dimensions(self): If duplicate dimensions are within the Dataset, opening the datasets with chunks throws a warning. The dimensions need to be renamed. 
""" + foo_time = 60*60 + foo_time_exp = np.datetime64("1970-01-01 01:00").astype("datetime64[ns]") + foo = xr.Dataset( data_vars={ "covariance_spectral_response_function_vis": (("srf_size", "srf_size"), [[1, 2], [3, 4]]), - "channel_correlation_matrix_independent": (("channel", "channel"), [[1, 2], [3, 4]]), - "channel_correlation_matrix_structured": (("channel", "channel"), [[1, 2], [3, 4]]) + "channel_correlation_matrix_independent": (("channel", "channel"), [[1, 2], [3, 4]]), + "channel_correlation_matrix_structured": (("channel", "channel"), [[1, 2], [3, 4]]), + "time_ir_wv": (("y_ir_wv", "x_ir_wv"), [[foo_time, foo_time], [foo_time, foo_time]], + {"_FillValue": 4294967295, "add_offset": 0}) } ) foo_ds = DatasetWrapper(foo) - foo_ds._fix_duplicate_dimensions(foo_ds.nc) foo_exp = xr.Dataset( data_vars={ "covariance_spectral_response_function_vis": (("srf_size_1", "srf_size_2"), [[1, 2], [3, 4]]), "channel_correlation_matrix_independent": (("channel_1", "channel_2"), [[1, 2], [3, 4]]), - "channel_correlation_matrix_structured": (("channel_1", "channel_2"), [[1, 2], [3, 4]]) + "channel_correlation_matrix_structured": (("channel_1", "channel_2"), [[1, 2], [3, 4]]), + "time_ir_wv": (("y_ir_wv", "x_ir_wv"), [[foo_time_exp, foo_time_exp], [foo_time_exp, foo_time_exp]]) } ) From efe81f2fe1956a1dca074ba7da9b24b695fea5db Mon Sep 17 00:00:00 2001 From: clement laplace Date: Wed, 4 Sep 2024 11:35:46 +0000 Subject: [PATCH 062/340] modify : use the end_time attributes instead of the maximum values --- satpy/composites/lightning.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/satpy/composites/lightning.py b/satpy/composites/lightning.py index d9c63bbdae..b11a791abb 100644 --- a/satpy/composites/lightning.py +++ b/satpy/composites/lightning.py @@ -42,6 +42,7 @@ def __init__(self, name, prerequisites=None, optional_prerequisites=None, **kwar # Get the time_range which is in minute self.time_range = self.attrs["time_range"] self.standard_name = 
self.attrs["standard_name"] + self.reference_time = self.attrs["reference_time"] def _normalize_time(self,data:xr.DataArray,attrs:dict)->xr.DataArray: """Normalised the time in the range between [end_time,end_time - time_range]. @@ -58,7 +59,7 @@ def _normalize_time(self,data:xr.DataArray,attrs:dict)->xr.DataArray: xr.DataArray: Normalised time """ # Compute the maximum time value - end_time = data.max().values + end_time = np.array(np.datetime64(data.attrs[self.reference_time])) # Compute the minimum time value based on the time range begin_time = end_time - np.timedelta64(self.time_range, "m") # Drop values that are bellow begin_time From d0f379b4fff28084f62aa51e40a056c5fca69d79 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Tue, 3 Sep 2024 14:56:21 +0200 Subject: [PATCH 063/340] Remove rogue unit assignment in MSI SAFE reader --- satpy/readers/msi_safe.py | 1 - 1 file changed, 1 deletion(-) diff --git a/satpy/readers/msi_safe.py b/satpy/readers/msi_safe.py index b041436a74..8bab073c86 100644 --- a/satpy/readers/msi_safe.py +++ b/satpy/readers/msi_safe.py @@ -83,7 +83,6 @@ def get_dataset(self, key, info): if proj is None: return proj.attrs = info.copy() - proj.attrs["units"] = "%" proj.attrs["platform_name"] = self.platform_name return proj From 39ed676eb1310d8a04648ad66a4e0ac42965f5b4 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Tue, 3 Sep 2024 15:04:10 +0200 Subject: [PATCH 064/340] Add check for S2/MSI processing level to catch method needed for radiance calculation. 
--- satpy/readers/msi_safe.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/satpy/readers/msi_safe.py b/satpy/readers/msi_safe.py index 8bab073c86..11a0312059 100644 --- a/satpy/readers/msi_safe.py +++ b/satpy/readers/msi_safe.py @@ -92,7 +92,9 @@ def _read_from_file(self, key): if key["calibration"] == "reflectance": return self._mda.calibrate_to_reflectances(proj, self._channel) if key["calibration"] == "radiance": - return self._mda.calibrate_to_radiances(proj, self._channel) + # The calibration procedure differs for L1B and L1C/L2A data! + if self.process_level == "L1B": + return self._mda.calibrate_to_radiances(proj, self._channel) if key["calibration"] == "counts": return self._mda._sanitize_data(proj) if key["calibration"] in ["aerosol_thickness", "water_vapor"]: @@ -218,7 +220,7 @@ def saturated(self): """Get the saturated value from the metadata.""" return self.special_values["SATURATED"] - def calibrate_to_radiances(self, data, band_name): + def calibrate_to_radiances_l1b(self, data, band_name): """Calibrate *data* to radiance using the radiometric information for the metadata.""" physical_gain = self.physical_gain(band_name) data = self._sanitize_data(data) From acf2088c50ef3cdd2f78cca1aa2e688aa5bb50c1 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Tue, 3 Sep 2024 17:19:41 +0200 Subject: [PATCH 065/340] Debugging MSI SAFE radiances --- satpy/readers/msi_safe.py | 46 ++++++++++++++++++++++++++++++++++++++- 1 file changed, 45 insertions(+), 1 deletion(-) diff --git a/satpy/readers/msi_safe.py b/satpy/readers/msi_safe.py index 11a0312059..ea3f44cbf8 100644 --- a/satpy/readers/msi_safe.py +++ b/satpy/readers/msi_safe.py @@ -94,7 +94,14 @@ def _read_from_file(self, key): if key["calibration"] == "radiance": # The calibration procedure differs for L1B and L1C/L2A data! 
if self.process_level == "L1B": - return self._mda.calibrate_to_radiances(proj, self._channel) + # For L1B the radiances can be directly computed from the digital counts. + return self._mda.calibrate_to_radiances_l1b(proj, self._channel) + else: + # For higher level data, radiances must be computed from the reflectance. + # sza = self._tile_mda.get_dataset() + tmp_refl = self._mda.calibrate_to_reflectances(proj, self._channel) + return self._mda.calibrate_to_radiances(tmp_refl, self._channel) + if key["calibration"] == "counts": return self._mda._sanitize_data(proj) if key["calibration"] in ["aerosol_thickness", "water_vapor"]: @@ -203,6 +210,30 @@ def band_offsets(self): band_offsets = {} return band_offsets + def solar_irradiance(self, band_name): + """Get the solar irradiance for a given *band_name*.""" + band_index = self._band_index(band_name) + return self.solar_irradiances[band_index] + + @cached_property + def solar_irradiances(self): + """Get the TOA solar irradiance values from the metadata.""" + irrads = self.root.find(".//Solar_Irradiance_List") + if irrads is not None: + solar_irrad = {int(irr.attrib["bandId"]): float(irr.text) for irr in irrads} + else: + solar_irrad = {} + return solar_irrad + + @cached_property + def sun_earth_dist(self): + """Get the sun-earth distance from the metadata.""" + sed = self.root.find(".//U") + if sed is not None: + return float(sed.text) + else: + return -1 + @cached_property def special_values(self): """Get the special values from the metadata.""" @@ -226,6 +257,19 @@ def calibrate_to_radiances_l1b(self, data, band_name): data = self._sanitize_data(data) return (data + self.band_offset(band_name)) / physical_gain + def calibrate_to_radiances(self, data, band_name): + """Calibrate *data* to radiance using the radiometric information for the metadata.""" + sed = self.sun_earth_dist + if sed < 0.5 or sed > 1.5: + raise ValueError(f"Sun-Earth distance is incorrect in the metadata: {sed}") + solar_irrad_band = 
self.solar_irradiance(band_name) + + solar_zenith = 32.029 + + solar_zenith = np.deg2rad(solar_zenith) + + return (data / 100.) * solar_irrad_band * np.cos(solar_zenith) / (np.pi * sed * sed) + def physical_gain(self, band_name): """Get the physical gain for a given *band_name*.""" band_index = self._band_index(band_name) From 976b642710435750611a96e576899aeff097a3d9 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Thu, 5 Sep 2024 13:01:32 +0200 Subject: [PATCH 066/340] Bugfix for Sentinel-2 L1C radiance calculation, initial tests. --- satpy/readers/msi_safe.py | 33 ++++++++++++++++----- satpy/tests/reader_tests/test_msi_safe.py | 35 +++++++++++++---------- 2 files changed, 46 insertions(+), 22 deletions(-) diff --git a/satpy/readers/msi_safe.py b/satpy/readers/msi_safe.py index ea3f44cbf8..33dca3a3f0 100644 --- a/satpy/readers/msi_safe.py +++ b/satpy/readers/msi_safe.py @@ -59,13 +59,15 @@ class SAFEMSIL1C(BaseFileHandler): """File handler for SAFE MSI files (jp2).""" - def __init__(self, filename, filename_info, filetype_info, mda, tile_mda, mask_saturated=True): + def __init__(self, filename, filename_info, filetype_info, mda, tile_mda, + mask_saturated=True, solar_ang_method="mean"): """Initialize the reader.""" super(SAFEMSIL1C, self).__init__(filename, filename_info, filetype_info) del mask_saturated self._channel = filename_info["band_name"] self.process_level = filename_info["process_level"] + self.solar_ang_method = solar_ang_method self._tile_mda = tile_mda self._mda = mda self.platform_name = PLATFORMS[filename_info["fmission_id"]] @@ -98,9 +100,17 @@ def _read_from_file(self, key): return self._mda.calibrate_to_radiances_l1b(proj, self._channel) else: # For higher level data, radiances must be computed from the reflectance. - # sza = self._tile_mda.get_dataset() + # By default, we use the mean solar angles so that the user does not need to resample, + # but the user can also choose to use the solar angles from the tile metadata. 
+ # This is on a coarse grid so for most bands must be resampled before use. + if self.solar_ang_method == "mean": + zen, azi = self._tile_mda.mean_sun_angles + else: + from satpy import DataQuery + dq = DataQuery(name="solar_zenith_angle") + zen = self._tile_mda.get_dataset(dq, {}) tmp_refl = self._mda.calibrate_to_reflectances(proj, self._channel) - return self._mda.calibrate_to_radiances(tmp_refl, self._channel) + return self._mda.calibrate_to_radiances(tmp_refl, zen, self._channel) if key["calibration"] == "counts": return self._mda._sanitize_data(proj) @@ -257,15 +267,13 @@ def calibrate_to_radiances_l1b(self, data, band_name): data = self._sanitize_data(data) return (data + self.band_offset(band_name)) / physical_gain - def calibrate_to_radiances(self, data, band_name): + def calibrate_to_radiances(self, data, solar_zenith, band_name): """Calibrate *data* to radiance using the radiometric information for the metadata.""" sed = self.sun_earth_dist if sed < 0.5 or sed > 1.5: raise ValueError(f"Sun-Earth distance is incorrect in the metadata: {sed}") solar_irrad_band = self.solar_irradiance(band_name) - solar_zenith = 32.029 - solar_zenith = np.deg2rad(solar_zenith) return (data / 100.) 
* solar_irrad_band * np.cos(solar_zenith) / (np.pi * sed * sed) @@ -313,7 +321,18 @@ def get_area_def(self, dsid): cols, rows, area_extent) - return area + return (area) + + @cached_property + def mean_sun_angles(self): + """Get the mean sun angles from the metadata.""" + angs = self.root.find(".//Mean_Sun_Angle") + if angs is not None: + zen = float(angs.find("ZENITH_ANGLE").text) + azi = float(angs.find("AZIMUTH_ANGLE").text) + return zen, azi + else: + return -999, -999 @cached_property def projection(self): diff --git a/satpy/tests/reader_tests/test_msi_safe.py b/satpy/tests/reader_tests/test_msi_safe.py index 84828f4ecf..d51c520865 100644 --- a/satpy/tests/reader_tests/test_msi_safe.py +++ b/satpy/tests/reader_tests/test_msi_safe.py @@ -1442,11 +1442,12 @@ def jp2_builder(process_level, band_name, mask_saturated=True): process_level=process_level.replace("old", "")) xml_fh = xml_builder(process_level, mask_saturated, band_name)[0] tile_xml_fh = mock.create_autospec(SAFEMSITileMDXML)(BytesIO(TILE_XMLS[PROCESS_LEVELS.index(process_level)]), - filename_info, mock.MagicMock()) + filename_info, mock.MagicMock()) tile_xml_fh.start_time.return_value = tilemd_dt jp2_fh = SAFEMSIL1C("somefile", filename_info, mock.MagicMock(), xml_fh, tile_xml_fh) return jp2_fh + def make_alt_dataid(**items): """Make a DataID with modified keys.""" from satpy.dataset.dataid import DataID, ModifierTuple, WavelengthRange @@ -1578,26 +1579,26 @@ def setup_method(self): [ ("L1C", True, "B01", ([[[np.nan, -9.99, -9.98, -9.97], [-9.96, 0, 645.34, np.inf]]], - [[[np.nan, -251.584265, -251.332429, -251.080593], - [-250.828757, 0., 16251.99095, np.inf]]], + [[[0.0, 5.60879825, 11.2175965, 16.8263948,], + [22.435193, 5608.79825, 367566.985, 367572.593]]], [[[np.nan, 1, 2, 3], [4, 1000, 65534, np.inf]]])), ("L1C", False, "B10", ([[[np.nan, -19.99, -19.98, -19.97], [-19.96, -10, 635.34, 635.35]]], - [[[np.nan, -35.465976, -35.448234, -35.430493], - [-35.412751, -17.741859, 1127.211275, 
1127.229017]]], + [[[0.0, 1.09348075, 2.1869615, 3.28044225], + [4.373923, 1093.48075, 71660.1675, 71661.2609]]], [[[np.nan, 1, 2, 3], [4, 1000, 65534, 65535]]])), ("oldL1C", True, "B01", ([[[np.nan, 0.01, 0.02, 0.03], [0.04, 10, 655.34, np.inf]]], - [[[np.nan, 0.251836101, 0.503672202, 0.755508303], - [1.00734440, 251.836101, 16503.8271, np.inf]]], + [[[0.0, 5.60879825, 11.2175965, 16.8263948,], + [22.435193, 5608.79825, 367566.985, 367572.593]]], [[[np.nan, 1, 2, 3], [4, 1000, 65534, np.inf]]])), ("L2A", False, "B03", ([[[np.nan, -9.99, -9.98, -9.97], [-9.96, 0, 645.34, 645.35]]], - [[[np.nan, -238.571863, -238.333052, -238.094241], - [-237.855431, 0, 15411.407995, 15411.646806]]], + [[[0.0, 5.25188783, 10.5037757, 15.7556635,], + [21.0075513, 5251.88783, 344177.217, 344182.469]]], [[[np.nan, 1, 2, 3], [4, 1000, 65534, 65535]]])), ]) @@ -1606,10 +1607,11 @@ def test_xml_calibration(self, process_level, mask_saturated, band_name, expecte xml_fh = xml_builder(process_level, mask_saturated)[0] res1 = xml_fh.calibrate_to_reflectances(self.fake_data, band_name) - res2 = xml_fh.calibrate_to_radiances(self.fake_data, band_name) + res2 = xml_fh.calibrate_to_radiances(self.fake_data, 25.6, band_name) res3 = xml_fh._sanitize_data(self.fake_data) results = (res1, res2, res3) + np.testing.assert_allclose(results, expected) @pytest.mark.parametrize(("process_level", "mask_saturated", "band_name", "expected"), @@ -1655,11 +1657,14 @@ def test_calibration_and_masking(self, mask_saturated, dataset_name, calibration jp2_fh = jp2_builder("L2A", dataset_name, mask_saturated) with mock.patch("xarray.open_dataset", return_value=self.fake_data): - res = jp2_fh.get_dataset(make_alt_dataid(name=dataset_name, calibration=calibration), info=dict()) - if res is not None: - np.testing.assert_allclose(res, expected) - else: - assert res is expected + with mock.patch("satpy.readers.msi_safe.SAFEMSITileMDXML.mean_sun_angles", + new_callable=mock.PropertyMock) as mocker: + mocker.return_value = 
(25, 8) + res = jp2_fh.get_dataset(make_alt_dataid(name=dataset_name, calibration=calibration), info=dict()) + if res is not None: + np.testing.assert_allclose(res, expected) + else: + assert res is expected @pytest.mark.parametrize(("process_level", "band_name", "dataset_name"), [ From c1348f6bc527f5615a25c467e349004c572fcdb3 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Fri, 6 Sep 2024 15:42:49 +0200 Subject: [PATCH 067/340] Update S2/MSI reader to use only the gridded SZA for radiance calculation. --- satpy/readers/msi_safe.py | 13 ++++--------- 1 file changed, 4 insertions(+), 9 deletions(-) diff --git a/satpy/readers/msi_safe.py b/satpy/readers/msi_safe.py index 33dca3a3f0..a7c1828ba4 100644 --- a/satpy/readers/msi_safe.py +++ b/satpy/readers/msi_safe.py @@ -60,14 +60,13 @@ class SAFEMSIL1C(BaseFileHandler): """File handler for SAFE MSI files (jp2).""" def __init__(self, filename, filename_info, filetype_info, mda, tile_mda, - mask_saturated=True, solar_ang_method="mean"): + mask_saturated=True): """Initialize the reader.""" super(SAFEMSIL1C, self).__init__(filename, filename_info, filetype_info) del mask_saturated self._channel = filename_info["band_name"] self.process_level = filename_info["process_level"] - self.solar_ang_method = solar_ang_method self._tile_mda = tile_mda self._mda = mda self.platform_name = PLATFORMS[filename_info["fmission_id"]] @@ -103,12 +102,8 @@ def _read_from_file(self, key): # By default, we use the mean solar angles so that the user does not need to resample, # but the user can also choose to use the solar angles from the tile metadata. # This is on a coarse grid so for most bands must be resampled before use. 
- if self.solar_ang_method == "mean": - zen, azi = self._tile_mda.mean_sun_angles - else: - from satpy import DataQuery - dq = DataQuery(name="solar_zenith_angle") - zen = self._tile_mda.get_dataset(dq, {}) + dq = dict(name="solar_zenith_angle", resolution=key["resolution"]) + zen = self._tile_mda.get_dataset(dq, dict(xml_tag="Sun_Angles_Grid/Zenith")) tmp_refl = self._mda.calibrate_to_reflectances(proj, self._channel) return self._mda.calibrate_to_radiances(tmp_refl, zen, self._channel) @@ -321,7 +316,7 @@ def get_area_def(self, dsid): cols, rows, area_extent) - return (area) + return area @cached_property def mean_sun_angles(self): From 4309738ae3e7b930c3375dab4f16760e00a6d269 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Fri, 6 Sep 2024 16:36:35 +0200 Subject: [PATCH 068/340] Prepare S2/MSI reader for availability of L1B files, but raise error if user passes L1B data. --- satpy/readers/msi_safe.py | 31 +++++++++++-------------------- 1 file changed, 11 insertions(+), 20 deletions(-) diff --git a/satpy/readers/msi_safe.py b/satpy/readers/msi_safe.py index a7c1828ba4..1599bcd8fe 100644 --- a/satpy/readers/msi_safe.py +++ b/satpy/readers/msi_safe.py @@ -67,6 +67,8 @@ def __init__(self, filename, filename_info, filetype_info, mda, tile_mda, del mask_saturated self._channel = filename_info["band_name"] self.process_level = filename_info["process_level"] + if self.process_level not in ["L1C", "L2A"]: + raise ValueError(f"Unsupported process level: {self.process_level}") self._tile_mda = tile_mda self._mda = mda self.platform_name = PLATFORMS[filename_info["fmission_id"]] @@ -94,10 +96,7 @@ def _read_from_file(self, key): return self._mda.calibrate_to_reflectances(proj, self._channel) if key["calibration"] == "radiance": # The calibration procedure differs for L1B and L1C/L2A data! - if self.process_level == "L1B": - # For L1B the radiances can be directly computed from the digital counts. 
- return self._mda.calibrate_to_radiances_l1b(proj, self._channel) - else: + if self.process_level in ["L1C", "L2A"]: # For higher level data, radiances must be computed from the reflectance. # By default, we use the mean solar angles so that the user does not need to resample, # but the user can also choose to use the solar angles from the tile metadata. @@ -106,6 +105,10 @@ def _read_from_file(self, key): zen = self._tile_mda.get_dataset(dq, dict(xml_tag="Sun_Angles_Grid/Zenith")) tmp_refl = self._mda.calibrate_to_reflectances(proj, self._channel) return self._mda.calibrate_to_radiances(tmp_refl, zen, self._channel) + #else: + # For L1B the radiances can be directly computed from the digital counts. + #return self._mda.calibrate_to_radiances_l1b(proj, self._channel) + if key["calibration"] == "counts": return self._mda._sanitize_data(proj) @@ -162,7 +165,7 @@ class SAFEMSIMDXML(SAFEMSIXMLMetadata): def calibrate_to_reflectances(self, data, band_name): """Calibrate *data* using the radiometric information for the metadata.""" - quantification = int(self.root.find(".//QUANTIFICATION_VALUE").text) if self.process_level == "L1C" else \ + quantification = int(self.root.find(".//QUANTIFICATION_VALUE").text) if self.process_level[:2] == "L1" else \ int(self.root.find(".//BOA_QUANTIFICATION_VALUE").text) data = self._sanitize_data(data) return (data + self.band_offset(band_name)) / quantification * 100 @@ -170,7 +173,7 @@ def calibrate_to_reflectances(self, data, band_name): def calibrate_to_atmospheric(self, data, band_name): """Calibrate L2A AOT/WVP product.""" atmospheric_bands = ["AOT", "WVP"] - if self.process_level == "L1C": + if self.process_level == "L1C" or self.process_level == "L1B": return elif self.process_level == "L2A" and band_name not in atmospheric_bands: return @@ -207,7 +210,7 @@ def band_indices(self): @cached_property def band_offsets(self): """Get the band offsets from the metadata.""" - offsets = self.root.find(".//Radiometric_Offset_List") if 
self.process_level == "L1C" else \ + offsets = self.root.find(".//Radiometric_Offset_List") if self.process_level[:2] == "L1" else \ self.root.find(".//BOA_ADD_OFFSET_VALUES_LIST") if offsets is not None: band_offsets = {int(off.attrib["band_id"]): float(off.text) for off in offsets} @@ -236,8 +239,7 @@ def sun_earth_dist(self): sed = self.root.find(".//U") if sed is not None: return float(sed.text) - else: - return -1 + return -1 @cached_property def special_values(self): @@ -318,17 +320,6 @@ def get_area_def(self, dsid): area_extent) return area - @cached_property - def mean_sun_angles(self): - """Get the mean sun angles from the metadata.""" - angs = self.root.find(".//Mean_Sun_Angle") - if angs is not None: - zen = float(angs.find("ZENITH_ANGLE").text) - azi = float(angs.find("AZIMUTH_ANGLE").text) - return zen, azi - else: - return -999, -999 - @cached_property def projection(self): """Get the geographic projection.""" From 485ef66f3691b53daee7ab091081399f14cd6569 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Fri, 6 Sep 2024 16:38:13 +0200 Subject: [PATCH 069/340] Update S2/MSI tests for radiance calculations. 
--- satpy/tests/reader_tests/test_msi_safe.py | 49 ++++++++++++++--------- 1 file changed, 30 insertions(+), 19 deletions(-) diff --git a/satpy/tests/reader_tests/test_msi_safe.py b/satpy/tests/reader_tests/test_msi_safe.py index d51c520865..4970767ea8 100644 --- a/satpy/tests/reader_tests/test_msi_safe.py +++ b/satpy/tests/reader_tests/test_msi_safe.py @@ -1435,15 +1435,21 @@ def xml_builder(process_level, mask_saturated=True, band_name=None): return xml_fh, xml_tile_fh -def jp2_builder(process_level, band_name, mask_saturated=True): +def jp2_builder(process_level, band_name, mask_saturated=True, test_l1b=False): """Build fake SAFE jp2 image file.""" from satpy.readers.msi_safe import SAFEMSIL1C, SAFEMSITileMDXML filename_info = dict(observation_time=fname_dt, dtile_number=None, band_name=band_name, fmission_id="S2A", process_level=process_level.replace("old", "")) + if test_l1b: + filename_info["process_level"] = "L1B" + xml_fh = xml_builder(process_level, mask_saturated, band_name)[0] tile_xml_fh = mock.create_autospec(SAFEMSITileMDXML)(BytesIO(TILE_XMLS[PROCESS_LEVELS.index(process_level)]), filename_info, mock.MagicMock()) tile_xml_fh.start_time.return_value = tilemd_dt + tile_xml_fh.get_dataset.return_value = xr.DataArray([[22.5, 23.8], + [22.5, 24.8]], + dims=["x", "y"]) jp2_fh = SAFEMSIL1C("somefile", filename_info, mock.MagicMock(), xml_fh, tile_xml_fh) return jp2_fh @@ -1642,29 +1648,28 @@ class TestSAFEMSIL1C: def setup_method(self): """Set up the test.""" self.fake_data = xr.Dataset({"band_data": xr.DataArray([[[0, 1], [65534, 65535]]], dims=["band", "x", "y"])}) + self.fake_data = xr.Dataset({"band_data": xr.DataArray([[[0, 1], [65534, 65535]]], dims=["band", "x", "y"])}) - @pytest.mark.parametrize(("mask_saturated", "dataset_name", "calibration", "expected"), + @pytest.mark.parametrize(("process_level", "mask_saturated", "dataset_name", "calibration", "expected"), [ - (False, "B01", "reflectance", [[np.nan, -9.99], [645.34, 645.35]]), - (True, "B02", 
"radiance", [[np.nan, -265.970568], [17181.325973, np.inf]]), - (True, "B03", "counts", [[np.nan, 1], [65534, np.inf]]), - (False, "AOT", "aerosol_thickness", [[np.nan, 0.001], [65.534, 65.535]]), - (True, "WVP", "water_vapor", [[np.nan, 0.001], [65.534, np.inf]]), - (True, "SNOW", "water_vapor", None), + ("L2A", False, "B01", "reflectance", [[np.nan, -9.99], [645.34, 645.35]]), + ("L1C", True, "B02", "radiance", [[np.nan, -59.439197], [3877.121602, np.inf]]), + ("L2A", True, "B03", "counts", [[np.nan, 1], [65534, np.inf]]), + ("L2A", False, "AOT", "aerosol_thickness", [[np.nan, 0.001], [65.534, 65.535]]), + ("L2A", True, "WVP", "water_vapor", [[np.nan, 0.001], [65.534, np.inf]]), + ("L2A", True, "SNOW", "water_vapor", None), ]) - def test_calibration_and_masking(self, mask_saturated, dataset_name, calibration, expected): + def test_calibration_and_masking(self, process_level, mask_saturated, dataset_name, calibration, expected): """Test that saturated is masked with inf when requested and that calibration is performed.""" - jp2_fh = jp2_builder("L2A", dataset_name, mask_saturated) + jp2_fh = jp2_builder(process_level, dataset_name, mask_saturated) with mock.patch("xarray.open_dataset", return_value=self.fake_data): - with mock.patch("satpy.readers.msi_safe.SAFEMSITileMDXML.mean_sun_angles", - new_callable=mock.PropertyMock) as mocker: - mocker.return_value = (25, 8) - res = jp2_fh.get_dataset(make_alt_dataid(name=dataset_name, calibration=calibration), info=dict()) - if res is not None: - np.testing.assert_allclose(res, expected) - else: - assert res is expected + res = jp2_fh.get_dataset(make_alt_dataid(name=dataset_name, calibration=calibration, resolution="20"), + info=dict()) + if res is not None: + np.testing.assert_allclose(res, expected) + else: + assert res is expected @pytest.mark.parametrize(("process_level", "band_name", "dataset_name"), [ @@ -1682,7 +1687,13 @@ def test_filename_dsname_mismatch(self, process_level, band_name, dataset_name): assert res1 
is None assert res2 is None - def test_start_time(self): + def test_start_end_time(self): """Test that the correct start time is returned.""" jp2_fh = jp2_builder("L1C", "B01") assert tilemd_dt == jp2_fh.start_time + assert tilemd_dt == jp2_fh.end_time + + def test_l1b_error(self): + """We can't process L1B data yet, so check an error is raised.""" + with pytest.raises(ValueError, match="Unsupported process level: L1B"): + jp2_builder("L1C", "B01", test_l1b=True) From 38a05d1d5d38ebec88d299d0145f73dacf006c17 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Fri, 6 Sep 2024 16:40:28 +0200 Subject: [PATCH 070/340] Add docs note to S2/MSI specifying that L1B data is not currently supported. --- satpy/readers/msi_safe.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/satpy/readers/msi_safe.py b/satpy/readers/msi_safe.py index 1599bcd8fe..3c75169744 100644 --- a/satpy/readers/msi_safe.py +++ b/satpy/readers/msi_safe.py @@ -15,7 +15,7 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . -"""SAFE MSI L1C reader. +"""SAFE MSI L1C/L2A reader. The MSI data has a special value for saturated pixels. By default, these pixels are set to np.inf, but for some applications it might be desirable @@ -32,6 +32,10 @@ https://sentinels.copernicus.eu/documents/247904/685211/S2-PDGS-TAS-DI-PSD-V14.9.pdf/3d3b6c9c-4334-dcc4-3aa7-f7c0deffbaf7?t=1643013091529 +NOTE: At present, L1B data is not supported. If the user needs radiance data instead of counts or reflectances, these +are retrieved by first calculating the reflectance and then working back to the radiance. L1B radiance data support +will be added once the data is published onto the Copernicus data ecosystem. 
+ """ import logging From 793fab06aab6c22dc2bf9c8566b10ff4d26ddb5e Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Mon, 9 Sep 2024 08:14:11 +0200 Subject: [PATCH 071/340] Initial Landsat L1 commit --- satpy/etc/readers/oli_tirs_l1_tif.yaml | 305 +++++++++++++++++++++++++ satpy/readers/oli_tirs_l1_tif.py | 32 +++ 2 files changed, 337 insertions(+) create mode 100644 satpy/etc/readers/oli_tirs_l1_tif.yaml create mode 100644 satpy/readers/oli_tirs_l1_tif.py diff --git a/satpy/etc/readers/oli_tirs_l1_tif.yaml b/satpy/etc/readers/oli_tirs_l1_tif.yaml new file mode 100644 index 0000000000..6f54c2eda9 --- /dev/null +++ b/satpy/etc/readers/oli_tirs_l1_tif.yaml @@ -0,0 +1,305 @@ +reader: + name: oli_tirs_l1_tif + short_name: OLI/TIRS L1 GeoTIFF + long_name: Landsat-8/9 OLI/TIRS L1 data in GeoTIFF format. + description: GeoTIFF reader for Landsat-8/9 OLI/TIRS L1 data. + status: Beta + supports_fsspec: false + sensors: [oli, tirs] + default_channels: [] + reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader + +file_types: + b01_granule: + file_reader: !!python/name:satpy.readers.oli_tirs_l1_tiff.OLITIRS_CH_Reader + file_patterns: ['{platform_type:1s}{data_type:{1s}{spacecraft_id:2s}_{process_level_correction:4s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_B1.TIF'] + requires: [l1_metadata] + b02_granule: + file_reader: !!python/name:satpy.readers.oli_tirs_l1_tiff.OLITIRS_CH_Reader + file_patterns: ['{platform_type:1s}{data_type:{1s}{spacecraft_id:2s}_{process_level_correction:4s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_B2.TIF'] + requires: [l1_metadata] + b03_granule: + file_reader: !!python/name:satpy.readers.oli_tirs_l1_tiff.OLITIRS_CH_Reader + file_patterns: ['{platform_type:1s}{data_type:{1s}{spacecraft_id:2s}_{process_level_correction:4s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_B3.TIF'] + requires: 
[l1_metadata] + b04_granule: + file_reader: !!python/name:satpy.readers.oli_tirs_l1_tiff.OLITIRS_CH_Reader + file_patterns: ['{platform_type:1s}{data_type:{1s}{spacecraft_id:2s}_{process_level_correction:4s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_B4.TIF'] + requires: [l1_metadata] + b05_granule: + file_reader: !!python/name:satpy.readers.oli_tirs_l1_tiff.OLITIRS_CH_Reader + file_patterns: ['{platform_type:1s}{data_type:{1s}{spacecraft_id:2s}_{process_level_correction:4s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_B5.TIF'] + requires: [l1_metadata] + b06_granule: + file_reader: !!python/name:satpy.readers.oli_tirs_l1_tiff.OLITIRS_CH_Reader + file_patterns: ['{platform_type:1s}{data_type:{1s}{spacecraft_id:2s}_{process_level_correction:4s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_B6.TIF'] + requires: [l1_metadata] + b07_granule: + file_reader: !!python/name:satpy.readers.oli_tirs_l1_tiff.OLITIRS_CH_Reader + file_patterns: ['{platform_type:1s}{data_type:{1s}{spacecraft_id:2s}_{process_level_correction:4s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_B7.TIF'] + requires: [l1_metadata] + b08_granule: + file_reader: !!python/name:satpy.readers.oli_tirs_l1_tiff.OLITIRS_CH_Reader + file_patterns: ['{platform_type:1s}{data_type:{1s}{spacecraft_id:2s}_{process_level_correction:4s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_B8.TIF'] + requires: [l1_metadata] + b09_granule: + file_reader: !!python/name:satpy.readers.oli_tirs_l1_tiff.OLITIRS_CH_Reader + file_patterns: ['{platform_type:1s}{data_type:{1s}{spacecraft_id:2s}_{process_level_correction:4s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_B9.TIF'] + requires: [l1_metadata] + b10_granule: + file_reader: 
!!python/name:satpy.readers.oli_tirs_l1_tiff.OLITIRS_CH_Reader + file_patterns: ['{platform_type:1s}{data_type:{1s}{spacecraft_id:2s}_{process_level_correction:4s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_B10.TIF'] + requires: [l1_metadata] + b11_granule: + file_reader: !!python/name:satpy.readers.oli_tirs_l1_tiff.OLITIRS_CH_Reader + file_patterns: ['{platform_type:1s}{data_type:{1s}{spacecraft_id:2s}_{process_level_correction:4s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_B11.TIF'] + requires: [l1_metadata] + sza_granule: + file_reader: !!python/name:satpy.readers.oli_tirs_l1_tiff.OLITIRS_CH_Reader + file_patterns: ['{platform_type:1s}{data_type:{1s}{spacecraft_id:2s}_{process_level_correction:4s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_SZA.TIF'] + requires: [l1_metadata] + saa_granule: + file_reader: !!python/name:satpy.readers.oli_tirs_l1_tiff.OLITIRS_CH_Reader + file_patterns: ['{platform_type:1s}{data_type:{1s}{spacecraft_id:2s}_{process_level_correction:4s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_SAA.TIF'] + requires: [l1_metadata] + vza_granule: + file_reader: !!python/name:satpy.readers.oli_tirs_l1_tiff.OLITIRS_CH_Reader + file_patterns: ['{platform_type:1s}{data_type:{1s}{spacecraft_id:2s}_{process_level_correction:4s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_VZA.TIF'] + requires: [l1_metadata] + vaa_granule: + file_reader: !!python/name:satpy.readers.oli_tirs_l1_tiff.OLITIRS_CH_Reader + file_patterns: ['{platform_type:1s}{data_type:{1s}{spacecraft_id:2s}_{process_level_correction:4s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_VAA.TIF'] + requires: [l1_metadata] + qa_granule: + file_reader: !!python/name:satpy.readers.oli_tirs_l1_tiff.OLITIRS_CH_Reader + 
file_patterns: ['{platform_type:1s}{data_type:{1s}{spacecraft_id:2s}_{process_level_correction:4s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_QA.TIF'] + requires: [l1_metadata] + qa_radsat_granule: + file_reader: !!python/name:satpy.readers.oli_tirs_l1_tiff.OLITIRS_CH_Reader + file_patterns: ['{platform_type:1s}{data_type:{1s}{spacecraft_id:2s}_{process_level_correction:4s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_QA_RADSAT.TIF'] + requires: [l1_metadata] + l1_metadata: + file_reader: !!python/name:satpy.readers.oli_tirs_l1_tiff.OLITIRS_MD_Reader + file_patterns: ['{platform_type:1s}{data_type:{1s}{spacecraft_id:2s}_{process_level_correction:4s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_MTL.xml'] + +datasets: + # Channels on the OLI instrument + B01: + name: B01 + sensor: oli + wavelength: [0.433, 0.443, 0.453] + resolution: 30 + calibration: + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" + file_type: b01_granule + B02: + name: B02 + sensor: oli + wavelength: [0.450, 0.482, 0.515] + resolution: 30 + calibration: + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" + file_type: b02_granule + B03: + name: B03 + sensor: oli + wavelength: [0.525, 0.565, 0.600] + resolution: 30 + calibration: + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" + file_type: b03_granule + B04: + name: B04 + sensor: oli + wavelength: [0.630, 0.660, 
0.680] + resolution: 30 + calibration: + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" + file_type: b04_granule + B05: + name: B05 + sensor: oli + wavelength: [0.845, 0.867, 0.885] + resolution: 30 + calibration: + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" + file_type: b05_granule + B06: + name: B06 + sensor: oli + wavelength: [1.560, 1.650, 1.660] + resolution: 30 + calibration: + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" + file_type: b06_granule + B07: + name: B07 + sensor: oli + wavelength: [2.100, 2.215, 2.300] + resolution: 30 + calibration: + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" + file_type: b07_granule + B08: + name: B08 + sensor: oli + wavelength: [0.500, 0.579, 0.680] + resolution: 15 + calibration: + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" + file_type: b03_granule + B09: + name: B09 + sensor: oli + wavelength: [1.360, 1.373, 1.390] + resolution: 30 + calibration: + reflectance: + standard_name: toa_bidirectional_reflectance + units: "%" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" + file_type: 
b09_granule + # Channels on the TIRS instrument + B10: + name: B10 + sensor: tirs + wavelength: [10.6, 10.888, 11.19] + resolution: 30 + calibration: + brightness_temperature: + standard_name: brightness_temperature + units: "K" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" + file_type: b10_granule + B11: + name: B11 + sensor: tirs + wavelength: [11.5, 11.981, 12.51] + resolution: 30 + calibration: + brightness_temperature: + standard_name: brightness_temperature + units: "K" + radiance: + standard_name: toa_outgoing_radiance_per_unit_wavelength + units: W m-2 um-1 sr-1 + counts: + standard_name: counts + units: "1" + file_type: b11_granule + + # QA Variables + qa: + name: qa + sensor: oli + resolution: 30 + file_type: qa_granule + qa_radsat: + name: qa_radsat + sensor: oli + resolution: 30 + file_type: qa_radsat_granule + + # Angles datasets + sza: + name: sza + sensor: oli + standard_name: solar_zenith_angle + resolution: 30 + units: "degrees" + file_type: sza_granule + saa: + name: saa + sensor: oli + standard_name: solar_azimuth_angle + resolution: 30 + units: "degrees" + file_type: saa_granule + vza: + name: vza + sensor: oli + standard_name: viewing_zenith_angle + resolution: 30 + units: "degrees" + file_type: vza_granule + vaa: + name: vaa + sensor: oli + standard_name: viewing_azimuth_angle + resolution: 30 + units: "degrees" + file_type: vaa_granule diff --git a/satpy/readers/oli_tirs_l1_tif.py b/satpy/readers/oli_tirs_l1_tif.py new file mode 100644 index 0000000000..78db296eba --- /dev/null +++ b/satpy/readers/oli_tirs_l1_tif.py @@ -0,0 +1,32 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2024 Satpy developers +# +# This file is part of satpy. 
+# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . +"""Landsat OLI/TIRS Level 1 reader. + +Details of the data format can be found here: + https://d9-wret.s3.us-west-2.amazonaws.com/assets/palladium/production/s3fs-public/atoms/files/LSDS-1822_Landsat8-9-OLI-TIRS-C2-L1-DFCB-v6.pdf + https://www.usgs.gov/landsat-missions/using-usgs-landsat-level-1-data-product + +""" + +import logging + + +logger = logging.getLogger(__name__) + +PLATFORMS = {"08": "Landsat-8", + "09": "Landsat-9"} From 15a575f2b8441efd5fd9dd9e663217c0dc7311bd Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Fri, 13 Sep 2024 21:54:50 +0200 Subject: [PATCH 072/340] Updates to Landsat L1 reader. 
--- satpy/etc/readers/oli_tirs_l1_tif.yaml | 218 +++++++++++++---------- satpy/readers/oli_tirs_l1_tif.py | 237 +++++++++++++++++++++++++ 2 files changed, 361 insertions(+), 94 deletions(-) diff --git a/satpy/etc/readers/oli_tirs_l1_tif.yaml b/satpy/etc/readers/oli_tirs_l1_tif.yaml index 6f54c2eda9..e27345c1a7 100644 --- a/satpy/etc/readers/oli_tirs_l1_tif.yaml +++ b/satpy/etc/readers/oli_tirs_l1_tif.yaml @@ -10,82 +10,98 @@ reader: reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader file_types: - b01_granule: - file_reader: !!python/name:satpy.readers.oli_tirs_l1_tiff.OLITIRS_CH_Reader - file_patterns: ['{platform_type:1s}{data_type:{1s}{spacecraft_id:2s}_{process_level_correction:4s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_B1.TIF'] + # Bands on the OLI subsystem + granule_b01: + file_reader: !!python/name:satpy.readers.oli_tirs_l1_tif.OLITIRSCHReader + file_patterns: ['{platform_type:1s}{data_type:1s}{spacecraft_id:2s}_{process_level_correction:4s}_{tilepath:3s}{tilerow:3s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_B1.TIF'] requires: [l1_metadata] - b02_granule: - file_reader: !!python/name:satpy.readers.oli_tirs_l1_tiff.OLITIRS_CH_Reader - file_patterns: ['{platform_type:1s}{data_type:{1s}{spacecraft_id:2s}_{process_level_correction:4s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_B2.TIF'] + + granule_b02: + file_reader: !!python/name:satpy.readers.oli_tirs_l1_tif.OLITIRSCHReader + file_patterns: ['{platform_type:1s}{data_type:1s}{spacecraft_id:2s}_{process_level_correction:4s}_{tilepath:3s}{tilerow:3s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_B2.TIF'] requires: [l1_metadata] - b03_granule: - file_reader: !!python/name:satpy.readers.oli_tirs_l1_tiff.OLITIRS_CH_Reader - file_patterns: 
['{platform_type:1s}{data_type:{1s}{spacecraft_id:2s}_{process_level_correction:4s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_B3.TIF'] + + granule_b03: + file_reader: !!python/name:satpy.readers.oli_tirs_l1_tif.OLITIRSCHReader + file_patterns: ['{platform_type:1s}{data_type:1s}{spacecraft_id:2s}_{process_level_correction:4s}_{tilepath:3s}{tilerow:3s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_B3.TIF'] requires: [l1_metadata] - b04_granule: - file_reader: !!python/name:satpy.readers.oli_tirs_l1_tiff.OLITIRS_CH_Reader - file_patterns: ['{platform_type:1s}{data_type:{1s}{spacecraft_id:2s}_{process_level_correction:4s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_B4.TIF'] + + granule_b04: + file_reader: !!python/name:satpy.readers.oli_tirs_l1_tif.OLITIRSCHReader + file_patterns: ['{platform_type:1s}{data_type:1s}{spacecraft_id:2s}_{process_level_correction:4s}_{tilepath:3s}{tilerow:3s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_B4.TIF'] requires: [l1_metadata] - b05_granule: - file_reader: !!python/name:satpy.readers.oli_tirs_l1_tiff.OLITIRS_CH_Reader - file_patterns: ['{platform_type:1s}{data_type:{1s}{spacecraft_id:2s}_{process_level_correction:4s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_B5.TIF'] + + granule_b05: + file_reader: !!python/name:satpy.readers.oli_tirs_l1_tif.OLITIRSCHReader + file_patterns: ['{platform_type:1s}{data_type:1s}{spacecraft_id:2s}_{process_level_correction:4s}_{tilepath:3s}{tilerow:3s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_B5.TIF'] requires: [l1_metadata] - b06_granule: - file_reader: !!python/name:satpy.readers.oli_tirs_l1_tiff.OLITIRS_CH_Reader - file_patterns: 
['{platform_type:1s}{data_type:{1s}{spacecraft_id:2s}_{process_level_correction:4s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_B6.TIF'] + + granule_b06: + file_reader: !!python/name:satpy.readers.oli_tirs_l1_tif.OLITIRSCHReader + file_patterns: ['{platform_type:1s}{data_type:1s}{spacecraft_id:2s}_{process_level_correction:4s}_{tilepath:3s}{tilerow:3s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_B6.TIF'] requires: [l1_metadata] - b07_granule: - file_reader: !!python/name:satpy.readers.oli_tirs_l1_tiff.OLITIRS_CH_Reader - file_patterns: ['{platform_type:1s}{data_type:{1s}{spacecraft_id:2s}_{process_level_correction:4s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_B7.TIF'] + + granule_b07: + file_reader: !!python/name:satpy.readers.oli_tirs_l1_tif.OLITIRSCHReader + file_patterns: ['{platform_type:1s}{data_type:1s}{spacecraft_id:2s}_{process_level_correction:4s}_{tilepath:3s}{tilerow:3s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_B7.TIF'] requires: [l1_metadata] - b08_granule: - file_reader: !!python/name:satpy.readers.oli_tirs_l1_tiff.OLITIRS_CH_Reader - file_patterns: ['{platform_type:1s}{data_type:{1s}{spacecraft_id:2s}_{process_level_correction:4s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_B8.TIF'] + + granule_b08: + file_reader: !!python/name:satpy.readers.oli_tirs_l1_tif.OLITIRSCHReader + file_patterns: ['{platform_type:1s}{data_type:1s}{spacecraft_id:2s}_{process_level_correction:4s}_{tilepath:3s}{tilerow:3s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_B8.TIF'] requires: [l1_metadata] - b09_granule: - file_reader: !!python/name:satpy.readers.oli_tirs_l1_tiff.OLITIRS_CH_Reader - file_patterns: 
['{platform_type:1s}{data_type:{1s}{spacecraft_id:2s}_{process_level_correction:4s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_B9.TIF'] + + granule_b09: + file_reader: !!python/name:satpy.readers.oli_tirs_l1_tif.OLITIRSCHReader + file_patterns: ['{platform_type:1s}{data_type:1s}{spacecraft_id:2s}_{process_level_correction:4s}_{tilepath:3s}{tilerow:3s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_B9.TIF'] requires: [l1_metadata] - b10_granule: - file_reader: !!python/name:satpy.readers.oli_tirs_l1_tiff.OLITIRS_CH_Reader - file_patterns: ['{platform_type:1s}{data_type:{1s}{spacecraft_id:2s}_{process_level_correction:4s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_B10.TIF'] + + # Bands on the TIRS subsystem + granule_b10: + file_reader: !!python/name:satpy.readers.oli_tirs_l1_tif.OLITIRSCHReader + file_patterns: ['{platform_type:1s}{data_type:1s}{spacecraft_id:2s}_{process_level_correction:4s}_{tilepath:3s}{tilerow:3s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_B10.TIF'] requires: [l1_metadata] - b11_granule: - file_reader: !!python/name:satpy.readers.oli_tirs_l1_tiff.OLITIRS_CH_Reader - file_patterns: ['{platform_type:1s}{data_type:{1s}{spacecraft_id:2s}_{process_level_correction:4s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_B11.TIF'] + + granule_b11: + file_reader: !!python/name:satpy.readers.oli_tirs_l1_tif.OLITIRSCHReader + file_patterns: ['{platform_type:1s}{data_type:1s}{spacecraft_id:2s}_{process_level_correction:4s}_{tilepath:3s}{tilerow:3s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_B11.TIF'] requires: [l1_metadata] - sza_granule: - file_reader: !!python/name:satpy.readers.oli_tirs_l1_tiff.OLITIRS_CH_Reader - file_patterns: 
['{platform_type:1s}{data_type:{1s}{spacecraft_id:2s}_{process_level_correction:4s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_SZA.TIF'] + + # Geometry datasets + granule_sza: + file_reader: !!python/name:satpy.readers.oli_tirs_l1_tif.OLITIRSCHReader + file_patterns: ['{platform_type:1s}{data_type:1s}{spacecraft_id:2s}_{process_level_correction:4s}_{tilepath:3s}{tilerow:3s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_SZA.TIF'] requires: [l1_metadata] - saa_granule: - file_reader: !!python/name:satpy.readers.oli_tirs_l1_tiff.OLITIRS_CH_Reader - file_patterns: ['{platform_type:1s}{data_type:{1s}{spacecraft_id:2s}_{process_level_correction:4s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_SAA.TIF'] + granule_saa: + file_reader: !!python/name:satpy.readers.oli_tirs_l1_tif.OLITIRSCHReader + file_patterns: ['{platform_type:1s}{data_type:1s}{spacecraft_id:2s}_{process_level_correction:4s}_{tilepath:3s}{tilerow:3s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_SAA.TIF'] requires: [l1_metadata] - vza_granule: - file_reader: !!python/name:satpy.readers.oli_tirs_l1_tiff.OLITIRS_CH_Reader - file_patterns: ['{platform_type:1s}{data_type:{1s}{spacecraft_id:2s}_{process_level_correction:4s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_VZA.TIF'] + granule_vza: + file_reader: !!python/name:satpy.readers.oli_tirs_l1_tif.OLITIRSCHReader + file_patterns: ['{platform_type:1s}{data_type:1s}{spacecraft_id:2s}_{process_level_correction:4s}_{tilepath:3s}{tilerow:3s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_VZA.TIF'] requires: [l1_metadata] - vaa_granule: - file_reader: !!python/name:satpy.readers.oli_tirs_l1_tiff.OLITIRS_CH_Reader - file_patterns: 
['{platform_type:1s}{data_type:{1s}{spacecraft_id:2s}_{process_level_correction:4s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_VAA.TIF'] + granule_vaa: + file_reader: !!python/name:satpy.readers.oli_tirs_l1_tif.OLITIRSCHReader + file_patterns: ['{platform_type:1s}{data_type:1s}{spacecraft_id:2s}_{process_level_correction:4s}_{tilepath:3s}{tilerow:3s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_VAA.TIF'] requires: [l1_metadata] - qa_granule: - file_reader: !!python/name:satpy.readers.oli_tirs_l1_tiff.OLITIRS_CH_Reader - file_patterns: ['{platform_type:1s}{data_type:{1s}{spacecraft_id:2s}_{process_level_correction:4s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_QA.TIF'] + + # QA Variables + granule_qa: + file_reader: !!python/name:satpy.readers.oli_tirs_l1_tif.OLITIRSCHReader + file_patterns: ['{platform_type:1s}{data_type:1s}{spacecraft_id:2s}_{process_level_correction:4s}_{tilepath:3s}{tilerow:3s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_QA.TIF'] requires: [l1_metadata] - qa_radsat_granule: - file_reader: !!python/name:satpy.readers.oli_tirs_l1_tiff.OLITIRS_CH_Reader - file_patterns: ['{platform_type:1s}{data_type:{1s}{spacecraft_id:2s}_{process_level_correction:4s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_QA_RADSAT.TIF'] + granule_qa_radsat: + file_reader: !!python/name:satpy.readers.oli_tirs_l1_tif.OLITIRSCHReader + file_patterns: ['{platform_type:1s}{data_type:1s}{spacecraft_id:2s}_{process_level_correction:4s}_{tilepath:3s}{tilerow:3s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_QA_RADSAT.TIF'] requires: [l1_metadata] - l1_metadata: - file_reader: !!python/name:satpy.readers.oli_tirs_l1_tiff.OLITIRS_MD_Reader - file_patterns: 
['{platform_type:1s}{data_type:{1s}{spacecraft_id:2s}_{process_level_correction:4s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_MTL.xml'] + + l1_metadata: + file_reader: !!python/name:satpy.readers.oli_tirs_l1_tif.OLITIRSMDReader + file_patterns: ['{platform_type:1s}{data_type:1s}{spacecraft_id:2s}_{process_level_correction:4s}_{tilepath:3s}{tilerow:3s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_MTL.xml'] datasets: - # Channels on the OLI instrument - B01: - name: B01 + b01: + name: b01 sensor: oli wavelength: [0.433, 0.443, 0.453] resolution: 30 @@ -99,9 +115,10 @@ datasets: counts: standard_name: counts units: "1" - file_type: b01_granule - B02: - name: B02 + file_type: granule_b01 + + b02: + name: b02 sensor: oli wavelength: [0.450, 0.482, 0.515] resolution: 30 @@ -115,9 +132,10 @@ datasets: counts: standard_name: counts units: "1" - file_type: b02_granule - B03: - name: B03 + file_type: granule_b02 + + b03: + name: b03 sensor: oli wavelength: [0.525, 0.565, 0.600] resolution: 30 @@ -131,9 +149,10 @@ datasets: counts: standard_name: counts units: "1" - file_type: b03_granule - B04: - name: B04 + file_type: granule_b03 + + b04: + name: b04 sensor: oli wavelength: [0.630, 0.660, 0.680] resolution: 30 @@ -147,9 +166,10 @@ datasets: counts: standard_name: counts units: "1" - file_type: b04_granule - B05: - name: B05 + file_type: granule_b04 + + b05: + name: b05 sensor: oli wavelength: [0.845, 0.867, 0.885] resolution: 30 @@ -163,9 +183,10 @@ datasets: counts: standard_name: counts units: "1" - file_type: b05_granule - B06: - name: B06 + file_type: granule_b05 + + b06: + name: b06 sensor: oli wavelength: [1.560, 1.650, 1.660] resolution: 30 @@ -179,9 +200,10 @@ datasets: counts: standard_name: counts units: "1" - file_type: b06_granule - B07: - name: B07 + file_type: granule_b06 + + b07: + name: b07 sensor: oli wavelength: [2.100, 2.215, 2.300] resolution: 30 @@ -195,9 
+217,10 @@ datasets: counts: standard_name: counts units: "1" - file_type: b07_granule - B08: - name: B08 + file_type: granule_b07 + + b08: + name: b08 sensor: oli wavelength: [0.500, 0.579, 0.680] resolution: 15 @@ -211,9 +234,10 @@ datasets: counts: standard_name: counts units: "1" - file_type: b03_granule - B09: - name: B09 + file_type: granule_b08 + + b09: + name: b09 sensor: oli wavelength: [1.360, 1.373, 1.390] resolution: 30 @@ -227,10 +251,11 @@ datasets: counts: standard_name: counts units: "1" - file_type: b09_granule + file_type: granule_b09 + # Channels on the TIRS instrument - B10: - name: B10 + b10: + name: b10 sensor: tirs wavelength: [10.6, 10.888, 11.19] resolution: 30 @@ -244,9 +269,10 @@ datasets: counts: standard_name: counts units: "1" - file_type: b10_granule - B11: - name: B11 + file_type: granule_b10 + + b11: + name: b11 sensor: tirs wavelength: [11.5, 11.981, 12.51] resolution: 30 @@ -260,19 +286,20 @@ datasets: counts: standard_name: counts units: "1" - file_type: b11_granule + file_type: granule_b11 # QA Variables qa: name: qa sensor: oli resolution: 30 - file_type: qa_granule + file_type: granule_qa + qa_radsat: name: qa_radsat sensor: oli resolution: 30 - file_type: qa_radsat_granule + file_type: granule_qa_radsat # Angles datasets sza: @@ -281,25 +308,28 @@ datasets: standard_name: solar_zenith_angle resolution: 30 units: "degrees" - file_type: sza_granule + file_type: granule_sza + saa: name: saa sensor: oli standard_name: solar_azimuth_angle resolution: 30 units: "degrees" - file_type: saa_granule + file_type: granule_saa + vza: name: vza sensor: oli standard_name: viewing_zenith_angle resolution: 30 units: "degrees" - file_type: vza_granule + file_type: granule_vza + vaa: name: vaa sensor: oli standard_name: viewing_azimuth_angle resolution: 30 units: "degrees" - file_type: vaa_granule + file_type: granule_vaa diff --git a/satpy/readers/oli_tirs_l1_tif.py b/satpy/readers/oli_tirs_l1_tif.py index 78db296eba..f2c2caf7e5 100644 --- 
a/satpy/readers/oli_tirs_l1_tif.py +++ b/satpy/readers/oli_tirs_l1_tif.py @@ -24,9 +24,246 @@ """ import logging +from datetime import datetime +import defusedxml.ElementTree as ET +import numpy as np +import rasterio +import xarray as xr +from pyresample import utils + +from satpy.readers.file_handlers import BaseFileHandler +from satpy.utils import get_legacy_chunk_size logger = logging.getLogger(__name__) +CHUNK_SIZE = get_legacy_chunk_size() PLATFORMS = {"08": "Landsat-8", "09": "Landsat-9"} + +OLI_BANDLIST = ["b01", "b02", "b03", "b04", "b05", "b06", "b07", "b08", "b09"] +TIRS_BANDLIST = ["b10", "b11"] +ANGLIST = ["sza", "saa", "vza", "vaa"] + +BANDLIST = OLI_BANDLIST + TIRS_BANDLIST + + +class OLITIRSCHReader(BaseFileHandler): + """File handler for Landsat L1 files (tif).""" + + @staticmethod + def get_btype(file_type): + """Return the band type from the file type.""" + pos = file_type.rfind("_") + if pos == -1: + raise ValueError(f"Invalid file type: {file_type}") + else: + return file_type[pos+1:] + + @property + def start_time(self): + """Return start time. + + This is actually the scene center time, as we don't have the start time. + It is constructed from the observation date (from the filename) and the center time (from the metadata). + """ + return datetime(self._obs_date.year, self._obs_date.month, self._obs_date.day, + self._mda.center_time.hour, self._mda.center_time.minute, self._mda.center_time.second) + + @property + def end_time(self): + """Return end time. + + This is actually the scene center time, as we don't have the end time. + It is constructed from the observation date (from the filename) and the center time (from the metadata). 
+ """ + return datetime(self._obs_date.year, self._obs_date.month, self._obs_date.day, + self._mda.center_time.hour, self._mda.center_time.minute, self._mda.center_time.second) + + def __init__(self, filename, filename_info, filetype_info, mda, **kwargs): + """Initialize the reader.""" + super(OLITIRSCHReader, self).__init__(filename, filename_info, filetype_info) + + # Check we have landsat data + if filename_info["platform_type"] != "L": + raise ValueError("This reader only supports Landsat data") + + # Get the channel name + self.channel = self.get_btype(filetype_info["file_type"]) + + # Data can be VIS, TIR or Combined. This flag denotes what the granule contains (O, T or C respectively). + self.chan_selector = filename_info["data_type"] + + self._obs_date = filename_info["observation_date"] + self._mda = mda + + # Retrieve some per-band useful metadata + self.bsat = self._mda.band_saturation + self.calinfo = self._mda.band_calibration + self.platform_name = PLATFORMS[filename_info["spacecraft_id"]] + + def get_dataset(self, key, info): + """Load a dataset.""" + if self.channel != key["name"]: + return + + logger.debug("Reading %s.", key["name"]) + + dataset = rasterio.open(self.filename) + + # Create area definition + if hasattr(dataset, "crs") and dataset.crs is not None: + self.area = utils.get_area_def_from_raster(dataset) + + # Create area definition + if hasattr(dataset, "crs") and dataset.crs is not None: + self.area = utils.get_area_def_from_raster(dataset) + + data = xr.open_dataset(self.filename, engine="rasterio", + chunks={"band": 1, + "y": CHUNK_SIZE, + "x": CHUNK_SIZE}, + mask_and_scale=False)["band_data"].squeeze() + + # The fill value for Landsat is '0', for calibration simplicity convert it to np.nan + data = xr.where(data == 0, np.float32(np.nan), data) + + attrs = data.attrs.copy() + + # Add useful metadata to the attributes. 
+ attrs["perc_cloud_cover"] = self._mda.cloud_cover + + # Only OLI bands have a saturation flag + if key["name"] in OLI_BANDLIST: + attrs["saturated"] = self.bsat[key["name"]] + + # Rename to Satpy convention + data = data.rename({"band": "bands"}) + + data.attrs = attrs + + # Calibrate if we're using a band rather than a QA or geometry dataset + if key["name"] in BANDLIST: + data = self.calibrate(data, key["calibration"]) + if key["name"] in ANGLIST: + data = data * 0.01 + data.attrs["units"] = "degrees" + + return data + + def calibrate(self, data, calibration): + """Calibrate the data from counts into the desired units.""" + if calibration == "counts": + data.attrs["standard_name"] = "counts" + data.attrs["units"] = "1" + return data.astype(np.float32) + + if calibration in ["radiance", "brightness_temperature"]: + data.attrs["standard_name"] = "toa_outgoing_radiance_per_unit_wavelength" + data.attrs["units"] = "W m-2 um-1 sr-1" + data = data * self.calinfo[self.channel][0] + self.calinfo[self.channel][1] + if calibration == "radiance": + return data.astype(np.float32) + + if calibration == "reflectance": + if int(self.channel[1:]) < 10: + data.attrs["standard_name"] = "toa_bidirectional_reflectance" + data.attrs["units"] = "%" + data = data * self.calinfo[self.channel][2] + self.calinfo[self.channel][3] + return data.astype(np.float32) + raise ValueError(f"Reflectance not available for thermal bands: {self.channel}") + + if calibration == "brightness_temperature": + if self.channel[1:] in ["10", "11"]: + data.attrs["standard_name"] = "counts" + data.attrs["units"] = "K" + data = (self.calinfo[self.channel][3] / np.log((self.calinfo[self.channel][2] / data) + 1)) + return data.astype(np.float32) + raise ValueError(f"Brightness temperature not available for visible bands: {self.channel}") + + return data.astype(np.float32) + + def get_area_def(self, dsid): + """Get area definition of the image.""" + if self.area is None: + raise NotImplementedError("No CRS 
information available from image") + return self.area + +class OLITIRSMDReader(BaseFileHandler): + """File handler for Landsat L1 files (tif).""" + def __init__(self, filename, filename_info, filetype_info): + """Init the reader.""" + super().__init__(filename, filename_info, filetype_info) + # Check we have landsat data + if filename_info["platform_type"] != "L": + raise ValueError("This reader only supports Landsat data") + + self._obs_date = filename_info["observation_date"] + self.root = ET.parse(self.filename) + self.process_level = filename_info["process_level_correction"] + self.platform_name = PLATFORMS[filename_info["spacecraft_id"]] + import bottleneck # noqa + import geotiepoints # noqa + + + @property + def center_time(self): + """Return center time.""" + return datetime.strptime(self.root.find(".//IMAGE_ATTRIBUTES/SCENE_CENTER_TIME").text, "%H:%M:%S.%f0Z") + + @property + def cloud_cover(self): + """Return estimated granule cloud cover percentage.""" + return float(self.root.find(".//IMAGE_ATTRIBUTES/CLOUD_COVER").text) + + def _get_satflag(self, band): + """Return saturation flag for a band.""" + flag = self.root.find(f".//IMAGE_ATTRIBUTES/SATURATION_BAND_{band}").text + if flag == "Y": + return True + return False + + @property + def band_saturation(self): + """Return per-band saturation flag.""" + bdict = {} + for i in range(1, 9): + bdict[f"b{i:02d}"] = self._get_satflag(i) + + return bdict + + def _get_band_radcal(self, band): + """Get the radiance scale and offset values.""" + rad_gain = float(self.root.find(f".//LEVEL1_RADIOMETRIC_RESCALING/RADIANCE_MULT_BAND_{band}").text) + rad_add = float(self.root.find(f".//LEVEL1_RADIOMETRIC_RESCALING/RADIANCE_ADD_BAND_{band}").text) + return rad_gain, rad_add + + def _get_band_viscal(self, band): + """Return visible channel calibration info.""" + rad_gain, rad_add = self._get_band_radcal(band) + ref_gain = float(self.root.find(f".//LEVEL1_RADIOMETRIC_RESCALING/REFLECTANCE_MULT_BAND_{band}").text) + ref_add 
= float(self.root.find(f".//LEVEL1_RADIOMETRIC_RESCALING/REFLECTANCE_ADD_BAND_{band}").text) + return (rad_gain, rad_add, ref_gain, ref_add) + + def _get_band_tircal(self, band): + """Return thermal channel calibration info.""" + rad_gain, rad_add = self._get_band_radcal(band) + bt_k1 = float(self.root.find(f".//LEVEL1_THERMAL_CONSTANTS/K1_CONSTANT_BAND_{band}").text) + bt_k2 = float(self.root.find(f".//LEVEL1_THERMAL_CONSTANTS/K2_CONSTANT_BAND_{band}").text) + return (rad_gain, rad_add, bt_k1, bt_k2) + + @property + def band_calibration(self): + """Return per-band saturation flag.""" + bdict = {} + for i in range(1, 9): + bdict[f"b{i:02d}"] = self._get_band_viscal(i) + for i in range(10, 12): + bdict[f"b{i:02d}"] = self._get_band_tircal(i) + + + return bdict + + def earth_sun_distance(self): + """Return Earth-Sun distance.""" + return float(self.root.find(".//IMAGE_ATTRIBUTES/EARTH_SUN_DISTANCE").text) From 31aff4d56a4692cd0052e99885d126410314deaa Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Fri, 13 Sep 2024 22:22:37 +0200 Subject: [PATCH 073/340] Update time parser in Landsat L1 reader. --- satpy/readers/oli_tirs_l1_tif.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/oli_tirs_l1_tif.py b/satpy/readers/oli_tirs_l1_tif.py index f2c2caf7e5..8d49828bfc 100644 --- a/satpy/readers/oli_tirs_l1_tif.py +++ b/satpy/readers/oli_tirs_l1_tif.py @@ -209,7 +209,7 @@ def __init__(self, filename, filename_info, filetype_info): @property def center_time(self): """Return center time.""" - return datetime.strptime(self.root.find(".//IMAGE_ATTRIBUTES/SCENE_CENTER_TIME").text, "%H:%M:%S.%f0Z") + return datetime.strptime(self.root.find(".//IMAGE_ATTRIBUTES/SCENE_CENTER_TIME").text[:-2], "%H:%M:%S.%f") @property def cloud_cover(self): From 411741c8da8d2f5a0585d5beeb15d12343355893 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Mon, 16 Sep 2024 11:06:35 +0200 Subject: [PATCH 074/340] Fix start and end times in Landsat L1 reader. 
--- satpy/readers/oli_tirs_l1_tif.py | 38 ++++++++++++++++++++------------ 1 file changed, 24 insertions(+), 14 deletions(-) diff --git a/satpy/readers/oli_tirs_l1_tif.py b/satpy/readers/oli_tirs_l1_tif.py index 8d49828bfc..6818cd2831 100644 --- a/satpy/readers/oli_tirs_l1_tif.py +++ b/satpy/readers/oli_tirs_l1_tif.py @@ -62,23 +62,13 @@ def get_btype(file_type): @property def start_time(self): - """Return start time. - - This is actually the scene center time, as we don't have the start time. - It is constructed from the observation date (from the filename) and the center time (from the metadata). - """ - return datetime(self._obs_date.year, self._obs_date.month, self._obs_date.day, - self._mda.center_time.hour, self._mda.center_time.minute, self._mda.center_time.second) + """Return start time.""" + return self._mda.start_time @property def end_time(self): - """Return end time. - - This is actually the scene center time, as we don't have the end time. - It is constructed from the observation date (from the filename) and the center time (from the metadata). - """ - return datetime(self._obs_date.year, self._obs_date.month, self._obs_date.day, - self._mda.center_time.hour, self._mda.center_time.minute, self._mda.center_time.second) + """Return end time.""" + return self._mda.end_time def __init__(self, filename, filename_info, filetype_info, mda, **kwargs): """Initialize the reader.""" @@ -211,6 +201,26 @@ def center_time(self): """Return center time.""" return datetime.strptime(self.root.find(".//IMAGE_ATTRIBUTES/SCENE_CENTER_TIME").text[:-2], "%H:%M:%S.%f") + @property + def start_time(self): + """Return start time. + + This is actually the scene center time, as we don't have the start time. + It is constructed from the observation date (from the filename) and the center time (from the metadata). 
+ """ + return datetime(self._obs_date.year, self._obs_date.month, self._obs_date.day, + self.center_time.hour, self.center_time.minute, self.center_time.second) + + @property + def end_time(self): + """Return end time. + + This is actually the scene center time, as we don't have the end time. + It is constructed from the observation date (from the filename) and the center time (from the metadata). + """ + return datetime(self._obs_date.year, self._obs_date.month, self._obs_date.day, + self.center_time.hour, self.center_time.minute, self.center_time.second) + @property def cloud_cover(self): """Return estimated granule cloud cover percentage.""" From f7e9853aef9d8cd18c17dacef05e13b8f1c18b95 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Mon, 16 Sep 2024 12:47:17 +0200 Subject: [PATCH 075/340] Fix reading Landsat L1 band 9 --- satpy/readers/oli_tirs_l1_tif.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/readers/oli_tirs_l1_tif.py b/satpy/readers/oli_tirs_l1_tif.py index 6818cd2831..3c22476869 100644 --- a/satpy/readers/oli_tirs_l1_tif.py +++ b/satpy/readers/oli_tirs_l1_tif.py @@ -237,7 +237,7 @@ def _get_satflag(self, band): def band_saturation(self): """Return per-band saturation flag.""" bdict = {} - for i in range(1, 9): + for i in range(1, 10): bdict[f"b{i:02d}"] = self._get_satflag(i) return bdict @@ -266,7 +266,7 @@ def _get_band_tircal(self, band): def band_calibration(self): """Return per-band saturation flag.""" bdict = {} - for i in range(1, 9): + for i in range(1, 10): bdict[f"b{i:02d}"] = self._get_band_viscal(i) for i in range(10, 12): bdict[f"b{i:02d}"] = self._get_band_tircal(i) From fd099cd06797e7a89218a4ee831dede3f3c23667 Mon Sep 17 00:00:00 2001 From: bkremmli Date: Mon, 16 Sep 2024 13:53:02 +0000 Subject: [PATCH 076/340] - suppress warnings for renaming duplicate dimensions - include u_independent_toa_bidirectional_reflectance in tests - patch decode_cf/fix_duplicate_dimensions in test_reassign_coords() --- 
satpy/readers/mviri_l1b_fiduceo_nc.py | 2 ++ .../reader_tests/test_mviri_l1b_fiduceo_nc.py | 15 +++++++++++---- 2 files changed, 13 insertions(+), 4 deletions(-) diff --git a/satpy/readers/mviri_l1b_fiduceo_nc.py b/satpy/readers/mviri_l1b_fiduceo_nc.py index e0297f7ec3..b3e3ee275e 100644 --- a/satpy/readers/mviri_l1b_fiduceo_nc.py +++ b/satpy/readers/mviri_l1b_fiduceo_nc.py @@ -186,6 +186,8 @@ ] HIGH_RESOL = 2250 +warnings.filterwarnings("ignore", message="^.*We do not yet support duplicate dimension names, but " + "we do allow initial construction of the object.*$") class IRWVCalibrator: """Calibrate IR & WV channels.""" diff --git a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py index 9ced865eed..f5f8912ff8 100644 --- a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py +++ b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py @@ -127,6 +127,9 @@ }, attrs=attrs_exp ) + +u_struct_refl_exp = u_vis_refl_exp.copy() + acq_time_ir_wv_exp = [np.datetime64("NaT"), np.datetime64("1970-01-01 02:30").astype("datetime64[ns]")] wv_counts_exp = xr.DataArray( @@ -289,6 +292,7 @@ def fixture_fake_dataset(): "count_ir": (("y_ir_wv", "x_ir_wv"), count_ir), "toa_bidirectional_reflectance_vis": vis_refl_exp / 100, "u_independent_toa_bidirectional_reflectance": u_vis_refl_exp / 100, + "u_structured_toa_bidirectional_reflectance": u_vis_refl_exp / 100, "quality_pixel_bitmask": (("y", "x"), mask), "solar_zenith_angle": (("y_tie", "x_tie"), sza), "time_ir_wv": (("y_ir_wv", "x_ir_wv"), time), @@ -393,7 +397,8 @@ def test_init(self, file_handler): ("quality_pixel_bitmask", None, 2250, quality_pixel_bitmask_exp), ("solar_zenith_angle", None, 2250, sza_vis_exp), ("solar_zenith_angle", None, 4500, sza_ir_wv_exp), - ("u_independent_toa_bidirectional_reflectance", None, 4500, u_vis_refl_exp) + ("u_independent_toa_bidirectional_reflectance", None, 4500, u_vis_refl_exp), + ("u_structured_toa_bidirectional_reflectance", None, 
4500, u_struct_refl_exp) ] ) def test_get_dataset(self, file_handler, name, calibration, resolution, @@ -636,9 +641,11 @@ def test_reassign_coords(self): "x": [.3, .4] } ) - ds = DatasetWrapper(nc) - foo = ds["foo"] - xr.testing.assert_equal(foo, foo_exp) + with mock.patch("satpy.readers.mviri_l1b_fiduceo_nc.DatasetWrapper._fix_duplicate_dimensions"): + with mock.patch("satpy.readers.mviri_l1b_fiduceo_nc.DatasetWrapper._decode_cf"): + ds = DatasetWrapper(nc) + foo = ds["foo"] + xr.testing.assert_equal(foo, foo_exp) class TestInterpolator: """Unit tests for Interpolator class.""" From 7c02d2d98618bbad33e9d8ffcd56dca6cd2274a8 Mon Sep 17 00:00:00 2001 From: bkremmli Date: Tue, 17 Sep 2024 10:16:14 +0000 Subject: [PATCH 077/340] define fill_val in test --- satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py index f5f8912ff8..b48e1f5046 100644 --- a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py +++ b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py @@ -45,6 +45,8 @@ # The following fixtures are not defined in this file, but are used and injected by Pytest: # - request +fill_val = int("4294967295") + attrs_exp: dict = { "platform": "MET7", "raw_metadata": {"foo": "bar"}, @@ -281,8 +283,8 @@ def fixture_fake_dataset(): cov = da.from_array([[1, 2], [3, 4]]) time = np.arange(4) * 60 * 60 - time[0] = 4294967295 - time[1] = 4294967295 + time[0] = fill_val + time[1] = fill_val time = time.reshape(2, 2) ds = xr.Dataset( @@ -332,7 +334,7 @@ def fixture_fake_dataset(): ds["count_ir"].attrs["ancillary_variables"] = "a_ir b_ir" ds["count_wv"].attrs["ancillary_variables"] = "a_wv b_wv" ds["quality_pixel_bitmask"].encoding["chunksizes"] = (2, 2) - ds["time_ir_wv"].attrs["_FillValue"] = 4294967295 + ds["time_ir_wv"].attrs["_FillValue"] = fill_val ds["time_ir_wv"].attrs["add_offset"] = 0 
return ds @@ -595,7 +597,7 @@ def test_fix_duplicate_dimensions(self): "channel_correlation_matrix_independent": (("channel", "channel"), [[1, 2], [3, 4]]), "channel_correlation_matrix_structured": (("channel", "channel"), [[1, 2], [3, 4]]), "time_ir_wv": (("y_ir_wv", "x_ir_wv"), [[foo_time, foo_time], [foo_time, foo_time]], - {"_FillValue": 4294967295, "add_offset": 0}) + {"_FillValue": fill_val, "add_offset": 0}) } ) foo_ds = DatasetWrapper(foo) From e8fa3ac7894f9153154700c890f1e0e24352012b Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Tue, 17 Sep 2024 17:02:11 +0200 Subject: [PATCH 078/340] Add error handling for incorrect data load. --- satpy/readers/oli_tirs_l1_tif.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/oli_tirs_l1_tif.py b/satpy/readers/oli_tirs_l1_tif.py index 3c22476869..d143db1844 100644 --- a/satpy/readers/oli_tirs_l1_tif.py +++ b/satpy/readers/oli_tirs_l1_tif.py @@ -95,7 +95,7 @@ def __init__(self, filename, filename_info, filetype_info, mda, **kwargs): def get_dataset(self, key, info): """Load a dataset.""" if self.channel != key["name"]: - return + raise ValueError(f"Requested channel {key['name']} does not match the reader channel {self.channel}") logger.debug("Reading %s.", key["name"]) From c62eee6478b6c44ab05f9f197ea3c5b13289bb16 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Tue, 17 Sep 2024 20:31:57 +0200 Subject: [PATCH 079/340] Replace landsat area def creation technique. 
--- satpy/readers/oli_tirs_l1_tif.py | 99 +++++++++++++++++++------------- 1 file changed, 60 insertions(+), 39 deletions(-) diff --git a/satpy/readers/oli_tirs_l1_tif.py b/satpy/readers/oli_tirs_l1_tif.py index d143db1844..32f45a54e4 100644 --- a/satpy/readers/oli_tirs_l1_tif.py +++ b/satpy/readers/oli_tirs_l1_tif.py @@ -28,9 +28,7 @@ import defusedxml.ElementTree as ET import numpy as np -import rasterio import xarray as xr -from pyresample import utils from satpy.readers.file_handlers import BaseFileHandler from satpy.utils import get_legacy_chunk_size @@ -43,6 +41,7 @@ OLI_BANDLIST = ["b01", "b02", "b03", "b04", "b05", "b06", "b07", "b08", "b09"] TIRS_BANDLIST = ["b10", "b11"] +PAN_BANDLIST = ["b08"] ANGLIST = ["sza", "saa", "vza", "vaa"] BANDLIST = OLI_BANDLIST + TIRS_BANDLIST @@ -99,47 +98,37 @@ def get_dataset(self, key, info): logger.debug("Reading %s.", key["name"]) - dataset = rasterio.open(self.filename) + data = xr.open_dataset(self.filename, engine="rasterio", + chunks={"band": 1, + "y": CHUNK_SIZE, + "x": CHUNK_SIZE}, + mask_and_scale=False)["band_data"].squeeze() - # Create area definition - if hasattr(dataset, "crs") and dataset.crs is not None: - self.area = utils.get_area_def_from_raster(dataset) - - # Create area definition - if hasattr(dataset, "crs") and dataset.crs is not None: - self.area = utils.get_area_def_from_raster(dataset) - - data = xr.open_dataset(self.filename, engine="rasterio", - chunks={"band": 1, - "y": CHUNK_SIZE, - "x": CHUNK_SIZE}, - mask_and_scale=False)["band_data"].squeeze() + # The fill value for Landsat is '0', for calibration simplicity convert it to np.nan + data = xr.where(data == 0, np.float32(np.nan), data) - # The fill value for Landsat is '0', for calibration simplicity convert it to np.nan - data = xr.where(data == 0, np.float32(np.nan), data) + attrs = data.attrs.copy() - attrs = data.attrs.copy() + # Add useful metadata to the attributes. 
+ attrs["perc_cloud_cover"] = self._mda.cloud_cover - # Add useful metadata to the attributes. - attrs["perc_cloud_cover"] = self._mda.cloud_cover + # Only OLI bands have a saturation flag + if key["name"] in OLI_BANDLIST: + attrs["saturated"] = self.bsat[key["name"]] - # Only OLI bands have a saturation flag - if key["name"] in OLI_BANDLIST: - attrs["saturated"] = self.bsat[key["name"]] + # Rename to Satpy convention + data = data.rename({"band": "bands"}) - # Rename to Satpy convention - data = data.rename({"band": "bands"}) + data.attrs = attrs - data.attrs = attrs + # Calibrate if we're using a band rather than a QA or geometry dataset + if key["name"] in BANDLIST: + data = self.calibrate(data, key["calibration"]) + if key["name"] in ANGLIST: + data = data * 0.01 + data.attrs["units"] = "degrees" - # Calibrate if we're using a band rather than a QA or geometry dataset - if key["name"] in BANDLIST: - data = self.calibrate(data, key["calibration"]) - if key["name"] in ANGLIST: - data = data * 0.01 - data.attrs["units"] = "degrees" - - return data + return data def calibrate(self, data, calibration): """Calibrate the data from counts into the desired units.""" @@ -174,10 +163,8 @@ def calibrate(self, data, calibration): return data.astype(np.float32) def get_area_def(self, dsid): - """Get area definition of the image.""" - if self.area is None: - raise NotImplementedError("No CRS information available from image") - return self.area + """Get area definition of the image from the metadata.""" + return self._mda.build_area_def(dsid["name"]) class OLITIRSMDReader(BaseFileHandler): """File handler for Landsat L1 files (tif).""" @@ -277,3 +264,37 @@ def band_calibration(self): def earth_sun_distance(self): """Return Earth-Sun distance.""" return float(self.root.find(".//IMAGE_ATTRIBUTES/EARTH_SUN_DISTANCE").text) + + def build_area_def(self, bname): + """Build area definition from metadata.""" + from pyresample.geometry import AreaDefinition + + # Here we assume that 
the thermal bands have the same resolution as the reflective bands, + # with only the panchromatic band (b08) having a different resolution. + if bname in PAN_BANDLIST: + pixoff = float(self.root.find(".//PROJECTION_ATTRIBUTES/GRID_CELL_SIZE_PANCHROMATIC").text) / 2. + x_size = float(self.root.find(".//PROJECTION_ATTRIBUTES/PANCHROMATIC_SAMPLES").text) + y_size = float(self.root.find(".//PROJECTION_ATTRIBUTES/PANCHROMATIC_LINES").text) + else: + pixoff = float(self.root.find(".//PROJECTION_ATTRIBUTES/GRID_CELL_SIZE_REFLECTIVE").text) / 2. + x_size = float(self.root.find(".//PROJECTION_ATTRIBUTES/REFLECTIVE_SAMPLES").text) + y_size = float(self.root.find(".//PROJECTION_ATTRIBUTES/REFLECTIVE_LINES").text) + + # Get remaining geoinfo from file + datum = self.root.find(".//PROJECTION_ATTRIBUTES/DATUM").text + utm_zone = int(self.root.find(".//PROJECTION_ATTRIBUTES/UTM_ZONE").text) + utm_str = f"{utm_zone}N" + + # We need to subtract / add half a pixel from the corner to get the correct extent (pixel centers) + ext_p1 = float(self.root.find(".//PROJECTION_ATTRIBUTES/CORNER_UL_PROJECTION_X_PRODUCT").text) - pixoff + ext_p2 = float(self.root.find(".//PROJECTION_ATTRIBUTES/CORNER_LR_PROJECTION_Y_PRODUCT").text) - pixoff + ext_p3 = float(self.root.find(".//PROJECTION_ATTRIBUTES/CORNER_LR_PROJECTION_X_PRODUCT").text) + pixoff + ext_p4 = float(self.root.find(".//PROJECTION_ATTRIBUTES/CORNER_UL_PROJECTION_Y_PRODUCT").text) + pixoff + + # Create area definition + pcs_id = f"{datum} / UTM zone {utm_str}" + proj4_dict = {"proj": "utm", "zone": utm_zone, "datum": datum, "units": "m", "no_defs": None, "type": "crs"} + area_extent = (ext_p1, ext_p2, ext_p3, ext_p4) + + # Return the area extent + return AreaDefinition("geotiff_area", pcs_id, pcs_id, proj4_dict, x_size, y_size, area_extent) From b67dc8537378dbfedef0bb04921ce90641fc7642 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 18 Sep 2024 12:46:04 +0200 Subject: [PATCH 080/340] Updates to Landsat L1 reader. 
--- satpy/readers/oli_tirs_l1_tif.py | 27 ++++++++++++++------------- 1 file changed, 14 insertions(+), 13 deletions(-) diff --git a/satpy/readers/oli_tirs_l1_tif.py b/satpy/readers/oli_tirs_l1_tif.py index 32f45a54e4..6ca017c688 100644 --- a/satpy/readers/oli_tirs_l1_tif.py +++ b/satpy/readers/oli_tirs_l1_tif.py @@ -96,6 +96,11 @@ def get_dataset(self, key, info): if self.channel != key["name"]: raise ValueError(f"Requested channel {key['name']} does not match the reader channel {self.channel}") + if key["name"] in OLI_BANDLIST and self.chan_selector not in ["O", "C"]: + raise ValueError(f"Requested channel {key['name']} is not available in this granule") + if key["name"] in TIRS_BANDLIST and self.chan_selector not in ["T", "C"]: + raise ValueError(f"Requested channel {key['name']} is not available in this granule") + logger.debug("Reading %s.", key["name"]) data = xr.open_dataset(self.filename, engine="rasterio", @@ -108,25 +113,26 @@ def get_dataset(self, key, info): data = xr.where(data == 0, np.float32(np.nan), data) attrs = data.attrs.copy() - # Add useful metadata to the attributes. 
attrs["perc_cloud_cover"] = self._mda.cloud_cover # Only OLI bands have a saturation flag if key["name"] in OLI_BANDLIST: + attrs["saturated"] = self.bsat[key["name"]] # Rename to Satpy convention data = data.rename({"band": "bands"}) - data.attrs = attrs + data.attrs.update(attrs) # Calibrate if we're using a band rather than a QA or geometry dataset if key["name"] in BANDLIST: data = self.calibrate(data, key["calibration"]) if key["name"] in ANGLIST: - data = data * 0.01 + data.data = data.data * 0.01 data.attrs["units"] = "degrees" + data.attrs["standard_name"] = "solar_zenith_angle" return data @@ -135,12 +141,12 @@ def calibrate(self, data, calibration): if calibration == "counts": data.attrs["standard_name"] = "counts" data.attrs["units"] = "1" - return data.astype(np.float32) + return data if calibration in ["radiance", "brightness_temperature"]: data.attrs["standard_name"] = "toa_outgoing_radiance_per_unit_wavelength" data.attrs["units"] = "W m-2 um-1 sr-1" - data = data * self.calinfo[self.channel][0] + self.calinfo[self.channel][1] + data.data = data.data * self.calinfo[self.channel][0] + self.calinfo[self.channel][1] if calibration == "radiance": return data.astype(np.float32) @@ -148,19 +154,15 @@ def calibrate(self, data, calibration): if int(self.channel[1:]) < 10: data.attrs["standard_name"] = "toa_bidirectional_reflectance" data.attrs["units"] = "%" - data = data * self.calinfo[self.channel][2] + self.calinfo[self.channel][3] + data.data = data.data * self.calinfo[self.channel][2] + self.calinfo[self.channel][3] return data.astype(np.float32) - raise ValueError(f"Reflectance not available for thermal bands: {self.channel}") if calibration == "brightness_temperature": if self.channel[1:] in ["10", "11"]: - data.attrs["standard_name"] = "counts" + data.attrs["standard_name"] = "toa_brightness_temperature" data.attrs["units"] = "K" - data = (self.calinfo[self.channel][3] / np.log((self.calinfo[self.channel][2] / data) + 1)) + data.data = 
(self.calinfo[self.channel][3] / np.log((self.calinfo[self.channel][2] / data.data) + 1)) return data.astype(np.float32) - raise ValueError(f"Brightness temperature not available for visible bands: {self.channel}") - - return data.astype(np.float32) def get_area_def(self, dsid): """Get area definition of the image from the metadata.""" @@ -258,7 +260,6 @@ def band_calibration(self): for i in range(10, 12): bdict[f"b{i:02d}"] = self._get_band_tircal(i) - return bdict def earth_sun_distance(self): From 0054c80e89edb275fedd388acbf63058702d10ed Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 18 Sep 2024 13:04:39 +0200 Subject: [PATCH 081/340] Add tests for Landsat L1 reader. --- .../reader_tests/test_oli_tirs_l1_tif.py | 576 ++++++++++++++++++ 1 file changed, 576 insertions(+) create mode 100644 satpy/tests/reader_tests/test_oli_tirs_l1_tif.py diff --git a/satpy/tests/reader_tests/test_oli_tirs_l1_tif.py b/satpy/tests/reader_tests/test_oli_tirs_l1_tif.py new file mode 100644 index 0000000000..a2e1cd94d5 --- /dev/null +++ b/satpy/tests/reader_tests/test_oli_tirs_l1_tif.py @@ -0,0 +1,576 @@ +#!/usr/bin/python +# Copyright (c) 2018 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . 
+"""Unittests for generic image reader.""" + +import os +import unittest + +import dask.array as da +import numpy as np +import pytest +import xarray as xr + +metadata_text = b""" + + + Image courtesy of the U.S. Geological Survey + https://doi.org/10.5066/P975CC9B + LC08_L1GT_026200_20240502_20240513_02_T2 + L1GT + 02 + T2 + GEOTIFF + LC08_L1GT_026200_20240502_20240513_02_T2_B1.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_B2.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_B3.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_B4.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_B5.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_B6.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_B7.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_B8.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_B9.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_B10.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_B11.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_QA_PIXEL.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_QA_RADSAT.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_ANG.txt + LC08_L1GT_026200_20240502_20240513_02_T2_VAA.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_VZA.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_SAA.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_SZA.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_MTL.txt + LC08_L1GT_026200_20240502_20240513_02_T2_MTL.xml + UINT16 + UINT16 + UINT16 + UINT16 + UINT16 + UINT16 + UINT16 + UINT16 + UINT16 + UINT16 + UINT16 + UINT16 + UINT16 + INT16 + INT16 + INT16 + INT16 + + + LANDSAT_8 + OLI_TIRS + 2 + 26 + 200 + NADIR + 26 + 200 + 2024-05-02 + 18:00:24.6148649Z + LGN + 0.85 + -1 + 9 + 9 + N + N + N + Y + N + N + N + N + N + -0.000 + -39.71362413 + -41.46228969 + 1.0079981 + UPPER + FINAL + ESTIMATED + + + UTM + WGS84 + WGS84 + 40 + 15.00 + 30.00 + 30.00 + 200 + 200 + 100 + 100 + 100 + 100 + NORTH_UP + 24.18941 + 58.17657 + 24.15493 + 60.44878 + 22.06522 + 58.15819 + 22.03410 + 60.39501 + 619500.000 + 2675700.000 + 850500.000 + 2675700.000 + 619500.000 
+ 2440500.000 + 850500.000 + 2440500.000 + + + Image courtesy of the U.S. Geological Survey + https://doi.org/10.5066/P975CC9B + 1885324_00001 + LC80262002024123LGN00 + LC08_L1GT_026200_20240502_20240513_02_T2 + L1GT + T2 + GEOTIFF + 2024-05-13T15:32:54Z + LPGS_16.4.0 + LC08_L1GT_026200_20240502_20240513_02_T2_B1.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_B2.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_B3.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_B4.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_B5.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_B6.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_B7.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_B8.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_B9.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_B10.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_B11.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_QA_PIXEL.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_QA_RADSAT.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_ANG.txt + LC08_L1GT_026200_20240502_20240513_02_T2_VAA.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_VZA.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_SAA.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_SZA.TIF + LC08_L1GT_026200_20240502_20240513_02_T2_MTL.txt + LC08_L1GT_026200_20240502_20240513_02_T2_MTL.xml + LC08CPF_20240429_20240630_02.03 + LO8BPF20240502162846_20240502181430.01 + LT8BPF20240502144307_20240510102926.01 + LC08RLUT_20150303_20431231_02_01.h5 + TIRS + GLS2000 + + + 748.04883 + -61.77412 + 766.01111 + -63.25745 + 705.87274 + -58.29120 + 595.23163 + -49.15442 + 364.25208 + -30.08006 + 90.58618 + -7.48064 + 30.53239 + -2.52137 + 673.63843 + -55.62928 + 142.35797 + -11.75597 + 22.00180 + 0.10033 + 22.00180 + 0.10033 + + + 1.210700 + -0.099980 + 1.210700 + -0.099980 + 1.210700 + -0.099980 + 1.210700 + -0.099980 + 1.210700 + -0.099980 + 1.210700 + -0.099980 + 1.210700 + -0.099980 + 1.210700 + -0.099980 + 1.210700 + -0.099980 + + + 65535 + 1 + 65535 + 1 + 65535 + 1 + 65535 + 1 + 65535 + 1 + 
65535 + 1 + 65535 + 1 + 65535 + 1 + 65535 + 1 + 65535 + 1 + 65535 + 1 + + + 1.2357E-02 + 1.2654E-02 + 1.1661E-02 + 9.8329E-03 + 6.0172E-03 + 1.4964E-03 + 5.0438E-04 + 1.1128E-02 + 2.3517E-03 + 3.3420E-04 + 3.3420E-04 + -61.78647 + -63.27010 + -58.30286 + -49.16426 + -30.08607 + -7.48213 + -2.52188 + -55.64041 + -11.75832 + 0.10000 + 0.10000 + 2.0000E-05 + 2.0000E-05 + 2.0000E-05 + 2.0000E-05 + 2.0000E-05 + 2.0000E-05 + 2.0000E-05 + 2.0000E-05 + 2.0000E-05 + -0.100000 + -0.100000 + -0.100000 + -0.100000 + -0.100000 + -0.100000 + -0.100000 + -0.100000 + -0.100000 + + + 774.8853 + 1321.0789 + 480.8883 + 1201.1442 + + + UTM + WGS84 + WGS84 + 40 + 15.00 + 30.00 + 30.00 + NORTH_UP + CUBIC_CONVOLUTION + + +""" + +class TestOLITIRSL1(unittest.TestCase): + """Test generic image reader.""" + + def setUp(self): + """Create temporary images and metadata to test on.""" + import tempfile + from datetime import datetime + + from pyresample.geometry import AreaDefinition + + from satpy.scene import Scene + + self.date = datetime(2024, 5, 12) + + self.filename_info = dict(observation_date=datetime(2024, 5, 3), + platform_type="L", + process_level_correction="L1TP", + spacecraft_id="08", + data_type="C") + self.ftype_info = {"file_type": "granule_b04"} + + # Create area definition + pcs_id = "WGS 84 / UTM zone 40N" + proj4_dict = {"proj": "utm", "zone": 40, "datum": "WGS84", "units": "m", "no_defs": None, "type": "crs"} + self.x_size = 100 + self.y_size = 100 + area_extent = (619485., 2440485., 850515., 2675715.) 
+ self.area_def = AreaDefinition("geotiff_area", pcs_id, pcs_id, + proj4_dict, self.x_size, self.y_size, + area_extent) + + # Create datasets for L, LA, RGB and RGBA mode images + self.test_data__1 = da.random.randint(12000, 16000, + size=(self.y_size, self.x_size), + chunks=(50, 50)).astype(np.uint16) + self.test_data__2 = da.random.randint(8000, 14000, + size=(self.y_size, self.x_size), + chunks=(50, 50)).astype(np.uint16) + self.test_data__3= da.random.randint(0, 10000, + size=(self.y_size, self.x_size), + chunks=(50, 50)).astype(np.uint16) + + ds_b4 = xr.DataArray(self.test_data__1, + dims=("y", "x"), + attrs={"name": "b04", + "start_time": self.date}) + + ds_b11 = xr.DataArray(self.test_data__2, + dims=("y", "x"), + attrs={"name": "b04", + "start_time": self.date}) + + ds_sza = xr.DataArray(self.test_data__3, + dims=("y", "x"), + attrs={"name": "sza", + "start_time": self.date}) + + # Temp dir for the saved images + self.base_dir = tempfile.mkdtemp() + + # Filenames to be used during testing + self.fnames = [f"{self.base_dir}/LC08_L1GT_026200_20240502_20240513_02_T2_B4.TIF", + f"{self.base_dir}/LC08_L1GT_026200_20240502_20240513_02_T2_B11.TIF", + f"{self.base_dir}/LC08_L1GT_026200_20240502_20240513_02_T2_MTL.xml", + f"{self.base_dir}/LC08_L1GT_026200_20240502_20240513_02_T2_SZA.TIF"] + + self.bad_fname_plat = self.fnames[0].replace("LC08", "BC08") + self.bad_fname_plat2 = self.fnames[2].replace("LC08", "BC08") + + self.bad_fname_chan = self.fnames[0].replace("B4", "B5") + + # Put the datasets to Scene for easy saving + scn = Scene() + scn["b4"] = ds_b4 + scn["b4"].attrs["area"] = self.area_def + scn["b11"] = ds_b11 + scn["b11"].attrs["area"] = self.area_def + scn["sza"] = ds_sza + scn["sza"].attrs["area"] = self.area_def + + # Save the images. 
Two images in PNG and two in GeoTIFF + scn.save_dataset("b4", writer="geotiff", enhance=False, fill_value=0, + filename=os.path.join(self.base_dir, self.fnames[0])) + scn.save_dataset("b11", writer="geotiff", enhance=False, fill_value=0, + filename=os.path.join(self.base_dir, self.fnames[1])) + scn.save_dataset("sza", writer="geotiff", enhance=False, fill_value=0, + filename=os.path.join(self.base_dir, self.fnames[3])) + + scn.save_dataset("b4", writer="geotiff", enhance=False, fill_value=0, + filename=self.bad_fname_plat) + scn.save_dataset("b4", writer="geotiff", enhance=False, fill_value=0, + filename=self.bad_fname_chan) + + # Write the metadata to a file + with open(os.path.join(self.base_dir, self.fnames[2]), "wb") as f: + f.write(metadata_text) + with open(self.bad_fname_plat2, "wb") as f: + f.write(metadata_text) + + self.scn = scn + + def tearDown(self): + """Remove the temporary directory created for a test.""" + try: + import shutil + shutil.rmtree(self.base_dir, ignore_errors=True) + except OSError: + pass + + def test_basicload(self): + """Test loading a Landsat Scene.""" + from satpy import Scene + scn = Scene(reader="oli_tirs_l1_tif", filenames=[self.fnames[0], + self.fnames[1], + self.fnames[2]]) + scn.load(["b04", "b11"]) + + # Check dataset is loaded correctly + assert scn["b04"].shape == (100, 100) + assert scn["b04"].attrs["area"] == self.area_def + assert scn["b04"].attrs["saturated"] + assert scn["b11"].shape == (100, 100) + assert scn["b11"].attrs["area"] == self.area_def + with pytest.raises(KeyError, match="saturated"): + assert not scn["b11"].attrs["saturated"] + + def test_ch_startend(self): + """Test correct retrieval of start/end times.""" + from datetime import datetime + + from satpy import Scene + scn = Scene(reader="oli_tirs_l1_tif", filenames=[self.fnames[0], self.fnames[3], self.fnames[2]]) + bnds = scn.available_dataset_names() + assert bnds == ["b04", "sza"] + + scn.load(["b04"]) + assert scn.start_time == datetime(2024, 5, 2, 
18, 0, 24) + assert scn.end_time == datetime(2024, 5, 2, 18, 0, 24) + + def test_loading(self): + """Test loading a Landsat Scene with good and bad channel requests.""" + from satpy.readers.oli_tirs_l1_tif import OLITIRSCHReader, OLITIRSMDReader + good_mda = OLITIRSMDReader(self.fnames[2], self.filename_info, {}) + rdr = OLITIRSCHReader(self.fnames[0], self.filename_info, self.ftype_info, good_mda) + + # Check case with good file data and load request + rdr.get_dataset({"name": "b04", "calibration": "counts"}, {}) + + # Check case with request to load channel not matching filename + with pytest.raises(ValueError, match="Requested channel b05 does not match the reader channel b04"): + rdr.get_dataset({"name": "b05", "calibration": "counts"}, {}) + + bad_finfo = self.filename_info.copy() + bad_finfo["data_type"] = "T" + + # Check loading invalid channel for data type + rdr = OLITIRSCHReader(self.fnames[1], bad_finfo, self.ftype_info, good_mda) + with pytest.raises(ValueError, match= "Requested channel b04 is not available in this granule"): + rdr.get_dataset({"name": "b04", "calibration": "counts"}, {}) + + bad_finfo["data_type"] = "O" + ftype_b11 = self.ftype_info.copy() + ftype_b11["file_type"] = "granule_b11" + rdr = OLITIRSCHReader(self.fnames[1], bad_finfo, ftype_b11, good_mda) + with pytest.raises(ValueError, match="Requested channel b11 is not available in this granule"): + rdr.get_dataset({"name": "b11", "calibration": "counts"}, {}) + + def test_badfiles(self): + """Test loading a Landsat Scene with bad data.""" + from satpy.readers.oli_tirs_l1_tif import OLITIRSCHReader, OLITIRSMDReader + bad_fname_info = self.filename_info.copy() + bad_fname_info["platform_type"] = "B" + + # Test that metadata reader initialises with correct filename + good_mda = OLITIRSMDReader(self.fnames[2], self.filename_info, {}) + + # Check metadata reader fails if platform type is wrong + with pytest.raises(ValueError, match="This reader only supports Landsat data"): + 
OLITIRSMDReader(self.fnames[2], bad_fname_info, {}) + + # Test that metadata reader initialises with correct filename + OLITIRSCHReader(self.fnames[0], self.filename_info, self.ftype_info, good_mda) + + # Check metadata reader fails if platform type is wrong + with pytest.raises(ValueError, match="This reader only supports Landsat data"): + OLITIRSCHReader(self.fnames[0], bad_fname_info, self.ftype_info, good_mda) + bad_ftype_info = self.ftype_info.copy() + bad_ftype_info["file_type"] = "granule-b05" + with pytest.raises(ValueError, match="Invalid file type: granule-b05"): + OLITIRSCHReader(self.fnames[0], self.filename_info, bad_ftype_info, good_mda) + + def test_calibration_modes(self): + """Test calibration modes for the reader.""" + from satpy import Scene + + # Check counts calibration + scn = Scene(reader="oli_tirs_l1_tif", filenames=self.fnames) + scn.load(["b04", "b11"], calibration="counts") + np.testing.assert_allclose(scn["b04"].values, self.test_data__1) + np.testing.assert_allclose(scn["b11"].values, self.test_data__2) + assert scn["b04"].attrs["units"] == "1" + assert scn["b11"].attrs["units"] == "1" + assert scn["b04"].attrs["standard_name"] == "counts" + assert scn["b11"].attrs["standard_name"] == "counts" + + # Check radiance calibration + exp_b04 = (self.test_data__1 * 0.0098329 - 49.16426).astype(np.float32) + exp_b11 = (self.test_data__2 * 0.0003342 + 0.100000).astype(np.float32) + + scn = Scene(reader="oli_tirs_l1_tif", filenames=self.fnames) + scn.load(["b04", "b11"], calibration="radiance") + assert scn["b04"].attrs["units"] == "W m-2 um-1 sr-1" + assert scn["b11"].attrs["units"] == "W m-2 um-1 sr-1" + assert scn["b04"].attrs["standard_name"] == "toa_outgoing_radiance_per_unit_wavelength" + assert scn["b11"].attrs["standard_name"] == "toa_outgoing_radiance_per_unit_wavelength" + np.testing.assert_allclose(scn["b04"].values, exp_b04, rtol=1e-4) + np.testing.assert_allclose(scn["b11"].values, exp_b11, rtol=1e-4) + + # Check top level 
calibration + exp_b04 = (self.test_data__1 * 2e-05 - 0.1).astype(np.float32) + exp_b11 = (self.test_data__2 * 0.0003342 + 0.100000) + exp_b11 = (1201.1442 / np.log((480.8883 / exp_b11) + 1)).astype(np.float32) + scn = Scene(reader="oli_tirs_l1_tif", filenames=self.fnames) + scn.load(["b04", "b11"]) + + assert scn["b04"].attrs["units"] == "%" + assert scn["b11"].attrs["units"] == "K" + assert scn["b04"].attrs["standard_name"] == "toa_bidirectional_reflectance" + assert scn["b11"].attrs["standard_name"] == "toa_brightness_temperature" + np.testing.assert_allclose(np.array(scn["b04"].values), np.array(exp_b04), rtol=1e-4) + np.testing.assert_allclose(scn["b11"].values, exp_b11, rtol=1e-6) + + # Check angles are calculated correctly + scn = Scene(reader="oli_tirs_l1_tif", filenames=self.fnames) + scn.load(["sza"]) + assert scn["sza"].attrs["units"] == "degrees" + assert scn["sza"].attrs["standard_name"] == "solar_zenith_angle" + np.testing.assert_allclose(scn["sza"].values * 100, np.array(self.test_data__3), atol=0.01, rtol=1e-3) + + def test_metadata(self): + """Check that metadata values loaded correctly.""" + from satpy.readers.oli_tirs_l1_tif import OLITIRSMDReader + mda = OLITIRSMDReader(self.fnames[2], self.filename_info, {}) + + cal_test_dict = {"b01": (0.012357, -61.78647, 2e-05, -0.1), + "b05": (0.0060172, -30.08607, 2e-05, -0.1), + "b10": (0.0003342, 0.1, 774.8853, 1321.0789)} + + assert mda.platform_name == "Landsat-8" + assert mda.earth_sun_distance() == 1.0079981 + assert mda.band_calibration["b01"] == cal_test_dict["b01"] + assert mda.band_calibration["b05"] == cal_test_dict["b05"] + assert mda.band_calibration["b10"] == cal_test_dict["b10"] + assert not mda.band_saturation["b01"] + assert mda.band_saturation["b04"] + assert not mda.band_saturation["b05"] + with pytest.raises(KeyError): + mda.band_saturation["b10"] + + def test_area_def(self): + """Check we can get the area defs properly.""" + from satpy.readers.oli_tirs_l1_tif import OLITIRSMDReader + 
mda = OLITIRSMDReader(self.fnames[2], self.filename_info, {}) + + standard_area = mda.build_area_def("b01") + pan_area = mda.build_area_def("b08") + + assert standard_area.area_extent == (619485.0, 2440485.0, 850515.0, 2675715.0) + assert pan_area.area_extent == (619492.5, 2440492.5, 850507.5, 2675707.5) From 44ce1d209e4c533cf466c811ae5511d97dae6dfb Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 18 Sep 2024 13:06:08 +0200 Subject: [PATCH 082/340] Add clarification note to Landsat reader. --- satpy/readers/oli_tirs_l1_tif.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/satpy/readers/oli_tirs_l1_tif.py b/satpy/readers/oli_tirs_l1_tif.py index 6ca017c688..209b103b0c 100644 --- a/satpy/readers/oli_tirs_l1_tif.py +++ b/satpy/readers/oli_tirs_l1_tif.py @@ -21,6 +21,9 @@ https://d9-wret.s3.us-west-2.amazonaws.com/assets/palladium/production/s3fs-public/atoms/files/LSDS-1822_Landsat8-9-OLI-TIRS-C2-L1-DFCB-v6.pdf https://www.usgs.gov/landsat-missions/using-usgs-landsat-level-1-data-product +NOTE: The scene geometry data (SZA, VZA, SAA, VAA) is retrieved from the L1 TIFF files, which are derived from Band 04. +The geometry differs between bands, so if you need precise geometry you should calculate this from the metadata instead. 
+ """ import logging From 6b59245e1afb4e421b2b9c28e1eaf0d01997ba01 Mon Sep 17 00:00:00 2001 From: clement laplace Date: Fri, 20 Sep 2024 15:10:50 +0000 Subject: [PATCH 083/340] test : Add the test related to the flash_age compositor --- satpy/composites/lightning.py | 3 +- .../tests/compositor_tests/test_lightning.py | 59 +++++++++++++++++++ 2 files changed, 61 insertions(+), 1 deletion(-) create mode 100644 satpy/tests/compositor_tests/test_lightning.py diff --git a/satpy/composites/lightning.py b/satpy/composites/lightning.py index b11a791abb..af1a6ff02c 100644 --- a/satpy/composites/lightning.py +++ b/satpy/composites/lightning.py @@ -44,6 +44,7 @@ def __init__(self, name, prerequisites=None, optional_prerequisites=None, **kwar self.standard_name = self.attrs["standard_name"] self.reference_time = self.attrs["reference_time"] + def _normalize_time(self,data:xr.DataArray,attrs:dict)->xr.DataArray: """Normalised the time in the range between [end_time,end_time - time_range]. @@ -63,7 +64,7 @@ def _normalize_time(self,data:xr.DataArray,attrs:dict)->xr.DataArray: # Compute the minimum time value based on the time range begin_time = end_time - np.timedelta64(self.time_range, "m") # Drop values that are bellow begin_time - data = data.where(data > begin_time, drop=True) + data = data.where(data >= begin_time, drop=True) # Normalize the time values normalized_data = (data - begin_time) / (end_time - begin_time) # Ensure the result is still an xarray.DataArray diff --git a/satpy/tests/compositor_tests/test_lightning.py b/satpy/tests/compositor_tests/test_lightning.py new file mode 100644 index 0000000000..b686ff915e --- /dev/null +++ b/satpy/tests/compositor_tests/test_lightning.py @@ -0,0 +1,59 @@ +"""Test the flash age compositor.""" +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# Copyright (c) 2019 Satpy developers +# +# This file is part of satpy. 
+# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . + + +import datetime + +import numpy as np +import xarray as xr + +from satpy.composites.lightning import LightningTimeCompositor + + +def test_flash_age_compositor(): + """Test the flash_age compsitor by comparing two xarrays object.""" + comp = LightningTimeCompositor("flash_age",prerequisites=["flash_time"], + standard_name="ligtning_time", + time_range=60, + reference_time="end_time") + attrs_flash_age = {"variable_name": "flash_time","name": "flash_time", + "start_time": datetime.datetime(2024, 8, 1, 10, 50, 0), + "end_time": datetime.datetime(2024, 8, 1, 11, 0, 0),"reader": "li_l2_nc"} + flash_age_value = np.array(["2024-08-01T09:00:00", + "2024-08-01T10:00:00", "2024-08-01T10:30:00","2024-08-01T11:00:00"], dtype="datetime64[ns]") + #Coordinates data (assuming you have longitude and latitude arrays) + flash_age = xr.DataArray( + flash_age_value, + dims=["y"], + coords={ + "crs": "8B +proj=longlat +ellps=WGS84 +type=crs" + },attrs = attrs_flash_age,name="flash_time") + res = comp([flash_age]) + expected_attrs = {"variable_name": "flash_time","name": "lightning_time", + "start_time": datetime.datetime(2024, 8, 1, 10, 50, 0), + "end_time": datetime.datetime(2024, 8, 1, 11, 0, 0),"reader": "li_l2_nc", + "standard_name": "ligtning_time" + } + expected_array = xr.DataArray( + np.array([0.0,0.5,1.0]), + dims=["y"], + coords={ + "crs": "8B +proj=longlat +ellps=WGS84 +type=crs" + 
},attrs = expected_attrs,name="flash_time") + xr.testing.assert_equal(res,expected_array) From 0ebb1fe2eb6f032ae7272ed9835b626d0a91eda6 Mon Sep 17 00:00:00 2001 From: clement laplace Date: Fri, 20 Sep 2024 15:18:56 +0000 Subject: [PATCH 084/340] feat : Handles case where xarray data is empty for flash_age composites. --- satpy/composites/lightning.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/satpy/composites/lightning.py b/satpy/composites/lightning.py index af1a6ff02c..a7931027b9 100644 --- a/satpy/composites/lightning.py +++ b/satpy/composites/lightning.py @@ -18,6 +18,7 @@ """Composite classes for the LI instrument.""" import logging +import sys import numpy as np import xarray as xr @@ -65,6 +66,10 @@ def _normalize_time(self,data:xr.DataArray,attrs:dict)->xr.DataArray: begin_time = end_time - np.timedelta64(self.time_range, "m") # Drop values that are bellow begin_time data = data.where(data >= begin_time, drop=True) + # exit if data is empty afer filtering + if data.size == 0 : + LOG.error(f"All the flash_age events happened before {begin_time}") + sys.exit(1) # Normalize the time values normalized_data = (data - begin_time) / (end_time - begin_time) # Ensure the result is still an xarray.DataArray From 27eac75d2676442e2c197a6585281fe895efd3e8 Mon Sep 17 00:00:00 2001 From: clement laplace Date: Fri, 20 Sep 2024 15:29:51 +0000 Subject: [PATCH 085/340] typo : Correct operator multi lines error in test_lightning.py file --- satpy/tests/compositor_tests/test_lightning.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/compositor_tests/test_lightning.py b/satpy/tests/compositor_tests/test_lightning.py index b686ff915e..990d572fe7 100644 --- a/satpy/tests/compositor_tests/test_lightning.py +++ b/satpy/tests/compositor_tests/test_lightning.py @@ -45,7 +45,7 @@ def test_flash_age_compositor(): "crs": "8B +proj=longlat +ellps=WGS84 +type=crs" },attrs = attrs_flash_age,name="flash_time") res = comp([flash_age]) - 
expected_attrs = {"variable_name": "flash_time","name": "lightning_time", + expected_attrs = {"variable_name": "flash_time","name": "lightning_time", "start_time": datetime.datetime(2024, 8, 1, 10, 50, 0), "end_time": datetime.datetime(2024, 8, 1, 11, 0, 0),"reader": "li_l2_nc", "standard_name": "ligtning_time" From aae6b98e04a33a40f4d4ad4d243b6d89c2070ac9 Mon Sep 17 00:00:00 2001 From: clement laplace Date: Fri, 20 Sep 2024 15:53:20 +0000 Subject: [PATCH 086/340] test : Add test related to empty array after being filtered --- .../tests/compositor_tests/test_lightning.py | 27 +++++++++++++++++++ 1 file changed, 27 insertions(+) diff --git a/satpy/tests/compositor_tests/test_lightning.py b/satpy/tests/compositor_tests/test_lightning.py index 990d572fe7..25ba3f8c08 100644 --- a/satpy/tests/compositor_tests/test_lightning.py +++ b/satpy/tests/compositor_tests/test_lightning.py @@ -19,6 +19,8 @@ import datetime +import logging +from unittest import mock import numpy as np import xarray as xr @@ -57,3 +59,28 @@ def test_flash_age_compositor(): "crs": "8B +proj=longlat +ellps=WGS84 +type=crs" },attrs = expected_attrs,name="flash_time") xr.testing.assert_equal(res,expected_array) + +def test_empty_array_error(caplog): + """Test when the filtered array is empty.""" + comp = LightningTimeCompositor("flash_age",prerequisites=["flash_time"], + standard_name="ligtning_time", + time_range=60, + reference_time="end_time") + attrs_flash_age = {"variable_name": "flash_time","name": "flash_time", + "start_time": datetime.datetime(2024, 8, 1, 10, 50, 0), + "end_time": datetime.datetime(2024, 8, 1, 11, 0, 0),"reader": "li_l2_nc"} + flash_age_value = np.array(["2024-08-01T09:00:00"], dtype="datetime64[ns]") + flash_age = xr.DataArray( + flash_age_value, + dims=["y"], + coords={ + "crs": "8B +proj=longlat +ellps=WGS84 +type=crs" + },attrs = attrs_flash_age,name="flash_time") + with mock.patch("sys.exit") as mock_exit: + # Capture logging output + with caplog.at_level(logging.ERROR): + 
_ = comp([flash_age]) + + mock_exit.assert_called_once_with(1) + + assert "All the flash_age events happened before 2024-08-01T10:00:00" in caplog.text From 589743172c540d233d97eece227e7f5cdeb86f0b Mon Sep 17 00:00:00 2001 From: clement laplace Date: Fri, 20 Sep 2024 16:23:02 +0000 Subject: [PATCH 087/340] test : Add test concerning the missing data --- .../tests/compositor_tests/test_lightning.py | 28 +++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/satpy/tests/compositor_tests/test_lightning.py b/satpy/tests/compositor_tests/test_lightning.py index 25ba3f8c08..5cd024401e 100644 --- a/satpy/tests/compositor_tests/test_lightning.py +++ b/satpy/tests/compositor_tests/test_lightning.py @@ -84,3 +84,31 @@ def test_empty_array_error(caplog): mock_exit.assert_called_once_with(1) assert "All the flash_age events happened before 2024-08-01T10:00:00" in caplog.text + +def test_update_missing_metadata(): + """Test the _update_missing_metadata method.""" + existing_attrs = { + "standard_name": "lightning_event_time", + "time_range": 30 + } + + # New metadata to be merged + new_attrs = { + "standard_name": None, # Should not overwrite since it's None + "reference_time": "2023-09-20T00:00:00Z", # Should be added + "units": "seconds" # Should be added + } + + # Expected result after merging + expected_attrs = { + "standard_name": "lightning_event_time", # Should remain the same + "time_range": 30, # Should remain the same + "reference_time": "2023-09-20T00:00:00Z", # Should be added + "units": "seconds" # Should be added + } + + # Call the static method + LightningTimeCompositor._update_missing_metadata(existing_attrs, new_attrs) + + # Assert the final state of existing_attrs is as expected + assert existing_attrs == expected_attrs From 3edf89391cfc6a29fed11baac9fe7c529b4cca8a Mon Sep 17 00:00:00 2001 From: clement laplace Date: Mon, 23 Sep 2024 08:04:48 +0000 Subject: [PATCH 088/340] typo : Erase useless comment --- 
satpy/tests/compositor_tests/test_lightning.py | 1 - 1 file changed, 1 deletion(-) diff --git a/satpy/tests/compositor_tests/test_lightning.py b/satpy/tests/compositor_tests/test_lightning.py index 5cd024401e..4c1f8b9a8c 100644 --- a/satpy/tests/compositor_tests/test_lightning.py +++ b/satpy/tests/compositor_tests/test_lightning.py @@ -39,7 +39,6 @@ def test_flash_age_compositor(): "end_time": datetime.datetime(2024, 8, 1, 11, 0, 0),"reader": "li_l2_nc"} flash_age_value = np.array(["2024-08-01T09:00:00", "2024-08-01T10:00:00", "2024-08-01T10:30:00","2024-08-01T11:00:00"], dtype="datetime64[ns]") - #Coordinates data (assuming you have longitude and latitude arrays) flash_age = xr.DataArray( flash_age_value, dims=["y"], From e90d7d089813d213901f5e093bc934cd762cefeb Mon Sep 17 00:00:00 2001 From: bkremmli Date: Mon, 23 Sep 2024 13:40:17 +0000 Subject: [PATCH 089/340] ancillary changes like comments, etc. --- satpy/readers/mviri_l1b_fiduceo_nc.py | 12 +++++++----- .../tests/reader_tests/test_mviri_l1b_fiduceo_nc.py | 3 +-- 2 files changed, 8 insertions(+), 7 deletions(-) diff --git a/satpy/readers/mviri_l1b_fiduceo_nc.py b/satpy/readers/mviri_l1b_fiduceo_nc.py index b3e3ee275e..b11a7d07b1 100644 --- a/satpy/readers/mviri_l1b_fiduceo_nc.py +++ b/satpy/readers/mviri_l1b_fiduceo_nc.py @@ -461,9 +461,7 @@ def __init__(self, nc): """Wrap the given dataset.""" self.nc = nc - # decode data self._decode_cf() - # rename duplicate dimensions self._fix_duplicate_dimensions(self.nc) @@ -518,13 +516,15 @@ def _cleanup_attrs(self, ds): ds.attrs.pop("ancillary_variables", None) def _decode_cf(self): - # remove time before decoding and add again. + """Decode data.""" + # time decoding with decode_cf results in error - decode separately! 
time_dims, time = self._decode_time() self.nc = self.nc.drop_vars(time.name) self.nc = xr.decode_cf(self.nc) self.nc[time.name] = (time_dims, time.values) def _decode_time(self): + """Decode time using fill value and offset.""" time = self.get_time() time_dims = self.nc[time.name].dims time = xr.where(time == time.attrs["_FillValue"], np.datetime64("NaT"), @@ -533,6 +533,7 @@ def _decode_time(self): return (time_dims, time) def _fix_duplicate_dimensions(self, nc): + """Rename dimensions as duplicate dimensions names are not supported by xarray.""" nc.variables["covariance_spectral_response_function_vis"].dims = ("srf_size_1", "srf_size_2") self.nc = nc.drop_dims("srf_size") nc.variables["channel_correlation_matrix_independent"].dims = ("channel_1", "channel_2") @@ -589,6 +590,7 @@ def __init__(self, filename, filename_info, filetype_info, # noqa: D417 "y": CHUNK_SIZE, "x_ir_wv": CHUNK_SIZE, "y_ir_wv": CHUNK_SIZE}, + # see dataset wrapper for why decoding is disabled decode_cf=False, decode_times=False, mask_and_scale=False, @@ -611,8 +613,8 @@ def _get_projection_longitude(self, filename_info): """Read projection longitude from filename as it is not provided in the file.""" if "." 
in str(filename_info["projection_longitude"]): return float(filename_info["projection_longitude"]) - else: - return float(filename_info["projection_longitude"]) / 100 + + return float(filename_info["projection_longitude"]) / 100 def get_dataset(self, dataset_id, dataset_info): """Get the dataset.""" diff --git a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py index b48e1f5046..172d34394f 100644 --- a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py +++ b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py @@ -45,7 +45,7 @@ # The following fixtures are not defined in this file, but are used and injected by Pytest: # - request -fill_val = int("4294967295") +fill_val = 4294967295 attrs_exp: dict = { "platform": "MET7", @@ -575,7 +575,6 @@ def test_file_pattern(self, reader): ] files = reader.select_files_from_pathnames(filenames) - # only 3 out of 4 above should match assert len(files) == 6 From 51d8e77a3b80441faf82e2077a86334c6aa768cc Mon Sep 17 00:00:00 2001 From: bkremmli Date: Tue, 24 Sep 2024 06:45:44 +0000 Subject: [PATCH 090/340] change fill_val definition in test, changes for Code Scene status --- .../reader_tests/test_mviri_l1b_fiduceo_nc.py | 47 ++++++++----------- 1 file changed, 19 insertions(+), 28 deletions(-) diff --git a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py index 172d34394f..a3ef65ab07 100644 --- a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py +++ b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py @@ -28,7 +28,6 @@ import xarray as xr from pyproj import CRS from pyresample.geometry import AreaDefinition -from pytest_lazy_fixtures import lf as lazy_fixture from satpy.readers.mviri_l1b_fiduceo_nc import ( ALTITUDE, @@ -45,7 +44,7 @@ # The following fixtures are not defined in this file, but are used and injected by Pytest: # - request -fill_val = 4294967295 +fill_val = int("4294967295") attrs_exp: dict 
= { "platform": "MET7", @@ -257,6 +256,10 @@ height=2 ) +time_fake_dataset = np.arange(4) * 60 * 60 +time_fake_dataset[0] = fill_val +time_fake_dataset[1] = fill_val +time_fake_dataset = time_fake_dataset.reshape(2, 2) @pytest.fixture(name="fake_dataset") def fixture_fake_dataset(): @@ -266,26 +269,19 @@ def fixture_fake_dataset(): count_vis = da.linspace(0, 255, 16, dtype=np.uint8).reshape(4, 4) sza = da.from_array( np.array( - [[45, 90], - [0, 45]], + [[45, 90], [0, 45]], dtype=np.float32 ) ) mask = da.from_array( np.array( - [[0, 0, 0, 0], - [0, 0, 0, 0], - [0, 0, 1, 0], # 1 = "invalid" - [0, 0, 0, 0]], + [[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 1, 0], [0, 0, 0, 0]], # 1 = "invalid" dtype=np.uint8 ) ) cov = da.from_array([[1, 2], [3, 4]]) - time = np.arange(4) * 60 * 60 - time[0] = fill_val - time[1] = fill_val - time = time.reshape(2, 2) + time = time_fake_dataset ds = xr.Dataset( data_vars={ @@ -664,10 +660,10 @@ def fixture_time_ir_wv(self): coords={"y": [1, 3, 5, 7]} ) - @pytest.fixture(name="acq_time_vis_exp") - def fixture_acq_time_vis_exp(self): + @pytest.fixture(name="acq_time_exp") + def fixture_acq_time_exp(self): """Returns acq_time_vis_exp.""" - return xr.DataArray( + vis = xr.DataArray( [ np.datetime64("1970-01-01 01:30"), np.datetime64("1970-01-01 01:30"), @@ -682,10 +678,7 @@ def fixture_acq_time_vis_exp(self): coords={"y": [1, 2, 3, 4, 5, 6, 7, 8]} ) - @pytest.fixture(name="acq_time_ir_exp") - def fixture_acq_time_ir_exp(self): - """Returns acq_time_ir_exp.""" - return xr.DataArray( + ir = xr.DataArray( [ np.datetime64("1970-01-01 01:30"), np.datetime64("1970-01-01 03:30"), @@ -696,14 +689,12 @@ def fixture_acq_time_ir_exp(self): coords={"y": [1, 3, 5, 7]} ) - @pytest.mark.parametrize( - "acq_time_exp", - [ - lazy_fixture("acq_time_ir_exp"), - lazy_fixture("acq_time_vis_exp") - ] - ) + return vis, ir + def test_interp_acq_time(self, time_ir_wv, acq_time_exp): """Tests time interpolation.""" - res = Interpolator.interp_acq_time(time_ir_wv, 
target_y=acq_time_exp.coords["y"]) - xr.testing.assert_allclose(res, acq_time_exp) + res_vis = Interpolator.interp_acq_time(time_ir_wv, target_y=acq_time_exp[0].coords["y"]) + res_ir = Interpolator.interp_acq_time(time_ir_wv, target_y=acq_time_exp[1].coords["y"]) + + xr.testing.assert_allclose(res_vis, acq_time_exp[0]) + xr.testing.assert_allclose(res_ir, acq_time_exp[1]) From 89d9931fb51afa523f5aac4caec819f711db23ac Mon Sep 17 00:00:00 2001 From: bkremmli Date: Tue, 24 Sep 2024 08:04:29 +0000 Subject: [PATCH 091/340] change fill_val to int64 --- satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py index a3ef65ab07..de6cec9913 100644 --- a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py +++ b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py @@ -44,7 +44,7 @@ # The following fixtures are not defined in this file, but are used and injected by Pytest: # - request -fill_val = int("4294967295") +fill_val = np.int64("4294967295") attrs_exp: dict = { "platform": "MET7", From 7d0ecefbf3b6981643a0f079af9f57e035aef6af Mon Sep 17 00:00:00 2001 From: bkremmli Date: Tue, 24 Sep 2024 08:15:55 +0000 Subject: [PATCH 092/340] include comments, create fixture_time_fake_dataset() --- .../reader_tests/test_mviri_l1b_fiduceo_nc.py | 20 +++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py index de6cec9913..fe03176713 100644 --- a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py +++ b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py @@ -44,7 +44,7 @@ # The following fixtures are not defined in this file, but are used and injected by Pytest: # - request -fill_val = np.int64("4294967295") +fill_val = np.int64("4294967295") # needs to be defined as int64 for 
windows attrs_exp: dict = { "platform": "MET7", @@ -256,13 +256,18 @@ height=2 ) -time_fake_dataset = np.arange(4) * 60 * 60 -time_fake_dataset[0] = fill_val -time_fake_dataset[1] = fill_val -time_fake_dataset = time_fake_dataset.reshape(2, 2) +@pytest.fixture(name="time_fake_dataset") +def fixture_time_fake_dataset(): + """Create time for fake dataset.""" + time = np.arange(4) * 60 * 60 + time[0] = fill_val + time[1] = fill_val + time = time.reshape(2, 2) + + return time @pytest.fixture(name="fake_dataset") -def fixture_fake_dataset(): +def fixture_fake_dataset(time_fake_dataset): """Create fake dataset.""" count_ir = da.linspace(0, 255, 4, dtype=np.uint8).reshape(2, 2) count_wv = da.linspace(0, 255, 4, dtype=np.uint8).reshape(2, 2) @@ -281,7 +286,6 @@ def fixture_fake_dataset(): ) cov = da.from_array([[1, 2], [3, 4]]) - time = time_fake_dataset ds = xr.Dataset( data_vars={ @@ -293,7 +297,7 @@ def fixture_fake_dataset(): "u_structured_toa_bidirectional_reflectance": u_vis_refl_exp / 100, "quality_pixel_bitmask": (("y", "x"), mask), "solar_zenith_angle": (("y_tie", "x_tie"), sza), - "time_ir_wv": (("y_ir_wv", "x_ir_wv"), time), + "time_ir_wv": (("y_ir_wv", "x_ir_wv"), time_fake_dataset), "a_ir": -5.0, "b_ir": 1.0, "bt_a_ir": 10.0, From 11cf080f0dc4bcaf26ea397ca10bcac4a9e0c5ac Mon Sep 17 00:00:00 2001 From: bkremmli Date: Tue, 24 Sep 2024 09:57:04 +0000 Subject: [PATCH 093/340] adapt fill_val definition --- satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py index fe03176713..443996bd67 100644 --- a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py +++ b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py @@ -20,6 +20,7 @@ from __future__ import annotations import os +import sys from unittest import mock import dask.array as da @@ -44,7 +45,7 @@ # The following fixtures are not 
defined in this file, but are used and injected by Pytest: # - request -fill_val = np.int64("4294967295") # needs to be defined as int64 for windows +fill_val = min(sys.maxsize, 4294967295) # FillValue defined to be windows-compatible attrs_exp: dict = { "platform": "MET7", From 03ec1feacfcd1c631991ed741b05b2390b49f85d Mon Sep 17 00:00:00 2001 From: bkremmli Date: Tue, 24 Sep 2024 12:13:23 +0000 Subject: [PATCH 094/340] adapt fill_val definition --- satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py index 443996bd67..f18cde08c8 100644 --- a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py +++ b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py @@ -20,7 +20,6 @@ from __future__ import annotations import os -import sys from unittest import mock import dask.array as da @@ -45,7 +44,7 @@ # The following fixtures are not defined in this file, but are used and injected by Pytest: # - request -fill_val = min(sys.maxsize, 4294967295) # FillValue defined to be windows-compatible +fill_val = np.uint32(4294967295) # FillValue defined to be windows-compatible attrs_exp: dict = { "platform": "MET7", From b86f4b63926f9fbd166bf696c9c9f0aed4c762f2 Mon Sep 17 00:00:00 2001 From: verduijn Date: Wed, 25 Sep 2024 10:18:57 +0200 Subject: [PATCH 095/340] Fix wrong return type in 'check_satpy' comment --- satpy/utils.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/utils.py b/satpy/utils.py index 77645a476a..b7a5c28376 100644 --- a/satpy/utils.py +++ b/satpy/utils.py @@ -498,8 +498,8 @@ def check_satpy(readers=None, writers=None, extras=None): writers (list or None): Limit writers checked to those specified extras (list or None): Limit extras checked to those specified - Returns: bool - True if all specified features were successfully loaded. 
+ Returns: + None """ from satpy.readers import configs_for_reader From bcae3f742eee4ea75b09aa90069a8cb0d6aad605 Mon Sep 17 00:00:00 2001 From: bkremmli Date: Wed, 25 Sep 2024 13:58:40 +0000 Subject: [PATCH 096/340] lower fill_val for tests for windows compatibility --- satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py index f18cde08c8..db3af4a0d6 100644 --- a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py +++ b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py @@ -44,7 +44,7 @@ # The following fixtures are not defined in this file, but are used and injected by Pytest: # - request -fill_val = np.uint32(4294967295) # FillValue defined to be windows-compatible +fill_val = np.uint32(429496729) # FillValue lower than in dataset to be windows-compatible attrs_exp: dict = { "platform": "MET7", From 7421103a7cc6fe6bc0fbf77115ba1a7d2ceaf8ea Mon Sep 17 00:00:00 2001 From: David Hoese Date: Wed, 25 Sep 2024 13:22:49 -0500 Subject: [PATCH 097/340] Remove deprecated usage of pyspectral's download_luts aerosol_type --- benchmarks/abi_l1b_benchmarks.py | 2 +- benchmarks/ahi_hsd_benchmarks.py | 2 +- benchmarks/seviri_hrit_benchmarks.py | 2 +- benchmarks/viirs_sdr_benchmarks.py | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/benchmarks/abi_l1b_benchmarks.py b/benchmarks/abi_l1b_benchmarks.py index 936e0dc514..574226a737 100644 --- a/benchmarks/abi_l1b_benchmarks.py +++ b/benchmarks/abi_l1b_benchmarks.py @@ -43,7 +43,7 @@ def setup_cache(self): if len(get_filenames(self.subdir)) != 16: raise RuntimeError("Existing data files do not match the expected number of files.") download_rsr() - download_luts(aerosol_type="rayleigh_only") + download_luts(aerosol_types=["rayleigh_only"]) def setup(self): """Set up the benchmarks.""" diff --git a/benchmarks/ahi_hsd_benchmarks.py 
b/benchmarks/ahi_hsd_benchmarks.py index 361934168a..0a2dc65496 100644 --- a/benchmarks/ahi_hsd_benchmarks.py +++ b/benchmarks/ahi_hsd_benchmarks.py @@ -43,7 +43,7 @@ def setup_cache(self): except ImportError: assert len(get_filenames(self.subdir)) == 4 # nosec download_rsr() - download_luts(aerosol_type="rayleigh_only") + download_luts(aerosol_types=["rayleigh_only"]) def setup(self): """Set up the benchmarks.""" diff --git a/benchmarks/seviri_hrit_benchmarks.py b/benchmarks/seviri_hrit_benchmarks.py index 9851dbdac9..0d14320b48 100644 --- a/benchmarks/seviri_hrit_benchmarks.py +++ b/benchmarks/seviri_hrit_benchmarks.py @@ -43,7 +43,7 @@ def setup_cache(self): except ImportError: assert len(get_filenames(self.subdir)) == 114 # nosec download_rsr() - download_luts(aerosol_type="rayleigh_only") + download_luts(aerosol_types=["rayleigh_only"]) def setup(self): """Set up the benchmarks.""" diff --git a/benchmarks/viirs_sdr_benchmarks.py b/benchmarks/viirs_sdr_benchmarks.py index 68db5c6682..a98e02fa97 100644 --- a/benchmarks/viirs_sdr_benchmarks.py +++ b/benchmarks/viirs_sdr_benchmarks.py @@ -42,7 +42,7 @@ def setup_cache(self): except ImportError: assert len(self.get_filenames()) == 6 * 3 # nosec download_rsr() - download_luts(aerosol_type="rayleigh_only") + download_luts(aerosol_types=["rayleigh_only"]) def setup(self, name): """Set up the benchmarks.""" From fd5749d8992e0f1c25f50436189f3128bec2f08a Mon Sep 17 00:00:00 2001 From: verduijn Date: Fri, 27 Sep 2024 21:07:46 +0200 Subject: [PATCH 098/340] Add show_versions to utils and use in check_satpy --- satpy/tests/test_utils.py | 18 +++++---- satpy/utils.py | 80 +++++++++++++++++++++++++++++---------- 2 files changed, 72 insertions(+), 26 deletions(-) diff --git a/satpy/tests/test_utils.py b/satpy/tests/test_utils.py index 61255c8006..455971babd 100644 --- a/satpy/tests/test_utils.py +++ b/satpy/tests/test_utils.py @@ -278,13 +278,17 @@ def test_specific_check_satpy(self): """Test 'check_satpy' with specific 
features provided.""" from satpy.utils import check_satpy with mock.patch("satpy.utils.print") as print_mock: - check_satpy(readers=["viirs_sdr"], extras=("cartopy", "__fake")) - checked_fake = False - for call in print_mock.mock_calls: - if len(call[1]) > 0 and "__fake" in call[1][0]: - assert "ok" not in call[1][1] - checked_fake = True - assert checked_fake, "Did not find __fake module mentioned in checks" + check_satpy(readers=["viirs_sdr"], packages=("cartopy", "__fake")) + checked_fake = any("__fake: not installed" in c[1] for c in print_mock.mock_calls if len(c[1])) + assert checked_fake, "Did not find __fake package mentioned in checks" + +class TestShowVersions: + """Test the 'show_versions' function.""" + + def test_basic_show_versions(self): + """Test 'check_satpy' basic functionality.""" + from satpy.utils import show_versions + show_versions() def test_debug_on(caplog): diff --git a/satpy/utils.py b/satpy/utils.py index b7a5c28376..64c3be54b8 100644 --- a/satpy/utils.py +++ b/satpy/utils.py @@ -20,9 +20,11 @@ import contextlib import datetime +import importlib.metadata import logging import os import pathlib +import platform import warnings from contextlib import contextmanager from copy import deepcopy @@ -476,27 +478,72 @@ def _check_yaml_configs(configs, key): pass return diagnostic +def _check_package_version(package_name: str) -> Optional[str]: + """Check the version of `package_name`. -def _check_import(module_names): - """Import the specified modules and provide status.""" - diagnostics = {} - for module_name in module_names: - try: - __import__(module_name) - res = "ok" - except ImportError as err: - res = str(err) - diagnostics[module_name] = res - return diagnostics + Args: + package_name (str): the distribution package name. + + Returns: + the version number if available else `None`. 
+ """ + try: + return importlib.metadata.version(package_name) + except importlib.metadata.PackageNotFoundError: + return None +def show_versions(packages=None): + """Shows version for system, python and common packages (if installed). -def check_satpy(readers=None, writers=None, extras=None): + Args: + packages (list or None): Limit packages to those specified. + + Returns: + None. + + """ + packages = ( + ( + "cartopy", + "geoviews", + "numpy", + "dask", + "xarray", + "gdal", + "rasterio", + "pyproj", + "netcdf4", + "h5py", + "pyhdf", + "h5netcdf", + "fsspec", + ) + if packages is None + else packages + ) + + print("Versions") # noqa: T201 + print("======") # noqa: T201 + print(f"platform: {platform.platform()}") # noqa: T201 + print(f"python: {platform.python_version()}") # noqa: T201 + print() # noqa: T201 + + for package_name in sorted(packages): + package_version = _check_package_version(package_name) + print( # noqa: T201 + f"{package_name}: {package_version if package_version else 'not installed'}" + ) + + print() # noqa: T201 + + +def check_satpy(readers=None, writers=None, packages=None): """Check the satpy readers and writers for correct installation. 
Args: readers (list or None): Limit readers checked to those specified writers (list or None): Limit writers checked to those specified - extras (list or None): Limit extras checked to those specified + packages (list or None): Limit packages checked to those specified Returns: None @@ -517,12 +564,7 @@ def check_satpy(readers=None, writers=None, extras=None): print(writer + ": ", res) # noqa: T201 print() # noqa: T201 - print("Extras") # noqa: T201 - print("======") # noqa: T201 - module_names = extras if extras is not None else ("cartopy", "geoviews") - for module_name, res in sorted(_check_import(module_names).items()): - print(module_name + ": ", res) # noqa: T201 - print() # noqa: T201 + show_versions(packages=packages) def unify_chunks(*data_arrays: xr.DataArray) -> tuple[xr.DataArray, ...]: From d8bae6048c042cdf1563c896f20351b0a42c09e4 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Mon, 30 Sep 2024 14:23:11 +0000 Subject: [PATCH 099/340] Improve SEVIRI metadata documentation --- satpy/readers/seviri_base.py | 22 +++++++++++++++++----- 1 file changed, 17 insertions(+), 5 deletions(-) diff --git a/satpy/readers/seviri_base.py b/satpy/readers/seviri_base.py index ace63e3f12..1a98dda098 100644 --- a/satpy/readers/seviri_base.py +++ b/satpy/readers/seviri_base.py @@ -153,9 +153,21 @@ scn['IR_108']['y'] = mi scn['IR_108'].sel(time=np.datetime64('2019-03-01T12:06:13.052000000')) -* Raw metadata from the file header can be included by setting the reader - argument ``include_raw_metadata=True`` (HRIT and Native format only). Note - that this comes with a performance penalty of up to 10% if raw metadata from +* HRIT and Native readers can add raw metadata from the file header, such + as calibration coefficients, to dataset attributes. Use the reader keyword + argument ``include_raw_metadata``. Here's an example for extracting + calibration coefficients from Native files. + + .. 
code-block:: python + + scene = satpy.Scene(filenames, + reader='seviri_l1b_native', + reader_kwargs={'include_raw_metadata': True}) + scene.load(["IR_108"]) + mda = scene["IR_108"].attrs["raw_metadata"] + coefs = mda["15_DATA_HEADER"]["RadiometricProcessing"]["Level15ImageCalibration"] + + Note that this comes with a performance penalty of up to 10% if raw metadata from multiple segments or scans need to be combined. By default, arrays with more than 100 elements are excluded to limit the performance penalty. This threshold can be adjusted using the ``mda_max_array_size`` reader keyword @@ -164,8 +176,8 @@ .. code-block:: python scene = satpy.Scene(filenames, - reader='seviri_l1b_hrit/native', - reader_kwargs={'include_raw_metadata': True, + reader='seviri_l1b_native', + reader_kwargs={'include_raw_metadata': True, 'mda_max_array_size': 1000}) References: From 3d15b3feb232b7317bf69ff20024f32814f6209d Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Mon, 30 Sep 2024 14:36:34 +0000 Subject: [PATCH 100/340] Fall back to conda --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index c9f5aa1f73..8bd9f63eaf 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -41,7 +41,7 @@ jobs: miniforge-version: latest python-version: ${{ matrix.python-version }} activate-environment: test-environment - mamba-version: "*" + # mamba-version: "*" channels: conda-forge - name: Set cache environment variables From 6a4f2afab9d965c1bdc9825c83e1cd98e464748d Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Mon, 30 Sep 2024 14:46:00 +0000 Subject: [PATCH 101/340] Pin mamba version --- .github/workflows/ci.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 8bd9f63eaf..2cc2948a16 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -41,7 +41,7 @@ jobs: 
miniforge-version: latest python-version: ${{ matrix.python-version }} activate-environment: test-environment - # mamba-version: "*" + mamba-version: "1.5.10" channels: conda-forge - name: Set cache environment variables From 0ad7c32a9a1597300f9deb2903e83a33fd2c2f5e Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 30 Sep 2024 20:35:30 -0500 Subject: [PATCH 102/340] Fix deprecated "compositor" usage in modifier definitions --- satpy/etc/composites/sgli.yaml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/satpy/etc/composites/sgli.yaml b/satpy/etc/composites/sgli.yaml index 451c60d8e6..d5d46114a4 100644 --- a/satpy/etc/composites/sgli.yaml +++ b/satpy/etc/composites/sgli.yaml @@ -4,7 +4,7 @@ sensor_name: visir/sgli modifiers: rayleigh_corrected: - compositor: !!python/name:satpy.modifiers.PSPRayleighReflectance + modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: rayleigh_only prerequisites: @@ -17,7 +17,7 @@ modifiers: - solar_zenith_angle rayleigh_corrected_marine_clean: - compositor: !!python/name:satpy.modifiers.PSPRayleighReflectance + modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: marine_clean_aerosol prerequisites: @@ -30,7 +30,7 @@ modifiers: - solar_zenith_angle rayleigh_corrected_marine_tropical: - compositor: !!python/name:satpy.modifiers.PSPRayleighReflectance + modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: tropical aerosol_type: marine_tropical_aerosol prerequisites: @@ -43,7 +43,7 @@ modifiers: - solar_zenith_angle rayleigh_corrected_desert: - compositor: !!python/name:satpy.modifiers.PSPRayleighReflectance + modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: tropical aerosol_type: desert_aerosol prerequisites: @@ -56,7 +56,7 @@ modifiers: - solar_zenith_angle rayleigh_corrected_land: - compositor: !!python/name:satpy.modifiers.PSPRayleighReflectance + modifier: 
!!python/name:satpy.modifiers.PSPRayleighReflectance atmosphere: us-standard aerosol_type: continental_average_aerosol prerequisites: From 38c8e9be8835ddf3259ee388118b2d628202cb51 Mon Sep 17 00:00:00 2001 From: bkremmli Date: Tue, 1 Oct 2024 08:52:01 +0000 Subject: [PATCH 103/340] add test for get_projection_longitude, test original fill_val --- satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py index db3af4a0d6..db30414d71 100644 --- a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py +++ b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py @@ -44,7 +44,7 @@ # The following fixtures are not defined in this file, but are used and injected by Pytest: # - request -fill_val = np.uint32(429496729) # FillValue lower than in dataset to be windows-compatible +fill_val = np.uint32(4294967295) # FillValue lower than in dataset to be windows-compatible attrs_exp: dict = { "platform": "MET7", @@ -345,7 +345,7 @@ def fixture_fake_dataset(time_fake_dataset): params=[FiduceoMviriEasyFcdrFileHandler, FiduceoMviriFullFcdrFileHandler] ) -def fixture_file_handler(fake_dataset, request): +def fixture_file_handler(fake_dataset, request, projection_longitude="57.0"): """Create mocked file handler.""" marker = request.node.get_closest_marker("file_handler_data") mask_bad_quality = True @@ -358,7 +358,7 @@ def fixture_file_handler(fake_dataset, request): filename="filename", filename_info={"platform": "MET7", "sensor": "MVIRI", - "projection_longitude": "57.0"}, + "projection_longitude": projection_longitude}, filetype_info={"foo": "bar"}, mask_bad_quality=mask_bad_quality ) @@ -379,7 +379,9 @@ def fixture_reader(): class TestFiduceoMviriFileHandlers: """Unit tests for FIDUCEO MVIRI file handlers.""" - def test_init(self, file_handler): + + @pytest.mark.parametrize("projection_longitude", 
["57.0", "5700"]) + def test_init(self, file_handler, projection_longitude): """Test file handler initialization.""" assert file_handler.projection_longitude == 57.0 assert file_handler.mask_bad_quality is True From d88d3d3fed70c7c1c8fb03fc9b4d1ffe30fbc1e5 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Tue, 1 Oct 2024 09:45:46 +0000 Subject: [PATCH 104/340] Fix test miss --- satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py index db30414d71..326a01963e 100644 --- a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py +++ b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py @@ -340,12 +340,18 @@ def fixture_fake_dataset(time_fake_dataset): return ds +@pytest.fixture(name="projection_longitude", params=["57.0"]) +def fixture_projection_longitude(request): + """Get projection longitude as string.""" + return request.param + + @pytest.fixture( name="file_handler", params=[FiduceoMviriEasyFcdrFileHandler, FiduceoMviriFullFcdrFileHandler] ) -def fixture_file_handler(fake_dataset, request, projection_longitude="57.0"): +def fixture_file_handler(fake_dataset, request, projection_longitude): """Create mocked file handler.""" marker = request.node.get_closest_marker("file_handler_data") mask_bad_quality = True @@ -380,7 +386,7 @@ class TestFiduceoMviriFileHandlers: """Unit tests for FIDUCEO MVIRI file handlers.""" - @pytest.mark.parametrize("projection_longitude", ["57.0", "5700"]) + @pytest.mark.parametrize("projection_longitude", ["57.0", "5700"], indirect=True) def test_init(self, file_handler, projection_longitude): """Test file handler initialization.""" assert file_handler.projection_longitude == 57.0 From 484b723235e317322d550d4e97867ecd0430f2b4 Mon Sep 17 00:00:00 2001 From: bkremmli Date: Tue, 1 Oct 2024 09:49:29 +0000 Subject: [PATCH 105/340] revert to smaller 
fill_val --- satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py index 326a01963e..ffc3e980e2 100644 --- a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py +++ b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py @@ -44,7 +44,7 @@ # The following fixtures are not defined in this file, but are used and injected by Pytest: # - request -fill_val = np.uint32(4294967295) # FillValue lower than in dataset to be windows-compatible +fill_val = np.uint32(429496729) # FillValue lower than in dataset to be windows-compatible attrs_exp: dict = { "platform": "MET7", From 3aa390b0ce60605cfba8a3da4c123f410329434f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 1 Oct 2024 10:17:12 +0000 Subject: [PATCH 106/340] Bump pypa/gh-action-pypi-publish from 1.10.0 to 1.10.2 Bumps [pypa/gh-action-pypi-publish](https://github.com/pypa/gh-action-pypi-publish) from 1.10.0 to 1.10.2. - [Release notes](https://github.com/pypa/gh-action-pypi-publish/releases) - [Commits](https://github.com/pypa/gh-action-pypi-publish/compare/v1.10.0...v1.10.2) --- updated-dependencies: - dependency-name: pypa/gh-action-pypi-publish dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] --- .github/workflows/deploy-sdist.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/deploy-sdist.yaml b/.github/workflows/deploy-sdist.yaml index 130b3a43b8..12042f4f36 100644 --- a/.github/workflows/deploy-sdist.yaml +++ b/.github/workflows/deploy-sdist.yaml @@ -23,7 +23,7 @@ jobs: - name: Publish package to PyPI if: github.event.action == 'published' - uses: pypa/gh-action-pypi-publish@v1.10.0 + uses: pypa/gh-action-pypi-publish@v1.10.2 with: user: __token__ password: ${{ secrets.pypi_password }} From dea6a67946f29a1622f535e42d6f4fb7d7fa0b94 Mon Sep 17 00:00:00 2001 From: "Adam.Dybbroe" Date: Tue, 1 Oct 2024 20:09:04 +0200 Subject: [PATCH 107/340] Fixed for latest real-data (not yet final format) Signed-off-by: Adam.Dybbroe --- pyproject.toml | 1 + satpy/readers/aws_l1b.py | 13 ++++++++++++- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 196ae6a462..679a01a625 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -64,6 +64,7 @@ hsaf_grib = ["pygrib"] remote_reading = ["fsspec"] insat_3d = ["xarray-datatree"] gms5-vissr_l1b = ["numba"] +aws_l1b = ["xarray-datatree"] # Writers: cf = ["h5netcdf >= 0.7.3"] awips_tiled = ["netCDF4 >= 1.1.8"] diff --git a/satpy/readers/aws_l1b.py b/satpy/readers/aws_l1b.py index b23dd4119b..cb421d4482 100644 --- a/satpy/readers/aws_l1b.py +++ b/satpy/readers/aws_l1b.py @@ -1,4 +1,4 @@ -# Copyright (c) 2023 Pytroll Developers +# Copyright (c) 2023, 2024 Pytroll Developers # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -67,6 +67,16 @@ def platform_name(self): """Get the platform name.""" return self.filename_info["platform_name"] + @property + def orbit_start(self): + """Get the orbit number for the start of data.""" + return int(self["/attr/orbit_start"]) + + @property + def orbit_end(self): + """Get the orbit number for 
the end of data.""" + return int(self["/attr/orbit_end"]) + @property def sub_satellite_longitude_start(self): """Get the longitude of sub-satellite point at start of the product.""" @@ -111,6 +121,7 @@ def get_dataset(self, dataset_id, dataset_info): data_array.attrs["platform_name"] = self.platform_name data_array.attrs["sensor"] = self.sensor + data_array.attrs["orbit_number"] = self.orbit_start return data_array def _get_channel_data(self, dataset_id, dataset_info): From 70bbc862edf29d02ef4ec11248e7e15786c5af7d Mon Sep 17 00:00:00 2001 From: "Adam.Dybbroe" Date: Tue, 1 Oct 2024 20:12:03 +0200 Subject: [PATCH 108/340] Changing the humidity-surface RGB to consider the lowest peaking absorption channel for the blue band rather than highest Signed-off-by: Adam.Dybbroe --- satpy/etc/composites/atms.yaml | 2 +- satpy/etc/composites/aws.yaml | 18 +++++++++++++++++- satpy/etc/enhancements/aws.yaml | 28 ++++++++++++++++++++++++++++ satpy/etc/readers/atms_sdr_hdf5.yaml | 2 +- 4 files changed, 47 insertions(+), 3 deletions(-) diff --git a/satpy/etc/composites/atms.yaml b/satpy/etc/composites/atms.yaml index 27afd5d2b8..624f0bc93b 100644 --- a/satpy/etc/composites/atms.yaml +++ b/satpy/etc/composites/atms.yaml @@ -14,5 +14,5 @@ composites: prerequisites: - name: '16' - name: '17' - - name: '22' + - name: '18' standard_name: mw183_humidity_surface diff --git a/satpy/etc/composites/aws.yaml b/satpy/etc/composites/aws.yaml index 77d2014794..55af749d89 100644 --- a/satpy/etc/composites/aws.yaml +++ b/satpy/etc/composites/aws.yaml @@ -14,5 +14,21 @@ composites: prerequisites: - name: '9' - name: '10' - - name: '15' + - name: '12' standard_name: mw183_humidity_surface + + mw325_humidity_surface: + compositor: !!python/name:satpy.composites.RGBCompositor + prerequisites: + - name: '9' + - name: '10' + - name: '19' + standard_name: mw325_humidity_surface + + mw325_humidity: + compositor: !!python/name:satpy.composites.RGBCompositor + prerequisites: + - name: '16' + - name: '18' + 
- name: '19' + standard_name: mw325_humidity diff --git a/satpy/etc/enhancements/aws.yaml b/satpy/etc/enhancements/aws.yaml index c997a11350..4d79b0ae11 100644 --- a/satpy/etc/enhancements/aws.yaml +++ b/satpy/etc/enhancements/aws.yaml @@ -27,3 +27,31 @@ enhancements: - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: {gamma: 1.2} + + mw325_humidity: + standard_name: mw325_humidity + operations: + - name: inverse + method: !!python/name:satpy.enhancements.invert + args: + - [true, true, true] + - name: stretch + method: !!python/name:satpy.enhancements.stretch + kwargs: {stretch: linear} + - name: gamma + method: !!python/name:satpy.enhancements.gamma + kwargs: {gamma: 1.2} + + mw325_humidity_surface: + standard_name: mw325_humidity_surface + operations: + - name: inverse + method: !!python/name:satpy.enhancements.invert + args: + - [true, true, true] + - name: stretch + method: !!python/name:satpy.enhancements.stretch + kwargs: {stretch: linear} + - name: gamma + method: !!python/name:satpy.enhancements.gamma + kwargs: {gamma: 1.2} diff --git a/satpy/etc/readers/atms_sdr_hdf5.yaml b/satpy/etc/readers/atms_sdr_hdf5.yaml index fa8e4105ce..6ad1a1e0a9 100644 --- a/satpy/etc/readers/atms_sdr_hdf5.yaml +++ b/satpy/etc/readers/atms_sdr_hdf5.yaml @@ -33,7 +33,7 @@ reader: file_types: atms_sdr_hdf5: file_reader: !!python/name:satpy.readers.atms_sdr_hdf5.ATMS_SDR_FileHandler - file_patterns: ['SATMS_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time:%Y%m%d%H%M%S%f}_{source}.h5', 'GATMO_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time:%Y%m%d%H%M%S%f}_{source}.h5'] + file_patterns: ['SATMS_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time:%Y%m%d%H%M%S%f}_{source}.h5', 
'GATMO_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time:%Y%m%d%H%M%S%f}_{source}.h5','GATMO-SATMS_{platform_shortname}_d{start_time:%Y%m%d_t%H%M%S%f}_e{end_time:%H%M%S%f}_b{orbit:5d}_c{creation_time:%Y%m%d%H%M%S%f}_{source}.h5'] # Example filenames # GATMO_j01_d20221220_t0910240_e0921356_b26361_c20221220100456680030_cspp_dev.h5 # SATMS_j01_d20221220_t0910240_e0921356_b26361_c20221220100456348770_cspp_dev.h5 From 9fb59bd5ce652238e3e4c0986ef5f7c651e292b1 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Wed, 2 Oct 2024 06:29:58 +0000 Subject: [PATCH 109/340] Use plain miniconda in CI --- .github/workflows/ci.yaml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 2cc2948a16..f74cc830ad 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -41,7 +41,6 @@ jobs: miniforge-version: latest python-version: ${{ matrix.python-version }} activate-environment: test-environment - mamba-version: "1.5.10" channels: conda-forge - name: Set cache environment variables From 78324b1f3e51ffcacde5b506182d108ce2b726ca Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 2 Oct 2024 10:25:51 +0200 Subject: [PATCH 110/340] Update MSI SAFE reader with more tests and some code streamlining for error messages. 
--- satpy/readers/msi_safe.py | 19 ++++----- satpy/tests/reader_tests/test_msi_safe.py | 49 ++++++++++++++++++++++- 2 files changed, 58 insertions(+), 10 deletions(-) diff --git a/satpy/readers/msi_safe.py b/satpy/readers/msi_safe.py index 3c75169744..fa747e14d8 100644 --- a/satpy/readers/msi_safe.py +++ b/satpy/readers/msi_safe.py @@ -109,9 +109,9 @@ def _read_from_file(self, key): zen = self._tile_mda.get_dataset(dq, dict(xml_tag="Sun_Angles_Grid/Zenith")) tmp_refl = self._mda.calibrate_to_reflectances(proj, self._channel) return self._mda.calibrate_to_radiances(tmp_refl, zen, self._channel) - #else: + else: # For L1B the radiances can be directly computed from the digital counts. - #return self._mda.calibrate_to_radiances_l1b(proj, self._channel) + return self._mda.calibrate_to_radiances_l1b(proj, self._channel) if key["calibration"] == "counts": @@ -231,19 +231,22 @@ def solar_irradiance(self, band_name): def solar_irradiances(self): """Get the TOA solar irradiance values from the metadata.""" irrads = self.root.find(".//Solar_Irradiance_List") + if irrads is not None: solar_irrad = {int(irr.attrib["bandId"]): float(irr.text) for irr in irrads} - else: - solar_irrad = {} - return solar_irrad + if len(solar_irrad) > 0: + return solar_irrad + raise ValueError("No solar irradiance values were found in the metadata.") + + @cached_property def sun_earth_dist(self): """Get the sun-earth distance from the metadata.""" sed = self.root.find(".//U") - if sed is not None: + if sed.text is not None: return float(sed.text) - return -1 + raise ValueError("Sun-Earth distance in metadata is missing.") @cached_property def special_values(self): @@ -271,8 +274,6 @@ def calibrate_to_radiances_l1b(self, data, band_name): def calibrate_to_radiances(self, data, solar_zenith, band_name): """Calibrate *data* to radiance using the radiometric information for the metadata.""" sed = self.sun_earth_dist - if sed < 0.5 or sed > 1.5: - raise ValueError(f"Sun-Earth distance is incorrect in 
the metadata: {sed}") solar_irrad_band = self.solar_irradiance(band_name) solar_zenith = np.deg2rad(solar_zenith) diff --git a/satpy/tests/reader_tests/test_msi_safe.py b/satpy/tests/reader_tests/test_msi_safe.py index 4970767ea8..1f2e603ee2 100644 --- a/satpy/tests/reader_tests/test_msi_safe.py +++ b/satpy/tests/reader_tests/test_msi_safe.py @@ -1648,7 +1648,8 @@ class TestSAFEMSIL1C: def setup_method(self): """Set up the test.""" self.fake_data = xr.Dataset({"band_data": xr.DataArray([[[0, 1], [65534, 65535]]], dims=["band", "x", "y"])}) - self.fake_data = xr.Dataset({"band_data": xr.DataArray([[[0, 1], [65534, 65535]]], dims=["band", "x", "y"])}) + self.fake_data_l1b = xr.Dataset({"band_data": xr.DataArray([[[1000, 1205.5], [3000.4, 2542.]]], + dims=["band", "x", "y"])}) @pytest.mark.parametrize(("process_level", "mask_saturated", "dataset_name", "calibration", "expected"), [ @@ -1697,3 +1698,49 @@ def test_l1b_error(self): """We can't process L1B data yet, so check an error is raised.""" with pytest.raises(ValueError, match="Unsupported process level: L1B"): jp2_builder("L1C", "B01", test_l1b=True) + + + @pytest.mark.parametrize(("st_str", "en_str", "err_str"), + [ + ("", + "", + "Sun-Earth distance in metadata is missing."), + ("", + "", + "No solar irradiance values were found in the metadata."), + ]) + def test_missing_esd(self, st_str, en_str, err_str): + """Test that missing Earth-Sun distance in the metadata is handled correctly.""" + from satpy.readers.msi_safe import SAFEMSIMDXML + + tmp_xml = str(mtd_l1c_xml) + p1 = tmp_xml.find(st_str) + p2 = tmp_xml.find(en_str) + tmp_xml = tmp_xml[:p1+len(st_str)] + tmp_xml[p2:] + + filename_info = dict(observation_time=fname_dt, dtile_number=None, + band_name="B01", fmission_id="S2A", process_level="L1C") + + xml_fh = SAFEMSIMDXML(StringIO(tmp_xml), filename_info, mock.MagicMock()) + + if st_str == "": + with pytest.raises(ValueError, match=err_str): + xml_fh.sun_earth_dist + else: + with pytest.raises(ValueError, 
match=err_str): + xml_fh.solar_irradiances + + + def test_l1b_calib(self): + """Test that Level-1B calibration can be performed.""" + from satpy.readers.msi_safe import SAFEMSIMDXML + + filename_info = dict(observation_time=fname_dt, dtile_number=None, + band_name="B01", fmission_id="S2A", process_level="L1C") + + xml_fh = SAFEMSIMDXML(StringIO(mtd_l1c_xml), filename_info, mock.MagicMock()) + + res = xml_fh.calibrate_to_radiances_l1b(self.fake_data_l1b, "B01") + np.testing.assert_allclose(res.band_data.data.ravel(), + np.array((0.0, 51.752319, 503.77294, 388.33127)), + rtol=1e-4) From fb1521acb78abc0aa6ec3b6604f443052fde93b9 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Wed, 2 Oct 2024 11:40:37 +0200 Subject: [PATCH 111/340] Update satpy/readers/msi_safe.py --- satpy/readers/msi_safe.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/satpy/readers/msi_safe.py b/satpy/readers/msi_safe.py index fa747e14d8..ec6a39f084 100644 --- a/satpy/readers/msi_safe.py +++ b/satpy/readers/msi_safe.py @@ -238,8 +238,6 @@ def solar_irradiances(self): return solar_irrad raise ValueError("No solar irradiance values were found in the metadata.") - - @cached_property def sun_earth_dist(self): """Get the sun-earth distance from the metadata.""" From 8cbe7b89e1c899cf7d82ca99083e35db48b47237 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 7 Oct 2024 23:53:12 +0000 Subject: [PATCH 112/340] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.6.3 → v0.6.9](https://github.com/astral-sh/ruff-pre-commit/compare/v0.6.3...v0.6.9) - [github.com/pre-commit/pre-commit-hooks: v4.6.0 → v5.0.0](https://github.com/pre-commit/pre-commit-hooks/compare/v4.6.0...v5.0.0) - [github.com/PyCQA/bandit: 1.7.9 → 1.7.10](https://github.com/PyCQA/bandit/compare/1.7.9...1.7.10) --- .pre-commit-config.yaml | 6 
+++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index a6c4b2b3d2..82163f8b60 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -3,18 +3,18 @@ fail_fast: false repos: - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. - rev: 'v0.6.3' + rev: 'v0.6.9' hooks: - id: ruff - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.6.0 + rev: v5.0.0 hooks: - id: trailing-whitespace - id: end-of-file-fixer - id: check-yaml args: [--unsafe] - repo: https://github.com/PyCQA/bandit - rev: '1.7.9' # Update me! + rev: '1.7.10' # Update me! hooks: - id: bandit args: [--ini, .bandit] From 3a93ea97616846541d8211d35f7489157d08a52f Mon Sep 17 00:00:00 2001 From: verduijn Date: Tue, 8 Oct 2024 08:54:49 +0200 Subject: [PATCH 113/340] Apply code formatting suggestions from code review Co-authored-by: Panu Lahtinen --- satpy/tests/test_utils.py | 1 + satpy/utils.py | 1 + 2 files changed, 2 insertions(+) diff --git a/satpy/tests/test_utils.py b/satpy/tests/test_utils.py index 455971babd..74b5a3ecfd 100644 --- a/satpy/tests/test_utils.py +++ b/satpy/tests/test_utils.py @@ -282,6 +282,7 @@ def test_specific_check_satpy(self): checked_fake = any("__fake: not installed" in c[1] for c in print_mock.mock_calls if len(c[1])) assert checked_fake, "Did not find __fake package mentioned in checks" + class TestShowVersions: """Test the 'show_versions' function.""" diff --git a/satpy/utils.py b/satpy/utils.py index 64c3be54b8..f4d456d4f6 100644 --- a/satpy/utils.py +++ b/satpy/utils.py @@ -492,6 +492,7 @@ def _check_package_version(package_name: str) -> Optional[str]: except importlib.metadata.PackageNotFoundError: return None + def show_versions(packages=None): """Shows version for system, python and common packages (if installed). 
From f53c6f191443a904922646176f1e4f159455712c Mon Sep 17 00:00:00 2001 From: verduijn Date: Tue, 8 Oct 2024 11:10:08 +0200 Subject: [PATCH 114/340] Add tests for `show_versions` with installed and missing packages --- satpy/tests/test_utils.py | 31 +++++++++++++++++++++++++++++++ 1 file changed, 31 insertions(+) diff --git a/satpy/tests/test_utils.py b/satpy/tests/test_utils.py index 74b5a3ecfd..a10e285886 100644 --- a/satpy/tests/test_utils.py +++ b/satpy/tests/test_utils.py @@ -291,6 +291,37 @@ def test_basic_show_versions(self): from satpy.utils import show_versions show_versions() + def test_show_specific_version(self): + """Test 'show_version' works with installed package.""" + from satpy.utils import show_versions + with mock.patch("satpy.utils.print") as print_mock: + show_versions(packages=["pytest"]) + + # no regex or `.__version__` based checks to prevent edge case failures + pytest_mentioned = any( + "pytest:" in c[1][0] for c in print_mock.mock_calls if len(c[1]) + ) + pytest_installed = all( + "pytest: not installed" not in c[1][0] + for c in print_mock.mock_calls + if len(c[1]) + ) + check_pytest = pytest_mentioned and pytest_installed + assert check_pytest, "pytest with package version not in print output" + + def test_show_missing_specific_version(self): + """Test 'show_version' works with missing package.""" + from satpy.utils import show_versions + + with mock.patch("satpy.utils.print") as print_mock: + show_versions(packages=["__fake"]) + checked_fake = any( + "__fake: not installed" in c[1] + for c in print_mock.mock_calls + if len(c[1]) + ) + assert checked_fake, "Did not find '__fake: not installed' in print output" + def test_debug_on(caplog): """Test that debug_on is working as expected.""" From 692c9b1506bfc5d0e1f238067a12fbebf0d9d8c9 Mon Sep 17 00:00:00 2001 From: verduijn Date: Tue, 8 Oct 2024 15:49:36 +0200 Subject: [PATCH 115/340] Use capys fixture instead of patching print in tests --- satpy/tests/test_utils.py | 42 
++++++++++++--------------------------- 1 file changed, 13 insertions(+), 29 deletions(-) diff --git a/satpy/tests/test_utils.py b/satpy/tests/test_utils.py index a10e285886..c52006f1be 100644 --- a/satpy/tests/test_utils.py +++ b/satpy/tests/test_utils.py @@ -291,36 +291,25 @@ def test_basic_show_versions(self): from satpy.utils import show_versions show_versions() - def test_show_specific_version(self): + def test_show_specific_version(self, capsys): """Test 'show_version' works with installed package.""" from satpy.utils import show_versions - with mock.patch("satpy.utils.print") as print_mock: - show_versions(packages=["pytest"]) + show_versions(packages=["pytest"]) + out, _ = capsys.readouterr() - # no regex or `.__version__` based checks to prevent edge case failures - pytest_mentioned = any( - "pytest:" in c[1][0] for c in print_mock.mock_calls if len(c[1]) - ) - pytest_installed = all( - "pytest: not installed" not in c[1][0] - for c in print_mock.mock_calls - if len(c[1]) - ) - check_pytest = pytest_mentioned and pytest_installed - assert check_pytest, "pytest with package version not in print output" + pytest_mentioned = "pytest:" in out + pytest_installed = "pytest: not installed" not in out + check_pytest = pytest_mentioned and pytest_installed + assert check_pytest, "pytest with package version not in print output" - def test_show_missing_specific_version(self): + def test_show_missing_specific_version(self, capsys): """Test 'show_version' works with missing package.""" from satpy.utils import show_versions + show_versions(packages=["__fake"]) + out, _ = capsys.readouterr() - with mock.patch("satpy.utils.print") as print_mock: - show_versions(packages=["__fake"]) - checked_fake = any( - "__fake: not installed" in c[1] - for c in print_mock.mock_calls - if len(c[1]) - ) - assert checked_fake, "Did not find '__fake: not installed' in print output" + check_fake = "__fake: not installed" in out + assert check_fake, "Did not find '__fake: not installed' in 
print output" def test_debug_on(caplog): @@ -330,12 +319,7 @@ def test_debug_on(caplog): def depwarn(): logger = logging.getLogger("satpy.silly") logger.debug("But now it's just got SILLY.") - warnings.warn( - "Stop that! It's SILLY.", - DeprecationWarning, - stacklevel=2 - ) - + warnings.warn("Stop that! It's SILLY.", DeprecationWarning, stacklevel=2) warnings.filterwarnings("ignore", category=DeprecationWarning) debug_on(False) filts_before = warnings.filters.copy() From f66621091d478d009368ef066066e4878873c5df Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 10 Oct 2024 09:01:06 +0300 Subject: [PATCH 116/340] Refactor generic image reader tests --- .../tests/reader_tests/test_generic_image.py | 554 ++++++++++-------- 1 file changed, 295 insertions(+), 259 deletions(-) diff --git a/satpy/tests/reader_tests/test_generic_image.py b/satpy/tests/reader_tests/test_generic_image.py index 40d7611eb4..21e2b5b09e 100644 --- a/satpy/tests/reader_tests/test_generic_image.py +++ b/satpy/tests/reader_tests/test_generic_image.py @@ -16,273 +16,309 @@ # satpy. If not, see . 
"""Unittests for generic image reader.""" -import os -import unittest +import datetime as dt import dask.array as da import numpy as np import pytest import xarray as xr +from pyresample.geometry import AreaDefinition +from rasterio.errors import NotGeoreferencedWarning +from satpy import Scene +from satpy.readers.generic_image import GenericImageFileHandler from satpy.tests.utils import RANDOM_GEN, make_dataid +DATA_DATE = dt.datetime(2018, 1, 1) -class TestGenericImage(unittest.TestCase): - """Test generic image reader.""" - - def setUp(self): - """Create temporary images to test on.""" - import datetime as dt - import tempfile - - from pyresample.geometry import AreaDefinition - - from satpy.scene import Scene - - self.date = dt.datetime(2018, 1, 1) - - # Create area definition - pcs_id = "ETRS89 / LAEA Europe" - proj4_dict = "EPSG:3035" - self.x_size = 100 - self.y_size = 100 - area_extent = (2426378.0132, 1528101.2618, 6293974.6215, 5446513.5222) - self.area_def = AreaDefinition("geotiff_area", pcs_id, pcs_id, - proj4_dict, self.x_size, self.y_size, - area_extent) - - # Create datasets for L, LA, RGB and RGBA mode images - r__ = da.random.randint(0, 256, size=(self.y_size, self.x_size), - chunks=(50, 50)).astype(np.uint8) - g__ = da.random.randint(0, 256, size=(self.y_size, self.x_size), - chunks=(50, 50)).astype(np.uint8) - b__ = da.random.randint(0, 256, size=(self.y_size, self.x_size), - chunks=(50, 50)).astype(np.uint8) - a__ = 255 * np.ones((self.y_size, self.x_size), dtype=np.uint8) - a__[:10, :10] = 0 - a__ = da.from_array(a__, chunks=(50, 50)) - - r_nan__ = RANDOM_GEN.uniform(0., 1., size=(self.y_size, self.x_size)) - r_nan__[:10, :10] = np.nan - r_nan__ = da.from_array(r_nan__, chunks=(50, 50)) - - ds_l = xr.DataArray(da.stack([r__]), dims=("bands", "y", "x"), - attrs={"name": "test_l", - "start_time": self.date}) - ds_l["bands"] = ["L"] - ds_la = xr.DataArray(da.stack([r__, a__]), dims=("bands", "y", "x"), - attrs={"name": "test_la", - "start_time": 
self.date}) - ds_la["bands"] = ["L", "A"] - ds_rgb = xr.DataArray(da.stack([r__, g__, b__]), +X_SIZE = 100 +Y_SIZE = 100 +AREA_DEFINITION = AreaDefinition("geotiff_area", "ETRS89 / LAEA Europe", "ETRS89 / LAEA Europe", + "EPSG:3035", X_SIZE, Y_SIZE, + (2426378.0132, 1528101.2618, 6293974.6215, 5446513.5222)) + + +@pytest.fixture +def random_image_channel(): + """Create random data.""" + return da.random.randint(0, 256, size=(Y_SIZE, X_SIZE), chunks=(50, 50)).astype(np.uint8) + + +random_image_channel_l = random_image_channel +random_image_channel_r = random_image_channel +random_image_channel_g = random_image_channel +random_image_channel_b = random_image_channel + + +@pytest.fixture +def alpha_channel(): + """Create alpha channel with fully transparent and opaque areas.""" + a__ = 255 * np.ones((Y_SIZE, X_SIZE), dtype=np.uint8) + a__[:10, :10] = 0 + return da.from_array(a__, chunks=(50, 50)) + + +@pytest.fixture +def random_image_channel_with_nans(): + """Create random data and replace a portion of it with NaN values.""" + arr = RANDOM_GEN.uniform(0., 1., size=(Y_SIZE, X_SIZE)) + arr[:10, :10] = np.nan + return da.from_array(arr, chunks=(50, 50)) + + +@pytest.fixture +def test_image_l(tmp_path, random_image_channel_l): + """Create a test image with mode L.""" + dset = xr.DataArray(da.stack([random_image_channel_l]), dims=("bands", "y", "x"), + attrs={"name": "test_l", "start_time": DATA_DATE}) + dset["bands"] = ["L"] + fname = tmp_path / "test_l.png" + _save_image(dset, fname, "simple_image") + + return fname + + +@pytest.fixture +def test_image_l_nan(tmp_path, random_image_channel_with_nans): + """Create a test image with mode L where data has NaN values.""" + dset = xr.DataArray(da.stack([random_image_channel_with_nans]), dims=("bands", "y", "x"), + attrs={"name": "test_l_nan", "start_time": DATA_DATE}) + dset["bands"] = ["L"] + fname = tmp_path / "test_l_nan_nofillvalue.tif" + _save_image(dset, fname, "geotiff") + + return fname + + +@pytest.fixture +def 
test_image_l_nan_fill_value(tmp_path, random_image_channel_with_nans): + """Create a test image with mode L where data has NaN values and fill value is set.""" + dset = xr.DataArray(da.stack([random_image_channel_with_nans]), dims=("bands", "y", "x"), + attrs={"name": "test_l_nan", "start_time": DATA_DATE}) + dset["bands"] = ["L"] + fname = tmp_path / "test_l_nan_fillvalue.tif" + _save_image(dset, fname, "geotiff", fill_value=0) + + return fname + + +@pytest.fixture +def test_image_la(tmp_path, random_image_channel_l, alpha_channel): + """Create a test image with mode LA.""" + dset = xr.DataArray(da.stack([random_image_channel_l, alpha_channel]), + dims=("bands", "y", "x"), + attrs={"name": "test_la", "start_time": DATA_DATE}) + dset["bands"] = ["L", "A"] + fname = tmp_path / "20180101_0000_test_la.png" + _save_image(dset, fname, "simple_image") + + return fname + + +@pytest.fixture +def test_image_rgb(tmp_path, random_image_channel_r, random_image_channel_g, random_image_channel_b): + """Create a test image with mode RGB.""" + dset = xr.DataArray(da.stack([random_image_channel_r, random_image_channel_g, random_image_channel_b]), dims=("bands", "y", "x"), attrs={"name": "test_rgb", - "start_time": self.date}) - ds_rgb["bands"] = ["R", "G", "B"] - ds_rgba = xr.DataArray(da.stack([r__, g__, b__, a__]), - dims=("bands", "y", "x"), - attrs={"name": "test_rgba", - "start_time": self.date}) - ds_rgba["bands"] = ["R", "G", "B", "A"] - - ds_l_nan = xr.DataArray(da.stack([r_nan__]), - dims=("bands", "y", "x"), - attrs={"name": "test_l_nan", - "start_time": self.date}) - ds_l_nan["bands"] = ["L"] - - # Temp dir for the saved images - self.base_dir = tempfile.mkdtemp() - - # Put the datasets to Scene for easy saving - scn = Scene() - scn["l"] = ds_l - scn["l"].attrs["area"] = self.area_def - scn["la"] = ds_la - scn["la"].attrs["area"] = self.area_def - scn["rgb"] = ds_rgb - scn["rgb"].attrs["area"] = self.area_def - scn["rgba"] = ds_rgba - scn["rgba"].attrs["area"] = 
self.area_def - scn["l_nan"] = ds_l_nan - scn["l_nan"].attrs["area"] = self.area_def - - # Save the images. Two images in PNG and two in GeoTIFF - scn.save_dataset("l", os.path.join(self.base_dir, "test_l.png"), writer="simple_image") - scn.save_dataset("la", os.path.join(self.base_dir, "20180101_0000_test_la.png"), writer="simple_image") - scn.save_dataset("rgb", os.path.join(self.base_dir, "20180101_0000_test_rgb.tif"), writer="geotiff") - scn.save_dataset("rgba", os.path.join(self.base_dir, "test_rgba.tif"), writer="geotiff") - scn.save_dataset("l_nan", os.path.join(self.base_dir, "test_l_nan_fillvalue.tif"), - writer="geotiff", fill_value=0) - scn.save_dataset("l_nan", os.path.join(self.base_dir, "test_l_nan_nofillvalue.tif"), - writer="geotiff") - - self.scn = scn - - def tearDown(self): - """Remove the temporary directory created for a test.""" - try: - import shutil - shutil.rmtree(self.base_dir, ignore_errors=True) - except OSError: - pass - - def test_png_scene(self): - """Test reading PNG images via satpy.Scene().""" - from rasterio.errors import NotGeoreferencedWarning - - from satpy import Scene - - fname = os.path.join(self.base_dir, "test_l.png") - with pytest.warns(NotGeoreferencedWarning, match=r"Dataset has no geotransform"): - scn = Scene(reader="generic_image", filenames=[fname]) - scn.load(["image"]) - assert scn["image"].shape == (1, self.y_size, self.x_size) - assert scn.sensor_names == {"images"} - assert scn.start_time is None - assert scn.end_time is None - assert "area" not in scn["image"].attrs - - fname = os.path.join(self.base_dir, "20180101_0000_test_la.png") - with pytest.warns(NotGeoreferencedWarning, match=r"Dataset has no geotransform"): - scn = Scene(reader="generic_image", filenames=[fname]) - scn.load(["image"]) - data = da.compute(scn["image"].data) - assert scn["image"].shape == (1, self.y_size, self.x_size) - assert scn.sensor_names == {"images"} - assert scn.start_time == self.date - assert scn.end_time == self.date - assert 
"area" not in scn["image"].attrs - assert np.sum(np.isnan(data)) == 100 - - def test_geotiff_scene(self): - """Test reading TIFF images via satpy.Scene().""" - from satpy import Scene - - fname = os.path.join(self.base_dir, "20180101_0000_test_rgb.tif") - scn = Scene(reader="generic_image", filenames=[fname]) - scn.load(["image"]) - assert scn["image"].shape == (3, self.y_size, self.x_size) - assert scn.sensor_names == {"images"} - assert scn.start_time == self.date - assert scn.end_time == self.date - assert scn["image"].area == self.area_def - - fname = os.path.join(self.base_dir, "test_rgba.tif") - scn = Scene(reader="generic_image", filenames=[fname]) - scn.load(["image"]) - assert scn["image"].shape == (3, self.y_size, self.x_size) - assert scn.sensor_names == {"images"} - assert scn.start_time is None - assert scn.end_time is None - assert scn["image"].area == self.area_def - - def test_geotiff_scene_nan(self): - """Test reading TIFF images originally containing NaN values via satpy.Scene().""" - from satpy import Scene - - fname = os.path.join(self.base_dir, "test_l_nan_fillvalue.tif") - scn = Scene(reader="generic_image", filenames=[fname]) - scn.load(["image"]) - assert scn["image"].shape == (1, self.y_size, self.x_size) - assert np.sum(scn["image"].data[0][:10, :10].compute()) == 0 - - fname = os.path.join(self.base_dir, "test_l_nan_nofillvalue.tif") - scn = Scene(reader="generic_image", filenames=[fname]) - scn.load(["image"]) - assert scn["image"].shape == (1, self.y_size, self.x_size) - assert np.all(np.isnan(scn["image"].data[0][:10, :10].compute())) - - def test_GenericImageFileHandler(self): - """Test direct use of the reader.""" - from satpy.readers.generic_image import GenericImageFileHandler - - fname = os.path.join(self.base_dir, "test_rgba.tif") - fname_info = {"start_time": self.date} - ftype_info = {} - reader = GenericImageFileHandler(fname, fname_info, ftype_info) - - foo = make_dataid(name="image") - assert reader.file_content - assert 
reader.finfo["filename"] == fname - assert reader.finfo["start_time"] == self.date - assert reader.finfo["end_time"] == self.date - assert reader.area == self.area_def - assert reader.get_area_def(None) == self.area_def - assert reader.start_time == self.date - assert reader.end_time == self.date - - dataset = reader.get_dataset(foo, {}) - assert isinstance(dataset, xr.DataArray) - assert "spatial_ref" in dataset.coords - assert np.all(np.isnan(dataset.data[:, :10, :10].compute())) - - def test_GenericImageFileHandler_masking_only_integer(self): - """Test direct use of the reader.""" - from satpy.readers.generic_image import GenericImageFileHandler - - class FakeGenericImageFileHandler(GenericImageFileHandler): - - def __init__(self, filename, filename_info, filetype_info, file_content, **kwargs): - """Get fake file content from 'get_test_content'.""" - super(GenericImageFileHandler, self).__init__(filename, filename_info, filetype_info) - self.file_content = file_content - self.dataset_name = None - self.file_content.update(kwargs) - - data = self.scn["rgba"] - - # do nothing if not integer - float_data = data / 255. 
- reader = FakeGenericImageFileHandler("dummy", {}, {}, {"image": float_data}) - assert reader.get_dataset(make_dataid(name="image"), {}) is float_data - - # masking if integer - data = data.astype(np.uint32) - assert data.bands.size == 4 - reader = FakeGenericImageFileHandler("dummy", {}, {}, {"image": data}) - ret_data = reader.get_dataset(make_dataid(name="image"), {}) - assert ret_data.bands.size == 3 - - def test_GenericImageFileHandler_datasetid(self): - """Test direct use of the reader.""" - from satpy.readers.generic_image import GenericImageFileHandler - - fname = os.path.join(self.base_dir, "test_rgba.tif") - fname_info = {"start_time": self.date} - ftype_info = {} - reader = GenericImageFileHandler(fname, fname_info, ftype_info) - - foo = make_dataid(name="image-custom") - assert reader.file_content - dataset = reader.get_dataset(foo, {}) - assert isinstance(dataset, xr.DataArray) - - def test_GenericImageFileHandler_nodata(self): - """Test nodata handling with direct use of the reader.""" - from satpy.readers.generic_image import GenericImageFileHandler - - fname = os.path.join(self.base_dir, "test_l_nan_fillvalue.tif") - fname_info = {"start_time": self.date} - ftype_info = {} - reader = GenericImageFileHandler(fname, fname_info, ftype_info) - - foo = make_dataid(name="image-custom") - assert reader.file_content - info = {"nodata_handling": "nan_mask"} - dataset = reader.get_dataset(foo, info) - assert isinstance(dataset, xr.DataArray) - assert np.all(np.isnan(dataset.data[0][:10, :10].compute())) - assert np.isnan(dataset.attrs["_FillValue"]) - - info = {"nodata_handling": "fill_value"} - dataset = reader.get_dataset(foo, info) - assert isinstance(dataset, xr.DataArray) - assert np.sum(dataset.data[0][:10, :10].compute()) == 0 - assert dataset.attrs["_FillValue"] == 0 - - # default same as 'nodata_handling': 'fill_value' - dataset = reader.get_dataset(foo, {}) - assert isinstance(dataset, xr.DataArray) - assert np.sum(dataset.data[0][:10, 
:10].compute()) == 0 - assert dataset.attrs["_FillValue"] == 0 + "start_time": DATA_DATE}) + dset["bands"] = ["R", "G", "B"] + fname = tmp_path / "20180101_0000_test_rgb.tif" + _save_image(dset, fname, "geotiff") + + return fname + + +@pytest.fixture +def rgba_dset(random_image_channel_r, random_image_channel_g, random_image_channel_b, alpha_channel): + """Create an RGB dataset.""" + dset = xr.DataArray( + da.stack([random_image_channel_r, random_image_channel_g, random_image_channel_b, alpha_channel]), + dims=("bands", "y", "x"), + attrs={"name": "test_rgba", + "start_time": DATA_DATE}) + dset["bands"] = ["R", "G", "B", "A"] + return dset + + +@pytest.fixture +def test_image_rgba(tmp_path, rgba_dset): + """Create a test image with mode RGBA.""" + fname = tmp_path / "test_rgba.tif" + _save_image(rgba_dset, fname, "geotiff") + + return fname + + +def _save_image(dset, fname, writer, fill_value=None): + scn = Scene() + scn["data"] = dset + scn["data"].attrs["area"] = AREA_DEFINITION + scn.save_dataset("data", str(fname), writer=writer, fill_value=fill_value) + + +def test_png_scene_l_mode(test_image_l): + """Test reading a PNG image with L mode via satpy.Scene().""" + with pytest.warns(NotGeoreferencedWarning, match=r"Dataset has no geotransform"): + scn = Scene(reader="generic_image", filenames=[test_image_l]) + scn.load(["image"]) + assert scn["image"].shape == (1, Y_SIZE, X_SIZE) + assert scn.sensor_names == {"images"} + assert scn.start_time is None + assert scn.end_time is None + assert "area" not in scn["image"].attrs + + +def test_png_scene_la_mode(test_image_la): + """Test reading a PNG image with LA mode via satpy.Scene().""" + with pytest.warns(NotGeoreferencedWarning, match=r"Dataset has no geotransform"): + scn = Scene(reader="generic_image", filenames=[test_image_la]) + scn.load(["image"]) + data = da.compute(scn["image"].data) + assert scn["image"].shape == (1, Y_SIZE, X_SIZE) + assert scn.sensor_names == {"images"} + assert scn.start_time == DATA_DATE 
+ assert scn.end_time == DATA_DATE + assert "area" not in scn["image"].attrs + assert np.sum(np.isnan(data)) == 100 + + +def test_geotiff_scene_rgb(test_image_rgb): + """Test reading geotiff image in RGB mode via satpy.Scene().""" + scn = Scene(reader="generic_image", filenames=[test_image_rgb]) + scn.load(["image"]) + assert scn["image"].shape == (3, Y_SIZE, X_SIZE) + assert scn.sensor_names == {"images"} + assert scn.start_time == DATA_DATE + assert scn.end_time == DATA_DATE + assert scn["image"].area == AREA_DEFINITION + + +def test_geotiff_scene_rgba(test_image_rgba): + """Test reading geotiff image in RGBA mode via satpy.Scene().""" + scn = Scene(reader="generic_image", filenames=[test_image_rgba]) + scn.load(["image"]) + assert scn["image"].shape == (3, Y_SIZE, X_SIZE) + assert scn.sensor_names == {"images"} + assert scn.start_time is None + assert scn.end_time is None + assert scn["image"].area == AREA_DEFINITION + + +def test_geotiff_scene_nan_fill_value(test_image_l_nan_fill_value): + """Test reading geotiff image with fill value set via satpy.Scene().""" + scn = Scene(reader="generic_image", filenames=[test_image_l_nan_fill_value]) + scn.load(["image"]) + assert scn["image"].shape == (1, Y_SIZE, X_SIZE) + assert np.sum(scn["image"].data[0][:10, :10].compute()) == 0 + + +def test_geotiff_scene_nan(test_image_l_nan): + """Test reading geotiff image with NaN values in it via satpy.Scene().""" + scn = Scene(reader="generic_image", filenames=[test_image_l_nan]) + scn.load(["image"]) + assert scn["image"].shape == (1, Y_SIZE, X_SIZE) + assert np.all(np.isnan(scn["image"].data[0][:10, :10].compute())) + + +def test_GenericImageFileHandler(test_image_rgba): + """Test direct use of the reader.""" + from satpy.readers.generic_image import GenericImageFileHandler + + fname_info = {"start_time": DATA_DATE} + ftype_info = {} + reader = GenericImageFileHandler(test_image_rgba, fname_info, ftype_info) + + data_id = make_dataid(name="image") + assert reader.file_content 
+ assert reader.finfo["filename"] == test_image_rgba + assert reader.finfo["start_time"] == DATA_DATE + assert reader.finfo["end_time"] == DATA_DATE + assert reader.area == AREA_DEFINITION + assert reader.get_area_def(None) == AREA_DEFINITION + assert reader.start_time == DATA_DATE + assert reader.end_time == DATA_DATE + + dataset = reader.get_dataset(data_id, {}) + assert isinstance(dataset, xr.DataArray) + assert "spatial_ref" in dataset.coords + assert np.all(np.isnan(dataset.data[:, :10, :10].compute())) + + +class FakeGenericImageFileHandler(GenericImageFileHandler): + """Fake file handler.""" + + def __init__(self, filename, filename_info, filetype_info, file_content, **kwargs): + """Get fake file content from 'get_test_content'.""" + super(GenericImageFileHandler, self).__init__(filename, filename_info, filetype_info) + self.file_content = file_content + self.dataset_name = None + self.file_content.update(kwargs) + + +def test_GenericImageFileHandler_no_masking_for_float(rgba_dset): + """Test direct use of the reader for float_data.""" + # do nothing if not integer + float_data = rgba_dset / 255. 
+ reader = FakeGenericImageFileHandler("dummy", {}, {}, {"image": float_data}) + assert reader.get_dataset(make_dataid(name="image"), {}) is float_data + + +def test_GenericImageFileHandler_masking_for_integer(rgba_dset): + """Test direct use of the reader for float_data.""" + # masking if integer + data = rgba_dset.astype(np.uint32) + assert data.bands.size == 4 + reader = FakeGenericImageFileHandler("dummy", {}, {}, {"image": data}) + ret_data = reader.get_dataset(make_dataid(name="image"), {}) + assert ret_data.bands.size == 3 + + +def test_GenericImageFileHandler_datasetid(test_image_rgba): + """Test direct use of the reader.""" + fname_info = {"start_time": DATA_DATE} + ftype_info = {} + reader = GenericImageFileHandler(test_image_rgba, fname_info, ftype_info) + + data_id = make_dataid(name="image-custom") + assert reader.file_content + dataset = reader.get_dataset(data_id, {}) + assert isinstance(dataset, xr.DataArray) + + +@pytest.fixture +def reader_l_nan_fill_value(test_image_l_nan_fill_value): + """Create GenericImageFileHandler.""" + fname_info = {"start_time": DATA_DATE} + ftype_info = {} + return GenericImageFileHandler(test_image_l_nan_fill_value, fname_info, ftype_info) + + +def test_GenericImageFileHandler_nodata_nan_mask(reader_l_nan_fill_value): + """Test nodata handling with direct use of the reader with nodata handling: nan_mask.""" + data_id = make_dataid(name="image-custom") + assert reader_l_nan_fill_value.file_content + info = {"nodata_handling": "nan_mask"} + dataset = reader_l_nan_fill_value.get_dataset(data_id, info) + assert isinstance(dataset, xr.DataArray) + assert np.all(np.isnan(dataset.data[0][:10, :10].compute())) + assert np.isnan(dataset.attrs["_FillValue"]) + + +def test_GenericImageFileHandler_nodata_fill_value(reader_l_nan_fill_value): + """Test nodata handling with direct use of the reader with nodata handling: fill_value.""" + info = {"nodata_handling": "fill_value"} + data_id = make_dataid(name="image-custom") + dataset = 
reader_l_nan_fill_value.get_dataset(data_id, info) + assert isinstance(dataset, xr.DataArray) + assert np.sum(dataset.data[0][:10, :10].compute()) == 0 + assert dataset.attrs["_FillValue"] == 0 + + +def test_GenericImageFileHandler_nodata_nan_mask_default(reader_l_nan_fill_value): + """Test nodata handling with direct use of the reader with default nodata handling.""" + data_id = make_dataid(name="image-custom") + dataset = reader_l_nan_fill_value.get_dataset(data_id, {}) + assert isinstance(dataset, xr.DataArray) + assert np.sum(dataset.data[0][:10, :10].compute()) == 0 + assert dataset.attrs["_FillValue"] == 0 From ac433d69d411c9030056d5bb3721458e7401ac4a Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 10 Oct 2024 11:05:27 +0300 Subject: [PATCH 117/340] Fix generic image reader to return float32 when float is needed --- satpy/readers/generic_image.py | 2 +- satpy/tests/reader_tests/test_generic_image.py | 6 ++++++ 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/satpy/readers/generic_image.py b/satpy/readers/generic_image.py index 5032b7bbb8..c0e334302f 100644 --- a/satpy/readers/generic_image.py +++ b/satpy/readers/generic_image.py @@ -147,7 +147,7 @@ def _mask_image_data(data, info): if not np.issubdtype(data.dtype, np.integer): raise ValueError("Only integer datatypes can be used as a mask.") mask = data.data[-1, :, :] == np.iinfo(data.dtype).min - data = data.astype(np.float64) + data = data.astype(np.float32) masked_data = da.stack([da.where(mask, np.nan, data.data[i, :, :]) for i in range(data.shape[0])]) data.data = masked_data diff --git a/satpy/tests/reader_tests/test_generic_image.py b/satpy/tests/reader_tests/test_generic_image.py index 21e2b5b09e..1ab3e073ec 100644 --- a/satpy/tests/reader_tests/test_generic_image.py +++ b/satpy/tests/reader_tests/test_generic_image.py @@ -167,6 +167,7 @@ def test_png_scene_l_mode(test_image_l): assert scn.start_time is None assert scn.end_time is None assert "area" not in scn["image"].attrs + 
assert scn["image"].dtype == np.float32 def test_png_scene_la_mode(test_image_la): @@ -181,6 +182,7 @@ def test_png_scene_la_mode(test_image_la): assert scn.end_time == DATA_DATE assert "area" not in scn["image"].attrs assert np.sum(np.isnan(data)) == 100 + assert scn["image"].dtype == np.float32 def test_geotiff_scene_rgb(test_image_rgb): @@ -192,6 +194,7 @@ def test_geotiff_scene_rgb(test_image_rgb): assert scn.start_time == DATA_DATE assert scn.end_time == DATA_DATE assert scn["image"].area == AREA_DEFINITION + assert scn["image"].dtype == np.float32 def test_geotiff_scene_rgba(test_image_rgba): @@ -203,6 +206,7 @@ def test_geotiff_scene_rgba(test_image_rgba): assert scn.start_time is None assert scn.end_time is None assert scn["image"].area == AREA_DEFINITION + assert scn["image"].dtype == np.float32 def test_geotiff_scene_nan_fill_value(test_image_l_nan_fill_value): @@ -211,6 +215,7 @@ def test_geotiff_scene_nan_fill_value(test_image_l_nan_fill_value): scn.load(["image"]) assert scn["image"].shape == (1, Y_SIZE, X_SIZE) assert np.sum(scn["image"].data[0][:10, :10].compute()) == 0 + assert scn["image"].dtype == np.uint8 def test_geotiff_scene_nan(test_image_l_nan): @@ -219,6 +224,7 @@ def test_geotiff_scene_nan(test_image_l_nan): scn.load(["image"]) assert scn["image"].shape == (1, Y_SIZE, X_SIZE) assert np.all(np.isnan(scn["image"].data[0][:10, :10].compute())) + assert scn["image"].dtype == np.float32 def test_GenericImageFileHandler(test_image_rgba): From d129942fc97918a79c746a235f9d374e4fe80aae Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 10 Oct 2024 11:11:21 +0300 Subject: [PATCH 118/340] Fix add_bands() to not promote the data type when adding alpha band --- satpy/composites/__init__.py | 1 + satpy/tests/test_composites.py | 12 ++++++++---- 2 files changed, 9 insertions(+), 4 deletions(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index 0ac99c98f7..b032f23a32 100644 --- a/satpy/composites/__init__.py +++ 
b/satpy/composites/__init__.py @@ -985,6 +985,7 @@ def add_bands(data, bands): alpha = new_data[0].copy() alpha.data = da.ones((data.sizes["y"], data.sizes["x"]), + dtype=new_data[0].dtype, chunks=new_data[0].chunks) # Rename band to indicate it's alpha alpha["bands"] = "A" diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index 2af010e9ac..1b60161a52 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -1302,7 +1302,7 @@ def test_add_bands_l_rgb(self): from satpy.composites import add_bands # L + RGB -> RGB - data = xr.DataArray(da.ones((1, 3, 3)), dims=("bands", "y", "x"), + data = xr.DataArray(da.ones((1, 3, 3), dtype="float32"), dims=("bands", "y", "x"), coords={"bands": ["L"]}) new_bands = xr.DataArray(da.array(["R", "G", "B"]), dims=("bands"), coords={"bands": ["R", "G", "B"]}) @@ -1311,13 +1311,14 @@ def test_add_bands_l_rgb(self): assert res.attrs["mode"] == "".join(res_bands) np.testing.assert_array_equal(res.bands, res_bands) np.testing.assert_array_equal(res.coords["bands"], res_bands) + assert res.dtype == np.float32 def test_add_bands_l_rgba(self): """Test adding bands.""" from satpy.composites import add_bands # L + RGBA -> RGBA - data = xr.DataArray(da.ones((1, 3, 3)), dims=("bands", "y", "x"), + data = xr.DataArray(da.ones((1, 3, 3), dtype="float32"), dims=("bands", "y", "x"), coords={"bands": ["L"]}, attrs={"mode": "L"}) new_bands = xr.DataArray(da.array(["R", "G", "B", "A"]), dims=("bands"), coords={"bands": ["R", "G", "B", "A"]}) @@ -1326,13 +1327,14 @@ def test_add_bands_l_rgba(self): assert res.attrs["mode"] == "".join(res_bands) np.testing.assert_array_equal(res.bands, res_bands) np.testing.assert_array_equal(res.coords["bands"], res_bands) + assert res.dtype == np.float32 def test_add_bands_la_rgb(self): """Test adding bands.""" from satpy.composites import add_bands # LA + RGB -> RGBA - data = xr.DataArray(da.ones((2, 3, 3)), dims=("bands", "y", "x"), + data = xr.DataArray(da.ones((2, 
3, 3), dtype="float32"), dims=("bands", "y", "x"), coords={"bands": ["L", "A"]}, attrs={"mode": "LA"}) new_bands = xr.DataArray(da.array(["R", "G", "B"]), dims=("bands"), coords={"bands": ["R", "G", "B"]}) @@ -1341,13 +1343,14 @@ def test_add_bands_la_rgb(self): assert res.attrs["mode"] == "".join(res_bands) np.testing.assert_array_equal(res.bands, res_bands) np.testing.assert_array_equal(res.coords["bands"], res_bands) + assert res.dtype == np.float32 def test_add_bands_rgb_rbga(self): """Test adding bands.""" from satpy.composites import add_bands # RGB + RGBA -> RGBA - data = xr.DataArray(da.ones((3, 3, 3)), dims=("bands", "y", "x"), + data = xr.DataArray(da.ones((3, 3, 3), dtype="float32"), dims=("bands", "y", "x"), coords={"bands": ["R", "G", "B"]}, attrs={"mode": "RGB"}) new_bands = xr.DataArray(da.array(["R", "G", "B", "A"]), dims=("bands"), @@ -1357,6 +1360,7 @@ def test_add_bands_rgb_rbga(self): assert res.attrs["mode"] == "".join(res_bands) np.testing.assert_array_equal(res.bands, res_bands) np.testing.assert_array_equal(res.coords["bands"], res_bands) + assert res.dtype == np.float32 def test_add_bands_p_l(self): """Test adding bands.""" From c6db95d92a61d8a2b2f09d378989cf626e21fc8e Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 10 Oct 2024 15:27:45 +0300 Subject: [PATCH 119/340] Put common asserts to a helper function --- .../tests/reader_tests/test_generic_image.py | 45 ++++++++----------- 1 file changed, 19 insertions(+), 26 deletions(-) diff --git a/satpy/tests/reader_tests/test_generic_image.py b/satpy/tests/reader_tests/test_generic_image.py index 1ab3e073ec..0d5d647420 100644 --- a/satpy/tests/reader_tests/test_generic_image.py +++ b/satpy/tests/reader_tests/test_generic_image.py @@ -162,12 +162,20 @@ def test_png_scene_l_mode(test_image_l): with pytest.warns(NotGeoreferencedWarning, match=r"Dataset has no geotransform"): scn = Scene(reader="generic_image", filenames=[test_image_l]) scn.load(["image"]) - assert scn["image"].shape == (1, 
Y_SIZE, X_SIZE) - assert scn.sensor_names == {"images"} - assert scn.start_time is None - assert scn.end_time is None + _assert_image_common(scn, 1, None, None, np.float32) assert "area" not in scn["image"].attrs - assert scn["image"].dtype == np.float32 + + +def _assert_image_common(scn, channels, start_time, end_time, dtype): + assert scn["image"].shape == (channels, Y_SIZE, X_SIZE) + assert scn.sensor_names == {"images"} + try: + assert scn.start_time is start_time + assert scn.end_time is end_time + except AssertionError: + assert scn.start_time == start_time + assert scn.end_time == end_time + assert scn["image"].dtype == dtype def test_png_scene_la_mode(test_image_la): @@ -176,55 +184,40 @@ def test_png_scene_la_mode(test_image_la): scn = Scene(reader="generic_image", filenames=[test_image_la]) scn.load(["image"]) data = da.compute(scn["image"].data) - assert scn["image"].shape == (1, Y_SIZE, X_SIZE) - assert scn.sensor_names == {"images"} - assert scn.start_time == DATA_DATE - assert scn.end_time == DATA_DATE - assert "area" not in scn["image"].attrs assert np.sum(np.isnan(data)) == 100 - assert scn["image"].dtype == np.float32 + assert "area" not in scn["image"].attrs + _assert_image_common(scn, 1, DATA_DATE, DATA_DATE, np.float32) def test_geotiff_scene_rgb(test_image_rgb): """Test reading geotiff image in RGB mode via satpy.Scene().""" scn = Scene(reader="generic_image", filenames=[test_image_rgb]) scn.load(["image"]) - assert scn["image"].shape == (3, Y_SIZE, X_SIZE) - assert scn.sensor_names == {"images"} - assert scn.start_time == DATA_DATE - assert scn.end_time == DATA_DATE assert scn["image"].area == AREA_DEFINITION - assert scn["image"].dtype == np.float32 + _assert_image_common(scn, 3, DATA_DATE, DATA_DATE, np.float32) def test_geotiff_scene_rgba(test_image_rgba): """Test reading geotiff image in RGBA mode via satpy.Scene().""" scn = Scene(reader="generic_image", filenames=[test_image_rgba]) scn.load(["image"]) - assert scn["image"].shape == (3, 
Y_SIZE, X_SIZE) - assert scn.sensor_names == {"images"} - assert scn.start_time is None - assert scn.end_time is None + _assert_image_common(scn, 3, None, None, np.float32) assert scn["image"].area == AREA_DEFINITION - assert scn["image"].dtype == np.float32 def test_geotiff_scene_nan_fill_value(test_image_l_nan_fill_value): """Test reading geotiff image with fill value set via satpy.Scene().""" scn = Scene(reader="generic_image", filenames=[test_image_l_nan_fill_value]) scn.load(["image"]) - assert scn["image"].shape == (1, Y_SIZE, X_SIZE) assert np.sum(scn["image"].data[0][:10, :10].compute()) == 0 - assert scn["image"].dtype == np.uint8 - + _assert_image_common(scn, 1, None, None, np.uint8) def test_geotiff_scene_nan(test_image_l_nan): """Test reading geotiff image with NaN values in it via satpy.Scene().""" scn = Scene(reader="generic_image", filenames=[test_image_l_nan]) scn.load(["image"]) - assert scn["image"].shape == (1, Y_SIZE, X_SIZE) assert np.all(np.isnan(scn["image"].data[0][:10, :10].compute())) - assert scn["image"].dtype == np.float32 + _assert_image_common(scn, 1, None, None, np.float32) def test_GenericImageFileHandler(test_image_rgba): From a84d874f8093f91e65f0bdb859c66021a3cb7a49 Mon Sep 17 00:00:00 2001 From: "Adam.Dybbroe" Date: Thu, 10 Oct 2024 17:52:17 +0200 Subject: [PATCH 120/340] A first draft fix of the viewing geometry per feed horn Signed-off-by: Adam.Dybbroe --- satpy/etc/readers/aws_l1b_nc.yaml | 147 ++++++++++++++++++++--- satpy/readers/aws_l1b.py | 22 +++- satpy/tests/reader_tests/test_aws_l1b.py | 53 ++++++-- 3 files changed, 192 insertions(+), 30 deletions(-) diff --git a/satpy/etc/readers/aws_l1b_nc.yaml b/satpy/etc/readers/aws_l1b_nc.yaml index d1d5ea8c5f..68e395d31d 100644 --- a/satpy/etc/readers/aws_l1b_nc.yaml +++ b/satpy/etc/readers/aws_l1b_nc.yaml @@ -363,46 +363,165 @@ datasets: # --- Navigation data --- - solar_azimuth: - name: solar_azimuth + solar_azimuth_horn1: + name: solar_azimuth_horn1 file_type: aws_l1b_nc 
file_key: data/navigation/aws_solar_azimuth_angle standard_name: solar_azimuth_angle - horn: ["1", "2", "3", "4"] + horn: "1" + coordinates: + - longitude + - latitude + + solar_azimuth_horn2: + name: solar_azimuth_horn2 + file_type: aws_l1b_nc + file_key: data/navigation/aws_solar_azimuth_angle + standard_name: solar_azimuth_angle + horn: "2" coordinates: - longitude - latitude - solar_zenith: - name: solar_zenith + solar_azimuth_horn3: + name: solar_azimuth_horn3 + file_type: aws_l1b_nc + file_key: data/navigation/aws_solar_azimuth_angle + standard_name: solar_azimuth_angle + horn: "3" + coordinates: + - longitude + - latitude + + solar_azimuth_horn4: + name: solar_azimuth_horn4 + file_type: aws_l1b_nc + file_key: data/navigation/aws_solar_azimuth_angle + standard_name: solar_azimuth_angle + horn: "4" + coordinates: + - longitude + - latitude + + solar_zenith_horn1: + name: solar_zenith_horn1 file_type: aws_l1b_nc file_key: data/navigation/aws_solar_zenith_angle standard_name: solar_zenith_angle - horn: ["1", "2", "3", "4"] + horn: "1" coordinates: - longitude - latitude - satellite_azimuth: - name: satellite_azimuth + solar_zenith_horn2: + name: solar_zenith_horn2 file_type: aws_l1b_nc - file_key: data/navigation/aws_satellite_azimuth_angle - standard_name: satellite_azimuth_angle - horn: ["1", "2", "3", "4"] + file_key: data/navigation/aws_solar_zenith_angle + standard_name: solar_zenith_angle + horn: "2" coordinates: - longitude - latitude - satellite_zenith: - name: satellite_zenith + solar_zenith_horn3: + name: solar_zenith_horn3 + file_type: aws_l1b_nc + file_key: data/navigation/aws_solar_zenith_angle + standard_name: solar_zenith_angle + horn: "3" + coordinates: + - longitude + - latitude + + solar_zenith_horn4: + name: solar_zenith_horn4 + file_type: aws_l1b_nc + file_key: data/navigation/aws_solar_zenith_angle + standard_name: solar_zenith_angle + horn: "4" + coordinates: + - longitude + - latitude + + satellite_zenith_horn1: + name: 
satellite_zenith_horn1 file_type: aws_l1b_nc file_key: data/navigation/aws_satellite_zenith_angle standard_name: satellite_zenith_angle - horn: ["1", "2", "3", "4"] + horn: "1" coordinates: - longitude - latitude + satellite_zenith_horn2: + name: satellite_zenith_horn2 + file_type: aws_l1b_nc + file_key: data/navigation/aws_satellite_zenith_angle + standard_name: satellite_zenith_angle + horn: "2" + coordinates: + - longitude + - latitude + + satellite_zenith_horn3: + name: satellite_zenith_horn3 + file_type: aws_l1b_nc + file_key: data/navigation/aws_satellite_zenith_angle + standard_name: satellite_zenith_angle + horn: "3" + coordinates: + - longitude + - latitude + + satellite_zenith_horn4: + name: satellite_zenith_horn4 + file_type: aws_l1b_nc + file_key: data/navigation/aws_satellite_zenith_angle + standard_name: satellite_zenith_angle + horn: "4" + coordinates: + - longitude + - latitude + + satellite_azimuth_horn1: + name: satellite_azimuth_horn1 + file_type: aws_l1b_nc + file_key: data/navigation/aws_satellite_azimuth_angle + standard_name: satellite_azimuth_angle + horn: "1" + coordinates: + - longitude + - latitude + + satellite_azimuth_horn2: + name: satellite_azimuth_horn2 + file_type: aws_l1b_nc + file_key: data/navigation/aws_satellite_azimuth_angle + standard_name: satellite_azimuth_angle + horn: "2" + coordinates: + - longitude + - latitude + + satellite_azimuth_horn3: + name: satellite_azimuth_horn3 + file_type: aws_l1b_nc + file_key: data/navigation/aws_satellite_azimuth_angle + standard_name: satellite_azimuth_angle + horn: "3" + coordinates: + - longitude + - latitude + + satellite_azimuth_horn4: + name: satellite_azimuth_horn4 + file_type: aws_l1b_nc + file_key: data/navigation/aws_satellite_azimuth_angle + standard_name: satellite_azimuth_angle + horn: "4" + coordinates: + - longitude + - latitude file_types: aws_l1b_nc: diff --git a/satpy/readers/aws_l1b.py b/satpy/readers/aws_l1b.py index cb421d4482..ce07c209e1 100644 --- 
a/satpy/readers/aws_l1b.py +++ b/satpy/readers/aws_l1b.py @@ -101,9 +101,25 @@ def get_dataset(self, dataset_id, dataset_info): """Get the data.""" if dataset_id["name"] in AWS_CHANNEL_NAMES: data_array = self._get_channel_data(dataset_id, dataset_info) - elif (dataset_id["name"] in ["longitude", "latitude", - "solar_azimuth", "solar_zenith", - "satellite_zenith", "satellite_azimuth"]): + elif dataset_id["name"] in ["satellite_zenith_horn1", + "satellite_zenith_horn2", + "satellite_zenith_horn3", + "satellite_zenith_horn4", + "solar_azimuth_horn1", + "solar_azimuth_horn2", + "solar_azimuth_horn3", + "solar_azimuth_horn4", + "solar_zenith_horn1", + "solar_zenith_horn2", + "solar_zenith_horn3", + "solar_zenith_horn4", + "satellite_azimuth_horn1", + "satellite_azimuth_horn2", + "satellite_azimuth_horn3", + "satellite_azimuth_horn4"]: + data_array = self._get_navigation_data(dataset_id, dataset_info) + + elif dataset_id["name"] in ["longitude", "latitude"]: data_array = self._get_navigation_data(dataset_id, dataset_info) else: raise NotImplementedError diff --git a/satpy/tests/reader_tests/test_aws_l1b.py b/satpy/tests/reader_tests/test_aws_l1b.py index abbf517ab8..a9ed72a211 100644 --- a/satpy/tests/reader_tests/test_aws_l1b.py +++ b/satpy/tests/reader_tests/test_aws_l1b.py @@ -15,24 +15,27 @@ platform_name = "AWS1" file_pattern = "W_XX-OHB-Stockholm,SAT,{platform_name}-MWR-1B-RAD_C_OHB_{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_T_B____.nc" # noqa -fake_data_np = np.random.randint(0, 700000, size=19*5*5).reshape((19, 5, 5)) + +rng = np.random.default_rng() + +fake_data_np = rng.integers(0, 700000, size=19*5*5).reshape((19, 5, 5)) fake_data_np[0, 0, 0] = -2147483648 fake_data_np[0, 0, 1] = 700000 + 10 fake_data_np[0, 0, 2] = -10 fake_data = xr.DataArray(fake_data_np, dims=["n_channels", "n_fovs", "n_scans"]) -fake_lon_data = xr.DataArray(np.random.randint(0, 3599999, size=25 * 4).reshape((4, 5, 5)), +fake_lon_data = 
xr.DataArray(rng.integers(0, 3599999, size=25 * 4).reshape((4, 5, 5)), dims=["n_geo_groups", "n_fovs", "n_scans"]) -fake_lat_data = xr.DataArray(np.random.randint(-900000, 900000, size=25 * 4).reshape((4, 5, 5)), +fake_lat_data = xr.DataArray(rng.integers(-900000, 900000, size=25 * 4).reshape((4, 5, 5)), dims=["n_geo_groups", "n_fovs", "n_scans"]) -fake_sun_azi_data = xr.DataArray(np.random.randint(0, 36000, size=25 * 4).reshape((4, 5, 5)), +fake_sun_azi_data = xr.DataArray(rng.integers(0, 36000, size=25 * 4).reshape((4, 5, 5)), dims=["n_geo_groups", "n_fovs", "n_scans"]) -fake_sun_zen_data = xr.DataArray(np.random.randint(0, 36000, size=25 * 4).reshape((4, 5, 5)), +fake_sun_zen_data = xr.DataArray(rng.integers(0, 36000, size=25 * 4).reshape((4, 5, 5)), dims=["n_geo_groups", "n_fovs", "n_scans"]) -fake_sat_azi_data = xr.DataArray(np.random.randint(0, 36000, size=25 * 4).reshape((4, 5, 5)), +fake_sat_azi_data = xr.DataArray(rng.integers(0, 36000, size=25 * 4).reshape((4, 5, 5)), dims=["n_geo_groups", "n_fovs", "n_scans"]) -fake_sat_zen_data = xr.DataArray(np.random.randint(0, 36000, size=25 * 4).reshape((4, 5, 5)), +fake_sat_zen_data = xr.DataArray(rng.integers(0, 36000, size=25 * 4).reshape((4, 5, 5)), dims=["n_geo_groups", "n_fovs", "n_scans"]) @@ -56,6 +59,8 @@ def aws_file(tmp_path_factory): instrument = "AWS" ds.attrs["instrument"] = instrument + ds.attrs["orbit_start"] = 9991 + ds.attrs["orbit_end"] = 9992 ds["data/calibration/aws_toa_brightness_temperature"] = fake_data ds["data/calibration/aws_toa_brightness_temperature"].attrs["scale_factor"] = 0.001 ds["data/calibration/aws_toa_brightness_temperature"].attrs["add_offset"] = 0.0 @@ -76,6 +81,7 @@ def aws_file(tmp_path_factory): ds["status/satellite/subsat_latitude_start"] = np.array(55.41) ds["status/satellite/subsat_longitude_end"] = np.array(296.79) + tmp_dir = tmp_path_factory.mktemp("aws_l1b_tests") filename = tmp_dir / compose(file_pattern, dict(start_time=start_time, end_time=end_time, 
processing_time=processing_time, platform_name=platform_name)) @@ -84,7 +90,7 @@ def aws_file(tmp_path_factory): return filename -@pytest.fixture() +@pytest.fixture def aws_handler(aws_file): """Create an aws filehandler.""" filename_info = parse(file_pattern, os.path.basename(aws_file)) @@ -133,12 +139,9 @@ def test_get_channel_data(aws_handler): @pytest.mark.parametrize(("id_name", "file_key", "fake_array"), [("longitude", "data/navigation/aws_lon", fake_lon_data * 1e-4), ("latitude", "data/navigation/aws_lat", fake_lat_data), - ("solar_azimuth", "data/navigation/aws_solar_azimuth_angle", fake_sun_azi_data), - ("solar_zenith", "data/navigation/aws_solar_zenith_angle", fake_sun_zen_data), - ("satellite_azimuth", "data/navigation/aws_satellite_azimuth_angle", fake_sat_azi_data), - ("satellite_zenith", "data/navigation/aws_satellite_zenith_angle", fake_sat_zen_data)]) + ]) def test_get_navigation_data(aws_handler, id_name, file_key, fake_array): - """Test retrieving the angles_data.""" + """Test retrieving the geolocation (lon-lat) data.""" Horn = Enum("Horn", ["1", "2", "3", "4"]) did = dict(name=id_name, horn=Horn["1"]) dataset_info = dict(file_key=file_key, standard_name=id_name) @@ -155,3 +158,27 @@ def test_get_navigation_data(aws_handler, id_name, file_key, fake_array): assert "n_geo_groups" not in res.coords if id_name == "longitude": assert res.max() <= 180 + + +@pytest.mark.parametrize(("id_name", "file_key", "fake_array"), + [("solar_azimuth_horn1", "data/navigation/aws_solar_azimuth_angle", fake_sun_azi_data), + ("solar_zenith_horn1", "data/navigation/aws_solar_zenith_angle", fake_sun_zen_data), + ("satellite_azimuth_horn1", "data/navigation/aws_satellite_azimuth_angle", fake_sat_azi_data), + ("satellite_zenith_horn1", "data/navigation/aws_satellite_zenith_angle", fake_sat_zen_data)]) +def test_get_viewing_geometry_data(aws_handler, id_name, file_key, fake_array): + """Test retrieving the angles_data.""" + Horn = Enum("Horn", ["1", "2", "3", "4"]) + did = 
dict(name=id_name, horn=Horn["1"]) + + dataset_info = dict(file_key=file_key, standard_name=id_name) + res = aws_handler.get_dataset(did, dataset_info) + + np.testing.assert_allclose(res, fake_array.isel(n_geo_groups=0)) + assert "x" in res.dims + assert "y" in res.dims + assert "orbital_parameters" in res.attrs + assert res.dims == ("x", "y") + assert "standard_name" in res.attrs + assert "n_geo_groups" not in res.coords + if id_name == "longitude": + assert res.max() <= 180 From a847be4a690cbb3a0c2cfb58dd0d8cefca0c7f05 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Fri, 11 Oct 2024 11:07:38 +0200 Subject: [PATCH 121/340] Fix types to allow float32 computations for SAR-C --- satpy/readers/sar_c_safe.py | 47 ++++++++++----------- satpy/tests/reader_tests/test_sar_c_safe.py | 17 ++++++-- 2 files changed, 37 insertions(+), 27 deletions(-) diff --git a/satpy/readers/sar_c_safe.py b/satpy/readers/sar_c_safe.py index 986440759a..d5c98c1039 100644 --- a/satpy/readers/sar_c_safe.py +++ b/satpy/readers/sar_c_safe.py @@ -78,7 +78,7 @@ def _dictify(r): return int(r.text) except ValueError: try: - return float(r.text) + return np.float32(r.text) except ValueError: return r.text for x in r.findall("./*"): @@ -186,7 +186,7 @@ def get_dataset(self, key, info, chunks=None): def get_calibration_constant(self): """Load the calibration constant.""" - return float(self.root.find(".//absoluteCalibrationConstant").text) + return np.float32(self.root.find(".//absoluteCalibrationConstant").text) def _get_calibration_uncached(self, calibration, chunks=None): """Get the calibration array.""" @@ -341,7 +341,7 @@ def _get_array_pieces_for_current_line(self, current_line): current_blocks = self._find_blocks_covering_line(current_line) current_blocks.sort(key=(lambda x: x.coords["x"][0])) next_line = self._get_next_start_line(current_blocks, current_line) - current_y = np.arange(current_line, next_line) + current_y = np.arange(current_line, next_line, dtype=np.uint16) pieces = 
[arr.sel(y=current_y) for arr in current_blocks] return pieces @@ -389,7 +389,7 @@ def _get_padded_dask_pieces(self, pieces, chunks): @staticmethod def _fill_dask_pieces(dask_pieces, shape, chunks): if shape[1] > 0: - new_piece = da.full(shape, np.nan, chunks=chunks) + new_piece = da.full(shape, np.nan, chunks=chunks, dtype=np.float32) dask_pieces.append(new_piece) @@ -425,11 +425,10 @@ def expand(self, chunks): # corr = 1.5 data = self.lut * corr - x_coord = np.arange(self.first_pixel, self.last_pixel + 1) - y_coord = np.arange(self.first_line, self.last_line + 1) - - new_arr = (da.ones((len(y_coord), len(x_coord)), chunks=chunks) * - np.interp(y_coord, self.lines, data)[:, np.newaxis]) + x_coord = np.arange(self.first_pixel, self.last_pixel + 1, dtype=np.uint16) + y_coord = np.arange(self.first_line, self.last_line + 1, dtype=np.uint16) + new_arr = (da.ones((len(y_coord), len(x_coord)), dtype=np.float32, chunks=chunks) * + np.interp(y_coord, self.lines, data)[:, np.newaxis].astype(np.float32)) new_arr = xr.DataArray(new_arr, dims=["y", "x"], coords={"x": x_coord, @@ -438,29 +437,29 @@ def expand(self, chunks): @property def first_pixel(self): - return int(self.element.find("firstRangeSample").text) + return np.uint16(self.element.find("firstRangeSample").text) @property def last_pixel(self): - return int(self.element.find("lastRangeSample").text) + return np.uint16(self.element.find("lastRangeSample").text) @property def first_line(self): - return int(self.element.find("firstAzimuthLine").text) + return np.uint16(self.element.find("firstAzimuthLine").text) @property def last_line(self): - return int(self.element.find("lastAzimuthLine").text) + return np.uint16(self.element.find("lastAzimuthLine").text) @property def lines(self): lines = self.element.find("line").text.split() - return np.array(lines).astype(int) + return np.array(lines).astype(np.uint16) @property def lut(self): lut = self.element.find("noiseAzimuthLut").text.split() - return 
np.array(lut).astype(float) + return np.array(lut).astype(np.float32) class XMLArray: @@ -487,7 +486,7 @@ def _read_xml_array(self): new_x = elt.find("pixel").text.split() y += [int(elt.find("line").text)] * len(new_x) x += [int(val) for val in new_x] - data += [float(val) + data += [np.float32(val) for val in elt.find(self.element_tag).text.split()] return np.asarray(data), (x, y) @@ -519,17 +518,17 @@ def interpolate_xarray_linear(xpoints, ypoints, values, shape, chunks=CHUNK_SIZE else: vchunks, hchunks = chunks, chunks - points = _ndim_coords_from_arrays(np.vstack((np.asarray(ypoints), - np.asarray(xpoints))).T) + points = _ndim_coords_from_arrays(np.vstack((np.asarray(ypoints, dtype=np.uint16), + np.asarray(xpoints, dtype=np.uint16))).T) interpolator = LinearNDInterpolator(points, values) - grid_x, grid_y = da.meshgrid(da.arange(shape[1], chunks=hchunks), - da.arange(shape[0], chunks=vchunks)) + grid_x, grid_y = da.meshgrid(da.arange(shape[1], chunks=hchunks, dtype=np.uint16), + da.arange(shape[0], chunks=vchunks, dtype=np.uint16)) # workaround for non-thread-safe first call of the interpolator: interpolator((0, 0)) - res = da.map_blocks(intp, grid_x, grid_y, interpolator=interpolator) + res = da.map_blocks(intp, grid_x, grid_y, interpolator=interpolator).astype(values.dtype) return DataArray(res, dims=("y", "x")) @@ -617,7 +616,7 @@ def _calibrate_and_denoise(self, data, key): def _get_digital_number(self, data): """Get the digital numbers (uncalibrated data).""" data = data.where(data > 0) - data = data.astype(np.float64) + data = data.astype(np.float32) dn = data * data return dn @@ -672,8 +671,8 @@ def get_gcps(self): for feature in gcps["features"]] gcp_array = np.array(gcp_list) - ypoints = np.unique(gcp_array[:, 0]) - xpoints = np.unique(gcp_array[:, 1]) + ypoints = np.unique(gcp_array[:, 0]).astype(np.uint16) + xpoints = np.unique(gcp_array[:, 1]).astype(np.uint16) gcp_lons = gcp_array[:, 2].reshape(ypoints.shape[0], xpoints.shape[0]) gcp_lats = 
gcp_array[:, 3].reshape(ypoints.shape[0], xpoints.shape[0]) diff --git a/satpy/tests/reader_tests/test_sar_c_safe.py b/satpy/tests/reader_tests/test_sar_c_safe.py index 9e24c00c4e..8411e23c70 100644 --- a/satpy/tests/reader_tests/test_sar_c_safe.py +++ b/satpy/tests/reader_tests/test_sar_c_safe.py @@ -289,16 +289,18 @@ def test_read_calibrated_natural(self, measurement_filehandler): calibration = Calibration.sigma_nought xarr = measurement_filehandler.get_dataset(DataQuery(name="measurement", polarization="vv", calibration=calibration, quantity="natural"), info=dict()) - expected = np.array([[np.nan, 0.02707529], [2.55858416, 3.27611055]]) + expected = np.array([[np.nan, 0.02707529], [2.55858416, 3.27611055]], dtype=np.float32) np.testing.assert_allclose(xarr.values[:2, :2], expected, rtol=2e-7) + assert xarr.dtype == np.float32 def test_read_calibrated_dB(self, measurement_filehandler): """Test the calibration routines.""" calibration = Calibration.sigma_nought xarr = measurement_filehandler.get_dataset(DataQuery(name="measurement", polarization="vv", calibration=calibration, quantity="dB"), info=dict()) - expected = np.array([[np.nan, -15.674268], [4.079997, 5.153585]]) + expected = np.array([[np.nan, -15.674268], [4.079997, 5.153585]], dtype=np.float32) np.testing.assert_allclose(xarr.values[:2, :2], expected) + assert xarr.dtype == np.float32 def test_read_lon_lats(self, measurement_filehandler): """Test reading lons and lats.""" @@ -306,6 +308,7 @@ def test_read_lon_lats(self, measurement_filehandler): xarr = measurement_filehandler.get_dataset(query, info=dict()) expected = expected_longitudes np.testing.assert_allclose(xarr.values, expected[:10, :10], atol=1e-3) + assert xarr.dtype == np.float64 annotation_xml = b""" @@ -777,6 +780,7 @@ def test_get_noise_dataset(self, noise_filehandler): query = DataQuery(name="noise", polarization="vv") res = noise_filehandler.get_dataset(query, {}) np.testing.assert_allclose(res, self.expected_azimuth_noise * 
self.expected_range_noise) + assert res.dtype == np.float32 def test_get_noise_dataset_has_right_chunk_size(self, noise_filehandler): """Test using get_dataset for the noise has right chunk size in result.""" @@ -799,12 +803,14 @@ def test_dn_calibration_array(self, calibration_filehandler): expected_dn = np.ones((10, 10)) * 1087 res = calibration_filehandler.get_calibration(Calibration.dn, chunks=5) np.testing.assert_allclose(res, expected_dn) + assert res.dtype == np.float32 def test_beta_calibration_array(self, calibration_filehandler): """Test reading the beta calibration array.""" expected_beta = np.ones((10, 10)) * 1087 res = calibration_filehandler.get_calibration(Calibration.beta_nought, chunks=5) np.testing.assert_allclose(res, expected_beta) + assert res.dtype == np.float32 def test_sigma_calibration_array(self, calibration_filehandler): """Test reading the sigma calibration array.""" @@ -812,18 +818,20 @@ def test_sigma_calibration_array(self, calibration_filehandler): 1277.968, 1277.968, 1277.968, 1277.968]]) * np.ones((10, 1)) res = calibration_filehandler.get_calibration(Calibration.sigma_nought, chunks=5) np.testing.assert_allclose(res, expected_sigma) - + assert res.dtype == np.float32 def test_gamma_calibration_array(self, calibration_filehandler): """Test reading the gamma calibration array.""" res = calibration_filehandler.get_calibration(Calibration.gamma, chunks=5) np.testing.assert_allclose(res, self.expected_gamma) + assert res.dtype == np.float32 def test_get_calibration_dataset(self, calibration_filehandler): """Test using get_dataset for the calibration.""" query = DataQuery(name="gamma", polarization="vv") res = calibration_filehandler.get_dataset(query, {}) np.testing.assert_allclose(res, self.expected_gamma) + assert res.dtype == np.float32 def test_get_calibration_dataset_has_right_chunk_size(self, calibration_filehandler): """Test using get_dataset for the calibration yields array with right chunksize.""" @@ -837,6 +845,7 @@ def 
test_get_calibration_constant(self, calibration_filehandler): query = DataQuery(name="calibration_constant", polarization="vv") res = calibration_filehandler.get_dataset(query, {}) assert res == 1 + assert type(res) is np.float32 def test_incidence_angle(annotation_filehandler): @@ -844,6 +853,7 @@ def test_incidence_angle(annotation_filehandler): query = DataQuery(name="incidence_angle", polarization="vv") res = annotation_filehandler.get_dataset(query, {}) np.testing.assert_allclose(res, 19.18318046) + assert res.dtype == np.float32 def test_reading_from_reader(measurement_file, calibration_file, noise_file, annotation_file): @@ -863,6 +873,7 @@ def test_reading_from_reader(measurement_file, calibration_file, noise_file, ann np.testing.assert_allclose(array.attrs["area"].lons, expected_longitudes[:10, :10], atol=1e-3) expected_db = np.array([[np.nan, -15.674268], [4.079997, 5.153585]]) np.testing.assert_allclose(array.values[:2, :2], expected_db) + assert array.dtype == np.float32 def test_filename_filtering_from_reader(measurement_file, calibration_file, noise_file, annotation_file, tmp_path): From 089dd3831fe5db90ac7884b059b0744d6f957524 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Fri, 11 Oct 2024 11:12:28 +0200 Subject: [PATCH 122/340] Add bounding box --- satpy/readers/sar_c_safe.py | 10 +++++++++- satpy/tests/reader_tests/test_sar_c_safe.py | 3 ++- 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/satpy/readers/sar_c_safe.py b/satpy/readers/sar_c_safe.py index d5c98c1039..bee4208d33 100644 --- a/satpy/readers/sar_c_safe.py +++ b/satpy/readers/sar_c_safe.py @@ -682,6 +682,13 @@ def get_gcps(self): return (xpoints, ypoints), (gcp_lons, gcp_lats, gcp_alts), (rio_gcps, crs) + def get_bounding_box(self): + """Get the bounding box for the data coverage.""" + (xpoints, ypoints), (gcp_lons, gcp_lats, gcp_alts), (rio_gcps, crs) = self.get_gcps() + bblons = np.hstack((gcp_lons[0, :-1], gcp_lons[:-1, -1], gcp_lons[-1, :1:-1], gcp_lons[:1:-1, 0])) + 
bblats = np.hstack((gcp_lats[0, :-1], gcp_lats[:-1, -1], gcp_lats[-1, :1:-1], gcp_lats[:1:-1, 0])) + return bblons.tolist(), bblats.tolist() + @property def start_time(self): """Get the start time.""" @@ -729,7 +736,8 @@ def load(self, dataset_keys, **kwargs): gcps = get_gcps_from_array(val) from pyresample.future.geometry import SwathDefinition val.attrs["area"] = SwathDefinition(lonlats["longitude"], lonlats["latitude"], - attrs=dict(gcps=gcps)) + attrs=dict(gcps=gcps, + bounding_box=handler.get_bounding_box())) datasets[key] = val continue return datasets diff --git a/satpy/tests/reader_tests/test_sar_c_safe.py b/satpy/tests/reader_tests/test_sar_c_safe.py index 8411e23c70..da198d4392 100644 --- a/satpy/tests/reader_tests/test_sar_c_safe.py +++ b/satpy/tests/reader_tests/test_sar_c_safe.py @@ -900,7 +900,7 @@ def test_filename_filtering_from_reader(measurement_file, calibration_file, nois pytest.fail(str(err)) -def test_swath_def_contains_gcps(measurement_file, calibration_file, noise_file, annotation_file): +def test_swath_def_contains_gcps_and_bounding_box(measurement_file, calibration_file, noise_file, annotation_file): """Test reading using the reader defined in the config.""" with open(Path(PACKAGE_CONFIG_PATH) / "readers" / "sar-c_safe.yaml") as fd: config = yaml.load(fd, Loader=yaml.UnsafeLoader) @@ -915,3 +915,4 @@ def test_swath_def_contains_gcps(measurement_file, calibration_file, noise_file, dataset_dict = reader.load([query]) array = dataset_dict["measurement"] assert array.attrs["area"].attrs["gcps"] is not None + assert array.attrs["area"].attrs["bounding_box"] is not None From d2b8057ca245c0c53ba4c9e69aebdb3783f48753 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Fri, 11 Oct 2024 11:35:46 +0200 Subject: [PATCH 123/340] Fix tolerance --- satpy/tests/reader_tests/test_sar_c_safe.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_sar_c_safe.py b/satpy/tests/reader_tests/test_sar_c_safe.py index 
da198d4392..7a4b0ff79b 100644 --- a/satpy/tests/reader_tests/test_sar_c_safe.py +++ b/satpy/tests/reader_tests/test_sar_c_safe.py @@ -299,7 +299,7 @@ def test_read_calibrated_dB(self, measurement_filehandler): xarr = measurement_filehandler.get_dataset(DataQuery(name="measurement", polarization="vv", calibration=calibration, quantity="dB"), info=dict()) expected = np.array([[np.nan, -15.674268], [4.079997, 5.153585]], dtype=np.float32) - np.testing.assert_allclose(xarr.values[:2, :2], expected) + np.testing.assert_allclose(xarr.values[:2, :2], expected, rtol=1e-6) assert xarr.dtype == np.float32 def test_read_lon_lats(self, measurement_filehandler): From 4b51512dd089ae6ee74960609b2de5e9c66c2572 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Fri, 11 Oct 2024 11:44:35 +0200 Subject: [PATCH 124/340] Fix tolerance --- satpy/tests/reader_tests/test_sar_c_safe.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_sar_c_safe.py b/satpy/tests/reader_tests/test_sar_c_safe.py index 7a4b0ff79b..499068dc3f 100644 --- a/satpy/tests/reader_tests/test_sar_c_safe.py +++ b/satpy/tests/reader_tests/test_sar_c_safe.py @@ -872,7 +872,7 @@ def test_reading_from_reader(measurement_file, calibration_file, noise_file, ann array = dataset_dict["measurement"] np.testing.assert_allclose(array.attrs["area"].lons, expected_longitudes[:10, :10], atol=1e-3) expected_db = np.array([[np.nan, -15.674268], [4.079997, 5.153585]]) - np.testing.assert_allclose(array.values[:2, :2], expected_db) + np.testing.assert_allclose(array.values[:2, :2], expected_db, rtol=1e-6) assert array.dtype == np.float32 From 044a0ec9404b0783fc61445d7767cdbfa79cc88a Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Fri, 11 Oct 2024 12:20:10 +0200 Subject: [PATCH 125/340] Import DataTree from xarray --- satpy/readers/insat3d_img_l1b_h5.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/satpy/readers/insat3d_img_l1b_h5.py 
b/satpy/readers/insat3d_img_l1b_h5.py index 41ddee5df6..dede8aefcd 100644 --- a/satpy/readers/insat3d_img_l1b_h5.py +++ b/satpy/readers/insat3d_img_l1b_h5.py @@ -7,11 +7,7 @@ import dask.array as da import numpy as np import xarray as xr - -from satpy.utils import import_error_helper - -with import_error_helper("xarray-datatree"): - from datatree import DataTree +from xarray.core.datatree import DataTree from satpy.readers.file_handlers import BaseFileHandler From 879893eb2cda176d48a1ec1f728a6f3690686348 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Fri, 11 Oct 2024 12:44:06 +0200 Subject: [PATCH 126/340] Use spline interpolation for faster processing This requires https://github.com/pytroll/python-geotiepoints/pull/85 to be merged and released. --- satpy/readers/sar_c_safe.py | 13 ++- satpy/readers/sgli_l1b.py | 4 +- satpy/tests/reader_tests/test_sar_c_safe.py | 121 ++++---------------- 3 files changed, 33 insertions(+), 105 deletions(-) diff --git a/satpy/readers/sar_c_safe.py b/satpy/readers/sar_c_safe.py index 986440759a..0d42491cbd 100644 --- a/satpy/readers/sar_c_safe.py +++ b/satpy/readers/sar_c_safe.py @@ -51,7 +51,7 @@ import xarray as xr from dask import array as da from geotiepoints.geointerpolator import lonlat2xyz, xyz2lonlat -from geotiepoints.interpolator import MultipleGridInterpolator +from geotiepoints.interpolator import MultipleSplineInterpolator from xarray import DataArray from satpy.dataset.data_dict import DatasetDict @@ -636,10 +636,15 @@ def _get_lonlatalts_uncached(self): fine_points = [np.arange(size) for size in shape] x, y, z = lonlat2xyz(gcp_lons, gcp_lats) - interpolator = MultipleGridInterpolator((ypoints, xpoints), x, y, z, gcp_alts) - hx, hy, hz, altitudes = interpolator.interpolate(fine_points, method="cubic", chunks=self.chunks) - longitudes, latitudes = xyz2lonlat(hx, hy, hz) + kx = 2 + ky = 2 + + interpolator = MultipleSplineInterpolator((ypoints, xpoints), x, y, z, gcp_alts, kx=kx, ky=ky) + hx, hy, hz, altitudes = 
interpolator.interpolate(fine_points, chunks=self.chunks) + + + longitudes, latitudes = xyz2lonlat(hx, hy, hz) altitudes = xr.DataArray(altitudes, dims=["y", "x"]) longitudes = xr.DataArray(longitudes, dims=["y", "x"]) latitudes = xr.DataArray(latitudes, dims=["y", "x"]) diff --git a/satpy/readers/sgli_l1b.py b/satpy/readers/sgli_l1b.py index 079f93d2f3..f22f77b03a 100644 --- a/satpy/readers/sgli_l1b.py +++ b/satpy/readers/sgli_l1b.py @@ -175,7 +175,7 @@ def get_lon_lats(self, key): def interpolate_spherical(self, azimuthal_angle, polar_angle, resampling_interval): """Interpolate spherical coordinates.""" - from geotiepoints.geointerpolator import GeoGridInterpolator + from geotiepoints.geointerpolator import GeoSplineInterpolator full_shape = (self.h5file["Image_data"].attrs["Number_of_lines"], self.h5file["Image_data"].attrs["Number_of_pixels"]) @@ -183,7 +183,7 @@ def interpolate_spherical(self, azimuthal_angle, polar_angle, resampling_interva tie_lines = np.arange(0, polar_angle.shape[0] * resampling_interval, resampling_interval) tie_cols = np.arange(0, polar_angle.shape[1] * resampling_interval, resampling_interval) - interpolator = GeoGridInterpolator((tie_lines, tie_cols), azimuthal_angle, polar_angle, method="slinear") + interpolator = GeoSplineInterpolator((tie_lines, tie_cols), azimuthal_angle, polar_angle, kx=2, ky=2) new_azi, new_pol = interpolator.interpolate_to_shape(full_shape, chunks="auto") return new_azi, new_pol diff --git a/satpy/tests/reader_tests/test_sar_c_safe.py b/satpy/tests/reader_tests/test_sar_c_safe.py index 9e24c00c4e..26ff603b10 100644 --- a/satpy/tests/reader_tests/test_sar_c_safe.py +++ b/satpy/tests/reader_tests/test_sar_c_safe.py @@ -174,102 +174,26 @@ def measurement_filehandler(measurement_file, noise_filehandler, calibration_fil -expected_longitudes = np.array([[3.79492915e-16, 5.91666667e-01, 9.09722222e-01, - 1.00000000e+00, 9.08333333e-01, 6.80555556e-01, - 3.62500000e-01, 8.32667268e-17, -3.61111111e-01, - 
-6.75000000e-01, -8.95833333e-01, -9.77777778e-01, - -8.75000000e-01, -5.41666667e-01, 6.80555556e-02, - 1.00000000e+00], - [1.19166667e+00, 1.32437500e+00, 1.36941964e+00, - 1.34166667e+00, 1.25598214e+00, 1.12723214e+00, - 9.70282738e-01, 8.00000000e-01, 6.31250000e-01, - 4.78898810e-01, 3.57812500e-01, 2.82857143e-01, - 2.68898810e-01, 3.30803571e-01, 4.83437500e-01, - 7.41666667e-01], - [1.82638889e+00, 1.77596726e+00, 1.72667765e+00, - 1.67757937e+00, 1.62773172e+00, 1.57619402e+00, - 1.52202558e+00, 1.46428571e+00, 1.40203373e+00, - 1.33432894e+00, 1.26023065e+00, 1.17879819e+00, - 1.08909084e+00, 9.90167942e-01, 8.81088790e-01, - 7.60912698e-01], - [2.00000000e+00, 1.99166667e+00, 1.99305556e+00, - 2.00000000e+00, 2.00833333e+00, 2.01388889e+00, - 2.01250000e+00, 2.00000000e+00, 1.97222222e+00, - 1.92500000e+00, 1.85416667e+00, 1.75555556e+00, - 1.62500000e+00, 1.45833333e+00, 1.25138889e+00, - 1.00000000e+00], - [1.80833333e+00, 2.01669643e+00, 2.18011267e+00, - 2.30119048e+00, 2.38253827e+00, 2.42676446e+00, - 2.43647747e+00, 2.41428571e+00, 2.36279762e+00, - 2.28462160e+00, 2.18236607e+00, 2.05863946e+00, - 1.91605017e+00, 1.75720663e+00, 1.58471726e+00, - 1.40119048e+00], - [1.34722222e+00, 1.89627976e+00, 2.29940830e+00, - 2.57341270e+00, 2.73509779e+00, 2.80126842e+00, - 2.78872945e+00, 2.71428571e+00, 2.59474206e+00, - 2.44690334e+00, 2.28757440e+00, 2.13356009e+00, - 2.00166525e+00, 1.90869473e+00, 1.87145337e+00, - 1.90674603e+00], - [7.12500000e-01, 1.67563988e+00, 2.36250177e+00, - 2.80892857e+00, 3.05076318e+00, 3.12384850e+00, - 3.06402742e+00, 2.90714286e+00, 2.68903770e+00, - 2.44555485e+00, 2.21253720e+00, 2.02582766e+00, - 1.92126913e+00, 1.93470451e+00, 2.10197669e+00, - 2.45892857e+00], - [5.55111512e-16, 1.40000000e+00, 2.38095238e+00, - 3.00000000e+00, 3.31428571e+00, 3.38095238e+00, - 3.25714286e+00, 3.00000000e+00, 2.66666667e+00, - 2.31428571e+00, 2.00000000e+00, 1.78095238e+00, - 1.71428571e+00, 1.85714286e+00, 2.26666667e+00, - 
3.00000000e+00], - [-6.94444444e-01, 1.11458333e+00, 2.36631944e+00, - 3.13888889e+00, 3.51041667e+00, 3.55902778e+00, - 3.36284722e+00, 3.00000000e+00, 2.54861111e+00, - 2.08680556e+00, 1.69270833e+00, 1.44444444e+00, - 1.42013889e+00, 1.69791667e+00, 2.35590278e+00, - 3.47222222e+00], - [-1.27500000e+00, 8.64613095e-01, 2.33016227e+00, - 3.21785714e+00, 3.62390731e+00, 3.64452239e+00, - 3.37591199e+00, 2.91428571e+00, 2.35585317e+00, - 1.79682398e+00, 1.33340774e+00, 1.06181406e+00, - 1.07825255e+00, 1.47893282e+00, 2.36006448e+00, - 3.81785714e+00], - [-1.64583333e+00, 6.95312500e-01, 2.28404018e+00, - 3.22916667e+00, 3.63950893e+00, 3.62388393e+00, - 3.29110863e+00, 2.75000000e+00, 2.10937500e+00, - 1.47805060e+00, 9.64843750e-01, 6.78571429e-01, - 7.28050595e-01, 1.22209821e+00, 2.26953125e+00, - 3.97916667e+00], - [-1.71111111e+00, 6.51904762e-01, 2.23951247e+00, - 3.16507937e+00, 3.54197279e+00, 3.48356009e+00, - 3.10320862e+00, 2.51428571e+00, 1.83015873e+00, - 1.16419501e+00, 6.29761905e-01, 3.40226757e-01, - 4.08956916e-01, 9.49319728e-01, 2.07468254e+00, - 3.89841270e+00], - [-1.37500000e+00, 7.79613095e-01, 2.20813846e+00, - 3.01785714e+00, 3.31605017e+00, 3.20999858e+00, - 2.80698342e+00, 2.21428571e+00, 1.53918651e+00, - 8.88966837e-01, 3.70907738e-01, 9.22902494e-02, - 1.60395408e-01, 6.82504252e-01, 1.76589782e+00, - 3.51785714e+00], - [-5.41666667e-01, 1.12366071e+00, 2.20147747e+00, - 2.77976190e+00, 2.94649235e+00, 2.78964711e+00, - 2.39720451e+00, 1.85714286e+00, 1.25744048e+00, - 6.86075680e-01, 2.31026786e-01, -1.97278912e-02, - 2.17899660e-02, 4.43558673e-01, 1.33355655e+00, - 2.77976190e+00], - [8.84722222e-01, 1.72927083e+00, 2.23108879e+00, - 2.44305556e+00, 2.41805060e+00, 2.20895337e+00, - 1.86864335e+00, 1.45000000e+00, 1.00590278e+00, - 5.89231151e-01, 2.52864583e-01, 4.96825397e-02, - 3.25644841e-02, 2.54389881e-01, 7.68038194e-01, - 1.62638889e+00], - [3.00000000e+00, 2.64166667e+00, 2.30853175e+00, - 2.00000000e+00, 1.71547619e+00, 
1.45436508e+00, - 1.21607143e+00, 1.00000000e+00, 8.05555556e-01, - 6.32142857e-01, 4.79166667e-01, 3.46031746e-01, - 2.32142857e-01, 1.36904762e-01, 5.97222222e-02, - 0.00000000e+00]]) +expected_longitudes = np.array([[-0., 0.54230055, 0.87563228, 1., 0.91541479, + 0.62184442, 0.26733714, -0., -0.18015287, -0.27312165], + [1.0883956 , 1.25662247, 1.34380634, 1.34995884, 1.2750712 , + 1.11911385, 0.9390845 , 0.79202785, 0.67796547, 0.59691204], + [1.75505196, 1.74123364, 1.71731849, 1.68330292, 1.63918145, + 1.58494674, 1.52376394, 1.45880655, 1.39007883, 1.31758574], + [2., 1.99615628, 1.99615609, 2., 2.00768917, + 2.0192253 , 2.02115051, 2. , 1.95576762, 1.88845002], + [1.82332931, 2.02143515, 2.18032829, 2.30002491, 2.38053511, + 2.4218612 , 2.43113105, 2.41546985, 2.37487052, 2.3093278 ], + [1.22479001, 1.81701462, 2.26984318, 2.58335874, 2.75765719, + 2.79279164, 2.75366973, 2.70519769, 2.64737395, 2.58019762], + [0.51375081, 1.53781389, 2.3082042 , 2.82500549, 3.0885147 , + 3.09893859, 2.98922885, 2.89232293, 2.8082302 , 2.7369586 ], + [0., 1.33889733, 2.33891557, 3., 3.32266837, + 3.30731797, 3.1383157 , 3., 2.8923933 , 2.81551297], + [-0.31638932, 1.22031759, 2.36197571, 3.10836734, 3.46019271, + 3.41800603, 3.20098223, 3.02826595, 2.89989242, 2.81588745], + [-0.43541441, 1.18211505, 2.37738272, 3.1501186 , 3.50112948, + 3.43104055, 3.17724665, 2.97712796, 2.83072911, 2.73808164]]) class Calibration(Enum): @@ -304,8 +228,7 @@ def test_read_lon_lats(self, measurement_filehandler): """Test reading lons and lats.""" query = DataQuery(name="longitude", polarization="vv") xarr = measurement_filehandler.get_dataset(query, info=dict()) - expected = expected_longitudes - np.testing.assert_allclose(xarr.values, expected[:10, :10], atol=1e-3) + np.testing.assert_allclose(xarr.values, expected_longitudes) annotation_xml = b""" @@ -860,7 +783,7 @@ def test_reading_from_reader(measurement_file, calibration_file, noise_file, ann query = DataID(reader._id_keys, 
**query.to_dict()) dataset_dict = reader.load([query]) array = dataset_dict["measurement"] - np.testing.assert_allclose(array.attrs["area"].lons, expected_longitudes[:10, :10], atol=1e-3) + np.testing.assert_allclose(array.attrs["area"].lons, expected_longitudes) expected_db = np.array([[np.nan, -15.674268], [4.079997, 5.153585]]) np.testing.assert_allclose(array.values[:2, :2], expected_db) From 98762e4375368757f2d4d496fe3038439d49e942 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Fri, 11 Oct 2024 12:47:29 +0200 Subject: [PATCH 127/340] Fix style --- satpy/tests/reader_tests/test_sar_c_safe.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/tests/reader_tests/test_sar_c_safe.py b/satpy/tests/reader_tests/test_sar_c_safe.py index 26ff603b10..3ec3aa9577 100644 --- a/satpy/tests/reader_tests/test_sar_c_safe.py +++ b/satpy/tests/reader_tests/test_sar_c_safe.py @@ -174,8 +174,8 @@ def measurement_filehandler(measurement_file, noise_filehandler, calibration_fil -expected_longitudes = np.array([[-0., 0.54230055, 0.87563228, 1., 0.91541479, - 0.62184442, 0.26733714, -0., -0.18015287, -0.27312165], +expected_longitudes = np.array([[-0., 0.54230055, 0.87563228, 1., 0.91541479, + 0.62184442, 0.26733714, -0., -0.18015287, -0.27312165], [1.0883956 , 1.25662247, 1.34380634, 1.34995884, 1.2750712 , 1.11911385, 0.9390845 , 0.79202785, 0.67796547, 0.59691204], [1.75505196, 1.74123364, 1.71731849, 1.68330292, 1.63918145, From 7eb17ab75caf1914f117b09f893aecf500fc9360 Mon Sep 17 00:00:00 2001 From: verduijn Date: Sat, 12 Oct 2024 09:01:29 +0200 Subject: [PATCH 128/340] Replace patched `print` with capsys fixture --- satpy/tests/test_utils.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/satpy/tests/test_utils.py b/satpy/tests/test_utils.py index c52006f1be..ac3a2e921f 100644 --- a/satpy/tests/test_utils.py +++ b/satpy/tests/test_utils.py @@ -274,13 +274,15 @@ def test_basic_check_satpy(self): from satpy.utils import 
check_satpy check_satpy() - def test_specific_check_satpy(self): + def test_specific_check_satpy(self, capsys): """Test 'check_satpy' with specific features provided.""" from satpy.utils import check_satpy - with mock.patch("satpy.utils.print") as print_mock: - check_satpy(readers=["viirs_sdr"], packages=("cartopy", "__fake")) - checked_fake = any("__fake: not installed" in c[1] for c in print_mock.mock_calls if len(c[1])) - assert checked_fake, "Did not find __fake package mentioned in checks" + check_satpy(readers=["viirs_sdr"], packages=("cartopy", "__fake")) + out, _ = capsys.readouterr() + checked_fake = "__fake: not installed" in out + checked_viirs_sdr = "Readers\n=======\nviirs_sdr" in out + assert checked_fake, "Did not find __fake package mentioned in checks" + assert checked_viirs_sdr, "Did not find viirs_sdr in readers mentioned in checks" class TestShowVersions: From 63de5e07965b7ae3d421722baa52e07d83b9cbad Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Mon, 14 Oct 2024 11:20:56 +0200 Subject: [PATCH 129/340] Run sar tests only with geotiepoints >= 1.7.5 --- satpy/tests/reader_tests/test_sar_c_safe.py | 1 + 1 file changed, 1 insertion(+) diff --git a/satpy/tests/reader_tests/test_sar_c_safe.py b/satpy/tests/reader_tests/test_sar_c_safe.py index 3ec3aa9577..f7191a951a 100644 --- a/satpy/tests/reader_tests/test_sar_c_safe.py +++ b/satpy/tests/reader_tests/test_sar_c_safe.py @@ -32,6 +32,7 @@ from satpy.readers.sar_c_safe import Calibrator, Denoiser, SAFEXMLAnnotation rasterio = pytest.importorskip("rasterio") +geotiepoints = pytest.importorskip("geotiepoints", "1.7.5") dirname_suffix = "20190201T024655_20190201T024720_025730_02DC2A_AE07" From ee7edefbe57be93c56db5ed1d32a4a9e170b8359 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Mon, 14 Oct 2024 11:45:30 +0200 Subject: [PATCH 130/340] Do importskip before importing satpy --- satpy/tests/reader_tests/test_sar_c_safe.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git 
a/satpy/tests/reader_tests/test_sar_c_safe.py b/satpy/tests/reader_tests/test_sar_c_safe.py index f7191a951a..531305798b 100644 --- a/satpy/tests/reader_tests/test_sar_c_safe.py +++ b/satpy/tests/reader_tests/test_sar_c_safe.py @@ -26,13 +26,14 @@ import pytest import yaml -from satpy._config import PACKAGE_CONFIG_PATH -from satpy.dataset import DataQuery -from satpy.dataset.dataid import DataID -from satpy.readers.sar_c_safe import Calibrator, Denoiser, SAFEXMLAnnotation +geotiepoints = pytest.importorskip("geotiepoints", "1.7.5") + +from satpy._config import PACKAGE_CONFIG_PATH # noqa: E402 +from satpy.dataset import DataQuery # noqa: E402 +from satpy.dataset.dataid import DataID # noqa: E402 +from satpy.readers.sar_c_safe import Calibrator, Denoiser, SAFEXMLAnnotation # noqa: E402 rasterio = pytest.importorskip("rasterio") -geotiepoints = pytest.importorskip("geotiepoints", "1.7.5") dirname_suffix = "20190201T024655_20190201T024720_025730_02DC2A_AE07" From 54acda0d3879592e5c5e487a0a925bcc3c51b69c Mon Sep 17 00:00:00 2001 From: Johan Strandgren <42137969+strandgren@users.noreply.github.com> Date: Mon, 14 Oct 2024 16:45:01 +0200 Subject: [PATCH 131/340] Adapt to use pytest instead of unittest --- satpy/tests/reader_tests/test_fci_base.py | 29 ++++++++++------------- 1 file changed, 13 insertions(+), 16 deletions(-) diff --git a/satpy/tests/reader_tests/test_fci_base.py b/satpy/tests/reader_tests/test_fci_base.py index 41ac956b67..1534965402 100644 --- a/satpy/tests/reader_tests/test_fci_base.py +++ b/satpy/tests/reader_tests/test_fci_base.py @@ -19,28 +19,25 @@ """FCI base reader tests package.""" # import datetime as dt -import unittest +import pytest from satpy.readers.fci_base import calculate_area_extent from satpy.tests.utils import make_dataid -class TestCalculateAreaExtent(unittest.TestCase): - """Test TestCalculateAreaExtent.""" +def test_calculate_area_extent(): + """Test function for calculate_area_extent.""" + dataset_id = make_dataid(name="dummy", 
resolution=2000.0) - def test_fun(self): - """Test function for TestCalculateAreaExtent.""" - dataset_id = make_dataid(name="dummmy", resolution=2000.) + area_dict = { + "nlines": 5568, + "ncols": 5568, + "line_step": dataset_id["resolution"], + "column_step": dataset_id["resolution"], + } - area_dict = { - "nlines": 5568, - "ncols": 5568, - "line_step": dataset_id["resolution"], - "column_step": dataset_id["resolution"], - } + area_extent = calculate_area_extent(area_dict) - area_extent = calculate_area_extent(area_dict) + expected = (-5568000.0, 5568000.0, 5568000.0, -5568000.0) - expected = (-5568000.0, 5568000.0, 5568000.0, -5568000.0) - - assert area_extent == expected + assert area_extent == expected From b263479bd782060c4058e20734546543b8b287a9 Mon Sep 17 00:00:00 2001 From: Johan Strandgren <42137969+strandgren@users.noreply.github.com> Date: Mon, 14 Oct 2024 17:36:06 +0200 Subject: [PATCH 132/340] Update tests to use pytest instead of unittest --- satpy/tests/reader_tests/test_eum_l2_grib.py | 475 ++++++++++--------- 1 file changed, 239 insertions(+), 236 deletions(-) diff --git a/satpy/tests/reader_tests/test_eum_l2_grib.py b/satpy/tests/reader_tests/test_eum_l2_grib.py index 593eb2f5af..d3f5622bb7 100644 --- a/satpy/tests/reader_tests/test_eum_l2_grib.py +++ b/satpy/tests/reader_tests/test_eum_l2_grib.py @@ -20,7 +20,7 @@ import datetime import sys -import unittest +import pytest from unittest import mock import numpy as np @@ -60,240 +60,243 @@ FAKE_GID = [0, 1, 2, 3, None] -class TestEUML2GribFileHandler(unittest.TestCase): - """Test the EUML2GribFileHandler reader.""" - - @mock.patch("satpy.readers.eum_l2_grib.ec") - def setUp(self, ec_): - """Set up the test by creating a mocked eccodes library.""" - fake_gid_generator = (i for i in FAKE_GID) - ec_.codes_grib_new_from_file.side_effect = lambda fh: next(fake_gid_generator) - self.ec_ = ec_ - - def common_checks(self, mock_file, dataset_id): - """Commmon checks for fci and seviri data.""" - # Checks 
that the codes_grib_multi_support_on function has been called - self.ec_.codes_grib_multi_support_on.assert_called() - - # Restarts the id generator and clears the call history - fake_gid_generator = (i for i in FAKE_GID) - self.ec_.codes_grib_new_from_file.side_effect = lambda fh: next(fake_gid_generator) - self.ec_.codes_grib_new_from_file.reset_mock() - self.ec_.codes_release.reset_mock() - - # Checks the correct execution of the get_dataset function with a valid parameter_number - valid_dataset = self.reader.get_dataset(dataset_id, {"parameter_number": 30}) - # Checks the correct file open call - mock_file.assert_called_with("test.grib", "rb") - # Checks that the dataset has been created as a DataArray object - assert valid_dataset._extract_mock_name() == "xr.DataArray()" - # Checks that codes_release has been called after each codes_grib_new_from_file call - # (except after the last one which has returned a None) - assert self.ec_.codes_grib_new_from_file.call_count == self.ec_.codes_release.call_count + 1 - - # Restarts the id generator and clears the call history - fake_gid_generator = (i for i in FAKE_GID) - self.ec_.codes_grib_new_from_file.side_effect = lambda fh: next(fake_gid_generator) - self.ec_.codes_grib_new_from_file.reset_mock() - self.ec_.codes_release.reset_mock() - - # Checks the correct execution of the get_dataset function with an invalid parameter_number - invalid_dataset = self.reader.get_dataset(dataset_id, {"parameter_number": 50}) - # Checks that the function returns None - assert invalid_dataset is None - # Checks that codes_release has been called after each codes_grib_new_from_file call - # (except after the last one which has returned a None) - assert self.ec_.codes_grib_new_from_file.call_count == self.ec_.codes_release.call_count + 1 - - @unittest.skipIf(sys.platform.startswith("win"), "'eccodes' not supported on Windows") - @mock.patch("satpy.readers.eum_l2_grib.xr") - @mock.patch("satpy.readers.eum_l2_grib.da") - def 
test_seviri_data_reading(self, da_, xr_): - """Test the reading of data from the product.""" - from satpy.readers.eum_l2_grib import EUML2GribFileHandler - from satpy.utils import get_legacy_chunk_size - chunk_size = get_legacy_chunk_size() - - with mock.patch("builtins.open", mock.mock_open()) as mock_file: - with mock.patch("satpy.readers.eum_l2_grib.ec", self.ec_): - self.ec_.codes_get_values.return_value = np.ones(1000*1200) - self.ec_.codes_get.side_effect = lambda gid, key: FAKE_SEVIRI_MESSAGE[key] - self.reader = EUML2GribFileHandler( - filename="test.grib", - filename_info={ - "spacecraft": "MET11", - "start_time": datetime.datetime(year=2020, month=10, day=20, - hour=19, minute=45, second=0) - }, - filetype_info={ - "file_type": "seviri" - } - ) - - dataset_id = make_dataid(name="dummmy", resolution=3000) - - self.common_checks(mock_file, dataset_id) - - # Check end_time - assert self.reader.end_time == datetime.datetime(year=2020, month=10, day=20, - hour=19, minute=50, second=0) - - # Checks the correct execution of the _get_global_attributes and _get_metadata_from_msg functions - attributes = self.reader._get_attributes() - expected_attributes = { - "orbital_parameters": { - "projection_longitude": 9.5 - }, - "sensor": "seviri", - "platform_name": "Meteosat-11" +@pytest.fixture +@mock.patch("satpy.readers.eum_l2_grib.ec") +def setup_reader(ec_): + """Set up the test by creating a mocked eccodes library.""" + fake_gid_generator = (i for i in FAKE_GID) + ec_.codes_grib_new_from_file.side_effect = lambda fh: next(fake_gid_generator) + return ec_ + + +def common_checks(ec_, reader, mock_file, dataset_id): + """Commmon checks for fci and seviri data.""" + # Checks that the codes_grib_multi_support_on function has been called + ec_.codes_grib_multi_support_on.assert_called() + + # Restarts the id generator and clears the call history + fake_gid_generator = (i for i in FAKE_GID) + ec_.codes_grib_new_from_file.side_effect = lambda fh: next(fake_gid_generator) + 
ec_.codes_grib_new_from_file.reset_mock() + ec_.codes_release.reset_mock() + + # Checks the correct execution of the get_dataset function with a valid parameter_number + valid_dataset = reader.get_dataset(dataset_id, {"parameter_number": 30}) + # Checks the correct file open call + mock_file.assert_called_with("test.grib", "rb") + # Checks that the dataset has been created as a DataArray object + assert valid_dataset._extract_mock_name() == "xr.DataArray()" + # Checks that codes_release has been called after each codes_grib_new_from_file call + # (except after the last one which has returned a None) + assert ec_.codes_grib_new_from_file.call_count == ec_.codes_release.call_count + 1 + + # Restarts the id generator and clears the call history + fake_gid_generator = (i for i in FAKE_GID) + ec_.codes_grib_new_from_file.side_effect = lambda fh: next(fake_gid_generator) + ec_.codes_grib_new_from_file.reset_mock() + ec_.codes_release.reset_mock() + + # Checks the correct execution of the get_dataset function with an invalid parameter_number + invalid_dataset = reader.get_dataset(dataset_id, {"parameter_number": 50}) + # Checks that the function returns None + assert invalid_dataset is None + # Checks that codes_release has been called after each codes_grib_new_from_file call + # (except after the last one which has returned a None) + assert ec_.codes_grib_new_from_file.call_count == ec_.codes_release.call_count + 1 + + +@pytest.mark.skipif(sys.platform.startswith("win"), reason="'eccodes' not supported on Windows") +@mock.patch("satpy.readers.eum_l2_grib.xr") +@mock.patch("satpy.readers.eum_l2_grib.da") +def test_seviri_data_reading(da_, xr_, setup_reader): + """Test the reading of data from the product.""" + from satpy.readers.eum_l2_grib import EUML2GribFileHandler + from satpy.utils import get_legacy_chunk_size + ec_ = setup_reader + chunk_size = get_legacy_chunk_size() + + with mock.patch("builtins.open", mock.mock_open()) as mock_file: + with 
mock.patch("satpy.readers.eum_l2_grib.ec", ec_): + ec_.codes_get_values.return_value = np.ones(1000 * 1200) + ec_.codes_get.side_effect = lambda gid, key: FAKE_SEVIRI_MESSAGE[key] + reader = EUML2GribFileHandler( + filename="test.grib", + filename_info={ + "spacecraft": "MET11", + "start_time": datetime.datetime(year=2020, month=10, day=20, + hour=19, minute=45, second=0) + }, + filetype_info={ + "file_type": "seviri" } - assert attributes == expected_attributes - - # Checks the reading of an array from the message - self.reader._get_xarray_from_msg(0) - - # Checks that dask.array has been called with the correct arguments - name, args, kwargs = da_.mock_calls[0] - assert np.all(args[0] == np.ones((1200, 1000))) - assert args[1] == chunk_size - - # Checks that xarray.DataArray has been called with the correct arguments - name, args, kwargs = xr_.mock_calls[0] - assert kwargs["dims"] == ("y", "x") - - # Checks the correct execution of the _get_proj_area function - pdict, area_dict = self.reader._get_proj_area(0) - - expected_pdict = { - "a": 6400000., - "b": 6300000., - "h": 32000000., - "ssp_lon": 9.5, - "nlines": 1000, - "ncols": 1200, - "a_name": "msg_seviri_rss_3km", - "a_desc": "MSG SEVIRI Rapid Scanning Service area definition with 3 km resolution", - "p_id": "", + ) + + dataset_id = make_dataid(name="dummmy", resolution=3000) + + common_checks(ec_, reader, mock_file, dataset_id) + + # Check end_time + assert reader.end_time == datetime.datetime(year=2020, month=10, day=20, + hour=19, minute=50, second=0) + + # Checks the correct execution of the _get_global_attributes and _get_metadata_from_msg functions + attributes = reader._get_attributes() + expected_attributes = { + "orbital_parameters": { + "projection_longitude": 9.5 + }, + "sensor": "seviri", + "platform_name": "Meteosat-11" + } + assert attributes == expected_attributes + + # Checks the reading of an array from the message + reader._get_xarray_from_msg(0) + + # Checks that dask.array has been called 
with the correct arguments + name, args, kwargs = da_.mock_calls[0] + assert np.all(args[0] == np.ones((1200, 1000))) + assert args[1] == chunk_size + + # Checks that xarray.DataArray has been called with the correct arguments + name, args, kwargs = xr_.mock_calls[0] + assert kwargs["dims"] == ("y", "x") + + # Checks the correct execution of the _get_proj_area function + pdict, area_dict = reader._get_proj_area(0) + + expected_pdict = { + "a": 6400000., + "b": 6300000., + "h": 32000000., + "ssp_lon": 9.5, + "nlines": 1000, + "ncols": 1200, + "a_name": "msg_seviri_rss_3km", + "a_desc": "MSG SEVIRI Rapid Scanning Service area definition with 3 km resolution", + "p_id": "", + } + assert pdict == expected_pdict + expected_area_dict = { + "center_point": 500, + "north": 1200, + "east": 1, + "west": 1000, + "south": 1, + } + assert area_dict == expected_area_dict + + # Checks the correct execution of the get_area_def function + with mock.patch("satpy.readers.eum_l2_grib.seviri_calculate_area_extent", + mock.Mock(name="seviri_calculate_area_extent")) as cae: + with mock.patch("satpy.readers.eum_l2_grib.get_area_definition", mock.Mock()) as gad: + dataset_id = make_dataid(name="dummmy", resolution=400.) 
+ reader.get_area_def(dataset_id) + # Asserts that seviri_calculate_area_extent has been called with the correct arguments + expected_args = ({"center_point": 500, "east": 1, "west": 1000, "south": 1, "north": 1200, + "column_step": 400., "line_step": 400.},) + name, args, kwargs = cae.mock_calls[0] + assert args == expected_args + # Asserts that get_area_definition has been called with the correct arguments + name, args, kwargs = gad.mock_calls[0] + assert args[0] == expected_pdict + # The second argument must be the return result of seviri_calculate_area_extent + assert args[1]._extract_mock_name() == "seviri_calculate_area_extent()" + + +@pytest.mark.skipif(sys.platform.startswith("win"), reason="'eccodes' not supported on Windows") +@mock.patch("satpy.readers.eum_l2_grib.xr") +@mock.patch("satpy.readers.eum_l2_grib.da") +def test_fci_data_reading(da_, xr_, setup_reader): + """Test the reading of fci data from the product.""" + from satpy.readers.eum_l2_grib import EUML2GribFileHandler + from satpy.utils import get_legacy_chunk_size + ec_ = setup_reader + chunk_size = get_legacy_chunk_size() + + with mock.patch("builtins.open", mock.mock_open()) as mock_file: + with mock.patch("satpy.readers.eum_l2_grib.ec", ec_): + ec_.codes_get_values.return_value = np.ones(5568 * 5568) + ec_.codes_get.side_effect = lambda gid, key: FAKE_FCI_MESSAGE[key] + reader = EUML2GribFileHandler( + filename="test.grib", + filename_info={ + "spacecraft_id": "1", + "start_time": datetime.datetime(year=2020, month=10, day=20, + hour=19, minute=40, second=0), + "end_time": datetime.datetime(year=2020, month=10, day=20, + hour=19, minute=50, second=0) + }, + filetype_info={ + "file_type": "fci" } - assert pdict == expected_pdict - expected_area_dict = { - "center_point": 500, - "north": 1200, - "east": 1, - "west": 1000, - "south": 1, - } - assert area_dict == expected_area_dict - - # Checks the correct execution of the get_area_def function - with 
mock.patch("satpy.readers.eum_l2_grib.seviri_calculate_area_extent", - mock.Mock(name="seviri_calculate_area_extent")) as cae: - with mock.patch("satpy.readers.eum_l2_grib.get_area_definition", mock.Mock()) as gad: - dataset_id = make_dataid(name="dummmy", resolution=400.) - self.reader.get_area_def(dataset_id) - # Asserts that seviri_calculate_area_extent has been called with the correct arguments - expected_args = ({"center_point": 500, "east": 1, "west": 1000, "south": 1, "north": 1200, - "column_step": 400., "line_step": 400.},) - name, args, kwargs = cae.mock_calls[0] - assert args == expected_args - # Asserts that get_area_definition has been called with the correct arguments - name, args, kwargs = gad.mock_calls[0] - assert args[0] == expected_pdict - # The second argument must be the return result of seviri_calculate_area_extent - assert args[1]._extract_mock_name() == "seviri_calculate_area_extent()" - - @unittest.skipIf(sys.platform.startswith("win"), "'eccodes' not supported on Windows") - @mock.patch("satpy.readers.eum_l2_grib.xr") - @mock.patch("satpy.readers.eum_l2_grib.da") - def test_fci_data_reading(self, da_, xr_): - """Test the reading of fci data from the product.""" - from satpy.readers.eum_l2_grib import EUML2GribFileHandler - from satpy.utils import get_legacy_chunk_size - chunk_size = get_legacy_chunk_size() - - with mock.patch("builtins.open", mock.mock_open()) as mock_file: - with mock.patch("satpy.readers.eum_l2_grib.ec", self.ec_): - self.ec_.codes_get_values.return_value = np.ones(5568*5568) - self.ec_.codes_get.side_effect = lambda gid, key: FAKE_FCI_MESSAGE[key] - self.reader = EUML2GribFileHandler( - filename="test.grib", - filename_info={ - "spacecraft_id": "1", - "start_time": datetime.datetime(year=2020, month=10, day=20, - hour=19, minute=40, second=0), - "end_time": datetime.datetime(year=2020, month=10, day=20, - hour=19, minute=50, second=0) - }, - filetype_info={ - "file_type": "fci" - } - ) - - dataset_id = 
make_dataid(name="dummmy", resolution=2000) - - self.common_checks(mock_file, dataset_id) - - # Check end_time - assert self.reader.end_time == datetime.datetime(year=2020, month=10, day=20, - hour=19, minute=50, second=0) - - # Checks the correct execution of the _get_global_attributes and _get_metadata_from_msg functions - attributes = self.reader._get_attributes() - expected_attributes = { - "orbital_parameters": { - "projection_longitude": 0.0 - }, - "sensor": "fci", - "platform_name": "MTG-i1" - } - assert attributes == expected_attributes - - # Checks the reading of an array from the message - self.reader._get_xarray_from_msg(0) - - # Checks that dask.array has been called with the correct arguments - name, args, kwargs = da_.mock_calls[0] - assert np.all(args[0] == np.ones((5568, 5568))) - assert args[1] == chunk_size - - # Checks that xarray.DataArray has been called with the correct arguments - name, args, kwargs = xr_.mock_calls[0] - assert kwargs["dims"] == ("y", "x") - - # Checks the correct execution of the _get_proj_area function - pdict, area_dict = self.reader._get_proj_area(0) - - expected_pdict = { - "a": 6378140000.0, - "b": 6356755000.0, - "h": 35785830098.0, - "ssp_lon": 0.0, - "nlines": 5568, - "ncols": 5568, - "a_name": "msg_fci_fdss_2km", - "a_desc": "MSG FCI Full Disk Scanning Service area definition with 2 km resolution", - "p_id": "" - } - assert pdict == expected_pdict - expected_area_dict = { - "nlines": 5568, - "ncols": 5568 - } - assert area_dict == expected_area_dict - - # Checks the correct execution of the get_area_def function - with mock.patch("satpy.readers.eum_l2_grib.fci_calculate_area_extent", - mock.Mock(name="fci_calculate_area_extent")) as cae: - with mock.patch("satpy.readers.eum_l2_grib.get_area_definition", mock.Mock()) as gad: - dataset_id = make_dataid(name="dummmy", resolution=2000.) 
- self.reader.get_area_def(dataset_id) - # Asserts that seviri_calculate_area_extent has been called with the correct arguments - expected_args = ({"nlines": 5568, "ncols": 5568, - "column_step": 2000., "line_step": 2000.},) - name, args, kwargs = cae.mock_calls[0] - assert args == expected_args - # Asserts that get_area_definition has been called with the correct arguments - name, args, kwargs = gad.mock_calls[0] - assert args[0] == expected_pdict - # The second argument must be the return result of seviri_calculate_area_extent - assert args[1]._extract_mock_name() == "fci_calculate_area_extent()" + ) + + dataset_id = make_dataid(name="dummmy", resolution=2000) + + common_checks(ec_, reader, mock_file, dataset_id) + + # Check end_time + assert reader.end_time == datetime.datetime(year=2020, month=10, day=20, + hour=19, minute=50, second=0) + + # Checks the correct execution of the _get_global_attributes and _get_metadata_from_msg functions + attributes = reader._get_attributes() + expected_attributes = { + "orbital_parameters": { + "projection_longitude": 0.0 + }, + "sensor": "fci", + "platform_name": "MTG-i1" + } + assert attributes == expected_attributes + + # Checks the reading of an array from the message + reader._get_xarray_from_msg(0) + + # Checks that dask.array has been called with the correct arguments + name, args, kwargs = da_.mock_calls[0] + assert np.all(args[0] == np.ones((5568, 5568))) + assert args[1] == chunk_size + + # Checks that xarray.DataArray has been called with the correct arguments + name, args, kwargs = xr_.mock_calls[0] + assert kwargs["dims"] == ("y", "x") + + # Checks the correct execution of the _get_proj_area function + pdict, area_dict = reader._get_proj_area(0) + + expected_pdict = { + "a": 6378140000.0, + "b": 6356755000.0, + "h": 35785830098.0, + "ssp_lon": 0.0, + "nlines": 5568, + "ncols": 5568, + "a_name": "msg_fci_fdss_2km", + "a_desc": "MSG FCI Full Disk Scanning Service area definition with 2 km resolution", + "p_id": "" + 
} + assert pdict == expected_pdict + expected_area_dict = { + "nlines": 5568, + "ncols": 5568 + } + assert area_dict == expected_area_dict + + # Checks the correct execution of the get_area_def function + with mock.patch("satpy.readers.eum_l2_grib.fci_calculate_area_extent", + mock.Mock(name="fci_calculate_area_extent")) as cae: + with mock.patch("satpy.readers.eum_l2_grib.get_area_definition", mock.Mock()) as gad: + dataset_id = make_dataid(name="dummmy", resolution=2000.) + reader.get_area_def(dataset_id) + # Asserts that seviri_calculate_area_extent has been called with the correct arguments + expected_args = ({"nlines": 5568, "ncols": 5568, + "column_step": 2000., "line_step": 2000.},) + name, args, kwargs = cae.mock_calls[0] + assert args == expected_args + # Asserts that get_area_definition has been called with the correct arguments + name, args, kwargs = gad.mock_calls[0] + assert args[0] == expected_pdict + # The second argument must be the return result of seviri_calculate_area_extent + assert args[1]._extract_mock_name() == "fci_calculate_area_extent()" From 2c6cc4d45a12c409324f526ab89a0b888e4bc6be Mon Sep 17 00:00:00 2001 From: Joleen Feltz Date: Tue, 15 Oct 2024 14:03:17 -0500 Subject: [PATCH 133/340] Add aliases for platform names which may be in filename --- satpy/readers/viirs_edr.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/satpy/readers/viirs_edr.py b/satpy/readers/viirs_edr.py index 10cdbac043..025bf1ca14 100644 --- a/satpy/readers/viirs_edr.py +++ b/satpy/readers/viirs_edr.py @@ -197,9 +197,12 @@ def platform_name(self): platform_path = self.filename_info["platform_shortname"] platform_dict = {"NPP": "Suomi-NPP", "JPSS-1": "NOAA-20", + "SNPP": "NOAA-20", "J01": "NOAA-20", + "N20": "NOAA-20", "JPSS-2": "NOAA-21", - "J02": "NOAA-21"} + "J02": "NOAA-21", + "N21": "NOAA-21"} return platform_dict[platform_path.upper()] def available_datasets(self, configured_datasets=None): From 12ebe3feab2d9f7b92bca8eabe059776291500cf Mon 
Sep 17 00:00:00 2001 From: Martin Raspaud Date: Tue, 15 Oct 2024 22:02:13 +0200 Subject: [PATCH 134/340] Update satpy/readers/sar_c_safe.py --- satpy/readers/sar_c_safe.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/satpy/readers/sar_c_safe.py b/satpy/readers/sar_c_safe.py index 0d42491cbd..b6d84e8fb7 100644 --- a/satpy/readers/sar_c_safe.py +++ b/satpy/readers/sar_c_safe.py @@ -637,10 +637,8 @@ def _get_lonlatalts_uncached(self): fine_points = [np.arange(size) for size in shape] x, y, z = lonlat2xyz(gcp_lons, gcp_lats) - kx = 2 - ky = 2 - interpolator = MultipleSplineInterpolator((ypoints, xpoints), x, y, z, gcp_alts, kx=kx, ky=ky) + interpolator = MultipleSplineInterpolator((ypoints, xpoints), x, y, z, gcp_alts, kx=2, ky=2) hx, hy, hz, altitudes = interpolator.interpolate(fine_points, chunks=self.chunks) From 41bd4ee5c52c867fd7a21166468883c8c0aea885 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Tue, 15 Oct 2024 22:05:15 +0200 Subject: [PATCH 135/340] Update satpy/readers/sar_c_safe.py Co-authored-by: Panu Lahtinen --- satpy/readers/sar_c_safe.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/sar_c_safe.py b/satpy/readers/sar_c_safe.py index bee4208d33..0bcf9d4347 100644 --- a/satpy/readers/sar_c_safe.py +++ b/satpy/readers/sar_c_safe.py @@ -459,7 +459,7 @@ def lines(self): @property def lut(self): lut = self.element.find("noiseAzimuthLut").text.split() - return np.array(lut).astype(np.float32) + return np.array(lut, dtype=np.float32) class XMLArray: From eba7964e43678181f1b3bded706ae50f53775c49 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 16 Oct 2024 07:25:43 +0000 Subject: [PATCH 136/340] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- satpy/tests/reader_tests/test_eum_l2_grib.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/satpy/tests/reader_tests/test_eum_l2_grib.py b/satpy/tests/reader_tests/test_eum_l2_grib.py index d3f5622bb7..50c6be5398 100644 --- a/satpy/tests/reader_tests/test_eum_l2_grib.py +++ b/satpy/tests/reader_tests/test_eum_l2_grib.py @@ -20,10 +20,10 @@ import datetime import sys -import pytest from unittest import mock import numpy as np +import pytest from satpy.tests.utils import make_dataid From ba307335d78aa050e19928c4082d92f06413fb47 Mon Sep 17 00:00:00 2001 From: Johan Strandgren <42137969+strandgren@users.noreply.github.com> Date: Wed, 16 Oct 2024 09:36:54 +0200 Subject: [PATCH 137/340] Remove unused imports --- satpy/tests/reader_tests/test_fci_base.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/satpy/tests/reader_tests/test_fci_base.py b/satpy/tests/reader_tests/test_fci_base.py index 1534965402..eda7eee8a1 100644 --- a/satpy/tests/reader_tests/test_fci_base.py +++ b/satpy/tests/reader_tests/test_fci_base.py @@ -18,9 +18,6 @@ """FCI base reader tests package.""" -# import datetime as dt -import pytest - from satpy.readers.fci_base import calculate_area_extent from satpy.tests.utils import make_dataid From 1f7b739dcac94bb2b36dd77644bcfd49344ecbf3 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 16 Oct 2024 10:20:32 +0200 Subject: [PATCH 138/340] Fix Landsat SZA tests. 
--- satpy/tests/reader_tests/test_oli_tirs_l1_tif.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_oli_tirs_l1_tif.py b/satpy/tests/reader_tests/test_oli_tirs_l1_tif.py index a2e1cd94d5..fdb8407a6b 100644 --- a/satpy/tests/reader_tests/test_oli_tirs_l1_tif.py +++ b/satpy/tests/reader_tests/test_oli_tirs_l1_tif.py @@ -343,7 +343,7 @@ def setUp(self): self.test_data__2 = da.random.randint(8000, 14000, size=(self.y_size, self.x_size), chunks=(50, 50)).astype(np.uint16) - self.test_data__3= da.random.randint(0, 10000, + self.test_data__3= da.random.randint(1, 10000, size=(self.y_size, self.x_size), chunks=(50, 50)).astype(np.uint16) From aaff79e62adbd21b5714dfbdbf575cb8743b4119 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Wed, 16 Oct 2024 11:33:58 +0300 Subject: [PATCH 139/340] Log an ERROR level message on YAMLErrors when reader loading fails --- satpy/readers/__init__.py | 9 ++++++++- satpy/tests/test_readers.py | 23 +++++++++++++++++++++++ 2 files changed, 31 insertions(+), 1 deletion(-) diff --git a/satpy/readers/__init__.py b/satpy/readers/__init__.py index 64055c7232..3a05bd0c16 100644 --- a/satpy/readers/__init__.py +++ b/satpy/readers/__init__.py @@ -567,10 +567,12 @@ def load_readers(filenames=None, reader=None, reader_kwargs=None): reader_instance = load_reader( reader_configs, **reader_kwargs[None if reader is None else reader[idx]]) - except (KeyError, IOError, yaml.YAMLError) as err: + except (KeyError, IOError) as err: LOG.info("Cannot use %s", str(reader_configs)) LOG.debug(str(err)) continue + except yaml.YAMLError as err: + _log_yaml_error(reader_configs, err) if not readers_files: # we weren't given any files for this reader @@ -590,6 +592,11 @@ def load_readers(filenames=None, reader=None, reader_kwargs=None): return reader_instances +def _log_yaml_error(reader_configs, err): + LOG.error("Problem with %s", str(reader_configs)) + LOG.error(str(err)) + + def _early_exit(filenames, reader): if not 
filenames and not reader: # used for an empty Scene diff --git a/satpy/tests/test_readers.py b/satpy/tests/test_readers.py index eb8983d3cf..a3aef9114e 100644 --- a/satpy/tests/test_readers.py +++ b/satpy/tests/test_readers.py @@ -258,6 +258,11 @@ class TestReaderLoader(unittest.TestCase): Assumes that the VIIRS SDR reader exists and works. """ + @pytest.fixture(autouse=True) + def inject_fixtures(self, caplog): # noqa: PT004 + """Inject caplog to the test class.""" + self._caplog = caplog + def setUp(self): """Wrap HDF5 file handler with our own fake handler.""" from satpy.readers.viirs_sdr import VIIRSSDRFileHandler @@ -439,6 +444,24 @@ def test_almost_all_filtered(self): assert "abi_l1b" in readers assert len(list(readers["abi_l1b"].available_dataset_ids)) == 0 + @mock.patch("satpy.readers.load_reader") + def test_yaml_error_message(self, load_reader): + """Test that YAML errors are logged properly.""" + import logging + + import yaml + + from satpy.readers import load_readers + + filenames = ["AVHR_xxx_1B_M01_20241015100703Z_20241015114603Z_N_O_20241015105547Z.nat"] + error_message = "YAML test error message" + load_reader.side_effect = yaml.YAMLError(error_message) + + with self._caplog.at_level(logging.ERROR): + with pytest.raises(UnboundLocalError): + _ = load_readers(filenames=filenames, reader="avhrr_l1b_eps") + assert error_message in self._caplog.text + class TestFindFilesAndReaders: """Test the find_files_and_readers utility function.""" From ca987bc50a932b15192f483062f554562bf700bc Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Wed, 16 Oct 2024 12:00:59 +0300 Subject: [PATCH 140/340] Refactor load_readers and update the test --- satpy/readers/__init__.py | 26 +++++++++++++++++--------- satpy/tests/test_readers.py | 2 +- 2 files changed, 18 insertions(+), 10 deletions(-) diff --git a/satpy/readers/__init__.py b/satpy/readers/__init__.py index 3a05bd0c16..ef0d8713c3 100644 --- a/satpy/readers/__init__.py +++ b/satpy/readers/__init__.py @@ -563,16 
+563,9 @@ def load_readers(filenames=None, reader=None, reader_kwargs=None): else: readers_files = remaining_filenames - try: - reader_instance = load_reader( - reader_configs, - **reader_kwargs[None if reader is None else reader[idx]]) - except (KeyError, IOError) as err: - LOG.info("Cannot use %s", str(reader_configs)) - LOG.debug(str(err)) + reader_instance = _get_reader_instance(reader, reader_configs, idx, **reader_kwargs) + if reader_instance is None: continue - except yaml.YAMLError as err: - _log_yaml_error(reader_configs, err) if not readers_files: # we weren't given any files for this reader @@ -592,6 +585,21 @@ def load_readers(filenames=None, reader=None, reader_kwargs=None): return reader_instances +def _get_reader_instance(reader, reader_configs, idx, **reader_kwargs): + reader_instance = None + try: + reader_instance = load_reader( + reader_configs, + **reader_kwargs[None if reader is None else reader[idx]]) + except (KeyError, IOError) as err: + LOG.info("Cannot use %s", str(reader_configs)) + LOG.debug(str(err)) + except yaml.YAMLError as err: + _log_yaml_error(reader_configs, err) + + return reader_instance + + def _log_yaml_error(reader_configs, err): LOG.error("Problem with %s", str(reader_configs)) LOG.error(str(err)) diff --git a/satpy/tests/test_readers.py b/satpy/tests/test_readers.py index a3aef9114e..bff268fdd2 100644 --- a/satpy/tests/test_readers.py +++ b/satpy/tests/test_readers.py @@ -458,7 +458,7 @@ def test_yaml_error_message(self, load_reader): load_reader.side_effect = yaml.YAMLError(error_message) with self._caplog.at_level(logging.ERROR): - with pytest.raises(UnboundLocalError): + with pytest.raises(match=ValueError): _ = load_readers(filenames=filenames, reader="avhrr_l1b_eps") assert error_message in self._caplog.text From dc3bc1ca376287c6bd9979248f8f217486457b8b Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Wed, 16 Oct 2024 13:08:42 +0300 Subject: [PATCH 141/340] Fix reader_kwargs handling --- satpy/readers/__init__.py | 
7 +++++-- satpy/tests/test_readers.py | 2 +- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/satpy/readers/__init__.py b/satpy/readers/__init__.py index ef0d8713c3..eb2b73d681 100644 --- a/satpy/readers/__init__.py +++ b/satpy/readers/__init__.py @@ -557,13 +557,16 @@ def load_readers(filenames=None, reader=None, reader_kwargs=None): reader, filenames, remaining_filenames = _get_reader_and_filenames(reader, filenames) (reader_kwargs, reader_kwargs_without_filter) = _get_reader_kwargs(reader, reader_kwargs) + if reader_kwargs is None: + reader_kwargs = {} + for idx, reader_configs in enumerate(configs_for_reader(reader)): if isinstance(filenames, dict): readers_files = set(filenames[reader[idx]]) else: readers_files = remaining_filenames - reader_instance = _get_reader_instance(reader, reader_configs, idx, **reader_kwargs) + reader_instance = _get_reader_instance(reader, reader_configs, idx, reader_kwargs) if reader_instance is None: continue @@ -585,7 +588,7 @@ def load_readers(filenames=None, reader=None, reader_kwargs=None): return reader_instances -def _get_reader_instance(reader, reader_configs, idx, **reader_kwargs): +def _get_reader_instance(reader, reader_configs, idx, reader_kwargs): reader_instance = None try: reader_instance = load_reader( diff --git a/satpy/tests/test_readers.py b/satpy/tests/test_readers.py index bff268fdd2..343613cb0c 100644 --- a/satpy/tests/test_readers.py +++ b/satpy/tests/test_readers.py @@ -458,7 +458,7 @@ def test_yaml_error_message(self, load_reader): load_reader.side_effect = yaml.YAMLError(error_message) with self._caplog.at_level(logging.ERROR): - with pytest.raises(match=ValueError): + with pytest.raises(ValueError, match="No supported files found"): _ = load_readers(filenames=filenames, reader="avhrr_l1b_eps") assert error_message in self._caplog.text From 4745774dcdd1771201ea6b1fce32d6cc3af0644f Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Wed, 16 Oct 2024 13:40:32 +0300 Subject: [PATCH 142/340] Switch 
YAMLError to ConstructorError --- satpy/readers/__init__.py | 2 +- satpy/tests/test_readers.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/readers/__init__.py b/satpy/readers/__init__.py index eb2b73d681..3d1cdf2d40 100644 --- a/satpy/readers/__init__.py +++ b/satpy/readers/__init__.py @@ -597,7 +597,7 @@ def _get_reader_instance(reader, reader_configs, idx, reader_kwargs): except (KeyError, IOError) as err: LOG.info("Cannot use %s", str(reader_configs)) LOG.debug(str(err)) - except yaml.YAMLError as err: + except yaml.constructor.ConstructorError as err: _log_yaml_error(reader_configs, err) return reader_instance diff --git a/satpy/tests/test_readers.py b/satpy/tests/test_readers.py index 343613cb0c..c01e201067 100644 --- a/satpy/tests/test_readers.py +++ b/satpy/tests/test_readers.py @@ -455,7 +455,7 @@ def test_yaml_error_message(self, load_reader): filenames = ["AVHR_xxx_1B_M01_20241015100703Z_20241015114603Z_N_O_20241015105547Z.nat"] error_message = "YAML test error message" - load_reader.side_effect = yaml.YAMLError(error_message) + load_reader.side_effect = yaml.constructor.ConstructorError(error_message) with self._caplog.at_level(logging.ERROR): with pytest.raises(ValueError, match="No supported files found"): From 8c9dd7e52922df89b0a242a903cb2a2ffc3550e8 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Wed, 16 Oct 2024 14:03:36 +0300 Subject: [PATCH 143/340] Reduce cyclomatic complexity in load_readers --- satpy/readers/__init__.py | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/satpy/readers/__init__.py b/satpy/readers/__init__.py index 3d1cdf2d40..04f98e2182 100644 --- a/satpy/readers/__init__.py +++ b/satpy/readers/__init__.py @@ -561,18 +561,12 @@ def load_readers(filenames=None, reader=None, reader_kwargs=None): reader_kwargs = {} for idx, reader_configs in enumerate(configs_for_reader(reader)): - if isinstance(filenames, dict): - readers_files = set(filenames[reader[idx]]) - 
else: - readers_files = remaining_filenames - + readers_files = _get_readers_files(filenames, reader, idx, remaining_filenames) reader_instance = _get_reader_instance(reader, reader_configs, idx, reader_kwargs) - if reader_instance is None: + if reader_instance is None or not readers_files: + # Reader initiliasation failed or no files were given continue - if not readers_files: - # we weren't given any files for this reader - continue loadables = reader_instance.select_files_from_pathnames(readers_files) if loadables: reader_instance.create_storage_items( @@ -580,6 +574,7 @@ def load_readers(filenames=None, reader=None, reader_kwargs=None): fh_kwargs=reader_kwargs_without_filter[None if reader is None else reader[idx]]) reader_instances[reader_instance.name] = reader_instance remaining_filenames -= set(loadables) + if not remaining_filenames: break @@ -588,6 +583,12 @@ def load_readers(filenames=None, reader=None, reader_kwargs=None): return reader_instances +def _get_readers_files(filenames, reader, idx, remaining_filenames): + if isinstance(filenames, dict): + return set(filenames[reader[idx]]) + return remaining_filenames + + def _get_reader_instance(reader, reader_configs, idx, reader_kwargs): reader_instance = None try: From 3db654715280240f8fbc00690811e3350106bb7c Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Wed, 16 Oct 2024 15:53:01 +0300 Subject: [PATCH 144/340] Write a reader config instead of mocking the parsing failure --- satpy/tests/test_readers.py | 28 ++++++++++++++++++---------- 1 file changed, 18 insertions(+), 10 deletions(-) diff --git a/satpy/tests/test_readers.py b/satpy/tests/test_readers.py index c01e201067..f11d181833 100644 --- a/satpy/tests/test_readers.py +++ b/satpy/tests/test_readers.py @@ -259,9 +259,10 @@ class TestReaderLoader(unittest.TestCase): """ @pytest.fixture(autouse=True) - def inject_fixtures(self, caplog): # noqa: PT004 + def inject_fixtures(self, caplog, tmp_path): # noqa: PT004 """Inject caplog to the test class.""" 
self._caplog = caplog + self._tmp_path = tmp_path def setUp(self): """Wrap HDF5 file handler with our own fake handler.""" @@ -444,22 +445,29 @@ def test_almost_all_filtered(self): assert "abi_l1b" in readers assert len(list(readers["abi_l1b"].available_dataset_ids)) == 0 - @mock.patch("satpy.readers.load_reader") - def test_yaml_error_message(self, load_reader): + def test_yaml_error_message(self): """Test that YAML errors are logged properly.""" import logging - import yaml - + import satpy from satpy.readers import load_readers - filenames = ["AVHR_xxx_1B_M01_20241015100703Z_20241015114603Z_N_O_20241015105547Z.nat"] - error_message = "YAML test error message" - load_reader.side_effect = yaml.constructor.ConstructorError(error_message) + reader_config = "reader:\n" + reader_config += " name: nonreader\n" + reader_config += " reader: !!python/name:notapackage.notareader.BadClass\n" + + os.mkdir(self._tmp_path / "readers") + reader_fname = self._tmp_path / "readers" / "nonreader.yaml" + with open(reader_fname, "w") as fid: + fid.write(reader_config) + + filenames = ["foo.bar"] + error_message = "No module named 'notapackage'" with self._caplog.at_level(logging.ERROR): - with pytest.raises(ValueError, match="No supported files found"): - _ = load_readers(filenames=filenames, reader="avhrr_l1b_eps") + with satpy.config.set({"config_path": [str(self._tmp_path)]}): + with pytest.raises(ValueError, match="No supported files found"): + _ = load_readers(filenames=filenames, reader="nonreader") assert error_message in self._caplog.text From 30ee8992cf62a27a084562265dbd20dee9b1f6e0 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Wed, 16 Oct 2024 18:00:04 +0200 Subject: [PATCH 145/340] Add no-op `image_ready` enhancement --- satpy/enhancements/__init__.py | 5 +++++ satpy/etc/enhancements/generic.yaml | 6 ++++++ 2 files changed, 11 insertions(+) diff --git a/satpy/enhancements/__init__.py b/satpy/enhancements/__init__.py index a44ca590cf..95a147aafb 100644 --- 
a/satpy/enhancements/__init__.py +++ b/satpy/enhancements/__init__.py @@ -653,3 +653,8 @@ def _jma_true_color_reproduction(img_data, platform=None): output = da.dot(img_data.T, ccm.T) return output.T + + +def no_op(img): + """Do not do anything to the image.""" + return img.data diff --git a/satpy/etc/enhancements/generic.yaml b/satpy/etc/enhancements/generic.yaml index 5d17154aab..7e23281531 100644 --- a/satpy/etc/enhancements/generic.yaml +++ b/satpy/etc/enhancements/generic.yaml @@ -1285,3 +1285,9 @@ enhancements: imager_with_lightning: standard_name: imager_with_lightning operations: [] + + image_ready: + standard_name: image_ready + operations: + - name: no_op + method: !!python/name:satpy.enhancements.no_op From 4c0b1e71f16a78dd70494664bfd23e5eb35bd3e3 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Wed, 16 Oct 2024 18:01:13 +0200 Subject: [PATCH 146/340] Fix style --- satpy/composites/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/composites/__init__.py b/satpy/composites/__init__.py index b032f23a32..d7518be91d 100644 --- a/satpy/composites/__init__.py +++ b/satpy/composites/__init__.py @@ -721,7 +721,7 @@ def __init__(self, name, lim_low=85., lim_high=88., day_night="day_night", inclu self.day_night = day_night self.include_alpha = include_alpha self._has_sza = False - super(DayNightCompositor, self).__init__(name, **kwargs) + super().__init__(name, **kwargs) def __call__( self, From 6ecb8476bb79616fefafa2ba4198abe76458ebc5 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 16 Oct 2024 20:22:23 +0200 Subject: [PATCH 147/340] Update Landsat reader to correctly return reflectance, and add composites (based on MSI). Also, split tests into manageable chunks. 
--- satpy/etc/composites/oli_tirs.yaml | 422 +++++++++++++++++++++++++ satpy/etc/readers/oli_tirs_l1_tif.yaml | 52 +-- satpy/readers/oli_tirs_l1_tif.py | 59 ++-- 3 files changed, 481 insertions(+), 52 deletions(-) create mode 100644 satpy/etc/composites/oli_tirs.yaml diff --git a/satpy/etc/composites/oli_tirs.yaml b/satpy/etc/composites/oli_tirs.yaml new file mode 100644 index 0000000000..7c74dc3f09 --- /dev/null +++ b/satpy/etc/composites/oli_tirs.yaml @@ -0,0 +1,422 @@ +sensor_name: visir/oli_tirs + +modifiers: + rayleigh_corrected: + modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance + atmosphere: us-standard + aerosol_type: rayleigh_only + prerequisites: + - name: 'b04' + modifiers: [sunz_corrected] + optional_prerequisites: + - name: satellite_azimuth_angle + - name: satellite_zenith_angle + - name: solar_azimuth_angle + - name: solar_zenith_angle + + rayleigh_corrected_antarctic: + modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance + atmosphere: us-standard + aerosol_type: antarctic_aerosol + prerequisites: + - name: 'b04' + modifiers: [sunz_corrected] + optional_prerequisites: + - name: satellite_azimuth_angle + - name: satellite_zenith_angle + - name: solar_azimuth_angle + - name: solar_zenith_angle + + rayleigh_corrected_continental_average: + modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance + atmosphere: us-standard + aerosol_type: continental_average_aerosol + prerequisites: + - name: 'b04' + modifiers: [sunz_corrected] + optional_prerequisites: + - name: satellite_azimuth_angle + - name: satellite_zenith_angle + - name: solar_azimuth_angle + - name: solar_zenith_angle + + rayleigh_corrected_continental_clean: + modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance + atmosphere: us-standard + aerosol_type: continental_clean_aerosol + prerequisites: + - name: 'b04' + modifiers: [sunz_corrected] + optional_prerequisites: + - name: satellite_azimuth_angle + - name: satellite_zenith_angle + - name: 
solar_azimuth_angle + - name: solar_zenith_angle + + rayleigh_corrected_continental_polluted: + modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance + atmosphere: us-standard + aerosol_type: continental_polluted_aerosol + prerequisites: + - name: 'b04' + modifiers: [sunz_corrected] + optional_prerequisites: + - name: satellite_azimuth_angle + - name: satellite_zenith_angle + - name: solar_azimuth_angle + - name: solar_zenith_angle + + rayleigh_corrected_desert: + modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance + atmosphere: us-standard + aerosol_type: desert_aerosol + prerequisites: + - name: 'b04' + modifiers: [sunz_corrected] + optional_prerequisites: + - name: satellite_azimuth_angle + - name: satellite_zenith_angle + - name: solar_azimuth_angle + - name: solar_zenith_angle + + rayleigh_corrected_marine_clean: + modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance + atmosphere: us-standard + aerosol_type: marine_clean_aerosol + prerequisites: + - name: 'b04' + modifiers: [sunz_corrected] + optional_prerequisites: + - name: satellite_azimuth_angle + - name: satellite_zenith_angle + - name: solar_azimuth_angle + - name: solar_zenith_angle + + rayleigh_corrected_marine_polluted: + modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance + atmosphere: us-standard + aerosol_type: marine_polluted_aerosol + prerequisites: + - name: 'b04' + modifiers: [sunz_corrected] + optional_prerequisites: + - name: satellite_azimuth_angle + - name: satellite_zenith_angle + - name: solar_azimuth_angle + - name: solar_zenith_angle + + rayleigh_corrected_marine_tropical: + modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance + atmosphere: us-standard + aerosol_type: marine_tropical_aerosol + prerequisites: + - name: 'b04' + modifiers: [sunz_corrected] + optional_prerequisites: + - name: satellite_azimuth_angle + - name: satellite_zenith_angle + - name: solar_azimuth_angle + - name: solar_zenith_angle + + rayleigh_corrected_rural: + 
modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance + atmosphere: us-standard + aerosol_type: rural_aerosol + prerequisites: + - name: 'b04' + modifiers: [sunz_corrected] + optional_prerequisites: + - name: satellite_azimuth_angle + - name: satellite_zenith_angle + - name: solar_azimuth_angle + - name: solar_zenith_angle + + rayleigh_corrected_urban: + modifier: !!python/name:satpy.modifiers.PSPRayleighReflectance + atmosphere: us-standard + aerosol_type: urban_aerosol + prerequisites: + - name: 'b04' + modifiers: [sunz_corrected] + optional_prerequisites: + - name: satellite_azimuth_angle + - name: satellite_zenith_angle + - name: solar_azimuth_angle + - name: solar_zenith_angle + + +composites: + true_color: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'b04' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] + - name: 'b03' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] + - name: 'b02' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] + standard_name: true_color + + true_color_antarctic: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'b04' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_antarctic] + - name: 'b03' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_antarctic] + - name: 'b02' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_antarctic] + standard_name: true_color + + true_color_continental_average: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'b04' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_continental_average] + - name: 'b03' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_continental_average] + - name: 'b02' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_continental_average] + standard_name: true_color 
+ + true_color_continental_clean: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'b04' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_continental_clean] + - name: 'b03' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_continental_clean] + - name: 'b02' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_continental_clean] + standard_name: true_color + + true_color_continental_polluted: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'b04' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_continental_polluted] + - name: 'b03' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_continental_polluted] + - name: 'b02' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_continental_polluted] + standard_name: true_color + + true_color_desert: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'b04' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_desert] + - name: 'b03' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_desert] + - name: 'b02' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_desert] + standard_name: true_color + + true_color_marine_clean: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'b04' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_clean] + - name: 'b03' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_clean] + - name: 'b02' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_clean] + standard_name: true_color + + true_color_marine_polluted: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'b04' + modifiers: [effective_solar_pathlength_corrected, 
rayleigh_corrected_marine_polluted] + - name: 'b03' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_polluted] + - name: 'b02' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_polluted] + standard_name: true_color + + true_color_marine_tropical: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'b04' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_tropical] + - name: 'b03' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_tropical] + - name: 'b02' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_tropical] + standard_name: true_color + + true_color_rural: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'b04' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_rural] + - name: 'b03' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_rural] + - name: 'b02' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_rural] + standard_name: true_color + + true_color_urban: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'b04' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_urban] + - name: 'b03' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_urban] + - name: 'b02' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_urban] + standard_name: true_color + + true_color_uncorr: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'b04' + modifiers: [effective_solar_pathlength_corrected] + - name: 'b03' + modifiers: [effective_solar_pathlength_corrected] + - name: 'b02' + modifiers: [effective_solar_pathlength_corrected] + standard_name: true_color + + true_color_raw: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'b04' 
+ # modifiers: [effective_solar_pathlength_corrected] + - name: 'b03' + # modifiers: [effective_solar_pathlength_corrected] + - name: 'b02' + # modifiers: [effective_solar_pathlength_corrected] + standard_name: true_color + + natural_color: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'b06' + modifiers: [effective_solar_pathlength_corrected] + - name: 'b05' + modifiers: [effective_solar_pathlength_corrected] + - name: 'b04' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] + standard_name: natural_color + + urban_color: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'b07' + modifiers: [effective_solar_pathlength_corrected] + - name: 'b06' + modifiers: [effective_solar_pathlength_corrected] + - name: 'b04' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] + standard_name: natural_color + + false_color: + compositor: !!python/name:satpy.composites.GenericCompositor + prerequisites: + - name: 'b05' + modifiers: [effective_solar_pathlength_corrected] + - name: 'b04' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] + - name: 'b03' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] + standard_name: natural_color + + ndvi: + # Normalized Difference Vegetation Index + # For more information please review https://custom-scripts.sentinel-hub.com/sentinel-2/ndvi/ + compositor: !!python/name:satpy.composites.SingleBandCompositor + prerequisites: + - compositor: !!python/name:satpy.composites.RatioCompositor + prerequisites: + - compositor: !!python/name:satpy.composites.DifferenceCompositor + prerequisites: + - name: 'b05' + modifiers: [effective_solar_pathlength_corrected] + - name: 'b04' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] + - compositor: !!python/name:satpy.composites.SumCompositor + prerequisites: + - name: 'b05' + modifiers: [effective_solar_pathlength_corrected] 
+ - name: 'b04' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] + standard_name: ndvi_msi + + ndmi: + # Normalized Difference Moisture Index + # For more information please review https://custom-scripts.sentinel-hub.com/sentinel-2/ndmi/ + compositor: !!python/name:satpy.composites.SingleBandCompositor + prerequisites: + - compositor: !!python/name:satpy.composites.RatioCompositor + prerequisites: + - compositor: !!python/name:satpy.composites.DifferenceCompositor + prerequisites: + - name: 'b05' + modifiers: [effective_solar_pathlength_corrected] + - name: 'b06' + modifiers: [effective_solar_pathlength_corrected] + - compositor: !!python/name:satpy.composites.SumCompositor + prerequisites: + - name: 'b05' + modifiers: [effective_solar_pathlength_corrected] + - name: 'b06' + modifiers: [effective_solar_pathlength_corrected] + standard_name: ndmi_msi + + ndwi: + # Normalized Difference Water Index + # For more information please review https://custom-scripts.sentinel-hub.com/sentinel-2/ndwi/ + compositor: !!python/name:satpy.composites.SingleBandCompositor + prerequisites: + - compositor: !!python/name:satpy.composites.RatioCompositor + prerequisites: + - compositor: !!python/name:satpy.composites.DifferenceCompositor + prerequisites: + - name: 'b03' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] + - name: 'b05' + modifiers: [effective_solar_pathlength_corrected] + - compositor: !!python/name:satpy.composites.SumCompositor + prerequisites: + - name: 'b03' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] + - name: 'b05' + modifiers: [effective_solar_pathlength_corrected] + standard_name: ndwi_msi + + ndsi: + # Normalized Difference Snow Index + # For more information please review https://custom-scripts.sentinel-hub.com/sentinel-2/ndsi/ + compositor: !!python/name:satpy.composites.MaskingCompositor + prerequisites: + - name: 'b06' + modifiers: [effective_solar_pathlength_corrected] + - compositor: 
!!python/name:satpy.composites.RatioCompositor + prerequisites: + - compositor: !!python/name:satpy.composites.DifferenceCompositor + prerequisites: + - name: 'b03' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] + - name: 'b06' + modifiers: [effective_solar_pathlength_corrected] + - compositor: !!python/name:satpy.composites.SumCompositor + prerequisites: + - name: 'b03' + modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] + - name: 'b06' + modifiers: [effective_solar_pathlength_corrected] + conditions: + - method: less_equal + value: 0.42 + transparency: 100 + - method: isnan + transparency: 100 + standard_name: ndsi_msi + + ndsi_with_true_color: + compositor: !!python/name:satpy.composites.BackgroundCompositor + prerequisites: + - name: ndsi + - name: true_color + standard_name: no_enhancement diff --git a/satpy/etc/readers/oli_tirs_l1_tif.yaml b/satpy/etc/readers/oli_tirs_l1_tif.yaml index e27345c1a7..54799ab624 100644 --- a/satpy/etc/readers/oli_tirs_l1_tif.yaml +++ b/satpy/etc/readers/oli_tirs_l1_tif.yaml @@ -5,7 +5,7 @@ reader: description: GeoTIFF reader for Landsat-8/9 OLI/TIRS L1 data. 
status: Beta supports_fsspec: false - sensors: [oli, tirs] + sensors: oli_tirs default_channels: [] reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader @@ -102,7 +102,7 @@ file_types: datasets: b01: name: b01 - sensor: oli + sensor: oli_tirs wavelength: [0.433, 0.443, 0.453] resolution: 30 calibration: @@ -119,7 +119,7 @@ datasets: b02: name: b02 - sensor: oli + sensor: oli_tirs wavelength: [0.450, 0.482, 0.515] resolution: 30 calibration: @@ -136,7 +136,7 @@ datasets: b03: name: b03 - sensor: oli + sensor: oli_tirs wavelength: [0.525, 0.565, 0.600] resolution: 30 calibration: @@ -153,7 +153,7 @@ datasets: b04: name: b04 - sensor: oli + sensor: oli_tirs wavelength: [0.630, 0.660, 0.680] resolution: 30 calibration: @@ -170,7 +170,7 @@ datasets: b05: name: b05 - sensor: oli + sensor: oli_tirs wavelength: [0.845, 0.867, 0.885] resolution: 30 calibration: @@ -187,7 +187,7 @@ datasets: b06: name: b06 - sensor: oli + sensor: oli_tirs wavelength: [1.560, 1.650, 1.660] resolution: 30 calibration: @@ -204,7 +204,7 @@ datasets: b07: name: b07 - sensor: oli + sensor: oli_tirs wavelength: [2.100, 2.215, 2.300] resolution: 30 calibration: @@ -221,7 +221,7 @@ datasets: b08: name: b08 - sensor: oli + sensor: oli_tirs wavelength: [0.500, 0.579, 0.680] resolution: 15 calibration: @@ -238,7 +238,7 @@ datasets: b09: name: b09 - sensor: oli + sensor: oli_tirs wavelength: [1.360, 1.373, 1.390] resolution: 30 calibration: @@ -256,7 +256,7 @@ datasets: # Channels on the TIRS instrument b10: name: b10 - sensor: tirs + sensor: oli_tirs wavelength: [10.6, 10.888, 11.19] resolution: 30 calibration: @@ -273,7 +273,7 @@ datasets: b11: name: b11 - sensor: tirs + sensor: oli_tirs wavelength: [11.5, 11.981, 12.51] resolution: 30 calibration: @@ -291,44 +291,44 @@ datasets: # QA Variables qa: name: qa - sensor: oli + sensor: oli_tirs resolution: 30 file_type: granule_qa qa_radsat: name: qa_radsat - sensor: oli + sensor: oli_tirs resolution: 30 file_type: granule_qa_radsat # Angles 
datasets - sza: - name: sza - sensor: oli + solar_zenith_angle: + name: solar_zenith_angle + sensor: oli_tirs standard_name: solar_zenith_angle resolution: 30 units: "degrees" file_type: granule_sza - saa: - name: saa - sensor: oli + solar_azimuth_angle: + name: solar_azimuth_angle + sensor: oli_tirs standard_name: solar_azimuth_angle resolution: 30 units: "degrees" file_type: granule_saa - vza: - name: vza - sensor: oli + satellite_zenith_angle: + name: satellite_zenith_angle + sensor: oli_tirs standard_name: viewing_zenith_angle resolution: 30 units: "degrees" file_type: granule_vza - vaa: - name: vaa - sensor: oli + satellite_azimuth_angle: + name: satellite_azimuth_angle + sensor: oli_tirs standard_name: viewing_azimuth_angle resolution: 30 units: "degrees" diff --git a/satpy/readers/oli_tirs_l1_tif.py b/satpy/readers/oli_tirs_l1_tif.py index 209b103b0c..6be1b20a1c 100644 --- a/satpy/readers/oli_tirs_l1_tif.py +++ b/satpy/readers/oli_tirs_l1_tif.py @@ -27,7 +27,7 @@ """ import logging -from datetime import datetime +from datetime import datetime, timezone import defusedxml.ElementTree as ET import numpy as np @@ -45,7 +45,12 @@ OLI_BANDLIST = ["b01", "b02", "b03", "b04", "b05", "b06", "b07", "b08", "b09"] TIRS_BANDLIST = ["b10", "b11"] PAN_BANDLIST = ["b08"] -ANGLIST = ["sza", "saa", "vza", "vaa"] +ANGLIST = ["satellite_azimuth_angle", + "satellite_zenith_angle", + "solar_azimuth_angle", + "solar_zenith_angle"] + +ANGLIST_CHAN = ["sza", "saa", "vaa", "vza"] BANDLIST = OLI_BANDLIST + TIRS_BANDLIST @@ -74,7 +79,7 @@ def end_time(self): def __init__(self, filename, filename_info, filetype_info, mda, **kwargs): """Initialize the reader.""" - super(OLITIRSCHReader, self).__init__(filename, filename_info, filetype_info) + super().__init__(filename, filename_info, filetype_info) # Check we have landsat data if filename_info["platform_type"] != "L": @@ -96,7 +101,7 @@ def __init__(self, filename, filename_info, filetype_info, mda, **kwargs): def get_dataset(self, key, 
info): """Load a dataset.""" - if self.channel != key["name"]: + if self.channel != key["name"] and self.channel not in ANGLIST_CHAN: raise ValueError(f"Requested channel {key['name']} does not match the reader channel {self.channel}") if key["name"] in OLI_BANDLIST and self.chan_selector not in ["O", "C"]: @@ -112,16 +117,22 @@ def get_dataset(self, key, info): "x": CHUNK_SIZE}, mask_and_scale=False)["band_data"].squeeze() + # The fill value for Landsat is '0', for calibration simplicity convert it to np.nan - data = xr.where(data == 0, np.float32(np.nan), data) + data.data = xr.where(data.data == 0, np.float32(np.nan), data.data) attrs = data.attrs.copy() # Add useful metadata to the attributes. attrs["perc_cloud_cover"] = self._mda.cloud_cover + # Add platform / sensor attributes + attrs["platform_name"] = self.platform_name + attrs["sensor"] = "OLI_TIRS" + # Apply attrs from YAML + attrs["standard_name"] = info["standard_name"] + attrs["units"] = info["units"] # Only OLI bands have a saturation flag if key["name"] in OLI_BANDLIST: - attrs["saturated"] = self.bsat[key["name"]] # Rename to Satpy convention @@ -134,43 +145,37 @@ def get_dataset(self, key, info): data = self.calibrate(data, key["calibration"]) if key["name"] in ANGLIST: data.data = data.data * 0.01 - data.attrs["units"] = "degrees" - data.attrs["standard_name"] = "solar_zenith_angle" return data def calibrate(self, data, calibration): """Calibrate the data from counts into the desired units.""" if calibration == "counts": - data.attrs["standard_name"] = "counts" - data.attrs["units"] = "1" return data if calibration in ["radiance", "brightness_temperature"]: - data.attrs["standard_name"] = "toa_outgoing_radiance_per_unit_wavelength" - data.attrs["units"] = "W m-2 um-1 sr-1" data.data = data.data * self.calinfo[self.channel][0] + self.calinfo[self.channel][1] if calibration == "radiance": - return data.astype(np.float32) + data.data = data.data.astype(np.float32) + return data if calibration == 
"reflectance": if int(self.channel[1:]) < 10: - data.attrs["standard_name"] = "toa_bidirectional_reflectance" - data.attrs["units"] = "%" data.data = data.data * self.calinfo[self.channel][2] + self.calinfo[self.channel][3] - return data.astype(np.float32) + data.data = data.data.astype(np.float32) * 100 + return data if calibration == "brightness_temperature": if self.channel[1:] in ["10", "11"]: - data.attrs["standard_name"] = "toa_brightness_temperature" - data.attrs["units"] = "K" data.data = (self.calinfo[self.channel][3] / np.log((self.calinfo[self.channel][2] / data.data) + 1)) - return data.astype(np.float32) + data.data = data.data.astype(np.float32) + return data def get_area_def(self, dsid): """Get area definition of the image from the metadata.""" return self._mda.build_area_def(dsid["name"]) + class OLITIRSMDReader(BaseFileHandler): """File handler for Landsat L1 files (tif).""" def __init__(self, filename, filename_info, filetype_info): @@ -179,11 +184,10 @@ def __init__(self, filename, filename_info, filetype_info): # Check we have landsat data if filename_info["platform_type"] != "L": raise ValueError("This reader only supports Landsat data") - + self.platform_name = PLATFORMS[filename_info["spacecraft_id"]] self._obs_date = filename_info["observation_date"] self.root = ET.parse(self.filename) self.process_level = filename_info["process_level_correction"] - self.platform_name = PLATFORMS[filename_info["spacecraft_id"]] import bottleneck # noqa import geotiepoints # noqa @@ -191,7 +195,8 @@ def __init__(self, filename, filename_info, filetype_info): @property def center_time(self): """Return center time.""" - return datetime.strptime(self.root.find(".//IMAGE_ATTRIBUTES/SCENE_CENTER_TIME").text[:-2], "%H:%M:%S.%f") + return datetime.strptime(self.root.find(".//IMAGE_ATTRIBUTES/SCENE_CENTER_TIME").text[:-2], + "%H:%M:%S.%f").replace(tzinfo=timezone.utc) @property def start_time(self): @@ -201,7 +206,8 @@ def start_time(self): It is constructed from the 
observation date (from the filename) and the center time (from the metadata). """ return datetime(self._obs_date.year, self._obs_date.month, self._obs_date.day, - self.center_time.hour, self.center_time.minute, self.center_time.second) + self.center_time.hour, self.center_time.minute, self.center_time.second, + tzinfo=timezone.utc) @property def end_time(self): @@ -211,7 +217,8 @@ def end_time(self): It is constructed from the observation date (from the filename) and the center time (from the metadata). """ return datetime(self._obs_date.year, self._obs_date.month, self._obs_date.day, - self.center_time.hour, self.center_time.minute, self.center_time.second) + self.center_time.hour, self.center_time.minute, self.center_time.second, + tzinfo=timezone.utc) @property def cloud_cover(self): @@ -245,14 +252,14 @@ def _get_band_viscal(self, band): rad_gain, rad_add = self._get_band_radcal(band) ref_gain = float(self.root.find(f".//LEVEL1_RADIOMETRIC_RESCALING/REFLECTANCE_MULT_BAND_{band}").text) ref_add = float(self.root.find(f".//LEVEL1_RADIOMETRIC_RESCALING/REFLECTANCE_ADD_BAND_{band}").text) - return (rad_gain, rad_add, ref_gain, ref_add) + return rad_gain, rad_add, ref_gain, ref_add def _get_band_tircal(self, band): """Return thermal channel calibration info.""" rad_gain, rad_add = self._get_band_radcal(band) bt_k1 = float(self.root.find(f".//LEVEL1_THERMAL_CONSTANTS/K1_CONSTANT_BAND_{band}").text) bt_k2 = float(self.root.find(f".//LEVEL1_THERMAL_CONSTANTS/K2_CONSTANT_BAND_{band}").text) - return (rad_gain, rad_add, bt_k1, bt_k2) + return rad_gain, rad_add, bt_k1, bt_k2 @property def band_calibration(self): From 9ad335871f0ceb2a0d239d5e771c38cecbe89079 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Wed, 16 Oct 2024 20:30:46 +0200 Subject: [PATCH 148/340] Split Landsat tests into smaller components. 
--- .../reader_tests/test_oli_tirs_l1_tif.py | 92 ++++++++++++------- 1 file changed, 58 insertions(+), 34 deletions(-) diff --git a/satpy/tests/reader_tests/test_oli_tirs_l1_tif.py b/satpy/tests/reader_tests/test_oli_tirs_l1_tif.py index fdb8407a6b..04f97de974 100644 --- a/satpy/tests/reader_tests/test_oli_tirs_l1_tif.py +++ b/satpy/tests/reader_tests/test_oli_tirs_l1_tif.py @@ -17,7 +17,6 @@ """Unittests for generic image reader.""" import os -import unittest import dask.array as da import numpy as np @@ -305,19 +304,20 @@ """ -class TestOLITIRSL1(unittest.TestCase): + +class TestOLITIRSL1: """Test generic image reader.""" - def setUp(self): + def setup_method(self): """Create temporary images and metadata to test on.""" import tempfile - from datetime import datetime + from datetime import datetime, timezone from pyresample.geometry import AreaDefinition from satpy.scene import Scene - self.date = datetime(2024, 5, 12) + self.date = datetime(2024, 5, 12, tzinfo=timezone.utc) self.filename_info = dict(observation_date=datetime(2024, 5, 3), platform_type="L", @@ -343,9 +343,9 @@ def setUp(self): self.test_data__2 = da.random.randint(8000, 14000, size=(self.y_size, self.x_size), chunks=(50, 50)).astype(np.uint16) - self.test_data__3= da.random.randint(1, 10000, - size=(self.y_size, self.x_size), - chunks=(50, 50)).astype(np.uint16) + self.test_data__3 = da.random.randint(1, 10000, + size=(self.y_size, self.x_size), + chunks=(50, 50)).astype(np.uint16) ds_b4 = xr.DataArray(self.test_data__1, dims=("y", "x"), @@ -406,7 +406,7 @@ def setUp(self): self.scn = scn - def tearDown(self): + def teardown_module(self): """Remove the temporary directory created for a test.""" try: import shutil @@ -433,44 +433,56 @@ def test_basicload(self): def test_ch_startend(self): """Test correct retrieval of start/end times.""" - from datetime import datetime + from datetime import datetime, timezone from satpy import Scene scn = Scene(reader="oli_tirs_l1_tif", filenames=[self.fnames[0], 
self.fnames[3], self.fnames[2]]) bnds = scn.available_dataset_names() - assert bnds == ["b04", "sza"] + assert bnds == ["b04", "solar_zenith_angle"] scn.load(["b04"]) - assert scn.start_time == datetime(2024, 5, 2, 18, 0, 24) - assert scn.end_time == datetime(2024, 5, 2, 18, 0, 24) + assert scn.start_time == datetime(2024, 5, 2, 18, 0, 24, tzinfo=timezone.utc) + assert scn.end_time == datetime(2024, 5, 2, 18, 0, 24, tzinfo=timezone.utc) - def test_loading(self): - """Test loading a Landsat Scene with good and bad channel requests.""" + def test_loading_gd(self): + """Test loading a Landsat Scene with good channel requests.""" from satpy.readers.oli_tirs_l1_tif import OLITIRSCHReader, OLITIRSMDReader good_mda = OLITIRSMDReader(self.fnames[2], self.filename_info, {}) rdr = OLITIRSCHReader(self.fnames[0], self.filename_info, self.ftype_info, good_mda) # Check case with good file data and load request - rdr.get_dataset({"name": "b04", "calibration": "counts"}, {}) + rdr.get_dataset({"name": "b04", "calibration": "counts"}, {"standard_name": "test_data", "units": "test_units"}) + def test_loading_badfil(self): + """Test loading a Landsat Scene with bad channel requests.""" + from satpy.readers.oli_tirs_l1_tif import OLITIRSCHReader, OLITIRSMDReader + good_mda = OLITIRSMDReader(self.fnames[2], self.filename_info, {}) + rdr = OLITIRSCHReader(self.fnames[0], self.filename_info, self.ftype_info, good_mda) + + ftype = {"standard_name": "test_data", "units": "test_units"} # Check case with request to load channel not matching filename with pytest.raises(ValueError, match="Requested channel b05 does not match the reader channel b04"): - rdr.get_dataset({"name": "b05", "calibration": "counts"}, {}) + rdr.get_dataset({"name": "b05", "calibration": "counts"}, ftype) + def test_loading_badchan(self): + """Test loading a Landsat Scene with bad channel requests.""" + from satpy.readers.oli_tirs_l1_tif import OLITIRSCHReader, OLITIRSMDReader + good_mda = 
OLITIRSMDReader(self.fnames[2], self.filename_info, {}) + ftype = {"standard_name": "test_data", "units": "test_units"} bad_finfo = self.filename_info.copy() bad_finfo["data_type"] = "T" # Check loading invalid channel for data type rdr = OLITIRSCHReader(self.fnames[1], bad_finfo, self.ftype_info, good_mda) - with pytest.raises(ValueError, match= "Requested channel b04 is not available in this granule"): - rdr.get_dataset({"name": "b04", "calibration": "counts"}, {}) + with pytest.raises(ValueError, match="Requested channel b04 is not available in this granule"): + rdr.get_dataset({"name": "b04", "calibration": "counts"}, ftype) bad_finfo["data_type"] = "O" ftype_b11 = self.ftype_info.copy() ftype_b11["file_type"] = "granule_b11" rdr = OLITIRSCHReader(self.fnames[1], bad_finfo, ftype_b11, good_mda) with pytest.raises(ValueError, match="Requested channel b11 is not available in this granule"): - rdr.get_dataset({"name": "b11", "calibration": "counts"}, {}) + rdr.get_dataset({"name": "b11", "calibration": "counts"}, ftype) def test_badfiles(self): """Test loading a Landsat Scene with bad data.""" @@ -478,14 +490,16 @@ def test_badfiles(self): bad_fname_info = self.filename_info.copy() bad_fname_info["platform_type"] = "B" + ftype = {"standard_name": "test_data", "units": "test_units"} + # Test that metadata reader initialises with correct filename - good_mda = OLITIRSMDReader(self.fnames[2], self.filename_info, {}) + good_mda = OLITIRSMDReader(self.fnames[2], self.filename_info, ftype) # Check metadata reader fails if platform type is wrong with pytest.raises(ValueError, match="This reader only supports Landsat data"): - OLITIRSMDReader(self.fnames[2], bad_fname_info, {}) + OLITIRSMDReader(self.fnames[2], bad_fname_info, ftype) - # Test that metadata reader initialises with correct filename + # Test that metadata reader initialises with correct filename OLITIRSCHReader(self.fnames[0], self.filename_info, self.ftype_info, good_mda) # Check metadata reader fails if 
platform type is wrong @@ -496,11 +510,10 @@ def test_badfiles(self): with pytest.raises(ValueError, match="Invalid file type: granule-b05"): OLITIRSCHReader(self.fnames[0], self.filename_info, bad_ftype_info, good_mda) - def test_calibration_modes(self): - """Test calibration modes for the reader.""" + def test_calibration_counts(self): + """Test counts calibration mode for the reader.""" from satpy import Scene - # Check counts calibration scn = Scene(reader="oli_tirs_l1_tif", filenames=self.fnames) scn.load(["b04", "b11"], calibration="counts") np.testing.assert_allclose(scn["b04"].values, self.test_data__1) @@ -510,7 +523,9 @@ def test_calibration_modes(self): assert scn["b04"].attrs["standard_name"] == "counts" assert scn["b11"].attrs["standard_name"] == "counts" - # Check radiance calibration + def test_calibration_radiance(self): + """Test radiance calibration mode for the reader.""" + from satpy import Scene exp_b04 = (self.test_data__1 * 0.0098329 - 49.16426).astype(np.float32) exp_b11 = (self.test_data__2 * 0.0003342 + 0.100000).astype(np.float32) @@ -523,8 +538,10 @@ def test_calibration_modes(self): np.testing.assert_allclose(scn["b04"].values, exp_b04, rtol=1e-4) np.testing.assert_allclose(scn["b11"].values, exp_b11, rtol=1e-4) - # Check top level calibration - exp_b04 = (self.test_data__1 * 2e-05 - 0.1).astype(np.float32) + def test_calibration_highlevel(self): + """Test high level calibration modes for the reader.""" + from satpy import Scene + exp_b04 = (self.test_data__1 * 2e-05 - 0.1).astype(np.float32) * 100 exp_b11 = (self.test_data__2 * 0.0003342 + 0.100000) exp_b11 = (1201.1442 / np.log((480.8883 / exp_b11) + 1)).astype(np.float32) scn = Scene(reader="oli_tirs_l1_tif", filenames=self.fnames) @@ -533,16 +550,23 @@ def test_calibration_modes(self): assert scn["b04"].attrs["units"] == "%" assert scn["b11"].attrs["units"] == "K" assert scn["b04"].attrs["standard_name"] == "toa_bidirectional_reflectance" - assert scn["b11"].attrs["standard_name"] 
== "toa_brightness_temperature" + assert scn["b11"].attrs["standard_name"] == "brightness_temperature" np.testing.assert_allclose(np.array(scn["b04"].values), np.array(exp_b04), rtol=1e-4) np.testing.assert_allclose(scn["b11"].values, exp_b11, rtol=1e-6) + def test_angles(self): + """Test calibration modes for the reader.""" + from satpy import Scene + # Check angles are calculated correctly scn = Scene(reader="oli_tirs_l1_tif", filenames=self.fnames) - scn.load(["sza"]) - assert scn["sza"].attrs["units"] == "degrees" - assert scn["sza"].attrs["standard_name"] == "solar_zenith_angle" - np.testing.assert_allclose(scn["sza"].values * 100, np.array(self.test_data__3), atol=0.01, rtol=1e-3) + scn.load(["solar_zenith_angle"]) + assert scn["solar_zenith_angle"].attrs["units"] == "degrees" + assert scn["solar_zenith_angle"].attrs["standard_name"] == "solar_zenith_angle" + np.testing.assert_allclose(scn["solar_zenith_angle"].values * 100, + np.array(self.test_data__3), + atol=0.01, + rtol=1e-3) def test_metadata(self): """Check that metadata values loaded correctly.""" From ffaab45fcf504722ef374f1c1cc7e19f61ed40d9 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Wed, 16 Oct 2024 23:16:21 +0200 Subject: [PATCH 149/340] Use fixtures in tests --- .../reader_tests/test_oli_tirs_l1_tif.py | 293 +++++++++--------- 1 file changed, 149 insertions(+), 144 deletions(-) diff --git a/satpy/tests/reader_tests/test_oli_tirs_l1_tif.py b/satpy/tests/reader_tests/test_oli_tirs_l1_tif.py index 04f97de974..bfeea0e688 100644 --- a/satpy/tests/reader_tests/test_oli_tirs_l1_tif.py +++ b/satpy/tests/reader_tests/test_oli_tirs_l1_tif.py @@ -17,11 +17,15 @@ """Unittests for generic image reader.""" import os +from datetime import datetime, timezone import dask.array as da import numpy as np import pytest import xarray as xr +from pyresample.geometry import AreaDefinition + +from satpy import Scene metadata_text = b""" @@ -305,20 +309,113 @@ """ -class TestOLITIRSL1: - """Test generic image 
reader.""" +x_size = 100 +y_size = 100 +date = datetime(2024, 5, 12, tzinfo=timezone.utc) + + +@pytest.fixture(scope="session") +def l1_area(): + """Get the landsat 1 area def.""" + pcs_id = "WGS 84 / UTM zone 40N" + proj4_dict = {"proj": "utm", "zone": 40, "datum": "WGS84", "units": "m", "no_defs": None, "type": "crs"} + area_extent = (619485., 2440485., 850515., 2675715.) + return AreaDefinition("geotiff_area", pcs_id, pcs_id, + proj4_dict, x_size, y_size, + area_extent) + + +@pytest.fixture(scope="session") +def b4_data(): + """Get the data for the b4 channel.""" + return da.random.randint(12000, 16000, + size=(y_size, x_size), + chunks=(50, 50)).astype(np.uint16) + + +@pytest.fixture(scope="session") +def b11_data(): + """Get the data for the b11 channel.""" + return da.random.randint(8000, 14000, + size=(y_size, x_size), + chunks=(50, 50)).astype(np.uint16) + - def setup_method(self): - """Create temporary images and metadata to test on.""" - import tempfile - from datetime import datetime, timezone +@pytest.fixture(scope="session") +def sza_data(): + """Get the data for the sza.""" + return da.random.randint(1, 10000, + size=(y_size, x_size), + chunks=(50, 50)).astype(np.uint16) - from pyresample.geometry import AreaDefinition - from satpy.scene import Scene +def create_tif_file(data, name, area, filename): + """Create a tif file.""" + data_array = xr.DataArray(data, + dims=("y", "x"), + attrs={"name": name, + "start_time": date}) + scn = Scene() + scn["band_data"] = data_array + scn["band_data"].attrs["area"] = area + scn.save_dataset("band_data", writer="geotiff", enhance=False, fill_value=0, + filename=os.fspath(filename)) + + +@pytest.fixture(scope="session") +def l1_files_path(tmp_path_factory): + """Create the path for l1 files.""" + return tmp_path_factory.mktemp("l1_files") + + +@pytest.fixture(scope="session") +def b4_file(l1_files_path, b4_data, l1_area): + """Create the file for the b4 channel.""" + data = b4_data + filename = l1_files_path / 
"LC08_L1GT_026200_20240502_20240513_02_T2_B4.TIF" + name = "b04" + create_tif_file(data, name, l1_area, filename) + return os.fspath(filename) + +@pytest.fixture(scope="session") +def b11_file(l1_files_path, b11_data, l1_area): + """Create the file for the b11 channel.""" + data = b11_data + filename = l1_files_path / "LC08_L1GT_026200_20240502_20240513_02_T2_B11.TIF" + name = "b11" + create_tif_file(data, name, l1_area, filename) + return os.fspath(filename) + +@pytest.fixture(scope="session") +def sza_file(l1_files_path, sza_data, l1_area): + """Create the file for the sza.""" + data = sza_data + filename = l1_files_path / "LC08_L1GT_026200_20240502_20240513_02_T2_SZA.TIF" + name = "sza" + create_tif_file(data, name, l1_area, filename) + return os.fspath(filename) + + +@pytest.fixture(scope="session") +def mda_file(l1_files_path): + """Create the metadata xml file.""" + filename = l1_files_path / "LC08_L1GT_026200_20240502_20240513_02_T2_MTL.xml" + with open(filename, "wb") as f: + f.write(metadata_text) + return os.fspath(filename) + + +@pytest.fixture(scope="session") +def all_files(b4_file, b11_file, mda_file, sza_file): + """Return all the files.""" + return b4_file, b11_file, mda_file, sza_file - self.date = datetime(2024, 5, 12, tzinfo=timezone.utc) +class TestOLITIRSL1: + """Test generic image reader.""" + + def setup_method(self, tmp_path): + """Set up the filename and filetype info dicts..""" self.filename_info = dict(observation_date=datetime(2024, 5, 3), platform_type="L", process_level_correction="L1TP", @@ -326,117 +423,25 @@ def setup_method(self): data_type="C") self.ftype_info = {"file_type": "granule_b04"} - # Create area definition - pcs_id = "WGS 84 / UTM zone 40N" - proj4_dict = {"proj": "utm", "zone": 40, "datum": "WGS84", "units": "m", "no_defs": None, "type": "crs"} - self.x_size = 100 - self.y_size = 100 - area_extent = (619485., 2440485., 850515., 2675715.) 
- self.area_def = AreaDefinition("geotiff_area", pcs_id, pcs_id, - proj4_dict, self.x_size, self.y_size, - area_extent) - - # Create datasets for L, LA, RGB and RGBA mode images - self.test_data__1 = da.random.randint(12000, 16000, - size=(self.y_size, self.x_size), - chunks=(50, 50)).astype(np.uint16) - self.test_data__2 = da.random.randint(8000, 14000, - size=(self.y_size, self.x_size), - chunks=(50, 50)).astype(np.uint16) - self.test_data__3 = da.random.randint(1, 10000, - size=(self.y_size, self.x_size), - chunks=(50, 50)).astype(np.uint16) - - ds_b4 = xr.DataArray(self.test_data__1, - dims=("y", "x"), - attrs={"name": "b04", - "start_time": self.date}) - - ds_b11 = xr.DataArray(self.test_data__2, - dims=("y", "x"), - attrs={"name": "b04", - "start_time": self.date}) - - ds_sza = xr.DataArray(self.test_data__3, - dims=("y", "x"), - attrs={"name": "sza", - "start_time": self.date}) - - # Temp dir for the saved images - self.base_dir = tempfile.mkdtemp() - - # Filenames to be used during testing - self.fnames = [f"{self.base_dir}/LC08_L1GT_026200_20240502_20240513_02_T2_B4.TIF", - f"{self.base_dir}/LC08_L1GT_026200_20240502_20240513_02_T2_B11.TIF", - f"{self.base_dir}/LC08_L1GT_026200_20240502_20240513_02_T2_MTL.xml", - f"{self.base_dir}/LC08_L1GT_026200_20240502_20240513_02_T2_SZA.TIF"] - - self.bad_fname_plat = self.fnames[0].replace("LC08", "BC08") - self.bad_fname_plat2 = self.fnames[2].replace("LC08", "BC08") - - self.bad_fname_chan = self.fnames[0].replace("B4", "B5") - - # Put the datasets to Scene for easy saving - scn = Scene() - scn["b4"] = ds_b4 - scn["b4"].attrs["area"] = self.area_def - scn["b11"] = ds_b11 - scn["b11"].attrs["area"] = self.area_def - scn["sza"] = ds_sza - scn["sza"].attrs["area"] = self.area_def - - # Save the images. 
Two images in PNG and two in GeoTIFF - scn.save_dataset("b4", writer="geotiff", enhance=False, fill_value=0, - filename=os.path.join(self.base_dir, self.fnames[0])) - scn.save_dataset("b11", writer="geotiff", enhance=False, fill_value=0, - filename=os.path.join(self.base_dir, self.fnames[1])) - scn.save_dataset("sza", writer="geotiff", enhance=False, fill_value=0, - filename=os.path.join(self.base_dir, self.fnames[3])) - - scn.save_dataset("b4", writer="geotiff", enhance=False, fill_value=0, - filename=self.bad_fname_plat) - scn.save_dataset("b4", writer="geotiff", enhance=False, fill_value=0, - filename=self.bad_fname_chan) - - # Write the metadata to a file - with open(os.path.join(self.base_dir, self.fnames[2]), "wb") as f: - f.write(metadata_text) - with open(self.bad_fname_plat2, "wb") as f: - f.write(metadata_text) - - self.scn = scn - - def teardown_module(self): - """Remove the temporary directory created for a test.""" - try: - import shutil - shutil.rmtree(self.base_dir, ignore_errors=True) - except OSError: - pass - - def test_basicload(self): + def test_basicload(self, l1_area, b4_file, b11_file, mda_file): """Test loading a Landsat Scene.""" - from satpy import Scene - scn = Scene(reader="oli_tirs_l1_tif", filenames=[self.fnames[0], - self.fnames[1], - self.fnames[2]]) + scn = Scene(reader="oli_tirs_l1_tif", filenames=[b4_file, + b11_file, + mda_file]) scn.load(["b04", "b11"]) # Check dataset is loaded correctly assert scn["b04"].shape == (100, 100) - assert scn["b04"].attrs["area"] == self.area_def + assert scn["b04"].attrs["area"] == l1_area assert scn["b04"].attrs["saturated"] assert scn["b11"].shape == (100, 100) - assert scn["b11"].attrs["area"] == self.area_def + assert scn["b11"].attrs["area"] == l1_area with pytest.raises(KeyError, match="saturated"): assert not scn["b11"].attrs["saturated"] - def test_ch_startend(self): + def test_ch_startend(self, b4_file, sza_file, mda_file): """Test correct retrieval of start/end times.""" - from datetime 
import datetime, timezone - - from satpy import Scene - scn = Scene(reader="oli_tirs_l1_tif", filenames=[self.fnames[0], self.fnames[3], self.fnames[2]]) + scn = Scene(reader="oli_tirs_l1_tif", filenames=[b4_file, sza_file, mda_file]) bnds = scn.available_dataset_names() assert bnds == ["b04", "solar_zenith_angle"] @@ -444,47 +449,47 @@ def test_ch_startend(self): assert scn.start_time == datetime(2024, 5, 2, 18, 0, 24, tzinfo=timezone.utc) assert scn.end_time == datetime(2024, 5, 2, 18, 0, 24, tzinfo=timezone.utc) - def test_loading_gd(self): + def test_loading_gd(self, mda_file, b4_file): """Test loading a Landsat Scene with good channel requests.""" from satpy.readers.oli_tirs_l1_tif import OLITIRSCHReader, OLITIRSMDReader - good_mda = OLITIRSMDReader(self.fnames[2], self.filename_info, {}) - rdr = OLITIRSCHReader(self.fnames[0], self.filename_info, self.ftype_info, good_mda) + good_mda = OLITIRSMDReader(mda_file, self.filename_info, {}) + rdr = OLITIRSCHReader(b4_file, self.filename_info, self.ftype_info, good_mda) # Check case with good file data and load request rdr.get_dataset({"name": "b04", "calibration": "counts"}, {"standard_name": "test_data", "units": "test_units"}) - def test_loading_badfil(self): + def test_loading_badfil(self, mda_file, b4_file): """Test loading a Landsat Scene with bad channel requests.""" from satpy.readers.oli_tirs_l1_tif import OLITIRSCHReader, OLITIRSMDReader - good_mda = OLITIRSMDReader(self.fnames[2], self.filename_info, {}) - rdr = OLITIRSCHReader(self.fnames[0], self.filename_info, self.ftype_info, good_mda) + good_mda = OLITIRSMDReader(mda_file, self.filename_info, {}) + rdr = OLITIRSCHReader(b4_file, self.filename_info, self.ftype_info, good_mda) ftype = {"standard_name": "test_data", "units": "test_units"} # Check case with request to load channel not matching filename with pytest.raises(ValueError, match="Requested channel b05 does not match the reader channel b04"): rdr.get_dataset({"name": "b05", "calibration": 
"counts"}, ftype) - def test_loading_badchan(self): + def test_loading_badchan(self, mda_file, b11_file): """Test loading a Landsat Scene with bad channel requests.""" from satpy.readers.oli_tirs_l1_tif import OLITIRSCHReader, OLITIRSMDReader - good_mda = OLITIRSMDReader(self.fnames[2], self.filename_info, {}) + good_mda = OLITIRSMDReader(mda_file, self.filename_info, {}) ftype = {"standard_name": "test_data", "units": "test_units"} bad_finfo = self.filename_info.copy() bad_finfo["data_type"] = "T" # Check loading invalid channel for data type - rdr = OLITIRSCHReader(self.fnames[1], bad_finfo, self.ftype_info, good_mda) + rdr = OLITIRSCHReader(b11_file, bad_finfo, self.ftype_info, good_mda) with pytest.raises(ValueError, match="Requested channel b04 is not available in this granule"): rdr.get_dataset({"name": "b04", "calibration": "counts"}, ftype) bad_finfo["data_type"] = "O" ftype_b11 = self.ftype_info.copy() ftype_b11["file_type"] = "granule_b11" - rdr = OLITIRSCHReader(self.fnames[1], bad_finfo, ftype_b11, good_mda) + rdr = OLITIRSCHReader(b11_file, bad_finfo, ftype_b11, good_mda) with pytest.raises(ValueError, match="Requested channel b11 is not available in this granule"): rdr.get_dataset({"name": "b11", "calibration": "counts"}, ftype) - def test_badfiles(self): + def test_badfiles(self, mda_file, b4_file): """Test loading a Landsat Scene with bad data.""" from satpy.readers.oli_tirs_l1_tif import OLITIRSCHReader, OLITIRSMDReader bad_fname_info = self.filename_info.copy() @@ -493,43 +498,43 @@ def test_badfiles(self): ftype = {"standard_name": "test_data", "units": "test_units"} # Test that metadata reader initialises with correct filename - good_mda = OLITIRSMDReader(self.fnames[2], self.filename_info, ftype) + good_mda = OLITIRSMDReader(mda_file, self.filename_info, ftype) # Check metadata reader fails if platform type is wrong with pytest.raises(ValueError, match="This reader only supports Landsat data"): - OLITIRSMDReader(self.fnames[2], bad_fname_info, 
ftype) + OLITIRSMDReader(mda_file, bad_fname_info, ftype) # Test that metadata reader initialises with correct filename - OLITIRSCHReader(self.fnames[0], self.filename_info, self.ftype_info, good_mda) + OLITIRSCHReader(b4_file, self.filename_info, self.ftype_info, good_mda) # Check metadata reader fails if platform type is wrong with pytest.raises(ValueError, match="This reader only supports Landsat data"): - OLITIRSCHReader(self.fnames[0], bad_fname_info, self.ftype_info, good_mda) + OLITIRSCHReader(b4_file, bad_fname_info, self.ftype_info, good_mda) bad_ftype_info = self.ftype_info.copy() bad_ftype_info["file_type"] = "granule-b05" with pytest.raises(ValueError, match="Invalid file type: granule-b05"): - OLITIRSCHReader(self.fnames[0], self.filename_info, bad_ftype_info, good_mda) + OLITIRSCHReader(b4_file, self.filename_info, bad_ftype_info, good_mda) - def test_calibration_counts(self): + def test_calibration_counts(self, all_files, b4_data, b11_data): """Test counts calibration mode for the reader.""" from satpy import Scene - scn = Scene(reader="oli_tirs_l1_tif", filenames=self.fnames) + scn = Scene(reader="oli_tirs_l1_tif", filenames=all_files) scn.load(["b04", "b11"], calibration="counts") - np.testing.assert_allclose(scn["b04"].values, self.test_data__1) - np.testing.assert_allclose(scn["b11"].values, self.test_data__2) + np.testing.assert_allclose(scn["b04"].values, b4_data) + np.testing.assert_allclose(scn["b11"].values, b11_data) assert scn["b04"].attrs["units"] == "1" assert scn["b11"].attrs["units"] == "1" assert scn["b04"].attrs["standard_name"] == "counts" assert scn["b11"].attrs["standard_name"] == "counts" - def test_calibration_radiance(self): + def test_calibration_radiance(self, all_files, b4_data, b11_data): """Test radiance calibration mode for the reader.""" from satpy import Scene - exp_b04 = (self.test_data__1 * 0.0098329 - 49.16426).astype(np.float32) - exp_b11 = (self.test_data__2 * 0.0003342 + 0.100000).astype(np.float32) + exp_b04 = 
(b4_data * 0.0098329 - 49.16426).astype(np.float32) + exp_b11 = (b11_data * 0.0003342 + 0.100000).astype(np.float32) - scn = Scene(reader="oli_tirs_l1_tif", filenames=self.fnames) + scn = Scene(reader="oli_tirs_l1_tif", filenames=all_files) scn.load(["b04", "b11"], calibration="radiance") assert scn["b04"].attrs["units"] == "W m-2 um-1 sr-1" assert scn["b11"].attrs["units"] == "W m-2 um-1 sr-1" @@ -538,13 +543,13 @@ def test_calibration_radiance(self): np.testing.assert_allclose(scn["b04"].values, exp_b04, rtol=1e-4) np.testing.assert_allclose(scn["b11"].values, exp_b11, rtol=1e-4) - def test_calibration_highlevel(self): + def test_calibration_highlevel(self, all_files, b4_data, b11_data): """Test high level calibration modes for the reader.""" from satpy import Scene - exp_b04 = (self.test_data__1 * 2e-05 - 0.1).astype(np.float32) * 100 - exp_b11 = (self.test_data__2 * 0.0003342 + 0.100000) + exp_b04 = (b4_data * 2e-05 - 0.1).astype(np.float32) * 100 + exp_b11 = (b11_data * 0.0003342 + 0.100000) exp_b11 = (1201.1442 / np.log((480.8883 / exp_b11) + 1)).astype(np.float32) - scn = Scene(reader="oli_tirs_l1_tif", filenames=self.fnames) + scn = Scene(reader="oli_tirs_l1_tif", filenames=all_files) scn.load(["b04", "b11"]) assert scn["b04"].attrs["units"] == "%" @@ -554,24 +559,24 @@ def test_calibration_highlevel(self): np.testing.assert_allclose(np.array(scn["b04"].values), np.array(exp_b04), rtol=1e-4) np.testing.assert_allclose(scn["b11"].values, exp_b11, rtol=1e-6) - def test_angles(self): + def test_angles(self, all_files, sza_data): """Test calibration modes for the reader.""" from satpy import Scene # Check angles are calculated correctly - scn = Scene(reader="oli_tirs_l1_tif", filenames=self.fnames) + scn = Scene(reader="oli_tirs_l1_tif", filenames=all_files) scn.load(["solar_zenith_angle"]) assert scn["solar_zenith_angle"].attrs["units"] == "degrees" assert scn["solar_zenith_angle"].attrs["standard_name"] == "solar_zenith_angle" 
np.testing.assert_allclose(scn["solar_zenith_angle"].values * 100, - np.array(self.test_data__3), + np.array(sza_data), atol=0.01, rtol=1e-3) - def test_metadata(self): + def test_metadata(self, mda_file): """Check that metadata values loaded correctly.""" from satpy.readers.oli_tirs_l1_tif import OLITIRSMDReader - mda = OLITIRSMDReader(self.fnames[2], self.filename_info, {}) + mda = OLITIRSMDReader(mda_file, self.filename_info, {}) cal_test_dict = {"b01": (0.012357, -61.78647, 2e-05, -0.1), "b05": (0.0060172, -30.08607, 2e-05, -0.1), @@ -588,10 +593,10 @@ def test_metadata(self): with pytest.raises(KeyError): mda.band_saturation["b10"] - def test_area_def(self): + def test_area_def(self, mda_file): """Check we can get the area defs properly.""" from satpy.readers.oli_tirs_l1_tif import OLITIRSMDReader - mda = OLITIRSMDReader(self.fnames[2], self.filename_info, {}) + mda = OLITIRSMDReader(mda_file, self.filename_info, {}) standard_area = mda.build_area_def("b01") pan_area = mda.build_area_def("b08") From dce15c3dd45a7fe35d4dbd61d63db40f99c40fdd Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Wed, 16 Oct 2024 23:16:48 +0200 Subject: [PATCH 150/340] Use automatic chunking size --- satpy/readers/oli_tirs_l1_tif.py | 12 +++++------- 1 file changed, 5 insertions(+), 7 deletions(-) diff --git a/satpy/readers/oli_tirs_l1_tif.py b/satpy/readers/oli_tirs_l1_tif.py index 6be1b20a1c..9ed382d084 100644 --- a/satpy/readers/oli_tirs_l1_tif.py +++ b/satpy/readers/oli_tirs_l1_tif.py @@ -34,10 +34,8 @@ import xarray as xr from satpy.readers.file_handlers import BaseFileHandler -from satpy.utils import get_legacy_chunk_size logger = logging.getLogger(__name__) -CHUNK_SIZE = get_legacy_chunk_size() PLATFORMS = {"08": "Landsat-8", "09": "Landsat-9"} @@ -111,11 +109,11 @@ def get_dataset(self, key, info): logger.debug("Reading %s.", key["name"]) - data = xr.open_dataset(self.filename, engine="rasterio", - chunks={"band": 1, - "y": CHUNK_SIZE, - "x": CHUNK_SIZE}, - 
mask_and_scale=False)["band_data"].squeeze() + data = xr.open_dataarray(self.filename, engine="rasterio", + chunks={"band": 1, + "y": "auto", + "x": "auto"}, + mask_and_scale=False).squeeze() # The fill value for Landsat is '0', for calibration simplicity convert it to np.nan From 3ab5c64ba18e504bd43bb3d184137971caa92c14 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Thu, 17 Oct 2024 08:23:31 +0200 Subject: [PATCH 151/340] Fix warning --- satpy/tests/enhancement_tests/test_enhancements.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/satpy/tests/enhancement_tests/test_enhancements.py b/satpy/tests/enhancement_tests/test_enhancements.py index b30a073968..89ff21aafa 100644 --- a/satpy/tests/enhancement_tests/test_enhancements.py +++ b/satpy/tests/enhancement_tests/test_enhancements.py @@ -456,10 +456,10 @@ def test_cmap_list(self): """Test that colors can be a list/tuple.""" from satpy.enhancements import create_colormap colors = [ - [0, 0, 1], - [1, 0, 1], - [0, 1, 1], - [1, 1, 1], + [0., 0., 1.], + [1., 0., 1.], + [0., 1., 1.], + [1., 1., 1.], ] values = [2, 4, 6, 8] cmap = create_colormap({"colors": colors, "color_scale": 1}) From b408e65f14c58b4df296262180b319b09989d1b0 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Thu, 17 Oct 2024 08:24:11 +0200 Subject: [PATCH 152/340] Test no-op --- satpy/tests/enhancement_tests/test_enhancements.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/satpy/tests/enhancement_tests/test_enhancements.py b/satpy/tests/enhancement_tests/test_enhancements.py index 89ff21aafa..b0f2b3d31b 100644 --- a/satpy/tests/enhancement_tests/test_enhancements.py +++ b/satpy/tests/enhancement_tests/test_enhancements.py @@ -711,3 +711,15 @@ def test_jma_true_color_reproduction(self): img = XRImage(self.rgb) with pytest.raises(KeyError, match="No conversion matrix found for platform Fakesat"): jma_true_color_reproduction(img) + + +def test_no_op_enhancement(): + """Test the no-op enhancement.""" + 
from satpy.enhancements import no_op + + data = da.arange(-100, 1000, 110).reshape(2, 5) + rgb_data = np.stack([data, data, data]) + rgb = xr.DataArray(rgb_data, dims=("bands", "y", "x"), + coords={"bands": ["R", "G", "B"]}, + attrs={"platform_name": "Himawari-8"}) + assert no_op(rgb) is rgb.data From eceedf94ea5a2282f690fd5e80e1147bf31eb78f Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Thu, 17 Oct 2024 09:05:50 +0200 Subject: [PATCH 153/340] Assert computed results are of right type --- satpy/tests/reader_tests/test_sar_c_safe.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/satpy/tests/reader_tests/test_sar_c_safe.py b/satpy/tests/reader_tests/test_sar_c_safe.py index 499068dc3f..db7b09450c 100644 --- a/satpy/tests/reader_tests/test_sar_c_safe.py +++ b/satpy/tests/reader_tests/test_sar_c_safe.py @@ -292,6 +292,7 @@ def test_read_calibrated_natural(self, measurement_filehandler): expected = np.array([[np.nan, 0.02707529], [2.55858416, 3.27611055]], dtype=np.float32) np.testing.assert_allclose(xarr.values[:2, :2], expected, rtol=2e-7) assert xarr.dtype == np.float32 + assert xarr.compute().dtype == np.float32 def test_read_calibrated_dB(self, measurement_filehandler): """Test the calibration routines.""" @@ -301,6 +302,7 @@ def test_read_calibrated_dB(self, measurement_filehandler): expected = np.array([[np.nan, -15.674268], [4.079997, 5.153585]], dtype=np.float32) np.testing.assert_allclose(xarr.values[:2, :2], expected, rtol=1e-6) assert xarr.dtype == np.float32 + assert xarr.compute().dtype == np.float32 def test_read_lon_lats(self, measurement_filehandler): """Test reading lons and lats.""" @@ -309,6 +311,7 @@ def test_read_lon_lats(self, measurement_filehandler): expected = expected_longitudes np.testing.assert_allclose(xarr.values, expected[:10, :10], atol=1e-3) assert xarr.dtype == np.float64 + assert xarr.compute().dtype == np.float64 annotation_xml = b""" @@ -781,6 +784,7 @@ def test_get_noise_dataset(self, noise_filehandler): res = 
noise_filehandler.get_dataset(query, {}) np.testing.assert_allclose(res, self.expected_azimuth_noise * self.expected_range_noise) assert res.dtype == np.float32 + assert res.compute().dtype == np.float32 def test_get_noise_dataset_has_right_chunk_size(self, noise_filehandler): """Test using get_dataset for the noise has right chunk size in result.""" @@ -804,6 +808,7 @@ def test_dn_calibration_array(self, calibration_filehandler): res = calibration_filehandler.get_calibration(Calibration.dn, chunks=5) np.testing.assert_allclose(res, expected_dn) assert res.dtype == np.float32 + assert res.compute().dtype == np.float32 def test_beta_calibration_array(self, calibration_filehandler): """Test reading the beta calibration array.""" @@ -811,6 +816,7 @@ def test_beta_calibration_array(self, calibration_filehandler): res = calibration_filehandler.get_calibration(Calibration.beta_nought, chunks=5) np.testing.assert_allclose(res, expected_beta) assert res.dtype == np.float32 + assert res.compute().dtype == np.float32 def test_sigma_calibration_array(self, calibration_filehandler): """Test reading the sigma calibration array.""" @@ -819,12 +825,14 @@ def test_sigma_calibration_array(self, calibration_filehandler): res = calibration_filehandler.get_calibration(Calibration.sigma_nought, chunks=5) np.testing.assert_allclose(res, expected_sigma) assert res.dtype == np.float32 + assert res.compute().dtype == np.float32 def test_gamma_calibration_array(self, calibration_filehandler): """Test reading the gamma calibration array.""" res = calibration_filehandler.get_calibration(Calibration.gamma, chunks=5) np.testing.assert_allclose(res, self.expected_gamma) assert res.dtype == np.float32 + assert res.compute().dtype == np.float32 def test_get_calibration_dataset(self, calibration_filehandler): """Test using get_dataset for the calibration.""" @@ -832,6 +840,7 @@ def test_get_calibration_dataset(self, calibration_filehandler): res = calibration_filehandler.get_dataset(query, {}) 
np.testing.assert_allclose(res, self.expected_gamma) assert res.dtype == np.float32 + assert res.compute().dtype == np.float32 def test_get_calibration_dataset_has_right_chunk_size(self, calibration_filehandler): """Test using get_dataset for the calibration yields array with right chunksize.""" @@ -854,6 +863,7 @@ def test_incidence_angle(annotation_filehandler): res = annotation_filehandler.get_dataset(query, {}) np.testing.assert_allclose(res, 19.18318046) assert res.dtype == np.float32 + assert res.compute().dtype == np.float32 def test_reading_from_reader(measurement_file, calibration_file, noise_file, annotation_file): @@ -874,6 +884,7 @@ def test_reading_from_reader(measurement_file, calibration_file, noise_file, ann expected_db = np.array([[np.nan, -15.674268], [4.079997, 5.153585]]) np.testing.assert_allclose(array.values[:2, :2], expected_db, rtol=1e-6) assert array.dtype == np.float32 + assert array.compute().dtype == np.float32 def test_filename_filtering_from_reader(measurement_file, calibration_file, noise_file, annotation_file, tmp_path): From 5634073e94de0615f20f18e6e4f0aa5dc9d4f76b Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Thu, 17 Oct 2024 09:25:06 +0200 Subject: [PATCH 154/340] Fix style --- satpy/tests/reader_tests/test_sar_c_safe.py | 1 - 1 file changed, 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_sar_c_safe.py b/satpy/tests/reader_tests/test_sar_c_safe.py index fafebba766..0d88190a5d 100644 --- a/satpy/tests/reader_tests/test_sar_c_safe.py +++ b/satpy/tests/reader_tests/test_sar_c_safe.py @@ -234,7 +234,6 @@ def test_read_lon_lats(self, measurement_filehandler): """Test reading lons and lats.""" query = DataQuery(name="longitude", polarization="vv") xarr = measurement_filehandler.get_dataset(query, info=dict()) - expected = expected_longitudes np.testing.assert_allclose(xarr.values, expected_longitudes) assert xarr.dtype == np.float64 assert xarr.compute().dtype == np.float64 From 4377ed3fb4d1618bbc4f19748dda2f397cafaaa2 
Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Thu, 17 Oct 2024 11:24:47 +0200 Subject: [PATCH 155/340] Drop python 3.9 --- .github/workflows/ci.yaml | 5 ++++- .github/workflows/deploy-sdist.yaml | 2 +- .pre-commit-config.yaml | 2 +- doc/rtd_environment.yml | 2 +- pyproject.toml | 2 +- 5 files changed, 8 insertions(+), 5 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index f74cc830ad..73c2f1f3e9 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -18,12 +18,15 @@ jobs: fail-fast: true matrix: os: ["windows-latest", "ubuntu-latest", "macos-latest"] - python-version: ["3.9", "3.11", "3.12"] + python-version: ["3.10", "3.11", "3.12"] experimental: [false] include: - python-version: "3.12" os: "ubuntu-latest" experimental: true + - python-version: "3.13" + os: "ubuntu-latest" + experimental: true env: PYTHON_VERSION: ${{ matrix.python-version }} diff --git a/.github/workflows/deploy-sdist.yaml b/.github/workflows/deploy-sdist.yaml index 12042f4f36..3a8e43181a 100644 --- a/.github/workflows/deploy-sdist.yaml +++ b/.github/workflows/deploy-sdist.yaml @@ -19,7 +19,7 @@ jobs: shell: bash -l {0} run: | python -m pip install -q build - python -m build -s + python -m build - name: Publish package to PyPI if: github.event.action == 'published' diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 82163f8b60..fc3d7e71e7 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -27,7 +27,7 @@ repos: - types-setuptools - types-PyYAML - types-requests - args: ["--python-version", "3.9", "--ignore-missing-imports"] + args: ["--python-version", "3.10", "--ignore-missing-imports"] - repo: https://github.com/pycqa/isort rev: 5.13.2 hooks: diff --git a/doc/rtd_environment.yml b/doc/rtd_environment.yml index 5bc7dabe95..3b11a9a20b 100644 --- a/doc/rtd_environment.yml +++ b/doc/rtd_environment.yml @@ -2,7 +2,7 @@ name: readthedocs channels: - conda-forge dependencies: - - python=3.10 + - python=3.11 - 
pip - platformdirs - dask diff --git a/pyproject.toml b/pyproject.toml index 196ae6a462..65267c8548 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -24,7 +24,7 @@ dependencies = [ "zarr", ] readme = "README.rst" -requires-python = ">=3.9" +requires-python = ">=3.10" license = { text = "GPLv3" } classifiers = [ "Development Status :: 5 - Production/Stable", From 21d80ab76541039be59d925445dc27e5ea1a409f Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Thu, 17 Oct 2024 13:41:36 +0200 Subject: [PATCH 156/340] Update Landsat reader for compatibility with Pyspectral. --- satpy/etc/composites/oli_tirs.yaml | 152 +++++++++--------- satpy/etc/readers/oli_tirs_l1_tif.yaml | 88 +++++----- satpy/readers/oli_tirs_l1_tif.py | 12 +- .../reader_tests/test_oli_tirs_l1_tif.py | 106 ++++++------ 4 files changed, 179 insertions(+), 179 deletions(-) diff --git a/satpy/etc/composites/oli_tirs.yaml b/satpy/etc/composites/oli_tirs.yaml index 7c74dc3f09..7dd41db4af 100644 --- a/satpy/etc/composites/oli_tirs.yaml +++ b/satpy/etc/composites/oli_tirs.yaml @@ -6,7 +6,7 @@ modifiers: atmosphere: us-standard aerosol_type: rayleigh_only prerequisites: - - name: 'b04' + - name: 'B4' modifiers: [sunz_corrected] optional_prerequisites: - name: satellite_azimuth_angle @@ -19,7 +19,7 @@ modifiers: atmosphere: us-standard aerosol_type: antarctic_aerosol prerequisites: - - name: 'b04' + - name: 'B4' modifiers: [sunz_corrected] optional_prerequisites: - name: satellite_azimuth_angle @@ -32,7 +32,7 @@ modifiers: atmosphere: us-standard aerosol_type: continental_average_aerosol prerequisites: - - name: 'b04' + - name: 'B4' modifiers: [sunz_corrected] optional_prerequisites: - name: satellite_azimuth_angle @@ -45,7 +45,7 @@ modifiers: atmosphere: us-standard aerosol_type: continental_clean_aerosol prerequisites: - - name: 'b04' + - name: 'B4' modifiers: [sunz_corrected] optional_prerequisites: - name: satellite_azimuth_angle @@ -58,7 +58,7 @@ modifiers: atmosphere: us-standard aerosol_type: 
continental_polluted_aerosol prerequisites: - - name: 'b04' + - name: 'B4' modifiers: [sunz_corrected] optional_prerequisites: - name: satellite_azimuth_angle @@ -71,7 +71,7 @@ modifiers: atmosphere: us-standard aerosol_type: desert_aerosol prerequisites: - - name: 'b04' + - name: 'B4' modifiers: [sunz_corrected] optional_prerequisites: - name: satellite_azimuth_angle @@ -84,7 +84,7 @@ modifiers: atmosphere: us-standard aerosol_type: marine_clean_aerosol prerequisites: - - name: 'b04' + - name: 'B4' modifiers: [sunz_corrected] optional_prerequisites: - name: satellite_azimuth_angle @@ -97,7 +97,7 @@ modifiers: atmosphere: us-standard aerosol_type: marine_polluted_aerosol prerequisites: - - name: 'b04' + - name: 'B4' modifiers: [sunz_corrected] optional_prerequisites: - name: satellite_azimuth_angle @@ -110,7 +110,7 @@ modifiers: atmosphere: us-standard aerosol_type: marine_tropical_aerosol prerequisites: - - name: 'b04' + - name: 'B4' modifiers: [sunz_corrected] optional_prerequisites: - name: satellite_azimuth_angle @@ -123,7 +123,7 @@ modifiers: atmosphere: us-standard aerosol_type: rural_aerosol prerequisites: - - name: 'b04' + - name: 'B4' modifiers: [sunz_corrected] optional_prerequisites: - name: satellite_azimuth_angle @@ -136,7 +136,7 @@ modifiers: atmosphere: us-standard aerosol_type: urban_aerosol prerequisites: - - name: 'b04' + - name: 'B4' modifiers: [sunz_corrected] optional_prerequisites: - name: satellite_azimuth_angle @@ -149,176 +149,176 @@ composites: true_color: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - - name: 'b04' + - name: 'B4' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] - - name: 'b03' + - name: 'B3' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] - - name: 'b02' + - name: 'B2' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] standard_name: true_color true_color_antarctic: compositor: !!python/name:satpy.composites.GenericCompositor 
prerequisites: - - name: 'b04' + - name: 'B4' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_antarctic] - - name: 'b03' + - name: 'B3' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_antarctic] - - name: 'b02' + - name: 'B2' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_antarctic] standard_name: true_color true_color_continental_average: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - - name: 'b04' + - name: 'B4' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_continental_average] - - name: 'b03' + - name: 'B3' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_continental_average] - - name: 'b02' + - name: 'B2' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_continental_average] standard_name: true_color true_color_continental_clean: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - - name: 'b04' + - name: 'B4' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_continental_clean] - - name: 'b03' + - name: 'B3' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_continental_clean] - - name: 'b02' + - name: 'B2' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_continental_clean] standard_name: true_color true_color_continental_polluted: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - - name: 'b04' + - name: 'B4' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_continental_polluted] - - name: 'b03' + - name: 'B3' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_continental_polluted] - - name: 'b02' + - name: 'B2' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_continental_polluted] standard_name: true_color true_color_desert: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - - name: 'b04' + - name: 'B4' modifiers: 
[effective_solar_pathlength_corrected, rayleigh_corrected_desert] - - name: 'b03' + - name: 'B3' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_desert] - - name: 'b02' + - name: 'B2' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_desert] standard_name: true_color true_color_marine_clean: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - - name: 'b04' + - name: 'B4' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_clean] - - name: 'b03' + - name: 'B3' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_clean] - - name: 'b02' + - name: 'B2' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_clean] standard_name: true_color true_color_marine_polluted: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - - name: 'b04' + - name: 'B4' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_polluted] - - name: 'b03' + - name: 'B3' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_polluted] - - name: 'b02' + - name: 'B2' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_polluted] standard_name: true_color true_color_marine_tropical: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - - name: 'b04' + - name: 'B4' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_tropical] - - name: 'b03' + - name: 'B3' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_tropical] - - name: 'b02' + - name: 'B2' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_marine_tropical] standard_name: true_color true_color_rural: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - - name: 'b04' + - name: 'B4' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_rural] - - name: 'b03' + - name: 'B3' modifiers: 
[effective_solar_pathlength_corrected, rayleigh_corrected_rural] - - name: 'b02' + - name: 'B2' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_rural] standard_name: true_color true_color_urban: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - - name: 'b04' + - name: 'B4' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_urban] - - name: 'b03' + - name: 'B3' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_urban] - - name: 'b02' + - name: 'B2' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected_urban] standard_name: true_color true_color_uncorr: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - - name: 'b04' + - name: 'B4' modifiers: [effective_solar_pathlength_corrected] - - name: 'b03' + - name: 'B3' modifiers: [effective_solar_pathlength_corrected] - - name: 'b02' + - name: 'B2' modifiers: [effective_solar_pathlength_corrected] standard_name: true_color true_color_raw: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - - name: 'b04' + - name: 'B4' # modifiers: [effective_solar_pathlength_corrected] - - name: 'b03' + - name: 'B3' # modifiers: [effective_solar_pathlength_corrected] - - name: 'b02' + - name: 'B2' # modifiers: [effective_solar_pathlength_corrected] standard_name: true_color natural_color: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - - name: 'b06' + - name: 'B6' modifiers: [effective_solar_pathlength_corrected] - - name: 'b05' + - name: 'B5' modifiers: [effective_solar_pathlength_corrected] - - name: 'b04' + - name: 'B4' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] standard_name: natural_color urban_color: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - - name: 'b07' + - name: 'B7' modifiers: [effective_solar_pathlength_corrected] - - name: 'b06' + - name: 'B6' modifiers: 
[effective_solar_pathlength_corrected] - - name: 'b04' + - name: 'B4' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] standard_name: natural_color false_color: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - - name: 'b05' + - name: 'B5' modifiers: [effective_solar_pathlength_corrected] - - name: 'b04' + - name: 'B4' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] - - name: 'b03' + - name: 'B3' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] standard_name: natural_color @@ -331,15 +331,15 @@ composites: prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - - name: 'b05' + - name: 'B5' modifiers: [effective_solar_pathlength_corrected] - - name: 'b04' + - name: 'B4' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] - compositor: !!python/name:satpy.composites.SumCompositor prerequisites: - - name: 'b05' + - name: 'B5' modifiers: [effective_solar_pathlength_corrected] - - name: 'b04' + - name: 'B4' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] standard_name: ndvi_msi @@ -352,15 +352,15 @@ composites: prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - - name: 'b05' + - name: 'B5' modifiers: [effective_solar_pathlength_corrected] - - name: 'b06' + - name: 'B6' modifiers: [effective_solar_pathlength_corrected] - compositor: !!python/name:satpy.composites.SumCompositor prerequisites: - - name: 'b05' + - name: 'B5' modifiers: [effective_solar_pathlength_corrected] - - name: 'b06' + - name: 'B6' modifiers: [effective_solar_pathlength_corrected] standard_name: ndmi_msi @@ -373,15 +373,15 @@ composites: prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - - name: 'b03' + - name: 'B3' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] - - name: 'b05' + - name: 'B5' modifiers: 
[effective_solar_pathlength_corrected] - compositor: !!python/name:satpy.composites.SumCompositor prerequisites: - - name: 'b03' + - name: 'B3' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] - - name: 'b05' + - name: 'B5' modifiers: [effective_solar_pathlength_corrected] standard_name: ndwi_msi @@ -390,21 +390,21 @@ composites: # For more information please review https://custom-scripts.sentinel-hub.com/sentinel-2/ndsi/ compositor: !!python/name:satpy.composites.MaskingCompositor prerequisites: - - name: 'b06' + - name: 'B6' modifiers: [effective_solar_pathlength_corrected] - compositor: !!python/name:satpy.composites.RatioCompositor prerequisites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - - name: 'b03' + - name: 'B3' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] - - name: 'b06' + - name: 'B6' modifiers: [effective_solar_pathlength_corrected] - compositor: !!python/name:satpy.composites.SumCompositor prerequisites: - - name: 'b03' + - name: 'B3' modifiers: [effective_solar_pathlength_corrected, rayleigh_corrected] - - name: 'b06' + - name: 'B6' modifiers: [effective_solar_pathlength_corrected] conditions: - method: less_equal diff --git a/satpy/etc/readers/oli_tirs_l1_tif.yaml b/satpy/etc/readers/oli_tirs_l1_tif.yaml index 54799ab624..ea21d3ec91 100644 --- a/satpy/etc/readers/oli_tirs_l1_tif.yaml +++ b/satpy/etc/readers/oli_tirs_l1_tif.yaml @@ -11,58 +11,58 @@ reader: file_types: # Bands on the OLI subsystem - granule_b01: + granule_B1: file_reader: !!python/name:satpy.readers.oli_tirs_l1_tif.OLITIRSCHReader file_patterns: ['{platform_type:1s}{data_type:1s}{spacecraft_id:2s}_{process_level_correction:4s}_{tilepath:3s}{tilerow:3s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_B1.TIF'] requires: [l1_metadata] - granule_b02: + granule_B2: file_reader: !!python/name:satpy.readers.oli_tirs_l1_tif.OLITIRSCHReader file_patterns: 
['{platform_type:1s}{data_type:1s}{spacecraft_id:2s}_{process_level_correction:4s}_{tilepath:3s}{tilerow:3s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_B2.TIF'] requires: [l1_metadata] - granule_b03: + granule_B3: file_reader: !!python/name:satpy.readers.oli_tirs_l1_tif.OLITIRSCHReader file_patterns: ['{platform_type:1s}{data_type:1s}{spacecraft_id:2s}_{process_level_correction:4s}_{tilepath:3s}{tilerow:3s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_B3.TIF'] requires: [l1_metadata] - granule_b04: + granule_B4: file_reader: !!python/name:satpy.readers.oli_tirs_l1_tif.OLITIRSCHReader file_patterns: ['{platform_type:1s}{data_type:1s}{spacecraft_id:2s}_{process_level_correction:4s}_{tilepath:3s}{tilerow:3s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_B4.TIF'] requires: [l1_metadata] - granule_b05: + granule_B5: file_reader: !!python/name:satpy.readers.oli_tirs_l1_tif.OLITIRSCHReader file_patterns: ['{platform_type:1s}{data_type:1s}{spacecraft_id:2s}_{process_level_correction:4s}_{tilepath:3s}{tilerow:3s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_B5.TIF'] requires: [l1_metadata] - granule_b06: + granule_B6: file_reader: !!python/name:satpy.readers.oli_tirs_l1_tif.OLITIRSCHReader file_patterns: ['{platform_type:1s}{data_type:1s}{spacecraft_id:2s}_{process_level_correction:4s}_{tilepath:3s}{tilerow:3s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_B6.TIF'] requires: [l1_metadata] - granule_b07: + granule_B7: file_reader: !!python/name:satpy.readers.oli_tirs_l1_tif.OLITIRSCHReader file_patterns: ['{platform_type:1s}{data_type:1s}{spacecraft_id:2s}_{process_level_correction:4s}_{tilepath:3s}{tilerow:3s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_B7.TIF'] requires: [l1_metadata] - 
granule_b08: + granule_B8: file_reader: !!python/name:satpy.readers.oli_tirs_l1_tif.OLITIRSCHReader file_patterns: ['{platform_type:1s}{data_type:1s}{spacecraft_id:2s}_{process_level_correction:4s}_{tilepath:3s}{tilerow:3s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_B8.TIF'] requires: [l1_metadata] - granule_b09: + granule_B9: file_reader: !!python/name:satpy.readers.oli_tirs_l1_tif.OLITIRSCHReader file_patterns: ['{platform_type:1s}{data_type:1s}{spacecraft_id:2s}_{process_level_correction:4s}_{tilepath:3s}{tilerow:3s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_B9.TIF'] requires: [l1_metadata] # Bands on the TIRS subsystem - granule_b10: + granule_B10: file_reader: !!python/name:satpy.readers.oli_tirs_l1_tif.OLITIRSCHReader file_patterns: ['{platform_type:1s}{data_type:1s}{spacecraft_id:2s}_{process_level_correction:4s}_{tilepath:3s}{tilerow:3s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_B10.TIF'] requires: [l1_metadata] - granule_b11: + granule_B11: file_reader: !!python/name:satpy.readers.oli_tirs_l1_tif.OLITIRSCHReader file_patterns: ['{platform_type:1s}{data_type:1s}{spacecraft_id:2s}_{process_level_correction:4s}_{tilepath:3s}{tilerow:3s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_B11.TIF'] requires: [l1_metadata] @@ -100,8 +100,8 @@ file_types: file_patterns: ['{platform_type:1s}{data_type:1s}{spacecraft_id:2s}_{process_level_correction:4s}_{tilepath:3s}{tilerow:3s}_{observation_date:%Y%m%d}_{processing_date:%Y%m%d}_{collection_id:2s}_{collection_category}_MTL.xml'] datasets: - b01: - name: b01 + B1: + name: B1 sensor: oli_tirs wavelength: [0.433, 0.443, 0.453] resolution: 30 @@ -115,10 +115,10 @@ datasets: counts: standard_name: counts units: "1" - file_type: granule_b01 + file_type: granule_B1 - b02: - name: b02 + B2: + name: B2 sensor: oli_tirs wavelength: 
[0.450, 0.482, 0.515] resolution: 30 @@ -132,10 +132,10 @@ datasets: counts: standard_name: counts units: "1" - file_type: granule_b02 + file_type: granule_B2 - b03: - name: b03 + B3: + name: B3 sensor: oli_tirs wavelength: [0.525, 0.565, 0.600] resolution: 30 @@ -149,10 +149,10 @@ datasets: counts: standard_name: counts units: "1" - file_type: granule_b03 + file_type: granule_B3 - b04: - name: b04 + B4: + name: B4 sensor: oli_tirs wavelength: [0.630, 0.660, 0.680] resolution: 30 @@ -166,10 +166,10 @@ datasets: counts: standard_name: counts units: "1" - file_type: granule_b04 + file_type: granule_B4 - b05: - name: b05 + B5: + name: B5 sensor: oli_tirs wavelength: [0.845, 0.867, 0.885] resolution: 30 @@ -183,10 +183,10 @@ datasets: counts: standard_name: counts units: "1" - file_type: granule_b05 + file_type: granule_B5 - b06: - name: b06 + B6: + name: B6 sensor: oli_tirs wavelength: [1.560, 1.650, 1.660] resolution: 30 @@ -200,10 +200,10 @@ datasets: counts: standard_name: counts units: "1" - file_type: granule_b06 + file_type: granule_B6 - b07: - name: b07 + B7: + name: B7 sensor: oli_tirs wavelength: [2.100, 2.215, 2.300] resolution: 30 @@ -217,10 +217,10 @@ datasets: counts: standard_name: counts units: "1" - file_type: granule_b07 + file_type: granule_B7 - b08: - name: b08 + B8: + name: B8 sensor: oli_tirs wavelength: [0.500, 0.579, 0.680] resolution: 15 @@ -234,10 +234,10 @@ datasets: counts: standard_name: counts units: "1" - file_type: granule_b08 + file_type: granule_B8 - b09: - name: b09 + B9: + name: B9 sensor: oli_tirs wavelength: [1.360, 1.373, 1.390] resolution: 30 @@ -251,11 +251,11 @@ datasets: counts: standard_name: counts units: "1" - file_type: granule_b09 + file_type: granule_B9 # Channels on the TIRS instrument - b10: - name: b10 + B10: + name: B10 sensor: oli_tirs wavelength: [10.6, 10.888, 11.19] resolution: 30 @@ -269,10 +269,10 @@ datasets: counts: standard_name: counts units: "1" - file_type: granule_b10 + file_type: granule_B10 - b11: - 
name: b11 + B11: + name: B11 sensor: oli_tirs wavelength: [11.5, 11.981, 12.51] resolution: 30 @@ -286,7 +286,7 @@ datasets: counts: standard_name: counts units: "1" - file_type: granule_b11 + file_type: granule_B11 # QA Variables qa: diff --git a/satpy/readers/oli_tirs_l1_tif.py b/satpy/readers/oli_tirs_l1_tif.py index 9ed382d084..08d2240e88 100644 --- a/satpy/readers/oli_tirs_l1_tif.py +++ b/satpy/readers/oli_tirs_l1_tif.py @@ -40,9 +40,9 @@ PLATFORMS = {"08": "Landsat-8", "09": "Landsat-9"} -OLI_BANDLIST = ["b01", "b02", "b03", "b04", "b05", "b06", "b07", "b08", "b09"] -TIRS_BANDLIST = ["b10", "b11"] -PAN_BANDLIST = ["b08"] +OLI_BANDLIST = ["B1", "B2", "B3", "B4", "B5", "B6", "B7", "B8", "B9"] +TIRS_BANDLIST = ["B10", "B11"] +PAN_BANDLIST = ["B8"] ANGLIST = ["satellite_azimuth_angle", "satellite_zenith_angle", "solar_azimuth_angle", @@ -235,7 +235,7 @@ def band_saturation(self): """Return per-band saturation flag.""" bdict = {} for i in range(1, 10): - bdict[f"b{i:02d}"] = self._get_satflag(i) + bdict[f"B{i:01d}"] = self._get_satflag(i) return bdict @@ -264,9 +264,9 @@ def band_calibration(self): """Return per-band saturation flag.""" bdict = {} for i in range(1, 10): - bdict[f"b{i:02d}"] = self._get_band_viscal(i) + bdict[f"B{i:01d}"] = self._get_band_viscal(i) for i in range(10, 12): - bdict[f"b{i:02d}"] = self._get_band_tircal(i) + bdict[f"B{i:02d}"] = self._get_band_tircal(i) return bdict diff --git a/satpy/tests/reader_tests/test_oli_tirs_l1_tif.py b/satpy/tests/reader_tests/test_oli_tirs_l1_tif.py index bfeea0e688..6790ed378f 100644 --- a/satpy/tests/reader_tests/test_oli_tirs_l1_tif.py +++ b/satpy/tests/reader_tests/test_oli_tirs_l1_tif.py @@ -373,7 +373,7 @@ def b4_file(l1_files_path, b4_data, l1_area): """Create the file for the b4 channel.""" data = b4_data filename = l1_files_path / "LC08_L1GT_026200_20240502_20240513_02_T2_B4.TIF" - name = "b04" + name = "B4" create_tif_file(data, name, l1_area, filename) return os.fspath(filename) @@ -382,7 +382,7 
@@ def b11_file(l1_files_path, b11_data, l1_area): """Create the file for the b11 channel.""" data = b11_data filename = l1_files_path / "LC08_L1GT_026200_20240502_20240513_02_T2_B11.TIF" - name = "b11" + name = "B11" create_tif_file(data, name, l1_area, filename) return os.fspath(filename) @@ -421,31 +421,31 @@ def setup_method(self, tmp_path): process_level_correction="L1TP", spacecraft_id="08", data_type="C") - self.ftype_info = {"file_type": "granule_b04"} + self.ftype_info = {"file_type": "granule_B4"} def test_basicload(self, l1_area, b4_file, b11_file, mda_file): """Test loading a Landsat Scene.""" scn = Scene(reader="oli_tirs_l1_tif", filenames=[b4_file, b11_file, mda_file]) - scn.load(["b04", "b11"]) + scn.load(["B4", "B11"]) # Check dataset is loaded correctly - assert scn["b04"].shape == (100, 100) - assert scn["b04"].attrs["area"] == l1_area - assert scn["b04"].attrs["saturated"] - assert scn["b11"].shape == (100, 100) - assert scn["b11"].attrs["area"] == l1_area + assert scn["B4"].shape == (100, 100) + assert scn["B4"].attrs["area"] == l1_area + assert scn["B4"].attrs["saturated"] + assert scn["B11"].shape == (100, 100) + assert scn["B11"].attrs["area"] == l1_area with pytest.raises(KeyError, match="saturated"): - assert not scn["b11"].attrs["saturated"] + assert not scn["B11"].attrs["saturated"] def test_ch_startend(self, b4_file, sza_file, mda_file): """Test correct retrieval of start/end times.""" scn = Scene(reader="oli_tirs_l1_tif", filenames=[b4_file, sza_file, mda_file]) bnds = scn.available_dataset_names() - assert bnds == ["b04", "solar_zenith_angle"] + assert bnds == ["B4", "solar_zenith_angle"] - scn.load(["b04"]) + scn.load(["B4"]) assert scn.start_time == datetime(2024, 5, 2, 18, 0, 24, tzinfo=timezone.utc) assert scn.end_time == datetime(2024, 5, 2, 18, 0, 24, tzinfo=timezone.utc) @@ -456,7 +456,7 @@ def test_loading_gd(self, mda_file, b4_file): rdr = OLITIRSCHReader(b4_file, self.filename_info, self.ftype_info, good_mda) # Check case 
with good file data and load request - rdr.get_dataset({"name": "b04", "calibration": "counts"}, {"standard_name": "test_data", "units": "test_units"}) + rdr.get_dataset({"name": "B4", "calibration": "counts"}, {"standard_name": "test_data", "units": "test_units"}) def test_loading_badfil(self, mda_file, b4_file): """Test loading a Landsat Scene with bad channel requests.""" @@ -466,8 +466,8 @@ def test_loading_badfil(self, mda_file, b4_file): ftype = {"standard_name": "test_data", "units": "test_units"} # Check case with request to load channel not matching filename - with pytest.raises(ValueError, match="Requested channel b05 does not match the reader channel b04"): - rdr.get_dataset({"name": "b05", "calibration": "counts"}, ftype) + with pytest.raises(ValueError, match="Requested channel B5 does not match the reader channel B4"): + rdr.get_dataset({"name": "B5", "calibration": "counts"}, ftype) def test_loading_badchan(self, mda_file, b11_file): """Test loading a Landsat Scene with bad channel requests.""" @@ -479,15 +479,15 @@ def test_loading_badchan(self, mda_file, b11_file): # Check loading invalid channel for data type rdr = OLITIRSCHReader(b11_file, bad_finfo, self.ftype_info, good_mda) - with pytest.raises(ValueError, match="Requested channel b04 is not available in this granule"): - rdr.get_dataset({"name": "b04", "calibration": "counts"}, ftype) + with pytest.raises(ValueError, match="Requested channel B4 is not available in this granule"): + rdr.get_dataset({"name": "B4", "calibration": "counts"}, ftype) bad_finfo["data_type"] = "O" ftype_b11 = self.ftype_info.copy() - ftype_b11["file_type"] = "granule_b11" + ftype_b11["file_type"] = "granule_B11" rdr = OLITIRSCHReader(b11_file, bad_finfo, ftype_b11, good_mda) - with pytest.raises(ValueError, match="Requested channel b11 is not available in this granule"): - rdr.get_dataset({"name": "b11", "calibration": "counts"}, ftype) + with pytest.raises(ValueError, match="Requested channel B11 is not available in 
this granule"): + rdr.get_dataset({"name": "B11", "calibration": "counts"}, ftype) def test_badfiles(self, mda_file, b4_file): """Test loading a Landsat Scene with bad data.""" @@ -520,13 +520,13 @@ def test_calibration_counts(self, all_files, b4_data, b11_data): from satpy import Scene scn = Scene(reader="oli_tirs_l1_tif", filenames=all_files) - scn.load(["b04", "b11"], calibration="counts") - np.testing.assert_allclose(scn["b04"].values, b4_data) - np.testing.assert_allclose(scn["b11"].values, b11_data) - assert scn["b04"].attrs["units"] == "1" - assert scn["b11"].attrs["units"] == "1" - assert scn["b04"].attrs["standard_name"] == "counts" - assert scn["b11"].attrs["standard_name"] == "counts" + scn.load(["B4", "B11"], calibration="counts") + np.testing.assert_allclose(scn["B4"].values, b4_data) + np.testing.assert_allclose(scn["B11"].values, b11_data) + assert scn["B4"].attrs["units"] == "1" + assert scn["B11"].attrs["units"] == "1" + assert scn["B4"].attrs["standard_name"] == "counts" + assert scn["B11"].attrs["standard_name"] == "counts" def test_calibration_radiance(self, all_files, b4_data, b11_data): """Test radiance calibration mode for the reader.""" @@ -535,13 +535,13 @@ def test_calibration_radiance(self, all_files, b4_data, b11_data): exp_b11 = (b11_data * 0.0003342 + 0.100000).astype(np.float32) scn = Scene(reader="oli_tirs_l1_tif", filenames=all_files) - scn.load(["b04", "b11"], calibration="radiance") - assert scn["b04"].attrs["units"] == "W m-2 um-1 sr-1" - assert scn["b11"].attrs["units"] == "W m-2 um-1 sr-1" - assert scn["b04"].attrs["standard_name"] == "toa_outgoing_radiance_per_unit_wavelength" - assert scn["b11"].attrs["standard_name"] == "toa_outgoing_radiance_per_unit_wavelength" - np.testing.assert_allclose(scn["b04"].values, exp_b04, rtol=1e-4) - np.testing.assert_allclose(scn["b11"].values, exp_b11, rtol=1e-4) + scn.load(["B4", "B11"], calibration="radiance") + assert scn["B4"].attrs["units"] == "W m-2 um-1 sr-1" + assert 
scn["B11"].attrs["units"] == "W m-2 um-1 sr-1" + assert scn["B4"].attrs["standard_name"] == "toa_outgoing_radiance_per_unit_wavelength" + assert scn["B11"].attrs["standard_name"] == "toa_outgoing_radiance_per_unit_wavelength" + np.testing.assert_allclose(scn["B4"].values, exp_b04, rtol=1e-4) + np.testing.assert_allclose(scn["B11"].values, exp_b11, rtol=1e-4) def test_calibration_highlevel(self, all_files, b4_data, b11_data): """Test high level calibration modes for the reader.""" @@ -550,14 +550,14 @@ def test_calibration_highlevel(self, all_files, b4_data, b11_data): exp_b11 = (b11_data * 0.0003342 + 0.100000) exp_b11 = (1201.1442 / np.log((480.8883 / exp_b11) + 1)).astype(np.float32) scn = Scene(reader="oli_tirs_l1_tif", filenames=all_files) - scn.load(["b04", "b11"]) + scn.load(["B4", "B11"]) - assert scn["b04"].attrs["units"] == "%" - assert scn["b11"].attrs["units"] == "K" - assert scn["b04"].attrs["standard_name"] == "toa_bidirectional_reflectance" - assert scn["b11"].attrs["standard_name"] == "brightness_temperature" - np.testing.assert_allclose(np.array(scn["b04"].values), np.array(exp_b04), rtol=1e-4) - np.testing.assert_allclose(scn["b11"].values, exp_b11, rtol=1e-6) + assert scn["B4"].attrs["units"] == "%" + assert scn["B11"].attrs["units"] == "K" + assert scn["B4"].attrs["standard_name"] == "toa_bidirectional_reflectance" + assert scn["B11"].attrs["standard_name"] == "brightness_temperature" + np.testing.assert_allclose(np.array(scn["B4"].values), np.array(exp_b04), rtol=1e-4) + np.testing.assert_allclose(scn["B11"].values, exp_b11, rtol=1e-6) def test_angles(self, all_files, sza_data): """Test calibration modes for the reader.""" @@ -578,28 +578,28 @@ def test_metadata(self, mda_file): from satpy.readers.oli_tirs_l1_tif import OLITIRSMDReader mda = OLITIRSMDReader(mda_file, self.filename_info, {}) - cal_test_dict = {"b01": (0.012357, -61.78647, 2e-05, -0.1), - "b05": (0.0060172, -30.08607, 2e-05, -0.1), - "b10": (0.0003342, 0.1, 774.8853, 1321.0789)} + 
cal_test_dict = {"B1": (0.012357, -61.78647, 2e-05, -0.1), + "B5": (0.0060172, -30.08607, 2e-05, -0.1), + "B10": (0.0003342, 0.1, 774.8853, 1321.0789)} assert mda.platform_name == "Landsat-8" assert mda.earth_sun_distance() == 1.0079981 - assert mda.band_calibration["b01"] == cal_test_dict["b01"] - assert mda.band_calibration["b05"] == cal_test_dict["b05"] - assert mda.band_calibration["b10"] == cal_test_dict["b10"] - assert not mda.band_saturation["b01"] - assert mda.band_saturation["b04"] - assert not mda.band_saturation["b05"] + assert mda.band_calibration["B1"] == cal_test_dict["B1"] + assert mda.band_calibration["B5"] == cal_test_dict["B5"] + assert mda.band_calibration["B10"] == cal_test_dict["B10"] + assert not mda.band_saturation["B1"] + assert mda.band_saturation["B4"] + assert not mda.band_saturation["B5"] with pytest.raises(KeyError): - mda.band_saturation["b10"] + mda.band_saturation["B10"] def test_area_def(self, mda_file): """Check we can get the area defs properly.""" from satpy.readers.oli_tirs_l1_tif import OLITIRSMDReader mda = OLITIRSMDReader(mda_file, self.filename_info, {}) - standard_area = mda.build_area_def("b01") - pan_area = mda.build_area_def("b08") + standard_area = mda.build_area_def("B1") + pan_area = mda.build_area_def("B8") assert standard_area.area_extent == (619485.0, 2440485.0, 850515.0, 2675715.0) assert pan_area.area_extent == (619492.5, 2440492.5, 850507.5, 2675707.5) From f9dccad47a39f9485873ece05ce03892ee8b234d Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Thu, 17 Oct 2024 14:06:48 +0200 Subject: [PATCH 157/340] Remove 3.13 experimental run --- .github/workflows/ci.yaml | 3 --- 1 file changed, 3 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 73c2f1f3e9..84622f221c 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -24,9 +24,6 @@ jobs: - python-version: "3.12" os: "ubuntu-latest" experimental: true - - python-version: "3.13" - os: "ubuntu-latest" - 
experimental: true env: PYTHON_VERSION: ${{ matrix.python-version }} From d62b5d2539a2789ff825b431af703cd931357d52 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Thu, 17 Oct 2024 14:26:00 +0200 Subject: [PATCH 158/340] Fix bogus no_op implementation --- doc/source/composites.rst | 5 +++-- satpy/enhancements/__init__.py | 5 ----- satpy/etc/enhancements/generic.yaml | 4 +--- satpy/tests/enhancement_tests/test_enhancements.py | 12 ------------ 4 files changed, 4 insertions(+), 22 deletions(-) diff --git a/doc/source/composites.rst b/doc/source/composites.rst index d0c494e414..dda071db24 100644 --- a/doc/source/composites.rst +++ b/doc/source/composites.rst @@ -579,8 +579,9 @@ the file) as:: default apply the :func:`~trollimage.xrimage.XRImage.stretch_linear` enhancement with cutoffs of 0.5% and 99.5%. If you want no enhancement at all (maybe you are enhancing a composite based on :class:`DayNightCompositor` where - the components have their own enhancements defined), you need to define - an enhancement that does nothing:: + the components have their own enhancements defined), you can use the `image_ready` standard name. 
+ If this is not a suitable standard name, you can also define + an enhancement that does nothing: enhancements: day_x: diff --git a/satpy/enhancements/__init__.py b/satpy/enhancements/__init__.py index 95a147aafb..a44ca590cf 100644 --- a/satpy/enhancements/__init__.py +++ b/satpy/enhancements/__init__.py @@ -653,8 +653,3 @@ def _jma_true_color_reproduction(img_data, platform=None): output = da.dot(img_data.T, ccm.T) return output.T - - -def no_op(img): - """Do not do anything to the image.""" - return img.data diff --git a/satpy/etc/enhancements/generic.yaml b/satpy/etc/enhancements/generic.yaml index 7e23281531..cdfb7851ad 100644 --- a/satpy/etc/enhancements/generic.yaml +++ b/satpy/etc/enhancements/generic.yaml @@ -1288,6 +1288,4 @@ enhancements: image_ready: standard_name: image_ready - operations: - - name: no_op - method: !!python/name:satpy.enhancements.no_op + operations: [] diff --git a/satpy/tests/enhancement_tests/test_enhancements.py b/satpy/tests/enhancement_tests/test_enhancements.py index b0f2b3d31b..89ff21aafa 100644 --- a/satpy/tests/enhancement_tests/test_enhancements.py +++ b/satpy/tests/enhancement_tests/test_enhancements.py @@ -711,15 +711,3 @@ def test_jma_true_color_reproduction(self): img = XRImage(self.rgb) with pytest.raises(KeyError, match="No conversion matrix found for platform Fakesat"): jma_true_color_reproduction(img) - - -def test_no_op_enhancement(): - """Test the no-op enhancement.""" - from satpy.enhancements import no_op - - data = da.arange(-100, 1000, 110).reshape(2, 5) - rgb_data = np.stack([data, data, data]) - rgb = xr.DataArray(rgb_data, dims=("bands", "y", "x"), - coords={"bands": ["R", "G", "B"]}, - attrs={"platform_name": "Himawari-8"}) - assert no_op(rgb) is rgb.data From d6d4406dae1d9d85077ac2fa8788407f5fbdcb9e Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Fri, 18 Oct 2024 13:42:03 +0200 Subject: [PATCH 159/340] Update changelog for v0.52.0 --- CHANGELOG.md | 61 
++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 61 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index b421696af8..6d69b8db96 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,64 @@ +## Version 0.52.0 (2024/10/18) + +### Issues Closed + +* [Issue 2922](https://github.com/pytroll/satpy/issues/2922) - fci_l1c_nc reader ignoring MTG FDHSI segment 41 +* [Issue 2920](https://github.com/pytroll/satpy/issues/2920) - SEVIRI/FCI Water Vapour channel different normalization? +* [Issue 2917](https://github.com/pytroll/satpy/issues/2917) - Unpin mamba version in CI +* [Issue 2914](https://github.com/pytroll/satpy/issues/2914) - save.dataset - problem with MTG +* [Issue 2909](https://github.com/pytroll/satpy/issues/2909) - RuntimeError while compositing after resampling datasets +* [Issue 2907](https://github.com/pytroll/satpy/issues/2907) - The debug run reports an error, but there is no problem running after stopping at the breakpoint +* [Issue 2900](https://github.com/pytroll/satpy/issues/2900) - Eliminate dependency on external binaries of PublicDecompWT (xRITDecompress) by using pyPublicDecompWT +* [Issue 2897](https://github.com/pytroll/satpy/issues/2897) - generic_image reader returns data as float64 for PNG images +* [Issue 2887](https://github.com/pytroll/satpy/issues/2887) - "Don't know how to open the following files" ERROR in MTG-I1 LI data. 
+* [Issue 2884](https://github.com/pytroll/satpy/issues/2884) - MODIS and SEADAS test failures ([PR 2886](https://github.com/pytroll/satpy/pull/2886) by [@djhoese](https://github.com/djhoese)) +* [Issue 2869](https://github.com/pytroll/satpy/issues/2869) - ninjogeotiff writer should write gradient for P mode images ([PR 2870](https://github.com/pytroll/satpy/pull/2870) by [@gerritholl](https://github.com/gerritholl)) +* [Issue 2864](https://github.com/pytroll/satpy/issues/2864) - Documentation takes too long to build ([PR 2875](https://github.com/pytroll/satpy/pull/2875) by [@djhoese](https://github.com/djhoese)) +* [Issue 2839](https://github.com/pytroll/satpy/issues/2839) - Help about Netcdf Data +* [Issue 1974](https://github.com/pytroll/satpy/issues/1974) - debug_on() could write relevant versions +* [Issue 1266](https://github.com/pytroll/satpy/issues/1266) - Can pytroll process MetOp L0 Data? + +In this release 15 issues were closed. + +### Pull Requests Merged + +#### Bugs fixed + +* [PR 2939](https://github.com/pytroll/satpy/pull/2939) - Fix bogus no_op implementation +* [PR 2938](https://github.com/pytroll/satpy/pull/2938) - Update Landsat reader for compatibility with Pyspectral. 
+* [PR 2926](https://github.com/pytroll/satpy/pull/2926) - Import DataTree from xarray +* [PR 2923](https://github.com/pytroll/satpy/pull/2923) - Fix data promotion in `generic_image` reader and `satpy.composites.add_bands` +* [PR 2916](https://github.com/pytroll/satpy/pull/2916) - Fix deprecated "compositor" usage in modifier definitions +* [PR 2910](https://github.com/pytroll/satpy/pull/2910) - Remove deprecated usage of pyspectral's download_luts aerosol_type +* [PR 2896](https://github.com/pytroll/satpy/pull/2896) - Bugfix for Sentinel-2 radiance calculation +* [PR 2886](https://github.com/pytroll/satpy/pull/2886) - Update pyhdf-based arrs to be manually tokenized ([2884](https://github.com/pytroll/satpy/issues/2884)) + +#### Features added + +* [PR 2936](https://github.com/pytroll/satpy/pull/2936) - Drop python 3.9 ([2741](https://github.com/pytroll/satpy/issues/2741)) +* [PR 2933](https://github.com/pytroll/satpy/pull/2933) - Add no-op image_ready enhancement +* [PR 2931](https://github.com/pytroll/satpy/pull/2931) - Enhance visibility of missing dependencies +* [PR 2929](https://github.com/pytroll/satpy/pull/2929) - Replace patched `print` with capsys fixture +* [PR 2927](https://github.com/pytroll/satpy/pull/2927) - Use spline interpolation for faster processing +* [PR 2925](https://github.com/pytroll/satpy/pull/2925) - Fix types to allow float32 computations for SAR-C +* [PR 2913](https://github.com/pytroll/satpy/pull/2913) - Update `check_satpy` to use new `show_version` to display package versions +* [PR 2905](https://github.com/pytroll/satpy/pull/2905) - Mcd12q1 draft +* [PR 2904](https://github.com/pytroll/satpy/pull/2904) - Add reader for Landsat L1 data +* [PR 2902](https://github.com/pytroll/satpy/pull/2902) - Add OCI L2 BGC reader +* [PR 2899](https://github.com/pytroll/satpy/pull/2899) - Switch from Mambaforge to Miniforge +* [PR 2893](https://github.com/pytroll/satpy/pull/2893) - Fix AAPP L1b reader not to up-cast data to float64 +* [PR 
2870](https://github.com/pytroll/satpy/pull/2870) - Include gradient/axisintercept for mode p ([2869](https://github.com/pytroll/satpy/issues/2869)) +* [PR 2717](https://github.com/pytroll/satpy/pull/2717) - Add combined GRIB reader for both SEVIRI and FCI L2 products + +#### Documentation changes + +* [PR 2915](https://github.com/pytroll/satpy/pull/2915) - Improve SEVIRI metadata documentation +* [PR 2890](https://github.com/pytroll/satpy/pull/2890) - Fixing contributing.rst access on windows systems +* [PR 2875](https://github.com/pytroll/satpy/pull/2875) - Make documentation generation faster ([2864](https://github.com/pytroll/satpy/issues/2864), [2864](https://github.com/pytroll/satpy/issues/2864)) + +In this release 25 pull requests were closed. + + ## Version 0.51.0 (2024/08/15) ### Issues Closed From 61533c5359ebcbc6bae843e934cf75ab216d56c9 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Fri, 18 Oct 2024 15:41:08 +0000 Subject: [PATCH 160/340] Refactor MVIRI dataset access --- satpy/readers/mviri_l1b_fiduceo_nc.py | 215 ++++++++++-------- .../reader_tests/test_mviri_l1b_fiduceo_nc.py | 83 +++---- 2 files changed, 161 insertions(+), 137 deletions(-) diff --git a/satpy/readers/mviri_l1b_fiduceo_nc.py b/satpy/readers/mviri_l1b_fiduceo_nc.py index b11a7d07b1..9961679728 100644 --- a/satpy/readers/mviri_l1b_fiduceo_nc.py +++ b/satpy/readers/mviri_l1b_fiduceo_nc.py @@ -454,113 +454,151 @@ def is_high_resol(resolution): return resolution == HIGH_RESOL -class DatasetWrapper: - """Helper class for accessing the dataset.""" - - def __init__(self, nc): - """Wrap the given dataset.""" - self.nc = nc - - self._decode_cf() - self._fix_duplicate_dimensions(self.nc) - - - @property - def attrs(self): - """Exposes dataset attributes.""" - return self.nc.attrs - - def __getitem__(self, item): - """Get a variable from the dataset.""" - ds = self.nc[item] - if self._should_dims_be_renamed(ds): - ds = self._rename_dims(ds) - elif self._coordinates_not_assigned(ds): - 
ds = self._reassign_coords(ds) +class DatasetPreprocessor: + """Helper class for preprocessing the dataset.""" + + def preprocess(self, ds): + """Preprocess the given dataset.""" + ds = self._rename_vars(ds) + ds = self._decode_cf(ds) + ds = self._fix_duplicate_dimensions(ds) + self._reassign_coords(ds) self._cleanup_attrs(ds) return ds - def _should_dims_be_renamed(self, ds): - """Determine whether dataset dimensions need to be renamed.""" - return "y_ir_wv" in ds.dims or "y_tie" in ds.dims - - def _rename_dims(self, ds): - """Rename dataset dimensions to match satpy's expectations.""" + def _rename_vars(self, ds): + """Rename variables to match satpy's expectations.""" new_names = { - "y_ir_wv": "y", - "x_ir_wv": "x", - "y_tie": "y", - "x_tie": "x" + "time_ir_wv": "time", + } + new_names_avail = { + old: new + for old, new in new_names.items() + if old in ds } - for old_name, new_name in new_names.items(): - if old_name in ds.dims: - ds = ds.rename({old_name: new_name}) + return ds.rename(new_names_avail) + + def _decode_cf(self, ds): + """Decode data according to CF conventions.""" + # CF decoding fails because time coordinate contains fill values. + # Decode time separately, then decode rest using decode_cf(). + time = self._decode_time(ds) + ds = ds.drop_vars(time.name) + ds = xr.decode_cf(ds) + ds[time.name] = (time.dims, time.values) return ds - def _coordinates_not_assigned(self, ds): - return "y" in ds.dims and "y" not in ds.coords + def _decode_time(self, ds): + """Decode time using fill value and offset. + + Replace fill values with NaT. 
+ """ + time = ds["time"] + time_dec = (time + time.attrs["add_offset"]).astype("datetime64[s]").astype("datetime64[ns]") + is_fill_value = time == time.attrs["_FillValue"] + return xr.where(is_fill_value, np.datetime64("NaT"), time_dec) + + def _fix_duplicate_dimensions(self, ds): + """Rename dimensions as duplicate dimensions names are not supported by xarray.""" + ds = ds.copy() + ds.variables["covariance_spectral_response_function_vis"].dims = ("srf_size_1", "srf_size_2") + ds.variables["channel_correlation_matrix_independent"].dims = ("channel_1", "channel_2") + ds.variables["channel_correlation_matrix_structured"].dims = ("channel_1", "channel_2") + return ds.drop_dims(["channel", "srf_size"]) def _reassign_coords(self, ds): """Re-assign coordinates. For some reason xarray doesn't assign coordinates to all high - resolution data variables. + resolution data variables. In that case ds["varname"] doesn't + have coords, but they're still in ds.coords. """ - return ds.assign_coords({"y": self.nc.coords["y"], - "x": self.nc.coords["x"]}) + for var_name, data_array in ds.data_vars.items(): + if self._coordinates_not_assigned(data_array): + ds[var_name] = data_array.assign_coords( + { + "y": ds.coords["y"], + "x": ds.coords["x"] + } + ) + + def _coordinates_not_assigned(self, data_array): + return "y" in data_array.dims and "y" not in data_array.coords def _cleanup_attrs(self, ds): """Cleanup dataset attributes.""" # Remove ancillary_variables attribute to avoid downstream # satpy warnings. - ds.attrs.pop("ancillary_variables", None) - - def _decode_cf(self): - """Decode data.""" - # time decoding with decode_cf results in error - decode separately! 
- time_dims, time = self._decode_time() - self.nc = self.nc.drop_vars(time.name) - self.nc = xr.decode_cf(self.nc) - self.nc[time.name] = (time_dims, time.values) - - def _decode_time(self): - """Decode time using fill value and offset.""" - time = self.get_time() - time_dims = self.nc[time.name].dims - time = xr.where(time == time.attrs["_FillValue"], np.datetime64("NaT"), - (time + time.attrs["add_offset"]).astype("datetime64[s]").astype("datetime64[ns]")) - - return (time_dims, time) - - def _fix_duplicate_dimensions(self, nc): - """Rename dimensions as duplicate dimensions names are not supported by xarray.""" - nc.variables["covariance_spectral_response_function_vis"].dims = ("srf_size_1", "srf_size_2") - self.nc = nc.drop_dims("srf_size") - nc.variables["channel_correlation_matrix_independent"].dims = ("channel_1", "channel_2") - nc.variables["channel_correlation_matrix_structured"].dims = ("channel_1", "channel_2") - self.nc = nc.drop_dims("channel") + for data_array in ds.data_vars.values(): + data_array.attrs.pop("ancillary_variables", None) - def get_time(self): - """Get time coordinate. - Variable is sometimes named "time" and sometimes "time_ir_wv". 
- """ - try: - return self["time_ir_wv"] - except KeyError: - return self["time"] +class DatasetAccessor: + """Helper class for accessing the dataset.""" + + def __init__(self, ds): + """Wrap the given dataset.""" + self.ds = ds + + @property + def attrs(self): + """Exposes dataset attributes.""" + return self.ds.attrs + + def __getitem__(self, item): + """Get a variable from the dataset.""" + data_array = self.ds[item] + if self._should_dims_be_renamed(data_array): + return self._rename_dims(data_array) + return data_array + + def _should_dims_be_renamed(self, data_array): + """Determine whether dataset dimensions need to be renamed.""" + return "y_ir_wv" in data_array.dims or "y_tie" in data_array.dims + + def _rename_dims(self, data_array): + """Rename dataset dimensions to match satpy's expectations.""" + new_names = { + "y_ir_wv": "y", + "x_ir_wv": "x", + "y_tie": "y", + "x_tie": "x" + } + new_names_avail = { + old: new + for old, new in new_names.items() + if old in data_array.dims + } + return data_array.rename(new_names_avail) def get_xy_coords(self, resolution): """Get x and y coordinates for the given resolution.""" if is_high_resol(resolution): - return self.nc.coords["x"], self.nc.coords["y"] - return self.nc.coords["x_ir_wv"], self.nc.coords["x_ir_wv"] + return self.ds.coords["x"], self.ds.coords["y"] + return self.ds.coords["x_ir_wv"], self.ds.coords["x_ir_wv"] def get_image_size(self, resolution): """Get image size for the given resolution.""" if is_high_resol(resolution): - return self.nc.coords["y"].size - return self.nc.coords["y_ir_wv"].size + return self.ds.coords["y"].size + return self.ds.coords["y_ir_wv"].size + + +def open_dataset(filename): + """Load dataset from the given file.""" + nc_raw = xr.open_dataset( + filename, + chunks={"x": CHUNK_SIZE, + "y": CHUNK_SIZE, + "x_ir_wv": CHUNK_SIZE, + "y_ir_wv": CHUNK_SIZE}, + # see dataset preprocessor for why decoding is disabled + decode_cf=False, + decode_times=False, + mask_and_scale=False, + ) 
+ nc_preproc = DatasetPreprocessor().preprocess(nc_raw) + return DatasetAccessor(nc_preproc) class FiduceoMviriBase(BaseFileHandler): @@ -584,24 +622,9 @@ def __init__(self, filename, filename_info, filetype_info, # noqa: D417 super(FiduceoMviriBase, self).__init__( filename, filename_info, filetype_info) self.mask_bad_quality = mask_bad_quality - nc_raw = xr.open_dataset( - filename, - chunks={"x": CHUNK_SIZE, - "y": CHUNK_SIZE, - "x_ir_wv": CHUNK_SIZE, - "y_ir_wv": CHUNK_SIZE}, - # see dataset wrapper for why decoding is disabled - decode_cf=False, - decode_times=False, - mask_and_scale=False, - ) - - self.nc = DatasetWrapper(nc_raw) - + self.nc = open_dataset(filename) self.projection_longitude = self._get_projection_longitude(filename_info) - self.calib_coefs = self._get_calib_coefs() - self._get_angles = functools.lru_cache(maxsize=8)( self._get_angles_uncached ) @@ -745,7 +768,7 @@ def _get_acq_time_uncached(self, resolution): Note that the acquisition time does not increase monotonically with the scanline number due to the scan pattern and rectification. 
""" - time2d = self.nc.get_time() + time2d = self.nc["time"] _, target_y = self.nc.get_xy_coords(resolution) return Interpolator.interp_acq_time(time2d, target_y=target_y.values) diff --git a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py index ffc3e980e2..4c4714f2f4 100644 --- a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py +++ b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py @@ -33,7 +33,7 @@ ALTITUDE, EQUATOR_RADIUS, POLE_RADIUS, - DatasetWrapper, + DatasetPreprocessor, FiduceoMviriEasyFcdrFileHandler, FiduceoMviriFullFcdrFileHandler, Interpolator, @@ -256,6 +256,7 @@ height=2 ) + @pytest.fixture(name="time_fake_dataset") def fixture_time_fake_dataset(): """Create time for fake dataset.""" @@ -266,6 +267,7 @@ def fixture_time_fake_dataset(): return time + @pytest.fixture(name="fake_dataset") def fixture_fake_dataset(time_fake_dataset): """Create fake dataset.""" @@ -385,7 +387,6 @@ def fixture_reader(): class TestFiduceoMviriFileHandlers: """Unit tests for FIDUCEO MVIRI file handlers.""" - @pytest.mark.parametrize("projection_longitude", ["57.0", "5700"], indirect=True) def test_init(self, file_handler, projection_longitude): """Test file handler initialization.""" @@ -435,11 +436,8 @@ def test_get_dataset(self, file_handler, name, calibration, resolution, def test_get_dataset_corrupt(self, file_handler): """Test getting datasets with known corruptions.""" - # Time may have different names and satellite position might be missing - file_handler.nc.nc = file_handler.nc.nc.rename( - {"time_ir_wv": "time"} - ) - file_handler.nc.nc = file_handler.nc.nc.drop_vars( + # Satellite position might be missing + file_handler.nc.ds = file_handler.nc.ds.drop_vars( ["sub_satellite_longitude_start"] ) @@ -564,7 +562,7 @@ def test_calib_exceptions(self, file_handler): @pytest.mark.file_handler_data(mask_bad_quality=False) def test_bad_quality_warning(self, file_handler): """Test warning about bad VIS 
quality.""" - file_handler.nc.nc["quality_pixel_bitmask"] = 2 + file_handler.nc.ds["quality_pixel_bitmask"] = 2 vis = make_dataid(name="VIS", resolution=2250, calibration="reflectance") with pytest.warns(UserWarning): @@ -586,8 +584,28 @@ def test_file_pattern(self, reader): assert len(files) == 6 -class TestDatasetWrapper: - """Unit tests for DatasetWrapper class.""" +class DatasetWithCorruptCoordinates: + """Replicate a dataset with corrupt coordinates.""" + + def __init__(self, ds): + """Initialize the dataset.""" + self.dims = ds.dims + self.coords = ds.coords + # Now ds["myvar"] doesn't have coords, but they're still in ds.coords + self.ds = ds.drop_vars(["y", "x"]) + self.data_vars = self.ds.data_vars + + def __getitem__(self, item): + """Get variable from the dataset.""" + return self.ds[item] + + def __setitem__(self, key, value): + """Set dataset variable.""" + self.ds[key] = value + + +class TestDatasetPreprocessor: + """Test dataset preprocessing.""" def test_fix_duplicate_dimensions(self): """Test the renaming of duplicate dimensions. @@ -595,30 +613,27 @@ def test_fix_duplicate_dimensions(self): If duplicate dimensions are within the Dataset, opening the datasets with chunks throws a warning. The dimensions need to be renamed. 
""" - foo_time = 60*60 - foo_time_exp = np.datetime64("1970-01-01 01:00").astype("datetime64[ns]") - - foo = xr.Dataset( + time = 60*60 + time_exp = np.datetime64("1970-01-01 01:00").astype("datetime64[ns]") + ds = xr.Dataset( data_vars={ "covariance_spectral_response_function_vis": (("srf_size", "srf_size"), [[1, 2], [3, 4]]), "channel_correlation_matrix_independent": (("channel", "channel"), [[1, 2], [3, 4]]), "channel_correlation_matrix_structured": (("channel", "channel"), [[1, 2], [3, 4]]), - "time_ir_wv": (("y_ir_wv", "x_ir_wv"), [[foo_time, foo_time], [foo_time, foo_time]], + "time_ir_wv": (("y_ir_wv", "x_ir_wv"), [[time, time], [time, time]], {"_FillValue": fill_val, "add_offset": 0}) } ) - foo_ds = DatasetWrapper(foo) - - foo_exp = xr.Dataset( + ds_preproc = DatasetPreprocessor().preprocess(ds) + ds_exp = xr.Dataset( data_vars={ "covariance_spectral_response_function_vis": (("srf_size_1", "srf_size_2"), [[1, 2], [3, 4]]), "channel_correlation_matrix_independent": (("channel_1", "channel_2"), [[1, 2], [3, 4]]), "channel_correlation_matrix_structured": (("channel_1", "channel_2"), [[1, 2], [3, 4]]), - "time_ir_wv": (("y_ir_wv", "x_ir_wv"), [[foo_time_exp, foo_time_exp], [foo_time_exp, foo_time_exp]]) + "time": (("y_ir_wv", "x_ir_wv"), [[time_exp, time_exp], [time_exp, time_exp]]) } ) - - xr.testing.assert_allclose(foo_ds.nc, foo_exp) + xr.testing.assert_allclose(ds_preproc, ds_exp) def test_reassign_coords(self): """Test reassigning of coordinates. @@ -629,32 +644,18 @@ def test_reassign_coords(self): impossible to create (neither dropping, resetting or deleting coordinates seems to work). Instead use mock as a workaround. 
""" - nc = mock.MagicMock( + myvar = xr.DataArray( + [[1, 2], [3, 4]], coords={ "y": [.1, .2], "x": [.3, .4] }, dims=("y", "x") ) - nc.__getitem__.return_value = xr.DataArray( - [[1, 2], - [3, 4]], - dims=("y", "x") - ) - foo_exp = xr.DataArray( - [[1, 2], - [3, 4]], - dims=("y", "x"), - coords={ - "y": [.1, .2], - "x": [.3, .4] - } - ) - with mock.patch("satpy.readers.mviri_l1b_fiduceo_nc.DatasetWrapper._fix_duplicate_dimensions"): - with mock.patch("satpy.readers.mviri_l1b_fiduceo_nc.DatasetWrapper._decode_cf"): - ds = DatasetWrapper(nc) - foo = ds["foo"] - xr.testing.assert_equal(foo, foo_exp) + ds = DatasetWithCorruptCoordinates(xr.Dataset({"myvar": myvar})) + DatasetPreprocessor()._reassign_coords(ds) + xr.testing.assert_equal(ds["myvar"], myvar) + class TestInterpolator: """Unit tests for Interpolator class.""" From 14886640c58a742d23d9b0f4b1bf0db5f89b55f5 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Fri, 18 Oct 2024 16:14:46 +0000 Subject: [PATCH 161/340] Simplify test --- .../reader_tests/test_mviri_l1b_fiduceo_nc.py | 83 +++++++------------ 1 file changed, 30 insertions(+), 53 deletions(-) diff --git a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py index 4c4714f2f4..a83c88252e 100644 --- a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py +++ b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py @@ -584,77 +584,54 @@ def test_file_pattern(self, reader): assert len(files) == 6 -class DatasetWithCorruptCoordinates: - """Replicate a dataset with corrupt coordinates.""" - - def __init__(self, ds): - """Initialize the dataset.""" - self.dims = ds.dims - self.coords = ds.coords - # Now ds["myvar"] doesn't have coords, but they're still in ds.coords - self.ds = ds.drop_vars(["y", "x"]) - self.data_vars = self.ds.data_vars - - def __getitem__(self, item): - """Get variable from the dataset.""" - return self.ds[item] - - def __setitem__(self, key, value): - """Set dataset variable.""" 
- self.ds[key] = value - - class TestDatasetPreprocessor: """Test dataset preprocessing.""" - def test_fix_duplicate_dimensions(self): - """Test the renaming of duplicate dimensions. + @pytest.fixture(name="dataset") + def fixture_dataset(self): + """Get dataset before preprocessing. - If duplicate dimensions are within the Dataset, opening the datasets with chunks throws a warning. - The dimensions need to be renamed. + - Encoded timestamps including fill values + - Duplicate dimension names + - x/y coordinates not assigned """ time = 60*60 - time_exp = np.datetime64("1970-01-01 01:00").astype("datetime64[ns]") - ds = xr.Dataset( + return xr.Dataset( data_vars={ "covariance_spectral_response_function_vis": (("srf_size", "srf_size"), [[1, 2], [3, 4]]), "channel_correlation_matrix_independent": (("channel", "channel"), [[1, 2], [3, 4]]), "channel_correlation_matrix_structured": (("channel", "channel"), [[1, 2], [3, 4]]), - "time_ir_wv": (("y_ir_wv", "x_ir_wv"), [[time, time], [time, time]], - {"_FillValue": fill_val, "add_offset": 0}) - } + "time": (("y", "x"), [[time, fill_val], [time, time]], + {"_FillValue": fill_val, "add_offset": 0}) + } ) - ds_preproc = DatasetPreprocessor().preprocess(ds) - ds_exp = xr.Dataset( + + @pytest.fixture(name="dataset_exp") + def fixture_dataset_exp(self): + """Get expected dataset after preprocessing. 
+ + - Time should have been converted to datetime64 + - Duplicate dimensions should have been removed + - x/y coordinates should have been assigned + """ + time_exp = np.datetime64("1970-01-01 01:00").astype("datetime64[ns]") + return xr.Dataset( data_vars={ "covariance_spectral_response_function_vis": (("srf_size_1", "srf_size_2"), [[1, 2], [3, 4]]), "channel_correlation_matrix_independent": (("channel_1", "channel_2"), [[1, 2], [3, 4]]), "channel_correlation_matrix_structured": (("channel_1", "channel_2"), [[1, 2], [3, 4]]), - "time": (("y_ir_wv", "x_ir_wv"), [[time_exp, time_exp], [time_exp, time_exp]]) + "time": (("y", "x"), [[time_exp, np.datetime64("NaT")], [time_exp, time_exp]]) + }, + coords={ + "y": [0, 1], + "x": [0, 1] } ) - xr.testing.assert_allclose(ds_preproc, ds_exp) - - def test_reassign_coords(self): - """Test reassigning of coordinates. - For some reason xarray does not always assign (y, x) coordinates to - the high resolution datasets, although they have dimensions (y, x) and - coordinates y and x exist. A dataset with these properties seems - impossible to create (neither dropping, resetting or deleting - coordinates seems to work). Instead use mock as a workaround. 
- """ - myvar = xr.DataArray( - [[1, 2], [3, 4]], - coords={ - "y": [.1, .2], - "x": [.3, .4] - }, - dims=("y", "x") - ) - ds = DatasetWithCorruptCoordinates(xr.Dataset({"myvar": myvar})) - DatasetPreprocessor()._reassign_coords(ds) - xr.testing.assert_equal(ds["myvar"], myvar) + def test_preprocess(self, dataset, dataset_exp): + """Test dataset preprocessing.""" + preprocessed = DatasetPreprocessor().preprocess(dataset) + xr.testing.assert_allclose(preprocessed, dataset_exp) class TestInterpolator: From 24583547194dd21ba2c11b1090fa4201a0cac5e2 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Fri, 18 Oct 2024 16:19:06 +0000 Subject: [PATCH 162/340] Improve test --- satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py index a83c88252e..bacc5cfca1 100644 --- a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py +++ b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py @@ -601,8 +601,8 @@ def fixture_dataset(self): "covariance_spectral_response_function_vis": (("srf_size", "srf_size"), [[1, 2], [3, 4]]), "channel_correlation_matrix_independent": (("channel", "channel"), [[1, 2], [3, 4]]), "channel_correlation_matrix_structured": (("channel", "channel"), [[1, 2], [3, 4]]), - "time": (("y", "x"), [[time, fill_val], [time, time]], - {"_FillValue": fill_val, "add_offset": 0}) + "time_ir_wv": (("y", "x"), [[time, fill_val], [time, time]], + {"_FillValue": fill_val, "add_offset": 0}) } ) @@ -610,7 +610,8 @@ def fixture_dataset(self): def fixture_dataset_exp(self): """Get expected dataset after preprocessing. 
- - Time should have been converted to datetime64 + - Timestamps should have been converted to datetime64 + - Time dimension should have been renamed - Duplicate dimensions should have been removed - x/y coordinates should have been assigned """ From b4be219bc5c4d6312db98c44407825e177f23fcc Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Fri, 18 Oct 2024 16:28:50 +0000 Subject: [PATCH 163/340] Test with an actual file --- .../reader_tests/test_mviri_l1b_fiduceo_nc.py | 28 +++++++++++-------- 1 file changed, 17 insertions(+), 11 deletions(-) diff --git a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py index bacc5cfca1..a6eda3ea63 100644 --- a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py +++ b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py @@ -348,28 +348,34 @@ def fixture_projection_longitude(request): return request.param +@pytest.fixture(name="fake_file") +def fixture_fake_file(fake_dataset, tmp_path): + """Create test file.""" + filename = tmp_path / "test_mviri_fiduceo.nc" + fake_dataset.to_netcdf(filename) + return filename + + @pytest.fixture( name="file_handler", params=[FiduceoMviriEasyFcdrFileHandler, FiduceoMviriFullFcdrFileHandler] ) -def fixture_file_handler(fake_dataset, request, projection_longitude): +def fixture_file_handler(fake_file, request, projection_longitude, tmp_path): """Create mocked file handler.""" marker = request.node.get_closest_marker("file_handler_data") mask_bad_quality = True if marker: mask_bad_quality = marker.kwargs["mask_bad_quality"] fh_class = request.param - with mock.patch("satpy.readers.mviri_l1b_fiduceo_nc.xr.open_dataset") as open_dataset: - open_dataset.return_value = fake_dataset - return fh_class( - filename="filename", - filename_info={"platform": "MET7", - "sensor": "MVIRI", - "projection_longitude": projection_longitude}, - filetype_info={"foo": "bar"}, - mask_bad_quality=mask_bad_quality - ) + return fh_class( + 
filename=fake_file, + filename_info={"platform": "MET7", + "sensor": "MVIRI", + "projection_longitude": projection_longitude}, + filetype_info={"foo": "bar"}, + mask_bad_quality=mask_bad_quality + ) @pytest.fixture(name="reader") From 49fd20afcfec3686d300388e39dcbfd784de7bb9 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Fri, 18 Oct 2024 16:30:16 +0000 Subject: [PATCH 164/340] Remove unused argument --- satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py index a6eda3ea63..b5b5b5a593 100644 --- a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py +++ b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py @@ -361,7 +361,7 @@ def fixture_fake_file(fake_dataset, tmp_path): params=[FiduceoMviriEasyFcdrFileHandler, FiduceoMviriFullFcdrFileHandler] ) -def fixture_file_handler(fake_file, request, projection_longitude, tmp_path): +def fixture_file_handler(fake_file, request, projection_longitude): """Create mocked file handler.""" marker = request.node.get_closest_marker("file_handler_data") mask_bad_quality = True From eaf9e3a64a70e0e2a29c6ba1e5ecf530f35fc589 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Mon, 21 Oct 2024 08:30:13 +0000 Subject: [PATCH 165/340] Fix seviri_l2_grib end_time property bug. 
--- satpy/readers/eum_l2_grib.py | 8 ++++++-- satpy/tests/reader_tests/test_eum_l2_grib.py | 10 +++++++--- 2 files changed, 13 insertions(+), 5 deletions(-) diff --git a/satpy/readers/eum_l2_grib.py b/satpy/readers/eum_l2_grib.py index 543aa71c30..4ed5901a65 100644 --- a/satpy/readers/eum_l2_grib.py +++ b/satpy/readers/eum_l2_grib.py @@ -78,8 +78,12 @@ def start_time(self): def end_time(self): """Return the sensing end time.""" if self.sensor == "seviri": - delta = SEVIRI_REPEAT_CYCLE_DURATION_RSS if self._ssp_lon == 9.5 else SEVIRI_REPEAT_CYCLE_DURATION - return self.start_time + dt.timedelta(minutes=delta) + try: + delta = SEVIRI_REPEAT_CYCLE_DURATION_RSS if self._ssp_lon == 9.5 else SEVIRI_REPEAT_CYCLE_DURATION + return self.start_time + dt.timedelta(minutes=delta) + except AttributeError: + # If dataset and metadata (ssp_lon) have not yet been loaded, return None + return None elif self.sensor == "fci": return self.filename_info["end_time"] diff --git a/satpy/tests/reader_tests/test_eum_l2_grib.py b/satpy/tests/reader_tests/test_eum_l2_grib.py index 50c6be5398..4172cf0ea0 100644 --- a/satpy/tests/reader_tests/test_eum_l2_grib.py +++ b/satpy/tests/reader_tests/test_eum_l2_grib.py @@ -133,12 +133,16 @@ def test_seviri_data_reading(da_, xr_, setup_reader): dataset_id = make_dataid(name="dummmy", resolution=3000) + # Check that end_time is None for SEVIRI before the dataset has been loaded + assert reader.end_time is None + common_checks(ec_, reader, mock_file, dataset_id) - # Check end_time + # Check that end_time is now a valid datetime.datetime object after the dataset has been loaded assert reader.end_time == datetime.datetime(year=2020, month=10, day=20, hour=19, minute=50, second=0) + # Checks the correct execution of the _get_global_attributes and _get_metadata_from_msg functions attributes = reader._get_attributes() expected_attributes = { @@ -234,12 +238,12 @@ def test_fci_data_reading(da_, xr_, setup_reader): dataset_id = make_dataid(name="dummmy", 
resolution=2000) - common_checks(ec_, reader, mock_file, dataset_id) - # Check end_time assert reader.end_time == datetime.datetime(year=2020, month=10, day=20, hour=19, minute=50, second=0) + common_checks(ec_, reader, mock_file, dataset_id) + # Checks the correct execution of the _get_global_attributes and _get_metadata_from_msg functions attributes = reader._get_attributes() expected_attributes = { From b9a2f4d39dd395da4150e1d55c724e9734797c2b Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Mon, 21 Oct 2024 12:40:06 +0300 Subject: [PATCH 166/340] Check for _FillValue in enhanced images --- satpy/tests/enhancement_tests/test_enhancements.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/satpy/tests/enhancement_tests/test_enhancements.py b/satpy/tests/enhancement_tests/test_enhancements.py index 89ff21aafa..96176fda34 100644 --- a/satpy/tests/enhancement_tests/test_enhancements.py +++ b/satpy/tests/enhancement_tests/test_enhancements.py @@ -44,6 +44,12 @@ def run_and_check_enhancement(func, data, expected, **kwargs): old_keys = set(pre_attrs.keys()) # It is OK to have "enhancement_history" added new_keys = set(img.data.attrs.keys()) - {"enhancement_history"} + # In case of palettes are used, _FillValue is added. 
+ # Colorize doesn't add the fill value, so ignore that + if "palettes" in kwargs and func.__name__ != "colorize": + assert "_FillValue" in new_keys + # Remove it from further comparisons + new_keys = new_keys - {"_FillValue"} assert old_keys == new_keys res_data_arr = img.data From 821ca55622f173c72d74d04d716f69b2864bbb20 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Tue, 22 Oct 2024 14:00:15 +0200 Subject: [PATCH 167/340] Update MODIS L1b reader with additional geoinfo datasets --- satpy/etc/readers/modis_l1b.yaml | 28 ++++++++++++++++++++++++++++ satpy/readers/hdfeos_base.py | 4 ++++ 2 files changed, 32 insertions(+) diff --git a/satpy/etc/readers/modis_l1b.yaml b/satpy/etc/readers/modis_l1b.yaml index 17bdf134bf..b0f0a3f7f3 100644 --- a/satpy/etc/readers/modis_l1b.yaml +++ b/satpy/etc/readers/modis_l1b.yaml @@ -486,6 +486,34 @@ datasets: coordinates: [longitude, latitude] file_type: [hdf_eos_geo, hdf_eos_data_1000m] + land_sea_mask: + name: land_sea_mask + sensor: modis + resolution: 1000 + coordinates: [longitude, latitude] + file_type: [hdf_eos_geo] + + height: + name: height + sensor: modis + resolution: 1000 + coordinates: [longitude, latitude] + file_type: [hdf_eos_geo] + + range: + name: range + sensor: modis + resolution: 1000 + coordinates: [longitude, latitude] + file_type: [hdf_eos_geo] + + waterpresent: + name: waterpresent + sensor: modis + resolution: 1000 + coordinates: [longitude, latitude] + file_type: [hdf_eos_geo] + file_types: hdf_eos_data_250m: diff --git a/satpy/readers/hdfeos_base.py b/satpy/readers/hdfeos_base.py index 0f563efe2c..e4721a89e9 100644 --- a/satpy/readers/hdfeos_base.py +++ b/satpy/readers/hdfeos_base.py @@ -333,6 +333,10 @@ class HDFEOSGeoReader(HDFEOSBaseFileReader): "satellite_zenith_angle": ("SensorZenith", "Sensor_Zenith"), "solar_azimuth_angle": ("SolarAzimuth", "SolarAzimuth"), "solar_zenith_angle": ("SolarZenith", "Solar_Zenith"), + "water_present": "WaterPresent", + "land_sea_mask": "Land/SeaMask", + "height": 
"Height", + "range": "Range", } def __init__(self, filename, filename_info, filetype_info, **kwargs): From 6814f7531746651537797f6c5dd9315c5d55e0fe Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Wed, 23 Oct 2024 12:46:22 +0200 Subject: [PATCH 168/340] Update changelog for v0.52.1 --- CHANGELOG.md | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6d69b8db96..eefa202dff 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,21 @@ +## Version 0.52.1 (2024/10/23) + +### Issues Closed + +* [Issue 2942](https://github.com/pytroll/satpy/issues/2942) - 0.52.0 breaks `seviri_l2_grib`-reader with 'EUML2GribFileHandler' object has no attribute '_ssp_lon' ([PR 2943](https://github.com/pytroll/satpy/pull/2943) by [@strandgren](https://github.com/strandgren)) + +In this release 1 issue was closed. + +### Pull Requests Merged + +#### Bugs fixed + +* [PR 2944](https://github.com/pytroll/satpy/pull/2944) - Fix tests using `palettize` +* [PR 2943](https://github.com/pytroll/satpy/pull/2943) - Fix seviri_l2_grib end_time property bug. ([2942](https://github.com/pytroll/satpy/issues/2942)) + +In this release 2 pull requests were closed. 
+ + ## Version 0.52.0 (2024/10/18) ### Issues Closed From 94611cc81f553f92b5182a0c3ab39220b34c59c8 Mon Sep 17 00:00:00 2001 From: Joleen Feltz Date: Wed, 23 Oct 2024 11:07:11 -0500 Subject: [PATCH 169/340] Fix misreferenced suomi-npp --- satpy/readers/viirs_edr.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/viirs_edr.py b/satpy/readers/viirs_edr.py index 025bf1ca14..43f7d06032 100644 --- a/satpy/readers/viirs_edr.py +++ b/satpy/readers/viirs_edr.py @@ -197,7 +197,7 @@ def platform_name(self): platform_path = self.filename_info["platform_shortname"] platform_dict = {"NPP": "Suomi-NPP", "JPSS-1": "NOAA-20", - "SNPP": "NOAA-20", + "SNPP": "Suomi-NPP", "J01": "NOAA-20", "N20": "NOAA-20", "JPSS-2": "NOAA-21", From a4b8c8dfd4dd6111f886e7df4925eafdeb9cf00d Mon Sep 17 00:00:00 2001 From: Joleen Feltz Date: Wed, 23 Oct 2024 11:39:49 -0500 Subject: [PATCH 170/340] Test new platform alias --- satpy/tests/reader_tests/test_viirs_edr.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_viirs_edr.py b/satpy/tests/reader_tests/test_viirs_edr.py index 8e69df5313..447d43938d 100644 --- a/satpy/tests/reader_tests/test_viirs_edr.py +++ b/satpy/tests/reader_tests/test_viirs_edr.py @@ -468,7 +468,8 @@ def test_availability_veg_idx(self, data_file, exp_available): [ ("npp", "Suomi-NPP"), ("JPSS-1", "NOAA-20"), - ("J01", "NOAA-20") + ("J01", "NOAA-20"), + ("n21", "NOAA-21") ]) def test_get_platformname(self, surface_reflectance_file, filename_platform, exp_shortname): """Test finding start and end times of granules.""" From eb8e21c41ca378c07370392df462c07d84f2b5e5 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 24 Oct 2024 09:16:32 +0300 Subject: [PATCH 171/340] Fix dtype promotion in sunzen_reduction --- satpy/modifiers/angles.py | 3 +-- satpy/tests/test_modifiers.py | 20 +++++++++++++------- 2 files changed, 14 insertions(+), 9 deletions(-) diff --git a/satpy/modifiers/angles.py 
b/satpy/modifiers/angles.py index 5ea8530612..028d9357ab 100644 --- a/satpy/modifiers/angles.py +++ b/satpy/modifiers/angles.py @@ -572,7 +572,6 @@ def sunzen_reduction(data: da.Array, return da.map_blocks(_sunzen_reduction_ndarray, data, sunz, limit, max_sza, strength, meta=np.array((), dtype=data.dtype), chunks=data.chunks) - def _sunzen_reduction_ndarray(data: np.ndarray, sunz: np.ndarray, limit: float, @@ -584,7 +583,7 @@ def _sunzen_reduction_ndarray(data: np.ndarray, # invert the reduction factor such that minimum reduction is done at `limit` and gradually increases towards max_sza with np.errstate(invalid="ignore"): # we expect space pixels to be invalid - reduction_factor = 1. - np.log(reduction_factor + 1) / np.log(2) + reduction_factor = 1. - np.log(reduction_factor + 1) / np.log(2, dtype=data.dtype) # apply non-linearity to the reduction factor for a non-linear reduction of the signal. This can be used for a # slower or faster transision to higher/lower fractions at the ndvi extremes. 
If strength equals 1.0, this diff --git a/satpy/tests/test_modifiers.py b/satpy/tests/test_modifiers.py index 7e28a7456b..279b73a28d 100644 --- a/satpy/tests/test_modifiers.py +++ b/satpy/tests/test_modifiers.py @@ -179,18 +179,24 @@ def setup_class(cls): cls.custom = SunZenithReducer(name="sza_reduction_test_custom", modifiers=tuple(), correction_limit=70, max_sza=95, strength=3.0) - def test_default_settings(self, sunz_ds1, sunz_sza): + @pytest.mark.parametrize("dtype", [np.float32, np.float64]) + def test_default_settings(self, sunz_ds1, sunz_sza, dtype): """Test default settings with sza data available.""" - res = self.default((sunz_ds1, sunz_sza), test_attr="test") + res = self.default((sunz_ds1.astype(dtype), sunz_sza.astype(dtype)), test_attr="test") + expected = np.array([[0.02916261, 0.02839063], [0.02949383, 0.02871911]], dtype=dtype) + assert res.dtype == dtype np.testing.assert_allclose(res.values, - np.array([[0.02916261, 0.02839063], [0.02949383, 0.02871911]]), - rtol=1e-5) + expected, + rtol=2e-5) - def test_custom_settings(self, sunz_ds1, sunz_sza): + @pytest.mark.parametrize("dtype", [np.float32, np.float64]) + def test_custom_settings(self, sunz_ds1, sunz_sza, dtype): """Test custom settings with sza data available.""" - res = self.custom((sunz_ds1, sunz_sza), test_attr="test") + res = self.custom((sunz_ds1.astype(dtype), sunz_sza.astype(dtype)), test_attr="test") + expected = np.array([[0.01041319, 0.01030033], [0.01046164, 0.01034834]], dtype=dtype) + assert res.dtype == dtype np.testing.assert_allclose(res.values, - np.array([[0.01041319, 0.01030033], [0.01046164, 0.01034834]]), + expected, rtol=1e-5) def test_invalid_max_sza(self, sunz_ds1, sunz_sza): From eca4e9c6cacdff7cf9e1b006b04539207b0466b7 Mon Sep 17 00:00:00 2001 From: Jason Date: Thu, 24 Oct 2024 11:02:15 +0200 Subject: [PATCH 172/340] Add altitude, landcover, and landseamask to mersi_ll_l1b reader --- satpy/etc/readers/mersi_ll_l1b.yaml | 24 ++++++++++++++++++++++++ 1 file changed, 
24 insertions(+) diff --git a/satpy/etc/readers/mersi_ll_l1b.yaml b/satpy/etc/readers/mersi_ll_l1b.yaml index 47b6d432b0..c898ae11cf 100644 --- a/satpy/etc/readers/mersi_ll_l1b.yaml +++ b/satpy/etc/readers/mersi_ll_l1b.yaml @@ -272,3 +272,27 @@ datasets: coordinates: [longitude, latitude] file_type: mersi_ll_l1b_1000_geo file_key: Geolocation/MoonAzimuth + altitude: + name: altitude + units: degree + standard_name: altitude + resolution: 1000 + coordinates: [longitude, latitude] + file_type: mersi_ll_l1b_1000_geo + file_key: Geolocation/Altitude + landcover: + name: landcover + units: degree + standard_name: landcover + resolution: 1000 + coordinates: [longitude, latitude] + file_type: mersi_ll_l1b_1000_geo + file_key: Geolocation/LandCover + landseamask: + name: landseamask + units: degree + standard_name: landseamask + resolution: 1000 + coordinates: [longitude, latitude] + file_type: mersi_ll_l1b_1000_geo + file_key: Geolocation/LandSeaMask From abfa665dfb7be98d273006fff84875e0135bca34 Mon Sep 17 00:00:00 2001 From: Jason Date: Thu, 24 Oct 2024 11:18:43 +0200 Subject: [PATCH 173/340] Add authors --- AUTHORS.md | 1 + 1 file changed, 1 insertion(+) diff --git a/AUTHORS.md b/AUTHORS.md index 6716dcb889..dd744fd2e8 100644 --- a/AUTHORS.md +++ b/AUTHORS.md @@ -38,6 +38,7 @@ The following people have made contributions to this project: - [Gerrit Holl (gerritholl)](https://github.com/gerritholl) - Deutscher Wetterdienst - [David Hoese (djhoese)](https://github.com/djhoese) - [Marc Honnorat (honnorat)](https://github.com/honnorat) +- [Chung-Hsiang Horng(chorng)](https://github.com/chorng) - [Lloyd Hughes (system123)](https://github.com/system123) - [Sara Hörnquist (shornqui)](https://github.com/shornqui) - [Mikhail Itkin (mitkin)](https://github.com/mitkin) From b01d68d33fda9593d7d39736d98056c6c7b352b3 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 24 Oct 2024 13:35:37 +0300 Subject: [PATCH 174/340] Add dtype checks to more tests --- 
.../enhancement_tests/test_enhancements.py | 7 ++-- satpy/tests/test_composites.py | 33 +++++++++++++------ 2 files changed, 27 insertions(+), 13 deletions(-) diff --git a/satpy/tests/enhancement_tests/test_enhancements.py b/satpy/tests/enhancement_tests/test_enhancements.py index 96176fda34..3d20677237 100644 --- a/satpy/tests/enhancement_tests/test_enhancements.py +++ b/satpy/tests/enhancement_tests/test_enhancements.py @@ -109,14 +109,15 @@ def _calc_func(data): exp_data = exp_data[np.newaxis, :, :] run_and_check_enhancement(_enh_func, in_data, exp_data) - def test_cira_stretch(self): + @pytest.mark.parametrize("dtype", [np.float32, np.float64]) + def test_cira_stretch(self, dtype): """Test applying the cira_stretch.""" from satpy.enhancements import cira_stretch expected = np.array([[ [np.nan, -7.04045974, -7.04045974, 0.79630132, 0.95947296], - [1.05181359, 1.11651012, 1.16635571, 1.20691137, 1.24110186]]]) - run_and_check_enhancement(cira_stretch, self.ch1, expected) + [1.05181359, 1.11651012, 1.16635571, 1.20691137, 1.24110186]]], dtype=dtype) + run_and_check_enhancement(cira_stretch, self.ch1.astype(dtype), expected) def test_reinhard(self): """Test the reinhard algorithm.""" diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index 1b60161a52..50b9b88b74 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -250,27 +250,34 @@ def test_more_than_three_datasets(self): with pytest.raises(ValueError, match="Expected 3 datasets, got 4"): comp((self.ds1, self.ds2, self.ds3, self.ds1), optional_datasets=(self.ds4_big,)) - def test_self_sharpened_no_high_res(self): + @pytest.mark.parametrize("dtype", [np.float32, np.float64]) + def test_self_sharpened_no_high_res(self, dtype): """Test for exception when no high_res band is specified.""" from satpy.composites import SelfSharpenedRGB comp = SelfSharpenedRGB(name="true_color", high_resolution_band=None) with pytest.raises(ValueError, match="SelfSharpenedRGB 
requires at least one high resolution band, not 'None'"): comp((self.ds1, self.ds2, self.ds3)) - def test_basic_no_high_res(self): + @pytest.mark.parametrize("dtype", [np.float32, np.float64]) + def test_basic_no_high_res(self, dtype): """Test that three datasets can be passed without optional high res.""" from satpy.composites import RatioSharpenedRGB comp = RatioSharpenedRGB(name="true_color") - res = comp((self.ds1, self.ds2, self.ds3)) + res = comp((self.ds1.astype(dtype), self.ds2.astype(dtype), self.ds3.astype(dtype))) assert res.shape == (3, 2, 2) + assert res.dtype == dtype - def test_basic_no_sharpen(self): + @pytest.mark.parametrize("dtype", [np.float32, np.float64]) + def test_basic_no_sharpen(self, dtype): """Test that color None does no sharpening.""" from satpy.composites import RatioSharpenedRGB comp = RatioSharpenedRGB(name="true_color", high_resolution_band=None) - res = comp((self.ds1, self.ds2, self.ds3), optional_datasets=(self.ds4,)) + res = comp((self.ds1.astype(dtype), self.ds2.astype(dtype), self.ds3.astype(dtype)), + optional_datasets=(self.ds4.astype(dtype),)) assert res.shape == (3, 2, 2) + assert res.dtype == dtype + @pytest.mark.parametrize("dtype", [np.float32, np.float64]) @pytest.mark.parametrize( ("high_resolution_band", "neutral_resolution_band", "exp_r", "exp_g", "exp_b"), [ @@ -300,22 +307,26 @@ def test_basic_no_sharpen(self): np.array([[1.0, 1.0], [np.nan, 1.0]], dtype=np.float64)) ] ) - def test_ratio_sharpening(self, high_resolution_band, neutral_resolution_band, exp_r, exp_g, exp_b): + def test_ratio_sharpening(self, high_resolution_band, neutral_resolution_band, exp_r, exp_g, exp_b, dtype): """Test RatioSharpenedRGB by different groups of high_resolution_band and neutral_resolution_band.""" from satpy.composites import RatioSharpenedRGB comp = RatioSharpenedRGB(name="true_color", high_resolution_band=high_resolution_band, neutral_resolution_band=neutral_resolution_band) - res = comp((self.ds1, self.ds2, self.ds3), 
optional_datasets=(self.ds4,)) + res = comp((self.ds1.astype(dtype), self.ds2.astype(dtype), self.ds3.astype(dtype)), + optional_datasets=(self.ds4.astype(dtype),)) assert "units" not in res.attrs assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) + assert res.dtype == dtype data = res.values np.testing.assert_allclose(data[0], exp_r, rtol=1e-5) np.testing.assert_allclose(data[1], exp_g, rtol=1e-5) np.testing.assert_allclose(data[2], exp_b, rtol=1e-5) + assert res.dtype == dtype + @pytest.mark.parametrize("dtype", [np.float32, np.float64]) @pytest.mark.parametrize( ("exp_shape", "exp_r", "exp_g", "exp_b"), [ @@ -325,17 +336,19 @@ def test_ratio_sharpening(self, high_resolution_band, neutral_resolution_band, e np.array([[16 / 3, 16 / 3], [16 / 3, 0]], dtype=np.float64)) ] ) - def test_self_sharpened_basic(self, exp_shape, exp_r, exp_g, exp_b): + def test_self_sharpened_basic(self, exp_shape, exp_r, exp_g, exp_b, dtype): """Test that three datasets can be passed without optional high res.""" from satpy.composites import SelfSharpenedRGB comp = SelfSharpenedRGB(name="true_color") - res = comp((self.ds1, self.ds2, self.ds3)) - data = res.values + res = comp((self.ds1.astype(dtype), self.ds2.astype(dtype), self.ds3.astype(dtype))) + assert res.dtype == dtype + data = res.values assert data.shape == exp_shape np.testing.assert_allclose(data[0], exp_r, rtol=1e-5) np.testing.assert_allclose(data[1], exp_g, rtol=1e-5) np.testing.assert_allclose(data[2], exp_b, rtol=1e-5) + assert data.dtype == dtype class TestDifferenceCompositor(unittest.TestCase): From 0e4a3f56772d46844a08e756bf099e87615eea5c Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 24 Oct 2024 13:36:44 +0300 Subject: [PATCH 175/340] Add dtype checks for modifiers --- satpy/tests/test_modifiers.py | 58 +++++++++++++++++++++++++---------- 1 file changed, 41 insertions(+), 17 deletions(-) diff --git a/satpy/tests/test_modifiers.py b/satpy/tests/test_modifiers.py index 
7e28a7456b..fa360f2a8b 100644 --- a/satpy/tests/test_modifiers.py +++ b/satpy/tests/test_modifiers.py @@ -135,29 +135,46 @@ def test_basic_default_not_provided(self, sunz_ds1, as_32bit): assert res.dtype == res_np.dtype assert "y" not in res.coords assert "x" not in res.coords + if as_32bit: + assert res.dtype == np.float32 - def test_basic_lims_not_provided(self, sunz_ds1): + @pytest.mark.parametrize("dtype", [np.float32, np.float64]) + def test_basic_lims_not_provided(self, sunz_ds1, dtype): """Test custom limits when SZA isn't provided.""" from satpy.modifiers.geometry import SunZenithCorrector comp = SunZenithCorrector(name="sza_test", modifiers=tuple(), correction_limit=90) - res = comp((sunz_ds1,), test_attr="test") - np.testing.assert_allclose(res.values, np.array([[66.853262, 68.168939], [66.30742, 67.601493]])) - + res = comp((sunz_ds1.astype(dtype),), test_attr="test") + expected = np.array([[66.853262, 68.168939], [66.30742, 67.601493]], dtype=dtype) + values = res.values + np.testing.assert_allclose(values, expected, rtol=1e-5) + assert res.dtype == dtype + assert values.dtype == dtype + + @pytest.mark.parametrize("dtype", [np.float32, np.float64]) @pytest.mark.parametrize("data_arr", [lazy_fixture("sunz_ds1"), lazy_fixture("sunz_ds1_stacked")]) - def test_basic_default_provided(self, data_arr, sunz_sza): + def test_basic_default_provided(self, data_arr, sunz_sza, dtype): """Test default limits when SZA is provided.""" from satpy.modifiers.geometry import SunZenithCorrector comp = SunZenithCorrector(name="sza_test", modifiers=tuple()) - res = comp((data_arr, sunz_sza), test_attr="test") - np.testing.assert_allclose(res.values, np.array([[22.401667, 22.31777], [22.437503, 22.353533]])) - + res = comp((data_arr.astype(dtype), sunz_sza.astype(dtype)), test_attr="test") + expected = np.array([[22.401667, 22.31777], [22.437503, 22.353533]], dtype=dtype) + values = res.values + np.testing.assert_allclose(values, expected) + assert res.dtype == dtype + assert 
values.dtype == dtype + + @pytest.mark.parametrize("dtype", [np.float32, np.float64]) @pytest.mark.parametrize("data_arr", [lazy_fixture("sunz_ds1"), lazy_fixture("sunz_ds1_stacked")]) - def test_basic_lims_provided(self, data_arr, sunz_sza): + def test_basic_lims_provided(self, data_arr, sunz_sza, dtype): """Test custom limits when SZA is provided.""" from satpy.modifiers.geometry import SunZenithCorrector comp = SunZenithCorrector(name="sza_test", modifiers=tuple(), correction_limit=90) - res = comp((data_arr, sunz_sza), test_attr="test") - np.testing.assert_allclose(res.values, np.array([[66.853262, 68.168939], [66.30742, 67.601493]])) + res = comp((data_arr.astype(dtype), sunz_sza.astype(dtype)), test_attr="test") + expected = np.array([[66.853262, 68.168939], [66.30742, 67.601493]], dtype=dtype) + values = res.values + np.testing.assert_allclose(values, expected, rtol=1e-5) + assert res.dtype == dtype + assert values.dtype == dtype def test_imcompatible_areas(self, sunz_ds2, sunz_sza): """Test sunz correction on incompatible areas.""" @@ -502,6 +519,7 @@ def _create_test_data(self, name, wavelength, resolution): }) return input_band, red_band, angle1, angle1, angle1, angle1 + @pytest.mark.parametrize("dtype", [np.float32, np.float64]) @pytest.mark.parametrize( ("name", "wavelength", "resolution", "aerosol_type", "reduce_lim_low", "reduce_lim_high", "reduce_strength", "exp_mean", "exp_unique"), @@ -521,7 +539,7 @@ def _create_test_data(self, name, wavelength, resolution): ] ) def test_rayleigh_corrector(self, name, wavelength, resolution, aerosol_type, reduce_lim_low, reduce_lim_high, - reduce_strength, exp_mean, exp_unique): + reduce_strength, exp_mean, exp_unique, dtype): """Test PSPRayleighReflectance with fake data.""" from satpy.modifiers.atmosphere import PSPRayleighReflectance ray_cor = PSPRayleighReflectance(name=name, atmosphere="us-standard", aerosol_types=aerosol_type, @@ -535,42 +553,48 @@ def test_rayleigh_corrector(self, name, wavelength, 
resolution, aerosol_type, re assert ray_cor.attrs["reduce_strength"] == reduce_strength input_band, red_band, *_ = self._create_test_data(name, wavelength, resolution) - res = ray_cor([input_band, red_band]) + res = ray_cor([input_band.astype(dtype), red_band.astype(dtype)]) assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) + assert res.dtype == dtype data = res.values unique = np.unique(data[~np.isnan(data)]) np.testing.assert_allclose(np.nanmean(data), exp_mean, rtol=1e-5) assert data.shape == (3, 5) np.testing.assert_allclose(unique, exp_unique, rtol=1e-5) + assert data.dtype == dtype + @pytest.mark.parametrize("dtype", [np.float32, np.float64]) @pytest.mark.parametrize("as_optionals", [False, True]) - def test_rayleigh_with_angles(self, as_optionals): + def test_rayleigh_with_angles(self, as_optionals, dtype): """Test PSPRayleighReflectance with angles provided.""" from satpy.modifiers.atmosphere import PSPRayleighReflectance aerosol_type = "rayleigh_only" ray_cor = PSPRayleighReflectance(name="B01", atmosphere="us-standard", aerosol_types=aerosol_type) - prereqs, opt_prereqs = self._get_angles_prereqs_and_opts(as_optionals) + prereqs, opt_prereqs = self._get_angles_prereqs_and_opts(as_optionals, dtype) with mock.patch("satpy.modifiers.atmosphere.get_angles") as get_angles: res = ray_cor(prereqs, opt_prereqs) get_angles.assert_not_called() assert isinstance(res, xr.DataArray) assert isinstance(res.data, da.Array) + assert res.dtype == dtype data = res.values unique = np.unique(data[~np.isnan(data)]) np.testing.assert_allclose(unique, np.array([-75.0, -37.71298492, 31.14350754]), rtol=1e-5) assert data.shape == (3, 5) + assert data.dtype == dtype - def _get_angles_prereqs_and_opts(self, as_optionals): + def _get_angles_prereqs_and_opts(self, as_optionals, dtype): wavelength = (0.45, 0.47, 0.49) resolution = 1000 input_band, red_band, *angles = self._create_test_data("B01", wavelength, resolution) - prereqs = [input_band, red_band] + 
prereqs = [input_band.astype(dtype), red_band.astype(dtype)] opt_prereqs = [] + angles = [a.astype(dtype) for a in angles] if as_optionals: opt_prereqs = angles else: From 965c1b448666bfd4d931f1505c79b185e2e2b6b9 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 24 Oct 2024 13:57:41 +0300 Subject: [PATCH 176/340] Compute Rayleigh correction with original data precision --- satpy/modifiers/atmosphere.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/satpy/modifiers/atmosphere.py b/satpy/modifiers/atmosphere.py index 1c6225f42a..a190510a83 100644 --- a/satpy/modifiers/atmosphere.py +++ b/satpy/modifiers/atmosphere.py @@ -77,7 +77,9 @@ def __call__(self, projectables, optional_datasets=None, **info): projectables = projectables + (optional_datasets or []) if len(projectables) != 6: vis, red = self.match_data_arrays(projectables) - sata, satz, suna, sunz = get_angles(vis) + # Adjust the angle data precision to match the data + # This does not affect the accuracy visibly + sata, satz, suna, sunz = [d.astype(vis.dtype) for d in get_angles(vis)] else: vis, red, sata, satz, suna, sunz = self.match_data_arrays(projectables) # First make sure the two azimuth angles are in the range 0-360: @@ -116,14 +118,14 @@ def __call__(self, projectables, optional_datasets=None, **info): refl_cor_band = corrector.get_reflectance(sunz, satz, ssadiff, vis.attrs["wavelength"][1], red.data) - if reduce_strength > 0: if reduce_lim_low > reduce_lim_high: reduce_lim_low = reduce_lim_high refl_cor_band = corrector.reduce_rayleigh_highzenith(sunz, refl_cor_band, reduce_lim_low, reduce_lim_high, reduce_strength) - proj = vis - refl_cor_band + # Need to convert again to data precision, Rayleigh calculations always promote datatype to float64 + proj = vis - refl_cor_band.astype(vis.dtype) proj.attrs = vis.attrs self.apply_modifier_info(vis, proj) return proj From 81ccc61ea27bdbcf01e010cbcd6b6269ce73892d Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Fri, 25 Oct 
2024 11:03:55 +0300 Subject: [PATCH 177/340] Fix cira stretch upcasting the data --- satpy/enhancements/__init__.py | 5 +++-- satpy/tests/enhancement_tests/test_enhancements.py | 12 ++++++++---- 2 files changed, 11 insertions(+), 6 deletions(-) diff --git a/satpy/enhancements/__init__.py b/satpy/enhancements/__init__.py index a44ca590cf..86efe1ffba 100644 --- a/satpy/enhancements/__init__.py +++ b/satpy/enhancements/__init__.py @@ -219,11 +219,12 @@ def cira_stretch(img, **kwargs): @exclude_alpha def _cira_stretch(band_data): - log_root = np.log10(0.0223) + dtype = band_data.dtype + log_root = np.log10(0.0223, dtype=dtype) denom = (1.0 - log_root) * 0.75 band_data *= 0.01 band_data = band_data.clip(np.finfo(float).eps) - band_data = np.log10(band_data) + band_data = np.log10(band_data, dtype=dtype) band_data -= log_root band_data /= denom return band_data diff --git a/satpy/tests/enhancement_tests/test_enhancements.py b/satpy/tests/enhancement_tests/test_enhancements.py index 96176fda34..fd471ae792 100644 --- a/satpy/tests/enhancement_tests/test_enhancements.py +++ b/satpy/tests/enhancement_tests/test_enhancements.py @@ -32,7 +32,7 @@ # - tmp_path -def run_and_check_enhancement(func, data, expected, **kwargs): +def run_and_check_enhancement(func, data, expected, match_dtype=False, **kwargs): """Perform basic checks that apply to multiple tests.""" from trollimage.xrimage import XRImage @@ -58,6 +58,9 @@ def run_and_check_enhancement(func, data, expected, **kwargs): res_data = res_data_arr.data.compute() # mimics what xrimage geotiff writing does assert not isinstance(res_data, da.Array) np.testing.assert_allclose(res_data, expected, atol=1.e-6, rtol=0) + if match_dtype: + assert res_data_arr.dtype == data.dtype + assert res_data.dtype == data.dtype def identical_decorator(func): @@ -109,14 +112,15 @@ def _calc_func(data): exp_data = exp_data[np.newaxis, :, :] run_and_check_enhancement(_enh_func, in_data, exp_data) - def test_cira_stretch(self): + 
@pytest.mark.parametrize("dtype", [np.float32, np.float64]) + def test_cira_stretch(self, dtype): """Test applying the cira_stretch.""" from satpy.enhancements import cira_stretch expected = np.array([[ [np.nan, -7.04045974, -7.04045974, 0.79630132, 0.95947296], - [1.05181359, 1.11651012, 1.16635571, 1.20691137, 1.24110186]]]) - run_and_check_enhancement(cira_stretch, self.ch1, expected) + [1.05181359, 1.11651012, 1.16635571, 1.20691137, 1.24110186]]], dtype=dtype) + run_and_check_enhancement(cira_stretch, self.ch1.astype(dtype), expected, match_dtype=True) def test_reinhard(self): """Test the reinhard algorithm.""" From 516992329e6c880c8bfb226f41d5d53c6e5476d4 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Fri, 25 Oct 2024 12:45:13 +0300 Subject: [PATCH 178/340] Refactor run_and_check_enhancement --- .../enhancement_tests/test_enhancements.py | 44 ++++++++++++++----- 1 file changed, 33 insertions(+), 11 deletions(-) diff --git a/satpy/tests/enhancement_tests/test_enhancements.py b/satpy/tests/enhancement_tests/test_enhancements.py index fd471ae792..6b797f0015 100644 --- a/satpy/tests/enhancement_tests/test_enhancements.py +++ b/satpy/tests/enhancement_tests/test_enhancements.py @@ -32,35 +32,57 @@ # - tmp_path -def run_and_check_enhancement(func, data, expected, match_dtype=False, **kwargs): +def run_and_check_enhancement(func, data, expected, **kwargs): """Perform basic checks that apply to multiple tests.""" + pre_attrs = data.attrs + img = _get_enhanced_image(func, data, **kwargs) + + _assert_image(img, pre_attrs, func.__name__, "palettes" in kwargs) + _assert_image_data(img, expected) + + +def _get_enhanced_image(func, data, **kwargs): from trollimage.xrimage import XRImage - pre_attrs = data.attrs img = XRImage(data) func(img, **kwargs) + return img + + +def _assert_image(img, pre_attrs, func_name, has_palette): + assert isinstance(img.data, xr.DataArray) assert isinstance(img.data.data, da.Array) + old_keys = set(pre_attrs.keys()) # It is OK to have 
"enhancement_history" added new_keys = set(img.data.attrs.keys()) - {"enhancement_history"} # In case of palettes are used, _FillValue is added. # Colorize doesn't add the fill value, so ignore that - if "palettes" in kwargs and func.__name__ != "colorize": + if has_palette and func_name != "colorize": assert "_FillValue" in new_keys # Remove it from further comparisons new_keys = new_keys - {"_FillValue"} assert old_keys == new_keys - res_data_arr = img.data - assert isinstance(res_data_arr, xr.DataArray) - assert isinstance(res_data_arr.data, da.Array) - res_data = res_data_arr.data.compute() # mimics what xrimage geotiff writing does + +def _assert_image_data(img, expected, dtype=None): + # Compute the data to mimic what xrimage geotiff writing does + res_data = img.data.data.compute() assert not isinstance(res_data, da.Array) np.testing.assert_allclose(res_data, expected, atol=1.e-6, rtol=0) - if match_dtype: - assert res_data_arr.dtype == data.dtype - assert res_data.dtype == data.dtype + if dtype: + assert img.data.dtype == dtype + assert res_data.dtype == dtype + + +def run_and_check_enhancement_with_dtype(func, data, expected, **kwargs): + """Perform basic checks that apply to multiple tests.""" + pre_attrs = data.attrs + img = _get_enhanced_image(func, data, **kwargs) + + _assert_image(img, pre_attrs, func.__name__, "palettes" in kwargs) + _assert_image_data(img, expected, dtype=data.dtype) def identical_decorator(func): @@ -120,7 +142,7 @@ def test_cira_stretch(self, dtype): expected = np.array([[ [np.nan, -7.04045974, -7.04045974, 0.79630132, 0.95947296], [1.05181359, 1.11651012, 1.16635571, 1.20691137, 1.24110186]]], dtype=dtype) - run_and_check_enhancement(cira_stretch, self.ch1.astype(dtype), expected, match_dtype=True) + run_and_check_enhancement_with_dtype(cira_stretch, self.ch1.astype(dtype), expected) def test_reinhard(self): """Test the reinhard algorithm.""" From 392fb773a617e40fc2324290516f80ec0db0da0f Mon Sep 17 00:00:00 2001 From: Panu 
Lahtinen Date: Fri, 25 Oct 2024 13:03:30 +0300 Subject: [PATCH 179/340] Fix clipping of Rayleigh strength reduction --- satpy/modifiers/atmosphere.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/satpy/modifiers/atmosphere.py b/satpy/modifiers/atmosphere.py index a190510a83..8d869b2d53 100644 --- a/satpy/modifiers/atmosphere.py +++ b/satpy/modifiers/atmosphere.py @@ -99,7 +99,7 @@ def __call__(self, projectables, optional_datasets=None, **info): aerosol_type = self.attrs.get("aerosol_type", "marine_clean_aerosol") reduce_lim_low = abs(self.attrs.get("reduce_lim_low", 70)) reduce_lim_high = abs(self.attrs.get("reduce_lim_high", 105)) - reduce_strength = np.clip(self.attrs.get("reduce_strength", 0), 0, 1) + reduce_strength = np.clip(self.attrs.get("reduce_strength", 0), 0, 1).astype(vis.dtype) logger.info("Removing Rayleigh scattering with atmosphere '%s' and " "aerosol type '%s' for '%s'", @@ -118,6 +118,7 @@ def __call__(self, projectables, optional_datasets=None, **info): refl_cor_band = corrector.get_reflectance(sunz, satz, ssadiff, vis.attrs["wavelength"][1], red.data) + if reduce_strength > 0: if reduce_lim_low > reduce_lim_high: reduce_lim_low = reduce_lim_high @@ -125,7 +126,7 @@ def __call__(self, projectables, optional_datasets=None, **info): reduce_lim_low, reduce_lim_high, reduce_strength) # Need to convert again to data precision, Rayleigh calculations always promote datatype to float64 - proj = vis - refl_cor_band.astype(vis.dtype) + proj = vis - refl_cor_band proj.attrs = vis.attrs self.apply_modifier_info(vis, proj) return proj From 118fc93b407ceb4a8688057d1c43233cddfcebf6 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Fri, 25 Oct 2024 13:14:15 +0300 Subject: [PATCH 180/340] Remove obsolete comment --- satpy/modifiers/atmosphere.py | 1 - 1 file changed, 1 deletion(-) diff --git a/satpy/modifiers/atmosphere.py b/satpy/modifiers/atmosphere.py index 8d869b2d53..c7144c27ca 100644 --- a/satpy/modifiers/atmosphere.py +++ 
b/satpy/modifiers/atmosphere.py @@ -125,7 +125,6 @@ def __call__(self, projectables, optional_datasets=None, **info): refl_cor_band = corrector.reduce_rayleigh_highzenith(sunz, refl_cor_band, reduce_lim_low, reduce_lim_high, reduce_strength) - # Need to convert again to data precision, Rayleigh calculations always promote datatype to float64 proj = vis - refl_cor_band proj.attrs = vis.attrs self.apply_modifier_info(vis, proj) From de80552b37282dcc5cd5be7fe410d777a5ad341d Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Fri, 25 Oct 2024 14:50:10 +0300 Subject: [PATCH 181/340] Test also computed dtypes --- satpy/tests/test_composites.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index 50b9b88b74..91c383d1b3 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -266,6 +266,7 @@ def test_basic_no_high_res(self, dtype): res = comp((self.ds1.astype(dtype), self.ds2.astype(dtype), self.ds3.astype(dtype))) assert res.shape == (3, 2, 2) assert res.dtype == dtype + assert res.values.dtype == dtype @pytest.mark.parametrize("dtype", [np.float32, np.float64]) def test_basic_no_sharpen(self, dtype): @@ -276,6 +277,7 @@ def test_basic_no_sharpen(self, dtype): optional_datasets=(self.ds4.astype(dtype),)) assert res.shape == (3, 2, 2) assert res.dtype == dtype + assert res.values.dtype == dtype @pytest.mark.parametrize("dtype", [np.float32, np.float64]) @pytest.mark.parametrize( From 3f1076ae94af61c375be206d260b3b96d86520b9 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Fri, 25 Oct 2024 14:50:50 +0300 Subject: [PATCH 182/340] Remove unnecessary dtype parametrization --- satpy/tests/test_composites.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/satpy/tests/test_composites.py b/satpy/tests/test_composites.py index 91c383d1b3..eb3f90b715 100644 --- a/satpy/tests/test_composites.py +++ b/satpy/tests/test_composites.py @@ -250,8 +250,7 @@ def 
test_more_than_three_datasets(self): with pytest.raises(ValueError, match="Expected 3 datasets, got 4"): comp((self.ds1, self.ds2, self.ds3, self.ds1), optional_datasets=(self.ds4_big,)) - @pytest.mark.parametrize("dtype", [np.float32, np.float64]) - def test_self_sharpened_no_high_res(self, dtype): + def test_self_sharpened_no_high_res(self): """Test for exception when no high_res band is specified.""" from satpy.composites import SelfSharpenedRGB comp = SelfSharpenedRGB(name="true_color", high_resolution_band=None) From d6b8d011e2afb78001be5e2280c980a01262034b Mon Sep 17 00:00:00 2001 From: "Adam.Dybbroe" Date: Fri, 25 Oct 2024 15:40:17 +0200 Subject: [PATCH 183/340] Bugfix the VIIRS lowres version of the day-microphysics. It should use only M-bands! Signed-off-by: Adam.Dybbroe --- satpy/etc/composites/viirs.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/etc/composites/viirs.yaml b/satpy/etc/composites/viirs.yaml index bebf6c5833..aa182604af 100644 --- a/satpy/etc/composites/viirs.yaml +++ b/satpy/etc/composites/viirs.yaml @@ -405,7 +405,7 @@ composites: - name: M07 modifiers: [sunz_corrected] - name: M12 - modifiers: [nir_reflectance] + modifiers: [nir_reflectance_lowres] - M15 standard_name: day_microphysics From b470a8164524dc035afc5b2d92d791a339838fa1 Mon Sep 17 00:00:00 2001 From: Gerrit Holl Date: Wed, 30 Oct 2024 09:34:35 +0100 Subject: [PATCH 184/340] =?UTF-8?q?Remove=20co2=5Fcorrected=20modifier=20f?= =?UTF-8?q?or=20METimage=203.74=20=C2=B5m?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Remove the co2_corrected modifier for the METimage 3.74 µm channel. The CO2 correction was developed specifically for the SEVIRI 3.9 µm channel and should not be applied to channel where the CO₂ impact is different. 
--- satpy/etc/composites/vii.yaml | 1 - 1 file changed, 1 deletion(-) diff --git a/satpy/etc/composites/vii.yaml b/satpy/etc/composites/vii.yaml index e5c4f0786b..7828050170 100644 --- a/satpy/etc/composites/vii.yaml +++ b/satpy/etc/composites/vii.yaml @@ -87,7 +87,6 @@ composites: - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: - name: 'vii_3740' - modifiers: [ co2_corrected ] - name: 'vii_10690' - compositor: !!python/name:satpy.composites.DifferenceCompositor prerequisites: From 04c82eaa012562bfad170045ac925b47651c8a90 Mon Sep 17 00:00:00 2001 From: Xuanhan Lai Date: Thu, 31 Oct 2024 11:25:17 +0800 Subject: [PATCH 185/340] Update mersi3_l1b.yaml Fixed bugs related to incorrect "calibration_index" values for each visible band (from band 5 to band 19) with 1 km spatial resolution. Xuanhan Lai, 2024-10-31 --- satpy/etc/readers/mersi3_l1b.yaml | 30 +++++++++++++++--------------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/satpy/etc/readers/mersi3_l1b.yaml b/satpy/etc/readers/mersi3_l1b.yaml index d7078786db..4cc35efa40 100644 --- a/satpy/etc/readers/mersi3_l1b.yaml +++ b/satpy/etc/readers/mersi3_l1b.yaml @@ -164,7 +164,7 @@ datasets: file_key: Data/EV_1KM_RefSB band_index: 0 calibration_key: Calibration/VIS_Cal_Coeff - calibration_index: 0 + calibration_index: 4 coordinates: [longitude, latitude] calibration: reflectance: @@ -184,7 +184,7 @@ datasets: file_key: Data/EV_1KM_RefSB band_index: 1 calibration_key: Calibration/VIS_Cal_Coeff - calibration_index: 1 + calibration_index: 5 coordinates: [longitude, latitude] calibration: reflectance: @@ -204,7 +204,7 @@ datasets: file_key: Data/EV_1KM_RefSB band_index: 2 calibration_key: Calibration/VIS_Cal_Coeff - calibration_index: 2 + calibration_index: 6 coordinates: [longitude, latitude] calibration: reflectance: @@ -224,7 +224,7 @@ datasets: file_key: Data/EV_1KM_RefSB band_index: 3 calibration_key: Calibration/VIS_Cal_Coeff - calibration_index: 3 + calibration_index: 7 
coordinates: [longitude, latitude] calibration: reflectance: @@ -244,7 +244,7 @@ datasets: file_key: Data/EV_1KM_RefSB band_index: 4 calibration_key: Calibration/VIS_Cal_Coeff - calibration_index: 4 + calibration_index: 8 coordinates: [longitude, latitude] calibration: reflectance: @@ -264,7 +264,7 @@ datasets: file_key: Data/EV_1KM_RefSB band_index: 5 calibration_key: Calibration/VIS_Cal_Coeff - calibration_index: 5 + calibration_index: 9 coordinates: [longitude, latitude] calibration: reflectance: @@ -284,7 +284,7 @@ datasets: file_key: Data/EV_1KM_RefSB band_index: 6 calibration_key: Calibration/VIS_Cal_Coeff - calibration_index: 6 + calibration_index: 10 coordinates: [longitude, latitude] calibration: reflectance: @@ -304,7 +304,7 @@ datasets: file_key: Data/EV_1KM_RefSB band_index: 7 calibration_key: Calibration/VIS_Cal_Coeff - calibration_index: 7 + calibration_index: 11 coordinates: [longitude, latitude] calibration: reflectance: @@ -324,7 +324,7 @@ datasets: file_key: Data/EV_1KM_RefSB band_index: 8 calibration_key: Calibration/VIS_Cal_Coeff - calibration_index: 8 + calibration_index: 12 coordinates: [longitude, latitude] calibration: reflectance: @@ -344,7 +344,7 @@ datasets: file_key: Data/EV_1KM_RefSB band_index: 9 calibration_key: Calibration/VIS_Cal_Coeff - calibration_index: 9 + calibration_index: 13 coordinates: [longitude, latitude] calibration: reflectance: @@ -364,7 +364,7 @@ datasets: file_key: Data/EV_1KM_RefSB band_index: 10 calibration_key: Calibration/VIS_Cal_Coeff - calibration_index: 10 + calibration_index: 14 coordinates: [longitude, latitude] calibration: reflectance: @@ -384,7 +384,7 @@ datasets: file_key: Data/EV_1KM_RefSB band_index: 11 calibration_key: Calibration/VIS_Cal_Coeff - calibration_index: 11 + calibration_index: 15 coordinates: [longitude, latitude] calibration: reflectance: @@ -404,7 +404,7 @@ datasets: file_key: Data/EV_1KM_RefSB band_index: 12 calibration_key: Calibration/VIS_Cal_Coeff - calibration_index: 12 + 
calibration_index: 16 coordinates: [longitude, latitude] calibration: reflectance: @@ -424,7 +424,7 @@ datasets: file_key: Data/EV_1KM_RefSB band_index: 13 calibration_key: Calibration/VIS_Cal_Coeff - calibration_index: 13 + calibration_index: 17 coordinates: [longitude, latitude] calibration: reflectance: @@ -444,7 +444,7 @@ datasets: file_key: Data/EV_1KM_RefSB band_index: 14 calibration_key: Calibration/VIS_Cal_Coeff - calibration_index: 14 + calibration_index: 18 coordinates: [longitude, latitude] calibration: reflectance: From b09b7f84dbfe19fa4d6426093936a7b148282ff7 Mon Sep 17 00:00:00 2001 From: Xuanhan Lai Date: Thu, 31 Oct 2024 11:25:48 +0800 Subject: [PATCH 186/340] Update mersi2_l1b.yaml Fixed bugs related to incorrect "calibration_index" values for each visible band (from band 5 to band 19) with 1 km spatial resolution. Xuanhan Lai, 2024-10-31 --- satpy/etc/readers/mersi2_l1b.yaml | 50 +++++++++++-------------------- 1 file changed, 17 insertions(+), 33 deletions(-) diff --git a/satpy/etc/readers/mersi2_l1b.yaml b/satpy/etc/readers/mersi2_l1b.yaml index 3e0ecb390c..cc28d61595 100644 --- a/satpy/etc/readers/mersi2_l1b.yaml +++ b/satpy/etc/readers/mersi2_l1b.yaml @@ -176,7 +176,7 @@ datasets: file_key: Data/EV_1KM_RefSB band_index: 0 calibration_key: Calibration/VIS_Cal_Coeff - calibration_index: 0 + calibration_index: 4 coordinates: [longitude, latitude] calibration: reflectance: @@ -196,7 +196,7 @@ datasets: file_key: Data/EV_1KM_RefSB band_index: 1 calibration_key: Calibration/VIS_Cal_Coeff - calibration_index: 1 + calibration_index: 5 coordinates: [longitude, latitude] calibration: reflectance: @@ -216,7 +216,7 @@ datasets: file_key: Data/EV_1KM_RefSB band_index: 2 calibration_key: Calibration/VIS_Cal_Coeff - calibration_index: 2 + calibration_index: 6 coordinates: [longitude, latitude] calibration: reflectance: @@ -236,7 +236,7 @@ datasets: file_key: Data/EV_1KM_RefSB band_index: 3 calibration_key: Calibration/VIS_Cal_Coeff - calibration_index: 3 + 
calibration_index: 7 coordinates: [longitude, latitude] calibration: reflectance: @@ -256,7 +256,7 @@ datasets: file_key: Data/EV_1KM_RefSB band_index: 4 calibration_key: Calibration/VIS_Cal_Coeff - calibration_index: 4 + calibration_index: 8 coordinates: [longitude, latitude] calibration: reflectance: @@ -276,7 +276,7 @@ datasets: file_key: Data/EV_1KM_RefSB band_index: 5 calibration_key: Calibration/VIS_Cal_Coeff - calibration_index: 5 + calibration_index: 9 coordinates: [longitude, latitude] calibration: reflectance: @@ -296,7 +296,7 @@ datasets: file_key: Data/EV_1KM_RefSB band_index: 6 calibration_key: Calibration/VIS_Cal_Coeff - calibration_index: 6 + calibration_index: 10 coordinates: [longitude, latitude] calibration: reflectance: @@ -316,7 +316,7 @@ datasets: file_key: Data/EV_1KM_RefSB band_index: 7 calibration_key: Calibration/VIS_Cal_Coeff - calibration_index: 7 + calibration_index: 11 coordinates: [longitude, latitude] calibration: reflectance: @@ -336,7 +336,7 @@ datasets: file_key: Data/EV_1KM_RefSB band_index: 8 calibration_key: Calibration/VIS_Cal_Coeff - calibration_index: 8 + calibration_index: 12 coordinates: [longitude, latitude] calibration: reflectance: @@ -356,7 +356,7 @@ datasets: file_key: Data/EV_1KM_RefSB band_index: 9 calibration_key: Calibration/VIS_Cal_Coeff - calibration_index: 9 + calibration_index: 13 coordinates: [longitude, latitude] calibration: reflectance: @@ -376,7 +376,7 @@ datasets: file_key: Data/EV_1KM_RefSB band_index: 10 calibration_key: Calibration/VIS_Cal_Coeff - calibration_index: 10 + calibration_index: 14 coordinates: [longitude, latitude] calibration: reflectance: @@ -396,7 +396,7 @@ datasets: file_key: Data/EV_1KM_RefSB band_index: 11 calibration_key: Calibration/VIS_Cal_Coeff - calibration_index: 11 + calibration_index: 15 coordinates: [longitude, latitude] calibration: reflectance: @@ -416,7 +416,7 @@ datasets: file_key: Data/EV_1KM_RefSB band_index: 12 calibration_key: Calibration/VIS_Cal_Coeff - 
calibration_index: 12 + calibration_index: 16 coordinates: [longitude, latitude] calibration: reflectance: @@ -436,7 +436,7 @@ datasets: file_key: Data/EV_1KM_RefSB band_index: 13 calibration_key: Calibration/VIS_Cal_Coeff - calibration_index: 13 + calibration_index: 17 coordinates: [longitude, latitude] calibration: reflectance: @@ -456,7 +456,7 @@ datasets: file_key: Data/EV_1KM_RefSB band_index: 14 calibration_key: Calibration/VIS_Cal_Coeff - calibration_index: 14 + calibration_index: 18 coordinates: [longitude, latitude] calibration: reflectance: @@ -468,6 +468,8 @@ datasets: counts: units: "1" standard_name: counts + + # Not sure how to get radiance for BT channels '20': name: '20' wavelength: [3.710, 3.800, 3.890] @@ -482,9 +484,6 @@ datasets: brightness_temperature: units: "K" standard_name: toa_brightness_temperature - radiance: - units: 'mW/ (m2 cm-1 sr)' - standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: counts @@ -502,9 +501,6 @@ datasets: brightness_temperature: units: "K" standard_name: toa_brightness_temperature - radiance: - units: 'mW/ (m2 cm-1 sr)' - standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: counts @@ -522,9 +518,6 @@ datasets: brightness_temperature: units: "K" standard_name: toa_brightness_temperature - radiance: - units: 'mW/ (m2 cm-1 sr)' - standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: counts @@ -542,9 +535,6 @@ datasets: brightness_temperature: units: "K" standard_name: toa_brightness_temperature - radiance: - units: 'mW/ (m2 cm-1 sr)' - standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: counts @@ -568,9 +558,6 @@ datasets: brightness_temperature: units: "K" standard_name: toa_brightness_temperature - radiance: - units: 'mW/ (m2 cm-1 sr)' - standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: counts @@ -594,9 +581,6 @@ datasets: 
brightness_temperature: units: "K" standard_name: toa_brightness_temperature - radiance: - units: 'mW/ (m2 cm-1 sr)' - standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: counts From db67d2e04566bb85a3428064f5bf715598b62b85 Mon Sep 17 00:00:00 2001 From: Xuanhan Lai Date: Thu, 31 Oct 2024 11:33:20 +0800 Subject: [PATCH 187/340] Update AUTHORS.md --- AUTHORS.md | 1 + 1 file changed, 1 insertion(+) diff --git a/AUTHORS.md b/AUTHORS.md index 6716dcb889..5159980166 100644 --- a/AUTHORS.md +++ b/AUTHORS.md @@ -106,3 +106,4 @@ The following people have made contributions to this project: - [Sara Hörnquist (shornqui)](https://github.com/shornqui) - [Antonio Valentino](https://github.com/avalentino) - [Clément (ludwigvonkoopa)](https://github.com/ludwigVonKoopa) +- [Xuanhan Lai (sgxl)](https://github.com/sgxl) From 4ca81ca8a6a951d5a0a782bea71c739aef1bb8ef Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Thu, 31 Oct 2024 10:39:18 +0000 Subject: [PATCH 188/340] Fix CF writer crashing with netcdf devel --- satpy/tests/writer_tests/test_cf.py | 2 +- satpy/writers/cf_writer.py | 5 ++++- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index 1a72894108..eba2cd537f 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -542,7 +542,7 @@ def test_no_warning_if_backends_match(self, scene, filename, monkeypatch): import netCDF4 with monkeypatch.context() as m: m.setattr(netCDF4, "__version__", "1.6.0") - m.setattr(netCDF4, "__netcdf4libversion__", "4.9.0") + m.setattr(netCDF4, "__netcdf4libversion__", "4.9.0-development") m.setattr(xr, "__version__", "2022.12.0") with warnings.catch_warnings(): scene.save_datasets(filename=filename, writer="cf") diff --git a/satpy/writers/cf_writer.py b/satpy/writers/cf_writer.py index fdcdbd4e8c..25a60ee28f 100644 --- a/satpy/writers/cf_writer.py +++ b/satpy/writers/cf_writer.py 
@@ -390,8 +390,11 @@ def _backend_versions_match(): def _get_backend_versions(): import netCDF4 + + # Make libnetcdf development version compatible with PEP440 + libnetcdf_version = netCDF4.__netcdf4libversion__.replace("development", "dev") return { "netCDF4": Version(netCDF4.__version__), - "libnetcdf": Version(netCDF4.__netcdf4libversion__), + "libnetcdf": Version(libnetcdf_version), "xarray": Version(xr.__version__) } From 927769f013faf7a806cd8f5ed22dc289e6a39158 Mon Sep 17 00:00:00 2001 From: Xuanhan Lai Date: Fri, 1 Nov 2024 09:00:32 +0800 Subject: [PATCH 189/340] Update mersi2_l1b.yaml Fix bugs related to the incorrect removal of radiance calculations. --- satpy/etc/readers/mersi2_l1b.yaml | 20 ++++++++++++++++++-- 1 file changed, 18 insertions(+), 2 deletions(-) diff --git a/satpy/etc/readers/mersi2_l1b.yaml b/satpy/etc/readers/mersi2_l1b.yaml index cc28d61595..352a3a4c83 100644 --- a/satpy/etc/readers/mersi2_l1b.yaml +++ b/satpy/etc/readers/mersi2_l1b.yaml @@ -468,8 +468,6 @@ datasets: counts: units: "1" standard_name: counts - - # Not sure how to get radiance for BT channels '20': name: '20' wavelength: [3.710, 3.800, 3.890] @@ -484,6 +482,9 @@ datasets: brightness_temperature: units: "K" standard_name: toa_brightness_temperature + radiance: + units: 'mW/ (m2 cm-1 sr)' + standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: counts @@ -501,6 +502,9 @@ datasets: brightness_temperature: units: "K" standard_name: toa_brightness_temperature + radiance: + units: 'mW/ (m2 cm-1 sr)' + standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: counts @@ -518,6 +522,9 @@ datasets: brightness_temperature: units: "K" standard_name: toa_brightness_temperature + radiance: + units: 'mW/ (m2 cm-1 sr)' + standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: counts @@ -535,6 +542,9 @@ datasets: brightness_temperature: units: "K" standard_name: 
toa_brightness_temperature + radiance: + units: 'mW/ (m2 cm-1 sr)' + standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: counts @@ -558,6 +568,9 @@ datasets: brightness_temperature: units: "K" standard_name: toa_brightness_temperature + radiance: + units: 'mW/ (m2 cm-1 sr)' + standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: counts @@ -581,6 +594,9 @@ datasets: brightness_temperature: units: "K" standard_name: toa_brightness_temperature + radiance: + units: 'mW/ (m2 cm-1 sr)' + standard_name: toa_outgoing_radiance_per_unit_wavelength counts: units: "1" standard_name: counts From 8df630d3ca871afa0687dc3231fd22315d4a555a Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Fri, 1 Nov 2024 07:54:26 +0000 Subject: [PATCH 190/340] Make libnetcdf version parsing more robust --- satpy/tests/writer_tests/test_cf.py | 15 +++++++++++---- satpy/writers/cf_writer.py | 25 ++++++++++++++++++++----- 2 files changed, 31 insertions(+), 9 deletions(-) diff --git a/satpy/tests/writer_tests/test_cf.py b/satpy/tests/writer_tests/test_cf.py index eba2cd537f..ba305fbe58 100644 --- a/satpy/tests/writer_tests/test_cf.py +++ b/satpy/tests/writer_tests/test_cf.py @@ -528,12 +528,19 @@ def _assert_encoding_as_expected(self, filename, expected): assert f["test-array"].dtype == expected["dtype"] assert f["test-array"].encoding["complevel"] == expected["complevel"] - def test_warning_if_backends_dont_match(self, scene, filename, monkeypatch): + @pytest.mark.parametrize( + "versions", + [ + {"netCDF4": "1.5.0", "libnetcdf": "4.9.1-development"}, + {"netCDF4": "1.6.0", "libnetcdf": "invalid-version"} + ] + ) + def test_warning_if_backends_dont_match(self, scene, filename, monkeypatch, versions): """Test warning if backends don't match.""" import netCDF4 with monkeypatch.context() as m: - m.setattr(netCDF4, "__version__", "1.5.0") - m.setattr(netCDF4, "__netcdf4libversion__", "4.9.1") + m.setattr(netCDF4, 
"__version__", versions["netCDF4"]) + m.setattr(netCDF4, "__netcdf4libversion__", versions["libnetcdf"]) with pytest.warns(UserWarning, match=r"Backend version mismatch"): scene.save_datasets(filename=filename, writer="cf") @@ -542,7 +549,7 @@ def test_no_warning_if_backends_match(self, scene, filename, monkeypatch): import netCDF4 with monkeypatch.context() as m: m.setattr(netCDF4, "__version__", "1.6.0") - m.setattr(netCDF4, "__netcdf4libversion__", "4.9.0-development") + m.setattr(netCDF4, "__netcdf4libversion__", "4.9.0") m.setattr(xr, "__version__", "2022.12.0") with warnings.catch_warnings(): scene.save_datasets(filename=filename, writer="cf") diff --git a/satpy/writers/cf_writer.py b/satpy/writers/cf_writer.py index 25a60ee28f..c0b7fd827b 100644 --- a/satpy/writers/cf_writer.py +++ b/satpy/writers/cf_writer.py @@ -158,7 +158,7 @@ import numpy as np import xarray as xr -from packaging.version import Version +from packaging.version import InvalidVersion, Version from satpy.cf.coords import EPOCH # noqa: F401 (for backward compatibility) from satpy.writers import Writer @@ -390,11 +390,26 @@ def _backend_versions_match(): def _get_backend_versions(): import netCDF4 - - # Make libnetcdf development version compatible with PEP440 - libnetcdf_version = netCDF4.__netcdf4libversion__.replace("development", "dev") + libnetcdf_version = _parse_libnetcdf_version( + netCDF4.__netcdf4libversion__ + ) return { "netCDF4": Version(netCDF4.__version__), - "libnetcdf": Version(libnetcdf_version), + "libnetcdf": libnetcdf_version, "xarray": Version(xr.__version__) } + + +def _parse_libnetcdf_version(version_str): + # Make libnetcdf development version compatible with PEP440 + version_str = version_str.replace("development", "dev") + try: + return Version(version_str) + except InvalidVersion: + warnings.warn( + f"Unable to parse netcdf-c version {version_str}, " + f"using 0.0.0 as fallback", + UserWarning, + stacklevel=3 + ) + return Version("0.0.0") From 
b861932a6a33909bdf3955a3117b8dc7d3363a04 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 1 Nov 2024 10:03:02 +0000 Subject: [PATCH 191/340] Bump pypa/gh-action-pypi-publish from 1.10.2 to 1.11.0 Bumps [pypa/gh-action-pypi-publish](https://github.com/pypa/gh-action-pypi-publish) from 1.10.2 to 1.11.0. - [Release notes](https://github.com/pypa/gh-action-pypi-publish/releases) - [Commits](https://github.com/pypa/gh-action-pypi-publish/compare/v1.10.2...v1.11.0) --- updated-dependencies: - dependency-name: pypa/gh-action-pypi-publish dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- .github/workflows/deploy-sdist.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/deploy-sdist.yaml b/.github/workflows/deploy-sdist.yaml index 3a8e43181a..479e7f8281 100644 --- a/.github/workflows/deploy-sdist.yaml +++ b/.github/workflows/deploy-sdist.yaml @@ -23,7 +23,7 @@ jobs: - name: Publish package to PyPI if: github.event.action == 'published' - uses: pypa/gh-action-pypi-publish@v1.10.2 + uses: pypa/gh-action-pypi-publish@v1.11.0 with: user: __token__ password: ${{ secrets.pypi_password }} From f36a096ce254b21a07d62fa5492a8178a255a5de Mon Sep 17 00:00:00 2001 From: Mario Hros <966992+k3a@users.noreply.github.com> Date: Tue, 15 Oct 2024 16:41:04 +0200 Subject: [PATCH 192/340] Fix data type when getting a line offset for a segmented hrit_jma --- AUTHORS.md | 1 + satpy/readers/hrit_jma.py | 4 ++-- satpy/tests/reader_tests/test_ahi_hrit.py | 12 ++++++------ 3 files changed, 9 insertions(+), 8 deletions(-) diff --git a/AUTHORS.md b/AUTHORS.md index 6716dcb889..27ab02fdc1 100644 --- a/AUTHORS.md +++ b/AUTHORS.md @@ -38,6 +38,7 @@ The following people have made contributions to this project: - [Gerrit Holl (gerritholl)](https://github.com/gerritholl) - Deutscher Wetterdienst - [David Hoese 
(djhoese)](https://github.com/djhoese) - [Marc Honnorat (honnorat)](https://github.com/honnorat) +- [Mario Hros (k3a)](https://github.com/k3a) - [Lloyd Hughes (system123)](https://github.com/system123) - [Sara Hörnquist (shornqui)](https://github.com/shornqui) - [Mikhail Itkin (mitkin)](https://github.com/mitkin) diff --git a/satpy/readers/hrit_jma.py b/satpy/readers/hrit_jma.py index ac83776a6a..0266ce945f 100644 --- a/satpy/readers/hrit_jma.py +++ b/satpy/readers/hrit_jma.py @@ -345,8 +345,8 @@ def _get_line_offset(self): if self.is_segmented: # loff in the file specifies the offset of the full disk image # centre (1375/2750 for VIS/IR) - segment_number = self.mda["segment_sequence_number"] - 1 - loff -= (self.mda["total_no_image_segm"] - segment_number - 1) * nlines + segment_number = int(self.mda["segment_sequence_number"]) - 1 + loff -= (int(self.mda["total_no_image_segm"]) - segment_number - 1) * nlines elif self.area_id in (NORTH_HEMIS, SOUTH_HEMIS): # loff in the file specifies the start line of the half disk image # in the full disk image diff --git a/satpy/tests/reader_tests/test_ahi_hrit.py b/satpy/tests/reader_tests/test_ahi_hrit.py index 24e01e6802..ea4afab7f1 100644 --- a/satpy/tests/reader_tests/test_ahi_hrit.py +++ b/satpy/tests/reader_tests/test_ahi_hrit.py @@ -75,8 +75,8 @@ def _get_mda(self, loff=5500.0, coff=5500.0, nlines=11000, ncols=11000, proj_h8 = b"GEOS(140.70) " proj_mtsat2 = b"GEOS(145.00) " proj_name = proj_h8 if platform == "Himawari-8" else proj_mtsat2 - return {"image_segm_seq_no": segno, - "total_no_image_segm": numseg, + return {"image_segm_seq_no": np.uint8(segno), + "total_no_image_segm": np.uint8(numseg), "projection_name": proj_name, "projection_parameters": { "a": 6378169.00, @@ -85,10 +85,10 @@ def _get_mda(self, loff=5500.0, coff=5500.0, nlines=11000, ncols=11000, }, "cfac": 10233128, "lfac": 10233128, - "coff": coff, - "loff": loff, - "number_of_columns": ncols, - "number_of_lines": nlines, + "coff": np.int32(coff), + 
"loff": np.int32(loff), + "number_of_columns": np.uint16(ncols), + "number_of_lines": np.uint16(nlines), "image_data_function": idf, "image_observation_time": self._get_acq_time(nlines)} From 20d79de6464ff866ae4f9b2001bf8928b906b183 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Sat, 2 Nov 2024 09:56:59 +0200 Subject: [PATCH 193/340] Test dtype also for computed data --- satpy/tests/test_modifiers.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/satpy/tests/test_modifiers.py b/satpy/tests/test_modifiers.py index 8bdc65f54d..a4aca52e64 100644 --- a/satpy/tests/test_modifiers.py +++ b/satpy/tests/test_modifiers.py @@ -202,7 +202,9 @@ def test_default_settings(self, sunz_ds1, sunz_sza, dtype): res = self.default((sunz_ds1.astype(dtype), sunz_sza.astype(dtype)), test_attr="test") expected = np.array([[0.02916261, 0.02839063], [0.02949383, 0.02871911]], dtype=dtype) assert res.dtype == dtype - np.testing.assert_allclose(res.values, + values = res.values + assert values.dtype == dtype + np.testing.assert_allclose(values, expected, rtol=2e-5) @@ -212,7 +214,9 @@ def test_custom_settings(self, sunz_ds1, sunz_sza, dtype): res = self.custom((sunz_ds1.astype(dtype), sunz_sza.astype(dtype)), test_attr="test") expected = np.array([[0.01041319, 0.01030033], [0.01046164, 0.01034834]], dtype=dtype) assert res.dtype == dtype - np.testing.assert_allclose(res.values, + values = res.values + assert values.dtype == dtype + np.testing.assert_allclose(values, expected, rtol=1e-5) From cc787ecd1535d6dfcc35dbbc5891a0aecea10718 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 4 Nov 2024 10:35:51 -0600 Subject: [PATCH 194/340] Fix MODIS readers chunking compatibility with newer dask --- satpy/readers/hdfeos_base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/hdfeos_base.py b/satpy/readers/hdfeos_base.py index 0f563efe2c..21d8507a08 100644 --- a/satpy/readers/hdfeos_base.py +++ b/satpy/readers/hdfeos_base.py @@ -238,7 
+238,7 @@ def _chunks_for_variable(self, hdf_dataset): return normalize_low_res_chunks( (1,) * num_nonyx_dims + ("auto", -1), var_shape, - (1,) * num_nonyx_dims + (scan_length_250m, -1), + (1,) * num_nonyx_dims + (scan_length_250m, var_shape[-1]), (1,) * num_nonyx_dims + (res_multiplier, res_multiplier), np.float32, ) From 619304061e195d74ba28c7b498603f19c0fe38c2 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Mon, 4 Nov 2024 19:12:28 +0200 Subject: [PATCH 195/340] Use log-magic to simplify code --- satpy/modifiers/angles.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/modifiers/angles.py b/satpy/modifiers/angles.py index 028d9357ab..679af8b3b9 100644 --- a/satpy/modifiers/angles.py +++ b/satpy/modifiers/angles.py @@ -583,7 +583,7 @@ def _sunzen_reduction_ndarray(data: np.ndarray, # invert the reduction factor such that minimum reduction is done at `limit` and gradually increases towards max_sza with np.errstate(invalid="ignore"): # we expect space pixels to be invalid - reduction_factor = 1. - np.log(reduction_factor + 1) / np.log(2, dtype=data.dtype) + reduction_factor = 1. - np.log2(reduction_factor + 1) # apply non-linearity to the reduction factor for a non-linear reduction of the signal. This can be used for a # slower or faster transision to higher/lower fractions at the ndvi extremes. 
If strength equals 1.0, this From 28a793c02eb83649ce2e2ea544dc5c34d41c3f0a Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 4 Nov 2024 11:26:10 -0600 Subject: [PATCH 196/340] Remove conda defaults channel in CI --- .github/workflows/ci.yaml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 84622f221c..f50be0e9b8 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -42,6 +42,8 @@ jobs: python-version: ${{ matrix.python-version }} activate-environment: test-environment channels: conda-forge + conda-remove-defaults: true + channel-priority: strict - name: Set cache environment variables shell: bash -l {0} From 1cc1b873f155a892392bab562fad2bf532956bdb Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 4 Nov 2024 22:40:41 +0000 Subject: [PATCH 197/340] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.6.9 → v0.7.2](https://github.com/astral-sh/ruff-pre-commit/compare/v0.6.9...v0.7.2) - [github.com/pre-commit/mirrors-mypy: v1.11.2 → v1.13.0](https://github.com/pre-commit/mirrors-mypy/compare/v1.11.2...v1.13.0) --- .pre-commit-config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index fc3d7e71e7..fe36218d40 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -3,7 +3,7 @@ fail_fast: false repos: - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. 
- rev: 'v0.6.9' + rev: 'v0.7.2' hooks: - id: ruff - repo: https://github.com/pre-commit/pre-commit-hooks @@ -19,7 +19,7 @@ repos: - id: bandit args: [--ini, .bandit] - repo: https://github.com/pre-commit/mirrors-mypy - rev: 'v1.11.2' # Use the sha / tag you want to point at + rev: 'v1.13.0' # Use the sha / tag you want to point at hooks: - id: mypy additional_dependencies: From a0a7f53ccb3441c51bcdcc59c5cf2131c8c4e671 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Tue, 5 Nov 2024 09:22:16 +0100 Subject: [PATCH 198/340] Remove unneeded call to private scipy function --- satpy/readers/sar_c_safe.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/satpy/readers/sar_c_safe.py b/satpy/readers/sar_c_safe.py index e8676c9b56..437d25929a 100644 --- a/satpy/readers/sar_c_safe.py +++ b/satpy/readers/sar_c_safe.py @@ -511,15 +511,14 @@ def intp(grid_x, grid_y, interpolator): def interpolate_xarray_linear(xpoints, ypoints, values, shape, chunks=CHUNK_SIZE): """Interpolate linearly, generating a dask array.""" - from scipy.interpolate.interpnd import LinearNDInterpolator, _ndim_coords_from_arrays + from scipy.interpolate.interpnd import LinearNDInterpolator if isinstance(chunks, (list, tuple)): vchunks, hchunks = chunks else: vchunks, hchunks = chunks, chunks - - points = _ndim_coords_from_arrays(np.vstack((np.asarray(ypoints, dtype=np.uint16), - np.asarray(xpoints, dtype=np.uint16))).T) + points = np.vstack((np.asarray(ypoints, dtype=np.uint16), + np.asarray(xpoints, dtype=np.uint16))).T interpolator = LinearNDInterpolator(points, values) From 1e278836e5ab2740f9c293a5c6b9f57ae7072922 Mon Sep 17 00:00:00 2001 From: Simon Proud Date: Tue, 5 Nov 2024 10:59:47 +0100 Subject: [PATCH 199/340] Update MODIS additional L1 datasets to fix tests. 
--- satpy/etc/readers/modis_l1b.yaml | 4 ++-- satpy/readers/hdfeos_base.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/satpy/etc/readers/modis_l1b.yaml b/satpy/etc/readers/modis_l1b.yaml index b0f0a3f7f3..94e8d3fb20 100644 --- a/satpy/etc/readers/modis_l1b.yaml +++ b/satpy/etc/readers/modis_l1b.yaml @@ -486,8 +486,8 @@ datasets: coordinates: [longitude, latitude] file_type: [hdf_eos_geo, hdf_eos_data_1000m] - land_sea_mask: - name: land_sea_mask + landsea_mask: + name: landsea_mask sensor: modis resolution: 1000 coordinates: [longitude, latitude] diff --git a/satpy/readers/hdfeos_base.py b/satpy/readers/hdfeos_base.py index 3bb7559ec9..db8d8db188 100644 --- a/satpy/readers/hdfeos_base.py +++ b/satpy/readers/hdfeos_base.py @@ -334,7 +334,7 @@ class HDFEOSGeoReader(HDFEOSBaseFileReader): "solar_azimuth_angle": ("SolarAzimuth", "SolarAzimuth"), "solar_zenith_angle": ("SolarZenith", "Solar_Zenith"), "water_present": "WaterPresent", - "land_sea_mask": "Land/SeaMask", + "landsea_mask": "Land/SeaMask", "height": "Height", "range": "Range", } From b7f07387db2d89b188c861007b874b32ba2b0649 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Wed, 6 Nov 2024 18:40:35 +0100 Subject: [PATCH 200/340] Fix geos proj parameters for insat 3d satellites --- satpy/readers/insat3d_img_l1b_h5.py | 5 ++++- satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py | 2 ++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/satpy/readers/insat3d_img_l1b_h5.py b/satpy/readers/insat3d_img_l1b_h5.py index dede8aefcd..0b469863fc 100644 --- a/satpy/readers/insat3d_img_l1b_h5.py +++ b/satpy/readers/insat3d_img_l1b_h5.py @@ -179,7 +179,10 @@ def get_area_def(self, ds_id): #fov = self.datatree.attrs["Field_of_View(degrees)"] fov = 18 cfac = 2 ** 16 / (fov / cols) - lfac = 2 ** 16 / (fov / lines) + + # From reverse engineering metadata from a netcdf file, we discovered + # the lfac is actually the same as cfac, ie dependend on cols, not lines! 
+ lfac = 2 ** 16 / (fov / cols) h = self.datatree.attrs["Observed_Altitude(km)"] * 1000 # WGS 84 diff --git a/satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py b/satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py index 7378078c2a..eb7c17ba9d 100644 --- a/satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py +++ b/satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py @@ -280,6 +280,8 @@ def test_filehandler_returns_area(insat_filehandler): area_def = fh.get_area_def(ds_id) _ = area_def.get_lonlats(chunks=1000) assert subsatellite_longitude == area_def.crs.to_cf()["longitude_of_projection_origin"] + np.testing.assert_allclose(area_def.area_extent, [-5618068.510660236, -5640108.009097205, + 5622075.692194229, 5644115.1906312]) def test_filehandler_has_start_and_end_time(insat_filehandler): From 6643a884703b2aae7ab715ec8c84b16e1d19491e Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Wed, 6 Nov 2024 18:47:55 +0100 Subject: [PATCH 201/340] Fix typo --- satpy/readers/insat3d_img_l1b_h5.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/insat3d_img_l1b_h5.py b/satpy/readers/insat3d_img_l1b_h5.py index 0b469863fc..aa54827e79 100644 --- a/satpy/readers/insat3d_img_l1b_h5.py +++ b/satpy/readers/insat3d_img_l1b_h5.py @@ -181,7 +181,7 @@ def get_area_def(self, ds_id): cfac = 2 ** 16 / (fov / cols) # From reverse engineering metadata from a netcdf file, we discovered - # the lfac is actually the same as cfac, ie dependend on cols, not lines! + # the lfac is actually the same as cfac, ie dependent on cols, not lines! 
lfac = 2 ** 16 / (fov / cols) h = self.datatree.attrs["Observed_Altitude(km)"] * 1000 From 7fdf859085cfc1da232a19e049234eedef42ce5c Mon Sep 17 00:00:00 2001 From: "Adam.Dybbroe" Date: Thu, 7 Nov 2024 08:12:47 +0100 Subject: [PATCH 202/340] Remove attempt to Rayleigh correct the I02 band which is beyond 0.8 micron Signed-off-by: Adam.Dybbroe --- satpy/etc/composites/viirs.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/etc/composites/viirs.yaml b/satpy/etc/composites/viirs.yaml index aa182604af..f2baec6f13 100644 --- a/satpy/etc/composites/viirs.yaml +++ b/satpy/etc/composites/viirs.yaml @@ -232,7 +232,7 @@ composites: modifiers: [sunz_corrected, rayleigh_corrected] optional_prerequisites: - name: I02 - modifiers: [sunz_corrected_iband, rayleigh_corrected_iband] + modifiers: [sunz_corrected_iband] standard_name: false_color high_resolution_band: green From aca867c2584d1450dd8ff72addf85ab15ef4acd4 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Thu, 7 Nov 2024 08:32:16 +0100 Subject: [PATCH 203/340] Fix offset --- satpy/readers/insat3d_img_l1b_h5.py | 4 ++-- satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/satpy/readers/insat3d_img_l1b_h5.py b/satpy/readers/insat3d_img_l1b_h5.py index aa54827e79..29f211f08f 100644 --- a/satpy/readers/insat3d_img_l1b_h5.py +++ b/satpy/readers/insat3d_img_l1b_h5.py @@ -194,8 +194,8 @@ def get_area_def(self, ds_id): pdict = { "cfac": cfac, "lfac": lfac, - "coff": cols / 2, - "loff": lines / 2, + "coff": cols // 2 + 1, + "loff": lines // 2, "ncols": cols, "nlines": lines, "scandir": "N2S", diff --git a/satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py b/satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py index eb7c17ba9d..3b100415a4 100644 --- a/satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py +++ b/satpy/tests/reader_tests/test_insat3d_img_l1b_h5.py @@ -280,8 +280,8 @@ def test_filehandler_returns_area(insat_filehandler): 
area_def = fh.get_area_def(ds_id) _ = area_def.get_lonlats(chunks=1000) assert subsatellite_longitude == area_def.crs.to_cf()["longitude_of_projection_origin"] - np.testing.assert_allclose(area_def.area_extent, [-5618068.510660236, -5640108.009097205, - 5622075.692194229, 5644115.1906312]) + np.testing.assert_allclose(area_def.area_extent, [-5620072.101427, -5640108.009097, + 5620072.101427, 5644115.190631]) def test_filehandler_has_start_and_end_time(insat_filehandler): From 9d6a879fd4987f286634c8425d8f57f08db2fb6c Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Thu, 7 Nov 2024 09:51:58 +0100 Subject: [PATCH 204/340] Pin flexparser before it breaks pint See https://github.com/hgrecco/pint/issues/1969 --- continuous_integration/environment.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/continuous_integration/environment.yaml b/continuous_integration/environment.yaml index ec5668c8e4..2698b32510 100644 --- a/continuous_integration/environment.yaml +++ b/continuous_integration/environment.yaml @@ -58,6 +58,7 @@ dependencies: - ephem - bokeh - pytest-xdist + - flexparser<0.4 # breaks latest pint 0.24.3. 
see https://github.com/hgrecco/pint/issues/1969 - pip: - pytest-lazy-fixtures - trollsift From 5763dfe78f2d65b041ebbd2eb6be71c2871c4162 Mon Sep 17 00:00:00 2001 From: "Adam.Dybbroe" Date: Thu, 7 Nov 2024 14:01:52 +0100 Subject: [PATCH 205/340] Bugfix to the appliance of the rayleigh correction - no correction should be applied above 0.8 micron Signed-off-by: Adam.Dybbroe --- satpy/etc/composites/abi.yaml | 4 ++-- satpy/etc/composites/ahi.yaml | 34 +++------------------------------- satpy/etc/composites/ami.yaml | 6 +++--- 3 files changed, 8 insertions(+), 36 deletions(-) diff --git a/satpy/etc/composites/abi.yaml b/satpy/etc/composites/abi.yaml index 4700aa470b..d4aa8c0185 100644 --- a/satpy/etc/composites/abi.yaml +++ b/satpy/etc/composites/abi.yaml @@ -36,7 +36,7 @@ composites: - name: C02 modifiers: [sunz_corrected, rayleigh_corrected_crefl] - name: C03 - modifiers: [sunz_corrected, rayleigh_corrected_crefl] + modifiers: [sunz_corrected] standard_name: toa_bidirectional_reflectance green_raw: @@ -710,7 +710,7 @@ composites: compositor: !!python/name:satpy.composites.SelfSharpenedRGB prerequisites: - name: C03 - modifiers: [sunz_corrected, rayleigh_corrected] + modifiers: [sunz_corrected] - name: C02 modifiers: [sunz_corrected, rayleigh_corrected] - name: green diff --git a/satpy/etc/composites/ahi.yaml b/satpy/etc/composites/ahi.yaml index 5d633056be..916e4134d3 100644 --- a/satpy/etc/composites/ahi.yaml +++ b/satpy/etc/composites/ahi.yaml @@ -214,11 +214,11 @@ composites: compositor: !!python/name:satpy.composites.SelfSharpenedRGB prerequisites: - wavelength: 1.63 - modifiers: [sunz_corrected] #, rayleigh_corrected] + modifiers: [sunz_corrected] - wavelength: 0.85 - modifiers: [sunz_corrected] #, rayleigh_corrected] + modifiers: [sunz_corrected] - wavelength: 0.635 - modifiers: [sunz_corrected] #, rayleigh_corrected] + modifiers: [sunz_corrected, rayleigh_corrected] high_resolution_band: blue standard_name: natural_color @@ -303,34 +303,6 @@ composites: - name: 
B01 standard_name: true_color_reproduction_color_stretch -# true_color_reducedsize_land: -# compositor: !!python/name:satpy.composites.GenericCompositor -# prerequisites: -# - wavelength: 0.65 -# modifiers: [reducer4, effective_solar_pathlength_corrected, -# rayleigh_corrected_reducedsize_land] -# - wavelength: 0.51 -# modifiers: [reducer2, vegetation_corrected_reduced, effective_solar_pathlength_corrected, -# rayleigh_corrected_reducedsize_land] -# - wavelength: 0.46 -# modifiers: [reducer2, effective_solar_pathlength_corrected, -# rayleigh_corrected_reducedsize_land] -# standard_name: true_color -# -# true_color_reducedsize_marine_tropical: -# compositor: !!python/name:satpy.composites.GenericCompositor -# prerequisites: -# - wavelength: 0.65 -# modifiers: [reducer4, effective_solar_pathlength_corrected, -# rayleigh_corrected_reducedsize_marine_tropical] -# - wavelength: 0.51 -# modifiers: [reducer2, vegetation_corrected_reduced, effective_solar_pathlength_corrected, -# rayleigh_corrected_reducedsize_marine_tropical] -# - wavelength: 0.46 -# modifiers: [reducer2, effective_solar_pathlength_corrected, -# rayleigh_corrected_reducedsize_marine_tropical] -# standard_name: true_color - day_microphysics_eum: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: diff --git a/satpy/etc/composites/ami.yaml b/satpy/etc/composites/ami.yaml index b0d943c5ca..8d4ef2b92d 100644 --- a/satpy/etc/composites/ami.yaml +++ b/satpy/etc/composites/ami.yaml @@ -137,11 +137,11 @@ composites: compositor: !!python/name:satpy.composites.SelfSharpenedRGB prerequisites: - name: NR016 - modifiers: [sunz_corrected] #, rayleigh_corrected] + modifiers: [sunz_corrected] - name: VI008 - modifiers: [sunz_corrected] #, rayleigh_corrected] + modifiers: [sunz_corrected] - name: VI006 - modifiers: [sunz_corrected] #, rayleigh_corrected] + modifiers: [sunz_corrected, rayleigh_corrected] high_resolution_band: blue standard_name: natural_color From 
dfd0a8940506e04a9f4c6c6f5bf55fb54ed4d04d Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Fri, 8 Nov 2024 09:29:33 +0100 Subject: [PATCH 206/340] Remove flexparser pinning A new version of pint has been released to address the previous issue --- continuous_integration/environment.yaml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/continuous_integration/environment.yaml b/continuous_integration/environment.yaml index 2698b32510..36802a29cc 100644 --- a/continuous_integration/environment.yaml +++ b/continuous_integration/environment.yaml @@ -57,8 +57,7 @@ dependencies: - pint-xarray - ephem - bokeh - - pytest-xdist - - flexparser<0.4 # breaks latest pint 0.24.3. see https://github.com/hgrecco/pint/issues/1969 + - pytest-xdist # breaks latest pint 0.24.3. see https://github.com/hgrecco/pint/issues/1969 - pip: - pytest-lazy-fixtures - trollsift From 269e5d25e05ed0697350c4d176007a6c7d46fffb Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Fri, 8 Nov 2024 10:19:42 +0100 Subject: [PATCH 207/340] Remove spurious comment --- continuous_integration/environment.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/continuous_integration/environment.yaml b/continuous_integration/environment.yaml index 36802a29cc..ec5668c8e4 100644 --- a/continuous_integration/environment.yaml +++ b/continuous_integration/environment.yaml @@ -57,7 +57,7 @@ dependencies: - pint-xarray - ephem - bokeh - - pytest-xdist # breaks latest pint 0.24.3. 
see https://github.com/hgrecco/pint/issues/1969 + - pytest-xdist - pip: - pytest-lazy-fixtures - trollsift From fb8650b6c5f9dd579764d6c0ca7d1c5c7d5e1011 Mon Sep 17 00:00:00 2001 From: Adam Dybbroe Date: Fri, 8 Nov 2024 15:14:10 +0100 Subject: [PATCH 208/340] Update satpy/etc/composites/ahi.yaml --- satpy/etc/composites/ahi.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/etc/composites/ahi.yaml b/satpy/etc/composites/ahi.yaml index 916e4134d3..179f36c97c 100644 --- a/satpy/etc/composites/ahi.yaml +++ b/satpy/etc/composites/ahi.yaml @@ -218,7 +218,7 @@ composites: - wavelength: 0.85 modifiers: [sunz_corrected] - wavelength: 0.635 - modifiers: [sunz_corrected, rayleigh_corrected] + modifiers: [sunz_corrected] high_resolution_band: blue standard_name: natural_color From 4a8d8ff03fa50fea615885bb4af23c3e1bbcb26d Mon Sep 17 00:00:00 2001 From: Adam Dybbroe Date: Fri, 8 Nov 2024 15:15:16 +0100 Subject: [PATCH 209/340] Update satpy/etc/composites/ami.yaml --- satpy/etc/composites/ami.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/etc/composites/ami.yaml b/satpy/etc/composites/ami.yaml index 8d4ef2b92d..a58463a93a 100644 --- a/satpy/etc/composites/ami.yaml +++ b/satpy/etc/composites/ami.yaml @@ -141,7 +141,7 @@ composites: - name: VI008 modifiers: [sunz_corrected] - name: VI006 - modifiers: [sunz_corrected, rayleigh_corrected] + modifiers: [sunz_corrected] high_resolution_band: blue standard_name: natural_color From bd29bb18a0ecf28dc49c7851057b8a67bccae03f Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Fri, 8 Nov 2024 16:40:56 +0100 Subject: [PATCH 210/340] Update changelog for v0.53.0 --- CHANGELOG.md | 35 +++++++++++++++++++++++++++++++++++ 1 file changed, 35 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index eefa202dff..acc4a07d30 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,38 @@ +## Version 0.53.0 (2024/11/08) + +### Issues Closed + +* [Issue 
2960](https://github.com/pytroll/satpy/issues/2960) - netcdf4 version causes error ([PR 2961](https://github.com/pytroll/satpy/pull/2961) by [@sfinkens](https://github.com/sfinkens)) +* [Issue 2952](https://github.com/pytroll/satpy/issues/2952) - Altitude, LandCover, and LandSeaMask are missing in the `mersi_ll_l1b` reader for FY3E L1B ([PR 2953](https://github.com/pytroll/satpy/pull/2953) by [@chorng](https://github.com/chorng)) +* [Issue 2948](https://github.com/pytroll/satpy/issues/2948) - "Missing" platform abbreviation causes unexpected error when loading data array in Scene ([PR 2949](https://github.com/pytroll/satpy/pull/2949) by [@joleenf](https://github.com/joleenf)) + +In this release 3 issues were closed. + +### Pull Requests Merged + +#### Bugs fixed + +* [PR 2971](https://github.com/pytroll/satpy/pull/2971) - Pin flexparser before it breaks pint +* [PR 2970](https://github.com/pytroll/satpy/pull/2970) - Remove rayleigh correction on VIIRS false_color for I02 band +* [PR 2968](https://github.com/pytroll/satpy/pull/2968) - Remove unneeded call to private scipy function in SAR reader +* [PR 2965](https://github.com/pytroll/satpy/pull/2965) - Fix MODIS readers chunking compatibility with newer dask +* [PR 2961](https://github.com/pytroll/satpy/pull/2961) - Fix CF writer crashing with netcdf development version ([2960](https://github.com/pytroll/satpy/issues/2960)) +* [PR 2957](https://github.com/pytroll/satpy/pull/2957) - Bugfix the VIIRS lowres version of the day-microphysics. 
+* [PR 2956](https://github.com/pytroll/satpy/pull/2956) - Fix cira stretch upcasting the data +* [PR 2954](https://github.com/pytroll/satpy/pull/2954) - Fix Rayleigh correction to use the same datatype as the input data +* [PR 2950](https://github.com/pytroll/satpy/pull/2950) - Fix dtype promotion in `SunZenithReduction` +* [PR 2949](https://github.com/pytroll/satpy/pull/2949) - Add more platforms to VIIRS EDR reader ([2948](https://github.com/pytroll/satpy/issues/2948)) +* [PR 2930](https://github.com/pytroll/satpy/pull/2930) - Fix data type when getting a line offset for a segmented hrit_jma + +#### Features added + +* [PR 2973](https://github.com/pytroll/satpy/pull/2973) - Remove flexparser pinning +* [PR 2953](https://github.com/pytroll/satpy/pull/2953) - Add altitude, landcover, and landseamask to mersi_ll_l1b reader ([2952](https://github.com/pytroll/satpy/issues/2952)) +* [PR 2946](https://github.com/pytroll/satpy/pull/2946) - Update MODIS L1b reader with additional geoinfo datasets + +In this release 14 pull requests were closed. + + ## Version 0.52.1 (2024/10/23) ### Issues Closed From baf5005582cb36a483bb17ac095afc53a5424c40 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Mon, 11 Nov 2024 15:47:13 +0200 Subject: [PATCH 211/340] Make slope, intercept and cal coeffs float32 --- satpy/tests/reader_tests/test_mersi_l1b.py | 69 +++++++++++++--------- 1 file changed, 40 insertions(+), 29 deletions(-) diff --git a/satpy/tests/reader_tests/test_mersi_l1b.py b/satpy/tests/reader_tests/test_mersi_l1b.py index e9b8c45ae6..6ab26a25ab 100644 --- a/satpy/tests/reader_tests/test_mersi_l1b.py +++ b/satpy/tests/reader_tests/test_mersi_l1b.py @@ -31,23 +31,27 @@ def _get_calibration(num_scans, ftype): calibration = { f"Calibration/{ftype}_Cal_Coeff": xr.DataArray( - da.ones((19, 3), chunks=1024), - attrs={"Slope": np.array([1.] * 19), "Intercept": np.array([0.] * 19)}, + da.ones((19, 3), chunks=1024, dtype=np.float32), + attrs={"Slope": np.array([1.] 
* 19, dtype=np.float32), + "Intercept": np.array([0.] * 19, dtype=np.float32)}, dims=("_bands", "_coeffs")), "Calibration/Solar_Irradiance": xr.DataArray( - da.ones((19, ), chunks=1024), - attrs={"Slope": np.array([1.] * 19), "Intercept": np.array([0.] * 19)}, + da.ones((19, ), chunks=1024, dtype=np.float32), + attrs={"Slope": np.array([1.] * 19, dtype=np.float32), + "Intercept": np.array([0.] * 19, dtype=np.float32)}, dims=("_bands")), "Calibration/Solar_Irradiance_LL": xr.DataArray( - da.ones((1, ), chunks=1024), - attrs={"Slope": np.array([1.]), "Intercept": np.array([0.])}, + da.ones((1, ), chunks=1024, dtype=np.float32), + attrs={"Slope": np.array([1.], dtype=np.float32), + "Intercept": np.array([0.], dtype=np.float32)}, dims=("_bands")), "Calibration/IR_Cal_Coeff": xr.DataArray( - da.ones((6, 4, num_scans), chunks=1024), - attrs={"Slope": np.array([1.] * 6), "Intercept": np.array([0.] * 6)}, + da.ones((6, 4, num_scans), chunks=1024, dtype=np.float32), + attrs={"Slope": np.array([1.] * 6, dtype=np.float32), + "Intercept": np.array([0.] * 6, dtype=np.float32)}, dims=("_bands", "_coeffs", "_scans")), } return calibration @@ -62,7 +66,7 @@ def _get_250m_data(num_scans, rows_per_scan, num_cols, filetype_info): def_attrs = {fill_value_name: 65535, "valid_range": [0, 4095], - "Slope": np.array([1.] * 1), "Intercept": np.array([0.] * 1) + "Slope": np.array([1.] * 1, dtype=np.float32), "Intercept": np.array([0.] * 1, dtype=np.float32) } nounits_attrs = {**def_attrs, **{"units": "NO"}} radunits_attrs = {**def_attrs, **{"units": "mW/ (m2 cm-1 sr)"}} @@ -116,7 +120,7 @@ def _get_500m_data(num_scans, rows_per_scan, num_cols): da.ones((5, num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs={ - "Slope": np.array([1.] * 5), "Intercept": np.array([0.] * 5), + "Slope": np.array([1.] * 5, dtype=np.float32), "Intercept": np.array([0.] 
* 5, dtype=np.float32), "FillValue": 65535, "units": "NO", "valid_range": [0, 4095], @@ -128,7 +132,7 @@ def _get_500m_data(num_scans, rows_per_scan, num_cols): da.ones((3, num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), attrs={ - "Slope": np.array([1.] * 3), "Intercept": np.array([0.] * 3), + "Slope": np.array([1.] * 3, dtype=np.float32), "Intercept": np.array([0.] * 3, dtype=np.float32), "FillValue": 65535, "units": "mW/ (m2 cm-1 sr)", "valid_range": [0, 25000], @@ -150,7 +154,8 @@ def _get_1km_data(num_scans, rows_per_scan, num_cols, filetype_info): data = {"Data/EV_1KM_LL": xr.DataArray(da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), - attrs={"Slope": np.array([1.]), "Intercept": np.array([0.]), + attrs={"Slope": np.array([1.], dtype=np.float32), + "Intercept": np.array([0.], dtype=np.float32), "FillValue": 65535, "units": "NO", "valid_range": [0, 4095], @@ -158,7 +163,8 @@ def _get_1km_data(num_scans, rows_per_scan, num_cols, filetype_info): dims=("_rows", "_cols")), f"{key_prefix}EV_1KM_RefSB": xr.DataArray(da.ones((15, num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), - attrs={"Slope": np.array([1.] * 15), "Intercept": np.array([0.] * 15), + attrs={"Slope": np.array([1.] * 15, dtype=np.float32), + "Intercept": np.array([0.] * 15, dtype=np.float32), fill_value_name: 65535, "units": "NO", "valid_range": [0, 4095], @@ -166,7 +172,8 @@ def _get_1km_data(num_scans, rows_per_scan, num_cols, filetype_info): dims=("_ref_bands", "_rows", "_cols")), "Data/EV_1KM_Emissive": xr.DataArray(da.ones((4, num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), - attrs={"Slope": np.array([1.] * 4), "Intercept": np.array([0.] * 4), + attrs={"Slope": np.array([1.] * 4, dtype=np.float32), + "Intercept": np.array([0.] 
* 4, dtype=np.float32), "FillValue": 65535, "units": "mW/ (m2 cm-1 sr)", "valid_range": [0, 25000], @@ -174,7 +181,8 @@ def _get_1km_data(num_scans, rows_per_scan, num_cols, filetype_info): dims=("_ir_bands", "_rows", "_cols")), f"{key_prefix}EV_250_Aggr.1KM_RefSB": xr.DataArray(da.ones((4, num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), - attrs={"Slope": np.array([1.] * 4), "Intercept": np.array([0.] * 4), + attrs={"Slope": np.array([1.] * 4, dtype=np.float32), + "Intercept": np.array([0.] * 4, dtype=np.float32), fill_value_name: 65535, "units": "NO", "valid_range": [0, 4095], @@ -182,14 +190,16 @@ def _get_1km_data(num_scans, rows_per_scan, num_cols, filetype_info): dims=("_ref250_bands", "_rows", "_cols")), f"{key_prefix}EV_250_Aggr.1KM_Emissive": xr.DataArray(da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), - attrs={"Slope": np.array([1.]), "Intercept": np.array([0.]), + attrs={"Slope": np.array([1.], dtype=np.float32), + "Intercept": np.array([0.], dtype=np.float32), fill_value_name: 65535, "units": radunits, "valid_range": [0, 4095], "long_name": b"250m Emissive Bands Earth View Science Data Aggregated to 1 km"}, dims=("_rows", "_cols")) if is_mersi1 else xr.DataArray(da.ones((2, num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.uint16), - attrs={"Slope": np.array([1.] * 2), "Intercept": np.array([0.] * 2), + attrs={"Slope": np.array([1.] * 2, dtype=np.float32), + "Intercept": np.array([0.] * 2, dtype=np.float32), "FillValue": 65535, "units": "mW/ (m2 cm-1 sr)", "valid_range": [0, 4095], @@ -199,7 +209,8 @@ def _get_1km_data(num_scans, rows_per_scan, num_cols, filetype_info): xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024), attrs={ - "Slope": np.array([.01] * 1), "Intercept": np.array([0.] * 1), + "Slope": np.array([.01] * 1, dtype=np.float32), + "Intercept": np.array([0.] 
* 1, dtype=np.float32), "units": "degree", "valid_range": [0, 28000], }, @@ -212,7 +223,7 @@ def _get_250m_ll_data(num_scans, rows_per_scan, num_cols): # Set some default attributes def_attrs = {"FillValue": 65535, "valid_range": [0, 4095], - "Slope": np.array([1.]), "Intercept": np.array([0.]), + "Slope": np.array([1.], dtype=np.float32), "Intercept": np.array([0.], dtype=np.float32), "long_name": b"250m Earth View Science Data", "units": "mW/ (m2 cm-1 sr)", } @@ -237,7 +248,7 @@ def _get_geo_data(num_scans, rows_per_scan, num_cols, prefix): xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024), attrs={ - "Slope": np.array([1.] * 1), "Intercept": np.array([0.] * 1), + "Slope": np.array([1.] * 1, dtype=np.float32), "Intercept": np.array([0.] * 1, dtype=np.float32), "units": "degree", "valid_range": [-90, 90], }, @@ -246,7 +257,7 @@ def _get_geo_data(num_scans, rows_per_scan, num_cols, prefix): xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024), attrs={ - "Slope": np.array([1.] * 1), "Intercept": np.array([0.] * 1), + "Slope": np.array([1.] * 1, dtype=np.float32), "Intercept": np.array([0.] * 1, dtype=np.float32), "units": "degree", "valid_range": [-180, 180], }, @@ -255,7 +266,7 @@ def _get_geo_data(num_scans, rows_per_scan, num_cols, prefix): xr.DataArray( da.ones((num_scans * rows_per_scan, num_cols), chunks=1024), attrs={ - "Slope": np.array([.01] * 1), "Intercept": np.array([0.] * 1), + "Slope": np.array([.01] * 1, dtype=np.float32), "Intercept": np.array([0.] 
* 1, dtype=np.float32), "units": "degree", "valid_range": [0, 28000], }, @@ -288,13 +299,13 @@ def get_test_content(self, filename, filename_info, filetype_info): "/attr/Observing Ending Time": "18:38:36.728", } fy3a_attrs = { - "/attr/VIR_Cal_Coeff": np.array([0.0, 1.0, 0.0] * 19), + "/attr/VIR_Cal_Coeff": np.array([0.0, 1.0, 0.0] * 19, dtype=np.float32), } fy3b_attrs = { - "/attr/VIS_Cal_Coeff": np.array([0.0, 1.0, 0.0] * 19), + "/attr/VIS_Cal_Coeff": np.array([0.0, 1.0, 0.0] * 19, dtype=np.float32), } fy3d_attrs = { - "/attr/Solar_Irradiance": np.array([1.0] * 19), + "/attr/Solar_Irradiance": np.array([1.0] * 19, dtype=np.float32), } global_attrs, ftype = self._set_sensor_attrs(global_attrs) @@ -384,11 +395,11 @@ def _add_tbb_coefficients(self, global_attrs): return if "_1000" in self.filetype_info["file_type"]: - global_attrs["/attr/TBB_Trans_Coefficient_A"] = np.array([1.0] * 6) - global_attrs["/attr/TBB_Trans_Coefficient_B"] = np.array([0.0] * 6) + global_attrs["/attr/TBB_Trans_Coefficient_A"] = np.array([1.0] * 6, dtype=np.float32) + global_attrs["/attr/TBB_Trans_Coefficient_B"] = np.array([0.0] * 6, dtype=np.float32) else: - global_attrs["/attr/TBB_Trans_Coefficient_A"] = np.array([0.0] * 6) - global_attrs["/attr/TBB_Trans_Coefficient_B"] = np.array([0.0] * 6) + global_attrs["/attr/TBB_Trans_Coefficient_A"] = np.array([0.0] * 6, dtype=np.float32) + global_attrs["/attr/TBB_Trans_Coefficient_B"] = np.array([0.0] * 6, dtype=np.float32) @property def _num_cols_for_file_type(self): From d809f1edfd34ef458dbfea25bb59eff674c42b7e Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Mon, 11 Nov 2024 15:47:55 +0200 Subject: [PATCH 212/340] Test the data dtype --- satpy/tests/reader_tests/test_mersi_l1b.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/satpy/tests/reader_tests/test_mersi_l1b.py b/satpy/tests/reader_tests/test_mersi_l1b.py index 6ab26a25ab..6f287de368 100644 --- a/satpy/tests/reader_tests/test_mersi_l1b.py +++ 
b/satpy/tests/reader_tests/test_mersi_l1b.py @@ -523,6 +523,9 @@ def test_all_resolutions(self): _test_multi_resolutions(available_datasets, self.ir_250_bands, resolution, ir_num_results) res = reader.load(self.bands_1000 + self.bands_250) + for i in res: + assert res[i].dtype == np.float32 + if resolution != "250": assert len(res) == len(self.bands_1000 + self.bands_250) else: From cbc872eac0f0c6474c46953612c6478156e92582 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Mon, 11 Nov 2024 15:48:34 +0200 Subject: [PATCH 213/340] Fix data masking to preserve dtype --- satpy/readers/mersi_l1b.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/mersi_l1b.py b/satpy/readers/mersi_l1b.py index 5b1a960031..a6db73bee5 100644 --- a/satpy/readers/mersi_l1b.py +++ b/satpy/readers/mersi_l1b.py @@ -182,7 +182,7 @@ def _mask_data(self, data, dataset_id, attrs): attrs["_FillValue"] = fill_value new_fill = data.dtype.type(fill_value) else: - new_fill = np.nan + new_fill = np.float32(np.nan) try: # Due to a bug in the valid_range upper limit in the 10.8(24) and 12.0(25) # in the HDF data, this is hardcoded here. 
From 85f85af171be16fda505ab87350ef5e62847d32a Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Mon, 11 Nov 2024 18:40:52 +0200 Subject: [PATCH 214/340] Test also computed values --- satpy/tests/reader_tests/test_mersi_l1b.py | 1 + 1 file changed, 1 insertion(+) diff --git a/satpy/tests/reader_tests/test_mersi_l1b.py b/satpy/tests/reader_tests/test_mersi_l1b.py index 6f287de368..c5e540bc41 100644 --- a/satpy/tests/reader_tests/test_mersi_l1b.py +++ b/satpy/tests/reader_tests/test_mersi_l1b.py @@ -525,6 +525,7 @@ def test_all_resolutions(self): res = reader.load(self.bands_1000 + self.bands_250) for i in res: assert res[i].dtype == np.float32 + assert res[i].values.dtype == np.float32 if resolution != "250": assert len(res) == len(self.bands_1000 + self.bands_250) From 3bb6db5894e74e7a00af4ffe7f99b9243590c025 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Tue, 12 Nov 2024 09:24:19 +0200 Subject: [PATCH 215/340] Fix angle dtypes in tests --- satpy/tests/reader_tests/test_mersi_l1b.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/satpy/tests/reader_tests/test_mersi_l1b.py b/satpy/tests/reader_tests/test_mersi_l1b.py index c5e540bc41..e5aaa6a571 100644 --- a/satpy/tests/reader_tests/test_mersi_l1b.py +++ b/satpy/tests/reader_tests/test_mersi_l1b.py @@ -246,25 +246,25 @@ def _get_geo_data(num_scans, rows_per_scan, num_cols, prefix): geo = { prefix + "Longitude": xr.DataArray( - da.ones((num_scans * rows_per_scan, num_cols), chunks=1024), + da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.float64), attrs={ - "Slope": np.array([1.] * 1, dtype=np.float32), "Intercept": np.array([0.] * 1, dtype=np.float32), + "Slope": np.array([1.] * 1, dtype=np.float64), "Intercept": np.array([0.] 
* 1, dtype=np.float64), "units": "degree", "valid_range": [-90, 90], }, dims=("_rows", "_cols")), prefix + "Latitude": xr.DataArray( - da.ones((num_scans * rows_per_scan, num_cols), chunks=1024), + da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.float64), attrs={ - "Slope": np.array([1.] * 1, dtype=np.float32), "Intercept": np.array([0.] * 1, dtype=np.float32), + "Slope": np.array([1.] * 1, dtype=np.float64), "Intercept": np.array([0.] * 1, dtype=np.float64), "units": "degree", "valid_range": [-180, 180], }, dims=("_rows", "_cols")), prefix + "SensorZenith": xr.DataArray( - da.ones((num_scans * rows_per_scan, num_cols), chunks=1024), + da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.int16), attrs={ "Slope": np.array([.01] * 1, dtype=np.float32), "Intercept": np.array([0.] * 1, dtype=np.float32), "units": "degree", From fd2cec62d51fcbd2c0297073ba36cb37b33e7a02 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Tue, 12 Nov 2024 09:28:23 +0200 Subject: [PATCH 216/340] Fix 1 km SensorZenith dtype in tests --- satpy/tests/reader_tests/test_mersi_l1b.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_mersi_l1b.py b/satpy/tests/reader_tests/test_mersi_l1b.py index e5aaa6a571..3bdcfdb66f 100644 --- a/satpy/tests/reader_tests/test_mersi_l1b.py +++ b/satpy/tests/reader_tests/test_mersi_l1b.py @@ -207,7 +207,7 @@ def _get_1km_data(num_scans, rows_per_scan, num_cols, filetype_info): dims=("_ir250_bands", "_rows", "_cols")), f"{key_prefix}SensorZenith": xr.DataArray( - da.ones((num_scans * rows_per_scan, num_cols), chunks=1024), + da.ones((num_scans * rows_per_scan, num_cols), chunks=1024, dtype=np.int16), attrs={ "Slope": np.array([.01] * 1, dtype=np.float32), "Intercept": np.array([0.] 
* 1, dtype=np.float32), From 1fe920ee7fad8d6d8efca1cfd99043030528f6db Mon Sep 17 00:00:00 2001 From: clement laplace Date: Tue, 19 Nov 2024 07:54:04 +0000 Subject: [PATCH 217/340] feat: Daskify the non accumumulated product for the li_l2_nc reader --- satpy/readers/li_l2_nc.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/satpy/readers/li_l2_nc.py b/satpy/readers/li_l2_nc.py index 587039fa46..8e9a9ec2b9 100644 --- a/satpy/readers/li_l2_nc.py +++ b/satpy/readers/li_l2_nc.py @@ -111,6 +111,8 @@ def get_dataset(self, dataset_id, ds_info=None): var_with_swath_coord = self.is_var_with_swath_coord(dataset_id) if var_with_swath_coord and self.with_area_def: data_array = self.get_array_on_fci_grid(data_array) + else : + data_array.data = da.from_array(data_array.data) return data_array def get_area_def(self, dsid): @@ -161,6 +163,7 @@ def get_array_on_fci_grid(self, data_array: xr.DataArray): data_2d = da.where(data_2d > 0, data_2d, np.nan) xarr = xr.DataArray(da.asarray(data_2d, CHUNK_SIZE), dims=("y", "x")) + xarr.attrs = attrs return xarr From b465b0a1c8f86e5d52798bc1487c25a12a9fd249 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Benjamin=20R=C3=B6sner?= Date: Tue, 19 Nov 2024 10:46:52 +0100 Subject: [PATCH 218/340] fix: test --- satpy/tests/scene_tests/test_conversions.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/satpy/tests/scene_tests/test_conversions.py b/satpy/tests/scene_tests/test_conversions.py index 2e7d1010fa..96a19b7ed7 100644 --- a/satpy/tests/scene_tests/test_conversions.py +++ b/satpy/tests/scene_tests/test_conversions.py @@ -189,10 +189,11 @@ def test_to_xarray_dataset_with_conflicting_variables(self): assert isinstance(xrds, xr.Dataset) assert "acq_time" not in xrds.coords - xrds = scn.to_xarray_dataset(compat="override") + # override: pick variable from first dataset + xrds = scn.to_xarray_dataset(datasets=["ds1", "ds2"], compat="override") assert isinstance(xrds, xr.Dataset) assert "acq_time" in xrds.coords - 
xr.testing.assert_equal(xrds["acq_time"], ds2["acq_time"]) + xr.testing.assert_equal(xrds["acq_time"], ds["acq_time"]) @pytest.fixture def multi_area_scn(self): From b777de5af66cfaa3b8099f91559aea1265c3a1a6 Mon Sep 17 00:00:00 2001 From: Pouria Khalaj Date: Tue, 19 Nov 2024 22:59:34 +0100 Subject: [PATCH 219/340] Fix bug #2981 --- satpy/readers/seviri_base.py | 2 +- satpy/tests/reader_tests/test_seviri_l1b_native.py | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/satpy/readers/seviri_base.py b/satpy/readers/seviri_base.py index aa224b0d6a..c9ad563899 100644 --- a/satpy/readers/seviri_base.py +++ b/satpy/readers/seviri_base.py @@ -700,7 +700,7 @@ def calibrate(self, data, calibration): "brightness_temperature"]: gain, offset = self.get_gain_offset() res = self._algo.convert_to_radiance( - data.astype(np.float32), gain, offset + data.astype(np.float32), np.float32(gain), np.float32(offset) ) else: raise ValueError( diff --git a/satpy/tests/reader_tests/test_seviri_l1b_native.py b/satpy/tests/reader_tests/test_seviri_l1b_native.py index e7a5d0f5f3..c5818bfa6a 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_native.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_native.py @@ -1314,6 +1314,7 @@ def test_read_physical_seviri_nat_file(full_path): with warnings.catch_warnings(): warnings.filterwarnings("ignore", category=UserWarning) scene.load(["VIS006"]) + assert scene["VIS006"].dtype == np.float32 assert scene["VIS006"].shape == (3712, 3712) assert isinstance(scene["VIS006"], xr.core.dataarray.DataArray) From 6d8382923dfc0ebe9c53c10a530996830e38b2c5 Mon Sep 17 00:00:00 2001 From: clement laplace Date: Wed, 20 Nov 2024 00:50:16 +0000 Subject: [PATCH 220/340] fix: Correct the issue related to the test_li_l2_nc --- satpy/readers/li_base_nc.py | 1 - satpy/readers/li_l2_nc.py | 4 +++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/satpy/readers/li_base_nc.py b/satpy/readers/li_base_nc.py index cefbcc7e55..a65129c904 100644 --- 
a/satpy/readers/li_base_nc.py +++ b/satpy/readers/li_base_nc.py @@ -742,7 +742,6 @@ def get_dataset(self, dataset_id, ds_info=None): # Retrieve default infos if missing: if ds_info is None: ds_info = self.get_dataset_infos(dataset_id["name"]) - # check for potential error: if ds_info is None: raise KeyError(f"No dataset registered for {dataset_id}") diff --git a/satpy/readers/li_l2_nc.py b/satpy/readers/li_l2_nc.py index 8e9a9ec2b9..c0faf0150b 100644 --- a/satpy/readers/li_l2_nc.py +++ b/satpy/readers/li_l2_nc.py @@ -112,7 +112,9 @@ def get_dataset(self, dataset_id, ds_info=None): if var_with_swath_coord and self.with_area_def: data_array = self.get_array_on_fci_grid(data_array) else : - data_array.data = da.from_array(data_array.data) + if data_array is not None: + if not isinstance(data_array.data,da.Array): + data_array.data = da.from_array(data_array.data) return data_array def get_area_def(self, dsid): From da848757a4f0bb786d53d67867aeeb374547b278 Mon Sep 17 00:00:00 2001 From: pkhalaj <11797985+pkhalaj@users.noreply.github.com> Date: Wed, 20 Nov 2024 09:15:48 +0100 Subject: [PATCH 221/340] Check that the computed values have the correct `dtype` . 
Co-authored-by: Panu Lahtinen --- satpy/tests/reader_tests/test_seviri_l1b_native.py | 1 + 1 file changed, 1 insertion(+) diff --git a/satpy/tests/reader_tests/test_seviri_l1b_native.py b/satpy/tests/reader_tests/test_seviri_l1b_native.py index c5818bfa6a..5c6a86596e 100644 --- a/satpy/tests/reader_tests/test_seviri_l1b_native.py +++ b/satpy/tests/reader_tests/test_seviri_l1b_native.py @@ -1315,6 +1315,7 @@ def test_read_physical_seviri_nat_file(full_path): warnings.filterwarnings("ignore", category=UserWarning) scene.load(["VIS006"]) assert scene["VIS006"].dtype == np.float32 + assert scene["VIS006"].values.dtype == np.float32 assert scene["VIS006"].shape == (3712, 3712) assert isinstance(scene["VIS006"], xr.core.dataarray.DataArray) From 9871d1a4a83d5b9c4a9baaebfb5f2e59a2ef36b3 Mon Sep 17 00:00:00 2001 From: clement laplace Date: Wed, 20 Nov 2024 11:17:50 +0000 Subject: [PATCH 222/340] test : Verify that all the dataset encapsulate a dask array --- satpy/tests/reader_tests/test_li_l2_nc.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/satpy/tests/reader_tests/test_li_l2_nc.py b/satpy/tests/reader_tests/test_li_l2_nc.py index 36cc930683..13981a37f4 100644 --- a/satpy/tests/reader_tests/test_li_l2_nc.py +++ b/satpy/tests/reader_tests/test_li_l2_nc.py @@ -19,6 +19,7 @@ import os from unittest import mock +import dask.array as da import numpy as np import pytest import xarray as xr @@ -128,6 +129,7 @@ def _test_dataset_variable(self, var_params, sname=""): res = self.get_variable_dataset(dataset_info, dname, handler) assert res.shape == shape assert res.dims[0] == "y" + assert isinstance(res.data,da.Array) # Should retrieve content with fullname key: full_name = self.create_fullname_key(desc, var_path, dname, sname=sname) # Note: 'content' is not recognized as a valid member of the class below From fa56be53f8f78660b76f95bde7d382cb6552edd9 Mon Sep 17 00:00:00 2001 From: clement laplace Date: Wed, 20 Nov 2024 13:55:43 +0000 Subject: [PATCH 223/340] typo : put 
space into the line 116 of readers/li_l2_nc.py --- satpy/readers/li_l2_nc.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/li_l2_nc.py b/satpy/readers/li_l2_nc.py index c0faf0150b..e69b6ffe5b 100644 --- a/satpy/readers/li_l2_nc.py +++ b/satpy/readers/li_l2_nc.py @@ -113,7 +113,7 @@ def get_dataset(self, dataset_id, ds_info=None): data_array = self.get_array_on_fci_grid(data_array) else : if data_array is not None: - if not isinstance(data_array.data,da.Array): + if not isinstance(data_array.data, da.Array): data_array.data = da.from_array(data_array.data) return data_array From d475533960e9c2425546ea72e0787880d3987589 Mon Sep 17 00:00:00 2001 From: Yufei Zhu Date: Wed, 20 Nov 2024 21:21:30 +0000 Subject: [PATCH 224/340] add the new name of GOES-R ABI L2 Cloud Particle Size product into abi_l2_nc.yaml --- satpy/etc/readers/abi_l2_nc.yaml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/satpy/etc/readers/abi_l2_nc.yaml b/satpy/etc/readers/abi_l2_nc.yaml index 3c3a94cc40..13447231a8 100644 --- a/satpy/etc/readers/abi_l2_nc.yaml +++ b/satpy/etc/readers/abi_l2_nc.yaml @@ -211,6 +211,10 @@ datasets: name: PSDN file_type: abi_l2_cpsn file_key: PSD + cloud_particle_size_new: + name: CPS + file_type: abi_l2_cps + file_key: CPS # --- Cloud Top Pressure --- cloud_top_pressure: From 075eebea1416eba7d852c9724c865e9d585a9a9f Mon Sep 17 00:00:00 2001 From: poeppelmann Date: Mon, 5 Aug 2024 12:41:24 +0200 Subject: [PATCH 225/340] add behave tests --- satpy/tests/behave/create_reference.py | 28 +++++ satpy/tests/behave/download_data.py | 106 ++++++++++++++++++ .../behave/features/image_comparison.feature | 11 ++ .../behave/features/steps/image_comparison.py | 97 ++++++++++++++++ satpy/tests/behave/modify_image.py | 27 +++++ satpy/tests/behave/test_install.py | 89 +++++++++++++++ satpy/tests/behave/test_results.txt | 8 ++ 7 files changed, 366 insertions(+) create mode 100644 satpy/tests/behave/create_reference.py create mode 100644 
satpy/tests/behave/download_data.py create mode 100644 satpy/tests/behave/features/image_comparison.feature create mode 100644 satpy/tests/behave/features/steps/image_comparison.py create mode 100644 satpy/tests/behave/modify_image.py create mode 100644 satpy/tests/behave/test_install.py create mode 100644 satpy/tests/behave/test_results.txt diff --git a/satpy/tests/behave/create_reference.py b/satpy/tests/behave/create_reference.py new file mode 100644 index 0000000000..897f542a90 --- /dev/null +++ b/satpy/tests/behave/create_reference.py @@ -0,0 +1,28 @@ +from dask.diagnostics import ProgressBar +from satpy import Scene +from glob import glob +import os +import warnings +import dask + +os.environ['OMP_NUM_THREADS'] = os.environ['MKL_NUM_THREADS'] = '2' +os.environ['PYTROLL_CHUNK_SIZE'] = '1024' +warnings.simplefilter('ignore') +dask.config.set(scheduler='threads', num_workers=4) + +# Get the list of satellite files to open +satellite = "GOES16" +filenames = glob(f'./satellite_data/{satellite}/*.nc') + +scn = Scene(reader='abi_l1b', filenames=filenames) + +# what composites Satpy knows how to make and that it has the inputs for? 
+print(scn.available_composite_names()) + +composite = 'airmass' +scn.load([composite]) +with ProgressBar(): + scn.save_datasets(writer='simple_image', filename=f'./features/data/reference/reference_image_{satellite}_{composite}.png') + + + diff --git a/satpy/tests/behave/download_data.py b/satpy/tests/behave/download_data.py new file mode 100644 index 0000000000..f3c8c794cc --- /dev/null +++ b/satpy/tests/behave/download_data.py @@ -0,0 +1,106 @@ +#!/usr/bin/env python +"""Download test data and ancillary data for running this tutorial.""" + +import os +import math +import requests +from zipfile import ZipFile +from tqdm import tqdm + +TUTORIAL_ROOT = os.path.dirname(os.path.abspath(__file__)) + + +def download_pyspectral_luts(): + print("Downloading lookup tables used by pyspectral...") + from pyspectral.utils import download_luts, download_rsr + download_luts() + download_rsr() + return True + + +def _download_data_zip(url, output_filename): + if os.path.isfile(output_filename): + print("Data zip file already exists, won't re-download: {}".format(output_filename)) + return True + + print("Downloading {}".format(url)) + r = requests.get(url, stream=True) + + # Total size in bytes. 
+ total_size = int(r.headers.get('content-length', 0)) + block_size = 1024 + wrote = 0 + with open(output_filename, 'wb') as f: + for data in tqdm(r.iter_content(block_size), total=math.ceil(total_size//block_size), unit='KB', unit_scale=True): + wrote += len(data) + f.write(data) + if total_size != 0 and wrote != total_size: + print("ERROR: something went wrong downloading {}".format(url)) + return False + return True + + +def _unzip(filename, output_dir): + print("Extracting {}".format(filename)) + try: + with ZipFile(filename, 'r') as zip_obj: + zip_obj.extractall(output_dir) + except (IOError, OSError): + print("FAIL: Could not extract {}".format(filename)) + return False + return True + + +def _download_and_unzip(url, output_dir): + filename = os.path.basename(url) + if _download_data_zip(url, filename): + return _unzip(filename, output_dir) + return False + + +def download_test_data(): + cwd = os.getcwd() + os.chdir(TUTORIAL_ROOT) + + ret = _download_and_unzip( + 'https://bin.ssec.wisc.edu/pub/davidh/20180511_texas_fire_abi_l1b_conus.zip', + os.path.join('data', 'abi_l1b') + ) + ret &= _download_and_unzip( + 'https://bin.ssec.wisc.edu/pub/davidh/20180511_texas_fire_abi_l1b_meso.zip', + os.path.join('data', 'abi_l1b') + ) + ret &= _download_and_unzip( + 'https://bin.ssec.wisc.edu/pub/davidh/20180511_texas_fire_viirs_sdr.zip', + os.path.join('data', 'viirs_sdr') + ) + os.chdir(cwd) + return ret + + +def main(): + import argparse + parser = argparse.ArgumentParser(description="Download data necessary for the Satpy tutorial") + parser.add_argument('--luts-only', action='store_true', + help="Only download LUTs for pyspectral operation") + parser.add_argument('--data-only', action='store_true', + help="Only download test data") + args = parser.parse_args() + + ret = True + if not args.data_only: + ret &= download_pyspectral_luts() + if not args.luts_only: + ret &= download_test_data() + + if ret: + print("Downloaded `.zip` files can now be deleted.") + 
print("SUCCESS") + else: + print("FAIL") + return int(not ret) + + +if __name__ == "__main__": + import sys + sys.exit(main()) \ No newline at end of file diff --git a/satpy/tests/behave/features/image_comparison.feature b/satpy/tests/behave/features/image_comparison.feature new file mode 100644 index 0000000000..a2392bd27a --- /dev/null +++ b/satpy/tests/behave/features/image_comparison.feature @@ -0,0 +1,11 @@ +Feature: Image Comparison + + Scenario Outline: Compare generated image with reference image + Given I have a reference image file from + When I generate a new image file from + Then the generated image should be the same as the reference image + + Examples: + |satellite |composite | + |GOES17 |airmass | + |GOES16 |airmass | diff --git a/satpy/tests/behave/features/steps/image_comparison.py b/satpy/tests/behave/features/steps/image_comparison.py new file mode 100644 index 0000000000..6552abb609 --- /dev/null +++ b/satpy/tests/behave/features/steps/image_comparison.py @@ -0,0 +1,97 @@ +import os +import warnings +from glob import glob +from PIL import Image +import cv2 +import dask +import numpy as np +from behave import given, when, then +from satpy import Scene +from datetime import datetime + +# Define a before_all hook to create the timestamp and test results directory +def before_all(context): + context.timestamp = datetime.now().strftime("%Y-%m-%d-%H-%M-%S") + context.test_results_dir = f"../test_results/image_comparison/{context.timestamp}" + os.makedirs(os.path.join(context.test_results_dir, 'generated'), exist_ok=True) + os.makedirs(os.path.join(context.test_results_dir, 'difference'), exist_ok=True) + + # Write the timestamp to test_results.txt + results_file = os.path.join(context.test_results_dir, 'test_results.txt') + with open(results_file, 'a') as f: + f.write(f"Test executed at {context.timestamp}.\n\n") + +# Register the before_all hook +def setup_hooks(): + from behave import use_fixture + from behave.runner import Context + + 
use_fixture(before_all, Context) + +setup_hooks() +@given('I have a {composite} reference image file from {satellite}') +def step_given_reference_image(context, composite, satellite): + reference_image = f"reference_image_{satellite}_{composite}.png" + context.reference_image = cv2.imread(f"./features/data/reference/{reference_image}") + context.reference_different_image = cv2.imread(f"./features/data/reference_different/{reference_image}") + context.satellite = satellite + context.composite = composite + + +@when('I generate a new {composite} image file from {satellite}') +def step_when_generate_image(context, composite, satellite): + os.environ['OMP_NUM_THREADS'] = os.environ['MKL_NUM_THREADS'] = '2' + os.environ['PYTROLL_CHUNK_SIZE'] = '1024' + warnings.simplefilter('ignore') + dask.config.set(scheduler='threads', num_workers=4) + + # Get the list of satellite files to open + filenames = glob(f'./features/data/satellite_data/{satellite}/*.nc') + + scn = Scene(reader='abi_l1b', filenames=filenames) + + scn.load([composite]) + + # Save the generated image in the generated folder + generated_image_path = os.path.join(context.test_results_dir, 'generated', + f'generated_{context.satellite}_{context.composite}.png') + scn.save_datasets(writer='simple_image', filename=generated_image_path) + + # Save the generated image in the context + context.generated_image = cv2.imread(generated_image_path) + + +@then('the generated image should be the same as the reference image') +def step_then_compare_images(context): + threshold = 2000 + # Load the images + imageA = cv2.cvtColor(context.reference_different_image, cv2.COLOR_BGR2GRAY) + imageB = cv2.cvtColor(context.generated_image, cv2.COLOR_BGR2GRAY) + # Ensure both images have the same dimensions + if imageA.shape != imageB.shape: + raise ValueError("Both images must have the same dimensions") + array1 = np.array(imageA) + array2 = np.array(imageB) + # Perform pixel-wise comparison + result_matrix = (array1 != 
array2).astype(np.uint8) * 255 + + # Save the resulting numpy array as an image in the difference folder + diff_image_path = os.path.join(context.test_results_dir, 'difference', + f'diff_{context.satellite}_{context.composite}.png') + cv2.imwrite(diff_image_path, result_matrix) + + # Count non-zero pixels in the result matrix + non_zero_count = np.count_nonzero(result_matrix) + + # Write the results to a file in the test results directory + results_file = os.path.join(context.test_results_dir, 'test_results.txt') + with open(results_file, 'a') as f: + f.write(f"Test for {context.satellite} - {context.composite}\n") + f.write(f"Non-zero pixel differences: {non_zero_count}\n") + if non_zero_count < threshold: + f.write(f"Result: Passed - {non_zero_count} pixel differences.\n\n") + else: + f.write(f"Result: Failed - {non_zero_count} pixel differences exceed the threshold of {threshold}.\n\n") + + # Assert that the number of differences is below the threshold + assert non_zero_count < threshold, f"Images are not similar enough. {non_zero_count} pixel differences exceed the threshold of {threshold}." 
diff --git a/satpy/tests/behave/modify_image.py b/satpy/tests/behave/modify_image.py new file mode 100644 index 0000000000..ca4a890dd3 --- /dev/null +++ b/satpy/tests/behave/modify_image.py @@ -0,0 +1,27 @@ +from PIL import Image, ImageDraw, ImageFont + + +def add_text_to_image(input_path, output_path, text, position=(800, 2200), font_size=700, font_color=(255, 255, 255)): + # Open the image + image = Image.open(input_path) + + # Create a drawing object + draw = ImageDraw.Draw(image) + + # Load a font + font = ImageFont.load_default() + + # Specify font size and color + font = ImageFont.truetype("arial.ttf", font_size) + draw.text(position, text, font=font, fill=font_color) + + # Save the modified image + image.save(output_path) + + +# Example usage +input_image_path = 'C:/Users/sennlaub/IdeaProjects/DWD_Pytroll/img/reference.png' +output_image_path = 'C:/Users/sennlaub/IdeaProjects/DWD_Pytroll/img/reference_different.png' +text_to_add = 'Hello, World!' + +add_text_to_image(input_image_path, output_image_path, text_to_add) diff --git a/satpy/tests/behave/test_install.py b/satpy/tests/behave/test_install.py new file mode 100644 index 0000000000..ec8d5edad7 --- /dev/null +++ b/satpy/tests/behave/test_install.py @@ -0,0 +1,89 @@ +#!/usr/bin/env python +"""Test that the installation steps for this tutorial were successful. + +1. Check that Satpy features are available and all dependencies are importable. +2. Check that data has been downloaded. 
+ +""" + +import io +import os +from contextlib import redirect_stdout + +try: + from satpy.utils import check_satpy +except ImportError: + print("FAIL: Satpy is not importable") + raise + + +TUTORIAL_ROOT = os.path.dirname(os.path.abspath(__file__)) + + +def check_satpy_features(): + print("Checking Satpy features...\n") + readers = ['abi_l1b', 'viirs_sdr'] + writers = ['cf', 'geotiff', 'simple_image'] + extras = ['cartopy', 'geoviews'] + out = io.StringIO() + with redirect_stdout(out): + check_satpy(readers=readers, writers=writers, extras=extras) + out_str = out.getvalue() + print(out_str) + + for feature in readers + writers + extras: + if feature + ": ok" not in out_str: + print("FAIL: Missing or corrupt Satpy dependency (see above for details).") + return False + return True + + +def check_data_download(): + print("Checking data directories...\n") + + # base_dirs + abi_dir = os.path.join(TUTORIAL_ROOT, 'data', 'abi_l1b') + viirs_dir = os.path.join(TUTORIAL_ROOT, 'data', 'viirs_sdr') + + # data case dirs + conus_dir = os.path.join(abi_dir, '20180511_texas_fire_abi_l1b_conus') + meso_dir = os.path.join(abi_dir, '20180511_texas_fire_abi_l1b_meso') + viirs_dir = os.path.join(viirs_dir, '20180511_texas_fire_viirs_sdr') + if not os.path.isdir(conus_dir): + print("FAIL: Missing ABI L1B CONUS data: {}".format(conus_dir)) + return False + if not os.path.isdir(meso_dir): + print("FAIL: Missing ABI L1B Mesoscale data: {}".format(meso_dir)) + return False + if not os.path.isdir(viirs_dir): + print("FAIL: Missing VIIRS SDR data: {}".format(viirs_dir)) + return False + + # number of files + if len(os.listdir(conus_dir)) != 16: + print("FAIL: Expected 16 files in {}".format(conus_dir)) + return False + if len(os.listdir(meso_dir)) != 1440: + print("FAIL: Expected 1440 files in {}".format(meso_dir)) + return False + if len(os.listdir(viirs_dir)) != 21: + print("FAIL: Expected 21 files in {}".format(viirs_dir)) + return False + + return True + + +def main(): + ret = True + 
ret &= check_satpy_features() + ret &= check_data_download() + if ret: + print("SUCCESS") + else: + print("FAIL") + return ret + + +if __name__ == "__main__": + import sys + sys.exit(main()) \ No newline at end of file diff --git a/satpy/tests/behave/test_results.txt b/satpy/tests/behave/test_results.txt new file mode 100644 index 0000000000..0a1a1eac78 --- /dev/null +++ b/satpy/tests/behave/test_results.txt @@ -0,0 +1,8 @@ +Test for GOES17 - airmass +Non-zero pixel differences: 6607692 +Result: Failed - 6607692 pixel differences exceed the threshold of 100. + +Test for GOES16 - airmass +Non-zero pixel differences: 1590 +Result: Failed - 1590 pixel differences exceed the threshold of 100. + From 95a702015f58d1d59d92c956700bec10cabec468 Mon Sep 17 00:00:00 2001 From: poeppelmann Date: Mon, 5 Aug 2024 14:16:25 +0200 Subject: [PATCH 226/340] change path --- satpy/tests/behave/features/steps/image_comparison.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/behave/features/steps/image_comparison.py b/satpy/tests/behave/features/steps/image_comparison.py index 6552abb609..90b959c770 100644 --- a/satpy/tests/behave/features/steps/image_comparison.py +++ b/satpy/tests/behave/features/steps/image_comparison.py @@ -12,7 +12,7 @@ # Define a before_all hook to create the timestamp and test results directory def before_all(context): context.timestamp = datetime.now().strftime("%Y-%m-%d-%H-%M-%S") - context.test_results_dir = f"../test_results/image_comparison/{context.timestamp}" + context.test_results_dir = f"/home/ubuntu/pytroll-image-comparison-tests/test_results/image_comparison/{context.timestamp}" os.makedirs(os.path.join(context.test_results_dir, 'generated'), exist_ok=True) os.makedirs(os.path.join(context.test_results_dir, 'difference'), exist_ok=True) From 3eae895d3ece583eef6444f6812cfb209f6561b6 Mon Sep 17 00:00:00 2001 From: poeppelmann Date: Thu, 8 Aug 2024 11:38:31 +0200 Subject: [PATCH 227/340] change satellite_data path to mounted 
ext_data --- satpy/tests/behave/features/steps/image_comparison.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/satpy/tests/behave/features/steps/image_comparison.py b/satpy/tests/behave/features/steps/image_comparison.py index 90b959c770..017dba90cb 100644 --- a/satpy/tests/behave/features/steps/image_comparison.py +++ b/satpy/tests/behave/features/steps/image_comparison.py @@ -9,6 +9,8 @@ from satpy import Scene from datetime import datetime +ext_data_path = "/app/ext_data" + # Define a before_all hook to create the timestamp and test results directory def before_all(context): context.timestamp = datetime.now().strftime("%Y-%m-%d-%H-%M-%S") @@ -46,7 +48,7 @@ def step_when_generate_image(context, composite, satellite): dask.config.set(scheduler='threads', num_workers=4) # Get the list of satellite files to open - filenames = glob(f'./features/data/satellite_data/{satellite}/*.nc') + filenames = glob(f'{ext_data_path}/satellite_data/{satellite}/*.nc') scn = Scene(reader='abi_l1b', filenames=filenames) From e38fb35945aea01c61ca66c613c1c6ed19e95dbe Mon Sep 17 00:00:00 2001 From: poeppelmann Date: Thu, 8 Aug 2024 13:58:31 +0200 Subject: [PATCH 228/340] change results path to mounted ext_data --- satpy/tests/behave/features/steps/image_comparison.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/behave/features/steps/image_comparison.py b/satpy/tests/behave/features/steps/image_comparison.py index 017dba90cb..6a052397a0 100644 --- a/satpy/tests/behave/features/steps/image_comparison.py +++ b/satpy/tests/behave/features/steps/image_comparison.py @@ -14,7 +14,7 @@ # Define a before_all hook to create the timestamp and test results directory def before_all(context): context.timestamp = datetime.now().strftime("%Y-%m-%d-%H-%M-%S") - context.test_results_dir = f"/home/ubuntu/pytroll-image-comparison-tests/test_results/image_comparison/{context.timestamp}" + context.test_results_dir = 
f"{ext_data_path}/test_results/image_comparison/{context.timestamp}" os.makedirs(os.path.join(context.test_results_dir, 'generated'), exist_ok=True) os.makedirs(os.path.join(context.test_results_dir, 'difference'), exist_ok=True) From a9d09e73c1a6b1097f1e51aded9e81e4995586df Mon Sep 17 00:00:00 2001 From: poeppelmann Date: Fri, 9 Aug 2024 11:22:35 +0200 Subject: [PATCH 229/340] timezone correction --- satpy/tests/behave/features/steps/image_comparison.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/satpy/tests/behave/features/steps/image_comparison.py b/satpy/tests/behave/features/steps/image_comparison.py index 6a052397a0..64ba3929eb 100644 --- a/satpy/tests/behave/features/steps/image_comparison.py +++ b/satpy/tests/behave/features/steps/image_comparison.py @@ -8,12 +8,14 @@ from behave import given, when, then from satpy import Scene from datetime import datetime +import pytz ext_data_path = "/app/ext_data" # Define a before_all hook to create the timestamp and test results directory def before_all(context): - context.timestamp = datetime.now().strftime("%Y-%m-%d-%H-%M-%S") + berlin_time = datetime.now(pytz.timezone('Europe/Berlin')) + context.timestamp = berlin_time.strftime("%Y-%m-%d-%H-%M-%S") context.test_results_dir = f"{ext_data_path}/test_results/image_comparison/{context.timestamp}" os.makedirs(os.path.join(context.test_results_dir, 'generated'), exist_ok=True) os.makedirs(os.path.join(context.test_results_dir, 'difference'), exist_ok=True) From 3ee8cc6c6d2c5947f52787263ae9818ec721243b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Katrin=20P=C3=B6ppelmann?= Date: Fri, 27 Sep 2024 10:26:15 +0000 Subject: [PATCH 230/340] changes to behave tests, add ash composite test --- satpy/tests/behave/create_reference.py | 8 +- satpy/tests/behave/download_data.py | 106 ------------------ .../behave/features/image_comparison.feature | 2 + .../behave/features/steps/image_comparison.py | 7 +- satpy/tests/behave/modify_image.py | 18 ++- 
satpy/tests/behave/test_install.py | 89 --------------- satpy/tests/behave/test_results.txt | 8 -- 7 files changed, 23 insertions(+), 215 deletions(-) mode change 100644 => 100755 satpy/tests/behave/create_reference.py delete mode 100644 satpy/tests/behave/download_data.py mode change 100644 => 100755 satpy/tests/behave/features/image_comparison.feature mode change 100644 => 100755 satpy/tests/behave/features/steps/image_comparison.py mode change 100644 => 100755 satpy/tests/behave/modify_image.py delete mode 100644 satpy/tests/behave/test_install.py delete mode 100644 satpy/tests/behave/test_results.txt diff --git a/satpy/tests/behave/create_reference.py b/satpy/tests/behave/create_reference.py old mode 100644 new mode 100755 index 897f542a90..de2929ee13 --- a/satpy/tests/behave/create_reference.py +++ b/satpy/tests/behave/create_reference.py @@ -5,21 +5,23 @@ import warnings import dask +ext_data_path = "/home/bildabgleich/pytroll-image-comparison-tests/data" + os.environ['OMP_NUM_THREADS'] = os.environ['MKL_NUM_THREADS'] = '2' os.environ['PYTROLL_CHUNK_SIZE'] = '1024' warnings.simplefilter('ignore') dask.config.set(scheduler='threads', num_workers=4) # Get the list of satellite files to open -satellite = "GOES16" -filenames = glob(f'./satellite_data/{satellite}/*.nc') +satellite = "GOES17" +filenames = glob(f'{ext_data_path}/satellite_data/{satellite}/*.nc') scn = Scene(reader='abi_l1b', filenames=filenames) # what composites Satpy knows how to make and that it has the inputs for? 
print(scn.available_composite_names()) -composite = 'airmass' +composite = 'ash' scn.load([composite]) with ProgressBar(): scn.save_datasets(writer='simple_image', filename=f'./features/data/reference/reference_image_{satellite}_{composite}.png') diff --git a/satpy/tests/behave/download_data.py b/satpy/tests/behave/download_data.py deleted file mode 100644 index f3c8c794cc..0000000000 --- a/satpy/tests/behave/download_data.py +++ /dev/null @@ -1,106 +0,0 @@ -#!/usr/bin/env python -"""Download test data and ancillary data for running this tutorial.""" - -import os -import math -import requests -from zipfile import ZipFile -from tqdm import tqdm - -TUTORIAL_ROOT = os.path.dirname(os.path.abspath(__file__)) - - -def download_pyspectral_luts(): - print("Downloading lookup tables used by pyspectral...") - from pyspectral.utils import download_luts, download_rsr - download_luts() - download_rsr() - return True - - -def _download_data_zip(url, output_filename): - if os.path.isfile(output_filename): - print("Data zip file already exists, won't re-download: {}".format(output_filename)) - return True - - print("Downloading {}".format(url)) - r = requests.get(url, stream=True) - - # Total size in bytes. 
- total_size = int(r.headers.get('content-length', 0)) - block_size = 1024 - wrote = 0 - with open(output_filename, 'wb') as f: - for data in tqdm(r.iter_content(block_size), total=math.ceil(total_size//block_size), unit='KB', unit_scale=True): - wrote += len(data) - f.write(data) - if total_size != 0 and wrote != total_size: - print("ERROR: something went wrong downloading {}".format(url)) - return False - return True - - -def _unzip(filename, output_dir): - print("Extracting {}".format(filename)) - try: - with ZipFile(filename, 'r') as zip_obj: - zip_obj.extractall(output_dir) - except (IOError, OSError): - print("FAIL: Could not extract {}".format(filename)) - return False - return True - - -def _download_and_unzip(url, output_dir): - filename = os.path.basename(url) - if _download_data_zip(url, filename): - return _unzip(filename, output_dir) - return False - - -def download_test_data(): - cwd = os.getcwd() - os.chdir(TUTORIAL_ROOT) - - ret = _download_and_unzip( - 'https://bin.ssec.wisc.edu/pub/davidh/20180511_texas_fire_abi_l1b_conus.zip', - os.path.join('data', 'abi_l1b') - ) - ret &= _download_and_unzip( - 'https://bin.ssec.wisc.edu/pub/davidh/20180511_texas_fire_abi_l1b_meso.zip', - os.path.join('data', 'abi_l1b') - ) - ret &= _download_and_unzip( - 'https://bin.ssec.wisc.edu/pub/davidh/20180511_texas_fire_viirs_sdr.zip', - os.path.join('data', 'viirs_sdr') - ) - os.chdir(cwd) - return ret - - -def main(): - import argparse - parser = argparse.ArgumentParser(description="Download data necessary for the Satpy tutorial") - parser.add_argument('--luts-only', action='store_true', - help="Only download LUTs for pyspectral operation") - parser.add_argument('--data-only', action='store_true', - help="Only download test data") - args = parser.parse_args() - - ret = True - if not args.data_only: - ret &= download_pyspectral_luts() - if not args.luts_only: - ret &= download_test_data() - - if ret: - print("Downloaded `.zip` files can now be deleted.") - 
print("SUCCESS") - else: - print("FAIL") - return int(not ret) - - -if __name__ == "__main__": - import sys - sys.exit(main()) \ No newline at end of file diff --git a/satpy/tests/behave/features/image_comparison.feature b/satpy/tests/behave/features/image_comparison.feature old mode 100644 new mode 100755 index a2392bd27a..23bd550e6d --- a/satpy/tests/behave/features/image_comparison.feature +++ b/satpy/tests/behave/features/image_comparison.feature @@ -9,3 +9,5 @@ Feature: Image Comparison |satellite |composite | |GOES17 |airmass | |GOES16 |airmass | + |GOES16 |ash | + |GOES17 |ash | \ No newline at end of file diff --git a/satpy/tests/behave/features/steps/image_comparison.py b/satpy/tests/behave/features/steps/image_comparison.py old mode 100644 new mode 100755 index 64ba3929eb..400ddeacc2 --- a/satpy/tests/behave/features/steps/image_comparison.py +++ b/satpy/tests/behave/features/steps/image_comparison.py @@ -10,7 +10,9 @@ from datetime import datetime import pytz -ext_data_path = "/app/ext_data" +#ext_data_path = "/app/ext_data" +ext_data_path = "/home/bildabgleich/pytroll-image-comparison-tests/data" +threshold = 2000 # Define a before_all hook to create the timestamp and test results directory def before_all(context): @@ -67,9 +69,8 @@ def step_when_generate_image(context, composite, satellite): @then('the generated image should be the same as the reference image') def step_then_compare_images(context): - threshold = 2000 # Load the images - imageA = cv2.cvtColor(context.reference_different_image, cv2.COLOR_BGR2GRAY) + imageA = cv2.cvtColor(context.reference_different_image, cv2.COLOR_BGR2GRAY) # reference_different_image for testing only imageB = cv2.cvtColor(context.generated_image, cv2.COLOR_BGR2GRAY) # Ensure both images have the same dimensions if imageA.shape != imageB.shape: diff --git a/satpy/tests/behave/modify_image.py b/satpy/tests/behave/modify_image.py old mode 100644 new mode 100755 index ca4a890dd3..a4252db928 --- 
a/satpy/tests/behave/modify_image.py +++ b/satpy/tests/behave/modify_image.py @@ -1,5 +1,5 @@ from PIL import Image, ImageDraw, ImageFont - +import os def add_text_to_image(input_path, output_path, text, position=(800, 2200), font_size=700, font_color=(255, 255, 255)): # Open the image @@ -9,10 +9,13 @@ def add_text_to_image(input_path, output_path, text, position=(800, 2200), font_ draw = ImageDraw.Draw(image) # Load a font - font = ImageFont.load_default() + font_path = "/usr/share/fonts/truetype/dejavu/DejaVuSans-Bold.ttf" + if os.path.exists(font_path): + font = ImageFont.truetype(font_path, font_size) + else: + print("DejaVuSans not found, using default font (fixed size)") + font = ImageFont.load_default() - # Specify font size and color - font = ImageFont.truetype("arial.ttf", font_size) draw.text(position, text, font=font, fill=font_color) # Save the modified image @@ -20,8 +23,11 @@ def add_text_to_image(input_path, output_path, text, position=(800, 2200), font_ # Example usage -input_image_path = 'C:/Users/sennlaub/IdeaProjects/DWD_Pytroll/img/reference.png' -output_image_path = 'C:/Users/sennlaub/IdeaProjects/DWD_Pytroll/img/reference_different.png' +satellite = "GOES16" +composite = "ash" +reference_image = f"reference_image_{satellite}_{composite}.png" +input_image_path = f"./features/data/reference/{reference_image}" +output_image_path = f"./features/data/reference_different/{reference_image}" text_to_add = 'Hello, World!' add_text_to_image(input_image_path, output_image_path, text_to_add) diff --git a/satpy/tests/behave/test_install.py b/satpy/tests/behave/test_install.py deleted file mode 100644 index ec8d5edad7..0000000000 --- a/satpy/tests/behave/test_install.py +++ /dev/null @@ -1,89 +0,0 @@ -#!/usr/bin/env python -"""Test that the installation steps for this tutorial were successful. - -1. Check that Satpy features are available and all dependencies are importable. -2. Check that data has been downloaded. 
- -""" - -import io -import os -from contextlib import redirect_stdout - -try: - from satpy.utils import check_satpy -except ImportError: - print("FAIL: Satpy is not importable") - raise - - -TUTORIAL_ROOT = os.path.dirname(os.path.abspath(__file__)) - - -def check_satpy_features(): - print("Checking Satpy features...\n") - readers = ['abi_l1b', 'viirs_sdr'] - writers = ['cf', 'geotiff', 'simple_image'] - extras = ['cartopy', 'geoviews'] - out = io.StringIO() - with redirect_stdout(out): - check_satpy(readers=readers, writers=writers, extras=extras) - out_str = out.getvalue() - print(out_str) - - for feature in readers + writers + extras: - if feature + ": ok" not in out_str: - print("FAIL: Missing or corrupt Satpy dependency (see above for details).") - return False - return True - - -def check_data_download(): - print("Checking data directories...\n") - - # base_dirs - abi_dir = os.path.join(TUTORIAL_ROOT, 'data', 'abi_l1b') - viirs_dir = os.path.join(TUTORIAL_ROOT, 'data', 'viirs_sdr') - - # data case dirs - conus_dir = os.path.join(abi_dir, '20180511_texas_fire_abi_l1b_conus') - meso_dir = os.path.join(abi_dir, '20180511_texas_fire_abi_l1b_meso') - viirs_dir = os.path.join(viirs_dir, '20180511_texas_fire_viirs_sdr') - if not os.path.isdir(conus_dir): - print("FAIL: Missing ABI L1B CONUS data: {}".format(conus_dir)) - return False - if not os.path.isdir(meso_dir): - print("FAIL: Missing ABI L1B Mesoscale data: {}".format(meso_dir)) - return False - if not os.path.isdir(viirs_dir): - print("FAIL: Missing VIIRS SDR data: {}".format(viirs_dir)) - return False - - # number of files - if len(os.listdir(conus_dir)) != 16: - print("FAIL: Expected 16 files in {}".format(conus_dir)) - return False - if len(os.listdir(meso_dir)) != 1440: - print("FAIL: Expected 1440 files in {}".format(meso_dir)) - return False - if len(os.listdir(viirs_dir)) != 21: - print("FAIL: Expected 21 files in {}".format(viirs_dir)) - return False - - return True - - -def main(): - ret = True - 
ret &= check_satpy_features() - ret &= check_data_download() - if ret: - print("SUCCESS") - else: - print("FAIL") - return ret - - -if __name__ == "__main__": - import sys - sys.exit(main()) \ No newline at end of file diff --git a/satpy/tests/behave/test_results.txt b/satpy/tests/behave/test_results.txt deleted file mode 100644 index 0a1a1eac78..0000000000 --- a/satpy/tests/behave/test_results.txt +++ /dev/null @@ -1,8 +0,0 @@ -Test for GOES17 - airmass -Non-zero pixel differences: 6607692 -Result: Failed - 6607692 pixel differences exceed the threshold of 100. - -Test for GOES16 - airmass -Non-zero pixel differences: 1590 -Result: Failed - 1590 pixel differences exceed the threshold of 100. - From 05cee828169290c5167a8060d5649cc17464f36f Mon Sep 17 00:00:00 2001 From: katrin Date: Fri, 27 Sep 2024 12:36:27 +0200 Subject: [PATCH 231/340] edit path --- satpy/tests/behave/features/steps/image_comparison.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/satpy/tests/behave/features/steps/image_comparison.py b/satpy/tests/behave/features/steps/image_comparison.py index 400ddeacc2..04dcedc736 100755 --- a/satpy/tests/behave/features/steps/image_comparison.py +++ b/satpy/tests/behave/features/steps/image_comparison.py @@ -10,8 +10,8 @@ from datetime import datetime import pytz -#ext_data_path = "/app/ext_data" -ext_data_path = "/home/bildabgleich/pytroll-image-comparison-tests/data" +ext_data_path = "/app/ext_data" +#ext_data_path = "/home/bildabgleich/pytroll-image-comparison-tests/data" threshold = 2000 # Define a before_all hook to create the timestamp and test results directory @@ -70,7 +70,7 @@ def step_when_generate_image(context, composite, satellite): @then('the generated image should be the same as the reference image') def step_then_compare_images(context): # Load the images - imageA = cv2.cvtColor(context.reference_different_image, cv2.COLOR_BGR2GRAY) # reference_different_image for testing only + imageA = 
cv2.cvtColor(context.reference_image, cv2.COLOR_BGR2GRAY) # reference_different_image for testing only imageB = cv2.cvtColor(context.generated_image, cv2.COLOR_BGR2GRAY) # Ensure both images have the same dimensions if imageA.shape != imageB.shape: From c3e70190dbe20d2e20260e2dd4e6bce0c8cee777 Mon Sep 17 00:00:00 2001 From: Gerrit Holl Date: Thu, 21 Nov 2024 16:43:00 +0100 Subject: [PATCH 232/340] Move and adapt reference creation script Move the reference imagery creation script to the utils/ directory. Delete the not-needed image modification script. Simplify and generalise the reference imagery creation script a bit. --- satpy/tests/behave/create_reference.py | 30 --------------- satpy/tests/behave/modify_image.py | 33 ----------------- utils/create_reference.py | 51 ++++++++++++++++++++++++++ 3 files changed, 51 insertions(+), 63 deletions(-) delete mode 100755 satpy/tests/behave/create_reference.py delete mode 100755 satpy/tests/behave/modify_image.py create mode 100644 utils/create_reference.py diff --git a/satpy/tests/behave/create_reference.py b/satpy/tests/behave/create_reference.py deleted file mode 100755 index de2929ee13..0000000000 --- a/satpy/tests/behave/create_reference.py +++ /dev/null @@ -1,30 +0,0 @@ -from dask.diagnostics import ProgressBar -from satpy import Scene -from glob import glob -import os -import warnings -import dask - -ext_data_path = "/home/bildabgleich/pytroll-image-comparison-tests/data" - -os.environ['OMP_NUM_THREADS'] = os.environ['MKL_NUM_THREADS'] = '2' -os.environ['PYTROLL_CHUNK_SIZE'] = '1024' -warnings.simplefilter('ignore') -dask.config.set(scheduler='threads', num_workers=4) - -# Get the list of satellite files to open -satellite = "GOES17" -filenames = glob(f'{ext_data_path}/satellite_data/{satellite}/*.nc') - -scn = Scene(reader='abi_l1b', filenames=filenames) - -# what composites Satpy knows how to make and that it has the inputs for? 
-print(scn.available_composite_names()) - -composite = 'ash' -scn.load([composite]) -with ProgressBar(): - scn.save_datasets(writer='simple_image', filename=f'./features/data/reference/reference_image_{satellite}_{composite}.png') - - - diff --git a/satpy/tests/behave/modify_image.py b/satpy/tests/behave/modify_image.py deleted file mode 100755 index a4252db928..0000000000 --- a/satpy/tests/behave/modify_image.py +++ /dev/null @@ -1,33 +0,0 @@ -from PIL import Image, ImageDraw, ImageFont -import os - -def add_text_to_image(input_path, output_path, text, position=(800, 2200), font_size=700, font_color=(255, 255, 255)): - # Open the image - image = Image.open(input_path) - - # Create a drawing object - draw = ImageDraw.Draw(image) - - # Load a font - font_path = "/usr/share/fonts/truetype/dejavu/DejaVuSans-Bold.ttf" - if os.path.exists(font_path): - font = ImageFont.truetype(font_path, font_size) - else: - print("DejaVuSans not found, using default font (fixed size)") - font = ImageFont.load_default() - - draw.text(position, text, font=font, fill=font_color) - - # Save the modified image - image.save(output_path) - - -# Example usage -satellite = "GOES16" -composite = "ash" -reference_image = f"reference_image_{satellite}_{composite}.png" -input_image_path = f"./features/data/reference/{reference_image}" -output_image_path = f"./features/data/reference_different/{reference_image}" -text_to_add = 'Hello, World!' - -add_text_to_image(input_image_path, output_image_path, text_to_add) diff --git a/utils/create_reference.py b/utils/create_reference.py new file mode 100644 index 0000000000..8604cb45c0 --- /dev/null +++ b/utils/create_reference.py @@ -0,0 +1,51 @@ +# Copyright (c) 2024 Satpy developers +# +# This file is part of satpy. 
+# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . + +"""Script to create image testing references. + +Script to create reference images for the automated image testing system. + +create_reference.py + +The input data directory must follow the data structure from the +image-comparison-tests repository with satellite_data/. + +This script is a work in progress and expected to change significantly. +It is absolutely not intended for any operational production of satellite +imagery. +""" + +import sys +from glob import glob + +from dask.diagnostics import ProgressBar + +from satpy import Scene + +ext_data_path = sys.argv[1] +outdir = sys.argv[2] +satellite = sys.argv[3] + +filenames = glob(f"{ext_data_path}/satellite_data/{satellite}/*.nc") + +scn = Scene(reader="abi_l1b", filenames=filenames) + +composites = ["ash", "airmass"] +scn.load(composites) +ls = scn.resample(resampler="native") +with ProgressBar(): + ls.save_datasets(writer="simple_image", filename=outdir + + "/satpy-reference-image-{platform_name}-{sensor}-{start_time:%Y%m%d%H%M}-{area.area_id}-{name}.png") From fcbe1f3b5bea9e62882ba8173d4a37f11f49adff Mon Sep 17 00:00:00 2001 From: Gerrit Holl Date: Fri, 22 Nov 2024 11:54:33 +0100 Subject: [PATCH 233/340] Read reference images externally Read reference images externally, like the input data. 
--- .../behave/features/steps/image_comparison.py | 82 ++++++++++++------- 1 file changed, 53 insertions(+), 29 deletions(-) mode change 100755 => 100644 satpy/tests/behave/features/steps/image_comparison.py diff --git a/satpy/tests/behave/features/steps/image_comparison.py b/satpy/tests/behave/features/steps/image_comparison.py old mode 100755 new mode 100644 index 04dcedc736..f06398a07d --- a/satpy/tests/behave/features/steps/image_comparison.py +++ b/satpy/tests/behave/features/steps/image_comparison.py @@ -1,74 +1,96 @@ +# Copyright (c) 2024 Satpy developers +# +# This file is part of satpy. +# +# satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# satpy. If not, see . 
+"""Image comparison tests.""" + import os import warnings +from datetime import datetime from glob import glob -from PIL import Image + import cv2 import dask import numpy as np -from behave import given, when, then -from satpy import Scene -from datetime import datetime import pytz +from behave import given, then, when + +from satpy import Scene ext_data_path = "/app/ext_data" #ext_data_path = "/home/bildabgleich/pytroll-image-comparison-tests/data" threshold = 2000 -# Define a before_all hook to create the timestamp and test results directory def before_all(context): - berlin_time = datetime.now(pytz.timezone('Europe/Berlin')) + """Define a before_all hook to create the timestamp and test results directory.""" + berlin_time = datetime.now(pytz.timezone("Europe/Berlin")) context.timestamp = berlin_time.strftime("%Y-%m-%d-%H-%M-%S") context.test_results_dir = f"{ext_data_path}/test_results/image_comparison/{context.timestamp}" - os.makedirs(os.path.join(context.test_results_dir, 'generated'), exist_ok=True) - os.makedirs(os.path.join(context.test_results_dir, 'difference'), exist_ok=True) + os.makedirs(os.path.join(context.test_results_dir, "generated"), exist_ok=True) + os.makedirs(os.path.join(context.test_results_dir, "difference"), exist_ok=True) # Write the timestamp to test_results.txt - results_file = os.path.join(context.test_results_dir, 'test_results.txt') - with open(results_file, 'a') as f: + results_file = os.path.join(context.test_results_dir, "test_results.txt") + with open(results_file, "a") as f: f.write(f"Test executed at {context.timestamp}.\n\n") -# Register the before_all hook def setup_hooks(): + """Register the before_all hook.""" from behave import use_fixture from behave.runner import Context use_fixture(before_all, Context) setup_hooks() -@given('I have a {composite} reference image file from {satellite}') +@given("I have a {composite} reference image file from {satellite}") def step_given_reference_image(context, composite, satellite): + 
"""Prepare a reference image.""" reference_image = f"reference_image_{satellite}_{composite}.png" - context.reference_image = cv2.imread(f"./features/data/reference/{reference_image}") + #context.reference_image = cv2.imread(f"./features/data/reference/{reference_image}") + context.reference_image = cv2.imread(f"{ext_data_path}/reference_images/{reference_image}") context.reference_different_image = cv2.imread(f"./features/data/reference_different/{reference_image}") context.satellite = satellite context.composite = composite -@when('I generate a new {composite} image file from {satellite}') +@when("I generate a new {composite} image file from {satellite}") def step_when_generate_image(context, composite, satellite): - os.environ['OMP_NUM_THREADS'] = os.environ['MKL_NUM_THREADS'] = '2' - os.environ['PYTROLL_CHUNK_SIZE'] = '1024' - warnings.simplefilter('ignore') - dask.config.set(scheduler='threads', num_workers=4) + """Generate test images.""" + os.environ["OMP_NUM_THREADS"] = os.environ["MKL_NUM_THREADS"] = "2" + os.environ["PYTROLL_CHUNK_SIZE"] = "1024" + warnings.simplefilter("ignore") + dask.config.set(scheduler="threads", num_workers=4) # Get the list of satellite files to open - filenames = glob(f'{ext_data_path}/satellite_data/{satellite}/*.nc') + filenames = glob(f"{ext_data_path}/satellite_data/{satellite}/*.nc") - scn = Scene(reader='abi_l1b', filenames=filenames) + scn = Scene(reader="abi_l1b", filenames=filenames) scn.load([composite]) # Save the generated image in the generated folder - generated_image_path = os.path.join(context.test_results_dir, 'generated', - f'generated_{context.satellite}_{context.composite}.png') - scn.save_datasets(writer='simple_image', filename=generated_image_path) + generated_image_path = os.path.join(context.test_results_dir, "generated", + f"generated_{context.satellite}_{context.composite}.png") + scn.save_datasets(writer="simple_image", filename=generated_image_path) # Save the generated image in the context 
context.generated_image = cv2.imread(generated_image_path) -@then('the generated image should be the same as the reference image') +@then("the generated image should be the same as the reference image") def step_then_compare_images(context): + """Compare test image to reference image.""" # Load the images imageA = cv2.cvtColor(context.reference_image, cv2.COLOR_BGR2GRAY) # reference_different_image for testing only imageB = cv2.cvtColor(context.generated_image, cv2.COLOR_BGR2GRAY) @@ -81,16 +103,16 @@ def step_then_compare_images(context): result_matrix = (array1 != array2).astype(np.uint8) * 255 # Save the resulting numpy array as an image in the difference folder - diff_image_path = os.path.join(context.test_results_dir, 'difference', - f'diff_{context.satellite}_{context.composite}.png') + diff_image_path = os.path.join(context.test_results_dir, "difference", + f"diff_{context.satellite}_{context.composite}.png") cv2.imwrite(diff_image_path, result_matrix) # Count non-zero pixels in the result matrix non_zero_count = np.count_nonzero(result_matrix) # Write the results to a file in the test results directory - results_file = os.path.join(context.test_results_dir, 'test_results.txt') - with open(results_file, 'a') as f: + results_file = os.path.join(context.test_results_dir, "test_results.txt") + with open(results_file, "a") as f: f.write(f"Test for {context.satellite} - {context.composite}\n") f.write(f"Non-zero pixel differences: {non_zero_count}\n") if non_zero_count < threshold: @@ -99,4 +121,6 @@ def step_then_compare_images(context): f.write(f"Result: Failed - {non_zero_count} pixel differences exceed the threshold of {threshold}.\n\n") # Assert that the number of differences is below the threshold - assert non_zero_count < threshold, f"Images are not similar enough. {non_zero_count} pixel differences exceed the threshold of {threshold}." + assert non_zero_count < threshold, (f"Images are not similar enough. 
" + f"{non_zero_count} pixel differences exceed the threshold of " + f"{threshold}.") From a89b0f9b7a5739281d262a307a40018bdfbd14bd Mon Sep 17 00:00:00 2001 From: Gerrit Holl Date: Fri, 22 Nov 2024 14:57:58 +0100 Subject: [PATCH 234/340] Add pytz to mypy stubs --- .pre-commit-config.yaml | 1 + 1 file changed, 1 insertion(+) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index fe36218d40..a0eb6d01c3 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -27,6 +27,7 @@ repos: - types-setuptools - types-PyYAML - types-requests + - types-pytz args: ["--python-version", "3.10", "--ignore-missing-imports"] - repo: https://github.com/pycqa/isort rev: 5.13.2 From 22c5c55a71580e94201402ebcd484511e0d70566 Mon Sep 17 00:00:00 2001 From: Gerrit Holl Date: Fri, 22 Nov 2024 15:13:33 +0100 Subject: [PATCH 235/340] remove dangling commented line --- satpy/tests/behave/features/steps/image_comparison.py | 1 - 1 file changed, 1 deletion(-) diff --git a/satpy/tests/behave/features/steps/image_comparison.py b/satpy/tests/behave/features/steps/image_comparison.py index f06398a07d..170f7af83e 100644 --- a/satpy/tests/behave/features/steps/image_comparison.py +++ b/satpy/tests/behave/features/steps/image_comparison.py @@ -29,7 +29,6 @@ from satpy import Scene ext_data_path = "/app/ext_data" -#ext_data_path = "/home/bildabgleich/pytroll-image-comparison-tests/data" threshold = 2000 def before_all(context): From b8f9b2eb8ea730d36a7d5ff04506b983b3cfb375 Mon Sep 17 00:00:00 2001 From: Gerrit Holl Date: Fri, 22 Nov 2024 15:48:26 +0100 Subject: [PATCH 236/340] Use UTC times, not Berlin times --- .pre-commit-config.yaml | 1 - satpy/tests/behave/features/steps/image_comparison.py | 5 ++--- 2 files changed, 2 insertions(+), 4 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index a0eb6d01c3..fe36218d40 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -27,7 +27,6 @@ repos: - types-setuptools - types-PyYAML - 
types-requests - - types-pytz args: ["--python-version", "3.10", "--ignore-missing-imports"] - repo: https://github.com/pycqa/isort rev: 5.13.2 diff --git a/satpy/tests/behave/features/steps/image_comparison.py b/satpy/tests/behave/features/steps/image_comparison.py index 170f7af83e..af8b93b7c9 100644 --- a/satpy/tests/behave/features/steps/image_comparison.py +++ b/satpy/tests/behave/features/steps/image_comparison.py @@ -23,7 +23,6 @@ import cv2 import dask import numpy as np -import pytz from behave import given, then, when from satpy import Scene @@ -33,8 +32,8 @@ def before_all(context): """Define a before_all hook to create the timestamp and test results directory.""" - berlin_time = datetime.now(pytz.timezone("Europe/Berlin")) - context.timestamp = berlin_time.strftime("%Y-%m-%d-%H-%M-%S") + tm = datetime.now() + context.timestamp = tm.strftime("%Y-%m-%d-%H-%M-%S") context.test_results_dir = f"{ext_data_path}/test_results/image_comparison/{context.timestamp}" os.makedirs(os.path.join(context.test_results_dir, "generated"), exist_ok=True) os.makedirs(os.path.join(context.test_results_dir, "difference"), exist_ok=True) From c7bbfe5228ed322e49424d8fde24cb3b10cc2be9 Mon Sep 17 00:00:00 2001 From: Stephan Finkensieper Date: Fri, 22 Nov 2024 15:12:23 +0000 Subject: [PATCH 237/340] Add preprocessing method --- satpy/readers/mviri_l1b_fiduceo_nc.py | 20 ++++++++++++++++--- .../reader_tests/test_mviri_l1b_fiduceo_nc.py | 4 ++-- 2 files changed, 19 insertions(+), 5 deletions(-) diff --git a/satpy/readers/mviri_l1b_fiduceo_nc.py b/satpy/readers/mviri_l1b_fiduceo_nc.py index 9961679728..56f15ca9d8 100644 --- a/satpy/readers/mviri_l1b_fiduceo_nc.py +++ b/satpy/readers/mviri_l1b_fiduceo_nc.py @@ -454,7 +454,17 @@ def is_high_resol(resolution): return resolution == HIGH_RESOL -class DatasetPreprocessor: +def preprocess_dataset(ds): + """Preprocess the given dataset. + + Performs steps that can be done once, such as decoding + according to CF conventions. 
+ """ + preproc = _DatasetPreprocessor() + return preproc.preprocess(ds) + + +class _DatasetPreprocessor: """Helper class for preprocessing the dataset.""" def preprocess(self, ds): @@ -534,7 +544,11 @@ def _cleanup_attrs(self, ds): class DatasetAccessor: - """Helper class for accessing the dataset.""" + """Helper class for accessing the dataset. + + Performs steps that need to be done each time a variable + is accessed, such as renaming "y_*" coordinates to "y". + """ def __init__(self, ds): """Wrap the given dataset.""" @@ -597,7 +611,7 @@ def open_dataset(filename): decode_times=False, mask_and_scale=False, ) - nc_preproc = DatasetPreprocessor().preprocess(nc_raw) + nc_preproc = preprocess_dataset(nc_raw) return DatasetAccessor(nc_preproc) diff --git a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py index b5b5b5a593..7e925d2b2c 100644 --- a/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py +++ b/satpy/tests/reader_tests/test_mviri_l1b_fiduceo_nc.py @@ -33,10 +33,10 @@ ALTITUDE, EQUATOR_RADIUS, POLE_RADIUS, - DatasetPreprocessor, FiduceoMviriEasyFcdrFileHandler, FiduceoMviriFullFcdrFileHandler, Interpolator, + preprocess_dataset, ) from satpy.tests.utils import make_dataid @@ -637,7 +637,7 @@ def fixture_dataset_exp(self): def test_preprocess(self, dataset, dataset_exp): """Test dataset preprocessing.""" - preprocessed = DatasetPreprocessor().preprocess(dataset) + preprocessed = preprocess_dataset(dataset) xr.testing.assert_allclose(preprocessed, dataset_exp) From 16391494403c944b2731572cb92679486c0d9be4 Mon Sep 17 00:00:00 2001 From: Gerrit Holl Date: Fri, 22 Nov 2024 16:32:36 +0100 Subject: [PATCH 238/340] remove reference difference image --- satpy/tests/behave/features/steps/image_comparison.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/satpy/tests/behave/features/steps/image_comparison.py b/satpy/tests/behave/features/steps/image_comparison.py index 
af8b93b7c9..17a17cdeeb 100644 --- a/satpy/tests/behave/features/steps/image_comparison.py +++ b/satpy/tests/behave/features/steps/image_comparison.py @@ -55,9 +55,7 @@ def setup_hooks(): def step_given_reference_image(context, composite, satellite): """Prepare a reference image.""" reference_image = f"reference_image_{satellite}_{composite}.png" - #context.reference_image = cv2.imread(f"./features/data/reference/{reference_image}") context.reference_image = cv2.imread(f"{ext_data_path}/reference_images/{reference_image}") - context.reference_different_image = cv2.imread(f"./features/data/reference_different/{reference_image}") context.satellite = satellite context.composite = composite @@ -90,7 +88,7 @@ def step_when_generate_image(context, composite, satellite): def step_then_compare_images(context): """Compare test image to reference image.""" # Load the images - imageA = cv2.cvtColor(context.reference_image, cv2.COLOR_BGR2GRAY) # reference_different_image for testing only + imageA = cv2.cvtColor(context.reference_image, cv2.COLOR_BGR2GRAY) imageB = cv2.cvtColor(context.generated_image, cv2.COLOR_BGR2GRAY) # Ensure both images have the same dimensions if imageA.shape != imageB.shape: From 80c93cbdb918039e90e731e06195261acf854860 Mon Sep 17 00:00:00 2001 From: Yufei Zhu Date: Fri, 22 Nov 2024 19:35:59 +0000 Subject: [PATCH 239/340] add change effective time in config file. --- satpy/etc/readers/abi_l2_nc.yaml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/satpy/etc/readers/abi_l2_nc.yaml b/satpy/etc/readers/abi_l2_nc.yaml index 13447231a8..8410d4cff8 100644 --- a/satpy/etc/readers/abi_l2_nc.yaml +++ b/satpy/etc/readers/abi_l2_nc.yaml @@ -211,6 +211,8 @@ datasets: name: PSDN file_type: abi_l2_cpsn file_key: PSD + + # new variable name since 18:51UTC December 04, 2023. 
cloud_particle_size_new: name: CPS file_type: abi_l2_cps From 06cf2e47fe0259e54222b3d0e0d9c4544b44eaf0 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 1 Dec 2024 10:18:01 +0000 Subject: [PATCH 240/340] Bump codecov/codecov-action from 4 to 5 Bumps [codecov/codecov-action](https://github.com/codecov/codecov-action) from 4 to 5. - [Release notes](https://github.com/codecov/codecov-action/releases) - [Changelog](https://github.com/codecov/codecov-action/blob/main/CHANGELOG.md) - [Commits](https://github.com/codecov/codecov-action/compare/v4...v5) --- updated-dependencies: - dependency-name: codecov/codecov-action dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] --- .github/workflows/ci.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index f50be0e9b8..e18d23d63e 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -115,7 +115,7 @@ jobs: pytest -n auto --cov=satpy satpy/tests --cov-report=xml --cov-report= - name: Upload unittest coverage to Codecov - uses: codecov/codecov-action@v4 + uses: codecov/codecov-action@v5 with: flags: unittests file: ./coverage.xml @@ -136,7 +136,7 @@ jobs: coverage xml - name: Upload behaviour test coverage to Codecov - uses: codecov/codecov-action@v4 + uses: codecov/codecov-action@v5 with: flags: behaviourtests file: ./coverage.xml From c803137b6ed0807cb76485bf9ababed685d903c5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 1 Dec 2024 10:18:04 +0000 Subject: [PATCH 241/340] Bump pypa/gh-action-pypi-publish from 1.11.0 to 1.12.2 Bumps [pypa/gh-action-pypi-publish](https://github.com/pypa/gh-action-pypi-publish) from 1.11.0 to 1.12.2. 
- [Release notes](https://github.com/pypa/gh-action-pypi-publish/releases) - [Commits](https://github.com/pypa/gh-action-pypi-publish/compare/v1.11.0...v1.12.2) --- updated-dependencies: - dependency-name: pypa/gh-action-pypi-publish dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- .github/workflows/deploy-sdist.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/deploy-sdist.yaml b/.github/workflows/deploy-sdist.yaml index 479e7f8281..f97f137b80 100644 --- a/.github/workflows/deploy-sdist.yaml +++ b/.github/workflows/deploy-sdist.yaml @@ -23,7 +23,7 @@ jobs: - name: Publish package to PyPI if: github.event.action == 'published' - uses: pypa/gh-action-pypi-publish@v1.11.0 + uses: pypa/gh-action-pypi-publish@v1.12.2 with: user: __token__ password: ${{ secrets.pypi_password }} From f9079351e42c8ae109db0f057d326bbaa0ecb7f2 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 2 Dec 2024 22:50:09 +0000 Subject: [PATCH 242/340] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.7.2 → v0.8.1](https://github.com/astral-sh/ruff-pre-commit/compare/v0.7.2...v0.8.1) - [github.com/PyCQA/bandit: 1.7.10 → 1.8.0](https://github.com/PyCQA/bandit/compare/1.7.10...1.8.0) --- .pre-commit-config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index fe36218d40..ff8c9568c9 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -3,7 +3,7 @@ fail_fast: false repos: - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. 
- rev: 'v0.7.2' + rev: 'v0.8.1' hooks: - id: ruff - repo: https://github.com/pre-commit/pre-commit-hooks @@ -14,7 +14,7 @@ repos: - id: check-yaml args: [--unsafe] - repo: https://github.com/PyCQA/bandit - rev: '1.7.10' # Update me! + rev: '1.8.0' # Update me! hooks: - id: bandit args: [--ini, .bandit] From 9087361a4807ca832707e39c97251d5db1995caf Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Benjamin=20R=C3=B6sner?= Date: Wed, 4 Dec 2024 16:38:06 +0100 Subject: [PATCH 243/340] fix double labels in doc --- doc/source/conf.py | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/source/conf.py b/doc/source/conf.py index a580f0310f..4c8405d19f 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -92,6 +92,7 @@ def __getattr__(cls, name): # Autosectionlabel # Make sure target is unique autosectionlabel_prefix_document = True +autosectionlabel_maxdepth = 3 # API docs apidoc_module_dir = "../../satpy" From 0589f61f200bdaea2801789ceb51e000b522cced Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Wed, 4 Dec 2024 16:46:40 +0100 Subject: [PATCH 244/340] Add t865 dataset to olci l2 list --- satpy/etc/readers/olci_l2.yaml | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/satpy/etc/readers/olci_l2.yaml b/satpy/etc/readers/olci_l2.yaml index 110bb11a2e..a3e82d3b5c 100644 --- a/satpy/etc/readers/olci_l2.yaml +++ b/satpy/etc/readers/olci_l2.yaml @@ -46,6 +46,11 @@ file_types: file_patterns: - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/wqsf.nc' - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/wqsf.nc' + esa_l2_w_aer: + file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2 + 
file_patterns: + - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/w_aer.nc' + - '{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}______{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/w_aer.nc' esa_l2_gifapar: file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2 file_patterns: @@ -426,6 +431,16 @@ datasets: file_type: esa_l2_wqsf nc_key: WQSF + t865: + name: w_aer + sensor: olci + resolution: 300 + standard_name: aerosol_optical_thickness + units: "lg(re g.m-3)" + coordinates: [longitude, latitude] + file_type: esa_l2_w_aer + nc_key: T865 + iwv: name: iwv sensor: olci From b2426afc2d991aef01b956994491fc7ac7355ab4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Benjamin=20R=C3=B6sner?= Date: Thu, 5 Dec 2024 11:28:28 +0100 Subject: [PATCH 245/340] change platform name to be conform to wmo space oscar --- satpy/readers/hdfeos_base.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/hdfeos_base.py b/satpy/readers/hdfeos_base.py index db8d8db188..e1a76bec6b 100644 --- a/satpy/readers/hdfeos_base.py +++ b/satpy/readers/hdfeos_base.py @@ -301,7 +301,7 @@ def _get_good_data_mask(self, data_arr, is_category=False): def _add_satpy_metadata(self, data_id: DataID, data_arr: xr.DataArray): """Add metadata that is specific to Satpy.""" new_attrs = { - "platform_name": "EOS-" + self.metadata_platform_name, + "platform_name": self.metadata_platform_name, "sensor": "modis", } From c475513a2b9f613be96d0c0926777f84f9148c2a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Benjamin=20R=C3=B6sner?= Date: Thu, 5 Dec 2024 13:34:46 +0100 Subject: [PATCH 246/340] remove EOS --- satpy/tests/reader_tests/modis_tests/test_modis_l1b.py | 2 +- 1 file changed, 1 
insertion(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py b/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py index 47f5f92c8e..94a21c2ef8 100644 --- a/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py +++ b/satpy/tests/reader_tests/modis_tests/test_modis_l1b.py @@ -47,7 +47,7 @@ def _check_shared_metadata(data_arr): assert data_arr.attrs["sensor"] == "modis" - assert data_arr.attrs["platform_name"] == "EOS-Terra" + assert data_arr.attrs["platform_name"] == "Terra" assert "rows_per_scan" in data_arr.attrs assert isinstance(data_arr.attrs["rows_per_scan"], int) assert data_arr.attrs["reader"] == "modis_l1b" From bcced60a3aa0b84d9fd23466f9fb1ecadda00324 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Benjamin=20R=C3=B6sner?= Date: Thu, 5 Dec 2024 13:40:24 +0100 Subject: [PATCH 247/340] remove EOS --- satpy/tests/reader_tests/modis_tests/test_modis_l2.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/modis_tests/test_modis_l2.py b/satpy/tests/reader_tests/modis_tests/test_modis_l2.py index 8ca515de4e..d827e18215 100644 --- a/satpy/tests/reader_tests/modis_tests/test_modis_l2.py +++ b/satpy/tests/reader_tests/modis_tests/test_modis_l2.py @@ -44,7 +44,7 @@ def _check_shared_metadata(data_arr, expect_area=False): assert data_arr.attrs["sensor"] == "modis" - assert data_arr.attrs["platform_name"] == "EOS-Terra" + assert data_arr.attrs["platform_name"] == "Terra" assert "rows_per_scan" in data_arr.attrs assert isinstance(data_arr.attrs["rows_per_scan"], int) assert data_arr.attrs["reader"] == "modis_l2" From f92ceaa704d783fe83ebf2ff614a2416064d7ce7 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Thu, 5 Dec 2024 12:49:47 +0000 Subject: [PATCH 248/340] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- satpy/tests/behave/features/image_comparison.feature | 2 +- 1 file 
changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/behave/features/image_comparison.feature b/satpy/tests/behave/features/image_comparison.feature index 23bd550e6d..3352db70b4 100755 --- a/satpy/tests/behave/features/image_comparison.feature +++ b/satpy/tests/behave/features/image_comparison.feature @@ -10,4 +10,4 @@ Feature: Image Comparison |GOES17 |airmass | |GOES16 |airmass | |GOES16 |ash | - |GOES17 |ash | \ No newline at end of file + |GOES17 |ash | From c6af00dc3bd864bffbd6784a8d04f85251ee2eb6 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 6 Dec 2024 10:49:18 -0600 Subject: [PATCH 249/340] Remove version limit on pytest in CI This was added due to pytest-lazy-fixture but the replacement package pytest-lazy-fixtures fixes compatibility. --- continuous_integration/environment.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/continuous_integration/environment.yaml b/continuous_integration/environment.yaml index ec5668c8e4..d1bb979025 100644 --- a/continuous_integration/environment.yaml +++ b/continuous_integration/environment.yaml @@ -43,7 +43,7 @@ dependencies: - python-eccodes # 2.19.1 seems to cause library linking issues - eccodes>=2.20 - - pytest<8.0.0 + - pytest - pytest-cov - fsspec - botocore>=1.33 From 1880cf445e16952b0cb6fa967be1a8d4124ae905 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Fri, 6 Dec 2024 10:54:20 -0600 Subject: [PATCH 250/340] Fix parallax test compatibility with pytest 8 --- satpy/tests/modifier_tests/test_parallax.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/satpy/tests/modifier_tests/test_parallax.py b/satpy/tests/modifier_tests/test_parallax.py index 276ff0ebd4..63ddbd8caf 100644 --- a/satpy/tests/modifier_tests/test_parallax.py +++ b/satpy/tests/modifier_tests/test_parallax.py @@ -18,6 +18,7 @@ import math import os import unittest.mock +import warnings import dask.array as da import dask.config @@ -368,13 +369,13 @@ def 
test_correct_area_clearsky_different_resolutions(self, res1, res2):
             resolution=res2,
             area_extent=[-1, -1, 1, 1])
 
-        with pytest.warns(None) as record:
+        with warnings.catch_warnings():
+            warnings.simplefilter("error")
             sc = make_fake_scene(
                 {"CTH_clear": np.full(area1.shape, np.nan)},
                 daskify=False,
                 area=area1,
                 common_attrs=_get_attrs(0, 0, 35_000))
-        assert len(record) == 0
 
         corrector = ParallaxCorrection(area2)
         new_area = corrector(sc["CTH_clear"])

From 24c72423cffd032ac5b6ee1d4b6b63bb001dc81d Mon Sep 17 00:00:00 2001
From: Antonio Valentino
Date: Sat, 7 Dec 2024 16:03:04 +0100
Subject: [PATCH 251/340] Drop dependency on xarray-datatree

DataTree is now part of xarray since v2024.10.0. An explicit
dependency on the external package xarray-datatree is no longer needed.
---
 pyproject.toml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index 65267c8548..9ed0eda02d 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -62,7 +62,7 @@ seviri_l2_bufr = ["eccodes"]
 seviri_l2_grib = ["eccodes"]
 hsaf_grib = ["pygrib"]
 remote_reading = ["fsspec"]
-insat_3d = ["xarray-datatree"]
+insat_3d = ["xarray>=2024.10.0"]
 gms5-vissr_l1b = ["numba"]
 # Writers:
 cf = ["h5netcdf >= 0.7.3"]
@@ -87,7 +87,7 @@ satpos_from_tle = ["skyfield", "astropy"]
 tests = ["behave", "h5py", "netCDF4", "pyhdf", "imageio",
          "rasterio", "geoviews", "trollimage", "fsspec", "bottleneck",
          "rioxarray", "pytest", "pytest-lazy-fixtures", "defusedxml",
-         "s3fs", "eccodes", "h5netcdf", "xarray-datatree",
+         "s3fs", "eccodes", "h5netcdf", "xarray>=2024.10.0",
          "skyfield", "ephem", "pint-xarray", "astropy", "dask-image",
          "python-geotiepoints", "numba"]
 dev = ["satpy[doc,tests]"]

From 5984c29df8bbe8851c20c343fcda646357c3a5d6 Mon Sep 17 00:00:00 2001
From: Antonio Valentino
Date: Sat, 7 Dec 2024 16:07:55 +0100
Subject: [PATCH 252/340] Remove duplicate entry in AUTHORS.md

---
 AUTHORS.md | 1 -
 1 file changed, 1 deletion(-)

diff --git a/AUTHORS.md b/AUTHORS.md
index
903df59c76..4e3821c54e 100644 --- a/AUTHORS.md +++ b/AUTHORS.md @@ -106,6 +106,5 @@ The following people have made contributions to this project: - [Clement Laplace (ClementLaplace)](https://github.com/ClementLaplace) - [Will Sharpe (wjsharpe)](https://github.com/wjsharpe) - [Sara Hörnquist (shornqui)](https://github.com/shornqui) -- [Antonio Valentino](https://github.com/avalentino) - [Clément (ludwigvonkoopa)](https://github.com/ludwigVonKoopa) - [Xuanhan Lai (sgxl)](https://github.com/sgxl) From 576e94180b584a33706bc84c9f633016bdcb4130 Mon Sep 17 00:00:00 2001 From: Gerrit Holl Date: Mon, 9 Dec 2024 16:28:17 +0100 Subject: [PATCH 253/340] Started behave test for FCI cloudtop --- .../behave/features/image_comparison.feature | 15 ++++++++------- .../behave/features/steps/image_comparison.py | 15 ++++++++------- 2 files changed, 16 insertions(+), 14 deletions(-) diff --git a/satpy/tests/behave/features/image_comparison.feature b/satpy/tests/behave/features/image_comparison.feature index 3352db70b4..03064fe824 100755 --- a/satpy/tests/behave/features/image_comparison.feature +++ b/satpy/tests/behave/features/image_comparison.feature @@ -1,13 +1,14 @@ Feature: Image Comparison Scenario Outline: Compare generated image with reference image - Given I have a reference image file from - When I generate a new image file from + Given I have a reference image file from resampled to + When I generate a new image file from with for Then the generated image should be the same as the reference image Examples: - |satellite |composite | - |GOES17 |airmass | - |GOES16 |airmass | - |GOES16 |ash | - |GOES17 |ash | + |satellite |composite | reader | area + |GOES17 |airmass | abi_l1b | null + |GOES16 |airmass | abi_l1b | null + |GOES16 |ash | abi_l1b | null + |GOES17 |ash | abi_l1b | null + |METEOSAT12 | cloudtop | fci_l1b_nc | north_atlantic diff --git a/satpy/tests/behave/features/steps/image_comparison.py b/satpy/tests/behave/features/steps/image_comparison.py index 
17a17cdeeb..6c658ceaac 100644 --- a/satpy/tests/behave/features/steps/image_comparison.py +++ b/satpy/tests/behave/features/steps/image_comparison.py @@ -51,17 +51,18 @@ def setup_hooks(): use_fixture(before_all, Context) setup_hooks() -@given("I have a {composite} reference image file from {satellite}") -def step_given_reference_image(context, composite, satellite): +@given("I have a {composite} reference image file from {satellite} resampled to ") +def step_given_reference_image(context, composite, satellite, area): """Prepare a reference image.""" - reference_image = f"reference_image_{satellite}_{composite}.png" + reference_image = f"reference_image_{satellite}_{composite}_{area}.png" context.reference_image = cv2.imread(f"{ext_data_path}/reference_images/{reference_image}") context.satellite = satellite context.composite = composite + context.area = area -@when("I generate a new {composite} image file from {satellite}") -def step_when_generate_image(context, composite, satellite): +@when("I generate a new {composite} image file from {satellite} with {reader} for {area}") +def step_when_generate_image(context, composite, satellite, reader, area): """Generate test images.""" os.environ["OMP_NUM_THREADS"] = os.environ["MKL_NUM_THREADS"] = "2" os.environ["PYTROLL_CHUNK_SIZE"] = "1024" @@ -71,13 +72,13 @@ def step_when_generate_image(context, composite, satellite): # Get the list of satellite files to open filenames = glob(f"{ext_data_path}/satellite_data/{satellite}/*.nc") - scn = Scene(reader="abi_l1b", filenames=filenames) + scn = Scene(reader=reader, filenames=filenames) scn.load([composite]) # Save the generated image in the generated folder generated_image_path = os.path.join(context.test_results_dir, "generated", - f"generated_{context.satellite}_{context.composite}.png") + f"generated_{context.satellite}_{context.composite}_{context.area}.png") scn.save_datasets(writer="simple_image", filename=generated_image_path) # Save the generated image in the context 
From d4a1e20adbde3d2701bb77ddff2de4ef0aaeee6a Mon Sep 17 00:00:00 2001 From: Gerrit Holl Date: Mon, 9 Dec 2024 18:57:10 +0100 Subject: [PATCH 254/340] Continue development of FCI imagery tests --- .../behave/features/steps/image_comparison.py | 18 ++++- utils/create_reference.py | 74 +++++++++++++++---- 2 files changed, 76 insertions(+), 16 deletions(-) diff --git a/satpy/tests/behave/features/steps/image_comparison.py b/satpy/tests/behave/features/steps/image_comparison.py index 6c658ceaac..3891e8f5dc 100644 --- a/satpy/tests/behave/features/steps/image_comparison.py +++ b/satpy/tests/behave/features/steps/image_comparison.py @@ -24,12 +24,21 @@ import dask import numpy as np from behave import given, then, when +from pyresample.area_config import create_area_def from satpy import Scene ext_data_path = "/app/ext_data" threshold = 2000 +# test areas used only for testing +test_areas = { + "north_atlantic": create_area_def( + "ofz", 4087, description="oceanographer fracture zone", + area_extent=[-4230000, 4675000, -3562000, 5232000], + resolution=750) + } + def before_all(context): """Define a before_all hook to create the timestamp and test results directory.""" tm = datetime.now() @@ -51,7 +60,7 @@ def setup_hooks(): use_fixture(before_all, Context) setup_hooks() -@given("I have a {composite} reference image file from {satellite} resampled to ") +@given("I have a {composite} reference image file from {satellite} resampled to {area}") def step_given_reference_image(context, composite, satellite, area): """Prepare a reference image.""" reference_image = f"reference_image_{satellite}_{composite}_{area}.png" @@ -76,10 +85,15 @@ def step_when_generate_image(context, composite, satellite, reader, area): scn.load([composite]) + if area == "null": + ls = scn + else: + ls = scn.resample(test_areas.get(area, area)) + # Save the generated image in the generated folder generated_image_path = os.path.join(context.test_results_dir, "generated", 
f"generated_{context.satellite}_{context.composite}_{context.area}.png") - scn.save_datasets(writer="simple_image", filename=generated_image_path) + ls.save_datasets(writer="simple_image", filename=generated_image_path) # Save the generated image in the context context.generated_image = cv2.imread(generated_image_path) diff --git a/utils/create_reference.py b/utils/create_reference.py index 8604cb45c0..489a37cddf 100644 --- a/utils/create_reference.py +++ b/utils/create_reference.py @@ -28,24 +28,70 @@ imagery. """ -import sys +import argparse +import pathlib from glob import glob -from dask.diagnostics import ProgressBar - from satpy import Scene +from satpy.tests.behave.features.steps.image_comparison import test_areas + + +def generate_images(reader, filenames, area, composites, outdir): + """Generate reference images for testing purposes.""" + from dask.diagnostics import ProgressBar + scn = Scene(reader="abi_l1b", filenames=filenames) + + composites = ["ash", "airmass"] + scn.load(composites) + if area is None: + ls = scn + elif area == "native": + ls = scn.resample(resampler="native") + else: + ls = scn.resample(test_areas.get(area, area)) + + with ProgressBar(): + ls.save_datasets(writer="simple_image", filename=outdir + + "/satpy-reference-image-{platform_name}-{sensor}-{start_time:%Y%m%d%H%M}-{area.area_id}-{name}.png") + +def get_parser(): + """Return argument parser.""" + parser = argparse.ArgumentParser(description=__doc__) + + parser.add_argument( + "satellite", action="store", type=str, + help="Satellite name.") + + parser.add_argument( + "reader", action="store", type=str, + help="Reader name.") + + parser.add_argument( + "area", action="store", type=str, + help="Area name, 'null' (no resampling) or 'native' (native resampling)") + + parser.add_argument( + "basedir", action="store", type=pathlib.Path, + help="Root directory where reference input data are contained.") + + parser.add_argument( + "outdir", action="store", type=pathlib.Path, + 
help="Directory where to write resulting images.") -ext_data_path = sys.argv[1] -outdir = sys.argv[2] -satellite = sys.argv[3] + return parser -filenames = glob(f"{ext_data_path}/satellite_data/{satellite}/*.nc") +def main(): + """Main function.""" + parsed = get_parser().parse_args() + ext_data_path = parsed.basedir + reader = parsed.reader + area = parsed.area + outdir = parsed.outdir + satellite = parsed.satellite -scn = Scene(reader="abi_l1b", filenames=filenames) + filenames = glob(f"{ext_data_path}/satellite_data/{satellite}/*") + generate_images(reader, filenames, None if area.lower() == "null" else + area, ["airmass", "ash"], outdir) -composites = ["ash", "airmass"] -scn.load(composites) -ls = scn.resample(resampler="native") -with ProgressBar(): - ls.save_datasets(writer="simple_image", filename=outdir + - "/satpy-reference-image-{platform_name}-{sensor}-{start_time:%Y%m%d%H%M}-{area.area_id}-{name}.png") +if __name__ == "__main__": + main() From 1f562ae413bbf0871d98be6680be7fe28870f4db Mon Sep 17 00:00:00 2001 From: Gerrit Holl Date: Tue, 10 Dec 2024 10:01:08 +0100 Subject: [PATCH 255/340] Change reference data and area for ABI test --- satpy/tests/behave/features/image_comparison.feature | 2 +- satpy/tests/behave/features/steps/image_comparison.py | 11 +---------- utils/create_reference.py | 3 +-- 3 files changed, 3 insertions(+), 13 deletions(-) diff --git a/satpy/tests/behave/features/image_comparison.feature b/satpy/tests/behave/features/image_comparison.feature index 03064fe824..5bb6c9c2c9 100755 --- a/satpy/tests/behave/features/image_comparison.feature +++ b/satpy/tests/behave/features/image_comparison.feature @@ -11,4 +11,4 @@ Feature: Image Comparison |GOES16 |airmass | abi_l1b | null |GOES16 |ash | abi_l1b | null |GOES17 |ash | abi_l1b | null - |METEOSAT12 | cloudtop | fci_l1b_nc | north_atlantic + |METEOSAT12 | cloudtop | fci_l1b_nc | sve diff --git a/satpy/tests/behave/features/steps/image_comparison.py 
b/satpy/tests/behave/features/steps/image_comparison.py index 3891e8f5dc..74f0af3229 100644 --- a/satpy/tests/behave/features/steps/image_comparison.py +++ b/satpy/tests/behave/features/steps/image_comparison.py @@ -24,21 +24,12 @@ import dask import numpy as np from behave import given, then, when -from pyresample.area_config import create_area_def from satpy import Scene ext_data_path = "/app/ext_data" threshold = 2000 -# test areas used only for testing -test_areas = { - "north_atlantic": create_area_def( - "ofz", 4087, description="oceanographer fracture zone", - area_extent=[-4230000, 4675000, -3562000, 5232000], - resolution=750) - } - def before_all(context): """Define a before_all hook to create the timestamp and test results directory.""" tm = datetime.now() @@ -88,7 +79,7 @@ def step_when_generate_image(context, composite, satellite, reader, area): if area == "null": ls = scn else: - ls = scn.resample(test_areas.get(area, area)) + ls = scn.resample(area) # Save the generated image in the generated folder generated_image_path = os.path.join(context.test_results_dir, "generated", diff --git a/utils/create_reference.py b/utils/create_reference.py index 489a37cddf..ca0d0a180d 100644 --- a/utils/create_reference.py +++ b/utils/create_reference.py @@ -33,7 +33,6 @@ from glob import glob from satpy import Scene -from satpy.tests.behave.features.steps.image_comparison import test_areas def generate_images(reader, filenames, area, composites, outdir): @@ -48,7 +47,7 @@ def generate_images(reader, filenames, area, composites, outdir): elif area == "native": ls = scn.resample(resampler="native") else: - ls = scn.resample(test_areas.get(area, area)) + ls = scn.resample(area) with ProgressBar(): ls.save_datasets(writer="simple_image", filename=outdir + From c1b2d9d233e674a514c2ab0649fbfbc246c2babe Mon Sep 17 00:00:00 2001 From: "Adam.Dybbroe" Date: Wed, 11 Dec 2024 10:33:55 +0100 Subject: [PATCH 256/340] Add support for reading the EUMETSAT AWS/EPS-Sterna L1b data 
Signed-off-by: Adam.Dybbroe --- satpy/etc/readers/aws_l1b_nc.yaml | 1 + satpy/etc/readers/eps_sterna_l1b_nc.yaml | 532 +++++++++++++++++++++++ satpy/readers/aws_l1b.py | 15 +- 3 files changed, 546 insertions(+), 2 deletions(-) create mode 100644 satpy/etc/readers/eps_sterna_l1b_nc.yaml diff --git a/satpy/etc/readers/aws_l1b_nc.yaml b/satpy/etc/readers/aws_l1b_nc.yaml index 68e395d31d..0d6df8dc4e 100644 --- a/satpy/etc/readers/aws_l1b_nc.yaml +++ b/satpy/etc/readers/aws_l1b_nc.yaml @@ -531,4 +531,5 @@ file_types: file_reader: !!python/name:satpy.readers.aws_l1b.AWSL1BFile file_patterns: [ 'W_XX-OHB-Stockholm,SAT,{platform_name}-MWR-1B-RAD_C_OHB_{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_T_B____.nc', + 'W_XX-OHB-Stockholm,SAT,{platform_name}-MWR-1B-RAD_C_OHB__{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_T_B____.nc', 'W_XX-OHB-Stockholm,SAT,{platform_name}-MWR-1B-RAD_C_OHB_{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_T_B____radsim.nc'] diff --git a/satpy/etc/readers/eps_sterna_l1b_nc.yaml b/satpy/etc/readers/eps_sterna_l1b_nc.yaml new file mode 100644 index 0000000000..026aefc918 --- /dev/null +++ b/satpy/etc/readers/eps_sterna_l1b_nc.yaml @@ -0,0 +1,532 @@ +reader: + name: eps_sterna_l1b_nc + short_name: EPS-Sterna L1B RAD NetCDF4 + long_name: EPS-Sterna L1B Radiance (NetCDF4) + description: Reader for the EUMETSAT EPS-Sterna Sounder level-1b files in netCDF4.
+ reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader + sensors: [aws,] + status: Beta + supports_fsspec: false + + data_identification_keys: + name: + required: true + frequency_double_sideband: + type: !!python/name:satpy.readers.pmw_channels_definitions.FrequencyDoubleSideBand + frequency_range: + type: !!python/name:satpy.readers.pmw_channels_definitions.FrequencyRange + resolution: + polarization: + enum: + - QH + - QV + horn: + enum: + - "1" + - "2" + - "3" + - "4" + calibration: + enum: + - brightness_temperature + transitive: true + modifiers: + required: true + default: [] + type: !!python/name:satpy.dataset.ModifierTuple + + coord_identification_keys: + name: + required: true + resolution: + polarization: + enum: + - QH + - QV + horn: + enum: + - "1" + - "2" + - "3" + - "4" + +datasets: + '1': + name: '1' + frequency_range: + central: 50.3 + bandwidth: 0.180 + unit: GHz + polarization: 'QH' + resolution: 40000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + horn: "1" + coordinates: [longitude, latitude] + file_type: eps_sterna_l1b_nc + file_key: data/calibration/toa_brightness_temperature + '2': + name: '2' + frequency_range: + central: 52.8 + bandwidth: 0.400 + unit: GHz + polarization: 'QH' + resolution: 40000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + horn: "1" + coordinates: [longitude, latitude] + file_type: eps_sterna_l1b_nc + file_key: data/calibration/toa_brightness_temperature + '3': + name: '3' + frequency_range: + central: 53.246 + bandwidth: 0.300 + unit: GHz + polarization: 'QH' + resolution: 40000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + horn: "1" + coordinates: [longitude, latitude] + file_type: eps_sterna_l1b_nc + file_key: data/calibration/toa_brightness_temperature + '4': + name: '4' + frequency_range: + central: 53.596 + bandwidth: 0.370 + unit: GHz + polarization: 'QH' + resolution: 40000 + 
calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + horn: "1" + coordinates: [longitude, latitude] + file_type: eps_sterna_l1b_nc + file_key: data/calibration/toa_brightness_temperature + '5': + name: '5' + frequency_range: + central: 54.4 + bandwidth: 0.400 + unit: GHz + polarization: 'QH' + resolution: 40000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + horn: "1" + coordinates: [longitude, latitude] + file_type: eps_sterna_l1b_nc + file_key: data/calibration/toa_brightness_temperature + '6': + name: '6' + frequency_range: + central: 54.94 + bandwidth: 0.400 + unit: GHz + polarization: 'QH' + resolution: 40000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + horn: "1" + coordinates: [longitude, latitude] + file_type: eps_sterna_l1b_nc + file_key: data/calibration/toa_brightness_temperature + '7': + name: '7' + frequency_range: + central: 55.5 + bandwidth: 0.330 + unit: GHz + polarization: 'QH' + resolution: 40000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + horn: "1" + coordinates: [longitude, latitude] + file_type: eps_sterna_l1b_nc + file_key: data/calibration/toa_brightness_temperature + '8': + name: '8' + frequency_range: + central: 57.290344 + bandwidth: 0.330 + unit: GHz + polarization: 'QH' + resolution: 40000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + horn: "1" + coordinates: [longitude, latitude] + file_type: eps_sterna_l1b_nc + file_key: data/calibration/toa_brightness_temperature + '9': + name: '9' + frequency_range: + central: 89.0 + bandwidth: 4.0 + unit: GHz + polarization: 'QV' + resolution: 20000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + horn: "2" + coordinates: [longitude, latitude] + file_type: eps_sterna_l1b_nc + file_key: data/calibration/toa_brightness_temperature + '10': + name: '10' + frequency_range: + 
central: 165.5 + bandwidth: 2.700 + unit: GHz + polarization: 'QH' + resolution: 20000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + horn: "3" + coordinates: [longitude, latitude] + file_type: eps_sterna_l1b_nc + file_key: data/calibration/toa_brightness_temperature + '11': + name: '11' + frequency_range: + central: 176.311 + bandwidth: 2.0 + unit: GHz + polarization: 'QV' + resolution: 10000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + horn: "3" + coordinates: [longitude, latitude] + file_type: eps_sterna_l1b_nc + file_key: data/calibration/toa_brightness_temperature + '12': + name: '12' + frequency_range: + central: 178.811 + bandwidth: 2.0 + unit: GHz + polarization: 'QV' + resolution: 10000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + horn: "3" + coordinates: [longitude, latitude] + file_type: eps_sterna_l1b_nc + file_key: data/calibration/toa_brightness_temperature + '13': + name: '13' + frequency_range: + central: 180.311 + bandwidth: 1.0 + unit: GHz + polarization: 'QV' + resolution: 10000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + horn: "3" + coordinates: [longitude, latitude] + file_type: eps_sterna_l1b_nc + file_key: data/calibration/toa_brightness_temperature + '14': + name: '14' + frequency_range: + central: 181.511 + bandwidth: 1.0 + unit: GHz + polarization: 'QV' + resolution: 10000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + horn: "3" + coordinates: [longitude, latitude] + file_type: eps_sterna_l1b_nc + file_key: data/calibration/toa_brightness_temperature + '15': + name: '15' + frequency_range: + central: 182.311 + bandwidth: 0.5 + unit: GHz + polarization: 'QV' + resolution: 10000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + horn: "3" + coordinates: [longitude, latitude] + file_type: 
eps_sterna_l1b_nc + file_key: data/calibration/toa_brightness_temperature + '16': + name: '16' + frequency_double_sideband: + central: 325.15 + side: 1.2 + bandwidth: 0.8 + unit: GHz + polarization: 'QV' + resolution: 10000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + horn: "4" + coordinates: [longitude, latitude] + file_type: eps_sterna_l1b_nc + file_key: data/calibration/toa_brightness_temperature + '17': + name: '17' + frequency_double_sideband: + central: 325.15 + side: 2.4 + bandwidth: 1.2 + unit: GHz + polarization: 'QV' + resolution: 10000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + horn: "4" + coordinates: [longitude, latitude] + file_type: eps_sterna_l1b_nc + file_key: data/calibration/toa_brightness_temperature + '18': + name: '18' + frequency_double_sideband: + central: 325.15 + side: 4.1 + bandwidth: 1.8 + unit: GHz + polarization: 'QV' + resolution: 10000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + horn: "4" + coordinates: [longitude, latitude] + file_type: eps_sterna_l1b_nc + file_key: data/calibration/toa_brightness_temperature + '19': + name: '19' + frequency_double_sideband: + central: 325.15 + side: 6.6 + bandwidth: 2.8 + unit: GHz + polarization: 'QV' + resolution: 10000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + horn: "4" + coordinates: [longitude, latitude] + file_type: eps_sterna_l1b_nc + file_key: data/calibration/toa_brightness_temperature + +# --- Coordinates --- + + longitude: + name: longitude + file_type: eps_sterna_l1b_nc + standard_name: longitude + units: degrees_east + horn: ["1", "2", "3", "4"] + file_key: data/navigation/longitude + + + latitude: + name: latitude + file_type: eps_sterna_l1b_nc + standard_name: latitude + units: degrees_north + horn: ["1", "2", "3", "4"] + file_key: data/navigation/latitude + + +# --- Navigation data --- + + solar_azimuth_horn1: + 
name: solar_azimuth_horn1 + file_type: eps_sterna_l1b_nc + file_key: data/navigation/solar_azimuth_angle + standard_name: solar_azimuth_angle + horn: "1" + coordinates: + - longitude + - latitude + + solar_azimuth_horn2: + name: solar_azimuth_horn2 + file_type: eps_sterna_l1b_nc + file_key: data/navigation/solar_azimuth_angle + standard_name: solar_azimuth_angle + horn: "2" + coordinates: + - longitude + - latitude + + solar_azimuth_horn3: + name: solar_azimuth_horn3 + file_type: eps_sterna_l1b_nc + file_key: data/navigation/solar_azimuth_angle + standard_name: solar_azimuth_angle + horn: "3" + coordinates: + - longitude + - latitude + + solar_azimuth_horn4: + name: solar_azimuth_horn4 + file_type: eps_sterna_l1b_nc + file_key: data/navigation/solar_azimuth_angle + standard_name: solar_azimuth_angle + horn: "4" + coordinates: + - longitude + - latitude + + solar_zenith_horn1: + name: solar_zenith_horn1 + file_type: eps_sterna_l1b_nc + file_key: data/navigation/solar_zenith_angle + standard_name: solar_zenith_angle + horn: "1" + coordinates: + - longitude + - latitude + + solar_zenith_horn2: + name: solar_zenith_horn2 + file_type: eps_sterna_l1b_nc + file_key: data/navigation/solar_zenith_angle + standard_name: solar_zenith_angle + horn: "2" + coordinates: + - longitude + - latitude + + solar_zenith_horn3: + name: solar_zenith_horn3 + file_type: eps_sterna_l1b_nc + file_key: data/navigation/solar_zenith_angle + standard_name: solar_zenith_angle + horn: "3" + coordinates: + - longitude + - latitude + + solar_zenith_horn4: + name: solar_zenith_horn4 + file_type: eps_sterna_l1b_nc + file_key: data/navigation/solar_zenith_angle + standard_name: solar_zenith_angle + horn: "4" + coordinates: + - longitude + - latitude + + satellite_zenith_horn1: + name: satellite_zenith_horn1 + file_type: eps_sterna_l1b_nc + file_key: data/navigation/satellite_zenith_angle + standard_name: satellite_zenith_angle + horn: "1" + coordinates: + - longitude + - latitude + + 
satellite_zenith_horn2: + name: satellite_zenith_horn2 + file_type: eps_sterna_l1b_nc + file_key: data/navigation/satellite_zenith_angle + standard_name: satellite_zenith_angle + horn: "2" + coordinates: + - longitude + - latitude + + satellite_zenith_horn3: + name: satellite_zenith_horn3 + file_type: eps_sterna_l1b_nc + file_key: data/navigation/satellite_zenith_angle + standard_name: satellite_zenith_angle + horn: "3" + coordinates: + - longitude + - latitude + + satellite_zenith_horn4: + name: satellite_zenith_horn4 + file_type: eps_sterna_l1b_nc + file_key: data/navigation/satellite_zenith_angle + standard_name: satellite_zenith_angle + horn: "4" + coordinates: + - longitude + - latitude + + satellite_azimuth_horn1: + name: satellite_azimuth_horn1 + file_type: eps_sterna_l1b_nc + file_key: data/navigation/satellite_azimuth_angle + standard_name: satellite_azimuth_angle + horn: "1" + coordinates: + - longitude + - latitude + + satellite_azimuth_horn2: + name: satellite_azimuth_horn2 + file_type: eps_sterna_l1b_nc + file_key: data/navigation/satellite_azimuth_angle + standard_name: satellite_azimuth_angle + horn: "2" + coordinates: + - longitude + - latitude + + satellite_azimuth_horn3: + name: satellite_azimuth_horn3 + file_type: eps_sterna_l1b_nc + file_key: data/navigation/satellite_azimuth_angle + standard_name: satellite_azimuth_angle + horn: "3" + coordinates: + - longitude + - latitude + + satellite_azimuth_horn4: + name: satellite_azimuth_horn4 + file_type: eps_sterna_l1b_nc + file_key: data/navigation/satellite_azimuth_angle + standard_name: satellite_azimuth_angle + horn: "4" + coordinates: + - longitude + - latitude + +file_types: + eps_sterna_l1b_nc: + # W_XX-EUMETSAT-Darmstadt,SAT,AWS1-MWR-1B-RAD_C_EUMT_20241121085911_G_D_20241109234502_20241110004559_T_N____.nc + file_reader: !!python/name:satpy.readers.aws_l1b.AWSL1BFile + file_patterns: [ + 
'W_XX-EUMETSAT-Darmstadt,SAT,{platform_name}-MWR-1B-RAD_C_EUMT_{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_T_N____.nc', + ] diff --git a/satpy/readers/aws_l1b.py b/satpy/readers/aws_l1b.py index ce07c209e1..9023f6f0ab 100644 --- a/satpy/readers/aws_l1b.py +++ b/satpy/readers/aws_l1b.py @@ -15,6 +15,10 @@ """Reader for the Arctic Weather Satellite (AWS) Sounder level-1b data. Test data provided by ESA August 23, 2023. + +Sample data for five orbits in September 2024 provided by ESA to the Science +Advisory Group for MWS and AWS, November 26, 2024. + """ import logging @@ -47,6 +51,11 @@ def __init__(self, filename, filename_info, filetype_info, auto_maskandscale=Tru cache_handle=True) self.filename_info = filename_info + if filetype_info["file_type"].startswith("eps_sterna"): + self._feed_horn_group_name = "n_feedhorns" + else: + self._feed_horn_group_name = "n_geo_groups" + @property def start_time(self): """Get the start time.""" @@ -147,11 +156,13 @@ def _get_channel_data(self, dataset_id, dataset_info): return channel_data.sel(n_channels=dataset_id["name"]).drop_vars("n_channels") def _get_navigation_data(self, dataset_id, dataset_info): + """Get the navigation (geolocation) data for one feed horn.""" geo_data = self[dataset_info["file_key"]] - geo_data.coords["n_geo_groups"] = ["1", "2", "3", "4"] + geo_data.coords[self._feed_horn_group_name] = ["1", "2", "3", "4"] geo_data = geo_data.rename({"n_fovs": "x", "n_scans": "y"}) horn = dataset_id["horn"].name - return geo_data.sel(n_geo_groups=horn).drop_vars("n_geo_groups") + _selection = {self._feed_horn_group_name: horn} + return geo_data.sel(_selection).drop_vars(self._feed_horn_group_name) def mask_and_scale(data_array): From b2775eb03564c1e283311bee710e415df3bbcffd Mon Sep 17 00:00:00 2001 From: "Adam.Dybbroe" Date: Wed, 11 Dec 2024 10:39:38 +0100 Subject: [PATCH 257/340] Add support for reading ESA AWS L1c Signed-off-by: Adam.Dybbroe --- 
satpy/etc/readers/aws_l1c_nc.yaml | 375 ++++++++++++++++++++++++++++++ satpy/readers/aws_l1c.py | 122 ++++++++++ 2 files changed, 497 insertions(+) create mode 100644 satpy/etc/readers/aws_l1c_nc.yaml create mode 100644 satpy/readers/aws_l1c.py diff --git a/satpy/etc/readers/aws_l1c_nc.yaml b/satpy/etc/readers/aws_l1c_nc.yaml new file mode 100644 index 0000000000..8abd9150ef --- /dev/null +++ b/satpy/etc/readers/aws_l1c_nc.yaml @@ -0,0 +1,375 @@ +reader: + name: aws_l1c_nc + short_name: AWS L1C RAD NetCDF4 + long_name: AWS L1C Radiance (NetCDF4) + description: Reader for the ESA AWS (Arctic Weather Satellite) Sounder level-1c files in netCDF4. + reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader + sensors: [aws,] + status: Beta + supports_fsspec: false + + data_identification_keys: + name: + required: true + frequency_double_sideband: + type: !!python/name:satpy.readers.pmw_channels_definitions.FrequencyDoubleSideBand + frequency_range: + type: !!python/name:satpy.readers.pmw_channels_definitions.FrequencyRange + resolution: + polarization: + enum: + - QH + - QV + calibration: + enum: + - brightness_temperature + transitive: true + modifiers: + required: true + default: [] + type: !!python/name:satpy.dataset.ModifierTuple + + coord_identification_keys: + name: + required: true + resolution: + polarization: + enum: + - QH + - QV + +datasets: + '1': + name: '1' + frequency_range: + central: 50.3 + bandwidth: 0.180 + unit: GHz + polarization: 'QH' + resolution: 40000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [longitude, latitude] + file_type: aws_l1c_nc + file_key: data/calibration/aws_toa_brightness_temperature + '2': + name: '2' + frequency_range: + central: 52.8 + bandwidth: 0.400 + unit: GHz + polarization: 'QH' + resolution: 40000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [longitude, latitude] + file_type: aws_l1c_nc + file_key: 
data/calibration/aws_toa_brightness_temperature + '3': + name: '3' + frequency_range: + central: 53.246 + bandwidth: 0.300 + unit: GHz + polarization: 'QH' + resolution: 40000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [longitude, latitude] + file_type: aws_l1c_nc + file_key: data/calibration/aws_toa_brightness_temperature + '4': + name: '4' + frequency_range: + central: 53.596 + bandwidth: 0.370 + unit: GHz + polarization: 'QH' + resolution: 40000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [longitude, latitude] + file_type: aws_l1c_nc + file_key: data/calibration/aws_toa_brightness_temperature + '5': + name: '5' + frequency_range: + central: 54.4 + bandwidth: 0.400 + unit: GHz + polarization: 'QH' + resolution: 40000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [longitude, latitude] + file_type: aws_l1c_nc + file_key: data/calibration/aws_toa_brightness_temperature + '6': + name: '6' + frequency_range: + central: 54.94 + bandwidth: 0.400 + unit: GHz + polarization: 'QH' + resolution: 40000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [longitude, latitude] + file_type: aws_l1c_nc + file_key: data/calibration/aws_toa_brightness_temperature + '7': + name: '7' + frequency_range: + central: 55.5 + bandwidth: 0.330 + unit: GHz + polarization: 'QH' + resolution: 40000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [longitude, latitude] + file_type: aws_l1c_nc + file_key: data/calibration/aws_toa_brightness_temperature + '8': + name: '8' + frequency_range: + central: 57.290344 + bandwidth: 0.330 + unit: GHz + polarization: 'QH' + resolution: 40000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [longitude, latitude] + file_type: aws_l1c_nc + 
file_key: data/calibration/aws_toa_brightness_temperature + '9': + name: '9' + frequency_range: + central: 89.0 + bandwidth: 4.0 + unit: GHz + polarization: 'QV' + resolution: 20000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [longitude, latitude] + file_type: aws_l1c_nc + file_key: data/calibration/aws_toa_brightness_temperature + '10': + name: '10' + frequency_range: + central: 165.5 + bandwidth: 2.700 + unit: GHz + polarization: 'QH' + resolution: 20000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [longitude, latitude] + file_type: aws_l1c_nc + file_key: data/calibration/aws_toa_brightness_temperature + '11': + name: '11' + frequency_range: + central: 176.311 + bandwidth: 2.0 + unit: GHz + polarization: 'QV' + resolution: 10000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [longitude, latitude] + file_type: aws_l1c_nc + file_key: data/calibration/aws_toa_brightness_temperature + '12': + name: '12' + frequency_range: + central: 178.811 + bandwidth: 2.0 + unit: GHz + polarization: 'QV' + resolution: 10000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [longitude, latitude] + file_type: aws_l1c_nc + file_key: data/calibration/aws_toa_brightness_temperature + '13': + name: '13' + frequency_range: + central: 180.311 + bandwidth: 1.0 + unit: GHz + polarization: 'QV' + resolution: 10000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [longitude, latitude] + file_type: aws_l1c_nc + file_key: data/calibration/aws_toa_brightness_temperature + '14': + name: '14' + frequency_range: + central: 181.511 + bandwidth: 1.0 + unit: GHz + polarization: 'QV' + resolution: 10000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [longitude, latitude] + file_type: 
aws_l1c_nc + file_key: data/calibration/aws_toa_brightness_temperature + '15': + name: '15' + frequency_range: + central: 182.311 + bandwidth: 0.5 + unit: GHz + polarization: 'QV' + resolution: 10000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [longitude, latitude] + file_type: aws_l1c_nc + file_key: data/calibration/aws_toa_brightness_temperature + '16': + name: '16' + frequency_double_sideband: + central: 325.15 + side: 1.2 + bandwidth: 0.8 + unit: GHz + polarization: 'QV' + resolution: 10000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [longitude, latitude] + file_type: aws_l1c_nc + file_key: data/calibration/aws_toa_brightness_temperature + '17': + name: '17' + frequency_double_sideband: + central: 325.15 + side: 2.4 + bandwidth: 1.2 + unit: GHz + polarization: 'QV' + resolution: 10000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [longitude, latitude] + file_type: aws_l1c_nc + file_key: data/calibration/aws_toa_brightness_temperature + '18': + name: '18' + frequency_double_sideband: + central: 325.15 + side: 4.1 + bandwidth: 1.8 + unit: GHz + polarization: 'QV' + resolution: 10000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [longitude, latitude] + file_type: aws_l1c_nc + file_key: data/calibration/aws_toa_brightness_temperature + '19': + name: '19' + frequency_double_sideband: + central: 325.15 + side: 6.6 + bandwidth: 2.8 + unit: GHz + polarization: 'QV' + resolution: 10000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [longitude, latitude] + file_type: aws_l1c_nc + file_key: data/calibration/aws_toa_brightness_temperature + +# --- Coordinates --- + + longitude: + name: longitude + file_type: aws_l1c_nc + standard_name: longitude + units: degrees_east + file_key: data/navigation/aws_lon + 
+ latitude: + name: latitude + file_type: aws_l1c_nc + standard_name: latitude + units: degrees_north + file_key: data/navigation/aws_lat + +# --- Navigation data --- + + solar_azimuth: + name: solar_azimuth + file_type: aws_l1c_nc + file_key: data/navigation/aws_solar_azimuth_angle + standard_name: solar_azimuth_angle + coordinates: + - longitude + - latitude + + solar_zenith: + name: solar_zenith + file_type: aws_l1c_nc + file_key: data/navigation/aws_solar_zenith_angle + standard_name: solar_zenith_angle + coordinates: + - longitude + - latitude + + satellite_azimuth: + name: satellite_azimuth + file_type: aws_l1c_nc + file_key: data/navigation/aws_satellite_azimuth_angle + standard_name: satellite_azimuth_angle + coordinates: + - longitude + - latitude + + satellite_zenith: + name: satellite_zenith + file_type: aws_l1c_nc + file_key: data/navigation/aws_satellite_zenith_angle + standard_name: satellite_zenith_angle + coordinates: + - longitude + - latitude + +file_types: + aws_l1c_nc: + # W_XX-OHB-Unknown,SAT,1-AWS-1B-RAD_C_OHB_20230707124607_G_D_20220621090100_20220621090618_T_B____.nc + # W_XX-OHB-Stockholm,SAT,AWS1-MWR-1B-RAD_C_OHB_20230823161321_G_D_20240115111111_20240115125434_T_B____.nc + # W_XX-OHB-Stockholm,SAT,AWS1-MWR-1B-RAD_C_OHB_20230816120142_G_D_20240115111111_20240115125434_T_B____radsim.nc + file_reader: !!python/name:satpy.readers.aws_l1c.AWSL1CFile + file_patterns: [ + 'W_XX-OHB-Stockholm,SAT,{platform_name}-MWR-1C-RAD_C_OHB_{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_T_B____.nc', + 'W_XX-OHB-Stockholm,SAT,{platform_name}-MWR-1C-RAD_C_OHB__{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_T_B____.nc',] diff --git a/satpy/readers/aws_l1c.py b/satpy/readers/aws_l1c.py new file mode 100644 index 0000000000..b4566c63f4 --- /dev/null +++ b/satpy/readers/aws_l1c.py @@ -0,0 +1,122 @@ +# Copyright (c) 2023, 2024 Pytroll Developers + +# This program is free software: you can 
redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. + +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . +"""Reader for the Arctic Weather Satellite (AWS) Sounder level-1c data. + +Sample data provided by ESA September 27, 2024. +""" + +import logging + +import xarray as xr + +from .netcdf_utils import NetCDF4FileHandler + +logger = logging.getLogger(__name__) + +DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S.%f" + +AWS_CHANNEL_NAMES = list(str(i) for i in range(1, 20)) + + +class AWSL1CFile(NetCDF4FileHandler): + """Class implementing the AWS L1c Filehandler. + + This class implements the ESA Arctic Weather Satellite (AWS) Level-1c + NetCDF reader. It is designed to be used through the :class:`~satpy.Scene` + class using the :mod:`~satpy.Scene.load` method with the reader + ``"aws_l1c_nc"``. 
+ + """ + + def __init__(self, filename, filename_info, filetype_info, auto_maskandscale=True): + """Initialize the handler.""" + super().__init__(filename, filename_info, filetype_info, + cache_var_size=10000, + cache_handle=True) + self.filename_info = filename_info + + @property + def start_time(self): + """Get the start time.""" + return self.filename_info["start_time"] + + @property + def end_time(self): + """Get the end time.""" + return self.filename_info["end_time"] + + @property + def sensor(self): + """Get the sensor name.""" + return "MWR" + + @property + def platform_name(self): + """Get the platform name.""" + return self.filename_info["platform_name"] + + def get_dataset(self, dataset_id, dataset_info): + """Get the data.""" + if dataset_id["name"] in AWS_CHANNEL_NAMES: + data_array = self._get_channel_data(dataset_id, dataset_info) + elif (dataset_id["name"] in ["longitude", "latitude", + "solar_azimuth", "solar_zenith", + "satellite_zenith", "satellite_azimuth"]): + data_array = self._get_navigation_data(dataset_id, dataset_info) + else: + raise NotImplementedError + + data_array = mask_and_scale(data_array) + if dataset_id["name"] == "longitude": + data_array = data_array.where(data_array <= 180, data_array - 360) + + data_array.attrs.update(dataset_info) + + data_array.attrs["platform_name"] = self.platform_name + data_array.attrs["sensor"] = self.sensor + return data_array + + def _get_channel_data(self, dataset_id, dataset_info): + channel_data = self[dataset_info["file_key"]] + channel_data.coords["n_channels"] = AWS_CHANNEL_NAMES + channel_data = channel_data.rename({"n_fovs": "x", "n_scans": "y"}) + return channel_data.sel(n_channels=dataset_id["name"]).drop_vars("n_channels") + + def _get_navigation_data(self, dataset_id, dataset_info): + geo_data = self[dataset_info["file_key"]] + geo_data = geo_data.rename({"n_fovs": "x", "n_scans": "y"}) + return geo_data + + +def mask_and_scale(data_array): + """Mask then scale the data array.""" + if 
"missing_value" in data_array.attrs: + with xr.set_options(keep_attrs=True): + data_array = data_array.where(data_array != data_array.attrs["missing_value"]) + data_array.attrs.pop("missing_value") + if "valid_max" in data_array.attrs: + with xr.set_options(keep_attrs=True): + data_array = data_array.where(data_array <= data_array.attrs["valid_max"]) + data_array.attrs.pop("valid_max") + if "valid_min" in data_array.attrs: + with xr.set_options(keep_attrs=True): + data_array = data_array.where(data_array >= data_array.attrs["valid_min"]) + data_array.attrs.pop("valid_min") + if "scale_factor" in data_array.attrs and "add_offset" in data_array.attrs: + with xr.set_options(keep_attrs=True): + data_array = data_array * data_array.attrs["scale_factor"] + data_array.attrs["add_offset"] + data_array.attrs.pop("scale_factor") + data_array.attrs.pop("add_offset") + return data_array From 41e7975ebf567b26789e625bd3aab56048c8b825 Mon Sep 17 00:00:00 2001 From: "Adam.Dybbroe" Date: Wed, 11 Dec 2024 11:22:17 +0100 Subject: [PATCH 258/340] Fix the tests Signed-off-by: Adam.Dybbroe --- satpy/tests/reader_tests/test_aws_l1b.py | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/satpy/tests/reader_tests/test_aws_l1b.py b/satpy/tests/reader_tests/test_aws_l1b.py index a9ed72a211..4bb5e6f0ca 100644 --- a/satpy/tests/reader_tests/test_aws_l1b.py +++ b/satpy/tests/reader_tests/test_aws_l1b.py @@ -51,9 +51,9 @@ def random_date(start, end): def aws_file(tmp_path_factory): """Create an AWS file.""" ds = DataTree() - start_time = random_date(datetime(2024, 6, 1), datetime(2030, 6, 1)) + start_time = datetime(2024, 9, 1, 12, 0) ds.attrs["sensing_start_time_utc"] = start_time.strftime(DATETIME_FORMAT) - end_time = random_date(datetime(2024, 6, 1), datetime(2030, 6, 1)) + end_time = datetime(2024, 9, 1, 12, 15) ds.attrs["sensing_end_time_utc"] = end_time.strftime(DATETIME_FORMAT) processing_time = random_date(datetime(2024, 6, 1), datetime(2030, 6, 1)) @@ 
-94,16 +94,15 @@ def aws_file(tmp_path_factory): def aws_handler(aws_file): """Create an aws filehandler.""" filename_info = parse(file_pattern, os.path.basename(aws_file)) - return AWSL1BFile(aws_file, filename_info, dict()) + filetype_info = dict() + filetype_info["file_type"] = "aws_l1b" + return AWSL1BFile(aws_file, filename_info, filetype_info) -def test_start_end_time(aws_file): +def test_start_end_time(aws_handler): """Test that start and end times are read correctly.""" - filename_info = parse(file_pattern, os.path.basename(aws_file)) - handler = AWSL1BFile(aws_file, filename_info, dict()) - - assert handler.start_time == filename_info["start_time"] - assert handler.end_time == filename_info["end_time"] + assert aws_handler.start_time == datetime(2024, 9, 1, 12, 0) + assert aws_handler.end_time == datetime(2024, 9, 1, 12, 15) def test_metadata(aws_handler): From 441ee06f6cb039d19f3221cf6749978611995f5c Mon Sep 17 00:00:00 2001 From: "Adam.Dybbroe" Date: Wed, 11 Dec 2024 18:58:51 +0100 Subject: [PATCH 259/340] Adapt test to latest AWS l1b format Signed-off-by: Adam.Dybbroe --- satpy/tests/reader_tests/test_aws_l1b.py | 44 +++++++++++------------- 1 file changed, 20 insertions(+), 24 deletions(-) diff --git a/satpy/tests/reader_tests/test_aws_l1b.py b/satpy/tests/reader_tests/test_aws_l1b.py index 4bb5e6f0ca..d3c45e4e3d 100644 --- a/satpy/tests/reader_tests/test_aws_l1b.py +++ b/satpy/tests/reader_tests/test_aws_l1b.py @@ -18,25 +18,22 @@ rng = np.random.default_rng() -fake_data_np = rng.integers(0, 700000, size=19*5*5).reshape((19, 5, 5)) +fake_data_np = rng.integers(0, 700000, size=10*145*19).reshape((10, 145, 19)) fake_data_np[0, 0, 0] = -2147483648 -fake_data_np[0, 0, 1] = 700000 + 10 -fake_data_np[0, 0, 2] = -10 - -fake_data = xr.DataArray(fake_data_np, - dims=["n_channels", "n_fovs", "n_scans"]) -fake_lon_data = xr.DataArray(rng.integers(0, 3599999, size=25 * 4).reshape((4, 5, 5)), - dims=["n_geo_groups", "n_fovs", "n_scans"]) -fake_lat_data = 
xr.DataArray(rng.integers(-900000, 900000, size=25 * 4).reshape((4, 5, 5)), - dims=["n_geo_groups", "n_fovs", "n_scans"]) -fake_sun_azi_data = xr.DataArray(rng.integers(0, 36000, size=25 * 4).reshape((4, 5, 5)), - dims=["n_geo_groups", "n_fovs", "n_scans"]) -fake_sun_zen_data = xr.DataArray(rng.integers(0, 36000, size=25 * 4).reshape((4, 5, 5)), - dims=["n_geo_groups", "n_fovs", "n_scans"]) -fake_sat_azi_data = xr.DataArray(rng.integers(0, 36000, size=25 * 4).reshape((4, 5, 5)), - dims=["n_geo_groups", "n_fovs", "n_scans"]) -fake_sat_zen_data = xr.DataArray(rng.integers(0, 36000, size=25 * 4).reshape((4, 5, 5)), - dims=["n_geo_groups", "n_fovs", "n_scans"]) +fake_data_np[1, 0, 0] = 700000 + 10 +fake_data_np[2, 0, 0] = -10 + +ARRAY_DIMS = ["n_scans", "n_fovs", "n_channels"] +fake_data = xr.DataArray(fake_data_np, dims=ARRAY_DIMS) + +GEO_DIMS = ["n_scans", "n_fovs", "n_geo_groups"] +GEO_SIZE = 10*145*4 +fake_lon_data = xr.DataArray(rng.integers(0, 3599999, size=GEO_SIZE).reshape((10, 145, 4)), dims=GEO_DIMS) +fake_lat_data = xr.DataArray(rng.integers(-900000, 900000, size=GEO_SIZE).reshape((10, 145, 4)), dims=GEO_DIMS) +fake_sun_azi_data = xr.DataArray(rng.integers(0, 36000, size=GEO_SIZE).reshape((10, 145, 4)), dims=GEO_DIMS) +fake_sun_zen_data = xr.DataArray(rng.integers(0, 36000, size=GEO_SIZE).reshape((10, 145, 4)), dims=GEO_DIMS) +fake_sat_azi_data = xr.DataArray(rng.integers(0, 36000, size=GEO_SIZE).reshape((10, 145, 4)), dims=GEO_DIMS) +fake_sat_zen_data = xr.DataArray(rng.integers(0, 36000, size=GEO_SIZE).reshape((10, 145, 4)), dims=GEO_DIMS) def random_date(start, end): @@ -81,7 +78,6 @@ def aws_file(tmp_path_factory): ds["status/satellite/subsat_latitude_start"] = np.array(55.41) ds["status/satellite/subsat_longitude_end"] = np.array(296.79) - tmp_dir = tmp_path_factory.mktemp("aws_l1b_tests") filename = tmp_dir / compose(file_pattern, dict(start_time=start_time, end_time=end_time, processing_time=processing_time, platform_name=platform_name)) @@ -129,7 
+125,7 @@ def test_get_channel_data(aws_handler): assert "y" in res.dims assert "orbital_parameters" in res.attrs assert res.attrs["orbital_parameters"]["sub_satellite_longitude_end"] == 296.79 - assert res.dims == ("x", "y") + assert res.dims == ("y", "x") assert "n_channels" not in res.coords assert res.attrs["sensor"] == "AWS" assert res.attrs["platform_name"] == "AWS1" @@ -152,7 +148,7 @@ def test_get_navigation_data(aws_handler, id_name, file_key, fake_array): assert "x" in res.dims assert "y" in res.dims assert "orbital_parameters" in res.attrs - assert res.dims == ("x", "y") + assert res.dims == ("y", "x") assert "standard_name" in res.attrs assert "n_geo_groups" not in res.coords if id_name == "longitude": @@ -167,16 +163,16 @@ def test_get_navigation_data(aws_handler, id_name, file_key, fake_array): def test_get_viewing_geometry_data(aws_handler, id_name, file_key, fake_array): """Test retrieving the angles_data.""" Horn = Enum("Horn", ["1", "2", "3", "4"]) - did = dict(name=id_name, horn=Horn["1"]) + dset_id = dict(name=id_name, horn=Horn["1"]) dataset_info = dict(file_key=file_key, standard_name=id_name) - res = aws_handler.get_dataset(did, dataset_info) + res = aws_handler.get_dataset(dset_id, dataset_info) np.testing.assert_allclose(res, fake_array.isel(n_geo_groups=0)) assert "x" in res.dims assert "y" in res.dims assert "orbital_parameters" in res.attrs - assert res.dims == ("x", "y") + assert res.dims == ("y", "x") assert "standard_name" in res.attrs assert "n_geo_groups" not in res.coords if id_name == "longitude": From 1437cd193f0b8bde8a93053ddbdbd3699d49d20c Mon Sep 17 00:00:00 2001 From: "Adam.Dybbroe" Date: Wed, 11 Dec 2024 19:58:31 +0100 Subject: [PATCH 260/340] Fix tests and add some basic RGB recipes for AWS Signed-off-by: Adam.Dybbroe --- satpy/etc/composites/aws.yaml | 20 ++++++++++++++++---- satpy/etc/enhancements/generic.yaml | 15 +++++++++++++++ satpy/etc/readers/aws_l1b_nc.yaml | 2 +- satpy/etc/readers/eps_sterna_l1b_nc.yaml | 4 ++-- 4 
files changed, 34 insertions(+), 7 deletions(-) diff --git a/satpy/etc/composites/aws.yaml b/satpy/etc/composites/aws.yaml index 55af749d89..ba38a69d02 100644 --- a/satpy/etc/composites/aws.yaml +++ b/satpy/etc/composites/aws.yaml @@ -2,12 +2,12 @@ sensor_name: aws composites: mw183_humidity: - compositor: !!python/name:satpy.composites.RGBCompositor + standard_name: mw183_humidity + compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - - name: '15' - - name: '13' - name: '11' - standard_name: mw183_humidity + - name: '13' + - name: '15' mw183_humidity_surface: compositor: !!python/name:satpy.composites.RGBCompositor @@ -32,3 +32,15 @@ composites: - name: '18' - name: '19' standard_name: mw325_humidity + + ch1_tbs_colors: + compositor: !!python/name:satpy.composites.SingleBandCompositor + prerequisites: + - name: '1' + standard_name: tbs_colors + + ch10_tbs_colors: + compositor: !!python/name:satpy.composites.SingleBandCompositor + prerequisites: + - name: '10' + standard_name: tbs_colors diff --git a/satpy/etc/enhancements/generic.yaml b/satpy/etc/enhancements/generic.yaml index cdfb7851ad..8989d44152 100644 --- a/satpy/etc/enhancements/generic.yaml +++ b/satpy/etc/enhancements/generic.yaml @@ -1289,3 +1289,18 @@ enhancements: image_ready: standard_name: image_ready operations: [] + + mw183_humidity: + # matches AWS + standard_name: mw183_humidity + operations: + - name: stretch + method: !!python/name:satpy.enhancements.stretch + kwargs: + stretch: crude + min_stretch: [290, 290, 290] + max_stretch: [190, 190, 190] + - name: gamma + method: !!python/name:satpy.enhancements.gamma + kwargs: + gamma: [1.5, 1.2, 1.2] diff --git a/satpy/etc/readers/aws_l1b_nc.yaml b/satpy/etc/readers/aws_l1b_nc.yaml index 0d6df8dc4e..1f1341e0e6 100644 --- a/satpy/etc/readers/aws_l1b_nc.yaml +++ b/satpy/etc/readers/aws_l1b_nc.yaml @@ -530,6 +530,6 @@ file_types: # 
W_XX-OHB-Stockholm,SAT,AWS1-MWR-1B-RAD_C_OHB_20230816120142_G_D_20240115111111_20240115125434_T_B____radsim.nc file_reader: !!python/name:satpy.readers.aws_l1b.AWSL1BFile file_patterns: [ - 'W_XX-OHB-Stockholm,SAT,{platform_name}-MWR-1B-RAD_C_OHB_{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_T_B____.nc', + 'W_XX-OHB-Stockholm,SAT,{platform_name}-MWR-1B-RAD_C_OHB_{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{disposition_mode:1s}_{processing_mode:1s}____.nc', 'W_XX-OHB-Stockholm,SAT,{platform_name}-MWR-1B-RAD_C_OHB__{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_T_B____.nc', 'W_XX-OHB-Stockholm,SAT,{platform_name}-MWR-1B-RAD_C_OHB_{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_T_B____radsim.nc'] diff --git a/satpy/etc/readers/eps_sterna_l1b_nc.yaml b/satpy/etc/readers/eps_sterna_l1b_nc.yaml index 026aefc918..3313703ea4 100644 --- a/satpy/etc/readers/eps_sterna_l1b_nc.yaml +++ b/satpy/etc/readers/eps_sterna_l1b_nc.yaml @@ -1,5 +1,5 @@ reader: - name: aws_l1b_nc + name: eps_sterna_l1b_nc short_name: AWS L1B RAD NetCDF4 long_name: AWS L1B Radiance (NetCDF4) description: Reader for the EUMETSAT EPS-Sterna Sounder level-1b files in netCDF4. 
@@ -528,5 +528,5 @@ file_types: # W_XX-EUMETSAT-Darmstadt,SAT,AWS1-MWR-1B-RAD_C_EUMT_20241121085911_G_D_20241109234502_20241110004559_T_N____.nc file_reader: !!python/name:satpy.readers.aws_l1b.AWSL1BFile file_patterns: [ - 'W_XX-EUMETSAT-Darmstadt,SAT,{platform_name}-MWR-1B-RAD_C_EUMT_{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_T_N____.nc', + 'W_XX-EUMETSAT-Darmstadt,SAT,{platform_name}-MWR-1B-RAD_C_EUMT_{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{disposition_mode:1s}_{processing_mode:1s}____.nc', ] From 23fcf3c7121d3289fdc1b727379740be427b113f Mon Sep 17 00:00:00 2001 From: "Adam.Dybbroe" Date: Thu, 12 Dec 2024 11:29:13 +0100 Subject: [PATCH 261/340] Fix for xarray > 2024.09, and leave stand alone datatree Signed-off-by: Adam.Dybbroe --- satpy/tests/reader_tests/test_aws_l1b.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_aws_l1b.py b/satpy/tests/reader_tests/test_aws_l1b.py index d3c45e4e3d..214f1e1599 100644 --- a/satpy/tests/reader_tests/test_aws_l1b.py +++ b/satpy/tests/reader_tests/test_aws_l1b.py @@ -8,8 +8,8 @@ import numpy as np import pytest import xarray as xr -from datatree import DataTree from trollsift import compose, parse +from xarray import DataTree from satpy.readers.aws_l1b import DATETIME_FORMAT, AWSL1BFile From dbda9ad0c4ddfd8f160744158c25374f8739e8c4 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 12 Dec 2024 08:59:30 -0600 Subject: [PATCH 262/340] Remove xarray-datatree dependency from CI It is now part of xarray --- continuous_integration/environment.yaml | 1 - 1 file changed, 1 deletion(-) diff --git a/continuous_integration/environment.yaml b/continuous_integration/environment.yaml index d1bb979025..bc898cafe2 100644 --- a/continuous_integration/environment.yaml +++ b/continuous_integration/environment.yaml @@ -53,7 +53,6 @@ dependencies: - pip - skyfield - astropy - - xarray-datatree - pint-xarray - 
ephem - bokeh From 2297ee7c5d40f19598ba1a77d2c531b308bb9668 Mon Sep 17 00:00:00 2001 From: Gerrit Holl Date: Fri, 13 Dec 2024 09:35:46 +0100 Subject: [PATCH 263/340] Improve reference data generation script. --- utils/create_reference.py | 78 +++++++++++++++++++++++---------------- 1 file changed, 46 insertions(+), 32 deletions(-) diff --git a/utils/create_reference.py b/utils/create_reference.py index ca0d0a180d..5510054099 100644 --- a/utils/create_reference.py +++ b/utils/create_reference.py @@ -18,40 +18,50 @@ Script to create reference images for the automated image testing system. -create_reference.py - The input data directory must follow the data structure from the image-comparison-tests repository with satellite_data/. This script is a work in progress and expected to change significantly. -It is absolutely not intended for any operational production of satellite -imagery. + +DO NOT USE FOR OPERATIONAL PRODUCTION! """ import argparse +import os import pathlib -from glob import glob + +import hdf5plugin # noqa: F401 from satpy import Scene -def generate_images(reader, filenames, area, composites, outdir): - """Generate reference images for testing purposes.""" - from dask.diagnostics import ProgressBar - scn = Scene(reader="abi_l1b", filenames=filenames) +def generate_images(props): + """Generate reference images for testing purposes. - composites = ["ash", "airmass"] - scn.load(composites) - if area is None: - ls = scn - elif area == "native": + Args: + props (namespace): Object with attributes corresponding to command line + arguments as defined by :func:get_parser. 
+ """ + filenames = (props.basedir / "satellite_data" / props.satellite).glob("*") + + scn = Scene(reader=props.reader, filenames=filenames) + + scn.load(props.composites) + if props.area == "native": ls = scn.resample(resampler="native") + elif props.area is not None: + ls = scn.resample(props.area, resampler="gradient_search") else: - ls = scn.resample(area) + ls = scn + from dask.diagnostics import ProgressBar with ProgressBar(): - ls.save_datasets(writer="simple_image", filename=outdir + - "/satpy-reference-image-{platform_name}-{sensor}-{start_time:%Y%m%d%H%M}-{area.area_id}-{name}.png") + ls.save_datasets( + writer="simple_image", + filename=os.fspath( + props.basedir / "reference_images" / + "satpy-reference-image-{platform_name}-{sensor}-" + "{start_time:%Y%m%d%H%M}-{area.area_id}-{name}.png")) def get_parser(): """Return argument parser.""" @@ -66,31 +76,35 @@ def get_parser(): help="Reader name.") parser.add_argument( - "area", action="store", type=str, - help="Area name, 'null' (no resampling) or 'native' (native resampling)") + "-b", "--basedir", action="store", type=pathlib.Path, + default=pathlib.Path("."), + help="Base directory for reference data. " + "This must contain a subdirectories satellite_data and " + "reference_images. The directory satellite_data must contain " + "input data in a subdirectory for the satellite. 
Output images " + "will be written to the subdirectory reference_images.") parser.add_argument( - "basedir", action="store", type=pathlib.Path, - help="Root directory where reference input data are contained.") + "-o", "--outdir", action="store", type=pathlib.Path, + default=pathlib.Path("."), + help="Directory where to write resulting images.") parser.add_argument( - "outdir", action="store", type=pathlib.Path, - help="Directory where to write resulting images.") + "-c", "--composites", nargs="+", help="composites to generate", + type=str, default=["ash", "airmass"]) + + parser.add_argument( + "-a", "--area", action="store", + default=None, + help="Area name, or 'native' (native resampling)") return parser def main(): """Main function.""" parsed = get_parser().parse_args() - ext_data_path = parsed.basedir - reader = parsed.reader - area = parsed.area - outdir = parsed.outdir - satellite = parsed.satellite - - filenames = glob(f"{ext_data_path}/satellite_data/{satellite}/*") - generate_images(reader, filenames, None if area.lower() == "null" else - area, ["airmass", "ash"], outdir) + + generate_images(parsed) if __name__ == "__main__": main() From 02989b9ad3cf850a66cb1577be880d050bb9f38f Mon Sep 17 00:00:00 2001 From: Gerrit Holl Date: Fri, 13 Dec 2024 12:55:31 +0100 Subject: [PATCH 264/340] Fix feature syntax and filename setup --- .../tests/behave/features/image_comparison.feature | 13 +++++++------ .../tests/behave/features/steps/image_comparison.py | 3 ++- 2 files changed, 9 insertions(+), 7 deletions(-) diff --git a/satpy/tests/behave/features/image_comparison.feature b/satpy/tests/behave/features/image_comparison.feature index 5bb6c9c2c9..d367732ae3 100755 --- a/satpy/tests/behave/features/image_comparison.feature +++ b/satpy/tests/behave/features/image_comparison.feature @@ -6,9 +6,10 @@ Feature: Image Comparison Then the generated image should be the same as the reference image Examples: - |satellite |composite | reader | area - |GOES17 |airmass | abi_l1b | 
null - |GOES16 |airmass | abi_l1b | null - |GOES16 |ash | abi_l1b | null - |GOES17 |ash | abi_l1b | null - |METEOSAT12 | cloudtop | fci_l1b_nc | sve + |satellite |composite | reader | area | + |GOES17 |airmass | abi_l1b | null | + |GOES16 |airmass | abi_l1b | null | + |GOES16 |ash | abi_l1b | null | + |GOES17 |ash | abi_l1b | null | + |Meteosat-12 | cloudtop | fci_l1c_nc | sve | + |Meteosat-12 | night_microphysics | fci_l1c_nc | sve | diff --git a/satpy/tests/behave/features/steps/image_comparison.py b/satpy/tests/behave/features/steps/image_comparison.py index 74f0af3229..6e33baeb1e 100644 --- a/satpy/tests/behave/features/steps/image_comparison.py +++ b/satpy/tests/behave/features/steps/image_comparison.py @@ -22,6 +22,7 @@ import cv2 import dask +import hdf5plugin # noqa: F401 import numpy as np from behave import given, then, when @@ -54,7 +55,7 @@ def setup_hooks(): @given("I have a {composite} reference image file from {satellite} resampled to {area}") def step_given_reference_image(context, composite, satellite, area): """Prepare a reference image.""" - reference_image = f"reference_image_{satellite}_{composite}_{area}.png" + reference_image = f"satpy-reference-image-{satellite}-{composite}-{area}.png" context.reference_image = cv2.imread(f"{ext_data_path}/reference_images/{reference_image}") context.satellite = satellite context.composite = composite From 8ce9a62535769c0e4d4171392d025e2541f69fe4 Mon Sep 17 00:00:00 2001 From: Gerrit Holl Date: Fri, 13 Dec 2024 14:27:22 +0100 Subject: [PATCH 265/340] import hdf5plugin first despite isort --- satpy/tests/behave/features/steps/image_comparison.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/tests/behave/features/steps/image_comparison.py b/satpy/tests/behave/features/steps/image_comparison.py index 6e33baeb1e..1349e0b64e 100644 --- a/satpy/tests/behave/features/steps/image_comparison.py +++ b/satpy/tests/behave/features/steps/image_comparison.py @@ -15,6 +15,7 @@ # satpy. 
If not, see . """Image comparison tests.""" +import hdf5plugin # noqa: F401 isort:skip import os import warnings from datetime import datetime @@ -22,7 +23,6 @@ import cv2 import dask -import hdf5plugin # noqa: F401 import numpy as np from behave import given, then, when @@ -80,7 +80,7 @@ def step_when_generate_image(context, composite, satellite, reader, area): if area == "null": ls = scn else: - ls = scn.resample(area) + ls = scn.resample(area, resampler="gradient_search") # Save the generated image in the generated folder generated_image_path = os.path.join(context.test_results_dir, "generated", From b8b08e47308a423136a89b206c5814e7e1215a71 Mon Sep 17 00:00:00 2001 From: "Adam.Dybbroe" Date: Fri, 13 Dec 2024 14:34:42 +0100 Subject: [PATCH 266/340] First version of the l1b reader for the Arctic Weather Satellite Signed-off-by: Adam.Dybbroe # Conflicts: # satpy/etc/readers/aws_l1b_nc.yaml # satpy/readers/aws_l1b.py --- satpy/readers/aws_l1b.py | 1 - 1 file changed, 1 deletion(-) diff --git a/satpy/readers/aws_l1b.py b/satpy/readers/aws_l1b.py index 9023f6f0ab..b1ba4dabd2 100644 --- a/satpy/readers/aws_l1b.py +++ b/satpy/readers/aws_l1b.py @@ -43,7 +43,6 @@ class using the :mod:`~satpy.Scene.load` method with the reader ``"aws_l1b_nc"``. """ - def __init__(self, filename, filename_info, filetype_info, auto_maskandscale=True): """Initialize the handler.""" super().__init__(filename, filename_info, filetype_info, From 7cd8858d6f887a259d66737ed2264bf1a73b334e Mon Sep 17 00:00:00 2001 From: Gerrit Holl Date: Fri, 13 Dec 2024 14:42:16 +0100 Subject: [PATCH 267/340] Does it help if I do Meteosat-12 first? 
--- satpy/tests/behave/features/image_comparison.feature | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/tests/behave/features/image_comparison.feature b/satpy/tests/behave/features/image_comparison.feature index d367732ae3..b794fa22ef 100755 --- a/satpy/tests/behave/features/image_comparison.feature +++ b/satpy/tests/behave/features/image_comparison.feature @@ -7,9 +7,9 @@ Feature: Image Comparison Examples: |satellite |composite | reader | area | + |Meteosat-12 | cloudtop | fci_l1c_nc | sve | + |Meteosat-12 | night_microphysics | fci_l1c_nc | sve | |GOES17 |airmass | abi_l1b | null | |GOES16 |airmass | abi_l1b | null | |GOES16 |ash | abi_l1b | null | |GOES17 |ash | abi_l1b | null | - |Meteosat-12 | cloudtop | fci_l1c_nc | sve | - |Meteosat-12 | night_microphysics | fci_l1c_nc | sve | From 4d13d122b0e5068c85f641d4d1a23fe801641971 Mon Sep 17 00:00:00 2001 From: Gerrit Holl Date: Fri, 13 Dec 2024 15:58:21 +0100 Subject: [PATCH 268/340] workaround for failing hdf5lpguni? Trying John Cintineos workaround. 
--- satpy/tests/behave/features/steps/image_comparison.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/satpy/tests/behave/features/steps/image_comparison.py b/satpy/tests/behave/features/steps/image_comparison.py index 1349e0b64e..a35dd22c00 100644 --- a/satpy/tests/behave/features/steps/image_comparison.py +++ b/satpy/tests/behave/features/steps/image_comparison.py @@ -17,6 +17,10 @@ import hdf5plugin # noqa: F401 isort:skip import os +import os.path + +os.environ["HDF5_PLUGIN_PATH"] = os.path.dirname(hdf5plugin.__file__) + "/plugins/" + import warnings from datetime import datetime from glob import glob From 4c912ebe7d47368451227d6e00e00eed5241a60d Mon Sep 17 00:00:00 2001 From: Gerrit Holl Date: Fri, 13 Dec 2024 16:36:47 +0100 Subject: [PATCH 269/340] switch on debug logging for troubleshooting purposes --- satpy/tests/behave/features/steps/image_comparison.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/satpy/tests/behave/features/steps/image_comparison.py b/satpy/tests/behave/features/steps/image_comparison.py index a35dd22c00..a7afd56008 100644 --- a/satpy/tests/behave/features/steps/image_comparison.py +++ b/satpy/tests/behave/features/steps/image_comparison.py @@ -32,6 +32,8 @@ from satpy import Scene +from satpy.utils import debug_on; debug_on() + ext_data_path = "/app/ext_data" threshold = 2000 From 0c280046886bec208ead8b40877a99bb892e6353 Mon Sep 17 00:00:00 2001 From: "Adam.Dybbroe" Date: Sun, 15 Dec 2024 22:14:53 +0100 Subject: [PATCH 270/340] Fix the naming - The AWS/EPS-Sterna sensor is named "MWR" Signed-off-by: Adam.Dybbroe --- .../{aws_l1b_nc.yaml => aws1_mwr_l1b_nc.yaml} | 12 +- satpy/etc/readers/aws_l1c_nc.yaml | 375 ------------------ ...l1b_nc.yaml => eps_sterna_mwr_l1b_nc.yaml} | 2 +- satpy/readers/{aws_l1b.py => mwr_l1b.py} | 22 +- satpy/readers/{aws_l1c.py => mwr_l1c.py} | 2 +- satpy/tests/reader_tests/conftest.py | 3 +- .../{test_aws_l1b.py => test_aws1_mwr_l1b.py} | 59 ++- .../reader_tests/test_eps_sterna_mwr_l1b.py | 130 
++++++ 8 files changed, 190 insertions(+), 415 deletions(-) rename satpy/etc/readers/{aws_l1b_nc.yaml => aws1_mwr_l1b_nc.yaml} (98%) delete mode 100644 satpy/etc/readers/aws_l1c_nc.yaml rename satpy/etc/readers/{eps_sterna_l1b_nc.yaml => eps_sterna_mwr_l1b_nc.yaml} (99%) rename satpy/readers/{aws_l1b.py => mwr_l1b.py} (91%) rename satpy/readers/{aws_l1c.py => mwr_l1c.py} (97%) rename satpy/tests/reader_tests/{test_aws_l1b.py => test_aws1_mwr_l1b.py} (79%) create mode 100644 satpy/tests/reader_tests/test_eps_sterna_mwr_l1b.py diff --git a/satpy/etc/readers/aws_l1b_nc.yaml b/satpy/etc/readers/aws1_mwr_l1b_nc.yaml similarity index 98% rename from satpy/etc/readers/aws_l1b_nc.yaml rename to satpy/etc/readers/aws1_mwr_l1b_nc.yaml index 1f1341e0e6..7fc88332b2 100644 --- a/satpy/etc/readers/aws_l1b_nc.yaml +++ b/satpy/etc/readers/aws1_mwr_l1b_nc.yaml @@ -1,10 +1,10 @@ reader: - name: aws_l1b_nc - short_name: AWS L1B RAD NetCDF4 - long_name: AWS L1B Radiance (NetCDF4) - description: Reader for the ESA AWS (Arctic Weather Satellite) Sounder level-1b files in netCDF4. + name: aws1_mwr_l1b_nc + short_name: AWS1 MWR L1B RAD NetCDF4 + long_name: AWS1 MWR L1B Radiance (NetCDF4) + description: Reader for the ESA AWS (Arctic Weather Satellite) Microwave Radiometer (MWR) level-1b files in netCDF4.
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader - sensors: [aws,] + sensors: [mwr,] status: Beta supports_fsspec: false @@ -528,7 +528,7 @@ file_types: # W_XX-OHB-Unknown,SAT,1-AWS-1B-RAD_C_OHB_20230707124607_G_D_20220621090100_20220621090618_T_B____.nc # W_XX-OHB-Stockholm,SAT,AWS1-MWR-1B-RAD_C_OHB_20230823161321_G_D_20240115111111_20240115125434_T_B____.nc # W_XX-OHB-Stockholm,SAT,AWS1-MWR-1B-RAD_C_OHB_20230816120142_G_D_20240115111111_20240115125434_T_B____radsim.nc - file_reader: !!python/name:satpy.readers.aws_l1b.AWSL1BFile + file_reader: !!python/name:satpy.readers.mwr_l1b.AWS_EPS_Sterna_MWR_L1BFile file_patterns: [ 'W_XX-OHB-Stockholm,SAT,{platform_name}-MWR-1B-RAD_C_OHB_{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{disposition_mode:1s}_{processing_mode:1s}____.nc', 'W_XX-OHB-Stockholm,SAT,{platform_name}-MWR-1B-RAD_C_OHB__{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_T_B____.nc', diff --git a/satpy/etc/readers/aws_l1c_nc.yaml b/satpy/etc/readers/aws_l1c_nc.yaml deleted file mode 100644 index 8abd9150ef..0000000000 --- a/satpy/etc/readers/aws_l1c_nc.yaml +++ /dev/null @@ -1,375 +0,0 @@ -reader: - name: aws_l1c_nc - short_name: AWS L1C RAD NetCDF4 - long_name: AWS L1C Radiance (NetCDF4) - description: Reader for the ESA AWS (Arctic Weather Satellite) Sounder level-1c files in netCDF4. 
- reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader - sensors: [aws,] - status: Beta - supports_fsspec: false - - data_identification_keys: - name: - required: true - frequency_double_sideband: - type: !!python/name:satpy.readers.pmw_channels_definitions.FrequencyDoubleSideBand - frequency_range: - type: !!python/name:satpy.readers.pmw_channels_definitions.FrequencyRange - resolution: - polarization: - enum: - - QH - - QV - calibration: - enum: - - brightness_temperature - transitive: true - modifiers: - required: true - default: [] - type: !!python/name:satpy.dataset.ModifierTuple - - coord_identification_keys: - name: - required: true - resolution: - polarization: - enum: - - QH - - QV - -datasets: - '1': - name: '1' - frequency_range: - central: 50.3 - bandwidth: 0.180 - unit: GHz - polarization: 'QH' - resolution: 40000 - calibration: - brightness_temperature: - standard_name: toa_brightness_temperature - coordinates: [longitude, latitude] - file_type: aws_l1c_nc - file_key: data/calibration/aws_toa_brightness_temperature - '2': - name: '2' - frequency_range: - central: 52.8 - bandwidth: 0.400 - unit: GHz - polarization: 'QH' - resolution: 40000 - calibration: - brightness_temperature: - standard_name: toa_brightness_temperature - coordinates: [longitude, latitude] - file_type: aws_l1c_nc - file_key: data/calibration/aws_toa_brightness_temperature - '3': - name: '3' - frequency_range: - central: 53.246 - bandwidth: 0.300 - unit: GHz - polarization: 'QH' - resolution: 40000 - calibration: - brightness_temperature: - standard_name: toa_brightness_temperature - coordinates: [longitude, latitude] - file_type: aws_l1c_nc - file_key: data/calibration/aws_toa_brightness_temperature - '4': - name: '4' - frequency_range: - central: 53.596 - bandwidth: 0.370 - unit: GHz - polarization: 'QH' - resolution: 40000 - calibration: - brightness_temperature: - standard_name: toa_brightness_temperature - coordinates: [longitude, latitude] - file_type: aws_l1c_nc - 
file_key: data/calibration/aws_toa_brightness_temperature - '5': - name: '5' - frequency_range: - central: 54.4 - bandwidth: 0.400 - unit: GHz - polarization: 'QH' - resolution: 40000 - calibration: - brightness_temperature: - standard_name: toa_brightness_temperature - coordinates: [longitude, latitude] - file_type: aws_l1c_nc - file_key: data/calibration/aws_toa_brightness_temperature - '6': - name: '6' - frequency_range: - central: 54.94 - bandwidth: 0.400 - unit: GHz - polarization: 'QH' - resolution: 40000 - calibration: - brightness_temperature: - standard_name: toa_brightness_temperature - coordinates: [longitude, latitude] - file_type: aws_l1c_nc - file_key: data/calibration/aws_toa_brightness_temperature - '7': - name: '7' - frequency_range: - central: 55.5 - bandwidth: 0.330 - unit: GHz - polarization: 'QH' - resolution: 40000 - calibration: - brightness_temperature: - standard_name: toa_brightness_temperature - coordinates: [longitude, latitude] - file_type: aws_l1c_nc - file_key: data/calibration/aws_toa_brightness_temperature - '8': - name: '8' - frequency_range: - central: 57.290344 - bandwidth: 0.330 - unit: GHz - polarization: 'QH' - resolution: 40000 - calibration: - brightness_temperature: - standard_name: toa_brightness_temperature - coordinates: [longitude, latitude] - file_type: aws_l1c_nc - file_key: data/calibration/aws_toa_brightness_temperature - '9': - name: '9' - frequency_range: - central: 89.0 - bandwidth: 4.0 - unit: GHz - polarization: 'QV' - resolution: 20000 - calibration: - brightness_temperature: - standard_name: toa_brightness_temperature - coordinates: [longitude, latitude] - file_type: aws_l1c_nc - file_key: data/calibration/aws_toa_brightness_temperature - '10': - name: '10' - frequency_range: - central: 165.5 - bandwidth: 2.700 - unit: GHz - polarization: 'QH' - resolution: 20000 - calibration: - brightness_temperature: - standard_name: toa_brightness_temperature - coordinates: [longitude, latitude] - file_type: aws_l1c_nc - 
file_key: data/calibration/aws_toa_brightness_temperature - '11': - name: '11' - frequency_range: - central: 176.311 - bandwidth: 2.0 - unit: GHz - polarization: 'QV' - resolution: 10000 - calibration: - brightness_temperature: - standard_name: toa_brightness_temperature - coordinates: [longitude, latitude] - file_type: aws_l1c_nc - file_key: data/calibration/aws_toa_brightness_temperature - '12': - name: '12' - frequency_range: - central: 178.811 - bandwidth: 2.0 - unit: GHz - polarization: 'QV' - resolution: 10000 - calibration: - brightness_temperature: - standard_name: toa_brightness_temperature - coordinates: [longitude, latitude] - file_type: aws_l1c_nc - file_key: data/calibration/aws_toa_brightness_temperature - '13': - name: '13' - frequency_range: - central: 180.311 - bandwidth: 1.0 - unit: GHz - polarization: 'QV' - resolution: 10000 - calibration: - brightness_temperature: - standard_name: toa_brightness_temperature - coordinates: [longitude, latitude] - file_type: aws_l1c_nc - file_key: data/calibration/aws_toa_brightness_temperature - '14': - name: '14' - frequency_range: - central: 181.511 - bandwidth: 1.0 - unit: GHz - polarization: 'QV' - resolution: 10000 - calibration: - brightness_temperature: - standard_name: toa_brightness_temperature - coordinates: [longitude, latitude] - file_type: aws_l1c_nc - file_key: data/calibration/aws_toa_brightness_temperature - '15': - name: '15' - frequency_range: - central: 182.311 - bandwidth: 0.5 - unit: GHz - polarization: 'QV' - resolution: 10000 - calibration: - brightness_temperature: - standard_name: toa_brightness_temperature - coordinates: [longitude, latitude] - file_type: aws_l1c_nc - file_key: data/calibration/aws_toa_brightness_temperature - '16': - name: '16' - frequency_double_sideband: - central: 325.15 - side: 1.2 - bandwidth: 0.8 - unit: GHz - polarization: 'QV' - resolution: 10000 - calibration: - brightness_temperature: - standard_name: toa_brightness_temperature - coordinates: [longitude, 
latitude] - file_type: aws_l1c_nc - file_key: data/calibration/aws_toa_brightness_temperature - '17': - name: '17' - frequency_double_sideband: - central: 325.15 - side: 2.4 - bandwidth: 1.2 - unit: GHz - polarization: 'QV' - resolution: 10000 - calibration: - brightness_temperature: - standard_name: toa_brightness_temperature - coordinates: [longitude, latitude] - file_type: aws_l1c_nc - file_key: data/calibration/aws_toa_brightness_temperature - '18': - name: '18' - frequency_double_sideband: - central: 325.15 - side: 4.1 - bandwidth: 1.8 - unit: GHz - polarization: 'QV' - resolution: 10000 - calibration: - brightness_temperature: - standard_name: toa_brightness_temperature - coordinates: [longitude, latitude] - file_type: aws_l1c_nc - file_key: data/calibration/aws_toa_brightness_temperature - '19': - name: '19' - frequency_double_sideband: - central: 325.15 - side: 6.6 - bandwidth: 2.8 - unit: GHz - polarization: 'QV' - resolution: 10000 - calibration: - brightness_temperature: - standard_name: toa_brightness_temperature - coordinates: [longitude, latitude] - file_type: aws_l1c_nc - file_key: data/calibration/aws_toa_brightness_temperature - -# --- Coordinates --- - - longitude: - name: longitude - file_type: aws_l1c_nc - standard_name: longitude - units: degrees_east - file_key: data/navigation/aws_lon - - latitude: - name: latitude - file_type: aws_l1c_nc - standard_name: latitude - units: degrees_north - file_key: data/navigation/aws_lat - -# --- Navigation data --- - - solar_azimuth: - name: solar_azimuth - file_type: aws_l1c_nc - file_key: data/navigation/aws_solar_azimuth_angle - standard_name: solar_azimuth_angle - coordinates: - - longitude - - latitude - - solar_zenith: - name: solar_zenith - file_type: aws_l1c_nc - file_key: data/navigation/aws_solar_zenith_angle - standard_name: solar_zenith_angle - coordinates: - - longitude - - latitude - - satellite_azimuth: - name: satellite_azimuth - file_type: aws_l1c_nc - file_key: 
data/navigation/aws_satellite_azimuth_angle - standard_name: satellite_azimuth_angle - coordinates: - - longitude - - latitude - - satellite_zenith: - name: satellite_zenith - file_type: aws_l1c_nc - file_key: data/navigation/aws_satellite_zenith_angle - standard_name: satellite_zenith_angle - coordinates: - - longitude - - latitude - -file_types: - aws_l1c_nc: - # W_XX-OHB-Unknown,SAT,1-AWS-1B-RAD_C_OHB_20230707124607_G_D_20220621090100_20220621090618_T_B____.nc - # W_XX-OHB-Stockholm,SAT,AWS1-MWR-1B-RAD_C_OHB_20230823161321_G_D_20240115111111_20240115125434_T_B____.nc - # W_XX-OHB-Stockholm,SAT,AWS1-MWR-1B-RAD_C_OHB_20230816120142_G_D_20240115111111_20240115125434_T_B____radsim.nc - file_reader: !!python/name:satpy.readers.aws_l1c.AWSL1CFile - file_patterns: [ - 'W_XX-OHB-Stockholm,SAT,{platform_name}-MWR-1C-RAD_C_OHB_{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_T_B____.nc', - 'W_XX-OHB-Stockholm,SAT,{platform_name}-MWR-1C-RAD_C_OHB__{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_T_B____.nc',] diff --git a/satpy/etc/readers/eps_sterna_l1b_nc.yaml b/satpy/etc/readers/eps_sterna_mwr_l1b_nc.yaml similarity index 99% rename from satpy/etc/readers/eps_sterna_l1b_nc.yaml rename to satpy/etc/readers/eps_sterna_mwr_l1b_nc.yaml index 3313703ea4..6cf6776d84 100644 --- a/satpy/etc/readers/eps_sterna_l1b_nc.yaml +++ b/satpy/etc/readers/eps_sterna_mwr_l1b_nc.yaml @@ -526,7 +526,7 @@ datasets: file_types: eps_sterna_l1b_nc: # W_XX-EUMETSAT-Darmstadt,SAT,AWS1-MWR-1B-RAD_C_EUMT_20241121085911_G_D_20241109234502_20241110004559_T_N____.nc - file_reader: !!python/name:satpy.readers.aws_l1b.AWSL1BFile + file_reader: !!python/name:satpy.readers.aws_l1b.AWS_EPS_Sterna_MWR_L1BFile file_patterns: [ 'W_XX-EUMETSAT-Darmstadt,SAT,{platform_name}-MWR-1B-RAD_C_EUMT_{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{disposition_mode:1s}_{processing_mode:1s}____.nc', ] diff --git 
a/satpy/readers/aws_l1b.py b/satpy/readers/mwr_l1b.py similarity index 91% rename from satpy/readers/aws_l1b.py rename to satpy/readers/mwr_l1b.py index b1ba4dabd2..d398cd94cd 100644 --- a/satpy/readers/aws_l1b.py +++ b/satpy/readers/mwr_l1b.py @@ -12,13 +12,17 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . -"""Reader for the Arctic Weather Satellite (AWS) Sounder level-1b data. +"""Reader for the level-1b data from the MWR sounder onboard AWS and EPS-STerna. -Test data provided by ESA August 23, 2023. +AWS = Arctic Weather Satellite. MWR = Microwave Radiometer. + +AWS test data provided by ESA August 23, 2023. Sample data for five orbits in September 2024 provided by ESA to the Science Advisory Group for MWS and AWS, November 26, 2024. +Sample EPS-Sterna l1b format AWS data from 16 orbits the 9th of November 2024. + """ import logging @@ -34,13 +38,13 @@ AWS_CHANNEL_NAMES = list(str(i) for i in range(1, 20)) -class AWSL1BFile(NetCDF4FileHandler): - """Class implementing the AWS L1b Filehandler. +class AWS_EPS_Sterna_MWR_L1BFile(NetCDF4FileHandler): + """Class implementing the AWS/EPS-Sterna MWR L1b Filehandler. - This class implements the ESA Arctic Weather Satellite (AWS) Level-1b - NetCDF reader. It is designed to be used through the :class:`~satpy.Scene` - class using the :mod:`~satpy.Scene.load` method with the reader - ``"aws_l1b_nc"``. + This class implements the ESA Arctic Weather Satellite (AWS) and EPS-Sterna + MWR Level-1b NetCDF reader. It is designed to be used through the + :class:`~satpy.Scene` class using the :mod:`~satpy.Scene.load` method with + the reader ``"mwr_l1b_nc"``. 
""" def __init__(self, filename, filename_info, filetype_info, auto_maskandscale=True): @@ -130,7 +134,7 @@ def get_dataset(self, dataset_id, dataset_info): elif dataset_id["name"] in ["longitude", "latitude"]: data_array = self._get_navigation_data(dataset_id, dataset_info) else: - raise NotImplementedError + raise NotImplementedError(f"Dataset {dataset_id['name']} not available or not supported yet!") data_array = mask_and_scale(data_array) if dataset_id["name"] == "longitude": diff --git a/satpy/readers/aws_l1c.py b/satpy/readers/mwr_l1c.py similarity index 97% rename from satpy/readers/aws_l1c.py rename to satpy/readers/mwr_l1c.py index b4566c63f4..5dac981d5c 100644 --- a/satpy/readers/aws_l1c.py +++ b/satpy/readers/mwr_l1c.py @@ -76,7 +76,7 @@ def get_dataset(self, dataset_id, dataset_info): "satellite_zenith", "satellite_azimuth"]): data_array = self._get_navigation_data(dataset_id, dataset_info) else: - raise NotImplementedError + raise NotImplementedError(f"Dataset {dataset_id['name']} not available or not supported yet!") data_array = mask_and_scale(data_array) if dataset_id["name"] == "longitude": diff --git a/satpy/tests/reader_tests/conftest.py b/satpy/tests/reader_tests/conftest.py index 8f6f572494..c1be06b29c 100644 --- a/satpy/tests/reader_tests/conftest.py +++ b/satpy/tests/reader_tests/conftest.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -# Copyright (c) 2021 Satpy developers +# Copyright (c) 2021, 2024 Satpy developers # # This file is part of satpy. # @@ -15,4 +15,5 @@ # # You should have received a copy of the GNU General Public License along with # satpy. If not, see . 
+ """Setup and configuration for all reader tests.""" diff --git a/satpy/tests/reader_tests/test_aws_l1b.py b/satpy/tests/reader_tests/test_aws1_mwr_l1b.py similarity index 79% rename from satpy/tests/reader_tests/test_aws_l1b.py rename to satpy/tests/reader_tests/test_aws1_mwr_l1b.py index 214f1e1599..817f0f23d5 100644 --- a/satpy/tests/reader_tests/test_aws_l1b.py +++ b/satpy/tests/reader_tests/test_aws1_mwr_l1b.py @@ -11,7 +11,7 @@ from trollsift import compose, parse from xarray import DataTree -from satpy.readers.aws_l1b import DATETIME_FORMAT, AWSL1BFile +from satpy.readers.mwr_l1b import DATETIME_FORMAT, AWS_EPS_Sterna_MWR_L1BFile platform_name = "AWS1" file_pattern = "W_XX-OHB-Stockholm,SAT,{platform_name}-MWR-1B-RAD_C_OHB_{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_T_B____.nc" # noqa @@ -45,8 +45,8 @@ def random_date(start, end): @pytest.fixture(scope="session") -def aws_file(tmp_path_factory): - """Create an AWS file.""" +def aws_mwr_file(tmp_path_factory): + """Create an AWS MWR l1b file.""" ds = DataTree() start_time = datetime(2024, 9, 1, 12, 0) ds.attrs["sensing_start_time_utc"] = start_time.strftime(DATETIME_FORMAT) @@ -54,7 +54,7 @@ def aws_file(tmp_path_factory): ds.attrs["sensing_end_time_utc"] = end_time.strftime(DATETIME_FORMAT) processing_time = random_date(datetime(2024, 6, 1), datetime(2030, 6, 1)) - instrument = "AWS" + instrument = "MWR" ds.attrs["instrument"] = instrument ds.attrs["orbit_start"] = 9991 ds.attrs["orbit_end"] = 9992 @@ -87,27 +87,33 @@ def aws_file(tmp_path_factory): @pytest.fixture -def aws_handler(aws_file): - """Create an aws filehandler.""" - filename_info = parse(file_pattern, os.path.basename(aws_file)) +def mwr_handler(aws_mwr_file): + """Create an AWS MWR filehandler.""" + filename_info = parse(file_pattern, os.path.basename(aws_mwr_file)) filetype_info = dict() - filetype_info["file_type"] = "aws_l1b" - return AWSL1BFile(aws_file, filename_info, filetype_info) + 
filetype_info["file_type"] = "aws1_mwr_l1b" + return AWS_EPS_Sterna_MWR_L1BFile(aws_mwr_file, filename_info, filetype_info) -def test_start_end_time(aws_handler): +def test_start_end_time(mwr_handler): """Test that start and end times are read correctly.""" - assert aws_handler.start_time == datetime(2024, 9, 1, 12, 0) - assert aws_handler.end_time == datetime(2024, 9, 1, 12, 15) + assert mwr_handler.start_time == datetime(2024, 9, 1, 12, 0) + assert mwr_handler.end_time == datetime(2024, 9, 1, 12, 15) -def test_metadata(aws_handler): +def test_orbit_number_start_end(mwr_handler): + """Test that start and end orbit number is read correctly.""" + assert mwr_handler.orbit_start == 9991 + assert mwr_handler.orbit_end == 9992 + + +def test_metadata(mwr_handler): """Test that the metadata is read correctly.""" - assert aws_handler.sensor == "AWS" - assert aws_handler.platform_name == platform_name + assert mwr_handler.sensor == "MWR" + assert mwr_handler.platform_name == platform_name -def test_get_channel_data(aws_handler): +def test_get_channel_data(mwr_handler): """Test retrieving the channel data.""" did = dict(name="1") dataset_info = dict(file_key="data/calibration/aws_toa_brightness_temperature") @@ -119,7 +125,7 @@ def test_get_channel_data(aws_handler): expected = expected.where(expected >= 0) # "calibrate" expected = expected * 0.001 - res = aws_handler.get_dataset(did, dataset_info) + res = mwr_handler.get_dataset(did, dataset_info) np.testing.assert_allclose(res, expected) assert "x" in res.dims assert "y" in res.dims @@ -127,7 +133,7 @@ def test_get_channel_data(aws_handler): assert res.attrs["orbital_parameters"]["sub_satellite_longitude_end"] == 296.79 assert res.dims == ("y", "x") assert "n_channels" not in res.coords - assert res.attrs["sensor"] == "AWS" + assert res.attrs["sensor"] == "MWR" assert res.attrs["platform_name"] == "AWS1" @@ -135,12 +141,12 @@ def test_get_channel_data(aws_handler): [("longitude", "data/navigation/aws_lon", fake_lon_data * 
1e-4), ("latitude", "data/navigation/aws_lat", fake_lat_data), ]) -def test_get_navigation_data(aws_handler, id_name, file_key, fake_array): +def test_get_navigation_data(mwr_handler, id_name, file_key, fake_array): """Test retrieving the geolocation (lon-lat) data.""" Horn = Enum("Horn", ["1", "2", "3", "4"]) did = dict(name=id_name, horn=Horn["1"]) dataset_info = dict(file_key=file_key, standard_name=id_name) - res = aws_handler.get_dataset(did, dataset_info) + res = mwr_handler.get_dataset(did, dataset_info) if id_name == "longitude": fake_array = fake_array.where(fake_array <= 180, fake_array - 360) @@ -160,13 +166,13 @@ def test_get_navigation_data(aws_handler, id_name, file_key, fake_array): ("solar_zenith_horn1", "data/navigation/aws_solar_zenith_angle", fake_sun_zen_data), ("satellite_azimuth_horn1", "data/navigation/aws_satellite_azimuth_angle", fake_sat_azi_data), ("satellite_zenith_horn1", "data/navigation/aws_satellite_zenith_angle", fake_sat_zen_data)]) -def test_get_viewing_geometry_data(aws_handler, id_name, file_key, fake_array): +def test_get_viewing_geometry_data(mwr_handler, id_name, file_key, fake_array): """Test retrieving the angles_data.""" Horn = Enum("Horn", ["1", "2", "3", "4"]) dset_id = dict(name=id_name, horn=Horn["1"]) dataset_info = dict(file_key=file_key, standard_name=id_name) - res = aws_handler.get_dataset(dset_id, dataset_info) + res = mwr_handler.get_dataset(dset_id, dataset_info) np.testing.assert_allclose(res, fake_array.isel(n_geo_groups=0)) assert "x" in res.dims @@ -177,3 +183,12 @@ def test_get_viewing_geometry_data(aws_handler, id_name, file_key, fake_array): assert "n_geo_groups" not in res.coords if id_name == "longitude": assert res.max() <= 180 + +def test_try_get_data_not_in_file(mwr_handler): + """Test retrieving a data field that is not available in the file.""" + did = dict(name="toa_brightness_temperature") + dataset_info = dict(file_key="data/calibration/toa_brightness_temperature") + + match_str = "Dataset 
toa_brightness_temperature not available or not supported yet!" + with pytest.raises(NotImplementedError, match=match_str): + _ = mwr_handler.get_dataset(did, dataset_info) diff --git a/satpy/tests/reader_tests/test_eps_sterna_mwr_l1b.py b/satpy/tests/reader_tests/test_eps_sterna_mwr_l1b.py new file mode 100644 index 0000000000..38c6d3aa78 --- /dev/null +++ b/satpy/tests/reader_tests/test_eps_sterna_mwr_l1b.py @@ -0,0 +1,130 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# Copyright (c) 2024 Satpy developers + +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. + +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . 
+ +"""Tests for the EPS-Sterna MWR l1b filehandlers.""" + +import os +from datetime import datetime +from enum import Enum + +import numpy as np +import pytest +import xarray as xr +from trollsift import compose, parse +from xarray import DataTree + +from satpy.readers.mwr_l1b import DATETIME_FORMAT, AWS_EPS_Sterna_MWR_L1BFile +from satpy.tests.reader_tests.test_aws1_mwr_l1b import random_date + +platform_name = "AWS1" +# W_XX-EUMETSAT-Darmstadt,SAT,AWS1-MWR-1B-RAD_C_EUMT_20241121085911_G_D_20241109234502_20241110004559_T_N____.nc +file_pattern = "W_XX-EUMETSAT-Darmstadt,SAT,{platform_name}-MWR-1B-RAD_C_OHB_{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_T_B____.nc" # noqa + +rng = np.random.default_rng() + +fake_data_np = rng.integers(0, 700000, size=10*145*19).reshape((10, 145, 19)) +fake_data_np[0, 0, 0] = -2147483648 +fake_data_np[1, 0, 0] = 700000 + 10 +fake_data_np[2, 0, 0] = -10 + +ARRAY_DIMS = ["n_scans", "n_fovs", "n_channels"] +fake_data = xr.DataArray(fake_data_np, dims=ARRAY_DIMS) + +GEO_DIMS = ["n_scans", "n_fovs", "n_feedhorns"] +GEO_SIZE = 10*145*4 +fake_lon_data = xr.DataArray(rng.integers(0, 3599999, size=GEO_SIZE).reshape((10, 145, 4)), dims=GEO_DIMS) +fake_lat_data = xr.DataArray(rng.integers(-900000, 900000, size=GEO_SIZE).reshape((10, 145, 4)), dims=GEO_DIMS) +fake_sun_azi_data = xr.DataArray(rng.integers(0, 36000, size=GEO_SIZE).reshape((10, 145, 4)), dims=GEO_DIMS) +fake_sun_zen_data = xr.DataArray(rng.integers(0, 36000, size=GEO_SIZE).reshape((10, 145, 4)), dims=GEO_DIMS) +fake_sat_azi_data = xr.DataArray(rng.integers(0, 36000, size=GEO_SIZE).reshape((10, 145, 4)), dims=GEO_DIMS) +fake_sat_zen_data = xr.DataArray(rng.integers(0, 36000, size=GEO_SIZE).reshape((10, 145, 4)), dims=GEO_DIMS) + + +@pytest.fixture(scope="session") +def eps_sterna_mwr_file(tmp_path_factory): + """Create an EPS-Sterna MWR l1b file.""" + ds = DataTree() + start_time = datetime(2024, 9, 1, 12, 0) + 
ds.attrs["sensing_start_time_utc"] = start_time.strftime(DATETIME_FORMAT) + end_time = datetime(2024, 9, 1, 12, 15) + ds.attrs["sensing_end_time_utc"] = end_time.strftime(DATETIME_FORMAT) + processing_time = random_date(datetime(2024, 6, 1), datetime(2030, 6, 1)) + + instrument = "MWR" + ds.attrs["instrument"] = instrument + ds.attrs["orbit_start"] = 9991 + ds.attrs["orbit_end"] = 9992 + ds["data/calibration/toa_brightness_temperature"] = fake_data + ds["data/calibration/toa_brightness_temperature"].attrs["scale_factor"] = 0.001 + ds["data/calibration/toa_brightness_temperature"].attrs["add_offset"] = 0.0 + ds["data/calibration/toa_brightness_temperature"].attrs["missing_value"] = -2147483648 + ds["data/calibration/toa_brightness_temperature"].attrs["valid_min"] = 0 + ds["data/calibration/toa_brightness_temperature"].attrs["valid_max"] = 700000 + + ds["data/navigation/longitude"] = fake_lon_data + ds["data/navigation/longitude"].attrs["scale_factor"] = 1e-4 + ds["data/navigation/longitude"].attrs["add_offset"] = 0.0 + ds["data/navigation/latitude"] = fake_lat_data + ds["data/navigation/solar_azimuth_angle"] = fake_sun_azi_data + ds["data/navigation/solar_zenith_angle"] = fake_sun_zen_data + ds["data/navigation/satellite_azimuth_angle"] = fake_sat_azi_data + ds["data/navigation/satellite_zenith_angle"] = fake_sat_zen_data + ds["status/satellite/subsat_latitude_end"] = np.array(22.39) + ds["status/satellite/subsat_longitude_start"] = np.array(304.79) + ds["status/satellite/subsat_latitude_start"] = np.array(55.41) + ds["status/satellite/subsat_longitude_end"] = np.array(296.79) + + tmp_dir = tmp_path_factory.mktemp("eps_sterna_mwr_l1b_tests") + filename = tmp_dir / compose(file_pattern, dict(start_time=start_time, end_time=end_time, + processing_time=processing_time, platform_name=platform_name)) + + ds.to_netcdf(filename) + return filename + + +@pytest.fixture +def mwr_handler(eps_sterna_mwr_file): + """Create an EPS-Sterna MWR filehandler.""" + filename_info = 
parse(file_pattern, os.path.basename(eps_sterna_mwr_file)) + filetype_info = dict() + filetype_info["file_type"] = "eps_sterna_mwr_l1b" + return AWS_EPS_Sterna_MWR_L1BFile(eps_sterna_mwr_file, filename_info, filetype_info) + + +@pytest.mark.parametrize(("id_name", "file_key", "fake_array"), + [("longitude", "data/navigation/longitude", fake_lon_data * 1e-4), + ("latitude", "data/navigation/latitude", fake_lat_data), + ]) +def test_get_navigation_data(mwr_handler, id_name, file_key, fake_array): + """Test retrieving the geolocation (lon-lat) data.""" + Horn = Enum("Horn", ["1", "2", "3", "4"]) + did = dict(name=id_name, horn=Horn["1"]) + dataset_info = dict(file_key=file_key, standard_name=id_name) + res = mwr_handler.get_dataset(did, dataset_info) + if id_name == "longitude": + fake_array = fake_array.where(fake_array <= 180, fake_array - 360) + + np.testing.assert_allclose(res, fake_array.isel(n_feedhorns=0)) + assert "x" in res.dims + assert "y" in res.dims + assert "orbital_parameters" in res.attrs + assert res.dims == ("y", "x") + assert "standard_name" in res.attrs + assert "n_feedhorns" not in res.coords + if id_name == "longitude": + assert res.max() <= 180 From 9ec30bca409c581a25eb83fd2907719cac9db808 Mon Sep 17 00:00:00 2001 From: "Adam.Dybbroe" Date: Mon, 16 Dec 2024 15:44:16 +0100 Subject: [PATCH 271/340] Refactor tests and fix naming for AWS/EPS-Sterna Radiometer = MWR Signed-off-by: Adam.Dybbroe --- pyproject.toml | 1 - satpy/etc/composites/{aws.yaml => mwr.yaml} | 0 satpy/etc/readers/aws1_mwr_l1c_nc.yaml | 375 ++++++++++++++++++ satpy/etc/readers/eps_sterna_mwr_l1b_nc.yaml | 4 +- satpy/readers/mwr_l1c.py | 6 +- satpy/tests/reader_tests/conftest.py | 169 ++++++++ satpy/tests/reader_tests/test_aws1_mwr_l1b.py | 136 ++----- .../reader_tests/test_eps_sterna_mwr_l1b.py | 99 +---- 8 files changed, 598 insertions(+), 192 deletions(-) rename satpy/etc/composites/{aws.yaml => mwr.yaml} (100%) create mode 100644 satpy/etc/readers/aws1_mwr_l1c_nc.yaml diff --git 
a/pyproject.toml b/pyproject.toml index 33b2d4de98..9ed0eda02d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -64,7 +64,6 @@ hsaf_grib = ["pygrib"] remote_reading = ["fsspec"] insat_3d = ["xarray>=2024.10.0"] gms5-vissr_l1b = ["numba"] -aws_l1b = ["xarray-datatree"] # Writers: cf = ["h5netcdf >= 0.7.3"] awips_tiled = ["netCDF4 >= 1.1.8"] diff --git a/satpy/etc/composites/aws.yaml b/satpy/etc/composites/mwr.yaml similarity index 100% rename from satpy/etc/composites/aws.yaml rename to satpy/etc/composites/mwr.yaml diff --git a/satpy/etc/readers/aws1_mwr_l1c_nc.yaml b/satpy/etc/readers/aws1_mwr_l1c_nc.yaml new file mode 100644 index 0000000000..7df360ce4a --- /dev/null +++ b/satpy/etc/readers/aws1_mwr_l1c_nc.yaml @@ -0,0 +1,375 @@ +reader: + name: aws_l1c_nc + short_name: AWS L1C RAD NetCDF4 + long_name: AWS L1C Radiance (NetCDF4) + description: Reader for the ESA AWS (Arctic Weather Satellite) MWR level-1c files in netCDF4. + reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader + sensors: [mwr,] + status: Beta + supports_fsspec: false + + data_identification_keys: + name: + required: true + frequency_double_sideband: + type: !!python/name:satpy.readers.pmw_channels_definitions.FrequencyDoubleSideBand + frequency_range: + type: !!python/name:satpy.readers.pmw_channels_definitions.FrequencyRange + resolution: + polarization: + enum: + - QH + - QV + calibration: + enum: + - brightness_temperature + transitive: true + modifiers: + required: true + default: [] + type: !!python/name:satpy.dataset.ModifierTuple + + coord_identification_keys: + name: + required: true + resolution: + polarization: + enum: + - QH + - QV + +datasets: + '1': + name: '1' + frequency_range: + central: 50.3 + bandwidth: 0.180 + unit: GHz + polarization: 'QH' + resolution: 40000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [longitude, latitude] + file_type: aws_l1c_nc + file_key: data/calibration/aws_toa_brightness_temperature + 
'2': + name: '2' + frequency_range: + central: 52.8 + bandwidth: 0.400 + unit: GHz + polarization: 'QH' + resolution: 40000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [longitude, latitude] + file_type: aws_l1c_nc + file_key: data/calibration/aws_toa_brightness_temperature + '3': + name: '3' + frequency_range: + central: 53.246 + bandwidth: 0.300 + unit: GHz + polarization: 'QH' + resolution: 40000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [longitude, latitude] + file_type: aws_l1c_nc + file_key: data/calibration/aws_toa_brightness_temperature + '4': + name: '4' + frequency_range: + central: 53.596 + bandwidth: 0.370 + unit: GHz + polarization: 'QH' + resolution: 40000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [longitude, latitude] + file_type: aws_l1c_nc + file_key: data/calibration/aws_toa_brightness_temperature + '5': + name: '5' + frequency_range: + central: 54.4 + bandwidth: 0.400 + unit: GHz + polarization: 'QH' + resolution: 40000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [longitude, latitude] + file_type: aws_l1c_nc + file_key: data/calibration/aws_toa_brightness_temperature + '6': + name: '6' + frequency_range: + central: 54.94 + bandwidth: 0.400 + unit: GHz + polarization: 'QH' + resolution: 40000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [longitude, latitude] + file_type: aws_l1c_nc + file_key: data/calibration/aws_toa_brightness_temperature + '7': + name: '7' + frequency_range: + central: 55.5 + bandwidth: 0.330 + unit: GHz + polarization: 'QH' + resolution: 40000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [longitude, latitude] + file_type: aws_l1c_nc + file_key: data/calibration/aws_toa_brightness_temperature + 
'8': + name: '8' + frequency_range: + central: 57.290344 + bandwidth: 0.330 + unit: GHz + polarization: 'QH' + resolution: 40000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [longitude, latitude] + file_type: aws_l1c_nc + file_key: data/calibration/aws_toa_brightness_temperature + '9': + name: '9' + frequency_range: + central: 89.0 + bandwidth: 4.0 + unit: GHz + polarization: 'QV' + resolution: 20000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [longitude, latitude] + file_type: aws_l1c_nc + file_key: data/calibration/aws_toa_brightness_temperature + '10': + name: '10' + frequency_range: + central: 165.5 + bandwidth: 2.700 + unit: GHz + polarization: 'QH' + resolution: 20000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [longitude, latitude] + file_type: aws_l1c_nc + file_key: data/calibration/aws_toa_brightness_temperature + '11': + name: '11' + frequency_range: + central: 176.311 + bandwidth: 2.0 + unit: GHz + polarization: 'QV' + resolution: 10000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [longitude, latitude] + file_type: aws_l1c_nc + file_key: data/calibration/aws_toa_brightness_temperature + '12': + name: '12' + frequency_range: + central: 178.811 + bandwidth: 2.0 + unit: GHz + polarization: 'QV' + resolution: 10000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [longitude, latitude] + file_type: aws_l1c_nc + file_key: data/calibration/aws_toa_brightness_temperature + '13': + name: '13' + frequency_range: + central: 180.311 + bandwidth: 1.0 + unit: GHz + polarization: 'QV' + resolution: 10000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [longitude, latitude] + file_type: aws_l1c_nc + file_key: 
data/calibration/aws_toa_brightness_temperature + '14': + name: '14' + frequency_range: + central: 181.511 + bandwidth: 1.0 + unit: GHz + polarization: 'QV' + resolution: 10000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [longitude, latitude] + file_type: aws_l1c_nc + file_key: data/calibration/aws_toa_brightness_temperature + '15': + name: '15' + frequency_range: + central: 182.311 + bandwidth: 0.5 + unit: GHz + polarization: 'QV' + resolution: 10000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [longitude, latitude] + file_type: aws_l1c_nc + file_key: data/calibration/aws_toa_brightness_temperature + '16': + name: '16' + frequency_double_sideband: + central: 325.15 + side: 1.2 + bandwidth: 0.8 + unit: GHz + polarization: 'QV' + resolution: 10000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [longitude, latitude] + file_type: aws_l1c_nc + file_key: data/calibration/aws_toa_brightness_temperature + '17': + name: '17' + frequency_double_sideband: + central: 325.15 + side: 2.4 + bandwidth: 1.2 + unit: GHz + polarization: 'QV' + resolution: 10000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [longitude, latitude] + file_type: aws_l1c_nc + file_key: data/calibration/aws_toa_brightness_temperature + '18': + name: '18' + frequency_double_sideband: + central: 325.15 + side: 4.1 + bandwidth: 1.8 + unit: GHz + polarization: 'QV' + resolution: 10000 + calibration: + brightness_temperature: + standard_name: toa_brightness_temperature + coordinates: [longitude, latitude] + file_type: aws_l1c_nc + file_key: data/calibration/aws_toa_brightness_temperature + '19': + name: '19' + frequency_double_sideband: + central: 325.15 + side: 6.6 + bandwidth: 2.8 + unit: GHz + polarization: 'QV' + resolution: 10000 + calibration: + brightness_temperature: + standard_name: 
toa_brightness_temperature + coordinates: [longitude, latitude] + file_type: aws_l1c_nc + file_key: data/calibration/aws_toa_brightness_temperature + +# --- Coordinates --- + + longitude: + name: longitude + file_type: aws_l1c_nc + standard_name: longitude + units: degrees_east + file_key: data/navigation/aws_lon + + latitude: + name: latitude + file_type: aws_l1c_nc + standard_name: latitude + units: degrees_north + file_key: data/navigation/aws_lat + +# --- Navigation data --- + + solar_azimuth: + name: solar_azimuth + file_type: aws_l1c_nc + file_key: data/navigation/aws_solar_azimuth_angle + standard_name: solar_azimuth_angle + coordinates: + - longitude + - latitude + + solar_zenith: + name: solar_zenith + file_type: aws_l1c_nc + file_key: data/navigation/aws_solar_zenith_angle + standard_name: solar_zenith_angle + coordinates: + - longitude + - latitude + + satellite_azimuth: + name: satellite_azimuth + file_type: aws_l1c_nc + file_key: data/navigation/aws_satellite_azimuth_angle + standard_name: satellite_azimuth_angle + coordinates: + - longitude + - latitude + + satellite_zenith: + name: satellite_zenith + file_type: aws_l1c_nc + file_key: data/navigation/aws_satellite_zenith_angle + standard_name: satellite_zenith_angle + coordinates: + - longitude + - latitude + +file_types: + aws_l1c_nc: + # W_XX-OHB-Unknown,SAT,1-AWS-1B-RAD_C_OHB_20230707124607_G_D_20220621090100_20220621090618_T_B____.nc + # W_XX-OHB-Stockholm,SAT,AWS1-MWR-1B-RAD_C_OHB_20230823161321_G_D_20240115111111_20240115125434_T_B____.nc + # W_XX-OHB-Stockholm,SAT,AWS1-MWR-1B-RAD_C_OHB_20230816120142_G_D_20240115111111_20240115125434_T_B____radsim.nc + file_reader: !!python/name:satpy.readers.mwr_l1c.AWSL1CFile + file_patterns: [ + 'W_XX-OHB-Stockholm,SAT,{platform_name}-MWR-1C-RAD_C_OHB_{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_T_B____.nc', + 
'W_XX-OHB-Stockholm,SAT,{platform_name}-MWR-1C-RAD_C_OHB__{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_T_B____.nc',] diff --git a/satpy/etc/readers/eps_sterna_mwr_l1b_nc.yaml b/satpy/etc/readers/eps_sterna_mwr_l1b_nc.yaml index 6cf6776d84..c3a665b9c4 100644 --- a/satpy/etc/readers/eps_sterna_mwr_l1b_nc.yaml +++ b/satpy/etc/readers/eps_sterna_mwr_l1b_nc.yaml @@ -4,7 +4,7 @@ reader: long_name: AWS L1B Radiance (NetCDF4) description: Reader for the EUMETSAT EPS-Sterna Sounder level-1b files in netCDF4. reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader - sensors: [aws,] + sensors: [mwr,] status: Beta supports_fsspec: false @@ -526,7 +526,7 @@ datasets: file_types: eps_sterna_l1b_nc: # W_XX-EUMETSAT-Darmstadt,SAT,AWS1-MWR-1B-RAD_C_EUMT_20241121085911_G_D_20241109234502_20241110004559_T_N____.nc - file_reader: !!python/name:satpy.readers.aws_l1b.AWS_EPS_Sterna_MWR_L1BFile + file_reader: !!python/name:satpy.readers.mwr_l1b.AWS_EPS_Sterna_MWR_L1BFile file_patterns: [ 'W_XX-EUMETSAT-Darmstadt,SAT,{platform_name}-MWR-1B-RAD_C_EUMT_{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{disposition_mode:1s}_{processing_mode:1s}____.nc', ] diff --git a/satpy/readers/mwr_l1c.py b/satpy/readers/mwr_l1c.py index 5dac981d5c..9acfc604a7 100644 --- a/satpy/readers/mwr_l1c.py +++ b/satpy/readers/mwr_l1c.py @@ -1,4 +1,4 @@ -# Copyright (c) 2023, 2024 Pytroll Developers +# Copyright (c) 2024 Pytroll Developers # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -12,7 +12,9 @@ # You should have received a copy of the GNU General Public License # along with this program. If not, see . -"""Reader for the Arctic Weather Satellite (AWS) Sounder level-1c data. +"""Reader for the Arctic Weather Satellite (AWS) MWR level-1c data. 
+ +MWR = Microwaver Radiometer, onboard AWS and EPS-Sterna Sample data provided by ESA September 27, 2024. """ diff --git a/satpy/tests/reader_tests/conftest.py b/satpy/tests/reader_tests/conftest.py index c1be06b29c..65c1303865 100644 --- a/satpy/tests/reader_tests/conftest.py +++ b/satpy/tests/reader_tests/conftest.py @@ -17,3 +17,172 @@ # satpy. If not, see . """Setup and configuration for all reader tests.""" + +import os +from datetime import datetime, timedelta +from random import randrange + +import numpy as np +import pytest +import xarray as xr +from trollsift import compose, parse +from xarray import DataTree + +from satpy.readers.mwr_l1b import DATETIME_FORMAT, AWS_EPS_Sterna_MWR_L1BFile + +platform_name = "AWS1" +# W_XX-EUMETSAT-Darmstadt,SAT,AWS1-MWR-1B-RAD_C_EUMT_20241121085911_G_D_20241109234502_20241110004559_T_N____.nc +eumetsat_file_pattern = "W_XX-EUMETSAT-Darmstadt,SAT,{platform_name}-MWR-1B-RAD_C_OHB_{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_T_B____.nc" # noqa + +esa_file_pattern = "W_XX-OHB-Stockholm,SAT,{platform_name}-MWR-1B-RAD_C_OHB_{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_T_B____.nc" # noqa + +rng = np.random.default_rng() + +def random_date(start, end): + """Create a random datetime between two datetimes.""" + delta = end - start + int_delta = (delta.days * 24 * 60 * 60) + delta.seconds + random_second = randrange(int_delta) + return start + timedelta(seconds=random_second) + + +@pytest.fixture(scope="session") +def fake_mwr_data_array(): + """Return a fake AWS/EPS-Sterna MWR l1b data array.""" + fake_data_np = rng.integers(0, 700000, size=10*145*19).reshape((10, 145, 19)) + fake_data_np[0, 0, 0] = -2147483648 + fake_data_np[1, 0, 0] = 700000 + 10 + fake_data_np[2, 0, 0] = -10 + array_dims = ["n_scans", "n_fovs", "n_channels"] + return xr.DataArray(fake_data_np, dims=array_dims) + + +def make_fake_angles(geo_size, geo_dims): + """Return fake 
sun-satellite angle array.""" + maxval = 36000 + dummy_array = (np.arange(0, geo_size) * maxval/geo_size).astype("int32") + return xr.DataArray(dummy_array.reshape((10, 145, 4)), dims=geo_dims) + + +def make_fake_mwr_lonlats(geo_size, geo_dims): + """Return fake geolocation data arrays.""" + maxval = 3600000 + dummy_array = (np.arange(0, geo_size) * maxval/geo_size).astype("int32") + fake_lon_data = xr.DataArray(dummy_array.reshape((10, 145, 4)), dims=geo_dims) + maxval = 1800000 + dummy_array = (np.arange(0, geo_size) * maxval/geo_size - maxval/2).astype("int32") + fake_lat_data = xr.DataArray(dummy_array.reshape((10, 145, 4)), dims=geo_dims) + return (fake_lon_data, fake_lat_data) + + +@pytest.fixture(scope="module") +def eps_sterna_mwr_file(tmp_path_factory, fake_mwr_data_array): + """Create an EPS-Sterna MWR l1b file.""" + geo_dims = ["n_scans", "n_fovs", "n_feedhorns"] + geo_size = 10*145*4 + + ds = DataTree() + start_time = datetime(2024, 9, 1, 12, 0) + ds.attrs["sensing_start_time_utc"] = start_time.strftime(DATETIME_FORMAT) + end_time = datetime(2024, 9, 1, 12, 15) + ds.attrs["sensing_end_time_utc"] = end_time.strftime(DATETIME_FORMAT) + processing_time = random_date(datetime(2024, 9, 1, 13), datetime(2030, 6, 1)) + + instrument = "MWR" + ds.attrs["instrument"] = instrument + ds.attrs["orbit_start"] = 9991 + ds.attrs["orbit_end"] = 9992 + ds["data/calibration/toa_brightness_temperature"] = fake_mwr_data_array + ds["data/calibration/toa_brightness_temperature"].attrs["scale_factor"] = 0.001 + ds["data/calibration/toa_brightness_temperature"].attrs["add_offset"] = 0.0 + ds["data/calibration/toa_brightness_temperature"].attrs["missing_value"] = -2147483648 + ds["data/calibration/toa_brightness_temperature"].attrs["valid_min"] = 0 + ds["data/calibration/toa_brightness_temperature"].attrs["valid_max"] = 700000 + + fake_lon_data, fake_lat_data = make_fake_mwr_lonlats(geo_size, geo_dims) + + ds["data/navigation/longitude"] = fake_lon_data + 
ds["data/navigation/longitude"].attrs["scale_factor"] = 1e-4 + ds["data/navigation/longitude"].attrs["add_offset"] = 0.0 + ds["data/navigation/latitude"] = fake_lat_data + ds["data/navigation/solar_azimuth_angle"] = make_fake_angles(geo_size, geo_dims) + ds["data/navigation/solar_zenith_angle"] = make_fake_angles(geo_size, geo_dims) + ds["data/navigation/satellite_azimuth_angle"] = make_fake_angles(geo_size, geo_dims) + ds["data/navigation/satellite_zenith_angle"] = make_fake_angles(geo_size, geo_dims) + ds["status/satellite/subsat_latitude_end"] = np.array(22.39) + ds["status/satellite/subsat_longitude_start"] = np.array(304.79) + ds["status/satellite/subsat_latitude_start"] = np.array(55.41) + ds["status/satellite/subsat_longitude_end"] = np.array(296.79) + + tmp_dir = tmp_path_factory.mktemp("eps_sterna_mwr_l1b_tests") + filename = tmp_dir / compose(eumetsat_file_pattern, dict(start_time=start_time, end_time=end_time, + processing_time=processing_time, + platform_name=platform_name)) + + ds.to_netcdf(filename) + return filename + + +@pytest.fixture +def eps_sterna_mwr_handler(eps_sterna_mwr_file): + """Create an EPS-Sterna MWR filehandler.""" + filename_info = parse(eumetsat_file_pattern, os.path.basename(eps_sterna_mwr_file)) + filetype_info = dict() + filetype_info["file_type"] = "eps_sterna_mwr_l1b" + return AWS_EPS_Sterna_MWR_L1BFile(eps_sterna_mwr_file, filename_info, filetype_info) + + +@pytest.fixture(scope="session") +def aws_mwr_file(tmp_path_factory, fake_mwr_data_array): + """Create an AWS MWR l1b file.""" + geo_dims = ["n_scans", "n_fovs", "n_geo_groups"] + geo_size = 10*145*4 + + ds = DataTree() + start_time = datetime(2024, 9, 1, 12, 0) + ds.attrs["sensing_start_time_utc"] = start_time.strftime(DATETIME_FORMAT) + end_time = datetime(2024, 9, 1, 12, 15) + ds.attrs["sensing_end_time_utc"] = end_time.strftime(DATETIME_FORMAT) + processing_time = random_date(datetime(2024, 6, 1), datetime(2030, 6, 1)) + + instrument = "MWR" + ds.attrs["instrument"] = 
instrument + ds.attrs["orbit_start"] = 9991 + ds.attrs["orbit_end"] = 9992 + ds["data/calibration/aws_toa_brightness_temperature"] = fake_mwr_data_array + ds["data/calibration/aws_toa_brightness_temperature"].attrs["scale_factor"] = 0.001 + ds["data/calibration/aws_toa_brightness_temperature"].attrs["add_offset"] = 0.0 + ds["data/calibration/aws_toa_brightness_temperature"].attrs["missing_value"] = -2147483648 + ds["data/calibration/aws_toa_brightness_temperature"].attrs["valid_min"] = 0 + ds["data/calibration/aws_toa_brightness_temperature"].attrs["valid_max"] = 700000 + + fake_lon_data, fake_lat_data = make_fake_mwr_lonlats(geo_size, geo_dims) + + ds["data/navigation/aws_lon"] = fake_lon_data + ds["data/navigation/aws_lon"].attrs["scale_factor"] = 1e-4 + ds["data/navigation/aws_lon"].attrs["add_offset"] = 0.0 + ds["data/navigation/aws_lat"] = fake_lat_data + ds["data/navigation/aws_solar_azimuth_angle"] = make_fake_angles(geo_size, geo_dims) + ds["data/navigation/aws_solar_zenith_angle"] = make_fake_angles(geo_size, geo_dims) + ds["data/navigation/aws_satellite_azimuth_angle"] = make_fake_angles(geo_size, geo_dims) + ds["data/navigation/aws_satellite_zenith_angle"] = make_fake_angles(geo_size, geo_dims) + ds["status/satellite/subsat_latitude_end"] = np.array(22.39) + ds["status/satellite/subsat_longitude_start"] = np.array(304.79) + ds["status/satellite/subsat_latitude_start"] = np.array(55.41) + ds["status/satellite/subsat_longitude_end"] = np.array(296.79) + + tmp_dir = tmp_path_factory.mktemp("aws_l1b_tests") + filename = tmp_dir / compose(esa_file_pattern, dict(start_time=start_time, end_time=end_time, + processing_time=processing_time, platform_name=platform_name)) + + ds.to_netcdf(filename) + return filename + + +@pytest.fixture +def aws_mwr_handler(aws_mwr_file): + """Create an AWS MWR filehandler.""" + filename_info = parse(esa_file_pattern, os.path.basename(aws_mwr_file)) + filetype_info = dict() + filetype_info["file_type"] = "aws1_mwr_l1b" + return 
AWS_EPS_Sterna_MWR_L1BFile(aws_mwr_file, filename_info, filetype_info) diff --git a/satpy/tests/reader_tests/test_aws1_mwr_l1b.py b/satpy/tests/reader_tests/test_aws1_mwr_l1b.py index 817f0f23d5..6441715223 100644 --- a/satpy/tests/reader_tests/test_aws1_mwr_l1b.py +++ b/satpy/tests/reader_tests/test_aws1_mwr_l1b.py @@ -1,123 +1,50 @@ """Tests for aws l1b filehandlers.""" -import os -from datetime import datetime, timedelta +from datetime import datetime from enum import Enum -from random import randrange import numpy as np import pytest -import xarray as xr -from trollsift import compose, parse -from xarray import DataTree -from satpy.readers.mwr_l1b import DATETIME_FORMAT, AWS_EPS_Sterna_MWR_L1BFile +from satpy.tests.reader_tests.conftest import make_fake_angles, make_fake_mwr_lonlats platform_name = "AWS1" file_pattern = "W_XX-OHB-Stockholm,SAT,{platform_name}-MWR-1B-RAD_C_OHB_{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_T_B____.nc" # noqa -rng = np.random.default_rng() - -fake_data_np = rng.integers(0, 700000, size=10*145*19).reshape((10, 145, 19)) -fake_data_np[0, 0, 0] = -2147483648 -fake_data_np[1, 0, 0] = 700000 + 10 -fake_data_np[2, 0, 0] = -10 - -ARRAY_DIMS = ["n_scans", "n_fovs", "n_channels"] -fake_data = xr.DataArray(fake_data_np, dims=ARRAY_DIMS) - -GEO_DIMS = ["n_scans", "n_fovs", "n_geo_groups"] -GEO_SIZE = 10*145*4 -fake_lon_data = xr.DataArray(rng.integers(0, 3599999, size=GEO_SIZE).reshape((10, 145, 4)), dims=GEO_DIMS) -fake_lat_data = xr.DataArray(rng.integers(-900000, 900000, size=GEO_SIZE).reshape((10, 145, 4)), dims=GEO_DIMS) -fake_sun_azi_data = xr.DataArray(rng.integers(0, 36000, size=GEO_SIZE).reshape((10, 145, 4)), dims=GEO_DIMS) -fake_sun_zen_data = xr.DataArray(rng.integers(0, 36000, size=GEO_SIZE).reshape((10, 145, 4)), dims=GEO_DIMS) -fake_sat_azi_data = xr.DataArray(rng.integers(0, 36000, size=GEO_SIZE).reshape((10, 145, 4)), dims=GEO_DIMS) -fake_sat_zen_data = xr.DataArray(rng.integers(0, 
36000, size=GEO_SIZE).reshape((10, 145, 4)), dims=GEO_DIMS) - - -def random_date(start, end): - """Create a random datetime between two datetimes.""" - delta = end - start - int_delta = (delta.days * 24 * 60 * 60) + delta.seconds - random_second = randrange(int_delta) - return start + timedelta(seconds=random_second) - - -@pytest.fixture(scope="session") -def aws_mwr_file(tmp_path_factory): - """Create an AWS MWR l1b file.""" - ds = DataTree() - start_time = datetime(2024, 9, 1, 12, 0) - ds.attrs["sensing_start_time_utc"] = start_time.strftime(DATETIME_FORMAT) - end_time = datetime(2024, 9, 1, 12, 15) - ds.attrs["sensing_end_time_utc"] = end_time.strftime(DATETIME_FORMAT) - processing_time = random_date(datetime(2024, 6, 1), datetime(2030, 6, 1)) - - instrument = "MWR" - ds.attrs["instrument"] = instrument - ds.attrs["orbit_start"] = 9991 - ds.attrs["orbit_end"] = 9992 - ds["data/calibration/aws_toa_brightness_temperature"] = fake_data - ds["data/calibration/aws_toa_brightness_temperature"].attrs["scale_factor"] = 0.001 - ds["data/calibration/aws_toa_brightness_temperature"].attrs["add_offset"] = 0.0 - ds["data/calibration/aws_toa_brightness_temperature"].attrs["missing_value"] = -2147483648 - ds["data/calibration/aws_toa_brightness_temperature"].attrs["valid_min"] = 0 - ds["data/calibration/aws_toa_brightness_temperature"].attrs["valid_max"] = 700000 - - ds["data/navigation/aws_lon"] = fake_lon_data - ds["data/navigation/aws_lon"].attrs["scale_factor"] = 1e-4 - ds["data/navigation/aws_lon"].attrs["add_offset"] = 0.0 - ds["data/navigation/aws_lat"] = fake_lat_data - ds["data/navigation/aws_solar_azimuth_angle"] = fake_sun_azi_data - ds["data/navigation/aws_solar_zenith_angle"] = fake_sun_zen_data - ds["data/navigation/aws_satellite_azimuth_angle"] = fake_sat_azi_data - ds["data/navigation/aws_satellite_zenith_angle"] = fake_sat_zen_data - ds["status/satellite/subsat_latitude_end"] = np.array(22.39) - ds["status/satellite/subsat_longitude_start"] = np.array(304.79) 
- ds["status/satellite/subsat_latitude_start"] = np.array(55.41) - ds["status/satellite/subsat_longitude_end"] = np.array(296.79) - - tmp_dir = tmp_path_factory.mktemp("aws_l1b_tests") - filename = tmp_dir / compose(file_pattern, dict(start_time=start_time, end_time=end_time, - processing_time=processing_time, platform_name=platform_name)) - - ds.to_netcdf(filename) - return filename - - -@pytest.fixture -def mwr_handler(aws_mwr_file): - """Create an AWS MWR filehandler.""" - filename_info = parse(file_pattern, os.path.basename(aws_mwr_file)) - filetype_info = dict() - filetype_info["file_type"] = "aws1_mwr_l1b" - return AWS_EPS_Sterna_MWR_L1BFile(aws_mwr_file, filename_info, filetype_info) - - -def test_start_end_time(mwr_handler): + +geo_dims = ["n_scans", "n_fovs", "n_geo_groups"] +geo_size = 10*145*4 +fake_lon_data, fake_lat_data = make_fake_mwr_lonlats(geo_size, geo_dims) +fake_sun_azi_data = make_fake_angles(geo_size, geo_dims) +fake_sun_zen_data = make_fake_angles(geo_size, geo_dims) +fake_sat_azi_data = make_fake_angles(geo_size, geo_dims) +fake_sat_zen_data = make_fake_angles(geo_size, geo_dims) + + + +def test_start_end_time(aws_mwr_handler): """Test that start and end times are read correctly.""" - assert mwr_handler.start_time == datetime(2024, 9, 1, 12, 0) - assert mwr_handler.end_time == datetime(2024, 9, 1, 12, 15) + assert aws_mwr_handler.start_time == datetime(2024, 9, 1, 12, 0) + assert aws_mwr_handler.end_time == datetime(2024, 9, 1, 12, 15) -def test_orbit_number_start_end(mwr_handler): +def test_orbit_number_start_end(aws_mwr_handler): """Test that start and end orbit number is read correctly.""" - assert mwr_handler.orbit_start == 9991 - assert mwr_handler.orbit_end == 9992 + assert aws_mwr_handler.orbit_start == 9991 + assert aws_mwr_handler.orbit_end == 9992 -def test_metadata(mwr_handler): +def test_metadata(aws_mwr_handler): """Test that the metadata is read correctly.""" - assert mwr_handler.sensor == "MWR" - assert 
mwr_handler.platform_name == platform_name + assert aws_mwr_handler.sensor == "MWR" + assert aws_mwr_handler.platform_name == platform_name -def test_get_channel_data(mwr_handler): +def test_get_channel_data(aws_mwr_handler, fake_mwr_data_array): """Test retrieving the channel data.""" did = dict(name="1") dataset_info = dict(file_key="data/calibration/aws_toa_brightness_temperature") - expected = fake_data.isel(n_channels=0) + expected = fake_mwr_data_array.isel(n_channels=0) # mask no_data value expected = expected.where(expected != -2147483648) # mask outside the valid range @@ -125,7 +52,7 @@ def test_get_channel_data(mwr_handler): expected = expected.where(expected >= 0) # "calibrate" expected = expected * 0.001 - res = mwr_handler.get_dataset(did, dataset_info) + res = aws_mwr_handler.get_dataset(did, dataset_info) np.testing.assert_allclose(res, expected) assert "x" in res.dims assert "y" in res.dims @@ -141,12 +68,12 @@ def test_get_channel_data(mwr_handler): [("longitude", "data/navigation/aws_lon", fake_lon_data * 1e-4), ("latitude", "data/navigation/aws_lat", fake_lat_data), ]) -def test_get_navigation_data(mwr_handler, id_name, file_key, fake_array): +def test_get_navigation_data(aws_mwr_handler, id_name, file_key, fake_array): """Test retrieving the geolocation (lon-lat) data.""" Horn = Enum("Horn", ["1", "2", "3", "4"]) did = dict(name=id_name, horn=Horn["1"]) dataset_info = dict(file_key=file_key, standard_name=id_name) - res = mwr_handler.get_dataset(did, dataset_info) + res = aws_mwr_handler.get_dataset(did, dataset_info) if id_name == "longitude": fake_array = fake_array.where(fake_array <= 180, fake_array - 360) @@ -166,13 +93,13 @@ def test_get_navigation_data(mwr_handler, id_name, file_key, fake_array): ("solar_zenith_horn1", "data/navigation/aws_solar_zenith_angle", fake_sun_zen_data), ("satellite_azimuth_horn1", "data/navigation/aws_satellite_azimuth_angle", fake_sat_azi_data), ("satellite_zenith_horn1", 
"data/navigation/aws_satellite_zenith_angle", fake_sat_zen_data)]) -def test_get_viewing_geometry_data(mwr_handler, id_name, file_key, fake_array): +def test_get_viewing_geometry_data(aws_mwr_handler, id_name, file_key, fake_array): """Test retrieving the angles_data.""" Horn = Enum("Horn", ["1", "2", "3", "4"]) dset_id = dict(name=id_name, horn=Horn["1"]) dataset_info = dict(file_key=file_key, standard_name=id_name) - res = mwr_handler.get_dataset(dset_id, dataset_info) + res = aws_mwr_handler.get_dataset(dset_id, dataset_info) np.testing.assert_allclose(res, fake_array.isel(n_geo_groups=0)) assert "x" in res.dims @@ -184,11 +111,12 @@ def test_get_viewing_geometry_data(mwr_handler, id_name, file_key, fake_array): if id_name == "longitude": assert res.max() <= 180 -def test_try_get_data_not_in_file(mwr_handler): + +def test_try_get_data_not_in_file(aws_mwr_handler): """Test retrieving a data field that is not available in the file.""" did = dict(name="toa_brightness_temperature") dataset_info = dict(file_key="data/calibration/toa_brightness_temperature") match_str = "Dataset toa_brightness_temperature not available or not supported yet!" 
with pytest.raises(NotImplementedError, match=match_str): - _ = mwr_handler.get_dataset(did, dataset_info) + _ = aws_mwr_handler.get_dataset(did, dataset_info) diff --git a/satpy/tests/reader_tests/test_eps_sterna_mwr_l1b.py b/satpy/tests/reader_tests/test_eps_sterna_mwr_l1b.py index 38c6d3aa78..0620bc8437 100644 --- a/satpy/tests/reader_tests/test_eps_sterna_mwr_l1b.py +++ b/satpy/tests/reader_tests/test_eps_sterna_mwr_l1b.py @@ -18,104 +18,27 @@ """Tests for the EPS-Sterna MWR l1b filehandlers.""" -import os -from datetime import datetime from enum import Enum import numpy as np import pytest -import xarray as xr -from trollsift import compose, parse -from xarray import DataTree -from satpy.readers.mwr_l1b import DATETIME_FORMAT, AWS_EPS_Sterna_MWR_L1BFile -from satpy.tests.reader_tests.test_aws1_mwr_l1b import random_date - -platform_name = "AWS1" -# W_XX-EUMETSAT-Darmstadt,SAT,AWS1-MWR-1B-RAD_C_EUMT_20241121085911_G_D_20241109234502_20241110004559_T_N____.nc -file_pattern = "W_XX-EUMETSAT-Darmstadt,SAT,{platform_name}-MWR-1B-RAD_C_OHB_{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_T_B____.nc" # noqa - -rng = np.random.default_rng() - -fake_data_np = rng.integers(0, 700000, size=10*145*19).reshape((10, 145, 19)) -fake_data_np[0, 0, 0] = -2147483648 -fake_data_np[1, 0, 0] = 700000 + 10 -fake_data_np[2, 0, 0] = -10 - -ARRAY_DIMS = ["n_scans", "n_fovs", "n_channels"] -fake_data = xr.DataArray(fake_data_np, dims=ARRAY_DIMS) - -GEO_DIMS = ["n_scans", "n_fovs", "n_feedhorns"] -GEO_SIZE = 10*145*4 -fake_lon_data = xr.DataArray(rng.integers(0, 3599999, size=GEO_SIZE).reshape((10, 145, 4)), dims=GEO_DIMS) -fake_lat_data = xr.DataArray(rng.integers(-900000, 900000, size=GEO_SIZE).reshape((10, 145, 4)), dims=GEO_DIMS) -fake_sun_azi_data = xr.DataArray(rng.integers(0, 36000, size=GEO_SIZE).reshape((10, 145, 4)), dims=GEO_DIMS) -fake_sun_zen_data = xr.DataArray(rng.integers(0, 36000, size=GEO_SIZE).reshape((10, 145, 4)), dims=GEO_DIMS) 
-fake_sat_azi_data = xr.DataArray(rng.integers(0, 36000, size=GEO_SIZE).reshape((10, 145, 4)), dims=GEO_DIMS) -fake_sat_zen_data = xr.DataArray(rng.integers(0, 36000, size=GEO_SIZE).reshape((10, 145, 4)), dims=GEO_DIMS) - - -@pytest.fixture(scope="session") -def eps_sterna_mwr_file(tmp_path_factory): - """Create an EPS-Sterna MWR l1b file.""" - ds = DataTree() - start_time = datetime(2024, 9, 1, 12, 0) - ds.attrs["sensing_start_time_utc"] = start_time.strftime(DATETIME_FORMAT) - end_time = datetime(2024, 9, 1, 12, 15) - ds.attrs["sensing_end_time_utc"] = end_time.strftime(DATETIME_FORMAT) - processing_time = random_date(datetime(2024, 6, 1), datetime(2030, 6, 1)) - - instrument = "MWR" - ds.attrs["instrument"] = instrument - ds.attrs["orbit_start"] = 9991 - ds.attrs["orbit_end"] = 9992 - ds["data/calibration/toa_brightness_temperature"] = fake_data - ds["data/calibration/toa_brightness_temperature"].attrs["scale_factor"] = 0.001 - ds["data/calibration/toa_brightness_temperature"].attrs["add_offset"] = 0.0 - ds["data/calibration/toa_brightness_temperature"].attrs["missing_value"] = -2147483648 - ds["data/calibration/toa_brightness_temperature"].attrs["valid_min"] = 0 - ds["data/calibration/toa_brightness_temperature"].attrs["valid_max"] = 700000 - - ds["data/navigation/longitude"] = fake_lon_data - ds["data/navigation/longitude"].attrs["scale_factor"] = 1e-4 - ds["data/navigation/longitude"].attrs["add_offset"] = 0.0 - ds["data/navigation/latitude"] = fake_lat_data - ds["data/navigation/solar_azimuth_angle"] = fake_sun_azi_data - ds["data/navigation/solar_zenith_angle"] = fake_sun_zen_data - ds["data/navigation/satellite_azimuth_angle"] = fake_sat_azi_data - ds["data/navigation/satellite_zenith_angle"] = fake_sat_zen_data - ds["status/satellite/subsat_latitude_end"] = np.array(22.39) - ds["status/satellite/subsat_longitude_start"] = np.array(304.79) - ds["status/satellite/subsat_latitude_start"] = np.array(55.41) - ds["status/satellite/subsat_longitude_end"] = 
np.array(296.79) - - tmp_dir = tmp_path_factory.mktemp("eps_sterna_mwr_l1b_tests") - filename = tmp_dir / compose(file_pattern, dict(start_time=start_time, end_time=end_time, - processing_time=processing_time, platform_name=platform_name)) - - ds.to_netcdf(filename) - return filename - - -@pytest.fixture -def mwr_handler(eps_sterna_mwr_file): - """Create an EPS-Sterna MWR filehandler.""" - filename_info = parse(file_pattern, os.path.basename(eps_sterna_mwr_file)) - filetype_info = dict() - filetype_info["file_type"] = "eps_sterna_mwr_l1b" - return AWS_EPS_Sterna_MWR_L1BFile(eps_sterna_mwr_file, filename_info, filetype_info) +from satpy.tests.reader_tests.conftest import make_fake_mwr_lonlats +geo_dims = ["n_scans", "n_fovs", "n_feedhorns"] +geo_size = 10*145*4 +fake_lon_data, fake_lat_data = make_fake_mwr_lonlats(geo_size, geo_dims) @pytest.mark.parametrize(("id_name", "file_key", "fake_array"), [("longitude", "data/navigation/longitude", fake_lon_data * 1e-4), ("latitude", "data/navigation/latitude", fake_lat_data), ]) -def test_get_navigation_data(mwr_handler, id_name, file_key, fake_array): +def test_get_navigation_data(eps_sterna_mwr_handler, id_name, file_key, fake_array): """Test retrieving the geolocation (lon-lat) data.""" Horn = Enum("Horn", ["1", "2", "3", "4"]) did = dict(name=id_name, horn=Horn["1"]) dataset_info = dict(file_key=file_key, standard_name=id_name) - res = mwr_handler.get_dataset(did, dataset_info) + res = eps_sterna_mwr_handler.get_dataset(did, dataset_info) if id_name == "longitude": fake_array = fake_array.where(fake_array <= 180, fake_array - 360) @@ -128,3 +51,13 @@ def test_get_navigation_data(mwr_handler, id_name, file_key, fake_array): assert "n_feedhorns" not in res.coords if id_name == "longitude": assert res.max() <= 180 + + +def test_try_get_data_not_in_file(eps_sterna_mwr_handler): + """Test retrieving a data field that is not available in the file.""" + did = dict(name="aws_toa_brightness_temperature") + dataset_info = 
dict(file_key="data/calibration/aws_toa_brightness_temperature") + + match_str = "Dataset aws_toa_brightness_temperature not available or not supported yet!" + with pytest.raises(NotImplementedError, match=match_str): + _ = eps_sterna_mwr_handler.get_dataset(did, dataset_info) From bf450fd217b9b8112b064d07f9ee5ac36127cb0d Mon Sep 17 00:00:00 2001 From: "Adam.Dybbroe" Date: Mon, 16 Dec 2024 15:48:26 +0100 Subject: [PATCH 272/340] Revert back to the old (current) version Signed-off-by: Adam.Dybbroe --- satpy/etc/composites/atms.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/etc/composites/atms.yaml b/satpy/etc/composites/atms.yaml index 624f0bc93b..27afd5d2b8 100644 --- a/satpy/etc/composites/atms.yaml +++ b/satpy/etc/composites/atms.yaml @@ -14,5 +14,5 @@ composites: prerequisites: - name: '16' - name: '17' - - name: '18' + - name: '22' standard_name: mw183_humidity_surface From 8bee181393d397e8f7c50f0ac0cfbcff2127d9ab Mon Sep 17 00:00:00 2001 From: "Adam.Dybbroe" Date: Mon, 16 Dec 2024 16:07:06 +0100 Subject: [PATCH 273/340] Bugfix Signed-off-by: Adam.Dybbroe --- satpy/etc/readers/aws1_mwr_l1c_nc.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/etc/readers/aws1_mwr_l1c_nc.yaml b/satpy/etc/readers/aws1_mwr_l1c_nc.yaml index 7df360ce4a..2a679ec182 100644 --- a/satpy/etc/readers/aws1_mwr_l1c_nc.yaml +++ b/satpy/etc/readers/aws1_mwr_l1c_nc.yaml @@ -1,5 +1,5 @@ reader: - name: aws_l1c_nc + name: aws1_mwr_l1c_nc short_name: AWS L1C RAD NetCDF4 long_name: AWS L1C Radiance (NetCDF4) description: Reader for the ESA AWS (Arctic Weather Satellite) MWR level-1c files in netCDF4. From 50a6dfc67de9069049a64ceb6cf95f0c4696d0a2 Mon Sep 17 00:00:00 2001 From: Gerrit Holl Date: Mon, 16 Dec 2024 17:16:05 +0100 Subject: [PATCH 274/340] Add unit test for FCI radiance clipping Add a unit test for FCI radiance clipping. The unit test fails, because there is no implementation yet. 
--- satpy/tests/reader_tests/test_fci_l1c_nc.py | 34 ++++++++++++++++----- 1 file changed, 26 insertions(+), 8 deletions(-) diff --git a/satpy/tests/reader_tests/test_fci_l1c_nc.py b/satpy/tests/reader_tests/test_fci_l1c_nc.py index aa98990df3..8365205779 100644 --- a/satpy/tests/reader_tests/test_fci_l1c_nc.py +++ b/satpy/tests/reader_tests/test_fci_l1c_nc.py @@ -121,6 +121,7 @@ DICT_CALIBRATION = {"radiance": {"dtype": np.float32, "value_1": 15, "value_0": 9700, + "value_2": -5, "attrs_dict": {"calibration": "radiance", "units": "mW m-2 sr-1 (cm-1)-1", "radiance_unit_conversion_coefficient": np.float32(1234.56) @@ -134,8 +135,9 @@ }, "counts": {"dtype": np.uint16, - "value_1": 1, + "value_1": 5, "value_0": 5000, + "value_2": 1, "attrs_dict": {"calibration": "counts", "units": "count", }, @@ -144,6 +146,7 @@ "brightness_temperature": {"dtype": np.float32, "value_1": np.float32(209.68275), "value_0": np.float32(1888.8513), + "value_2": np.float32("nan"), "attrs_dict": {"calibration": "brightness_temperature", "units": "K", }, @@ -293,16 +296,17 @@ def _get_test_image_data_for_channel(data, ch_str, n_rows_cols): common_attrs = { "scale_factor": 5, - "add_offset": 10, + "add_offset": -10, "long_name": "Effective Radiance", "units": "mW.m-2.sr-1.(cm-1)-1", "ancillary_variables": "pixel_quality" } if "38" in ch_path: fire_line = da.ones((1, n_rows_cols[1]), dtype="uint16", chunks=1024) * 5000 - data_without_fires = da.ones((n_rows_cols[0] - 1, n_rows_cols[1]), dtype="uint16", chunks=1024) + data_without_fires = da.full((n_rows_cols[0] - 2, n_rows_cols[1]), 5, dtype="uint16", chunks=1024) + neg_rad = da.ones((1, n_rows_cols[1]), dtype="uint16", chunks=1024) d = FakeH5Variable( - da.concatenate([fire_line, data_without_fires], axis=0), + da.concatenate([fire_line, data_without_fires, neg_rad], axis=0), dims=("y", "x"), attrs={ "valid_range": [0, 8191], @@ -313,7 +317,7 @@ def _get_test_image_data_for_channel(data, ch_str, n_rows_cols): ) else: d = FakeH5Variable( - 
da.ones(n_rows_cols, dtype="uint16", chunks=1024), + da.full(n_rows_cols, 5, dtype="uint16", chunks=1024), dims=("y", "x"), attrs={ "valid_range": [0, 4095], @@ -542,11 +546,11 @@ def reader_configs(): os.path.join("readers", "fci_l1c_nc.yaml")) -def _get_reader_with_filehandlers(filenames, reader_configs): +def _get_reader_with_filehandlers(filenames, reader_configs, **reader_kwargs): from satpy.readers import load_reader reader = load_reader(reader_configs) loadables = reader.select_files_from_pathnames(filenames) - reader.create_filehandlers(loadables) + reader.create_filehandlers(loadables, fh_kwargs=reader_kwargs) clear_cache(reader) return reader @@ -738,7 +742,8 @@ def _reflectance_test(tab, filenames): def _other_calibration_test(res, ch, dict_arg): """Test of other calibration test.""" if ch == "ir_38": - numpy.testing.assert_array_equal(res[ch][-1], dict_arg["value_1"]) + numpy.testing.assert_array_equal(res[ch][-1], dict_arg["value_2"]) + numpy.testing.assert_array_equal(res[ch][-2], dict_arg["value_1"]) numpy.testing.assert_array_equal(res[ch][0], dict_arg["value_0"]) else: numpy.testing.assert_array_equal(res[ch], dict_arg["value_1"]) @@ -860,6 +865,19 @@ def test_load_calibration(self, reader_configs, fh_param, self._get_assert_load(res, ch, DICT_CALIBRATION[calibration], fh_param["filenames"][0]) + @pytest.mark.parametrize("fh_param", [lazy_fixture("FakeFCIFileHandlerFDHSI_fixture")]) + def test_load_calibration_negative_rad(self, reader_configs, fh_param): + """Test calibrating negative radiances. + + See https://github.com/pytroll/satpy/issues/3009. 
+ """ + reader = _get_reader_with_filehandlers(fh_param["filenames"], + reader_configs, + clip_negative_radiance=True) + res = reader.load([make_dataid(name="ir_38", calibration="radiance")], + pad_data=False) + numpy.testing.assert_array_equal(res["ir_38"][-1, :], 5) # smallest positive radiance + @pytest.mark.parametrize(("calibration", "channel", "resolution"), [ (calibration, channel, resolution) for calibration in ["counts", "radiance", "brightness_temperature", "reflectance"] From 3badb8d00f64dd5c3597a7990d45ec4d0f1fa2f7 Mon Sep 17 00:00:00 2001 From: "Adam.Dybbroe" Date: Mon, 16 Dec 2024 17:22:38 +0100 Subject: [PATCH 275/340] Bugfix Signed-off-by: Adam.Dybbroe --- satpy/etc/readers/eps_sterna_mwr_l1b_nc.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/etc/readers/eps_sterna_mwr_l1b_nc.yaml b/satpy/etc/readers/eps_sterna_mwr_l1b_nc.yaml index c3a665b9c4..9544a9ce3e 100644 --- a/satpy/etc/readers/eps_sterna_mwr_l1b_nc.yaml +++ b/satpy/etc/readers/eps_sterna_mwr_l1b_nc.yaml @@ -1,5 +1,5 @@ reader: - name: eps_sterna_l1b_nc + name: eps_sterna_mwr_l1b_nc short_name: AWS L1B RAD NetCDF4 long_name: AWS L1B Radiance (NetCDF4) description: Reader for the EUMETSAT EPS-Sterna Sounder level-1b files in netCDF4. From 5d0675bd2d7e05728d6e1173de906375f2a79d13 Mon Sep 17 00:00:00 2001 From: Gerrit Holl Date: Mon, 16 Dec 2024 17:58:05 +0100 Subject: [PATCH 276/340] Clip negative radiances Clip negative radiances when a keyword arguments asks for it. 
--- satpy/readers/fci_l1c_nc.py | 12 +++++++++++- satpy/readers/yaml_reader.py | 3 +-- satpy/tests/reader_tests/test_fci_l1c_nc.py | 9 +++++++-- 3 files changed, 19 insertions(+), 5 deletions(-) diff --git a/satpy/readers/fci_l1c_nc.py b/satpy/readers/fci_l1c_nc.py index fc40916699..caad045f90 100644 --- a/satpy/readers/fci_l1c_nc.py +++ b/satpy/readers/fci_l1c_nc.py @@ -208,7 +208,8 @@ class using the :mod:`~satpy.Scene.load` method with the reader "MTI3": "MTG-I3", "MTI4": "MTG-I4"} - def __init__(self, filename, filename_info, filetype_info): + def __init__(self, filename, filename_info, filetype_info, + clip_negative_radiances=False, **kwargs): """Initialize file handler.""" super().__init__(filename, filename_info, filetype_info, @@ -233,6 +234,7 @@ def __init__(self, filename, filename_info, filetype_info): else: self.is_iqt = False + self.clip_negative_radiances = clip_negative_radiances self._cache = {} @property @@ -661,6 +663,8 @@ def calibrate_counts_to_physical_quantity(self, data, key): def calibrate_counts_to_rad(self, data, key): """Calibrate counts to radiances.""" + if self.clip_negative_radiances: + data = self._clipneg(data) if key["name"] == "ir_38": data = xr.where(((2 ** 12 - 1 < data) & (data <= 2 ** 13 - 1)), (data * data.attrs.get("warm_scale_factor", 1) + @@ -677,6 +681,12 @@ def calibrate_counts_to_rad(self, data, key): self.get_and_cache_npxr(measured + "/radiance_unit_conversion_coefficient")}) return data + @staticmethod + def _clipneg(data): + """Clip counts to avoid negative radiances.""" + lo = -data.attrs.get("add_offset", 0) // data.attrs.get("scale_factor", 1) + 1 + return data.where(data>=lo, lo) + def calibrate_rad_to_bt(self, radiance, key): """IR channel calibration.""" # using the method from PUG section Converting from Effective Radiance to Brightness Temperature for IR Channels diff --git a/satpy/readers/yaml_reader.py b/satpy/readers/yaml_reader.py index 5bbaba4a6c..ef371a6284 100644 --- a/satpy/readers/yaml_reader.py +++ 
b/satpy/readers/yaml_reader.py @@ -474,8 +474,7 @@ class FileYAMLReader(GenericYAMLReader, DataDownloadMixin): def __init__(self, config_dict, filter_parameters=None, - filter_filenames=True, - **kwargs): + filter_filenames=True): """Set up initial internal storage for loading file data.""" super().__init__(config_dict, filter_parameters, filter_filenames) diff --git a/satpy/tests/reader_tests/test_fci_l1c_nc.py b/satpy/tests/reader_tests/test_fci_l1c_nc.py index 8365205779..718d51c819 100644 --- a/satpy/tests/reader_tests/test_fci_l1c_nc.py +++ b/satpy/tests/reader_tests/test_fci_l1c_nc.py @@ -444,9 +444,14 @@ class FakeFCIFileHandlerBase(FakeNetCDF4FileHandler): """Class for faking the NetCDF4 Filehandler.""" cached_file_content: Dict[str, xr.DataArray] = {} - # overwritten by FDHSI and HRFI FIle Handlers + # overwritten by FDHSI and HRFI File Handlers chan_patterns: Dict[str, Dict[str, Union[List[int], str]]] = {} + def __init__(self, *args, **kwargs): + """Initiative fake file handler.""" + kwargs.pop("clip_negative_radiances", None) + super().__init__(*args, **kwargs) + def _get_test_content_all_channels(self): data = {} for pat in self.chan_patterns: @@ -873,7 +878,7 @@ def test_load_calibration_negative_rad(self, reader_configs, fh_param): """ reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs, - clip_negative_radiance=True) + clip_negative_radiances=True) res = reader.load([make_dataid(name="ir_38", calibration="radiance")], pad_data=False) numpy.testing.assert_array_equal(res["ir_38"][-1, :], 5) # smallest positive radiance From aae848b146d10b5f424acad626035b9bb12f3926 Mon Sep 17 00:00:00 2001 From: Gerrit Holl Date: Mon, 16 Dec 2024 18:22:56 +0100 Subject: [PATCH 277/340] Clip in image comparison tests --- .../behave/features/image_comparison.feature | 16 ++++++++-------- .../behave/features/steps/image_comparison.py | 14 ++++++-------- 2 files changed, 14 insertions(+), 16 deletions(-) diff --git 
a/satpy/tests/behave/features/image_comparison.feature b/satpy/tests/behave/features/image_comparison.feature index b794fa22ef..0497a96c93 100755 --- a/satpy/tests/behave/features/image_comparison.feature +++ b/satpy/tests/behave/features/image_comparison.feature @@ -2,14 +2,14 @@ Feature: Image Comparison Scenario Outline: Compare generated image with reference image Given I have a reference image file from resampled to - When I generate a new image file from with for + When I generate a new image file from with for with clipping Then the generated image should be the same as the reference image Examples: - |satellite |composite | reader | area | - |Meteosat-12 | cloudtop | fci_l1c_nc | sve | - |Meteosat-12 | night_microphysics | fci_l1c_nc | sve | - |GOES17 |airmass | abi_l1b | null | - |GOES16 |airmass | abi_l1b | null | - |GOES16 |ash | abi_l1b | null | - |GOES17 |ash | abi_l1b | null | + |satellite |composite | reader | area | clip | + |Meteosat-12 | cloudtop | fci_l1c_nc | sve | True | + |Meteosat-12 | night_microphysics | fci_l1c_nc | sve | True | + |GOES17 |airmass | abi_l1b | null | null | + |GOES16 |airmass | abi_l1b | null | null | + |GOES16 |ash | abi_l1b | null | null | + |GOES17 |ash | abi_l1b | null | null | diff --git a/satpy/tests/behave/features/steps/image_comparison.py b/satpy/tests/behave/features/steps/image_comparison.py index a7afd56008..92c5fa0034 100644 --- a/satpy/tests/behave/features/steps/image_comparison.py +++ b/satpy/tests/behave/features/steps/image_comparison.py @@ -18,9 +18,6 @@ import hdf5plugin # noqa: F401 isort:skip import os import os.path - -os.environ["HDF5_PLUGIN_PATH"] = os.path.dirname(hdf5plugin.__file__) + "/plugins/" - import warnings from datetime import datetime from glob import glob @@ -32,8 +29,6 @@ from satpy import Scene -from satpy.utils import debug_on; debug_on() - ext_data_path = "/app/ext_data" threshold = 2000 @@ -68,8 +63,8 @@ def step_given_reference_image(context, composite, satellite, area): 
context.area = area -@when("I generate a new {composite} image file from {satellite} with {reader} for {area}") -def step_when_generate_image(context, composite, satellite, reader, area): +@when("I generate a new {composite} image file from {satellite} with {reader} for {area} with clipping {clip}") +def step_when_generate_image(context, composite, satellite, reader, area, clip): """Generate test images.""" os.environ["OMP_NUM_THREADS"] = os.environ["MKL_NUM_THREADS"] = "2" os.environ["PYTROLL_CHUNK_SIZE"] = "1024" @@ -79,7 +74,10 @@ def step_when_generate_image(context, composite, satellite, reader, area): # Get the list of satellite files to open filenames = glob(f"{ext_data_path}/satellite_data/{satellite}/*.nc") - scn = Scene(reader=reader, filenames=filenames) + reader_kwargs = {} + if clip != "null": + reader_kwargs["clip_negative_radiances"] = clip + scn = Scene(reader=reader, filenames=filenames, reader_kwargs=reader_kwargs) scn.load([composite]) From 5f6598b6d2db00add68ca7c1a207d0659c497d45 Mon Sep 17 00:00:00 2001 From: Gerrit Holl Date: Mon, 16 Dec 2024 18:27:52 +0100 Subject: [PATCH 278/340] revert erroneous removal of **kwargs --- satpy/readers/yaml_reader.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/satpy/readers/yaml_reader.py b/satpy/readers/yaml_reader.py index ef371a6284..5bbaba4a6c 100644 --- a/satpy/readers/yaml_reader.py +++ b/satpy/readers/yaml_reader.py @@ -474,7 +474,8 @@ class FileYAMLReader(GenericYAMLReader, DataDownloadMixin): def __init__(self, config_dict, filter_parameters=None, - filter_filenames=True): + filter_filenames=True, + **kwargs): """Set up initial internal storage for loading file data.""" super().__init__(config_dict, filter_parameters, filter_filenames) From a6414c96af9e8a152836244193557c9d86d7ce53 Mon Sep 17 00:00:00 2001 From: "Adam.Dybbroe" Date: Mon, 16 Dec 2024 21:45:01 +0100 Subject: [PATCH 279/340] Refactor and share code between the l1b and l1c readers Signed-off-by: Adam.Dybbroe --- 
satpy/etc/readers/aws1_mwr_l1c_nc.yaml | 2 +- satpy/readers/mwr_l1b.py | 12 ++--- satpy/readers/mwr_l1c.py | 64 ++------------------------ 3 files changed, 9 insertions(+), 69 deletions(-) diff --git a/satpy/etc/readers/aws1_mwr_l1c_nc.yaml b/satpy/etc/readers/aws1_mwr_l1c_nc.yaml index 2a679ec182..ad2acd4dc3 100644 --- a/satpy/etc/readers/aws1_mwr_l1c_nc.yaml +++ b/satpy/etc/readers/aws1_mwr_l1c_nc.yaml @@ -369,7 +369,7 @@ file_types: # W_XX-OHB-Unknown,SAT,1-AWS-1B-RAD_C_OHB_20230707124607_G_D_20220621090100_20220621090618_T_B____.nc # W_XX-OHB-Stockholm,SAT,AWS1-MWR-1B-RAD_C_OHB_20230823161321_G_D_20240115111111_20240115125434_T_B____.nc # W_XX-OHB-Stockholm,SAT,AWS1-MWR-1B-RAD_C_OHB_20230816120142_G_D_20240115111111_20240115125434_T_B____radsim.nc - file_reader: !!python/name:satpy.readers.mwr_l1c.AWSL1CFile + file_reader: !!python/name:satpy.readers.mwr_l1c.AWS_MWR_L1CFile file_patterns: [ 'W_XX-OHB-Stockholm,SAT,{platform_name}-MWR-1C-RAD_C_OHB_{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_T_B____.nc', 'W_XX-OHB-Stockholm,SAT,{platform_name}-MWR-1C-RAD_C_OHB__{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_T_B____.nc',] diff --git a/satpy/readers/mwr_l1b.py b/satpy/readers/mwr_l1b.py index d398cd94cd..6767b68eb9 100644 --- a/satpy/readers/mwr_l1b.py +++ b/satpy/readers/mwr_l1b.py @@ -25,17 +25,11 @@ """ -import logging - import xarray as xr from .netcdf_utils import NetCDF4FileHandler -logger = logging.getLogger(__name__) - -DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S.%f" - -AWS_CHANNEL_NAMES = list(str(i) for i in range(1, 20)) +MWR_CHANNEL_NAMES = list(str(i) for i in range(1, 20)) class AWS_EPS_Sterna_MWR_L1BFile(NetCDF4FileHandler): @@ -111,7 +105,7 @@ def sub_satellite_latitude_end(self): def get_dataset(self, dataset_id, dataset_info): """Get the data.""" - if dataset_id["name"] in AWS_CHANNEL_NAMES: + if dataset_id["name"] in MWR_CHANNEL_NAMES: data_array = 
self._get_channel_data(dataset_id, dataset_info) elif dataset_id["name"] in ["satellite_zenith_horn1", "satellite_zenith_horn2", @@ -154,7 +148,7 @@ def get_dataset(self, dataset_id, dataset_info): def _get_channel_data(self, dataset_id, dataset_info): channel_data = self[dataset_info["file_key"]] - channel_data.coords["n_channels"] = AWS_CHANNEL_NAMES + channel_data.coords["n_channels"] = MWR_CHANNEL_NAMES channel_data = channel_data.rename({"n_fovs": "x", "n_scans": "y"}) return channel_data.sel(n_channels=dataset_id["name"]).drop_vars("n_channels") diff --git a/satpy/readers/mwr_l1c.py b/satpy/readers/mwr_l1c.py index 9acfc604a7..6bc0320dc8 100644 --- a/satpy/readers/mwr_l1c.py +++ b/satpy/readers/mwr_l1c.py @@ -19,20 +19,10 @@ Sample data provided by ESA September 27, 2024. """ -import logging +from satpy.readers.mwr_l1b import MWR_CHANNEL_NAMES, AWS_EPS_Sterna_MWR_L1BFile, mask_and_scale -import xarray as xr -from .netcdf_utils import NetCDF4FileHandler - -logger = logging.getLogger(__name__) - -DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S.%f" - -AWS_CHANNEL_NAMES = list(str(i) for i in range(1, 20)) - - -class AWSL1CFile(NetCDF4FileHandler): +class AWS_MWR_L1CFile(AWS_EPS_Sterna_MWR_L1BFile): """Class implementing the AWS L1c Filehandler. This class implements the ESA Arctic Weather Satellite (AWS) Level-1b @@ -41,37 +31,19 @@ class using the :mod:`~satpy.Scene.load` method with the reader ``"aws_l1c_nc"``. 
""" - def __init__(self, filename, filename_info, filetype_info, auto_maskandscale=True): """Initialize the handler.""" - super().__init__(filename, filename_info, filetype_info, - cache_var_size=10000, - cache_handle=True) + super().__init__(filename, filename_info, filetype_info, auto_maskandscale) self.filename_info = filename_info - @property - def start_time(self): - """Get the start time.""" - return self.filename_info["start_time"] - - @property - def end_time(self): - """Get the end time.""" - return self.filename_info["end_time"] - @property def sensor(self): """Get the sensor name.""" return "MWR" - @property - def platform_name(self): - """Get the platform name.""" - return self.filename_info["platform_name"] - def get_dataset(self, dataset_id, dataset_info): """Get the data.""" - if dataset_id["name"] in AWS_CHANNEL_NAMES: + if dataset_id["name"] in MWR_CHANNEL_NAMES: data_array = self._get_channel_data(dataset_id, dataset_info) elif (dataset_id["name"] in ["longitude", "latitude", "solar_azimuth", "solar_zenith", @@ -90,35 +62,9 @@ def get_dataset(self, dataset_id, dataset_info): data_array.attrs["sensor"] = self.sensor return data_array - def _get_channel_data(self, dataset_id, dataset_info): - channel_data = self[dataset_info["file_key"]] - channel_data.coords["n_channels"] = AWS_CHANNEL_NAMES - channel_data = channel_data.rename({"n_fovs": "x", "n_scans": "y"}) - return channel_data.sel(n_channels=dataset_id["name"]).drop_vars("n_channels") def _get_navigation_data(self, dataset_id, dataset_info): + """Get the navigation (geolocation) data.""" geo_data = self[dataset_info["file_key"]] geo_data = geo_data.rename({"n_fovs": "x", "n_scans": "y"}) return geo_data - - -def mask_and_scale(data_array): - """Mask then scale the data array.""" - if "missing_value" in data_array.attrs: - with xr.set_options(keep_attrs=True): - data_array = data_array.where(data_array != data_array.attrs["missing_value"]) - data_array.attrs.pop("missing_value") - if 
"valid_max" in data_array.attrs: - with xr.set_options(keep_attrs=True): - data_array = data_array.where(data_array <= data_array.attrs["valid_max"]) - data_array.attrs.pop("valid_max") - if "valid_min" in data_array.attrs: - with xr.set_options(keep_attrs=True): - data_array = data_array.where(data_array >= data_array.attrs["valid_min"]) - data_array.attrs.pop("valid_min") - if "scale_factor" in data_array.attrs and "add_offset" in data_array.attrs: - with xr.set_options(keep_attrs=True): - data_array = data_array * data_array.attrs["scale_factor"] + data_array.attrs["add_offset"] - data_array.attrs.pop("scale_factor") - data_array.attrs.pop("add_offset") - return data_array From a5d472a51396f228f4070ce11348d1c3b351f8a9 Mon Sep 17 00:00:00 2001 From: "Adam.Dybbroe" Date: Mon, 16 Dec 2024 21:49:02 +0100 Subject: [PATCH 280/340] Fix tests Signed-off-by: Adam.Dybbroe --- satpy/tests/reader_tests/conftest.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/conftest.py b/satpy/tests/reader_tests/conftest.py index 65c1303865..9e168518f4 100644 --- a/satpy/tests/reader_tests/conftest.py +++ b/satpy/tests/reader_tests/conftest.py @@ -28,7 +28,9 @@ from trollsift import compose, parse from xarray import DataTree -from satpy.readers.mwr_l1b import DATETIME_FORMAT, AWS_EPS_Sterna_MWR_L1BFile +from satpy.readers.mwr_l1b import AWS_EPS_Sterna_MWR_L1BFile + +DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S.%f" platform_name = "AWS1" # W_XX-EUMETSAT-Darmstadt,SAT,AWS1-MWR-1B-RAD_C_EUMT_20241121085911_G_D_20241109234502_20241110004559_T_N____.nc From d404a795704b9cf628b6210a11673bdcc19f0a45 Mon Sep 17 00:00:00 2001 From: Gerrit Holl Date: Wed, 18 Dec 2024 12:06:52 +0100 Subject: [PATCH 281/340] add doc & don't clip space pxiels --- doc/source/config.rst | 2 +- satpy/readers/fci_l1c_nc.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/source/config.rst b/doc/source/config.rst index 9babc1abbf..1cbbbec2ed 100644 --- 
a/doc/source/config.rst +++ b/doc/source/config.rst @@ -272,7 +272,7 @@ If ``clip_negative_radiances=False``, pixels with negative radiances will have Clipping of negative radiances is currently implemented for the following readers: -* ``abi_l1b``, ``ami_l1b`` +* ``abi_l1b``, ``ami_l1b``, ``fci_l1c_nc`` Temporary Directory diff --git a/satpy/readers/fci_l1c_nc.py b/satpy/readers/fci_l1c_nc.py index caad045f90..c24c9b4849 100644 --- a/satpy/readers/fci_l1c_nc.py +++ b/satpy/readers/fci_l1c_nc.py @@ -685,7 +685,7 @@ def calibrate_counts_to_rad(self, data, key): def _clipneg(data): """Clip counts to avoid negative radiances.""" lo = -data.attrs.get("add_offset", 0) // data.attrs.get("scale_factor", 1) + 1 - return data.where(data>=lo, lo) + return data.where((~data.notnull())|(data>=lo), lo) def calibrate_rad_to_bt(self, radiance, key): """IR channel calibration.""" From 982a31c81c1701271b3ccfae7e80ee15d9f1fc35 Mon Sep 17 00:00:00 2001 From: Gerrit Holl Date: Wed, 18 Dec 2024 16:52:20 +0100 Subject: [PATCH 282/340] Change ESSL colorisation approach Change the approach for ESSL colorisation. Remove the dedicated class and instead define a colourmap to be used with the colorize enhancement. Fix the ratio to be the right way around. Fixes and closes 3020. --- satpy/enhancements/atmosphere.py | 110 ----------------- satpy/etc/composites/visir.yaml | 4 +- satpy/etc/enhancements/generic.yaml | 114 +++++++++++++++++- .../enhancement_tests/test_atmosphere.py | 61 ---------- 4 files changed, 110 insertions(+), 179 deletions(-) delete mode 100644 satpy/enhancements/atmosphere.py delete mode 100644 satpy/tests/enhancement_tests/test_atmosphere.py diff --git a/satpy/enhancements/atmosphere.py b/satpy/enhancements/atmosphere.py deleted file mode 100644 index bbc4bc3a86..0000000000 --- a/satpy/enhancements/atmosphere.py +++ /dev/null @@ -1,110 +0,0 @@ -# Copyright (c) 2022- Satpy developers -# -# This file is part of satpy. 
-# -# satpy is free software: you can redistribute it and/or modify it under the -# terms of the GNU General Public License as published by the Free Software -# Foundation, either version 3 of the License, or (at your option) any later -# version. -# -# satpy is distributed in the hope that it will be useful, but WITHOUT ANY -# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR -# A PARTICULAR PURPOSE. See the GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along with -# satpy. If not, see . -"""Enhancements related to visualising atmospheric phenomena.""" - -import datetime - -import dask.array as da -import xarray as xr - - -def essl_moisture(img, low=1.1, high=1.6) -> None: - r"""Low level moisture by European Severe Storms Laboratory (ESSL). - - Expects a mode L image with data corresponding to the ratio of the - calibrated reflectances for the 0.86 µm and 0.906 µm channel. - - This composite and its colorisation were developed by ESSL. - - Ratio values are scaled from the range ``[low, high]``, which is by default - between 1.1 and 1.6, but might be tuned based on region or sensor, - to ``[0, 1]``. Values outside this range are clipped. Color values - for red, green, and blue are calculated as follows, where ``x`` is the - ratio between the 0.86 µm and 0.905 µm channels: - - .. math:: - - R = \max(1.375 - 2.67 x, -0.75 + x) \\ - G = 1 - \frac{8x}{7} \\ - B = \max(0.75 - 1.5 x, 0.25 - (x - 0.75)^2) \\ - - The value of ``img.data`` is modified in-place. - - A color interpretation guide is pending further adjustments to the - parameters for current and future sensors. 
- - Args: - img: XRImage containing the relevant composite - low: optional, low end for scaling, defaults to 1.1 - high: optional, high end for scaling, defaults to 1.6 - """ - ratio = img.data - if _is_fci_test_data(img.data): - # Due to a bug in the FCI pre-launch simulated test data, - # the 0.86 µm channel is too bright. To correct for this, its - # reflectances should be multiplied by 0.8. - ratio *= 0.8 - - with xr.set_options(keep_attrs=True): - ratio = _scale_and_clip(ratio, low, high) - red = _calc_essl_red(ratio) - green = _calc_essl_green(ratio) - blue = _calc_essl_blue(ratio) - data = xr.concat([red, green, blue], dim="bands") - data.attrs["mode"] = "RGB" - data["bands"] = ["R", "G", "B"] - img.data = data - - -def _scale_and_clip(ratio, low, high): - """Scale ratio values to [0, 1] and clip values outside this range.""" - scaled = (ratio - low) / (high - low) - scaled.data = da.clip(scaled.data, 0, 1) - return scaled - - -def _calc_essl_red(ratio): - """Calculate values for red based on scaled and clipped ratio.""" - red_a = 1.375 - 2.67 * ratio - red_b = -0.75 + ratio - red = xr.where(red_a > red_b, red_a, red_b) - red.data = da.clip(red.data, 0, 1) - return red - - -def _calc_essl_green(ratio): - """Calculate values for green based on scaled and clipped ratio.""" - green = 1 - (8/7) * ratio - green.data = da.clip(green.data, 0, 1) - return green - - -def _calc_essl_blue(ratio): - """Calculate values for blue based on scaled and clipped ratio.""" - blue_a = 0.75 - 1.5 * ratio - blue_b = 0.25 - (ratio - 0.75)**2 - blue = xr.where(blue_a > blue_b, blue_a, blue_b) - blue.data = da.clip(blue.data, 0, 1) - return blue - - -def _is_fci_test_data(data): - """Check if we are working with FCI test data.""" - return ("sensor" in data.attrs and - "start_time" in data.attrs and - data.attrs["sensor"] == "fci" and - isinstance(data.attrs["start_time"], datetime.datetime) and - data.attrs["start_time"] < datetime.datetime(2022, 11, 30)) diff --git 
a/satpy/etc/composites/visir.yaml b/satpy/etc/composites/visir.yaml index ffe3be4183..fa774e26da 100644 --- a/satpy/etc/composites/visir.yaml +++ b/satpy/etc/composites/visir.yaml @@ -626,8 +626,8 @@ composites: is still under development and may be subject to change. compositor: !!python/name:satpy.composites.RatioCompositor prerequisites: - - wavelength: 0.86 - wavelength: 0.905 + - wavelength: 0.86 standard_name: essl_colorized_low_level_moisture day_essl_colorized_low_level_moisture: @@ -638,7 +638,7 @@ composites: day_night: day_only prerequisites: - name: essl_colorized_low_level_moisture - standard_name: day_essl_colorized_low_level_moisture + standard_name: image_ready rocket_plume_day: description: > diff --git a/satpy/etc/enhancements/generic.yaml b/satpy/etc/enhancements/generic.yaml index cdfb7851ad..01668aaf5e 100644 --- a/satpy/etc/enhancements/generic.yaml +++ b/satpy/etc/enhancements/generic.yaml @@ -1243,12 +1243,114 @@ enhancements: essl_colorized_low_level_moisture: name: essl_colorized_low_level_moisture operations: - - name: essl_moisture - method: !!python/name:satpy.enhancements.atmosphere.essl_moisture - - day_essl_colorized_low_level_moisture: - standard_name: day_essl_colorized_low_level_moisture - operations: [] + - name: colorize + method: !!python/name:satpy.enhancements.colorize + kwargs: + palettes: + - min_value: 0.625 + max_value: 0.91 + values: + - 0.6250 + - 0.6290 + - 0.6331 + - 0.6372 + - 0.6414 + - 0.6456 + - 0.6499 + - 0.6542 + - 0.6586 + - 0.6631 + - 0.6676 + - 0.6722 + - 0.6768 + - 0.6815 + - 0.6863 + - 0.6911 + - 0.6960 + - 0.7010 + - 0.7061 + - 0.7112 + - 0.7164 + - 0.7216 + - 0.7270 + - 0.7324 + - 0.7380 + - 0.7436 + - 0.7492 + - 0.7550 + - 0.7609 + - 0.7668 + - 0.7729 + - 0.7790 + - 0.7853 + - 0.7916 + - 0.7980 + - 0.8046 + - 0.8113 + - 0.8180 + - 0.8249 + - 0.8319 + - 0.8390 + - 0.8463 + - 0.8537 + - 0.8612 + - 0.8688 + - 0.8766 + - 0.8845 + - 0.8925 + - 0.9007 + - 0.9091 + colors: + - [63, 0, 47] + - [58, 0, 50] + - 
[53, 0, 52] + - [48, 0, 54] + - [42, 0, 56] + - [37, 0, 58] + - [32, 0, 59] + - [27, 5, 60] + - [22, 11, 61] + - [16, 17, 62] + - [11, 23, 63] + - [6, 28, 63] + - [1, 34, 63] + - [0, 40, 63] + - [0, 46, 63] + - [0, 52, 62] + - [0, 58, 62] + - [0, 64, 61] + - [0, 70, 60] + - [0, 76, 58] + - [0, 82, 57] + - [0, 88, 55] + - [0, 94, 53] + - [0, 100, 51] + - [3, 106, 49] + - [17, 112, 46] + - [31, 118, 43] + - [44, 124, 40] + - [58, 130, 37] + - [72, 136, 35] + - [86, 141, 42] + - [100, 147, 50] + - [114, 153, 58] + - [128, 159, 66] + - [142, 165, 74] + - [156, 171, 81] + - [169, 177, 89] + - [183, 183, 97] + - [197, 189, 105] + - [211, 195, 113] + - [225, 201, 120] + - [239, 207, 128] + - [253, 213, 136] + - [255, 219, 144] + - [255, 225, 152] + - [255, 231, 160] + - [255, 237, 167] + - [255, 243, 175] + - [255, 249, 183] + - [255, 255, 191] rocket_plume: standard_name: rocket_plume diff --git a/satpy/tests/enhancement_tests/test_atmosphere.py b/satpy/tests/enhancement_tests/test_atmosphere.py deleted file mode 100644 index 42e25af0c6..0000000000 --- a/satpy/tests/enhancement_tests/test_atmosphere.py +++ /dev/null @@ -1,61 +0,0 @@ -# Copyright (c) 2022- Satpy developers -# -# This file is part of satpy. -# -# satpy is free software: you can redistribute it and/or modify it under the -# terms of the GNU General Public License as published by the Free Software -# Foundation, either version 3 of the License, or (at your option) any later -# version. -# -# satpy is distributed in the hope that it will be useful, but WITHOUT ANY -# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR -# A PARTICULAR PURPOSE. See the GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License along with -# satpy. If not, see . 
-"""Tests for enhancements in enhancements/atmosphere.py.""" - -import datetime - -import dask.array as da -import numpy as np -import xarray as xr -from trollimage.xrimage import XRImage - - -def test_essl_moisture(): - """Test ESSL moisture compositor.""" - from satpy.enhancements.atmosphere import essl_moisture - - ratio = xr.DataArray( - da.linspace(1.0, 1.7, 25, chunks=5).reshape((5, 5)), - dims=("y", "x"), - attrs={"name": "ratio", - "calibration": "reflectance", - "units": "%", - "mode": "L"}) - im = XRImage(ratio) - - essl_moisture(im) - assert im.data.attrs["mode"] == "RGB" - np.testing.assert_array_equal(im.data["bands"], ["R", "G", "B"]) - assert im.data.sel(bands="R")[0, 0] == 1 - np.testing.assert_allclose(im.data.sel(bands="R")[2, 2], 0.04, rtol=1e-4) - np.testing.assert_allclose(im.data.sel(bands="G")[2, 2], 0.42857, rtol=1e-4) - np.testing.assert_allclose(im.data.sel(bands="B")[2, 2], 0.1875, rtol=1e-4) - - # test FCI test data correction - ratio = xr.DataArray( - da.linspace(1.0, 1.7, 25, chunks=5).reshape((5, 5)), - dims=("y", "x"), - attrs={"name": "ratio", - "calibration": "reflectance", - "units": "%", - "mode": "L", - "sensor": "fci", - "start_time": datetime.datetime(1999, 1, 1)}) - im = XRImage(ratio) - essl_moisture(im) - np.testing.assert_allclose(im.data.sel(bands="R")[3, 3], 0.7342, rtol=1e-4) - np.testing.assert_allclose(im.data.sel(bands="G")[3, 3], 0.7257, rtol=1e-4) - np.testing.assert_allclose(im.data.sel(bands="B")[3, 3], 0.39, rtol=1e-4) From 55c999e0e65858bb518422f35e98b9137fcd6151 Mon Sep 17 00:00:00 2001 From: "Adam.Dybbroe" Date: Wed, 18 Dec 2024 20:44:21 +0100 Subject: [PATCH 283/340] Refactor and add test coverage for the ESA AWS Level-1c reader Signed-off-by: Adam.Dybbroe --- satpy/readers/mwr_l1b.py | 52 ++++++---- satpy/readers/mwr_l1c.py | 4 +- satpy/tests/reader_tests/conftest.py | 73 +++++++++++++- satpy/tests/reader_tests/test_aws1_mwr_l1b.py | 2 - satpy/tests/reader_tests/test_aws1_mwr_l1c.py | 99 +++++++++++++++++++ 
5 files changed, 204 insertions(+), 26 deletions(-) create mode 100644 satpy/tests/reader_tests/test_aws1_mwr_l1c.py diff --git a/satpy/readers/mwr_l1b.py b/satpy/readers/mwr_l1b.py index 6767b68eb9..03a8b9f122 100644 --- a/satpy/readers/mwr_l1b.py +++ b/satpy/readers/mwr_l1b.py @@ -32,15 +32,9 @@ MWR_CHANNEL_NAMES = list(str(i) for i in range(1, 20)) -class AWS_EPS_Sterna_MWR_L1BFile(NetCDF4FileHandler): - """Class implementing the AWS/EPS-Sterna MWR L1b Filehandler. - - This class implements the ESA Arctic Weather Satellite (AWS) and EPS-Sterna - MWR Level-1b NetCDF reader. It is designed to be used through the - :class:`~satpy.Scene` class using the :mod:`~satpy.Scene.load` method with - the reader ``"mwr_l1b_nc"``. +class AWS_EPS_Sterna_BaseFileHandler(NetCDF4FileHandler): + """Base class implementing the AWS/EPS-Sterna MWR Level-1b&c Filehandlers.""" - """ def __init__(self, filename, filename_info, filetype_info, auto_maskandscale=True): """Initialize the handler.""" super().__init__(filename, filename_info, filetype_info, @@ -48,11 +42,6 @@ def __init__(self, filename, filename_info, filetype_info, auto_maskandscale=Tru cache_handle=True) self.filename_info = filename_info - if filetype_info["file_type"].startswith("eps_sterna"): - self._feed_horn_group_name = "n_feedhorns" - else: - self._feed_horn_group_name = "n_geo_groups" - @property def start_time(self): """Get the start time.""" @@ -83,6 +72,37 @@ def orbit_end(self): """Get the orbit number for the end of data.""" return int(self["/attr/orbit_end"]) + def get_dataset(self, dataset_id, dataset_info): + """Get the data.""" + raise NotImplementedError("This is not implemented in the Base class.") + + def _get_channel_data(self, dataset_id, dataset_info): + channel_data = self[dataset_info["file_key"]] + channel_data.coords["n_channels"] = MWR_CHANNEL_NAMES + channel_data = channel_data.rename({"n_fovs": "x", "n_scans": "y"}) + return 
channel_data.sel(n_channels=dataset_id["name"]).drop_vars("n_channels") + + + +class AWS_EPS_Sterna_MWR_L1BFile(AWS_EPS_Sterna_BaseFileHandler): + """Class implementing the AWS/EPS-Sterna MWR L1b Filehandler. + + This class implements the ESA Arctic Weather Satellite (AWS) and EPS-Sterna + MWR Level-1b NetCDF reader. It is designed to be used through the + :class:`~satpy.Scene` class using the :mod:`~satpy.Scene.load` method with + the reader ``"mwr_l1b_nc"``. + + """ + def __init__(self, filename, filename_info, filetype_info, auto_maskandscale=True): + """Initialize the handler.""" + super().__init__(filename, filename_info, filetype_info, auto_maskandscale) + + if filetype_info["file_type"].startswith("eps_sterna"): + self._feed_horn_group_name = "n_feedhorns" + else: + self._feed_horn_group_name = "n_geo_groups" + + @property def sub_satellite_longitude_start(self): """Get the longitude of sub-satellite point at start of the product.""" @@ -146,12 +166,6 @@ def get_dataset(self, dataset_id, dataset_info): data_array.attrs["orbit_number"] = self.orbit_start return data_array - def _get_channel_data(self, dataset_id, dataset_info): - channel_data = self[dataset_info["file_key"]] - channel_data.coords["n_channels"] = MWR_CHANNEL_NAMES - channel_data = channel_data.rename({"n_fovs": "x", "n_scans": "y"}) - return channel_data.sel(n_channels=dataset_id["name"]).drop_vars("n_channels") - def _get_navigation_data(self, dataset_id, dataset_info): """Get the navigation (geolocation) data for one feed horn.""" geo_data = self[dataset_info["file_key"]] diff --git a/satpy/readers/mwr_l1c.py b/satpy/readers/mwr_l1c.py index 6bc0320dc8..1b6f7269f4 100644 --- a/satpy/readers/mwr_l1c.py +++ b/satpy/readers/mwr_l1c.py @@ -19,10 +19,10 @@ Sample data provided by ESA September 27, 2024. 
""" -from satpy.readers.mwr_l1b import MWR_CHANNEL_NAMES, AWS_EPS_Sterna_MWR_L1BFile, mask_and_scale +from satpy.readers.mwr_l1b import MWR_CHANNEL_NAMES, AWS_EPS_Sterna_BaseFileHandler, mask_and_scale -class AWS_MWR_L1CFile(AWS_EPS_Sterna_MWR_L1BFile): +class AWS_MWR_L1CFile(AWS_EPS_Sterna_BaseFileHandler): """Class implementing the AWS L1c Filehandler. This class implements the ESA Arctic Weather Satellite (AWS) Level-1b diff --git a/satpy/tests/reader_tests/conftest.py b/satpy/tests/reader_tests/conftest.py index 9e168518f4..a541da2be5 100644 --- a/satpy/tests/reader_tests/conftest.py +++ b/satpy/tests/reader_tests/conftest.py @@ -29,6 +29,7 @@ from xarray import DataTree from satpy.readers.mwr_l1b import AWS_EPS_Sterna_MWR_L1BFile +from satpy.readers.mwr_l1c import AWS_MWR_L1CFile DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S.%f" @@ -38,6 +39,9 @@ esa_file_pattern = "W_XX-OHB-Stockholm,SAT,{platform_name}-MWR-1B-RAD_C_OHB_{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_T_B____.nc" # noqa +esa_l1c_file_pattern = "W_XX-OHB-Stockholm,SAT,{platform_name}-MWR-1C-RAD_C_OHB__{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_T_B____.nc" # noqa + + rng = np.random.default_rng() def random_date(start, end): @@ -59,15 +63,15 @@ def fake_mwr_data_array(): return xr.DataArray(fake_data_np, dims=array_dims) -def make_fake_angles(geo_size, geo_dims): +def make_fake_angles(geo_size, geo_dims, shape=(10, 145, 4)): """Return fake sun-satellite angle array.""" maxval = 36000 dummy_array = (np.arange(0, geo_size) * maxval/geo_size).astype("int32") - return xr.DataArray(dummy_array.reshape((10, 145, 4)), dims=geo_dims) + return xr.DataArray(dummy_array.reshape(shape), dims=geo_dims) def make_fake_mwr_lonlats(geo_size, geo_dims): - """Return fake geolocation data arrays.""" + """Return fake geolocation data arrays for all 4 MWR horns.""" maxval = 3600000 dummy_array = (np.arange(0, geo_size) * 
maxval/geo_size).astype("int32") fake_lon_data = xr.DataArray(dummy_array.reshape((10, 145, 4)), dims=geo_dims) @@ -77,6 +81,17 @@ def make_fake_mwr_lonlats(geo_size, geo_dims): return (fake_lon_data, fake_lat_data) +def make_fake_mwr_l1c_lonlats(geo_size, geo_dims): + """Return fake level-1c geolocation data arrays.""" + maxval = 3600000 + dummy_array = (np.arange(0, geo_size) * maxval/geo_size).astype("int32") + fake_lon_data = xr.DataArray(dummy_array.reshape((10, 145)), dims=geo_dims) + maxval = 1800000 + dummy_array = (np.arange(0, geo_size) * maxval/geo_size - maxval/2).astype("int32") + fake_lat_data = xr.DataArray(dummy_array.reshape((10, 145)), dims=geo_dims) + return (fake_lon_data, fake_lat_data) + + @pytest.fixture(scope="module") def eps_sterna_mwr_file(tmp_path_factory, fake_mwr_data_array): """Create an EPS-Sterna MWR l1b file.""" @@ -188,3 +203,55 @@ def aws_mwr_handler(aws_mwr_file): filetype_info = dict() filetype_info["file_type"] = "aws1_mwr_l1b" return AWS_EPS_Sterna_MWR_L1BFile(aws_mwr_file, filename_info, filetype_info) + + +@pytest.fixture(scope="session") +def aws_mwr_l1c_file(tmp_path_factory, fake_mwr_data_array): + """Create an AWS MWR l1c file.""" + geo_dims = ["n_scans", "n_fovs"] + geo_size = 10*145 + + ds = DataTree() + start_time = datetime(2024, 9, 1, 12, 0) + ds.attrs["sensing_start_time_utc"] = start_time.strftime(DATETIME_FORMAT) + end_time = datetime(2024, 9, 1, 12, 15) + ds.attrs["sensing_end_time_utc"] = end_time.strftime(DATETIME_FORMAT) + processing_time = random_date(datetime(2024, 6, 1), datetime(2030, 6, 1)) + + ds.attrs["instrument"] = "MWR" + ds.attrs["orbit_start"] = 9991 + ds.attrs["orbit_end"] = 9992 + ds["data/calibration/aws_toa_brightness_temperature"] = fake_mwr_data_array + ds["data/calibration/aws_toa_brightness_temperature"].attrs["scale_factor"] = 0.001 + ds["data/calibration/aws_toa_brightness_temperature"].attrs["add_offset"] = 0.0 + 
ds["data/calibration/aws_toa_brightness_temperature"].attrs["missing_value"] = -2147483648 + ds["data/calibration/aws_toa_brightness_temperature"].attrs["valid_min"] = 0 + ds["data/calibration/aws_toa_brightness_temperature"].attrs["valid_max"] = 700000 + + fake_lon_data, fake_lat_data = make_fake_mwr_l1c_lonlats(geo_size, geo_dims) + + ds["data/navigation/aws_lon"] = fake_lon_data + ds["data/navigation/aws_lon"].attrs["scale_factor"] = 1e-4 + ds["data/navigation/aws_lon"].attrs["add_offset"] = 0.0 + ds["data/navigation/aws_lat"] = fake_lat_data + ds["data/navigation/aws_solar_azimuth_angle"] = make_fake_angles(geo_size, geo_dims, shape=(10, 145)) + ds["data/navigation/aws_solar_zenith_angle"] = make_fake_angles(geo_size, geo_dims, shape=(10, 145)) + ds["data/navigation/aws_satellite_azimuth_angle"] = make_fake_angles(geo_size, geo_dims, shape=(10, 145)) + ds["data/navigation/aws_satellite_zenith_angle"] = make_fake_angles(geo_size, geo_dims, shape=(10, 145)) + + tmp_dir = tmp_path_factory.mktemp("aws_l1c_tests") + filename = tmp_dir / compose(esa_l1c_file_pattern, dict(start_time=start_time, end_time=end_time, + processing_time=processing_time, + platform_name=platform_name)) + + ds.to_netcdf(filename) + return filename + + +@pytest.fixture +def aws_mwr_l1c_handler(aws_mwr_l1c_file): + """Create an AWS MWR level-1c filehandler.""" + filename_info = parse(esa_l1c_file_pattern, os.path.basename(aws_mwr_l1c_file)) + filetype_info = dict() + filetype_info["file_type"] = "aws1_mwr_l1c" + return AWS_MWR_L1CFile(aws_mwr_l1c_file, filename_info, filetype_info) diff --git a/satpy/tests/reader_tests/test_aws1_mwr_l1b.py b/satpy/tests/reader_tests/test_aws1_mwr_l1b.py index 6441715223..9a789bd08f 100644 --- a/satpy/tests/reader_tests/test_aws1_mwr_l1b.py +++ b/satpy/tests/reader_tests/test_aws1_mwr_l1b.py @@ -108,8 +108,6 @@ def test_get_viewing_geometry_data(aws_mwr_handler, id_name, file_key, fake_arra assert res.dims == ("y", "x") assert "standard_name" in res.attrs 
assert "n_geo_groups" not in res.coords - if id_name == "longitude": - assert res.max() <= 180 def test_try_get_data_not_in_file(aws_mwr_handler): diff --git a/satpy/tests/reader_tests/test_aws1_mwr_l1c.py b/satpy/tests/reader_tests/test_aws1_mwr_l1c.py new file mode 100644 index 0000000000..fc4118bc34 --- /dev/null +++ b/satpy/tests/reader_tests/test_aws1_mwr_l1c.py @@ -0,0 +1,99 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# Copyright (c) 2024 Satpy developers + +# This program is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. + +# You should have received a copy of the GNU General Public License +# along with this program. If not, see . 
+ +"""Tests for ESA Arctic Weather Satellite (AWS) level-1c file reading.""" + + + +import numpy as np +import pytest + +from satpy.tests.reader_tests.conftest import make_fake_angles, make_fake_mwr_l1c_lonlats + +platform_name = "AWS1" +# W_XX-OHB-Stockholm,SAT,AWS1-MWR-1C-RAD_C_OHB__20241126183628_G_D_20240913222540_20240914000332_T_B____.nc +file_pattern = "W_XX-OHB-Stockholm,SAT,{platform_name}-MWR-1C-RAD_C_OHB__{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_T_B____.nc" # noqa + + +geo_dims = ["n_scans", "n_fovs"] +geo_size = 10*145 +fake_lon_data, fake_lat_data = make_fake_mwr_l1c_lonlats(geo_size, geo_dims) +fake_sun_azi_data = make_fake_angles(geo_size, geo_dims, shape=(10, 145)) +fake_sun_zen_data = make_fake_angles(geo_size, geo_dims, shape=(10, 145)) +fake_sat_azi_data = make_fake_angles(geo_size, geo_dims, shape=(10, 145)) +fake_sat_zen_data = make_fake_angles(geo_size, geo_dims, shape=(10, 145)) + + +def test_get_channel_data(aws_mwr_l1c_handler, fake_mwr_data_array): + """Test retrieving the channel data.""" + did = dict(name="1") + dataset_info = dict(file_key="data/calibration/aws_toa_brightness_temperature") + expected = fake_mwr_data_array.isel(n_channels=0) + # mask no_data value + expected = expected.where(expected != -2147483648) + # mask outside the valid range + expected = expected.where(expected <= 700000) + expected = expected.where(expected >= 0) + # "calibrate" + expected = expected * 0.001 + res = aws_mwr_l1c_handler.get_dataset(did, dataset_info) + np.testing.assert_allclose(res, expected) + assert "x" in res.dims + assert "y" in res.dims + assert res.dims == ("y", "x") + assert "n_channels" not in res.coords + assert res.attrs["sensor"] == "MWR" + assert res.attrs["platform_name"] == "AWS1" + + +@pytest.mark.parametrize(("id_name", "file_key", "fake_array"), + [("longitude", "data/navigation/aws_lon", fake_lon_data * 1e-4), + ("latitude", "data/navigation/aws_lat", fake_lat_data), + ]) +def 
test_get_navigation_data(aws_mwr_l1c_handler, id_name, file_key, fake_array): + """Test retrieving the geolocation (lon, lat) data.""" + did = dict(name=id_name) + dataset_info = dict(file_key=file_key, standard_name=id_name) + res = aws_mwr_l1c_handler.get_dataset(did, dataset_info) + if id_name == "longitude": + fake_array = fake_array.where(fake_array <= 180, fake_array - 360) + + np.testing.assert_allclose(res, fake_array) + assert "x" in res.dims + assert "y" in res.dims + assert res.dims == ("y", "x") + assert "standard_name" in res.attrs + if id_name == "longitude": + assert res.max() <= 180 + + +@pytest.mark.parametrize(("id_name", "file_key", "fake_array"), + [("solar_azimuth", "data/navigation/aws_solar_azimuth_angle", fake_sun_azi_data), + ("solar_zenith", "data/navigation/aws_solar_zenith_angle", fake_sun_zen_data), + ("satellite_azimuth", "data/navigation/aws_satellite_azimuth_angle", fake_sat_azi_data), + ("satellite_zenith", "data/navigation/aws_satellite_zenith_angle", fake_sat_zen_data)]) +def test_get_viewing_geometry_data(aws_mwr_l1c_handler, id_name, file_key, fake_array): + """Test retrieving the angles_data.""" + dset_id = dict(name=id_name) + dataset_info = dict(file_key=file_key, standard_name=id_name) + res = aws_mwr_l1c_handler.get_dataset(dset_id, dataset_info) + np.testing.assert_allclose(res, fake_array) + assert "x" in res.dims + assert "y" in res.dims + assert res.dims == ("y", "x") + assert "standard_name" in res.attrs From 110cd9c4f0fceca8ea805a22d58da85f08fada84 Mon Sep 17 00:00:00 2001 From: Adam Dybbroe Date: Thu, 19 Dec 2024 12:44:54 +0100 Subject: [PATCH 284/340] Update satpy/etc/composites/mwr.yaml Co-authored-by: Panu Lahtinen --- satpy/etc/composites/mwr.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/etc/composites/mwr.yaml b/satpy/etc/composites/mwr.yaml index ba38a69d02..907185838a 100644 --- a/satpy/etc/composites/mwr.yaml +++ b/satpy/etc/composites/mwr.yaml @@ -1,4 +1,4 @@ -sensor_name: aws 
+sensor_name: mwr composites: mw183_humidity: From 7347fb0bfe01b622b3c5ba15ae5a62905ae2e37b Mon Sep 17 00:00:00 2001 From: Adam Dybbroe Date: Thu, 19 Dec 2024 12:46:31 +0100 Subject: [PATCH 285/340] Update satpy/tests/reader_tests/conftest.py Co-authored-by: Panu Lahtinen --- satpy/tests/reader_tests/conftest.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/conftest.py b/satpy/tests/reader_tests/conftest.py index a541da2be5..aeec133f30 100644 --- a/satpy/tests/reader_tests/conftest.py +++ b/satpy/tests/reader_tests/conftest.py @@ -96,7 +96,7 @@ def make_fake_mwr_l1c_lonlats(geo_size, geo_dims): def eps_sterna_mwr_file(tmp_path_factory, fake_mwr_data_array): """Create an EPS-Sterna MWR l1b file.""" geo_dims = ["n_scans", "n_fovs", "n_feedhorns"] - geo_size = 10*145*4 + geo_size = 10 * 145 * 4 ds = DataTree() start_time = datetime(2024, 9, 1, 12, 0) From 1cf434c6b7d412b3bf9d7824fc03ca0f57a47eb6 Mon Sep 17 00:00:00 2001 From: "Adam.Dybbroe" Date: Thu, 19 Dec 2024 12:58:01 +0100 Subject: [PATCH 286/340] Fix file naming and secure composite recipe consistency Signed-off-by: Adam.Dybbroe --- satpy/etc/enhancements/generic.yaml | 2 +- satpy/etc/enhancements/{aws.yaml => mwr.yaml} | 12 +++---- satpy/readers/mwr_l1b.py | 33 ++++++++++--------- 3 files changed, 24 insertions(+), 23 deletions(-) rename satpy/etc/enhancements/{aws.yaml => mwr.yaml} (89%) diff --git a/satpy/etc/enhancements/generic.yaml b/satpy/etc/enhancements/generic.yaml index 8989d44152..ff6e500560 100644 --- a/satpy/etc/enhancements/generic.yaml +++ b/satpy/etc/enhancements/generic.yaml @@ -1291,7 +1291,7 @@ enhancements: operations: [] mw183_humidity: - # matches AWS + # matches EPS-Sterna and AWS MWR standard_name: mw183_humidity operations: - name: stretch diff --git a/satpy/etc/enhancements/aws.yaml b/satpy/etc/enhancements/mwr.yaml similarity index 89% rename from satpy/etc/enhancements/aws.yaml rename to satpy/etc/enhancements/mwr.yaml index 
4d79b0ae11..da32d6a499 100644 --- a/satpy/etc/enhancements/aws.yaml +++ b/satpy/etc/enhancements/mwr.yaml @@ -3,16 +3,16 @@ enhancements: mw183_humidity: standard_name: mw183_humidity operations: - - name: inverse - method: !!python/name:satpy.enhancements.invert - args: - - [true, true, true] - name: stretch method: !!python/name:satpy.enhancements.stretch - kwargs: {stretch: linear} + kwargs: + stretch: crude + min_stretch: [290, 290, 290] + max_stretch: [190, 190, 190] - name: gamma method: !!python/name:satpy.enhancements.gamma - kwargs: {gamma: 1.2} + kwargs: + gamma: [1.5, 1.2, 1.2] mw183_humidity_surface: standard_name: mw183_humidity_surface diff --git a/satpy/readers/mwr_l1b.py b/satpy/readers/mwr_l1b.py index 03a8b9f122..5eac8c2699 100644 --- a/satpy/readers/mwr_l1b.py +++ b/satpy/readers/mwr_l1b.py @@ -31,6 +31,22 @@ MWR_CHANNEL_NAMES = list(str(i) for i in range(1, 20)) +NAVIGATION_DATASET_NAMES = ["satellite_zenith_horn1", + "satellite_zenith_horn2", + "satellite_zenith_horn3", + "satellite_zenith_horn4", + "solar_azimuth_horn1", + "solar_azimuth_horn2", + "solar_azimuth_horn3", + "solar_azimuth_horn4", + "solar_zenith_horn1", + "solar_zenith_horn2", + "solar_zenith_horn3", + "solar_zenith_horn4", + "satellite_azimuth_horn1", + "satellite_azimuth_horn2", + "satellite_azimuth_horn3", + "satellite_azimuth_horn4"] class AWS_EPS_Sterna_BaseFileHandler(NetCDF4FileHandler): """Base class implementing the AWS/EPS-Sterna MWR Level-1b&c Filehandlers.""" @@ -127,22 +143,7 @@ def get_dataset(self, dataset_id, dataset_info): """Get the data.""" if dataset_id["name"] in MWR_CHANNEL_NAMES: data_array = self._get_channel_data(dataset_id, dataset_info) - elif dataset_id["name"] in ["satellite_zenith_horn1", - "satellite_zenith_horn2", - "satellite_zenith_horn3", - "satellite_zenith_horn4", - "solar_azimuth_horn1", - "solar_azimuth_horn2", - "solar_azimuth_horn3", - "solar_azimuth_horn4", - "solar_zenith_horn1", - "solar_zenith_horn2", - "solar_zenith_horn3", - 
"solar_zenith_horn4", - "satellite_azimuth_horn1", - "satellite_azimuth_horn2", - "satellite_azimuth_horn3", - "satellite_azimuth_horn4"]: + elif dataset_id["name"] in NAVIGATION_DATASET_NAMES: data_array = self._get_navigation_data(dataset_id, dataset_info) elif dataset_id["name"] in ["longitude", "latitude"]: From 1e49dd5333d98cfc1c185291ea8748e0df1df23a Mon Sep 17 00:00:00 2001 From: Adam Dybbroe Date: Thu, 19 Dec 2024 12:59:18 +0100 Subject: [PATCH 287/340] Update satpy/readers/mwr_l1b.py Co-authored-by: Panu Lahtinen --- satpy/readers/mwr_l1b.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/mwr_l1b.py b/satpy/readers/mwr_l1b.py index 03a8b9f122..0d3a5e7568 100644 --- a/satpy/readers/mwr_l1b.py +++ b/satpy/readers/mwr_l1b.py @@ -29,7 +29,7 @@ from .netcdf_utils import NetCDF4FileHandler -MWR_CHANNEL_NAMES = list(str(i) for i in range(1, 20)) +MWR_CHANNEL_NAMES = [str(i) for i in range(1, 20)] class AWS_EPS_Sterna_BaseFileHandler(NetCDF4FileHandler): From 62c3b3637950542921fbb3f8ab30463303a01c22 Mon Sep 17 00:00:00 2001 From: "Adam.Dybbroe" Date: Thu, 19 Dec 2024 13:04:05 +0100 Subject: [PATCH 288/340] Remove redundant MWR specific enhancement Signed-off-by: Adam.Dybbroe --- satpy/etc/enhancements/generic.yaml | 2 +- satpy/etc/enhancements/mwr.yaml | 14 -------------- 2 files changed, 1 insertion(+), 15 deletions(-) diff --git a/satpy/etc/enhancements/generic.yaml b/satpy/etc/enhancements/generic.yaml index ff6e500560..088a59f625 100644 --- a/satpy/etc/enhancements/generic.yaml +++ b/satpy/etc/enhancements/generic.yaml @@ -1291,7 +1291,7 @@ enhancements: operations: [] mw183_humidity: - # matches EPS-Sterna and AWS MWR + # matches EPS-Sterna and AWS MWR, and ATMS and MHS standard_name: mw183_humidity operations: - name: stretch diff --git a/satpy/etc/enhancements/mwr.yaml b/satpy/etc/enhancements/mwr.yaml index da32d6a499..26b18b2549 100644 --- a/satpy/etc/enhancements/mwr.yaml +++ b/satpy/etc/enhancements/mwr.yaml @@ -1,19 +1,5 
@@ enhancements: - mw183_humidity: - standard_name: mw183_humidity - operations: - - name: stretch - method: !!python/name:satpy.enhancements.stretch - kwargs: - stretch: crude - min_stretch: [290, 290, 290] - max_stretch: [190, 190, 190] - - name: gamma - method: !!python/name:satpy.enhancements.gamma - kwargs: - gamma: [1.5, 1.2, 1.2] - mw183_humidity_surface: standard_name: mw183_humidity_surface operations: From b2f0e1410e068bec64dc58546930b1f291986d0b Mon Sep 17 00:00:00 2001 From: Adam Dybbroe Date: Thu, 19 Dec 2024 13:09:14 +0100 Subject: [PATCH 289/340] Update satpy/etc/readers/eps_sterna_mwr_l1b_nc.yaml Co-authored-by: Panu Lahtinen --- satpy/etc/readers/eps_sterna_mwr_l1b_nc.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/etc/readers/eps_sterna_mwr_l1b_nc.yaml b/satpy/etc/readers/eps_sterna_mwr_l1b_nc.yaml index 9544a9ce3e..97a8e8888c 100644 --- a/satpy/etc/readers/eps_sterna_mwr_l1b_nc.yaml +++ b/satpy/etc/readers/eps_sterna_mwr_l1b_nc.yaml @@ -2,7 +2,7 @@ reader: name: eps_sterna_mwr_l1b_nc short_name: AWS L1B RAD NetCDF4 long_name: AWS L1B Radiance (NetCDF4) - description: Reader for the EUMETSAT EPS-Sterna Sounder level-1b files in netCDF4. + description: Reader for the EUMETSAT EPS-Sterna radiometer level-1b files in netCDF4. 
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [mwr,] status: Beta From 1ef7cc380875d83cb6989e84e0b8287f3739c6ba Mon Sep 17 00:00:00 2001 From: Adam Dybbroe Date: Thu, 19 Dec 2024 14:00:02 +0100 Subject: [PATCH 290/340] Fix spelling Co-authored-by: Panu Lahtinen --- satpy/etc/readers/aws1_mwr_l1b_nc.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/etc/readers/aws1_mwr_l1b_nc.yaml b/satpy/etc/readers/aws1_mwr_l1b_nc.yaml index 7fc88332b2..03978afe49 100644 --- a/satpy/etc/readers/aws1_mwr_l1b_nc.yaml +++ b/satpy/etc/readers/aws1_mwr_l1b_nc.yaml @@ -2,7 +2,7 @@ reader: name: aws1_mwr_l1b_nc short_name: AWS1 MWR L1B RAD NetCDF4 long_name: AWS1 MWR L1B Radiance (NetCDF4) - description: Reader for the ESA AWS (Arctic Weather Satellite) Micorwave Radiometer (MWR) level-1b files in netCDF4. + description: Reader for the ESA AWS (Arctic Weather Satellite) Microwave Radiometer (MWR) level-1b files in netCDF4. reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [mwr,] status: Beta From afe175b138571d8bdd07c59e3278f1d606c375a0 Mon Sep 17 00:00:00 2001 From: Adam Dybbroe Date: Thu, 19 Dec 2024 14:00:23 +0100 Subject: [PATCH 291/340] Update satpy/tests/reader_tests/test_aws1_mwr_l1c.py Co-authored-by: Panu Lahtinen --- satpy/tests/reader_tests/test_aws1_mwr_l1c.py | 1 - 1 file changed, 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_aws1_mwr_l1c.py b/satpy/tests/reader_tests/test_aws1_mwr_l1c.py index fc4118bc34..14ed54d38e 100644 --- a/satpy/tests/reader_tests/test_aws1_mwr_l1c.py +++ b/satpy/tests/reader_tests/test_aws1_mwr_l1c.py @@ -19,7 +19,6 @@ """Tests for ESA Arctic Weather Satellite (AWS) level-1c file reading.""" - import numpy as np import pytest From 3a71be5f2c1393116ba652bec8bcd817eca472af Mon Sep 17 00:00:00 2001 From: Adam Dybbroe Date: Thu, 19 Dec 2024 14:01:04 +0100 Subject: [PATCH 292/340] Update satpy/readers/mwr_l1c.py Co-authored-by: Panu Lahtinen --- satpy/readers/mwr_l1c.py 
| 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/readers/mwr_l1c.py b/satpy/readers/mwr_l1c.py index 1b6f7269f4..ae93c72414 100644 --- a/satpy/readers/mwr_l1c.py +++ b/satpy/readers/mwr_l1c.py @@ -46,8 +46,8 @@ def get_dataset(self, dataset_id, dataset_info): if dataset_id["name"] in MWR_CHANNEL_NAMES: data_array = self._get_channel_data(dataset_id, dataset_info) elif (dataset_id["name"] in ["longitude", "latitude", - "solar_azimuth", "solar_zenith", - "satellite_zenith", "satellite_azimuth"]): + "solar_azimuth_angle", "solar_zenith_angle", + "satellite_zenith_angle", "satellite_azimuth_angle"]): data_array = self._get_navigation_data(dataset_id, dataset_info) else: raise NotImplementedError(f"Dataset {dataset_id['name']} not available or not supported yet!") From 49fb56453a09b41688e6bdc82e91cd450f73556f Mon Sep 17 00:00:00 2001 From: Adam Dybbroe Date: Thu, 19 Dec 2024 14:01:33 +0100 Subject: [PATCH 293/340] Update satpy/tests/reader_tests/test_aws1_mwr_l1c.py Co-authored-by: Panu Lahtinen --- satpy/tests/reader_tests/test_aws1_mwr_l1c.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/satpy/tests/reader_tests/test_aws1_mwr_l1c.py b/satpy/tests/reader_tests/test_aws1_mwr_l1c.py index 14ed54d38e..41d097aca4 100644 --- a/satpy/tests/reader_tests/test_aws1_mwr_l1c.py +++ b/satpy/tests/reader_tests/test_aws1_mwr_l1c.py @@ -82,10 +82,10 @@ def test_get_navigation_data(aws_mwr_l1c_handler, id_name, file_key, fake_array) @pytest.mark.parametrize(("id_name", "file_key", "fake_array"), - [("solar_azimuth", "data/navigation/aws_solar_azimuth_angle", fake_sun_azi_data), - ("solar_zenith", "data/navigation/aws_solar_zenith_angle", fake_sun_zen_data), - ("satellite_azimuth", "data/navigation/aws_satellite_azimuth_angle", fake_sat_azi_data), - ("satellite_zenith", "data/navigation/aws_satellite_zenith_angle", fake_sat_zen_data)]) + [("solar_azimuth_angle", "data/navigation/aws_solar_azimuth_angle", fake_sun_azi_data), + 
("solar_zenith_angle", "data/navigation/aws_solar_zenith_angle", fake_sun_zen_data), + ("satellite_azimuth_angle", "data/navigation/aws_satellite_azimuth_angle", fake_sat_azi_data), + ("satellite_zenith_angle", "data/navigation/aws_satellite_zenith_angle", fake_sat_zen_data)]) def test_get_viewing_geometry_data(aws_mwr_l1c_handler, id_name, file_key, fake_array): """Test retrieving the angles_data.""" dset_id = dict(name=id_name) From ba126ccac97066be03848d5a31e76032bda72429 Mon Sep 17 00:00:00 2001 From: "Adam.Dybbroe" Date: Thu, 19 Dec 2024 15:21:41 +0100 Subject: [PATCH 294/340] Fix sensor name and adjust yaml files accordingly + fix a few issues raised in the PR review Signed-off-by: Adam.Dybbroe --- satpy/etc/composites/mwr.yaml | 6 ++-- satpy/etc/enhancements/mwr.yaml | 32 ++---------------- satpy/etc/readers/aws1_mwr_l1c_nc.yaml | 3 -- satpy/readers/mwr_l1b.py | 4 ++- satpy/readers/mwr_l1c.py | 4 ++- satpy/tests/reader_tests/conftest.py | 33 ++++++++++--------- satpy/tests/reader_tests/test_aws1_mwr_l1b.py | 10 +++--- satpy/tests/reader_tests/test_aws1_mwr_l1c.py | 2 +- 8 files changed, 34 insertions(+), 60 deletions(-) diff --git a/satpy/etc/composites/mwr.yaml b/satpy/etc/composites/mwr.yaml index 907185838a..d959faa632 100644 --- a/satpy/etc/composites/mwr.yaml +++ b/satpy/etc/composites/mwr.yaml @@ -15,7 +15,7 @@ composites: - name: '9' - name: '10' - name: '12' - standard_name: mw183_humidity_surface + standard_name: mw_humidity_surface mw325_humidity_surface: compositor: !!python/name:satpy.composites.RGBCompositor @@ -23,7 +23,7 @@ composites: - name: '9' - name: '10' - name: '19' - standard_name: mw325_humidity_surface + standard_name: mw_humidity_surface mw325_humidity: compositor: !!python/name:satpy.composites.RGBCompositor @@ -31,7 +31,7 @@ composites: - name: '16' - name: '18' - name: '19' - standard_name: mw325_humidity + standard_name: mw_humidity_surface ch1_tbs_colors: compositor: !!python/name:satpy.composites.SingleBandCompositor diff 
--git a/satpy/etc/enhancements/mwr.yaml b/satpy/etc/enhancements/mwr.yaml index 26b18b2549..4a9cff6354 100644 --- a/satpy/etc/enhancements/mwr.yaml +++ b/satpy/etc/enhancements/mwr.yaml @@ -1,35 +1,7 @@ enhancements: - mw183_humidity_surface: - standard_name: mw183_humidity_surface - operations: - - name: inverse - method: !!python/name:satpy.enhancements.invert - args: - - [true, true, true] - - name: stretch - method: !!python/name:satpy.enhancements.stretch - kwargs: {stretch: linear} - - name: gamma - method: !!python/name:satpy.enhancements.gamma - kwargs: {gamma: 1.2} - - mw325_humidity: - standard_name: mw325_humidity - operations: - - name: inverse - method: !!python/name:satpy.enhancements.invert - args: - - [true, true, true] - - name: stretch - method: !!python/name:satpy.enhancements.stretch - kwargs: {stretch: linear} - - name: gamma - method: !!python/name:satpy.enhancements.gamma - kwargs: {gamma: 1.2} - - mw325_humidity_surface: - standard_name: mw325_humidity_surface + mw_humidity_surface: + standard_name: mw_humidity_surface operations: - name: inverse method: !!python/name:satpy.enhancements.invert diff --git a/satpy/etc/readers/aws1_mwr_l1c_nc.yaml b/satpy/etc/readers/aws1_mwr_l1c_nc.yaml index ad2acd4dc3..4a6215a5a1 100644 --- a/satpy/etc/readers/aws1_mwr_l1c_nc.yaml +++ b/satpy/etc/readers/aws1_mwr_l1c_nc.yaml @@ -366,9 +366,6 @@ datasets: file_types: aws_l1c_nc: - # W_XX-OHB-Unknown,SAT,1-AWS-1B-RAD_C_OHB_20230707124607_G_D_20220621090100_20220621090618_T_B____.nc - # W_XX-OHB-Stockholm,SAT,AWS1-MWR-1B-RAD_C_OHB_20230823161321_G_D_20240115111111_20240115125434_T_B____.nc - # W_XX-OHB-Stockholm,SAT,AWS1-MWR-1B-RAD_C_OHB_20230816120142_G_D_20240115111111_20240115125434_T_B____radsim.nc file_reader: !!python/name:satpy.readers.mwr_l1c.AWS_MWR_L1CFile file_patterns: [ 'W_XX-OHB-Stockholm,SAT,{platform_name}-MWR-1C-RAD_C_OHB_{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_T_B____.nc', diff --git 
a/satpy/readers/mwr_l1b.py b/satpy/readers/mwr_l1b.py index 362641306b..7f77f6dbf9 100644 --- a/satpy/readers/mwr_l1b.py +++ b/satpy/readers/mwr_l1b.py @@ -71,7 +71,9 @@ def end_time(self): @property def sensor(self): """Get the sensor name.""" - return self["/attr/instrument"] + # This should have been self["/attr/instrument"] + # But the sensor name is currently incorrect in the ESA level-1b files + return "mwr" @property def platform_name(self): diff --git a/satpy/readers/mwr_l1c.py b/satpy/readers/mwr_l1c.py index 1b6f7269f4..dba26d45a5 100644 --- a/satpy/readers/mwr_l1c.py +++ b/satpy/readers/mwr_l1c.py @@ -39,7 +39,9 @@ def __init__(self, filename, filename_info, filetype_info, auto_maskandscale=Tru @property def sensor(self): """Get the sensor name.""" - return "MWR" + # This should have been self["/attr/instrument"] + # But the sensor name is currently incorrect in the ESA level-1b files + return "mwr" def get_dataset(self, dataset_id, dataset_info): """Get the data.""" diff --git a/satpy/tests/reader_tests/conftest.py b/satpy/tests/reader_tests/conftest.py index aeec133f30..83a6291288 100644 --- a/satpy/tests/reader_tests/conftest.py +++ b/satpy/tests/reader_tests/conftest.py @@ -19,7 +19,8 @@ """Setup and configuration for all reader tests.""" import os -from datetime import datetime, timedelta +from datetime import datetime as dt +from datetime import timedelta from random import randrange import numpy as np @@ -99,11 +100,11 @@ def eps_sterna_mwr_file(tmp_path_factory, fake_mwr_data_array): geo_size = 10 * 145 * 4 ds = DataTree() - start_time = datetime(2024, 9, 1, 12, 0) + start_time = dt(2024, 9, 1, 12, 0) ds.attrs["sensing_start_time_utc"] = start_time.strftime(DATETIME_FORMAT) - end_time = datetime(2024, 9, 1, 12, 15) + end_time = dt(2024, 9, 1, 12, 15) ds.attrs["sensing_end_time_utc"] = end_time.strftime(DATETIME_FORMAT) - processing_time = random_date(datetime(2024, 9, 1, 13), datetime(2030, 6, 1)) + processing_time = random_date(dt(2024, 9, 1, 
13), dt(2030, 6, 1)) instrument = "MWR" ds.attrs["instrument"] = instrument @@ -140,7 +141,7 @@ def eps_sterna_mwr_file(tmp_path_factory, fake_mwr_data_array): return filename -@pytest.fixture +@pytest.fixture(scope="module") def eps_sterna_mwr_handler(eps_sterna_mwr_file): """Create an EPS-Sterna MWR filehandler.""" filename_info = parse(eumetsat_file_pattern, os.path.basename(eps_sterna_mwr_file)) @@ -149,18 +150,18 @@ def eps_sterna_mwr_handler(eps_sterna_mwr_file): return AWS_EPS_Sterna_MWR_L1BFile(eps_sterna_mwr_file, filename_info, filetype_info) -@pytest.fixture(scope="session") +@pytest.fixture(scope="module") def aws_mwr_file(tmp_path_factory, fake_mwr_data_array): """Create an AWS MWR l1b file.""" geo_dims = ["n_scans", "n_fovs", "n_geo_groups"] - geo_size = 10*145*4 + geo_size = 10 * 145 * 4 ds = DataTree() - start_time = datetime(2024, 9, 1, 12, 0) + start_time = dt(2024, 9, 1, 12, 0) ds.attrs["sensing_start_time_utc"] = start_time.strftime(DATETIME_FORMAT) - end_time = datetime(2024, 9, 1, 12, 15) + end_time = dt(2024, 9, 1, 12, 15) ds.attrs["sensing_end_time_utc"] = end_time.strftime(DATETIME_FORMAT) - processing_time = random_date(datetime(2024, 6, 1), datetime(2030, 6, 1)) + processing_time = random_date(dt(2024, 6, 1), dt(2030, 6, 1)) instrument = "MWR" ds.attrs["instrument"] = instrument @@ -196,7 +197,7 @@ def aws_mwr_file(tmp_path_factory, fake_mwr_data_array): return filename -@pytest.fixture +@pytest.fixture(scope="module") def aws_mwr_handler(aws_mwr_file): """Create an AWS MWR filehandler.""" filename_info = parse(esa_file_pattern, os.path.basename(aws_mwr_file)) @@ -205,18 +206,18 @@ def aws_mwr_handler(aws_mwr_file): return AWS_EPS_Sterna_MWR_L1BFile(aws_mwr_file, filename_info, filetype_info) -@pytest.fixture(scope="session") +@pytest.fixture(scope="module") def aws_mwr_l1c_file(tmp_path_factory, fake_mwr_data_array): """Create an AWS MWR l1c file.""" geo_dims = ["n_scans", "n_fovs"] geo_size = 10*145 ds = DataTree() - start_time = 
datetime(2024, 9, 1, 12, 0) + start_time = dt(2024, 9, 1, 12, 0) ds.attrs["sensing_start_time_utc"] = start_time.strftime(DATETIME_FORMAT) - end_time = datetime(2024, 9, 1, 12, 15) + end_time = dt(2024, 9, 1, 12, 15) ds.attrs["sensing_end_time_utc"] = end_time.strftime(DATETIME_FORMAT) - processing_time = random_date(datetime(2024, 6, 1), datetime(2030, 6, 1)) + processing_time = random_date(dt(2024, 6, 1), dt(2030, 6, 1)) ds.attrs["instrument"] = "MWR" ds.attrs["orbit_start"] = 9991 @@ -248,7 +249,7 @@ def aws_mwr_l1c_file(tmp_path_factory, fake_mwr_data_array): return filename -@pytest.fixture +@pytest.fixture(scope="module") def aws_mwr_l1c_handler(aws_mwr_l1c_file): """Create an AWS MWR level-1c filehandler.""" filename_info = parse(esa_l1c_file_pattern, os.path.basename(aws_mwr_l1c_file)) diff --git a/satpy/tests/reader_tests/test_aws1_mwr_l1b.py b/satpy/tests/reader_tests/test_aws1_mwr_l1b.py index 9a789bd08f..a88cd8e062 100644 --- a/satpy/tests/reader_tests/test_aws1_mwr_l1b.py +++ b/satpy/tests/reader_tests/test_aws1_mwr_l1b.py @@ -1,6 +1,6 @@ """Tests for aws l1b filehandlers.""" -from datetime import datetime +from datetime import datetime as dt from enum import Enum import numpy as np @@ -24,8 +24,8 @@ def test_start_end_time(aws_mwr_handler): """Test that start and end times are read correctly.""" - assert aws_mwr_handler.start_time == datetime(2024, 9, 1, 12, 0) - assert aws_mwr_handler.end_time == datetime(2024, 9, 1, 12, 15) + assert aws_mwr_handler.start_time == dt(2024, 9, 1, 12, 0) + assert aws_mwr_handler.end_time == dt(2024, 9, 1, 12, 15) def test_orbit_number_start_end(aws_mwr_handler): @@ -36,7 +36,7 @@ def test_orbit_number_start_end(aws_mwr_handler): def test_metadata(aws_mwr_handler): """Test that the metadata is read correctly.""" - assert aws_mwr_handler.sensor == "MWR" + assert aws_mwr_handler.sensor == "mwr" assert aws_mwr_handler.platform_name == platform_name @@ -60,7 +60,7 @@ def test_get_channel_data(aws_mwr_handler, 
fake_mwr_data_array): assert res.attrs["orbital_parameters"]["sub_satellite_longitude_end"] == 296.79 assert res.dims == ("y", "x") assert "n_channels" not in res.coords - assert res.attrs["sensor"] == "MWR" + assert res.attrs["sensor"] == "mwr" assert res.attrs["platform_name"] == "AWS1" diff --git a/satpy/tests/reader_tests/test_aws1_mwr_l1c.py b/satpy/tests/reader_tests/test_aws1_mwr_l1c.py index fc4118bc34..86ee5600d7 100644 --- a/satpy/tests/reader_tests/test_aws1_mwr_l1c.py +++ b/satpy/tests/reader_tests/test_aws1_mwr_l1c.py @@ -57,7 +57,7 @@ def test_get_channel_data(aws_mwr_l1c_handler, fake_mwr_data_array): assert "y" in res.dims assert res.dims == ("y", "x") assert "n_channels" not in res.coords - assert res.attrs["sensor"] == "MWR" + assert res.attrs["sensor"] == "mwr" assert res.attrs["platform_name"] == "AWS1" From 0eb261a505fb7931010a95fc0e0647e2b6d30a24 Mon Sep 17 00:00:00 2001 From: "Adam.Dybbroe" Date: Thu, 19 Dec 2024 15:30:45 +0100 Subject: [PATCH 295/340] Fix file name pattern Signed-off-by: Adam.Dybbroe --- satpy/etc/readers/aws1_mwr_l1b_nc.yaml | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/satpy/etc/readers/aws1_mwr_l1b_nc.yaml b/satpy/etc/readers/aws1_mwr_l1b_nc.yaml index 03978afe49..a232621ba9 100644 --- a/satpy/etc/readers/aws1_mwr_l1b_nc.yaml +++ b/satpy/etc/readers/aws1_mwr_l1b_nc.yaml @@ -527,9 +527,7 @@ file_types: aws_l1b_nc: # W_XX-OHB-Unknown,SAT,1-AWS-1B-RAD_C_OHB_20230707124607_G_D_20220621090100_20220621090618_T_B____.nc # W_XX-OHB-Stockholm,SAT,AWS1-MWR-1B-RAD_C_OHB_20230823161321_G_D_20240115111111_20240115125434_T_B____.nc - # W_XX-OHB-Stockholm,SAT,AWS1-MWR-1B-RAD_C_OHB_20230816120142_G_D_20240115111111_20240115125434_T_B____radsim.nc file_reader: !!python/name:satpy.readers.mwr_l1b.AWS_EPS_Sterna_MWR_L1BFile file_patterns: [ - 
'W_XX-OHB-Stockholm,SAT,{platform_name}-MWR-1B-RAD_C_OHB_{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{disposition_mode:1s}_{processing_mode:1s}____.nc', - 'W_XX-OHB-Stockholm,SAT,{platform_name}-MWR-1B-RAD_C_OHB__{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_T_B____.nc', - 'W_XX-OHB-Stockholm,SAT,{platform_name}-MWR-1B-RAD_C_OHB_{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_T_B____radsim.nc'] + 'W_{country:2s}-{organisation:s}-{location:s},SAT,{platform_name}-MWR-1B-RAD_C_OHB_{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{disposition_mode:1s}_{processing_mode:1s}____.nc', + 'W_{country:2s}-{organisation:s}-{location:s},SAT,{platform_name}-MWR-1B-RAD_C_OHB__{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{disposition_mode:1s}_{processing_mode:1s}____.nc'] From 05d952ee9cb037f7e1e33fd77ad1b657e805f9fe Mon Sep 17 00:00:00 2001 From: "Adam.Dybbroe" Date: Thu, 19 Dec 2024 21:01:07 +0100 Subject: [PATCH 296/340] Refactor the AWS/EPS-STerna tests Signed-off-by: Adam.Dybbroe --- satpy/tests/reader_tests/conftest.py | 181 +++++++++++++-------------- 1 file changed, 90 insertions(+), 91 deletions(-) diff --git a/satpy/tests/reader_tests/conftest.py b/satpy/tests/reader_tests/conftest.py index 83a6291288..c78cf5ae1b 100644 --- a/satpy/tests/reader_tests/conftest.py +++ b/satpy/tests/reader_tests/conftest.py @@ -36,11 +36,7 @@ platform_name = "AWS1" # W_XX-EUMETSAT-Darmstadt,SAT,AWS1-MWR-1B-RAD_C_EUMT_20241121085911_G_D_20241109234502_20241110004559_T_N____.nc -eumetsat_file_pattern = "W_XX-EUMETSAT-Darmstadt,SAT,{platform_name}-MWR-1B-RAD_C_OHB_{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_T_B____.nc" # noqa - -esa_file_pattern = 
"W_XX-OHB-Stockholm,SAT,{platform_name}-MWR-1B-RAD_C_OHB_{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_T_B____.nc" # noqa - -esa_l1c_file_pattern = "W_XX-OHB-Stockholm,SAT,{platform_name}-MWR-1C-RAD_C_OHB__{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_T_B____.nc" # noqa +file_pattern = "W_{country:2s}-{organisation:s}-{location:s},SAT,{platform_name}-MWR-{processing_level}-RAD_C_{originator:4s}_{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_T_B____.nc" # noqa rng = np.random.default_rng() @@ -93,10 +89,20 @@ def make_fake_mwr_l1c_lonlats(geo_size, geo_dims): return (fake_lon_data, fake_lat_data) -@pytest.fixture(scope="module") -def eps_sterna_mwr_file(tmp_path_factory, fake_mwr_data_array): - """Create an EPS-Sterna MWR l1b file.""" - geo_dims = ["n_scans", "n_fovs", "n_feedhorns"] +def aws_eps_sterna_mwr_l1bfile(fake_mwr_data_array, eps_sterna=True): + """Create an AWS and EPS-Sterna MWR l1b file.""" + if eps_sterna: + n_feedhorns="n_feedhorns" + prefix = "" + longitude_attr = "longitude" + latitude_attr = "latitude" + else: + n_feedhorns="n_geo_groups" + prefix = "aws_" + longitude_attr = "aws_lon" + latitude_attr = "aws_lat" + + geo_dims = ["n_scans", "n_fovs", n_feedhorns] geo_size = 10 * 145 * 4 ds = DataTree() @@ -104,113 +110,84 @@ def eps_sterna_mwr_file(tmp_path_factory, fake_mwr_data_array): ds.attrs["sensing_start_time_utc"] = start_time.strftime(DATETIME_FORMAT) end_time = dt(2024, 9, 1, 12, 15) ds.attrs["sensing_end_time_utc"] = end_time.strftime(DATETIME_FORMAT) - processing_time = random_date(dt(2024, 9, 1, 13), dt(2030, 6, 1)) instrument = "MWR" ds.attrs["instrument"] = instrument ds.attrs["orbit_start"] = 9991 ds.attrs["orbit_end"] = 9992 - ds["data/calibration/toa_brightness_temperature"] = fake_mwr_data_array - ds["data/calibration/toa_brightness_temperature"].attrs["scale_factor"] = 0.001 - 
ds["data/calibration/toa_brightness_temperature"].attrs["add_offset"] = 0.0 - ds["data/calibration/toa_brightness_temperature"].attrs["missing_value"] = -2147483648 - ds["data/calibration/toa_brightness_temperature"].attrs["valid_min"] = 0 - ds["data/calibration/toa_brightness_temperature"].attrs["valid_max"] = 700000 + dset_name = f"data/calibration/{prefix}toa_brightness_temperature" + ds[dset_name] = fake_mwr_data_array + ds[dset_name].attrs["scale_factor"] = 0.001 + ds[dset_name].attrs["add_offset"] = 0.0 + ds[dset_name].attrs["missing_value"] = -2147483648 + ds[dset_name].attrs["valid_min"] = 0 + ds[dset_name].attrs["valid_max"] = 700000 fake_lon_data, fake_lat_data = make_fake_mwr_lonlats(geo_size, geo_dims) - ds["data/navigation/longitude"] = fake_lon_data - ds["data/navigation/longitude"].attrs["scale_factor"] = 1e-4 - ds["data/navigation/longitude"].attrs["add_offset"] = 0.0 - ds["data/navigation/latitude"] = fake_lat_data - ds["data/navigation/solar_azimuth_angle"] = make_fake_angles(geo_size, geo_dims) - ds["data/navigation/solar_zenith_angle"] = make_fake_angles(geo_size, geo_dims) - ds["data/navigation/satellite_azimuth_angle"] = make_fake_angles(geo_size, geo_dims) - ds["data/navigation/satellite_zenith_angle"] = make_fake_angles(geo_size, geo_dims) + ds[f"data/navigation/{longitude_attr}"] = fake_lon_data + ds[f"data/navigation/{longitude_attr}"].attrs["scale_factor"] = 1e-4 + ds[f"data/navigation/{longitude_attr}"].attrs["add_offset"] = 0.0 + ds[f"data/navigation/{latitude_attr}"] = fake_lat_data + ds[f"data/navigation/{prefix}solar_azimuth_angle"] = make_fake_angles(geo_size, geo_dims) + ds[f"data/navigation/{prefix}solar_zenith_angle"] = make_fake_angles(geo_size, geo_dims) + ds[f"data/navigation/{prefix}satellite_azimuth_angle"] = make_fake_angles(geo_size, geo_dims) + ds[f"data/navigation/{prefix}satellite_zenith_angle"] = make_fake_angles(geo_size, geo_dims) ds["status/satellite/subsat_latitude_end"] = np.array(22.39) 
ds["status/satellite/subsat_longitude_start"] = np.array(304.79) ds["status/satellite/subsat_latitude_start"] = np.array(55.41) ds["status/satellite/subsat_longitude_end"] = np.array(296.79) - tmp_dir = tmp_path_factory.mktemp("eps_sterna_mwr_l1b_tests") - filename = tmp_dir / compose(eumetsat_file_pattern, dict(start_time=start_time, end_time=end_time, - processing_time=processing_time, - platform_name=platform_name)) - - ds.to_netcdf(filename) - return filename - + return ds @pytest.fixture(scope="module") -def eps_sterna_mwr_handler(eps_sterna_mwr_file): - """Create an EPS-Sterna MWR filehandler.""" - filename_info = parse(eumetsat_file_pattern, os.path.basename(eps_sterna_mwr_file)) - filetype_info = dict() - filetype_info["file_type"] = "eps_sterna_mwr_l1b" - return AWS_EPS_Sterna_MWR_L1BFile(eps_sterna_mwr_file, filename_info, filetype_info) +def eps_sterna_mwr_file(tmp_path_factory, fake_mwr_data_array): + """Create an EPS-Sterna MWR l1b file.""" + ds = aws_eps_sterna_mwr_l1bfile(fake_mwr_data_array, eps_sterna=True) + tmp_dir = tmp_path_factory.mktemp("eps_sterna_mwr_l1b_tests") + start_time = dt.fromisoformat(ds.attrs["sensing_start_time_utc"]) + end_time = dt.fromisoformat(ds.attrs["sensing_end_time_utc"]) + platform_name = "AWS1" + processing_time = random_date(dt(2024, 9, 1, 13), dt(2030, 6, 1)) + filename = tmp_dir / compose(file_pattern, dict(country="XX", + organisation="EUMETSAT", + location="Darmstadt", + processing_level="1B", + originator="EUMT", + start_time=start_time, end_time=end_time, + processing_time=processing_time, + platform_name=platform_name)) + ds.to_netcdf(filename) + return filename @pytest.fixture(scope="module") def aws_mwr_file(tmp_path_factory, fake_mwr_data_array): """Create an AWS MWR l1b file.""" - geo_dims = ["n_scans", "n_fovs", "n_geo_groups"] - geo_size = 10 * 145 * 4 - - ds = DataTree() - start_time = dt(2024, 9, 1, 12, 0) - ds.attrs["sensing_start_time_utc"] = start_time.strftime(DATETIME_FORMAT) - end_time = dt(2024, 
9, 1, 12, 15) - ds.attrs["sensing_end_time_utc"] = end_time.strftime(DATETIME_FORMAT) - processing_time = random_date(dt(2024, 6, 1), dt(2030, 6, 1)) - - instrument = "MWR" - ds.attrs["instrument"] = instrument - ds.attrs["orbit_start"] = 9991 - ds.attrs["orbit_end"] = 9992 - ds["data/calibration/aws_toa_brightness_temperature"] = fake_mwr_data_array - ds["data/calibration/aws_toa_brightness_temperature"].attrs["scale_factor"] = 0.001 - ds["data/calibration/aws_toa_brightness_temperature"].attrs["add_offset"] = 0.0 - ds["data/calibration/aws_toa_brightness_temperature"].attrs["missing_value"] = -2147483648 - ds["data/calibration/aws_toa_brightness_temperature"].attrs["valid_min"] = 0 - ds["data/calibration/aws_toa_brightness_temperature"].attrs["valid_max"] = 700000 - - fake_lon_data, fake_lat_data = make_fake_mwr_lonlats(geo_size, geo_dims) - - ds["data/navigation/aws_lon"] = fake_lon_data - ds["data/navigation/aws_lon"].attrs["scale_factor"] = 1e-4 - ds["data/navigation/aws_lon"].attrs["add_offset"] = 0.0 - ds["data/navigation/aws_lat"] = fake_lat_data - ds["data/navigation/aws_solar_azimuth_angle"] = make_fake_angles(geo_size, geo_dims) - ds["data/navigation/aws_solar_zenith_angle"] = make_fake_angles(geo_size, geo_dims) - ds["data/navigation/aws_satellite_azimuth_angle"] = make_fake_angles(geo_size, geo_dims) - ds["data/navigation/aws_satellite_zenith_angle"] = make_fake_angles(geo_size, geo_dims) - ds["status/satellite/subsat_latitude_end"] = np.array(22.39) - ds["status/satellite/subsat_longitude_start"] = np.array(304.79) - ds["status/satellite/subsat_latitude_start"] = np.array(55.41) - ds["status/satellite/subsat_longitude_end"] = np.array(296.79) + ds = aws_eps_sterna_mwr_l1bfile(fake_mwr_data_array, eps_sterna=False) tmp_dir = tmp_path_factory.mktemp("aws_l1b_tests") - filename = tmp_dir / compose(esa_file_pattern, dict(start_time=start_time, end_time=end_time, - processing_time=processing_time, platform_name=platform_name)) - + start_time = 
dt.fromisoformat(ds.attrs["sensing_start_time_utc"]) + end_time = dt.fromisoformat(ds.attrs["sensing_end_time_utc"]) + platform_name = "AWS1" + processing_time = random_date(dt(2024, 9, 1, 13), dt(2030, 6, 1)) + filename = tmp_dir / compose(file_pattern, dict(country="SE", + organisation="SMHI", + location="Norrkoping", + processing_level="1B", + originator="SMHI", + start_time=start_time, end_time=end_time, + processing_time=processing_time, + platform_name=platform_name)) ds.to_netcdf(filename) return filename -@pytest.fixture(scope="module") -def aws_mwr_handler(aws_mwr_file): - """Create an AWS MWR filehandler.""" - filename_info = parse(esa_file_pattern, os.path.basename(aws_mwr_file)) - filetype_info = dict() - filetype_info["file_type"] = "aws1_mwr_l1b" - return AWS_EPS_Sterna_MWR_L1BFile(aws_mwr_file, filename_info, filetype_info) - - @pytest.fixture(scope="module") def aws_mwr_l1c_file(tmp_path_factory, fake_mwr_data_array): """Create an AWS MWR l1c file.""" geo_dims = ["n_scans", "n_fovs"] - geo_size = 10*145 + geo_size = 10 * 145 ds = DataTree() start_time = dt(2024, 9, 1, 12, 0) @@ -241,18 +218,40 @@ def aws_mwr_l1c_file(tmp_path_factory, fake_mwr_data_array): ds["data/navigation/aws_satellite_zenith_angle"] = make_fake_angles(geo_size, geo_dims, shape=(10, 145)) tmp_dir = tmp_path_factory.mktemp("aws_l1c_tests") - filename = tmp_dir / compose(esa_l1c_file_pattern, dict(start_time=start_time, end_time=end_time, - processing_time=processing_time, - platform_name=platform_name)) - + filename = tmp_dir / compose(file_pattern, dict(country="SE", + organisation="SMHI", + location="Norrkoping", + processing_level="1C", + originator="SMHI", + start_time=start_time, end_time=end_time, + processing_time=processing_time, + platform_name=platform_name)) ds.to_netcdf(filename) return filename +@pytest.fixture(scope="module") +def eps_sterna_mwr_handler(eps_sterna_mwr_file): + """Create an EPS-Sterna MWR filehandler.""" + filename_info = parse(file_pattern, 
os.path.basename(eps_sterna_mwr_file)) + filetype_info = dict() + filetype_info["file_type"] = "eps_sterna_mwr_l1b" + return AWS_EPS_Sterna_MWR_L1BFile(eps_sterna_mwr_file, filename_info, filetype_info) + + +@pytest.fixture(scope="module") +def aws_mwr_handler(aws_mwr_file): + """Create an AWS MWR filehandler.""" + filename_info = parse(file_pattern, os.path.basename(aws_mwr_file)) + filetype_info = dict() + filetype_info["file_type"] = "aws1_mwr_l1b" + return AWS_EPS_Sterna_MWR_L1BFile(aws_mwr_file, filename_info, filetype_info) + + @pytest.fixture(scope="module") def aws_mwr_l1c_handler(aws_mwr_l1c_file): """Create an AWS MWR level-1c filehandler.""" - filename_info = parse(esa_l1c_file_pattern, os.path.basename(aws_mwr_l1c_file)) + filename_info = parse(file_pattern, os.path.basename(aws_mwr_l1c_file)) filetype_info = dict() filetype_info["file_type"] = "aws1_mwr_l1c" return AWS_MWR_L1CFile(aws_mwr_l1c_file, filename_info, filetype_info) From 152b45ec947a3a5fbc250f34ba2a340e55d6469e Mon Sep 17 00:00:00 2001 From: "Adam.Dybbroe" Date: Fri, 20 Dec 2024 12:05:07 +0100 Subject: [PATCH 297/340] Fixed datetime import syntax - use custom practice Signed-off-by: Adam.Dybbroe --- satpy/tests/reader_tests/conftest.py | 24 +++++++++---------- satpy/tests/reader_tests/test_aws1_mwr_l1b.py | 6 ++--- satpy/tests/reader_tests/test_aws1_mwr_l1c.py | 2 +- 3 files changed, 16 insertions(+), 16 deletions(-) diff --git a/satpy/tests/reader_tests/conftest.py b/satpy/tests/reader_tests/conftest.py index c78cf5ae1b..241f00f28f 100644 --- a/satpy/tests/reader_tests/conftest.py +++ b/satpy/tests/reader_tests/conftest.py @@ -18,8 +18,8 @@ """Setup and configuration for all reader tests.""" +import datetime as dt import os -from datetime import datetime as dt from datetime import timedelta from random import randrange @@ -106,9 +106,9 @@ def aws_eps_sterna_mwr_l1bfile(fake_mwr_data_array, eps_sterna=True): geo_size = 10 * 145 * 4 ds = DataTree() - start_time = dt(2024, 9, 1, 12, 0) + 
start_time = dt.datetime(2024, 9, 1, 12, 0) ds.attrs["sensing_start_time_utc"] = start_time.strftime(DATETIME_FORMAT) - end_time = dt(2024, 9, 1, 12, 15) + end_time = dt.datetime(2024, 9, 1, 12, 15) ds.attrs["sensing_end_time_utc"] = end_time.strftime(DATETIME_FORMAT) instrument = "MWR" @@ -146,10 +146,10 @@ def eps_sterna_mwr_file(tmp_path_factory, fake_mwr_data_array): ds = aws_eps_sterna_mwr_l1bfile(fake_mwr_data_array, eps_sterna=True) tmp_dir = tmp_path_factory.mktemp("eps_sterna_mwr_l1b_tests") - start_time = dt.fromisoformat(ds.attrs["sensing_start_time_utc"]) - end_time = dt.fromisoformat(ds.attrs["sensing_end_time_utc"]) + start_time = dt.datetime.fromisoformat(ds.attrs["sensing_start_time_utc"]) + end_time = dt.datetime.fromisoformat(ds.attrs["sensing_end_time_utc"]) platform_name = "AWS1" - processing_time = random_date(dt(2024, 9, 1, 13), dt(2030, 6, 1)) + processing_time = random_date(dt.datetime(2024, 9, 1, 13), dt.datetime(2030, 6, 1)) filename = tmp_dir / compose(file_pattern, dict(country="XX", organisation="EUMETSAT", location="Darmstadt", @@ -167,10 +167,10 @@ def aws_mwr_file(tmp_path_factory, fake_mwr_data_array): ds = aws_eps_sterna_mwr_l1bfile(fake_mwr_data_array, eps_sterna=False) tmp_dir = tmp_path_factory.mktemp("aws_l1b_tests") - start_time = dt.fromisoformat(ds.attrs["sensing_start_time_utc"]) - end_time = dt.fromisoformat(ds.attrs["sensing_end_time_utc"]) + start_time = dt.datetime.fromisoformat(ds.attrs["sensing_start_time_utc"]) + end_time = dt.datetime.fromisoformat(ds.attrs["sensing_end_time_utc"]) platform_name = "AWS1" - processing_time = random_date(dt(2024, 9, 1, 13), dt(2030, 6, 1)) + processing_time = random_date(dt.datetime(2024, 9, 1, 13), dt.datetime(2030, 6, 1)) filename = tmp_dir / compose(file_pattern, dict(country="SE", organisation="SMHI", location="Norrkoping", @@ -190,11 +190,11 @@ def aws_mwr_l1c_file(tmp_path_factory, fake_mwr_data_array): geo_size = 10 * 145 ds = DataTree() - start_time = dt(2024, 9, 1, 12, 0) + 
start_time = dt.datetime(2024, 9, 1, 12, 0) ds.attrs["sensing_start_time_utc"] = start_time.strftime(DATETIME_FORMAT) - end_time = dt(2024, 9, 1, 12, 15) + end_time = dt.datetime(2024, 9, 1, 12, 15) ds.attrs["sensing_end_time_utc"] = end_time.strftime(DATETIME_FORMAT) - processing_time = random_date(dt(2024, 6, 1), dt(2030, 6, 1)) + processing_time = random_date(dt.datetime(2024, 6, 1), dt.datetime(2030, 6, 1)) ds.attrs["instrument"] = "MWR" ds.attrs["orbit_start"] = 9991 diff --git a/satpy/tests/reader_tests/test_aws1_mwr_l1b.py b/satpy/tests/reader_tests/test_aws1_mwr_l1b.py index a88cd8e062..cfecfd4a66 100644 --- a/satpy/tests/reader_tests/test_aws1_mwr_l1b.py +++ b/satpy/tests/reader_tests/test_aws1_mwr_l1b.py @@ -1,6 +1,6 @@ """Tests for aws l1b filehandlers.""" -from datetime import datetime as dt +import datetime as dt from enum import Enum import numpy as np @@ -24,8 +24,8 @@ def test_start_end_time(aws_mwr_handler): """Test that start and end times are read correctly.""" - assert aws_mwr_handler.start_time == dt(2024, 9, 1, 12, 0) - assert aws_mwr_handler.end_time == dt(2024, 9, 1, 12, 15) + assert aws_mwr_handler.start_time == dt.datetime(2024, 9, 1, 12, 0) + assert aws_mwr_handler.end_time == dt.datetime(2024, 9, 1, 12, 15) def test_orbit_number_start_end(aws_mwr_handler): diff --git a/satpy/tests/reader_tests/test_aws1_mwr_l1c.py b/satpy/tests/reader_tests/test_aws1_mwr_l1c.py index 5ef44a9636..9fe034a76d 100644 --- a/satpy/tests/reader_tests/test_aws1_mwr_l1c.py +++ b/satpy/tests/reader_tests/test_aws1_mwr_l1c.py @@ -30,7 +30,7 @@ geo_dims = ["n_scans", "n_fovs"] -geo_size = 10*145 +geo_size = 10 * 145 fake_lon_data, fake_lat_data = make_fake_mwr_l1c_lonlats(geo_size, geo_dims) fake_sun_azi_data = make_fake_angles(geo_size, geo_dims, shape=(10, 145)) fake_sun_zen_data = make_fake_angles(geo_size, geo_dims, shape=(10, 145)) From 4ad6ef4ec4bcc9344255a88336ab9ed8a2f3a4db Mon Sep 17 00:00:00 2001 From: "Adam.Dybbroe" Date: Fri, 20 Dec 2024 12:07:44 +0100 
Subject: [PATCH 298/340] Remove redundant timedelta import Signed-off-by: Adam.Dybbroe --- satpy/tests/reader_tests/conftest.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/satpy/tests/reader_tests/conftest.py b/satpy/tests/reader_tests/conftest.py index 241f00f28f..cfee6d4523 100644 --- a/satpy/tests/reader_tests/conftest.py +++ b/satpy/tests/reader_tests/conftest.py @@ -20,7 +20,6 @@ import datetime as dt import os -from datetime import timedelta from random import randrange import numpy as np @@ -46,7 +45,7 @@ def random_date(start, end): delta = end - start int_delta = (delta.days * 24 * 60 * 60) + delta.seconds random_second = randrange(int_delta) - return start + timedelta(seconds=random_second) + return start + dt.timedelta(seconds=random_second) @pytest.fixture(scope="session") From 2b8aecb238e12d37a854c045af59e587ac059da8 Mon Sep 17 00:00:00 2001 From: "Adam.Dybbroe" Date: Fri, 20 Dec 2024 12:18:36 +0100 Subject: [PATCH 299/340] Clean up and improve file patterns for AWS/EPS-Sterna Signed-off-by: Adam.Dybbroe --- satpy/etc/readers/aws1_mwr_l1b_nc.yaml | 4 ++-- satpy/etc/readers/aws1_mwr_l1c_nc.yaml | 4 ++-- satpy/etc/readers/eps_sterna_mwr_l1b_nc.yaml | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/satpy/etc/readers/aws1_mwr_l1b_nc.yaml b/satpy/etc/readers/aws1_mwr_l1b_nc.yaml index a232621ba9..bbc4a567e7 100644 --- a/satpy/etc/readers/aws1_mwr_l1b_nc.yaml +++ b/satpy/etc/readers/aws1_mwr_l1b_nc.yaml @@ -529,5 +529,5 @@ file_types: # W_XX-OHB-Stockholm,SAT,AWS1-MWR-1B-RAD_C_OHB_20230823161321_G_D_20240115111111_20240115125434_T_B____.nc file_reader: !!python/name:satpy.readers.mwr_l1b.AWS_EPS_Sterna_MWR_L1BFile file_patterns: [ - 'W_{country:2s}-{organisation:s}-{location:s},SAT,{platform_name}-MWR-1B-RAD_C_OHB_{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{disposition_mode:1s}_{processing_mode:1s}____.nc', - 
'W_{country:2s}-{organisation:s}-{location:s},SAT,{platform_name}-MWR-1B-RAD_C_OHB__{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{disposition_mode:1s}_{processing_mode:1s}____.nc'] + 'W_{country:2s}-{organisation:s}-{location:s},SAT,{platform_name}-MWR-1B-RAD_C_{originator:4s}_{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{disposition_mode:1s}_{processing_mode:1s}____.nc' + ] diff --git a/satpy/etc/readers/aws1_mwr_l1c_nc.yaml b/satpy/etc/readers/aws1_mwr_l1c_nc.yaml index 4a6215a5a1..e62b88d54d 100644 --- a/satpy/etc/readers/aws1_mwr_l1c_nc.yaml +++ b/satpy/etc/readers/aws1_mwr_l1c_nc.yaml @@ -368,5 +368,5 @@ file_types: aws_l1c_nc: file_reader: !!python/name:satpy.readers.mwr_l1c.AWS_MWR_L1CFile file_patterns: [ - 'W_XX-OHB-Stockholm,SAT,{platform_name}-MWR-1C-RAD_C_OHB_{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_T_B____.nc', - 'W_XX-OHB-Stockholm,SAT,{platform_name}-MWR-1C-RAD_C_OHB__{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_T_B____.nc',] + 'W_{country:2s}-{organisation:s}-{location:s},SAT,{platform_name}-MWR-1C-RAD_C_{originator:4s}_{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{disposition_mode:1s}_{processing_mode:1s}____.nc' + ] diff --git a/satpy/etc/readers/eps_sterna_mwr_l1b_nc.yaml b/satpy/etc/readers/eps_sterna_mwr_l1b_nc.yaml index 97a8e8888c..efc7dc8bc9 100644 --- a/satpy/etc/readers/eps_sterna_mwr_l1b_nc.yaml +++ b/satpy/etc/readers/eps_sterna_mwr_l1b_nc.yaml @@ -528,5 +528,5 @@ file_types: # W_XX-EUMETSAT-Darmstadt,SAT,AWS1-MWR-1B-RAD_C_EUMT_20241121085911_G_D_20241109234502_20241110004559_T_N____.nc file_reader: !!python/name:satpy.readers.mwr_l1b.AWS_EPS_Sterna_MWR_L1BFile file_patterns: [ - 
'W_XX-EUMETSAT-Darmstadt,SAT,{platform_name}-MWR-1B-RAD_C_EUMT_{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{disposition_mode:1s}_{processing_mode:1s}____.nc', + 'W_{country:2s}-{organisation:s}-{location:s},SAT,{platform_name}-MWR-1B-RAD_C_{originator:4s}_{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{disposition_mode:1s}_{processing_mode:1s}____.nc' ] From cd21dd258f2f8545332cdd69f5672eee179c0acd Mon Sep 17 00:00:00 2001 From: "Adam.Dybbroe" Date: Fri, 20 Dec 2024 13:26:50 +0100 Subject: [PATCH 300/340] Fix short/long names and make improvements raised in the review Signed-off-by: Adam.Dybbroe --- satpy/etc/readers/aws1_mwr_l1b_nc.yaml | 2 +- satpy/etc/readers/aws1_mwr_l1c_nc.yaml | 20 +++++++++---------- satpy/etc/readers/eps_sterna_mwr_l1b_nc.yaml | 4 ++-- satpy/readers/mwr_l1c.py | 2 +- satpy/tests/reader_tests/conftest.py | 2 +- satpy/tests/reader_tests/test_aws1_mwr_l1b.py | 7 +++---- satpy/tests/reader_tests/test_aws1_mwr_l1c.py | 7 ++----- .../reader_tests/test_eps_sterna_mwr_l1b.py | 5 +++++ 8 files changed, 25 insertions(+), 24 deletions(-) diff --git a/satpy/etc/readers/aws1_mwr_l1b_nc.yaml b/satpy/etc/readers/aws1_mwr_l1b_nc.yaml index bbc4a567e7..4b3cccc648 100644 --- a/satpy/etc/readers/aws1_mwr_l1b_nc.yaml +++ b/satpy/etc/readers/aws1_mwr_l1b_nc.yaml @@ -1,6 +1,6 @@ reader: name: aws1_mwr_l1b_nc - short_name: AWS1 MWR L1B RAD NetCDF4 + short_name: AWS1 MWR L1B long_name: AWS1 MWR L1B Radiance (NetCDF4) description: Reader for the ESA AWS (Arctic Weather Satellite) Microwave Radiometer (MWR) level-1b files in netCDF4. 
reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader diff --git a/satpy/etc/readers/aws1_mwr_l1c_nc.yaml b/satpy/etc/readers/aws1_mwr_l1c_nc.yaml index e62b88d54d..e27cc99a2d 100644 --- a/satpy/etc/readers/aws1_mwr_l1c_nc.yaml +++ b/satpy/etc/readers/aws1_mwr_l1c_nc.yaml @@ -1,7 +1,7 @@ reader: name: aws1_mwr_l1c_nc - short_name: AWS L1C RAD NetCDF4 - long_name: AWS L1C Radiance (NetCDF4) + short_name: AWS1 MWR L1C + long_name: AWS1 MWR L1C Radiance (NetCDF4) description: Reader for the ESA AWS (Arctic Weather Satellite) MWR level-1c files in netCDF4. reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [mwr,] @@ -328,8 +328,8 @@ datasets: # --- Navigation data --- - solar_azimuth: - name: solar_azimuth + solar_azimuth_angle: + name: solar_azimuth_angle file_type: aws_l1c_nc file_key: data/navigation/aws_solar_azimuth_angle standard_name: solar_azimuth_angle @@ -337,8 +337,8 @@ datasets: - longitude - latitude - solar_zenith: - name: solar_zenith + solar_zenith_angle: + name: solar_zenith_angle file_type: aws_l1c_nc file_key: data/navigation/aws_solar_zenith_angle standard_name: solar_zenith_angle @@ -346,8 +346,8 @@ datasets: - longitude - latitude - satellite_azimuth: - name: satellite_azimuth + satellite_azimuth_angle: + name: satellite_azimuth_angle file_type: aws_l1c_nc file_key: data/navigation/aws_satellite_azimuth_angle standard_name: satellite_azimuth_angle @@ -355,8 +355,8 @@ datasets: - longitude - latitude - satellite_zenith: - name: satellite_zenith + satellite_zenith_angle: + name: satellite_zenith_angle file_type: aws_l1c_nc file_key: data/navigation/aws_satellite_zenith_angle standard_name: satellite_zenith_angle diff --git a/satpy/etc/readers/eps_sterna_mwr_l1b_nc.yaml b/satpy/etc/readers/eps_sterna_mwr_l1b_nc.yaml index efc7dc8bc9..fe5bea93b2 100644 --- a/satpy/etc/readers/eps_sterna_mwr_l1b_nc.yaml +++ b/satpy/etc/readers/eps_sterna_mwr_l1b_nc.yaml @@ -1,7 +1,7 @@ reader: name: eps_sterna_mwr_l1b_nc - short_name: AWS 
L1B RAD NetCDF4 - long_name: AWS L1B Radiance (NetCDF4) + short_name: EPS-Sterna MWR L1B + long_name: EPS-Sterna MWR L1B Radiance (NetCDF4) description: Reader for the EUMETSAT EPS-Sterna radiometer level-1b files in netCDF4. reader: !!python/name:satpy.readers.yaml_reader.FileYAMLReader sensors: [mwr,] diff --git a/satpy/readers/mwr_l1c.py b/satpy/readers/mwr_l1c.py index d14ab6783a..0182b1d77c 100644 --- a/satpy/readers/mwr_l1c.py +++ b/satpy/readers/mwr_l1c.py @@ -14,7 +14,7 @@ # along with this program. If not, see . """Reader for the Arctic Weather Satellite (AWS) MWR level-1c data. -MWR = Microwaver Radiometer, onboard AWS and EPS-Sterna +MWR = Microwave Radiometer, onboard AWS and EPS-Sterna Sample data provided by ESA September 27, 2024. """ diff --git a/satpy/tests/reader_tests/conftest.py b/satpy/tests/reader_tests/conftest.py index cfee6d4523..92e9ef5445 100644 --- a/satpy/tests/reader_tests/conftest.py +++ b/satpy/tests/reader_tests/conftest.py @@ -147,7 +147,7 @@ def eps_sterna_mwr_file(tmp_path_factory, fake_mwr_data_array): tmp_dir = tmp_path_factory.mktemp("eps_sterna_mwr_l1b_tests") start_time = dt.datetime.fromisoformat(ds.attrs["sensing_start_time_utc"]) end_time = dt.datetime.fromisoformat(ds.attrs["sensing_end_time_utc"]) - platform_name = "AWS1" + platform_name = "ST01" processing_time = random_date(dt.datetime(2024, 9, 1, 13), dt.datetime(2030, 6, 1)) filename = tmp_dir / compose(file_pattern, dict(country="XX", organisation="EUMETSAT", diff --git a/satpy/tests/reader_tests/test_aws1_mwr_l1b.py b/satpy/tests/reader_tests/test_aws1_mwr_l1b.py index cfecfd4a66..59c429cca5 100644 --- a/satpy/tests/reader_tests/test_aws1_mwr_l1b.py +++ b/satpy/tests/reader_tests/test_aws1_mwr_l1b.py @@ -8,8 +8,7 @@ from satpy.tests.reader_tests.conftest import make_fake_angles, make_fake_mwr_lonlats -platform_name = "AWS1" -file_pattern = 
"W_XX-OHB-Stockholm,SAT,{platform_name}-MWR-1B-RAD_C_OHB_{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_T_B____.nc" # noqa +PLATFORM_NAME = "AWS1" geo_dims = ["n_scans", "n_fovs", "n_geo_groups"] @@ -37,7 +36,7 @@ def test_orbit_number_start_end(aws_mwr_handler): def test_metadata(aws_mwr_handler): """Test that the metadata is read correctly.""" assert aws_mwr_handler.sensor == "mwr" - assert aws_mwr_handler.platform_name == platform_name + assert aws_mwr_handler.platform_name == PLATFORM_NAME def test_get_channel_data(aws_mwr_handler, fake_mwr_data_array): @@ -61,7 +60,7 @@ def test_get_channel_data(aws_mwr_handler, fake_mwr_data_array): assert res.dims == ("y", "x") assert "n_channels" not in res.coords assert res.attrs["sensor"] == "mwr" - assert res.attrs["platform_name"] == "AWS1" + assert res.attrs["platform_name"] == PLATFORM_NAME @pytest.mark.parametrize(("id_name", "file_key", "fake_array"), diff --git a/satpy/tests/reader_tests/test_aws1_mwr_l1c.py b/satpy/tests/reader_tests/test_aws1_mwr_l1c.py index 9fe034a76d..7ebca463fa 100644 --- a/satpy/tests/reader_tests/test_aws1_mwr_l1c.py +++ b/satpy/tests/reader_tests/test_aws1_mwr_l1c.py @@ -24,10 +24,7 @@ from satpy.tests.reader_tests.conftest import make_fake_angles, make_fake_mwr_l1c_lonlats -platform_name = "AWS1" -# W_XX-OHB-Stockholm,SAT,AWS1-MWR-1C-RAD_C_OHB__20241126183628_G_D_20240913222540_20240914000332_T_B____.nc -file_pattern = "W_XX-OHB-Stockholm,SAT,{platform_name}-MWR-1C-RAD_C_OHB__{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_T_B____.nc" # noqa - +PLATFORM_NAME = "AWS1" geo_dims = ["n_scans", "n_fovs"] geo_size = 10 * 145 @@ -57,7 +54,7 @@ def test_get_channel_data(aws_mwr_l1c_handler, fake_mwr_data_array): assert res.dims == ("y", "x") assert "n_channels" not in res.coords assert res.attrs["sensor"] == "mwr" - assert res.attrs["platform_name"] == "AWS1" + assert res.attrs["platform_name"] == PLATFORM_NAME 
@pytest.mark.parametrize(("id_name", "file_key", "fake_array"), diff --git a/satpy/tests/reader_tests/test_eps_sterna_mwr_l1b.py b/satpy/tests/reader_tests/test_eps_sterna_mwr_l1b.py index 0620bc8437..bd9be5b694 100644 --- a/satpy/tests/reader_tests/test_eps_sterna_mwr_l1b.py +++ b/satpy/tests/reader_tests/test_eps_sterna_mwr_l1b.py @@ -61,3 +61,8 @@ def test_try_get_data_not_in_file(eps_sterna_mwr_handler): match_str = "Dataset aws_toa_brightness_temperature not available or not supported yet!" with pytest.raises(NotImplementedError, match=match_str): _ = eps_sterna_mwr_handler.get_dataset(did, dataset_info) + +def test_metadata(eps_sterna_mwr_handler): + """Test that the metadata is read correctly.""" + assert eps_sterna_mwr_handler.sensor == "mwr" + assert eps_sterna_mwr_handler.platform_name == "ST01" From a70375726f4b81dd80cf79a01c6e436add1278c1 Mon Sep 17 00:00:00 2001 From: "Adam.Dybbroe" Date: Fri, 20 Dec 2024 14:02:43 +0100 Subject: [PATCH 301/340] Add missing enhancement Signed-off-by: Adam.Dybbroe --- satpy/etc/enhancements/mwr.yaml | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/satpy/etc/enhancements/mwr.yaml b/satpy/etc/enhancements/mwr.yaml index 4a9cff6354..6c39440de8 100644 --- a/satpy/etc/enhancements/mwr.yaml +++ b/satpy/etc/enhancements/mwr.yaml @@ -13,3 +13,12 @@ enhancements: - name: gamma method: !!python/name:satpy.enhancements.gamma kwargs: {gamma: 1.2} + + tbs_colors: + standard_name: tbs_colors + operations: + - name: colorize + method: !!python/name:satpy.enhancements.colorize + kwargs: + palettes: + - {colors: spectral, min_value: 280, max_value: 180} From 3aa40a0a975bd5ad684f509a22b2e7ef55afe173 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 1 Jan 2025 10:25:30 +0000 Subject: [PATCH 302/340] Bump pypa/gh-action-pypi-publish from 1.12.2 to 1.12.3 Bumps [pypa/gh-action-pypi-publish](https://github.com/pypa/gh-action-pypi-publish) from 1.12.2 to 1.12.3. 
- [Release notes](https://github.com/pypa/gh-action-pypi-publish/releases) - [Commits](https://github.com/pypa/gh-action-pypi-publish/compare/v1.12.2...v1.12.3) --- updated-dependencies: - dependency-name: pypa/gh-action-pypi-publish dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- .github/workflows/deploy-sdist.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/deploy-sdist.yaml b/.github/workflows/deploy-sdist.yaml index f97f137b80..fd0429062a 100644 --- a/.github/workflows/deploy-sdist.yaml +++ b/.github/workflows/deploy-sdist.yaml @@ -23,7 +23,7 @@ jobs: - name: Publish package to PyPI if: github.event.action == 'published' - uses: pypa/gh-action-pypi-publish@v1.12.2 + uses: pypa/gh-action-pypi-publish@v1.12.3 with: user: __token__ password: ${{ secrets.pypi_password }} From a17b8b33306ccf432239c18d18c1c7876c984ec8 Mon Sep 17 00:00:00 2001 From: "Adam.Dybbroe" Date: Sun, 5 Jan 2025 21:24:34 +0100 Subject: [PATCH 303/340] Refactor AWS/EPS-Sterna l1b file fixtures Signed-off-by: Adam.Dybbroe --- satpy/tests/reader_tests/conftest.py | 58 +++++++++++++--------------- 1 file changed, 26 insertions(+), 32 deletions(-) diff --git a/satpy/tests/reader_tests/conftest.py b/satpy/tests/reader_tests/conftest.py index 92e9ef5445..a6459d96d2 100644 --- a/satpy/tests/reader_tests/conftest.py +++ b/satpy/tests/reader_tests/conftest.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -# Copyright (c) 2021, 2024 Satpy developers +# Copyright (c) 2021, 2024, 2025 Satpy developers # # This file is part of satpy. 
# @@ -139,47 +139,41 @@ def aws_eps_sterna_mwr_l1bfile(fake_mwr_data_array, eps_sterna=True): return ds -@pytest.fixture(scope="module") -def eps_sterna_mwr_file(tmp_path_factory, fake_mwr_data_array): - """Create an EPS-Sterna MWR l1b file.""" - ds = aws_eps_sterna_mwr_l1bfile(fake_mwr_data_array, eps_sterna=True) - tmp_dir = tmp_path_factory.mktemp("eps_sterna_mwr_l1b_tests") +def create_mwr_file(tmpdir, data_array, eps_sterna=False): + """Create an AWS or EPS-Sterna MWR l1b file.""" + ds = aws_eps_sterna_mwr_l1bfile(data_array, eps_sterna=eps_sterna) start_time = dt.datetime.fromisoformat(ds.attrs["sensing_start_time_utc"]) end_time = dt.datetime.fromisoformat(ds.attrs["sensing_end_time_utc"]) - platform_name = "ST01" + if eps_sterna: + platform_name = "ST01" + else: + platform_name = "AWS1" + processing_time = random_date(dt.datetime(2024, 9, 1, 13), dt.datetime(2030, 6, 1)) - filename = tmp_dir / compose(file_pattern, dict(country="XX", - organisation="EUMETSAT", - location="Darmstadt", - processing_level="1B", - originator="EUMT", - start_time=start_time, end_time=end_time, - processing_time=processing_time, - platform_name=platform_name)) + filename = tmpdir / compose(file_pattern, dict(country="XX", + organisation="EUMETSAT", + location="Darmstadt", + processing_level="1B", + originator="EUMT", + start_time=start_time, end_time=end_time, + processing_time=processing_time, + platform_name=platform_name)) ds.to_netcdf(filename) return filename +@pytest.fixture(scope="module") +def eps_sterna_mwr_file(tmp_path_factory, fake_mwr_data_array): + """Create an EPS-Sterna MWR l1b file.""" + tmpdir = tmp_path_factory.mktemp("eps_sterna_mwr_l1b_tests") + return create_mwr_file(tmpdir, fake_mwr_data_array, eps_sterna=True) + + @pytest.fixture(scope="module") def aws_mwr_file(tmp_path_factory, fake_mwr_data_array): """Create an AWS MWR l1b file.""" - ds = aws_eps_sterna_mwr_l1bfile(fake_mwr_data_array, eps_sterna=False) - - tmp_dir = 
tmp_path_factory.mktemp("aws_l1b_tests") - start_time = dt.datetime.fromisoformat(ds.attrs["sensing_start_time_utc"]) - end_time = dt.datetime.fromisoformat(ds.attrs["sensing_end_time_utc"]) - platform_name = "AWS1" - processing_time = random_date(dt.datetime(2024, 9, 1, 13), dt.datetime(2030, 6, 1)) - filename = tmp_dir / compose(file_pattern, dict(country="SE", - organisation="SMHI", - location="Norrkoping", - processing_level="1B", - originator="SMHI", - start_time=start_time, end_time=end_time, - processing_time=processing_time, - platform_name=platform_name)) - ds.to_netcdf(filename) - return filename + tmpdir = tmp_path_factory.mktemp("aws_l1b_tests") + return create_mwr_file(tmpdir, fake_mwr_data_array, eps_sterna=False) @pytest.fixture(scope="module") From c7d5fe9602a99618a38ae1b5eb5e0de49770298e Mon Sep 17 00:00:00 2001 From: "Adam.Dybbroe" Date: Mon, 6 Jan 2025 19:04:22 +0100 Subject: [PATCH 304/340] Refactoring tests reusing the l1b file creation part also for level-1c Signed-off-by: Adam.Dybbroe --- satpy/tests/reader_tests/conftest.py | 92 ++++++------------- satpy/tests/reader_tests/test_aws1_mwr_l1b.py | 11 ++- .../reader_tests/test_eps_sterna_mwr_l1b.py | 6 +- 3 files changed, 39 insertions(+), 70 deletions(-) diff --git a/satpy/tests/reader_tests/conftest.py b/satpy/tests/reader_tests/conftest.py index a6459d96d2..ddb7bde8d2 100644 --- a/satpy/tests/reader_tests/conftest.py +++ b/satpy/tests/reader_tests/conftest.py @@ -59,21 +59,21 @@ def fake_mwr_data_array(): return xr.DataArray(fake_data_np, dims=array_dims) -def make_fake_angles(geo_size, geo_dims, shape=(10, 145, 4)): +def make_fake_angles(geo_size, geo_dims, shape): """Return fake sun-satellite angle array.""" maxval = 36000 dummy_array = (np.arange(0, geo_size) * maxval/geo_size).astype("int32") return xr.DataArray(dummy_array.reshape(shape), dims=geo_dims) -def make_fake_mwr_lonlats(geo_size, geo_dims): +def make_fake_mwr_lonlats(geo_size, geo_dims, shape): """Return fake geolocation data 
arrays for all 4 MWR horns.""" maxval = 3600000 dummy_array = (np.arange(0, geo_size) * maxval/geo_size).astype("int32") - fake_lon_data = xr.DataArray(dummy_array.reshape((10, 145, 4)), dims=geo_dims) + fake_lon_data = xr.DataArray(dummy_array.reshape(shape), dims=geo_dims) maxval = 1800000 dummy_array = (np.arange(0, geo_size) * maxval/geo_size - maxval/2).astype("int32") - fake_lat_data = xr.DataArray(dummy_array.reshape((10, 145, 4)), dims=geo_dims) + fake_lat_data = xr.DataArray(dummy_array.reshape(shape), dims=geo_dims) return (fake_lon_data, fake_lat_data) @@ -88,7 +88,7 @@ def make_fake_mwr_l1c_lonlats(geo_size, geo_dims): return (fake_lon_data, fake_lat_data) -def aws_eps_sterna_mwr_l1bfile(fake_mwr_data_array, eps_sterna=True): +def aws_eps_sterna_mwr_level1_file(fake_mwr_data_array, eps_sterna=True, l1b=True): """Create an AWS and EPS-Sterna MWR l1b file.""" if eps_sterna: n_feedhorns="n_feedhorns" @@ -101,8 +101,14 @@ def aws_eps_sterna_mwr_l1bfile(fake_mwr_data_array, eps_sterna=True): longitude_attr = "aws_lon" latitude_attr = "aws_lat" - geo_dims = ["n_scans", "n_fovs", n_feedhorns] - geo_size = 10 * 145 * 4 + if l1b: + geo_dims = ["n_scans", "n_fovs", n_feedhorns] + geo_size = 10 * 145 * 4 + shape = (10, 145, 4) + else: + geo_dims = ["n_scans", "n_fovs"] + geo_size = 10 * 145 + shape = (10, 145) ds = DataTree() start_time = dt.datetime(2024, 9, 1, 12, 0) @@ -110,8 +116,7 @@ def aws_eps_sterna_mwr_l1bfile(fake_mwr_data_array, eps_sterna=True): end_time = dt.datetime(2024, 9, 1, 12, 15) ds.attrs["sensing_end_time_utc"] = end_time.strftime(DATETIME_FORMAT) - instrument = "MWR" - ds.attrs["instrument"] = instrument + ds.attrs["instrument"] = "MWR" ds.attrs["orbit_start"] = 9991 ds.attrs["orbit_end"] = 9992 dset_name = f"data/calibration/{prefix}toa_brightness_temperature" @@ -122,27 +127,28 @@ def aws_eps_sterna_mwr_l1bfile(fake_mwr_data_array, eps_sterna=True): ds[dset_name].attrs["valid_min"] = 0 ds[dset_name].attrs["valid_max"] = 700000 - 
fake_lon_data, fake_lat_data = make_fake_mwr_lonlats(geo_size, geo_dims) + fake_lon_data, fake_lat_data = make_fake_mwr_lonlats(geo_size, geo_dims, shape) ds[f"data/navigation/{longitude_attr}"] = fake_lon_data ds[f"data/navigation/{longitude_attr}"].attrs["scale_factor"] = 1e-4 ds[f"data/navigation/{longitude_attr}"].attrs["add_offset"] = 0.0 ds[f"data/navigation/{latitude_attr}"] = fake_lat_data - ds[f"data/navigation/{prefix}solar_azimuth_angle"] = make_fake_angles(geo_size, geo_dims) - ds[f"data/navigation/{prefix}solar_zenith_angle"] = make_fake_angles(geo_size, geo_dims) - ds[f"data/navigation/{prefix}satellite_azimuth_angle"] = make_fake_angles(geo_size, geo_dims) - ds[f"data/navigation/{prefix}satellite_zenith_angle"] = make_fake_angles(geo_size, geo_dims) - ds["status/satellite/subsat_latitude_end"] = np.array(22.39) - ds["status/satellite/subsat_longitude_start"] = np.array(304.79) - ds["status/satellite/subsat_latitude_start"] = np.array(55.41) - ds["status/satellite/subsat_longitude_end"] = np.array(296.79) + ds[f"data/navigation/{prefix}solar_azimuth_angle"] = make_fake_angles(geo_size, geo_dims, shape) + ds[f"data/navigation/{prefix}solar_zenith_angle"] = make_fake_angles(geo_size, geo_dims, shape) + ds[f"data/navigation/{prefix}satellite_azimuth_angle"] = make_fake_angles(geo_size, geo_dims, shape) + ds[f"data/navigation/{prefix}satellite_zenith_angle"] = make_fake_angles(geo_size, geo_dims, shape) + if l1b: + ds["status/satellite/subsat_latitude_end"] = np.array(22.39) + ds["status/satellite/subsat_longitude_start"] = np.array(304.79) + ds["status/satellite/subsat_latitude_start"] = np.array(55.41) + ds["status/satellite/subsat_longitude_end"] = np.array(296.79) return ds -def create_mwr_file(tmpdir, data_array, eps_sterna=False): - """Create an AWS or EPS-Sterna MWR l1b file.""" - ds = aws_eps_sterna_mwr_l1bfile(data_array, eps_sterna=eps_sterna) +def create_mwr_file(tmpdir, data_array, eps_sterna=False, l1b=True): + """Create an AWS or EPS-Sterna 
MWR l1b (or level-1c) file.""" + ds = aws_eps_sterna_mwr_level1_file(data_array, eps_sterna=eps_sterna, l1b=l1b) start_time = dt.datetime.fromisoformat(ds.attrs["sensing_start_time_utc"]) end_time = dt.datetime.fromisoformat(ds.attrs["sensing_end_time_utc"]) if eps_sterna: @@ -179,48 +185,8 @@ def aws_mwr_file(tmp_path_factory, fake_mwr_data_array): @pytest.fixture(scope="module") def aws_mwr_l1c_file(tmp_path_factory, fake_mwr_data_array): """Create an AWS MWR l1c file.""" - geo_dims = ["n_scans", "n_fovs"] - geo_size = 10 * 145 - - ds = DataTree() - start_time = dt.datetime(2024, 9, 1, 12, 0) - ds.attrs["sensing_start_time_utc"] = start_time.strftime(DATETIME_FORMAT) - end_time = dt.datetime(2024, 9, 1, 12, 15) - ds.attrs["sensing_end_time_utc"] = end_time.strftime(DATETIME_FORMAT) - processing_time = random_date(dt.datetime(2024, 6, 1), dt.datetime(2030, 6, 1)) - - ds.attrs["instrument"] = "MWR" - ds.attrs["orbit_start"] = 9991 - ds.attrs["orbit_end"] = 9992 - ds["data/calibration/aws_toa_brightness_temperature"] = fake_mwr_data_array - ds["data/calibration/aws_toa_brightness_temperature"].attrs["scale_factor"] = 0.001 - ds["data/calibration/aws_toa_brightness_temperature"].attrs["add_offset"] = 0.0 - ds["data/calibration/aws_toa_brightness_temperature"].attrs["missing_value"] = -2147483648 - ds["data/calibration/aws_toa_brightness_temperature"].attrs["valid_min"] = 0 - ds["data/calibration/aws_toa_brightness_temperature"].attrs["valid_max"] = 700000 - - fake_lon_data, fake_lat_data = make_fake_mwr_l1c_lonlats(geo_size, geo_dims) - - ds["data/navigation/aws_lon"] = fake_lon_data - ds["data/navigation/aws_lon"].attrs["scale_factor"] = 1e-4 - ds["data/navigation/aws_lon"].attrs["add_offset"] = 0.0 - ds["data/navigation/aws_lat"] = fake_lat_data - ds["data/navigation/aws_solar_azimuth_angle"] = make_fake_angles(geo_size, geo_dims, shape=(10, 145)) - ds["data/navigation/aws_solar_zenith_angle"] = make_fake_angles(geo_size, geo_dims, shape=(10, 145)) - 
ds["data/navigation/aws_satellite_azimuth_angle"] = make_fake_angles(geo_size, geo_dims, shape=(10, 145)) - ds["data/navigation/aws_satellite_zenith_angle"] = make_fake_angles(geo_size, geo_dims, shape=(10, 145)) - - tmp_dir = tmp_path_factory.mktemp("aws_l1c_tests") - filename = tmp_dir / compose(file_pattern, dict(country="SE", - organisation="SMHI", - location="Norrkoping", - processing_level="1C", - originator="SMHI", - start_time=start_time, end_time=end_time, - processing_time=processing_time, - platform_name=platform_name)) - ds.to_netcdf(filename) - return filename + tmpdir = tmp_path_factory.mktemp("aws_l1c_tests") + return create_mwr_file(tmpdir, fake_mwr_data_array, eps_sterna=False, l1b=False) @pytest.fixture(scope="module") diff --git a/satpy/tests/reader_tests/test_aws1_mwr_l1b.py b/satpy/tests/reader_tests/test_aws1_mwr_l1b.py index 59c429cca5..e388b6186e 100644 --- a/satpy/tests/reader_tests/test_aws1_mwr_l1b.py +++ b/satpy/tests/reader_tests/test_aws1_mwr_l1b.py @@ -13,11 +13,12 @@ geo_dims = ["n_scans", "n_fovs", "n_geo_groups"] geo_size = 10*145*4 -fake_lon_data, fake_lat_data = make_fake_mwr_lonlats(geo_size, geo_dims) -fake_sun_azi_data = make_fake_angles(geo_size, geo_dims) -fake_sun_zen_data = make_fake_angles(geo_size, geo_dims) -fake_sat_azi_data = make_fake_angles(geo_size, geo_dims) -fake_sat_zen_data = make_fake_angles(geo_size, geo_dims) +shape = (10, 145, 4) +fake_lon_data, fake_lat_data = make_fake_mwr_lonlats(geo_size, geo_dims, shape) +fake_sun_azi_data = make_fake_angles(geo_size, geo_dims, shape) +fake_sun_zen_data = make_fake_angles(geo_size, geo_dims, shape) +fake_sat_azi_data = make_fake_angles(geo_size, geo_dims, shape) +fake_sat_zen_data = make_fake_angles(geo_size, geo_dims, shape) diff --git a/satpy/tests/reader_tests/test_eps_sterna_mwr_l1b.py b/satpy/tests/reader_tests/test_eps_sterna_mwr_l1b.py index bd9be5b694..43abdf74d4 100644 --- a/satpy/tests/reader_tests/test_eps_sterna_mwr_l1b.py +++ 
b/satpy/tests/reader_tests/test_eps_sterna_mwr_l1b.py @@ -1,7 +1,7 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -# Copyright (c) 2024 Satpy developers +# Copyright (c) 2024, 2025 Satpy developers # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -27,7 +27,9 @@ geo_dims = ["n_scans", "n_fovs", "n_feedhorns"] geo_size = 10*145*4 -fake_lon_data, fake_lat_data = make_fake_mwr_lonlats(geo_size, geo_dims) +shape = (10, 145, 4) +fake_lon_data, fake_lat_data = make_fake_mwr_lonlats(geo_size, geo_dims, shape) + @pytest.mark.parametrize(("id_name", "file_key", "fake_array"), [("longitude", "data/navigation/longitude", fake_lon_data * 1e-4), From 95a86147ccf48311a8d520ceba3ae6e55e3c4f1e Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 6 Jan 2025 23:22:16 +0000 Subject: [PATCH 305/340] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.8.1 → v0.8.6](https://github.com/astral-sh/ruff-pre-commit/compare/v0.8.1...v0.8.6) - [github.com/pre-commit/mirrors-mypy: v1.13.0 → v1.14.1](https://github.com/pre-commit/mirrors-mypy/compare/v1.13.0...v1.14.1) --- .pre-commit-config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index ff8c9568c9..790d6a925c 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -3,7 +3,7 @@ fail_fast: false repos: - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. 
- rev: 'v0.8.1' + rev: 'v0.8.6' hooks: - id: ruff - repo: https://github.com/pre-commit/pre-commit-hooks @@ -19,7 +19,7 @@ repos: - id: bandit args: [--ini, .bandit] - repo: https://github.com/pre-commit/mirrors-mypy - rev: 'v1.13.0' # Use the sha / tag you want to point at + rev: 'v1.14.1' # Use the sha / tag you want to point at hooks: - id: mypy additional_dependencies: From d4654d157e7e4b4eabebea41f6f357eca0bea123 Mon Sep 17 00:00:00 2001 From: "Adam.Dybbroe" Date: Tue, 7 Jan 2025 09:05:38 +0100 Subject: [PATCH 306/340] Fix right processing level in tmp file name Signed-off-by: Adam.Dybbroe --- satpy/tests/reader_tests/conftest.py | 9 ++++----- satpy/tests/reader_tests/test_aws1_mwr_l1c.py | 2 +- 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/satpy/tests/reader_tests/conftest.py b/satpy/tests/reader_tests/conftest.py index ddb7bde8d2..f4b9844851 100644 --- a/satpy/tests/reader_tests/conftest.py +++ b/satpy/tests/reader_tests/conftest.py @@ -151,16 +151,15 @@ def create_mwr_file(tmpdir, data_array, eps_sterna=False, l1b=True): ds = aws_eps_sterna_mwr_level1_file(data_array, eps_sterna=eps_sterna, l1b=l1b) start_time = dt.datetime.fromisoformat(ds.attrs["sensing_start_time_utc"]) end_time = dt.datetime.fromisoformat(ds.attrs["sensing_end_time_utc"]) - if eps_sterna: - platform_name = "ST01" - else: - platform_name = "AWS1" + + platform_name = "ST01" if eps_sterna else "AWS1" + processing_level = "1B" if l1b else "1C" processing_time = random_date(dt.datetime(2024, 9, 1, 13), dt.datetime(2030, 6, 1)) filename = tmpdir / compose(file_pattern, dict(country="XX", organisation="EUMETSAT", location="Darmstadt", - processing_level="1B", + processing_level=processing_level, originator="EUMT", start_time=start_time, end_time=end_time, processing_time=processing_time, diff --git a/satpy/tests/reader_tests/test_aws1_mwr_l1c.py b/satpy/tests/reader_tests/test_aws1_mwr_l1c.py index 7ebca463fa..10c499838d 100644 --- 
a/satpy/tests/reader_tests/test_aws1_mwr_l1c.py +++ b/satpy/tests/reader_tests/test_aws1_mwr_l1c.py @@ -1,7 +1,7 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- -# Copyright (c) 2024 Satpy developers +# Copyright (c) 2024-2025 Satpy developers # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by From b3e3b31f7a5b5d32622067ecb2daf97217a1bba6 Mon Sep 17 00:00:00 2001 From: Gerrit Holl Date: Tue, 7 Jan 2025 13:52:16 +0100 Subject: [PATCH 307/340] name should be standard_name --- satpy/etc/enhancements/generic.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/etc/enhancements/generic.yaml b/satpy/etc/enhancements/generic.yaml index 01668aaf5e..38592c232f 100644 --- a/satpy/etc/enhancements/generic.yaml +++ b/satpy/etc/enhancements/generic.yaml @@ -1241,7 +1241,7 @@ enhancements: operations: [] essl_colorized_low_level_moisture: - name: essl_colorized_low_level_moisture + standard_name: essl_colorized_low_level_moisture operations: - name: colorize method: !!python/name:satpy.enhancements.colorize From d86ee60bd8a772250e11f627bff3c356eee1f32e Mon Sep 17 00:00:00 2001 From: Gerrit Holl Date: Wed, 8 Jan 2025 09:21:35 +0100 Subject: [PATCH 308/340] change standard_name to name in enhancement Change standard_name back to name in the enhancement, or satpy won't find the correct enhancement and won't use it. 
--- satpy/etc/enhancements/generic.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/etc/enhancements/generic.yaml b/satpy/etc/enhancements/generic.yaml index 38592c232f..01668aaf5e 100644 --- a/satpy/etc/enhancements/generic.yaml +++ b/satpy/etc/enhancements/generic.yaml @@ -1241,7 +1241,7 @@ enhancements: operations: [] essl_colorized_low_level_moisture: - standard_name: essl_colorized_low_level_moisture + name: essl_colorized_low_level_moisture operations: - name: colorize method: !!python/name:satpy.enhancements.colorize From a008f9f1914f326cfe21842c1bd5fbce8242ddd0 Mon Sep 17 00:00:00 2001 From: clement laplace Date: Fri, 10 Jan 2025 10:47:13 +0000 Subject: [PATCH 309/340] correction : Apllied corrections asked in this MR https://github.com/pytroll/satpy/pull/2895 --- satpy/composites/lightning.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/satpy/composites/lightning.py b/satpy/composites/lightning.py index a7931027b9..4f72ffd7a1 100644 --- a/satpy/composites/lightning.py +++ b/satpy/composites/lightning.py @@ -18,7 +18,6 @@ """Composite classes for the LI instrument.""" import logging -import sys import numpy as np import xarray as xr @@ -38,16 +37,15 @@ class LightningTimeCompositor(CompositeBase): """ def __init__(self, name, prerequisites=None, optional_prerequisites=None, **kwargs): """Initialisation of the class.""" - self.name = name super().__init__(name, prerequisites, optional_prerequisites, **kwargs) # Get the time_range which is in minute self.time_range = self.attrs["time_range"] self.standard_name = self.attrs["standard_name"] - self.reference_time = self.attrs["reference_time"] + self.reference_time_attr = self.attrs["reference_time"] - def _normalize_time(self,data:xr.DataArray,attrs:dict)->xr.DataArray: - """Normalised the time in the range between [end_time,end_time - time_range]. 
+ def _normalize_time(self, data:xr.DataArray, attrs:dict) -> xr.DataArray: + """Normalize the time in the range between [end_time, end_time - time_range]. The range of the normalised data is between 0 and 1 where 0 corresponds to the date end_time - time_range and 1 to the end_time. Where end_times represent the latest lightning event and time_range is the range of @@ -61,7 +59,7 @@ def _normalize_time(self,data:xr.DataArray,attrs:dict)->xr.DataArray: xr.DataArray: Normalised time """ # Compute the maximum time value - end_time = np.array(np.datetime64(data.attrs[self.reference_time])) + end_time = np.array(np.datetime64(data.attrs[self.reference_time_attr])) # Compute the minimum time value based on the time range begin_time = end_time - np.timedelta64(self.time_range, "m") # Drop values that are bellow begin_time @@ -69,11 +67,13 @@ def _normalize_time(self,data:xr.DataArray,attrs:dict)->xr.DataArray: # exit if data is empty afer filtering if data.size == 0 : LOG.error(f"All the flash_age events happened before {begin_time}") - sys.exit(1) + raise ValueError(f"Invalid data: data size is zero. All flash_age \ + events occurred before the specified start time ({begin_time})." 
+ ) # Normalize the time values normalized_data = (data - begin_time) / (end_time - begin_time) # Ensure the result is still an xarray.DataArray - return xr.DataArray(normalized_data, dims=data.dims, coords=data.coords,attrs=attrs) + return xr.DataArray(normalized_data, dims=data.dims, coords=data.coords, attrs=attrs) @staticmethod @@ -92,7 +92,7 @@ def _redefine_metadata(self,attrs:dict)->dict: dict: atualised attributes """ attrs["name"] = self.standard_name - attrs["standard_name"] =self.standard_name + attrs["standard_name"] = self.standard_name # Attributes to describe the values range return attrs @@ -103,4 +103,4 @@ def __call__(self,projectables, nonprojectables=None, **attrs): new_attrs = data.attrs.copy() self._update_missing_metadata(new_attrs, attrs) new_attrs = self._redefine_metadata(new_attrs) - return self._normalize_time(data,new_attrs) + return self._normalize_time(data, new_attrs) From 9d432aba54e0ad561a6090b7feb442c0129d46c1 Mon Sep 17 00:00:00 2001 From: Johan Strandgren Date: Mon, 13 Jan 2025 13:44:08 +0000 Subject: [PATCH 310/340] Remove leading white space in flag_meanings entry in fci_l2_nc.yaml --- satpy/etc/readers/fci_l2_nc.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/etc/readers/fci_l2_nc.yaml b/satpy/etc/readers/fci_l2_nc.yaml index 6a465f3f7a..ec15013555 100644 --- a/satpy/etc/readers/fci_l2_nc.yaml +++ b/satpy/etc/readers/fci_l2_nc.yaml @@ -1129,7 +1129,7 @@ datasets: nc_key: cloud_mask_test_result extract_byte: 0 flag_values: [0,1] - flag_meanings: ['No snow/ice detected',' Snow/ice detected'] + flag_meanings: ['No snow/ice detected','Snow/ice detected'] cloud_test_cmt1: name: cloud_test_cmt1 From 0b9760c4ecee4d47642cb4e83bb78e2099b36b61 Mon Sep 17 00:00:00 2001 From: "Adam.Dybbroe" Date: Mon, 13 Jan 2025 15:08:19 +0100 Subject: [PATCH 311/340] Improvements following review comments. 
Signed-off-by: Adam.Dybbroe --- satpy/etc/composites/microwave.yaml | 1 + satpy/etc/composites/mwr.yaml | 2 +- satpy/etc/readers/aws1_mwr_l1b_nc.yaml | 5 +- satpy/etc/readers/eps_sterna_mwr_l1b_nc.yaml | 1 + satpy/readers/mwr_l1b.py | 54 +++++++++++++------- satpy/readers/mwr_l1c.py | 25 ++++++++- satpy/tests/reader_tests/conftest.py | 5 +- 7 files changed, 71 insertions(+), 22 deletions(-) create mode 100644 satpy/etc/composites/microwave.yaml diff --git a/satpy/etc/composites/microwave.yaml b/satpy/etc/composites/microwave.yaml new file mode 100644 index 0000000000..9cc6789cbd --- /dev/null +++ b/satpy/etc/composites/microwave.yaml @@ -0,0 +1 @@ +sensor_name: microwave diff --git a/satpy/etc/composites/mwr.yaml b/satpy/etc/composites/mwr.yaml index d959faa632..5f10986d4c 100644 --- a/satpy/etc/composites/mwr.yaml +++ b/satpy/etc/composites/mwr.yaml @@ -1,4 +1,4 @@ -sensor_name: mwr +sensor_name: microwave/mwr composites: mw183_humidity: diff --git a/satpy/etc/readers/aws1_mwr_l1b_nc.yaml b/satpy/etc/readers/aws1_mwr_l1b_nc.yaml index 4b3cccc648..f487d58265 100644 --- a/satpy/etc/readers/aws1_mwr_l1b_nc.yaml +++ b/satpy/etc/readers/aws1_mwr_l1b_nc.yaml @@ -527,7 +527,10 @@ file_types: aws_l1b_nc: # W_XX-OHB-Unknown,SAT,1-AWS-1B-RAD_C_OHB_20230707124607_G_D_20220621090100_20220621090618_T_B____.nc # W_XX-OHB-Stockholm,SAT,AWS1-MWR-1B-RAD_C_OHB_20230823161321_G_D_20240115111111_20240115125434_T_B____.nc + # W_NO-KSAT-Tromso,SAT,AWS1-MWR-1B-RAD_C_OHB__20250110134851_G_O_20250110114708_20250110132329_C_N____.nc file_reader: !!python/name:satpy.readers.mwr_l1b.AWS_EPS_Sterna_MWR_L1BFile file_patterns: [ - 'W_{country:2s}-{organisation:s}-{location:s},SAT,{platform_name}-MWR-1B-RAD_C_{originator:4s}_{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{disposition_mode:1s}_{processing_mode:1s}____.nc' + 
'W_{country:2s}-{organisation:s}-{location:s},SAT,{platform_name}-MWR-1B-RAD_C_{originator:4s}_{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{disposition_mode:1s}_{processing_mode:1s}____.nc', + 'W_{country:2s}-{organisation:s}-{location:s},SAT,{platform_name}-MWR-1B-RAD_C_{originator:4s}_{processing_time:%Y%m%d%H%M%S}_G_O_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{disposition_mode:1s}_{processing_mode:1s}____.nc' ] + feed_horn_group_name: n_geo_groups diff --git a/satpy/etc/readers/eps_sterna_mwr_l1b_nc.yaml b/satpy/etc/readers/eps_sterna_mwr_l1b_nc.yaml index fe5bea93b2..d08113270a 100644 --- a/satpy/etc/readers/eps_sterna_mwr_l1b_nc.yaml +++ b/satpy/etc/readers/eps_sterna_mwr_l1b_nc.yaml @@ -530,3 +530,4 @@ file_types: file_patterns: [ 'W_{country:2s}-{organisation:s}-{location:s},SAT,{platform_name}-MWR-1B-RAD_C_{originator:4s}_{processing_time:%Y%m%d%H%M%S}_G_D_{start_time:%Y%m%d%H%M%S}_{end_time:%Y%m%d%H%M%S}_{disposition_mode:1s}_{processing_mode:1s}____.nc' ] + feed_horn_group_name: n_feedhorns diff --git a/satpy/readers/mwr_l1b.py b/satpy/readers/mwr_l1b.py index 7f77f6dbf9..173c729ed8 100644 --- a/satpy/readers/mwr_l1b.py +++ b/satpy/readers/mwr_l1b.py @@ -1,4 +1,4 @@ -# Copyright (c) 2023, 2024 Pytroll Developers +# Copyright (c) 2023 - 2025 Pytroll Developers # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -23,6 +23,36 @@ Sample EPS-Sterna l1b format AWS data from 16 orbits the 9th of November 2024. +Continous feed (though restricted to the SAG members and selected European +users/evaluators) in the EUMETSAT Data Store of global AWS data from January +9th, 2025. + +Example: +-------- +Here is an example how to read the data in satpy: + +.. 
code-block:: python + + from satpy import Scene + from glob import glob + + filenames = glob("data/W_NO-KSAT-Tromso,SAT,AWS1-MWR-1B-RAD_C_OHB__*_G_O_20250110114708*.nc" + scn = Scene(filenames=filenames, reader='aws1_mwr_l1b_nc') + + composites = ['mw183_humidity'] + dataset_names = composites + ['1'] + + scn.load(dataset_names) + print(scn['1']) + scn.show('mw183_humidity') + + +As the file format for the EPS Sterna Level-1b is slightly different from the +ESA format, reading the EPS Sterna level-1b data uses a different reader, named +`eps_sterna_mwr_l1b_nc`. So, if specifying the reader name as in the above code +example, please provide the actual name for that data: eps_sterna_mwr_l1b_nc. + + """ import xarray as xr @@ -46,7 +76,9 @@ "satellite_azimuth_horn1", "satellite_azimuth_horn2", "satellite_azimuth_horn3", - "satellite_azimuth_horn4"] + "satellite_azimuth_horn4", + "longitude", + "latitude"] class AWS_EPS_Sterna_BaseFileHandler(NetCDF4FileHandler): """Base class implementing the AWS/EPS-Sterna MWR Level-1b&c Filehandlers.""" @@ -103,23 +135,12 @@ def _get_channel_data(self, dataset_id, dataset_info): class AWS_EPS_Sterna_MWR_L1BFile(AWS_EPS_Sterna_BaseFileHandler): - """Class implementing the AWS/EPS-Sterna MWR L1b Filehandler. + """Class implementing the AWS/EPS-Sterna MWR L1b Filehandler.""" - This class implements the ESA Arctic Weather Satellite (AWS) and EPS-Sterna - MWR Level-1b NetCDF reader. It is designed to be used through the - :class:`~satpy.Scene` class using the :mod:`~satpy.Scene.load` method with - the reader ``"mwr_l1b_nc"``. 
- - """ def __init__(self, filename, filename_info, filetype_info, auto_maskandscale=True): """Initialize the handler.""" super().__init__(filename, filename_info, filetype_info, auto_maskandscale) - - if filetype_info["file_type"].startswith("eps_sterna"): - self._feed_horn_group_name = "n_feedhorns" - else: - self._feed_horn_group_name = "n_geo_groups" - + self._feed_horn_group_name = filetype_info.get("feed_horn_group_name") @property def sub_satellite_longitude_start(self): @@ -147,9 +168,6 @@ def get_dataset(self, dataset_id, dataset_info): data_array = self._get_channel_data(dataset_id, dataset_info) elif dataset_id["name"] in NAVIGATION_DATASET_NAMES: data_array = self._get_navigation_data(dataset_id, dataset_info) - - elif dataset_id["name"] in ["longitude", "latitude"]: - data_array = self._get_navigation_data(dataset_id, dataset_info) else: raise NotImplementedError(f"Dataset {dataset_id['name']} not available or not supported yet!") diff --git a/satpy/readers/mwr_l1c.py b/satpy/readers/mwr_l1c.py index 0182b1d77c..3d429fd8f5 100644 --- a/satpy/readers/mwr_l1c.py +++ b/satpy/readers/mwr_l1c.py @@ -1,4 +1,4 @@ -# Copyright (c) 2024 Pytroll Developers +# Copyright (c) 2024 - 2025 Pytroll Developers # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by @@ -17,8 +17,31 @@ MWR = Microwave Radiometer, onboard AWS and EPS-Sterna Sample data provided by ESA September 27, 2024. + + +Example: +-------- +Here is an example how to read the data in satpy: + +.. 
code-block:: python + + from satpy import Scene + from glob import glob + + filenames = glob("data/W_XX-OHB-Stockholm,SAT,AWS1-MWR-1C-RAD_C_OHB_*20240913204851_*.nc") + + scn = Scene(filenames=filenames, reader='aws1_mwr_l1c_nc') + + composites = ['mw183_humidity'] + dataset_names = composites + ['1'] + + scn.load(dataset_names) + print(scn['1']) + scn.show('mw183_humidity') + """ + from satpy.readers.mwr_l1b import MWR_CHANNEL_NAMES, AWS_EPS_Sterna_BaseFileHandler, mask_and_scale diff --git a/satpy/tests/reader_tests/conftest.py b/satpy/tests/reader_tests/conftest.py index f4b9844851..00742574ef 100644 --- a/satpy/tests/reader_tests/conftest.py +++ b/satpy/tests/reader_tests/conftest.py @@ -48,7 +48,7 @@ def random_date(start, end): return start + dt.timedelta(seconds=random_second) -@pytest.fixture(scope="session") +@pytest.fixture(scope="module") def fake_mwr_data_array(): """Return a fake AWS/EPS-Sterna MWR l1b data array.""" fake_data_np = rng.integers(0, 700000, size=10*145*19).reshape((10, 145, 19)) @@ -194,6 +194,7 @@ def eps_sterna_mwr_handler(eps_sterna_mwr_file): filename_info = parse(file_pattern, os.path.basename(eps_sterna_mwr_file)) filetype_info = dict() filetype_info["file_type"] = "eps_sterna_mwr_l1b" + filetype_info["feed_horn_group_name"] = "n_feedhorns" return AWS_EPS_Sterna_MWR_L1BFile(eps_sterna_mwr_file, filename_info, filetype_info) @@ -203,6 +204,7 @@ def aws_mwr_handler(aws_mwr_file): filename_info = parse(file_pattern, os.path.basename(aws_mwr_file)) filetype_info = dict() filetype_info["file_type"] = "aws1_mwr_l1b" + filetype_info["feed_horn_group_name"] = "n_geo_groups" return AWS_EPS_Sterna_MWR_L1BFile(aws_mwr_file, filename_info, filetype_info) @@ -212,4 +214,5 @@ def aws_mwr_l1c_handler(aws_mwr_l1c_file): filename_info = parse(file_pattern, os.path.basename(aws_mwr_l1c_file)) filetype_info = dict() filetype_info["file_type"] = "aws1_mwr_l1c" + filetype_info["feed_horn_group_name"] = None return AWS_MWR_L1CFile(aws_mwr_l1c_file, 
filename_info, filetype_info) From 6c185de2426d228eb7359349f18efadbd91171f1 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Mon, 13 Jan 2025 13:23:03 -0600 Subject: [PATCH 312/340] Fix sdist tarball including unnecessary files --- MANIFEST.in | 17 ----------------- pyproject.toml | 9 +++++++++ 2 files changed, 9 insertions(+), 17 deletions(-) delete mode 100644 MANIFEST.in diff --git a/MANIFEST.in b/MANIFEST.in deleted file mode 100644 index 05c921b367..0000000000 --- a/MANIFEST.in +++ /dev/null @@ -1,17 +0,0 @@ -prune * -exclude * -graft doc -recursive-exclude doc/build * -graft satpy -include LICENSE.txt -include README.rst -include AUTHORS.md -include CHANGELOG.md -include SECURITY.md -include CITATION -include satpy/version.py -include pyproject.toml -include setup.py -include setup.cfg -include satpy/py.typed -global-exclude *.py[cod] diff --git a/pyproject.toml b/pyproject.toml index 9ed0eda02d..b47e292f22 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -112,6 +112,15 @@ build-backend = "hatchling.build" [tool.hatch.metadata] allow-direct-references = true +[tool.hatch.build.targets.sdist] +only-include = [ + "satpy", + "AUTHORS.md", + "CHANGELOG.md", + "SECURITY.md", + "CITATION", +] + [tool.hatch.build.targets.wheel] packages = ["satpy"] From 7676013dc8ad35fcf25abb3491ec022032a5a087 Mon Sep 17 00:00:00 2001 From: clement laplace Date: Tue, 14 Jan 2025 10:05:40 +0000 Subject: [PATCH 313/340] test: Correct the test_lightning.py::test_empty_array_error --- satpy/composites/lightning.py | 4 ++-- .../tests/compositor_tests/test_lightning.py | 22 +++++++++++-------- 2 files changed, 15 insertions(+), 11 deletions(-) diff --git a/satpy/composites/lightning.py b/satpy/composites/lightning.py index 4f72ffd7a1..be4d0c769b 100644 --- a/satpy/composites/lightning.py +++ b/satpy/composites/lightning.py @@ -67,8 +67,8 @@ def _normalize_time(self, data:xr.DataArray, attrs:dict) -> xr.DataArray: # exit if data is empty afer filtering if data.size == 0 : 
LOG.error(f"All the flash_age events happened before {begin_time}") - raise ValueError(f"Invalid data: data size is zero. All flash_age \ - events occurred before the specified start time ({begin_time})." + raise ValueError(f"Invalid data: data size is zero. All flash_age " + f"events occurred before the specified start time ({begin_time})." ) # Normalize the time values normalized_data = (data - begin_time) / (end_time - begin_time) diff --git a/satpy/tests/compositor_tests/test_lightning.py b/satpy/tests/compositor_tests/test_lightning.py index 4c1f8b9a8c..6b1dab7b4c 100644 --- a/satpy/tests/compositor_tests/test_lightning.py +++ b/satpy/tests/compositor_tests/test_lightning.py @@ -20,9 +20,9 @@ import datetime import logging -from unittest import mock import numpy as np +import pytest import xarray as xr from satpy.composites.lightning import LightningTimeCompositor @@ -66,8 +66,9 @@ def test_empty_array_error(caplog): time_range=60, reference_time="end_time") attrs_flash_age = {"variable_name": "flash_time","name": "flash_time", - "start_time": datetime.datetime(2024, 8, 1, 10, 50, 0), - "end_time": datetime.datetime(2024, 8, 1, 11, 0, 0),"reader": "li_l2_nc"} + "start_time": np.datetime64(datetime.datetime(2024, 8, 1, 10, 0, 0)), + "end_time": datetime.datetime(2024, 8, 1, 11, 0, 0), + "reader": "li_l2_nc"} flash_age_value = np.array(["2024-08-01T09:00:00"], dtype="datetime64[ns]") flash_age = xr.DataArray( flash_age_value, @@ -75,14 +76,17 @@ def test_empty_array_error(caplog): coords={ "crs": "8B +proj=longlat +ellps=WGS84 +type=crs" },attrs = attrs_flash_age,name="flash_time") - with mock.patch("sys.exit") as mock_exit: - # Capture logging output - with caplog.at_level(logging.ERROR): + with caplog.at_level(logging.ERROR): + # Simulate the operation that raises the exception + with pytest.raises(ValueError, match="data size is zero") as excinfo: _ = comp([flash_age]) - mock_exit.assert_called_once_with(1) - - assert "All the flash_age events happened before 
2024-08-01T10:00:00" in caplog.text + # Assert the exception message + assert str(excinfo.value) == ( + f"Invalid data: data size is zero. All flash_age events occurred before " + f"the specified start time ({attrs_flash_age['start_time']})." + ) + assert "All the flash_age events happened before 2024-08-01T10:00:00" in caplog.text def test_update_missing_metadata(): """Test the _update_missing_metadata method.""" From 14dd1b8b7966357d0223973c04f10592ce5eb66c Mon Sep 17 00:00:00 2001 From: aoun Date: Tue, 14 Jan 2025 15:25:53 +0000 Subject: [PATCH 314/340] Fix the handling of AMVs unit to units by applying suggestion in #2898 --- satpy/readers/fci_l2_nc.py | 17 +++++++++-------- 1 file changed, 9 insertions(+), 8 deletions(-) diff --git a/satpy/readers/fci_l2_nc.py b/satpy/readers/fci_l2_nc.py index 8971eb4996..579e66a5f7 100644 --- a/satpy/readers/fci_l2_nc.py +++ b/satpy/readers/fci_l2_nc.py @@ -81,14 +81,16 @@ def _get_global_attributes(self): } return attributes - def _set_attributes(self, variable, dataset_info, segmented=False): + def _set_attributes(self, variable, dataset_info, product_type="pixel"): """Set dataset attributes.""" - if segmented: - xdim, ydim = "number_of_FoR_cols", "number_of_FoR_rows" - else: + if product_type == "pixel": xdim, ydim = "number_of_columns", "number_of_rows" + elif product_type == "segmented": + xdim, ydim = "number_of_FoR_cols", "number_of_FoR_rows" - if dataset_info["nc_key"] not in ["product_quality", "product_completeness", "product_timeliness"]: + if product_type in ["pixel", "segmented"] and dataset_info["nc_key"] not in ["product_quality", + "product_completeness", + "product_timeliness"]: variable = variable.swap_dims({ydim: "y", xdim: "x"}) variable.attrs.setdefault("units", None) @@ -382,7 +384,7 @@ def get_dataset(self, dataset_id, dataset_info): if "fill_value" in dataset_info: variable = self._mask_data(variable, dataset_info["fill_value"]) - variable = self._set_attributes(variable, dataset_info, segmented=True) 
+ variable = self._set_attributes(variable, dataset_info, product_type="segmented") return variable @@ -489,7 +491,6 @@ def get_dataset(self, dataset_id, dataset_info): return None # Manage the attributes of the dataset - variable.attrs.update(dataset_info) - variable.attrs.update(self._get_global_attributes()) + variable = self._set_attributes(variable, dataset_info, product_type="amv") return variable From b1b587ca279015eb24fb0dc7d026d4aca5d7e131 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Tue, 14 Jan 2025 10:28:12 -0600 Subject: [PATCH 315/340] Add doc directory to sdist --- pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/pyproject.toml b/pyproject.toml index b47e292f22..ada8ddf619 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -115,6 +115,7 @@ allow-direct-references = true [tool.hatch.build.targets.sdist] only-include = [ "satpy", + "doc", "AUTHORS.md", "CHANGELOG.md", "SECURITY.md", From b5a27be7cb32e783d29148029a7b89a582030ffc Mon Sep 17 00:00:00 2001 From: aoun Date: Wed, 15 Jan 2025 13:07:32 +0000 Subject: [PATCH 316/340] Move the if condition for pixel and segmented above to avoid going trhough the loop when reading amvs --- satpy/readers/fci_l2_nc.py | 19 ++++++++++--------- 1 file changed, 10 insertions(+), 9 deletions(-) diff --git a/satpy/readers/fci_l2_nc.py b/satpy/readers/fci_l2_nc.py index 579e66a5f7..1179ca07e9 100644 --- a/satpy/readers/fci_l2_nc.py +++ b/satpy/readers/fci_l2_nc.py @@ -83,15 +83,16 @@ def _get_global_attributes(self): def _set_attributes(self, variable, dataset_info, product_type="pixel"): """Set dataset attributes.""" - if product_type == "pixel": - xdim, ydim = "number_of_columns", "number_of_rows" - elif product_type == "segmented": - xdim, ydim = "number_of_FoR_cols", "number_of_FoR_rows" - - if product_type in ["pixel", "segmented"] and dataset_info["nc_key"] not in ["product_quality", - "product_completeness", - "product_timeliness"]: - variable = variable.swap_dims({ydim: "y", xdim: "x"}) + if 
product_type in ["pixel", "segmented"]: + if product_type == "pixel": + xdim, ydim = "number_of_columns", "number_of_rows" + elif product_type == "segmented": + xdim, ydim = "number_of_FoR_cols", "number_of_FoR_rows" + + if dataset_info["nc_key"] not in ["product_quality", + "product_completeness", + "product_timeliness"]: + variable = variable.swap_dims({ydim: "y", xdim: "x"}) variable.attrs.setdefault("units", None) if "unit" in variable.attrs: From ee12792495461f19321a1e65435e3c2091040ac5 Mon Sep 17 00:00:00 2001 From: aoun Date: Wed, 15 Jan 2025 13:45:46 +0000 Subject: [PATCH 317/340] Add a default values to avoid breaking the default behavior --- satpy/readers/fci_l2_nc.py | 52 ++++++++++++++++++++++---------------- 1 file changed, 30 insertions(+), 22 deletions(-) diff --git a/satpy/readers/fci_l2_nc.py b/satpy/readers/fci_l2_nc.py index 1179ca07e9..77b92f82f4 100644 --- a/satpy/readers/fci_l2_nc.py +++ b/satpy/readers/fci_l2_nc.py @@ -60,7 +60,7 @@ def ssp_lon(self): f"of {SSP_DEFAULT} degrees east instead") return SSP_DEFAULT - def _get_global_attributes(self): + def _get_global_attributes(self,product_type="pixel"): """Create a dictionary of global attributes to be added to all datasets. 
Returns: @@ -70,15 +70,23 @@ def _get_global_attributes(self): ssp_lon: longitude of subsatellite point sensor: name of sensor platform_name: name of the platform + Only for AMVS product: + channel: channel at which the AMVs have been retrieved + """ attributes = { "filename": self.filename, "spacecraft_name": self.spacecraft_name, - "ssp_lon": self.ssp_lon, "sensor": self.sensor_name, "platform_name": self.spacecraft_name, } + + if product_type=="amv": + attributes["channel"] = self.filename_info["channel"] + else: + attributes["ssp_lon"] = self.ssp_lon + return attributes def _set_attributes(self, variable, dataset_info, product_type="pixel"): @@ -101,7 +109,7 @@ def _set_attributes(self, variable, dataset_info, product_type="pixel"): del variable.attrs["unit"] variable.attrs.update(dataset_info) - variable.attrs.update(self._get_global_attributes()) + variable.attrs.update(self._get_global_attributes(product_type)) import_enum_information = dataset_info.get("import_enum_information", False) if import_enum_information: @@ -460,25 +468,25 @@ def nc(self): } ) - def _get_global_attributes(self): - """Create a dictionary of global attributes to be added to all datasets. - - Returns: - dict: A dictionary of global attributes. - filename: name of the product file - spacecraft_name: name of the spacecraft - sensor: name of sensor - platform_name: name of the platform - - """ - attributes = { - "filename": self.filename, - "spacecraft_name": self.spacecraft_name, - "sensor": self.sensor_name, - "platform_name": self.spacecraft_name, - "channel": self.filename_info["channel"] - } - return attributes + # def _get_global_attributes(self): + # """Create a dictionary of global attributes to be added to all datasets. + + # Returns: + # dict: A dictionary of global attributes. 
+ # filename: name of the product file + # spacecraft_name: name of the spacecraft + # sensor: name of sensor + # platform_name: name of the platform + + # """ + # attributes = { + # "filename": self.filename, + # "spacecraft_name": self.spacecraft_name, + # "sensor": self.sensor_name, + # "platform_name": self.spacecraft_name, + # "channel": self.filename_info["channel"] + # } + # return attributes def get_dataset(self, dataset_id, dataset_info): """Get dataset using the nc_key in dataset_info.""" From e2a78363e2553c50d6fd0faac003fdcc3f5ca3dd Mon Sep 17 00:00:00 2001 From: aoun Date: Wed, 15 Jan 2025 13:47:10 +0000 Subject: [PATCH 318/340] Fix AMV test_all_basic by adding the prodcut_type values dedicated to AMVs behavior --- satpy/tests/reader_tests/test_fci_l2_nc.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_fci_l2_nc.py b/satpy/tests/reader_tests/test_fci_l2_nc.py index 830f793d00..79f4549316 100644 --- a/satpy/tests/reader_tests/test_fci_l2_nc.py +++ b/satpy/tests/reader_tests/test_fci_l2_nc.py @@ -616,7 +616,7 @@ def test_all_basic(self, amv_filehandler, amv_file): assert amv_filehandler.sensor_name == "test_data_source" assert amv_filehandler.ssp_lon == 0.0 - global_attributes = amv_filehandler._get_global_attributes() + global_attributes = amv_filehandler._get_global_attributes(product_type="amv") expected_global_attributes = { "filename": amv_file, "spacecraft_name": "test_platform", From c7495a3615a5e02bfa1f366aa09f5e6a3a33d267 Mon Sep 17 00:00:00 2001 From: aoun Date: Wed, 15 Jan 2025 13:48:50 +0000 Subject: [PATCH 319/340] Remove the _get_global_attributes methode from the AMV file handler since it is now covered by the one in the common function --- satpy/readers/fci_l2_nc.py | 20 -------------------- 1 file changed, 20 deletions(-) diff --git a/satpy/readers/fci_l2_nc.py b/satpy/readers/fci_l2_nc.py index 77b92f82f4..2c5434e4e8 100644 --- a/satpy/readers/fci_l2_nc.py +++ b/satpy/readers/fci_l2_nc.py 
@@ -468,26 +468,6 @@ def nc(self): } ) - # def _get_global_attributes(self): - # """Create a dictionary of global attributes to be added to all datasets. - - # Returns: - # dict: A dictionary of global attributes. - # filename: name of the product file - # spacecraft_name: name of the spacecraft - # sensor: name of sensor - # platform_name: name of the platform - - # """ - # attributes = { - # "filename": self.filename, - # "spacecraft_name": self.spacecraft_name, - # "sensor": self.sensor_name, - # "platform_name": self.spacecraft_name, - # "channel": self.filename_info["channel"] - # } - # return attributes - def get_dataset(self, dataset_id, dataset_info): """Get dataset using the nc_key in dataset_info.""" var_key = dataset_info["nc_key"] From c317c174ba30e4a20f9d91055238e9ccce993fa1 Mon Sep 17 00:00:00 2001 From: aoun Date: Wed, 15 Jan 2025 16:41:06 +0000 Subject: [PATCH 320/340] Remove the ssp_lon as a conditional attribute but here for all --- satpy/readers/fci_l2_nc.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/satpy/readers/fci_l2_nc.py b/satpy/readers/fci_l2_nc.py index 2c5434e4e8..cfde51100b 100644 --- a/satpy/readers/fci_l2_nc.py +++ b/satpy/readers/fci_l2_nc.py @@ -80,12 +80,11 @@ def _get_global_attributes(self,product_type="pixel"): "spacecraft_name": self.spacecraft_name, "sensor": self.sensor_name, "platform_name": self.spacecraft_name, + "ssp_lon": self.ssp_lon, } if product_type=="amv": attributes["channel"] = self.filename_info["channel"] - else: - attributes["ssp_lon"] = self.ssp_lon return attributes From fdeeed4a8ce8d3c0bf5b041916580f472de37a3f Mon Sep 17 00:00:00 2001 From: aoun Date: Wed, 15 Jan 2025 16:42:42 +0000 Subject: [PATCH 321/340] Fix typo and improve readbility for the call of the get_global_attribute with the product_type --- satpy/readers/fci_l2_nc.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/readers/fci_l2_nc.py b/satpy/readers/fci_l2_nc.py index cfde51100b..000847f4b1 
100644 --- a/satpy/readers/fci_l2_nc.py +++ b/satpy/readers/fci_l2_nc.py @@ -70,7 +70,7 @@ def _get_global_attributes(self,product_type="pixel"): ssp_lon: longitude of subsatellite point sensor: name of sensor platform_name: name of the platform - Only for AMVS product: + Only for AMVs product: channel: channel at which the AMVs have been retrieved @@ -108,7 +108,7 @@ def _set_attributes(self, variable, dataset_info, product_type="pixel"): del variable.attrs["unit"] variable.attrs.update(dataset_info) - variable.attrs.update(self._get_global_attributes(product_type)) + variable.attrs.update(self._get_global_attributes(product_type=product_type)) import_enum_information = dataset_info.get("import_enum_information", False) if import_enum_information: From d42c00d703afe04a64084f5d7b3cf6396f2009cd Mon Sep 17 00:00:00 2001 From: aoun Date: Wed, 15 Jan 2025 16:44:59 +0000 Subject: [PATCH 322/340] Add ssp_lon to the dict of attribute to be tested --- satpy/tests/reader_tests/test_fci_l2_nc.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/satpy/tests/reader_tests/test_fci_l2_nc.py b/satpy/tests/reader_tests/test_fci_l2_nc.py index 79f4549316..a3f7e6318c 100644 --- a/satpy/tests/reader_tests/test_fci_l2_nc.py +++ b/satpy/tests/reader_tests/test_fci_l2_nc.py @@ -622,7 +622,8 @@ def test_all_basic(self, amv_filehandler, amv_file): "spacecraft_name": "test_platform", "sensor": "test_data_source", "platform_name": "test_platform", - "channel": "test_channel" + "channel": "test_channel", + "ssp_lon": 0.0, } assert global_attributes == expected_global_attributes From aad9b3efe9ba37596fc23638bd4064eb2c0f1fe2 Mon Sep 17 00:00:00 2001 From: Gerrit Holl Date: Thu, 16 Jan 2025 15:32:14 +0100 Subject: [PATCH 323/340] Set issue type in templates In the issue template for bug, set the issue type bug. In the issue template for feature request, set the issue type feature. 
For more information on issue types, see https://github.blog/changelog/2025-01-13-evolving-github-issues-public-preview/ --- .github/ISSUE_TEMPLATE/bug_report.md | 1 + .github/ISSUE_TEMPLATE/feature_request.md | 1 + 2 files changed, 2 insertions(+) diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md index f866a6cfab..1867599b8f 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -1,6 +1,7 @@ --- name: Bug report about: Create a report to help us improve +type: 'bug' --- diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md index 854663574b..f5c967d2f0 100644 --- a/.github/ISSUE_TEMPLATE/feature_request.md +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -1,6 +1,7 @@ --- name: Feature request about: Suggest an idea for this project +type: 'feature' --- From 08f0087dfa89c49823e98950db689b68be034db3 Mon Sep 17 00:00:00 2001 From: Gerrit Holl Date: Thu, 16 Jan 2025 16:59:36 +0100 Subject: [PATCH 324/340] Respect config setting for FCI clipping --- satpy/readers/fci_l1c_nc.py | 5 ++++- satpy/tests/reader_tests/test_fci_l1c_nc.py | 10 ++++++++-- 2 files changed, 12 insertions(+), 3 deletions(-) diff --git a/satpy/readers/fci_l1c_nc.py b/satpy/readers/fci_l1c_nc.py index c24c9b4849..4b8efd81a4 100644 --- a/satpy/readers/fci_l1c_nc.py +++ b/satpy/readers/fci_l1c_nc.py @@ -127,6 +127,7 @@ from pyorbital.astronomy import sun_earth_distance_correction from pyresample import geometry +import satpy from satpy.readers._geos_area import get_geos_area_naming from satpy.readers.eum_base import get_service_mode @@ -209,7 +210,7 @@ class using the :mod:`~satpy.Scene.load` method with the reader "MTI4": "MTG-I4"} def __init__(self, filename, filename_info, filetype_info, - clip_negative_radiances=False, **kwargs): + clip_negative_radiances=None, **kwargs): """Initialize file handler.""" super().__init__(filename, filename_info, filetype_info, @@ -234,6 +235,8 
@@ def __init__(self, filename, filename_info, filetype_info, else: self.is_iqt = False + if clip_negative_radiances is None: + clip_negative_radiances = satpy.config.get("readers.clip_negative_radiances") self.clip_negative_radiances = clip_negative_radiances self._cache = {} diff --git a/satpy/tests/reader_tests/test_fci_l1c_nc.py b/satpy/tests/reader_tests/test_fci_l1c_nc.py index 718d51c819..7257d95e97 100644 --- a/satpy/tests/reader_tests/test_fci_l1c_nc.py +++ b/satpy/tests/reader_tests/test_fci_l1c_nc.py @@ -876,12 +876,18 @@ def test_load_calibration_negative_rad(self, reader_configs, fh_param): See https://github.com/pytroll/satpy/issues/3009. """ + import satpy reader = _get_reader_with_filehandlers(fh_param["filenames"], reader_configs, clip_negative_radiances=True) - res = reader.load([make_dataid(name="ir_38", calibration="radiance")], - pad_data=False) + did = make_dataid(name="ir_38", calibration="radiance") + res = reader.load([did], pad_data=False) + with satpy.config.set({"readers.clip_negative_radiances": True}): + reader2 = _get_reader_with_filehandlers(fh_param["filenames"], + reader_configs) + res2 = reader2.load([did], pad_data=False) numpy.testing.assert_array_equal(res["ir_38"][-1, :], 5) # smallest positive radiance + numpy.testing.assert_array_equal(res2["ir_38"][-1, :], 5) # smallest positive radiance @pytest.mark.parametrize(("calibration", "channel", "resolution"), [ (calibration, channel, resolution) From 92b9719499e0bbd15397876c2add035b9eede2a4 Mon Sep 17 00:00:00 2001 From: Gerrit Holl Date: Thu, 16 Jan 2025 17:02:50 +0100 Subject: [PATCH 325/340] Add test confirming preservation of dtype --- satpy/tests/reader_tests/test_fci_l1c_nc.py | 1 + 1 file changed, 1 insertion(+) diff --git a/satpy/tests/reader_tests/test_fci_l1c_nc.py b/satpy/tests/reader_tests/test_fci_l1c_nc.py index 7257d95e97..93b6513108 100644 --- a/satpy/tests/reader_tests/test_fci_l1c_nc.py +++ b/satpy/tests/reader_tests/test_fci_l1c_nc.py @@ -888,6 +888,7 @@ def 
test_load_calibration_negative_rad(self, reader_configs, fh_param): res2 = reader2.load([did], pad_data=False) numpy.testing.assert_array_equal(res["ir_38"][-1, :], 5) # smallest positive radiance numpy.testing.assert_array_equal(res2["ir_38"][-1, :], 5) # smallest positive radiance + assert res["ir_38"].dtype == res2["ir_38"].dtype == np.dtype("float32") @pytest.mark.parametrize(("calibration", "channel", "resolution"), [ (calibration, channel, resolution) From 449a0846f21491a4512d8f6eca07bbf7d9447026 Mon Sep 17 00:00:00 2001 From: Gerrit Holl Date: Fri, 17 Jan 2025 15:27:40 +0100 Subject: [PATCH 326/340] Remove unused outdir argument --- utils/create_reference.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/utils/create_reference.py b/utils/create_reference.py index 5510054099..531f8963fc 100644 --- a/utils/create_reference.py +++ b/utils/create_reference.py @@ -84,11 +84,6 @@ def get_parser(): "input data in a subdirectory for the satellite. Output images " "will be written to the subdirectory reference_images.") - parser.add_argument( - "-o", "--outdir", action="store", type=pathlib.Path, - default=pathlib.Path("."), - help="Directory where to write resulting images.") - parser.add_argument( "-c", "--composites", nargs="+", help="composites to generate", type=str, default=["ash", "airmass"]) From cc850ab565a6c39f9f489c3a0de4472196b25059 Mon Sep 17 00:00:00 2001 From: Gerrit Holl Date: Fri, 17 Jan 2025 15:55:44 +0100 Subject: [PATCH 327/340] Add imagery tests for colorized low level moisture --- .../behave/features/image_comparison.feature | 17 +++++++++-------- .../behave/features/steps/image_comparison.py | 7 ++++--- utils/create_reference.py | 10 +++++++--- 3 files changed, 20 insertions(+), 14 deletions(-) diff --git a/satpy/tests/behave/features/image_comparison.feature b/satpy/tests/behave/features/image_comparison.feature index 0497a96c93..686062462c 100755 --- a/satpy/tests/behave/features/image_comparison.feature +++ 
b/satpy/tests/behave/features/image_comparison.feature @@ -2,14 +2,15 @@ Feature: Image Comparison Scenario Outline: Compare generated image with reference image Given I have a reference image file from resampled to - When I generate a new image file from with for with clipping + When I generate a new image file from case with for with clipping Then the generated image should be the same as the reference image Examples: - |satellite |composite | reader | area | clip | - |Meteosat-12 | cloudtop | fci_l1c_nc | sve | True | - |Meteosat-12 | night_microphysics | fci_l1c_nc | sve | True | - |GOES17 |airmass | abi_l1b | null | null | - |GOES16 |airmass | abi_l1b | null | null | - |GOES16 |ash | abi_l1b | null | null | - |GOES17 |ash | abi_l1b | null | null | + |satellite | case | composite | reader | area | clip | + |Meteosat-12 | scan_night | cloudtop | fci_l1c_nc | sve | True | + |Meteosat-12 | scan_night | night_microphysics | fci_l1c_nc | sve | True | + |Meteosat-12 | mali_day | essl_colorized_low_level_moisture | fci_l1c_nc | mali | False | + |GOES17 | americas_night | airmass | abi_l1b | null | null | + |GOES16 | americas_night | airmass | abi_l1b | null | null | + |GOES16 | americas_night | ash | abi_l1b | null | null | + |GOES17 | americas_night | ash | abi_l1b | null | null | diff --git a/satpy/tests/behave/features/steps/image_comparison.py b/satpy/tests/behave/features/steps/image_comparison.py index 92c5fa0034..5e7135bc53 100644 --- a/satpy/tests/behave/features/steps/image_comparison.py +++ b/satpy/tests/behave/features/steps/image_comparison.py @@ -63,8 +63,9 @@ def step_given_reference_image(context, composite, satellite, area): context.area = area -@when("I generate a new {composite} image file from {satellite} with {reader} for {area} with clipping {clip}") -def step_when_generate_image(context, composite, satellite, reader, area, clip): +@when("I generate a new {composite} image file from {satellite} case {case} " + "with {reader} for {area} with 
clipping {clip}") +def step_when_generate_image(context, composite, satellite, case, reader, area, clip): """Generate test images.""" os.environ["OMP_NUM_THREADS"] = os.environ["MKL_NUM_THREADS"] = "2" os.environ["PYTROLL_CHUNK_SIZE"] = "1024" @@ -72,7 +73,7 @@ def step_when_generate_image(context, composite, satellite, reader, area, clip): dask.config.set(scheduler="threads", num_workers=4) # Get the list of satellite files to open - filenames = glob(f"{ext_data_path}/satellite_data/{satellite}/*.nc") + filenames = glob(f"{ext_data_path}/satellite_data/{satellite}/{case}/*.nc") reader_kwargs = {} if clip != "null": diff --git a/utils/create_reference.py b/utils/create_reference.py index 531f8963fc..04bffdd9a3 100644 --- a/utils/create_reference.py +++ b/utils/create_reference.py @@ -1,4 +1,4 @@ -# Copyright (c) 2024 Satpy developers +# Copyright (c) 2024-2025 Satpy developers # # This file is part of satpy. # @@ -42,7 +42,8 @@ def generate_images(props): props (namespace): Object with attributes corresponding to command line arguments as defined by :func:get_parser. """ - filenames = (props.basedir / "satellite_data" / props.satellite).glob("*") + filenames = (props.basedir / "satellite_data" / props.satellite / + props.case).glob("*") scn = Scene(reader=props.reader, filenames=filenames) @@ -75,13 +76,16 @@ def get_parser(): "reader", action="store", type=str, help="Reader name.") + parser.add_argument( + "case", help="case to generate", type=str) + parser.add_argument( "-b", "--basedir", action="store", type=pathlib.Path, default=pathlib.Path("."), help="Base directory for reference data. " "This must contain a subdirectories satellite_data and " "reference_images. The directory satellite_data must contain " - "input data in a subdirectory for the satellite. Output images " + "input data in a subdirectory for the satellite and case. 
Output images " "will be written to the subdirectory reference_images.") parser.add_argument( From e0ab59b16338d787bd3ab7dcdd6330803049a157 Mon Sep 17 00:00:00 2001 From: Youva <120452807+YouvaEUMex@users.noreply.github.com> Date: Mon, 20 Jan 2025 14:47:07 +0100 Subject: [PATCH 328/340] Update satpy/readers/fci_l2_nc.py Co-authored-by: Martin Raspaud --- satpy/readers/fci_l2_nc.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/satpy/readers/fci_l2_nc.py b/satpy/readers/fci_l2_nc.py index 000847f4b1..bf5c7a9bf6 100644 --- a/satpy/readers/fci_l2_nc.py +++ b/satpy/readers/fci_l2_nc.py @@ -60,7 +60,7 @@ def ssp_lon(self): f"of {SSP_DEFAULT} degrees east instead") return SSP_DEFAULT - def _get_global_attributes(self,product_type="pixel"): + def _get_global_attributes(self, product_type="pixel"): """Create a dictionary of global attributes to be added to all datasets. Returns: From 357856d5a02239396c59cb268a85f69a9fc19ec7 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Mon, 20 Jan 2025 15:16:23 +0100 Subject: [PATCH 329/340] Pin dask to avoid dataframe problem --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 9ed0eda02d..0dce58d5c8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -7,7 +7,7 @@ authors = [ ] dependencies = [ "platformdirs", - "dask[array]>=0.17.1", + "dask[array]>=0.17.1,<2025.1.0", "donfig", "numpy>=1.21", "packaging", From 3765a5eb213838f57a56653e47ed9e301c78b347 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Mon, 20 Jan 2025 15:33:25 +0100 Subject: [PATCH 330/340] Pin dask in ci also --- continuous_integration/environment.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/continuous_integration/environment.yaml b/continuous_integration/environment.yaml index bc898cafe2..df10c1a7a8 100644 --- a/continuous_integration/environment.yaml +++ b/continuous_integration/environment.yaml @@ -3,7 +3,7 @@ channels: - conda-forge dependencies: 
- xarray!=2022.9.0 - - dask + - dask<2025.1.0 - distributed - dask-image - donfig From 6fc15fe66450e664decab37a6bcb9ae73bd8a0c2 Mon Sep 17 00:00:00 2001 From: Martin Raspaud Date: Mon, 20 Jan 2025 15:57:49 +0100 Subject: [PATCH 331/340] Update changelog for v0.54.0 --- CHANGELOG.md | 53 ++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 53 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index acc4a07d30..b709083c5c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,56 @@ +## Version 0.54.0 (2025/01/20) + +### Issues Closed + +* [Issue 3020](https://github.com/pytroll/satpy/issues/3020) - Re-implement essl_colorized_low_level_moisture using colorize ([PR 3021](https://github.com/pytroll/satpy/pull/3021) by [@gerritholl](https://github.com/gerritholl)) +* [Issue 3009](https://github.com/pytroll/satpy/issues/3009) - artefacts in FCI RGBs using 3.8 µm ([PR 3013](https://github.com/pytroll/satpy/pull/3013) by [@gerritholl](https://github.com/gerritholl)) +* [Issue 2991](https://github.com/pytroll/satpy/issues/2991) - Resampling MTG FCI high res bands fails when the resample includes bands at different spatial resolutions +* [Issue 2981](https://github.com/pytroll/satpy/issues/2981) - Fix the bug with `satpy` when using `numpy 2.x` which leads to `SEVIRI` resampled files having a double size ([PR 2983](https://github.com/pytroll/satpy/pull/2983) by [@pkhalaj](https://github.com/pkhalaj)) +* [Issue 2979](https://github.com/pytroll/satpy/issues/2979) - Improving resolution when setting extent +* [Issue 2977](https://github.com/pytroll/satpy/issues/2977) - CRS data is being printed to title of image +* [Issue 2975](https://github.com/pytroll/satpy/issues/2975) - can't create ABI geo_color composite +* [Issue 2963](https://github.com/pytroll/satpy/issues/2963) - ahi_hrit reader cannot create a Scene +* [Issue 2814](https://github.com/pytroll/satpy/issues/2814) - Reading LI L2 point data is not daskified ([PR 
2985](https://github.com/pytroll/satpy/pull/2985) by [@ClementLaplace](https://github.com/ClementLaplace)) +* [Issue 2566](https://github.com/pytroll/satpy/issues/2566) - Wrong version numbers at readthedocs +* [Issue 1997](https://github.com/pytroll/satpy/issues/1997) - Resampling from SwathDefinition to AreaDefinition fails with OSError and AssertionError +* [Issue 1788](https://github.com/pytroll/satpy/issues/1788) - integration / regression tests that compare images +* [Issue 1755](https://github.com/pytroll/satpy/issues/1755) - Store project metadata in pyproject.toml +* [Issue 1240](https://github.com/pytroll/satpy/issues/1240) - iber projection lost in the North Pacific + +In this release 14 issues were closed. + +### Pull Requests Merged + +#### Bugs fixed + +* [PR 3035](https://github.com/pytroll/satpy/pull/3035) - Pin dask to avoid dataframe problem +* [PR 3030](https://github.com/pytroll/satpy/pull/3030) - Fix sdist tarball including unnecessary files +* [PR 2995](https://github.com/pytroll/satpy/pull/2995) - Add new ABI L2 "CPS" variable name for Cloud Particle Size +* [PR 2985](https://github.com/pytroll/satpy/pull/2985) - li2_nc reader daskified ([2814](https://github.com/pytroll/satpy/issues/2814)) +* [PR 2983](https://github.com/pytroll/satpy/pull/2983) - Fix dtype promotion in SEVIRI native reader ([2981](https://github.com/pytroll/satpy/issues/2981)) +* [PR 2976](https://github.com/pytroll/satpy/pull/2976) - Fix dtype promotion in `mersi2_l1b` reader +* [PR 2969](https://github.com/pytroll/satpy/pull/2969) - Fix geos proj parameters for Insat 3d satellites +* [PR 2959](https://github.com/pytroll/satpy/pull/2959) - Modified the issue with the calibration coefficient indices for FY-3 satellite data reader + +#### Features added + +* [PR 3034](https://github.com/pytroll/satpy/pull/3034) - Set issue type in templates +* [PR 3021](https://github.com/pytroll/satpy/pull/3021) - Change ESSL colorisation approach 
([3020](https://github.com/pytroll/satpy/issues/3020)) +* [PR 3013](https://github.com/pytroll/satpy/pull/3013) - Clip negative FCI radiances ([3009](https://github.com/pytroll/satpy/issues/3009)) +* [PR 3007](https://github.com/pytroll/satpy/pull/3007) - Add t865 dataset to olci l2 list ([1767](https://github.com/pytroll/satpy/issues/1767)) +* [PR 2999](https://github.com/pytroll/satpy/pull/2999) - Add Accsos image comparison tests +* [PR 2941](https://github.com/pytroll/satpy/pull/2941) - Refactor MVIRI dataset access +* [PR 2565](https://github.com/pytroll/satpy/pull/2565) - Add level-1 readers for the arctic weather satelliter data + +#### Clean ups + +* [PR 3030](https://github.com/pytroll/satpy/pull/3030) - Fix sdist tarball including unnecessary files +* [PR 3014](https://github.com/pytroll/satpy/pull/3014) - Remove xarray-datatree dependency from CI +* [PR 3010](https://github.com/pytroll/satpy/pull/3010) - Remove version limit on pytest in CI + +In this release 18 pull requests were closed. 
+ + ## Version 0.53.0 (2024/11/08) ### Issues Closed From f202054eb07c9c2935a9836196a5f089dff6ff24 Mon Sep 17 00:00:00 2001 From: clement laplace Date: Tue, 21 Jan 2025 12:15:22 +0000 Subject: [PATCH 332/340] fix : fix the handling to drop date with dask array that is introduced with the PR #2985 --- satpy/composites/lightning.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/satpy/composites/lightning.py b/satpy/composites/lightning.py index be4d0c769b..5fc52d7c9c 100644 --- a/satpy/composites/lightning.py +++ b/satpy/composites/lightning.py @@ -63,7 +63,9 @@ def _normalize_time(self, data:xr.DataArray, attrs:dict) -> xr.DataArray: # Compute the minimum time value based on the time range begin_time = end_time - np.timedelta64(self.time_range, "m") # Drop values that are bellow begin_time - data = data.where(data >= begin_time, drop=True) + condition_time = data >= begin_time + condition_time_computed = condition_time.compute() + data = data.where(condition_time_computed, drop=True) # exit if data is empty afer filtering if data.size == 0 : LOG.error(f"All the flash_age events happened before {begin_time}") From 4c25948f3ea474780142088157076e3e0a85c8d6 Mon Sep 17 00:00:00 2001 From: clement laplace Date: Tue, 21 Jan 2025 12:50:25 +0000 Subject: [PATCH 333/340] fix: Correct the test by replacing numpy array to dask array to simulate the real case data --- satpy/tests/compositor_tests/test_lightning.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/satpy/tests/compositor_tests/test_lightning.py b/satpy/tests/compositor_tests/test_lightning.py index 6b1dab7b4c..52eea22f3b 100644 --- a/satpy/tests/compositor_tests/test_lightning.py +++ b/satpy/tests/compositor_tests/test_lightning.py @@ -21,6 +21,7 @@ import datetime import logging +import dask.array as da import numpy as np import pytest import xarray as xr @@ -37,7 +38,7 @@ def test_flash_age_compositor(): attrs_flash_age = {"variable_name": "flash_time","name": 
"flash_time", "start_time": datetime.datetime(2024, 8, 1, 10, 50, 0), "end_time": datetime.datetime(2024, 8, 1, 11, 0, 0),"reader": "li_l2_nc"} - flash_age_value = np.array(["2024-08-01T09:00:00", + flash_age_value = da.array(["2024-08-01T09:00:00", "2024-08-01T10:00:00", "2024-08-01T10:30:00","2024-08-01T11:00:00"], dtype="datetime64[ns]") flash_age = xr.DataArray( flash_age_value, @@ -52,7 +53,7 @@ def test_flash_age_compositor(): "standard_name": "ligtning_time" } expected_array = xr.DataArray( - np.array([0.0,0.5,1.0]), + da.array([0.0,0.5,1.0]), dims=["y"], coords={ "crs": "8B +proj=longlat +ellps=WGS84 +type=crs" @@ -69,7 +70,7 @@ def test_empty_array_error(caplog): "start_time": np.datetime64(datetime.datetime(2024, 8, 1, 10, 0, 0)), "end_time": datetime.datetime(2024, 8, 1, 11, 0, 0), "reader": "li_l2_nc"} - flash_age_value = np.array(["2024-08-01T09:00:00"], dtype="datetime64[ns]") + flash_age_value = da.array(["2024-08-01T09:00:00"], dtype="datetime64[ns]") flash_age = xr.DataArray( flash_age_value, dims=["y"], From 01237e2cbd0098931240e40cd4f7f5eeda3e532f Mon Sep 17 00:00:00 2001 From: clement laplace Date: Wed, 22 Jan 2025 06:58:12 +0000 Subject: [PATCH 334/340] feat: Add the component element true_color_with_night_ir105_flash_age --- satpy/etc/composites/fci.yaml | 149 +++++++++++++++++---------------- satpy/etc/enhancements/li.yaml | 2 +- 2 files changed, 79 insertions(+), 72 deletions(-) diff --git a/satpy/etc/composites/fci.yaml b/satpy/etc/composites/fci.yaml index 5dd812c73f..07ff48da8a 100644 --- a/satpy/etc/composites/fci.yaml +++ b/satpy/etc/composites/fci.yaml @@ -1,17 +1,17 @@ sensor_name: visir/fci composites: -### L2 + ### L2 binary_cloud_mask: # This will set all clear pixels to '0', all pixels with cloudy features (meteorological/dust/ash clouds) to '1' and # missing/undefined pixels to 'nan'. This can be used for the official EUMETSAT cloud mask product (CLM). 
compositor: !!python/name:satpy.composites.CategoricalDataCompositor prerequisites: - - name: 'cloud_state' - lut: [ .nan, 0, 1, 1, 1, 1, 1, 1, 0, .nan ] + - name: "cloud_state" + lut: [.nan, 0, 1, 1, 1, 1, 1, 1, 0, .nan] standard_name: binary_cloud_mask -### Night Layers + ### Night Layers night_ir105: compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: @@ -41,7 +41,7 @@ composites: - night_ir_alpha - _night_background_hires -### Green Corrections + ### Green Corrections ndvi_hybrid_green: description: > The FCI green band at 0.51 µm deliberately misses the chlorophyll band, such that @@ -58,7 +58,7 @@ composites: - name: vis_06 modifiers: [sunz_corrected, rayleigh_corrected, sunz_reduced] - name: vis_08 - modifiers: [sunz_corrected, sunz_reduced ] + modifiers: [sunz_corrected, sunz_reduced] standard_name: toa_bidirectional_reflectance ndvi_hybrid_green_raw: @@ -76,18 +76,18 @@ composites: ndvi_hybrid_green_fully_sunzencorrected: description: Same as ndvi_hybrid_green, but without Sun-zenith reduction compositor: !!python/name:satpy.composites.spectral.NDVIHybridGreen - limits: [ 0.15, 0.05 ] + limits: [0.15, 0.05] strength: 3.0 prerequisites: - name: vis_05 - modifiers: [ sunz_corrected, rayleigh_corrected ] + modifiers: [sunz_corrected, rayleigh_corrected] - name: vis_06 - modifiers: [ sunz_corrected, rayleigh_corrected ] + modifiers: [sunz_corrected, rayleigh_corrected] - name: vis_08 - modifiers: [ sunz_corrected ] + modifiers: [sunz_corrected] standard_name: toa_bidirectional_reflectance -### True Color + ### True Color true_color: compositor: !!python/name:satpy.composites.SelfSharpenedRGB description: > @@ -190,7 +190,7 @@ composites: - name: vis_04 standard_name: true_color_reproduction_color_stretch -### True Color with LI lightning + ### True Color with LI lightning true_color_with_night_ir105_acc_flash: compositor: !!python/name:satpy.composites.BackgroundCompositor @@ -227,74 +227,81 @@ composites: - group_radiance_alpha - 
true_color_with_night_ir105 -### GeoColor + true_color_with_night_ir105_flash_age: + compositor: !!python/name:satpy.composites.BackgroundCompositor + standard_name: imager_with_lightning + prerequisites: + - flash_age + - true_color_with_night_ir105 + + ### GeoColor geo_color: - compositor: !!python/name:satpy.composites.DayNightCompositor - description: > - GeoColor is a multi-layer blended RGB composite where the day-time part of the image is represented by true - color imagery and the nighttime part of the image by a three layer vertically blended stack composed of a - high-level cloud layer (single IR window channel), a low-level cloud layer (IR split window) and a static - surface terrain layer with city lights (NASA Black Marble). - references: - Research Article: https://journals.ametsoc.org/view/journals/atot/37/3/JTECH-D-19-0134.1.xml - lim_low: 78 - lim_high: 88 - standard_name: geo_color_day_night_blend - prerequisites: - - true_color - - geo_color_night + compositor: !!python/name:satpy.composites.DayNightCompositor + description: > + GeoColor is a multi-layer blended RGB composite where the day-time part of the image is represented by true + color imagery and the nighttime part of the image by a three layer vertically blended stack composed of a + high-level cloud layer (single IR window channel), a low-level cloud layer (IR split window) and a static + surface terrain layer with city lights (NASA Black Marble). 
+ references: + Research Article: https://journals.ametsoc.org/view/journals/atot/37/3/JTECH-D-19-0134.1.xml + lim_low: 78 + lim_high: 88 + standard_name: geo_color_day_night_blend + prerequisites: + - true_color + - geo_color_night geo_color_high_clouds: - standard_name: geo_color_high_clouds - compositor: !!python/name:satpy.composites.HighCloudCompositor - prerequisites: - - name: ir_105 + standard_name: geo_color_high_clouds + compositor: !!python/name:satpy.composites.HighCloudCompositor + prerequisites: + - name: ir_105 geo_color_low_clouds: - standard_name: geo_color_low_clouds - compositor: !!python/name:satpy.composites.LowCloudCompositor - values_water: 0 - values_land: 100 - range_water: [0.0, 4.0] - range_land: [1.5, 4.0] - prerequisites: - - compositor: !!python/name:satpy.composites.DifferenceCompositor - prerequisites: - - name: ir_105 - - name: ir_38 - - name: ir_105 - - compositor: !!python/name:satpy.composites.StaticImageCompositor - standard_name: land_water_mask - url: "https://zenodo.org/records/10076199/files/gshhs_land_water_mask_3km_i.tif" - known_hash: "sha256:96df83c57416217e191f95dde3d3c1ce0373a8fc220e929228873db246ca3569" + standard_name: geo_color_low_clouds + compositor: !!python/name:satpy.composites.LowCloudCompositor + values_water: 0 + values_land: 100 + range_water: [0.0, 4.0] + range_land: [1.5, 4.0] + prerequisites: + - compositor: !!python/name:satpy.composites.DifferenceCompositor + prerequisites: + - name: ir_105 + - name: ir_38 + - name: ir_105 + - compositor: !!python/name:satpy.composites.StaticImageCompositor + standard_name: land_water_mask + url: "https://zenodo.org/records/10076199/files/gshhs_land_water_mask_3km_i.tif" + known_hash: "sha256:96df83c57416217e191f95dde3d3c1ce0373a8fc220e929228873db246ca3569" geo_color_background_with_low_clouds: - compositor: !!python/name:satpy.composites.BackgroundCompositor - standard_name: night_ir_with_background - prerequisites: - - geo_color_low_clouds - - _night_background_hires 
+ compositor: !!python/name:satpy.composites.BackgroundCompositor + standard_name: night_ir_with_background + prerequisites: + - geo_color_low_clouds + - _night_background_hires geo_color_night: - compositor: !!python/name:satpy.composites.BackgroundCompositor - standard_name: night_ir_with_background - prerequisites: - - geo_color_high_clouds - - geo_color_background_with_low_clouds + compositor: !!python/name:satpy.composites.BackgroundCompositor + standard_name: night_ir_with_background + prerequisites: + - geo_color_high_clouds + - geo_color_background_with_low_clouds -### IR-Sandwich + ### IR-Sandwich ir_sandwich: compositor: !!python/name:satpy.composites.SandwichCompositor standard_name: ir_sandwich prerequisites: - - name: 'vis_06' - modifiers: [ sunz_corrected ] + - name: "vis_06" + modifiers: [sunz_corrected] - name: colorized_ir_clouds colorized_ir_clouds: compositor: !!python/name:satpy.composites.SingleBandCompositor prerequisites: - - name: 'ir_105' + - name: "ir_105" standard_name: colorized_ir_clouds ir_sandwich_with_night_colorized_ir_clouds: @@ -306,7 +313,7 @@ composites: - ir_sandwich - colorized_ir_clouds -### other RGBs + ### other RGBs cloud_type: description: > Equal to cimss_cloud_type recipe, but with additional sunz_reducer modifier to avoid saturation at the terminator. 
@@ -316,11 +323,11 @@ composites: compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - name: nir_13 - modifiers: [ sunz_corrected, sunz_reduced ] + modifiers: [sunz_corrected, sunz_reduced] - name: vis_06 - modifiers: [ sunz_corrected, sunz_reduced ] + modifiers: [sunz_corrected, sunz_reduced] - name: nir_16 - modifiers: [ sunz_corrected, sunz_reduced ] + modifiers: [sunz_corrected, sunz_reduced] standard_name: cimss_cloud_type cloud_type_with_night_ir105: @@ -416,10 +423,10 @@ composites: Recipe: https://resources.eumetrain.org/RGBguide/recipes/RGB_recipes.pdf compositor: !!python/name:satpy.composites.GenericCompositor prerequisites: - - name: vis_08 - modifiers: [sunz_corrected] - - name: nir_16 - modifiers: [sunz_corrected] - - name: ir_38 - modifiers: [nir_reflectance] + - name: vis_08 + modifiers: [sunz_corrected] + - name: nir_16 + modifiers: [sunz_corrected] + - name: ir_38 + modifiers: [nir_reflectance] standard_name: snow diff --git a/satpy/etc/enhancements/li.yaml b/satpy/etc/enhancements/li.yaml index 9aaa5c4a0b..82b6056b9a 100644 --- a/satpy/etc/enhancements/li.yaml +++ b/satpy/etc/enhancements/li.yaml @@ -81,4 +81,4 @@ enhancements: method: !!python/name:satpy.enhancements.colorize kwargs: palettes: - - { colors: ylorrd, min_value: 0, max_value: 1 } + - { colors: ylorrd, reverse: True, min_value: 0, max_value: 1 } From 9be06d284d4445790393b20e8c405efbaee59a12 Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 30 Jan 2025 10:02:40 +0200 Subject: [PATCH 335/340] Add reader configurations for more VIIRS EDR datasets --- satpy/etc/readers/viirs_edr.yaml | 32 ++++++++++++++++++++++++++++++++ 1 file changed, 32 insertions(+) diff --git a/satpy/etc/readers/viirs_edr.yaml b/satpy/etc/readers/viirs_edr.yaml index 4c4c91a91f..67b54bea51 100644 --- a/satpy/etc/readers/viirs_edr.yaml +++ b/satpy/etc/readers/viirs_edr.yaml @@ -36,6 +36,38 @@ file_types: file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSLSTHandler file_patterns: 
- 'LST_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' + jrr_cloudbase: + file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSJRRFileHandler + file_patterns: + - 'JRR-CloudBase_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' + jrr_aerosol_detection: + file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSJRRFileHandler + file_patterns: + - 'JRR-ADP_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' + jrr_clouddcomp: + file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSJRRFileHandler + file_patterns: + - 'JRR-CloudDCOMP_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' + jrr_cloudncomp: + file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSJRRFileHandler + file_patterns: + - 'JRR-CloudNCOMP_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' + jrr_cloudlayers: + file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSJRRFileHandler + file_patterns: + - 'JRR-CloudCoverLayers_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' + jrr_cloudphase: + file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSJRRFileHandler + file_patterns: + - 'JRR-CloudPhase_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' + jrr_iceconcentration: + file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSJRRFileHandler + file_patterns: + - 'JRR-IceConcentration_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' + jrr_iceage: + file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSJRRFileHandler + file_patterns: + - 
'JRR-IceAge_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' datasets: From edd3ac9105e23368748b2b1e89dc9df6fae7a0aa Mon Sep 17 00:00:00 2001 From: Panu Lahtinen Date: Thu, 30 Jan 2025 10:13:21 +0200 Subject: [PATCH 336/340] Remove duplicate ADB entry --- satpy/etc/readers/viirs_edr.yaml | 4 ---- 1 file changed, 4 deletions(-) diff --git a/satpy/etc/readers/viirs_edr.yaml b/satpy/etc/readers/viirs_edr.yaml index 67b54bea51..e53a27d072 100644 --- a/satpy/etc/readers/viirs_edr.yaml +++ b/satpy/etc/readers/viirs_edr.yaml @@ -40,10 +40,6 @@ file_types: file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSJRRFileHandler file_patterns: - 'JRR-CloudBase_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' - jrr_aerosol_detection: - file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSJRRFileHandler - file_patterns: - - 'JRR-ADP_{version}_{platform_shortname}_s{start_time:%Y%m%d%H%M%S%f}_e{end_time:%Y%m%d%H%M%S%f}_c{creation_time}.nc' jrr_clouddcomp: file_reader: !!python/name:satpy.readers.viirs_edr.VIIRSJRRFileHandler file_patterns: From c2cbf3ff1798c039d83a9d3ab2fe0e74e36ca1d6 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 30 Jan 2025 09:35:10 -0600 Subject: [PATCH 337/340] Change docstring section title in satpy_cf_nc.py --- satpy/readers/satpy_cf_nc.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/satpy/readers/satpy_cf_nc.py b/satpy/readers/satpy_cf_nc.py index 9f742272a1..b34e13d028 100644 --- a/satpy/readers/satpy_cf_nc.py +++ b/satpy/readers/satpy_cf_nc.py @@ -91,8 +91,8 @@ ''AVHRR-GAC_FDR_1C_{platform}_{start_time:%Y%m%dT%H%M%SZ}_{end_time:%Y%m%dT%H%M%SZ}_{processing_mode}_{disposition_mode}_{creation_time}_{version_int:04d}.nc' -Example: --------- +**Example**: + Here is an example how to read the data in satpy: .. 
code-block:: python From 0a734babc73e41c99cf7496ad8935a83d94b3d04 Mon Sep 17 00:00:00 2001 From: David Hoese Date: Thu, 30 Jan 2025 09:36:04 -0600 Subject: [PATCH 338/340] Add dask-expr to rtd_environment.yml --- doc/rtd_environment.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/doc/rtd_environment.yml b/doc/rtd_environment.yml index 3b11a9a20b..58a1e068f5 100644 --- a/doc/rtd_environment.yml +++ b/doc/rtd_environment.yml @@ -7,6 +7,7 @@ dependencies: - platformdirs - dask - dask-image + - dask-expr - defusedxml - donfig # 2.19.1 seems to cause library linking issues From 39434e506dc0c85dfe5364599937a37c281fece5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sat, 1 Feb 2025 10:15:44 +0000 Subject: [PATCH 339/340] Bump pypa/gh-action-pypi-publish from 1.12.3 to 1.12.4 Bumps [pypa/gh-action-pypi-publish](https://github.com/pypa/gh-action-pypi-publish) from 1.12.3 to 1.12.4. - [Release notes](https://github.com/pypa/gh-action-pypi-publish/releases) - [Commits](https://github.com/pypa/gh-action-pypi-publish/compare/v1.12.3...v1.12.4) --- updated-dependencies: - dependency-name: pypa/gh-action-pypi-publish dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] --- .github/workflows/deploy-sdist.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/deploy-sdist.yaml b/.github/workflows/deploy-sdist.yaml index fd0429062a..45b90be73b 100644 --- a/.github/workflows/deploy-sdist.yaml +++ b/.github/workflows/deploy-sdist.yaml @@ -23,7 +23,7 @@ jobs: - name: Publish package to PyPI if: github.event.action == 'published' - uses: pypa/gh-action-pypi-publish@v1.12.3 + uses: pypa/gh-action-pypi-publish@v1.12.4 with: user: __token__ password: ${{ secrets.pypi_password }} From 12709e59638ca225d9dbdd5a289d3c376d06c325 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 3 Feb 2025 23:18:56 +0000 Subject: [PATCH 340/340] [pre-commit.ci] pre-commit autoupdate MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.8.6 → v0.9.4](https://github.com/astral-sh/ruff-pre-commit/compare/v0.8.6...v0.9.4) - [github.com/PyCQA/bandit: 1.8.0 → 1.8.2](https://github.com/PyCQA/bandit/compare/1.8.0...1.8.2) - [github.com/pycqa/isort: 5.13.2 → 6.0.0](https://github.com/pycqa/isort/compare/5.13.2...6.0.0) --- .pre-commit-config.yaml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 790d6a925c..741d23cc1c 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -3,7 +3,7 @@ fail_fast: false repos: - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. - rev: 'v0.8.6' + rev: 'v0.9.4' hooks: - id: ruff - repo: https://github.com/pre-commit/pre-commit-hooks @@ -14,7 +14,7 @@ repos: - id: check-yaml args: [--unsafe] - repo: https://github.com/PyCQA/bandit - rev: '1.8.0' # Update me! + rev: '1.8.2' # Update me! 
hooks: - id: bandit args: [--ini, .bandit] @@ -29,7 +29,7 @@ repos: - types-requests args: ["--python-version", "3.10", "--ignore-missing-imports"] - repo: https://github.com/pycqa/isort - rev: 5.13.2 + rev: 6.0.0 hooks: - id: isort language_version: python3