diff --git a/satpy/etc/readers/olci_l2.yaml b/satpy/etc/readers/olci_l2.yaml
index 259c1e307d..9b10f2093f 100644
--- a/satpy/etc/readers/olci_l2.yaml
+++ b/satpy/etc/readers/olci_l2.yaml
@@ -26,7 +26,7 @@ file_types:
     file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2
     file_patterns: ['{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/tsm_nn.nc']
   esa_l2_wqsf:
-    file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2
+    file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2Flags
     file_patterns: ['{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/wqsf.nc']
   esa_angles:
     file_reader: !!python/name:satpy.readers.olci_nc.NCOLCIAngles
@@ -34,7 +34,12 @@ file_types:
   esa_geo:
     file_reader: !!python/name:satpy.readers.olci_nc.NCOLCIGeo
     file_patterns: ['{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/geo_coordinates.nc']
-
+  esa_l2_iwv:
+    file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2
+    file_patterns: ['{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/iwv.nc']
+  esa_l2_w_aer:
+    file_reader: !!python/name:satpy.readers.olci_nc.NCOLCI2
+    file_patterns: ['{mission_id:3s}_OL_2_{datatype_id:_<6s}_{start_time:%Y%m%dT%H%M%S}_{end_time:%Y%m%dT%H%M%S}_{creation_time:%Y%m%dT%H%M%S}_{duration:4d}_{cycle:3d}_{relative_orbit:3d}_{frame:4d}_{centre:3s}_{mode:1s}_{timeliness:2s}_{collection:3s}.SEN3/w_aer.nc']
 
 datasets:
   longitude:
@@ -328,10 +333,8 @@ datasets:
     name: chl_oc4me
     sensor: olci
     resolution: 300
-    calibration:
-      reflectance:
-        standard_name: algal_pigment_concentration
-        units: "lg(re mg.m-3)"
+    standard_name: algal_pigment_concentration
+    units: "lg(re mg.m-3)"
     coordinates: [longitude, latitude]
     file_type: esa_l2_chl_oc4me
     nc_key: CHL_OC4ME
@@ -340,10 +343,8 @@ datasets:
     name: chl_nn
     sensor: olci
    resolution: 300
-    calibration:
-      reflectance:
-        standard_name: algal_pigment_concentration
-        units: "lg(re mg.m-3)"
+    standard_name: algal_pigment_concentration
+    units: "lg(re mg.m-3)"
     coordinates: [longitude, latitude]
     file_type: esa_l2_chl_nn
     nc_key: CHL_NN
@@ -352,10 +353,8 @@ datasets:
     name: iop_nn
     sensor: olci
     resolution: 300
-    calibration:
-      reflectance:
-        standard_name: cdm_absorption_coefficient
-        units: "lg(re m-l)"
+    standard_name: cdm_absorption_coefficient
+    units: "lg(re m-l)"
     coordinates: [longitude, latitude]
     file_type: esa_l2_iop_nn
     nc_key: ADG443_NN
@@ -364,10 +363,8 @@ datasets:
     name: trsp
     sensor: olci
     resolution: 300
-    calibration:
-      reflectance:
-        standard_name: diffuse_attenuation_coefficient
-        units: "lg(re m-l)"
+    standard_name: diffuse_attenuation_coefficient
+    units: "lg(re m-l)"
     coordinates: [longitude, latitude]
     file_type: esa_l2_trsp
     nc_key: KD490_M07
@@ -376,10 +373,8 @@ datasets:
     name: tsm_nn
     sensor: olci
     resolution: 300
-    calibration:
-      reflectance:
-        standard_name: total_suspended_matter_concentration
-        units: "lg(re g.m-3)"
+    standard_name: total_suspended_matter_concentration
+    units: "lg(re g.m-3)"
     coordinates: [longitude, latitude]
     file_type: esa_l2_tsm_nn
     nc_key: TSM_NN
@@ -399,6 +394,27 @@ datasets:
     coordinates: [longitude, latitude]
     file_type: esa_l2_wqsf
     nc_key: WQSF
+    masked_items: [ "INVALID", "SNOW_ICE", "INLAND_WATER", "SUSPECT",
+                    "AC_FAIL", "CLOUD", "HISOLZEN", "OCNN_FAIL",
+                    "CLOUD_MARGIN", "CLOUD_AMBIGUOUS", "LOWRW", "LAND" ]
+
+  cloud_mask:
+    name: cloud_mask
+    sensor: olci
+    resolution: 300
+    coordinates: [longitude, latitude]
+    file_type: esa_l2_wqsf
+    nc_key: WQSF
+    masked_items: [ "CLOUD" ]
+
+  ocnn_mask:
+    name: ocnn_mask
+    sensor: olci
+    resolution: 300
+    coordinates: [longitude, latitude]
+    file_type: esa_l2_wqsf
+    nc_key: WQSF
+    masked_items: [ "OCNN_FAIL" ]
 
   solar_zenith_angle:
     name: solar_zenith_angle
@@ -427,3 +443,23 @@ datasets:
     resolution: 300
     coordinates: [longitude, latitude]
     file_type: esa_angles
+
+  iwv:
+    name: iwv
+    sensor: olci
+    resolution: 300
+    standard_name: integrated_water_vapour_column
+    units: "lg(re m-l)"
+    coordinates: [longitude, latitude]
+    file_type: esa_l2_iwv
+    nc_key: IWV
+
+  w_aer:
+    name: w_aer
+    sensor: olci
+    resolution: 300
+    standard_name: aerosol_optical_thickness
+    units: "lg(re g.m-3)"
+    coordinates: [longitude, latitude]
+    file_type: esa_l2_w_aer
+    nc_key: T865
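Note (not part of the patch): a minimal sketch of how the datasets added in the YAML above could be loaded through the standard Scene API. The .SEN3 path and the WFR product type are placeholders; any OLCI L2 granule whose files match the patterns above should work.

    from glob import glob

    from satpy import Scene

    # Point the reader at the NetCDF files inside one OLCI L2 .SEN3 directory.
    filenames = glob("/data/S3A_OL_2_WFR____*.SEN3/*.nc")
    scn = Scene(filenames=filenames, reader="olci_l2")

    # cloud_mask and ocnn_mask are boolean masks derived from the WQSF flags via
    # the masked_items lists above; iwv and w_aer come from the new file types.
    scn.load(["cloud_mask", "ocnn_mask", "iwv", "w_aer"])
    print(scn["cloud_mask"].dtype)  # bool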
diff --git a/satpy/readers/olci_nc.py b/satpy/readers/olci_nc.py
index 982e774f77..d0811ecaaf 100644
--- a/satpy/readers/olci_nc.py
+++ b/satpy/readers/olci_nc.py
@@ -1,6 +1,6 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
-# Copyright (c) 2016 Satpy developers
+# Copyright (c) 2016-2021 Satpy developers
 #
 # This file is part of satpy.
 #
@@ -59,39 +59,38 @@
                   'S3B': 'Sentinel-3B'}
 
 
-class BitFlags(object):
+class BitFlags:
     """Manipulate flags stored bitwise."""
 
-    flag_list = ['INVALID', 'WATER', 'LAND', 'CLOUD', 'SNOW_ICE',
-                 'INLAND_WATER', 'TIDAL', 'COSMETIC', 'SUSPECT',
-                 'HISOLZEN', 'SATURATED', 'MEGLINT', 'HIGHGLINT',
-                 'WHITECAPS', 'ADJAC', 'WV_FAIL', 'PAR_FAIL',
-                 'AC_FAIL', 'OC4ME_FAIL', 'OCNN_FAIL',
-                 'Extra_1',
-                 'KDM_FAIL',
-                 'Extra_2',
-                 'CLOUD_AMBIGUOUS', 'CLOUD_MARGIN', 'BPAC_ON', 'WHITE_SCATT',
-                 'LOWRW', 'HIGHRW']
+    def __init__(self, masks, meanings):
+        """Init the flags."""
+        self._masks = masks
+        self._meanings = meanings
+        self._map = dict(zip(meanings, masks))
 
-    meaning = {f: i for i, f in enumerate(flag_list)}
+    @property
+    def masks(self):
+        """Return masks."""
+        return self._masks
 
-    def __init__(self, value):
-        """Init the flags."""
-        self._value = value
-
-    def __getitem__(self, item):
-        """Get the item."""
-        pos = self.meaning[item]
-        data = self._value
-        if isinstance(data, xr.DataArray):
-            data = data.data
-            res = ((data >> pos) % 2).astype(bool)
-            res = xr.DataArray(res, coords=self._value.coords,
-                               attrs=self._value.attrs,
-                               dims=self._value.dims)
-        else:
-            res = ((data >> pos) % 2).astype(bool)
-        return res
+    @property
+    def meanings(self):
+        """Return meanings."""
+        return self._meanings
+
+    def match_item(self, item, data):
+        """Match the given item in data."""
+        mask = self._map[item]
+        return np.bitwise_and(data, mask).astype(bool)
+
+    def match_any(self, items, data):
+        """Match any of the items in data."""
+        mask = reduce(np.bitwise_or, [self._map[item] for item in items])
+        return np.bitwise_and(data, mask).astype(bool)
+
+    def __eq__(self, other):
+        """Check equality."""
+        return all(self._masks == other.masks) and self._meanings == other.meanings
 
 
 class NCOLCIBase(BaseFileHandler):
@@ -146,9 +145,16 @@ def __del__(self):
         with suppress(IOError, OSError, AttributeError, TypeError):
             self.nc.close()
 
-
-class NCOLCICal(NCOLCIBase):
-    """Dummy class for calibration."""
+    def _fill_dataarray_attrs(self, data, key, info=None):
+        """Fill the dataarray with relevant attributes."""
+        data.attrs['platform_name'] = self.platform_name
+        data.attrs['sensor'] = self.sensor
+        data.attrs.update(key.to_dict())
+        if info is not None:
+            info = info.copy()
+            for unwanted_key in ["nc_key", "coordinates", "file_type", "name"]:
+                info.pop(unwanted_key, None)
+            data.attrs.update(info)
 
 
 class NCOLCIGeo(NCOLCIBase):
@@ -173,18 +179,13 @@ def __init__(self, filename, filename_info, filetype_info, cal,
         super().__init__(filename, filename_info, filetype_info, engine)
         self.cal = cal.nc
 
-    @staticmethod
-    def _get_items(idx, solar_flux):
-        """Get items."""
-        return solar_flux[idx]
-
     def _get_solar_flux(self, band):
         """Get the solar flux for the band."""
         solar_flux = self.cal['solar_flux'].isel(bands=band).values
         d_index = self.cal['detector_index'].fillna(0).astype(int)
 
-        return da.map_blocks(self._get_items, d_index.data,
-                             solar_flux=solar_flux, dtype=solar_flux.dtype)
+        return da.map_blocks(_take_indices, d_index.data,
+                             data=solar_flux, dtype=solar_flux.dtype)
 
     def get_dataset(self, key, info):
         """Load a dataset."""
@@ -200,12 +201,15 @@ def get_dataset(self, key, info):
             radiances = radiances / sflux * np.pi * 100
             radiances.attrs['units'] = '%'
 
-        radiances.attrs['platform_name'] = self.platform_name
-        radiances.attrs['sensor'] = self.sensor
-        radiances.attrs.update(key.to_dict())
+        self._fill_dataarray_attrs(radiances, key)
         return radiances
 
 
+def _take_indices(idx, data):
+    """Take values from data using idx."""
+    return data[idx]
+
+
 class NCOLCI2(NCOLCIChannelBase):
     """File handler for OLCI l2."""
 
@@ -219,24 +223,44 @@ def get_dataset(self, key, info):
         else:
             dataset = self.nc[info['nc_key']]
 
+        self._fill_dataarray_attrs(dataset, key, info)
+        return dataset
+
+
+class NCOLCI2Flags(NCOLCIChannelBase):
+    """File handler for OLCI l2 flag files.
+
+    A BitFlags instance built from the file's flag metadata is added to the
+    "bitflags" attribute, so masks can still be computed when no masked_items are defined (e.g. for wqsf).
+ """ + + def get_dataset(self, key, info): + """Load a dataset.""" + logger.debug('Reading %s.', key['name']) + dataset = self.nc[info['nc_key']] + self.create_bitflags(dataset) + if key['name'] == 'wqsf': dataset.attrs['_FillValue'] = 1 - elif key['name'] == 'mask': - dataset = self.getbitmask(dataset) + elif "masked_items" in info: + dataset = self.getbitmask(dataset, info["masked_items"]) - dataset.attrs['platform_name'] = self.platform_name - dataset.attrs['sensor'] = self.sensor - dataset.attrs.update(key.to_dict()) + self._fill_dataarray_attrs(dataset, key) return dataset - def getbitmask(self, wqsf, items=None): - """Get the bitmask.""" + def create_bitflags(self, dataset): + """Create the bitflags attribute.""" + bflags = BitFlags(dataset.attrs['flag_masks'], + dataset.attrs['flag_meanings'].split()) + dataset.attrs["bitflags"] = bflags + + def getbitmask(self, dataset, items=None): + """Generate the bitmask.""" if items is None: items = ["INVALID", "SNOW_ICE", "INLAND_WATER", "SUSPECT", "AC_FAIL", "CLOUD", "HISOLZEN", "OCNN_FAIL", "CLOUD_MARGIN", "CLOUD_AMBIGUOUS", "LOWRW", "LAND"] - bflags = BitFlags(wqsf) - return reduce(np.logical_or, [bflags[item] for item in items]) + return dataset.attrs["bitflags"].match_any(items, dataset) class NCOLCILowResData(NCOLCIBase): @@ -253,7 +277,7 @@ def __init__(self, filename, filename_info, filetype_info, self.c_step = self.nc.attrs['ac_subsampling_factor'] def _do_interpolate(self, data): - + """Do the interpolation.""" if not isinstance(data, tuple): data = (data,) @@ -291,7 +315,8 @@ class NCOLCIAngles(NCOLCILowResData): def get_dataset(self, key, info): """Load a dataset.""" - if key['name'] not in self.datasets: + key_name = key['name'] + if key_name not in self.datasets: return logger.debug('Reading %s.', key['name']) @@ -311,12 +336,9 @@ def get_dataset(self, key, info): else: raise NotImplementedError("Don't know how to read " + key['name']) else: - values = self.nc[self.datasets[key['name']]] - - values.attrs['platform_name'] = self.platform_name - values.attrs['sensor'] = self.sensor + values = self.nc[self.datasets[key_name]] - values.attrs.update(key.to_dict()) + self._fill_dataarray_attrs(values, key) return values @cached_property @@ -336,6 +358,7 @@ def satellite_angles(self): return azi, zen def _interpolate_angles(self, azi, zen): + """Interpolate angles.""" aattrs = azi.attrs zattrs = zen.attrs x, y, z = angle2xyz(azi, zen) @@ -383,8 +406,5 @@ def get_dataset(self, key, info): else: values = self.nc[key['name']] - values.attrs['platform_name'] = self.platform_name - values.attrs['sensor'] = self.sensor - - values.attrs.update(key.to_dict()) + self._fill_dataarray_attrs(values, key) return values diff --git a/satpy/tests/reader_tests/test_olci_nc.py b/satpy/tests/reader_tests/test_olci_nc.py index b0196eb3b8..67315e50d2 100644 --- a/satpy/tests/reader_tests/test_olci_nc.py +++ b/satpy/tests/reader_tests/test_olci_nc.py @@ -19,16 +19,28 @@ import unittest import unittest.mock as mock +import numpy as np +from satpy.readers.olci_nc import BitFlags + +flag_list = ['INVALID', 'WATER', 'LAND', 'CLOUD', 'SNOW_ICE', + 'INLAND_WATER', 'TIDAL', 'COSMETIC', 'SUSPECT', 'HISOLZEN', + 'SATURATED', 'MEGLINT', 'HIGHGLINT', 'WHITECAPS', + 'ADJAC', 'WV_FAIL', 'PAR_FAIL', 'AC_FAIL', 'OC4ME_FAIL', + 'OCNN_FAIL', 'Extra_1', 'KDM_FAIL', 'Extra_2', + 'CLOUD_AMBIGUOUS', 'CLOUD_MARGIN', 'BPAC_ON', + 'WHITE_SCATT', 'LOWRW', 'HIGHRW'] + + +@mock.patch('xarray.open_dataset') class TestOLCIReader(unittest.TestCase): """Test various olci_nc 
filehandlers.""" - @mock.patch('xarray.open_dataset') def test_instantiate(self, mocked_dataset): """Test initialization of file handlers.""" import xarray as xr - from satpy.readers.olci_nc import NCOLCI1B, NCOLCI2, NCOLCIBase, NCOLCICal, NCOLCIChannelBase, NCOLCIGeo + from satpy.readers.olci_nc import NCOLCI1B, NCOLCI2, NCOLCIBase, NCOLCIChannelBase, NCOLCIGeo from satpy.tests.utils import make_dataid cal_data = xr.Dataset( @@ -48,11 +60,6 @@ def test_instantiate(self, mocked_dataset): mocked_dataset.assert_called() mocked_dataset.reset_mock() - test = NCOLCICal('somedir/somefile.nc', filename_info, 'c') - test.get_dataset(ds_id, filename_info) - mocked_dataset.assert_called() - mocked_dataset.reset_mock() - test = NCOLCIGeo('somedir/somefile.nc', filename_info, 'c') test.get_dataset(ds_id, filename_info) mocked_dataset.assert_called() @@ -76,7 +83,6 @@ def test_instantiate(self, mocked_dataset): mocked_dataset.assert_called() mocked_dataset.reset_mock() - @mock.patch('xarray.open_dataset') def test_open_file_objects(self, mocked_open_dataset): """Test initialization of file handlers.""" from satpy.readers.olci_nc import NCOLCIBase @@ -92,90 +98,286 @@ def test_open_file_objects(self, mocked_open_dataset): assert (open_file.open.return_value in mocked_open_dataset.call_args[0] or open_file.open.return_value == mocked_open_dataset.call_args[1].get('filename_or_obj')) - @mock.patch('xarray.open_dataset') def test_get_dataset(self, mocked_dataset): """Test reading datasets.""" - import numpy as np + from satpy.tests.utils import make_dataid + fh, _ = self._create_l2_filehandler(mocked_dataset) + ds_id = make_dataid(name='w_aer') + res = fh.get_dataset(ds_id, {'nc_key': 'T865', 'funky_attr': 'JBs'}) + self.assertEqual(res.dtype, np.uint64) + assert res.attrs['funky_attr'] == 'JBs' + assert "nc_key" not in res.attrs.keys() + + def _create_l2_filehandler(self, mocked_dataset): + """Create a filehandle for the l2 data.""" import xarray as xr from satpy.readers.olci_nc import NCOLCI2 + data = xr.DataArray((2 ** (np.arange(30))).astype(np.uint64).reshape(5, 6), + dims=["rows", "columns"], + coords={'rows': np.arange(5), + 'columns': np.arange(6)}) + mocked_dataset.return_value = xr.Dataset({'T865': data}) + filename_info = {'mission_id': 'S3A', 'dataset_name': None, 'start_time': 0, 'end_time': 0} + fh = NCOLCI2('somedir/somefile.nc', filename_info, 'c') + return fh, data + + +@mock.patch('xarray.open_dataset') +class TestOLCI2Flags(unittest.TestCase): + """Test the olci_nc flag filehandler.""" + + def test_get_dataset(self, mocked_dataset): + """Test reading datasets.""" from satpy.tests.utils import make_dataid - mocked_dataset.return_value = xr.Dataset({'mask': (['rows', 'columns'], - np.array([1 << x for x in range(30)]).reshape(5, 6))}, - coords={'rows': np.arange(5), - 'columns': np.arange(6)}) + fh, _ = self._create_wqsf_filehandler(mocked_dataset) + ds_id = make_dataid(name='wqsf') + res = fh.get_dataset(ds_id, {'nc_key': 'WQSF'}) + self.assertEqual(res.dtype, np.uint64) + + def _create_wqsf_filehandler(self, mocked_dataset, meanings="INVALID WATER LAND CLOUD"): + """Create a filehandle for the wqsf quality flags.""" + import xarray as xr + + from satpy.readers.olci_nc import NCOLCI2Flags + nb_flags = len(meanings.split()) + wqsf_data = xr.DataArray((2 ** (np.arange(30) % nb_flags)).astype(np.uint64).reshape(5, 6), + dims=["rows", "columns"], + coords={'rows': np.arange(5), + 'columns': np.arange(6)}, + attrs={"flag_masks": 2 ** np.arange(nb_flags), + "flag_meanings": meanings}) + 
+        mocked_dataset.return_value = xr.Dataset({'WQSF': wqsf_data})
+        filename_info = {'mission_id': 'S3A', 'dataset_name': None, 'start_time': 0, 'end_time': 0}
+        fh = NCOLCI2Flags('somedir/somefile.nc', filename_info, 'c')
+        return fh, wqsf_data
+
+    def test_meanings_are_read_from_file(self, mocked_dataset):
+        """Test that the flag meanings are read from the file."""
+        fh, wqsf_data = self._create_wqsf_filehandler(mocked_dataset)
+        fh.create_bitflags(wqsf_data)
+        res = fh.getbitmask(wqsf_data, ["CLOUD"])
+        np.testing.assert_allclose(res, (np.arange(30) % 4).reshape(5, 6) == 3)
+
+        fh, wqsf_data = self._create_wqsf_filehandler(mocked_dataset, "NOTHING FISH SHRIMP TURTLES")
+        fh.create_bitflags(wqsf_data)
+        res = fh.getbitmask(wqsf_data, ["TURTLES"])
+        np.testing.assert_allclose(res, (np.arange(30) % 4).reshape(5, 6) == 3)
+
+    def test_get_mask(self, mocked_dataset):
+        """Test reading mask datasets."""
+        from satpy.tests.utils import make_dataid
+        fh, wqsf_data = self._create_wqsf_filehandler(mocked_dataset, " ".join(flag_list))
+
+        masked_items = ["INVALID", "SNOW_ICE", "INLAND_WATER", "SUSPECT",
+                        "AC_FAIL", "CLOUD", "HISOLZEN", "OCNN_FAIL",
+                        "CLOUD_MARGIN", "CLOUD_AMBIGUOUS", "LOWRW", "LAND"]
+
         ds_id = make_dataid(name='mask')
-        filename_info = {'mission_id': 'S3A', 'dataset_name': 'mask', 'start_time': 0, 'end_time': 0}
-        test = NCOLCI2('somedir/somefile.nc', filename_info, 'c')
-        res = test.get_dataset(ds_id, {'nc_key': 'mask'})
-        self.assertEqual(res.dtype, np.dtype('bool'))
+        res = fh.get_dataset(ds_id, {'nc_key': 'WQSF', "masked_items": masked_items})
+        self.assertEqual(res.dtype, np.dtype("bool"))
 
-    @mock.patch('xarray.open_dataset')
-    def test_olci_angles(self, mocked_dataset):
-        """Test reading datasets."""
-        import numpy as np
+        expected = np.array([True, False, True, True, True, True, False,
+                             False, True, True, False, False, False, False,
+                             False, False, False, True, False, True, False,
+                             False, False, True, True, False, False, True,
+                             False, True]).reshape(5, 6)
+        np.testing.assert_array_equal(res, expected)
+
+    def test_wqsf_has_bitflags_attribute(self, mocked_dataset):
+        """Test wqsf has a bitflags attribute."""
+        from satpy.tests.utils import make_dataid
+        fh, wqsf_data = self._create_wqsf_filehandler(mocked_dataset, " ".join(flag_list))
+
+        ds_id = make_dataid(name='wqsf')
+        res = fh.get_dataset(ds_id, {'nc_key': 'WQSF'})
+        assert isinstance(res.attrs["bitflags"], BitFlags)
+
+    def test_get_cloud_mask(self, mocked_dataset):
+        """Test reading the cloud_mask dataset."""
+        from satpy.tests.utils import make_dataid
+        fh, wqsf_data = self._create_wqsf_filehandler(mocked_dataset, " ".join(flag_list))
+
+        ds_id = make_dataid(name='cloud_mask')
+        res = fh.get_dataset(ds_id, {'nc_key': 'WQSF', "masked_items": ["CLOUD"]})
+        self.assertEqual(res.dtype, np.dtype("bool"))
+
+        expected = np.array([False, False, False, True, False, False, False,
+                             False, False, False, False, False, False, False,
+                             False, False, False, False, False, False, False,
+                             False, False, False, False, False, False, False,
+                             False, False]).reshape(5, 6)
+        np.testing.assert_array_equal(res, expected)
+
+    def test_get_ocnn_mask(self, mocked_dataset):
+        """Test reading the ocnn_mask dataset."""
+        from satpy.tests.utils import make_dataid
+        fh, wqsf_data = self._create_wqsf_filehandler(mocked_dataset, " ".join(flag_list))
+
+        ds_id = make_dataid(name='ocnn_mask')
+        res = fh.get_dataset(ds_id, {'nc_key': 'WQSF', "masked_items": ["OCNN_FAIL"]})
+        self.assertEqual(res.dtype, np.dtype("bool"))
+
+        expected = np.array([False, False, False, False, False, False, False,
+                             False, False, False, False, False, False, False,
+                             False, False, False, False, False, True, False,
+                             False, False, False, False, False, False, False,
+                             False, False]).reshape(5, 6)
+        np.testing.assert_array_equal(res, expected)
+
+
+class TestOLCIAngles(unittest.TestCase):
+    """Test the angles olci_nc filehandler."""
+
+    def setUp(self):
+        """Set up the test case."""
         import xarray as xr
 
         from satpy.readers.olci_nc import NCOLCIAngles
-        from satpy.tests.utils import make_dataid
 
         attr_dict = {
             'ac_subsampling_factor': 1,
             'al_subsampling_factor': 2,
         }
+
+        self.patcher = mock.patch("xarray.open_dataset")
+        mocked_dataset = self.patcher.start()
+
         mocked_dataset.return_value = xr.Dataset({'SAA': (['tie_rows', 'tie_columns'],
-                                                          np.array([1 << x for x in range(30)]).reshape(5, 6)),
+                                                          np.arange(30).reshape(5, 6)),
                                                   'SZA': (['tie_rows', 'tie_columns'],
-                                                          np.array([1 << x for x in range(30)]).reshape(5, 6)),
+                                                          np.arange(30).reshape(5, 6) + 30),
                                                   'OAA': (['tie_rows', 'tie_columns'],
-                                                          np.array([1 << x for x in range(30)]).reshape(5, 6)),
+                                                          np.arange(30).reshape(5, 6)),
                                                   'OZA': (['tie_rows', 'tie_columns'],
-                                                          np.array([1 << x for x in range(30)]).reshape(5, 6))},
-                                                 coords={'rows': np.arange(5),
-                                                         'columns': np.arange(6)},
+                                                          np.arange(30).reshape(5, 6) + 30)},
+                                                 coords={'tie_rows': np.arange(5),
+                                                         'tie_columns': np.arange(6)},
                                                  attrs=attr_dict)
-        filename_info = {'mission_id': 'S3A', 'dataset_name': 'Oa01', 'start_time': 0, 'end_time': 0}
+        self.filename_info = {'mission_id': 'S3A', 'dataset_name': 'Oa01', 'start_time': 0, 'end_time': 0}
+        self.file_handler = NCOLCIAngles('somedir/somefile.nc', self.filename_info, 'c')
+
+        self.expected_data = np.array([[0, 1, 2, 3, 4, 5],
+                                       [3, 4, 5, 6, 7, 8],
+                                       [6, 7, 8, 9, 10, 11],
+                                       [9, 10, 11, 12, 13, 14],
+                                       [12, 13, 14, 15, 16, 17],
+                                       [15, 16, 17, 18, 19, 20],
+                                       [18, 19, 20, 21, 22, 23],
+                                       [21, 22, 23, 24, 25, 26],
+                                       [24, 25, 26, 27, 28, 29]]
+                                      )
+
+    def test_olci_angles(self):
+        """Test reading angles datasets."""
+        from satpy.tests.utils import make_dataid
+        ds_id_sun_azimuth = make_dataid(name='solar_azimuth_angle')
+        ds_id_sat_zenith = make_dataid(name='satellite_zenith_angle')
 
-        ds_id = make_dataid(name='solar_azimuth_angle')
-        ds_id2 = make_dataid(name='satellite_zenith_angle')
-        test = NCOLCIAngles('somedir/somefile.nc', filename_info, 'c')
-        test.get_dataset(ds_id, filename_info)
-        test.get_dataset(ds_id2, filename_info)
-        mocked_dataset.assert_called()
-        mocked_dataset.reset_mock()
+        azi = self.file_handler.get_dataset(ds_id_sun_azimuth, self.filename_info)
+        zen = self.file_handler.get_dataset(ds_id_sat_zenith, self.filename_info)
+        np.testing.assert_allclose(azi, self.expected_data, atol=0.5)
+        np.testing.assert_allclose(zen, self.expected_data + 30, atol=0.5)
 
-    @mock.patch('xarray.open_dataset')
-    def test_olci_meteo(self, mocked_dataset):
-        """Test reading datasets."""
-        import numpy as np
+    def test_olci_angles_caches_interpolation(self):
+        """Test reading angles datasets caches interpolation."""
+        from satpy.tests.utils import make_dataid
+
+        ds_id = make_dataid(name='solar_zenith_angle')
+        self._check_interpolator_is_called_only_once(ds_id, ds_id)
+
+    def test_olci_different_angles_caches_interpolation(self):
+        """Test reading different angles datasets caches interpolation."""
+        from satpy.tests.utils import make_dataid
+
+        ds_id_zenith = make_dataid(name='solar_zenith_angle')
+        ds_id_azimuth = make_dataid(name='solar_azimuth_angle')
+        self._check_interpolator_is_called_only_once(ds_id_azimuth, ds_id_zenith)
+
+    def _check_interpolator_is_called_only_once(self, ds_id_1, ds_id_2):
+        """Check that the interpolation is used only once."""
+        with mock.patch("geotiepoints.interpolator.Interpolator") as interpolator:
+            interpolator.return_value.interpolate.return_value = (
+                self.expected_data, self.expected_data, self.expected_data)
+
+            self.file_handler.get_dataset(ds_id_2, self.filename_info)
+            self.file_handler.get_dataset(ds_id_1, self.filename_info)
+            assert interpolator.call_count == 1
+
+    def tearDown(self):
+        """Tear down the test case."""
+        self.patcher.stop()
+
+
+class TestOLCIMeteo(unittest.TestCase):
+    """Test the meteo olci_nc filehandler."""
+
+    def setUp(self):
+        """Set up the test case."""
         import xarray as xr
 
         from satpy.readers.olci_nc import NCOLCIMeteo
-        from satpy.tests.utils import make_dataid
 
         attr_dict = {
             'ac_subsampling_factor': 1,
             'al_subsampling_factor': 2,
         }
         data = {'humidity': (['tie_rows', 'tie_columns'],
-                             np.array([1 << x for x in range(30)]).reshape(5, 6)),
+                             np.arange(30).reshape(5, 6)),
                 'total_ozone': (['tie_rows', 'tie_columns'],
-                                np.array([1 << x for x in range(30)]).reshape(5, 6)),
+                                np.arange(30).reshape(5, 6)),
                 'sea_level_pressure': (['tie_rows', 'tie_columns'],
-                                       np.array([1 << x for x in range(30)]).reshape(5, 6)),
+                                       np.arange(30).reshape(5, 6)),
                 'total_columnar_water_vapour': (['tie_rows', 'tie_columns'],
-                                                np.array([1 << x for x in range(30)]).reshape(5, 6))}
+                                                np.arange(30).reshape(5, 6))}
+
+        self.patcher = mock.patch("xarray.open_dataset")
+        mocked_dataset = self.patcher.start()
+
         mocked_dataset.return_value = xr.Dataset(data,
                                                  coords={'rows': np.arange(5),
                                                          'columns': np.arange(6)},
                                                  attrs=attr_dict)
-        filename_info = {'mission_id': 'S3A', 'dataset_name': 'humidity', 'start_time': 0, 'end_time': 0}
+        self.filename_info = {'mission_id': 'S3A', 'dataset_name': 'humidity', 'start_time': 0, 'end_time': 0}
+        self.file_handler = NCOLCIMeteo('somedir/somefile.nc', self.filename_info, 'c')
+
+        self.expected_data = np.array([[0, 1, 2, 3, 4, 5],
+                                       [3, 4, 5, 6, 7, 8],
+                                       [6, 7, 8, 9, 10, 11],
+                                       [9, 10, 11, 12, 13, 14],
+                                       [12, 13, 14, 15, 16, 17],
+                                       [15, 16, 17, 18, 19, 20],
+                                       [18, 19, 20, 21, 22, 23],
+                                       [21, 22, 23, 24, 25, 26],
+                                       [24, 25, 26, 27, 28, 29]]
+                                      )
+
+    def tearDown(self):
+        """Tear down the test case."""
+        self.patcher.stop()
+
+    def test_olci_meteo_reading(self):
+        """Test reading meteo datasets."""
+        from satpy.tests.utils import make_dataid
+
+        ds_id_humidity = make_dataid(name='humidity')
+        ds_id_total_ozone = make_dataid(name='total_ozone')
+
+        humidity = self.file_handler.get_dataset(ds_id_humidity, self.filename_info)
+        total_ozone = self.file_handler.get_dataset(ds_id_total_ozone, self.filename_info)
+
+        np.testing.assert_allclose(humidity, self.expected_data, atol=1e-10)
+        np.testing.assert_allclose(total_ozone, self.expected_data, atol=1e-10)
+
+    def test_olci_meteo_caches_interpolation(self):
+        """Test reading meteo datasets caches interpolation."""
+        from satpy.tests.utils import make_dataid
 
         ds_id = make_dataid(name='humidity')
-        ds_id2 = make_dataid(name='total_ozone')
-        test = NCOLCIMeteo('somedir/somefile.nc', filename_info, 'c')
-        test.get_dataset(ds_id, filename_info)
-        test.get_dataset(ds_id2, filename_info)
-        mocked_dataset.assert_called()
-        mocked_dataset.reset_mock()
+        with mock.patch("geotiepoints.interpolator.Interpolator") as interpolator:
+            interpolator.return_value.interpolate.return_value = (self.expected_data, )
+
+            self.file_handler.get_dataset(ds_id, self.filename_info)
+            self.file_handler.get_dataset(ds_id, self.filename_info)
+            assert interpolator.call_count == 1
 
 
 class TestBitFlags(unittest.TestCase):
@@ -183,31 +385,52 @@ class TestBitFlags(unittest.TestCase):
 
     def test_bitflags(self):
         """Test the BitFlags class."""
-        from functools import reduce
-
-        import numpy as np
+        nb_flags = len(flag_list)
 
-        from satpy.readers.olci_nc import BitFlags
-        flag_list = ['INVALID', 'WATER', 'LAND', 'CLOUD', 'SNOW_ICE',
-                     'INLAND_WATER', 'TIDAL', 'COSMETIC', 'SUSPECT', 'HISOLZEN',
-                     'SATURATED', 'MEGLINT', 'HIGHGLINT', 'WHITECAPS',
-                     'ADJAC', 'WV_FAIL', 'PAR_FAIL', 'AC_FAIL', 'OC4ME_FAIL',
-                     'OCNN_FAIL', 'Extra_1', 'KDM_FAIL', 'Extra_2',
-                     'CLOUD_AMBIGUOUS', 'CLOUD_MARGIN', 'BPAC_ON',
-                     'WHITE_SCATT', 'LOWRW', 'HIGHRW']
+        # As a test, the data is just an array with the possible masks
+        data = 2 ** np.arange(nb_flags)
+        masks = 2 ** np.arange(nb_flags)
 
-        bits = np.array([1 << x for x in range(len(flag_list))])
-
-        bflags = BitFlags(bits)
+        bflags = BitFlags(masks, flag_list)
 
         items = ["INVALID", "SNOW_ICE", "INLAND_WATER", "SUSPECT",
                  "AC_FAIL", "CLOUD", "HISOLZEN", "OCNN_FAIL",
                  "CLOUD_MARGIN", "CLOUD_AMBIGUOUS", "LOWRW", "LAND"]
-        mask = reduce(np.logical_or, [bflags[item] for item in items])
+        mask = bflags.match_any(items, data)
         expected = np.array([True, False, True, True, True, True, False,
                              False, True, True, False, False, False, False,
                              False, False, False, True, False, True, False,
                              False, False, True, True, False, False, True,
                              False])
-        self.assertTrue(all(mask == expected))
+        np.testing.assert_array_equal(mask, expected)
+
+    def test_match_item(self):
+        """Test matching one item."""
+        nb_flags = len(flag_list)
+
+        # As a test, the data is just an array with the possible masks
+        data = 2 ** np.arange(nb_flags)
+        masks = 2 ** np.arange(nb_flags)
+
+        bflags = BitFlags(masks, flag_list)
+        mask = bflags.match_item("INVALID", data)
+        expected = np.array([True, False, False, False, False, False, False,
+                             False, False, False, False, False, False, False,
+                             False, False, False, False, False, False, False,
+                             False, False, False, False, False, False, False,
+                             False])
+        np.testing.assert_array_equal(mask, expected)
+
+    def test_equality(self):
+        """Test equality."""
+        nb_flags = len(flag_list)
+
+        # As a test, the data is just an array with the possible masks
+        masks = 2 ** np.arange(nb_flags)
+
+        one = BitFlags(masks, flag_list)
+
+        two = BitFlags(masks, flag_list)
+
+        assert one == two
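Note (not part of the patch): a hedged sketch of using the "bitflags" attribute that NCOLCI2Flags attaches to the raw wqsf dataset, to build a custom mask beyond the predefined cloud_mask and ocnn_mask. The file path is a placeholder and the chosen flag names are only an example of what the product's flag_meanings may contain.

    from glob import glob

    from satpy import Scene

    scn = Scene(filenames=glob("/data/S3A_OL_2_WFR____*.SEN3/*.nc"), reader="olci_l2")
    scn.load(["wqsf", "chl_nn"])
    wqsf = scn["wqsf"]

    # match_any works directly on the flag array and returns a boolean mask.
    glint_or_cloud = wqsf.attrs["bitflags"].match_any(["CLOUD", "CLOUD_MARGIN", "HIGHGLINT"], wqsf)
    chl_screened = scn["chl_nn"].where(~glint_or_cloud)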