From 83e9ed064b9c7a342c42e435d7dcd7dd140b3611 Mon Sep 17 00:00:00 2001 From: sateesh Date: Tue, 1 Dec 2020 19:38:03 +0530 Subject: [PATCH] Create lma_l2.py lma flash extend 2d --- satpy/readers/lma_l2.py | 158 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 158 insertions(+) create mode 100644 satpy/readers/lma_l2.py diff --git a/satpy/readers/lma_l2.py b/satpy/readers/lma_l2.py new file mode 100644 index 0000000000..b3085d1c1a --- /dev/null +++ b/satpy/readers/lma_l2.py @@ -0,0 +1,158 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# Copyright (c) 2019 Satpy developers +# +# This file is part of Satpy. +# +# Satpy is free software: you can redistribute it and/or modify it under the +# terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# Satpy is distributed in the hope that it will be useful, but WITHOUT ANY +# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR +# A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# Satpy. If not, see . 
"""LMA flash extent 2d data."""

import logging

import numpy as np
import xarray as xr
from pyresample.geometry import AreaDefinition

from satpy.readers.netcdf_utils import NetCDF4FileHandler, netCDF4

logger = logging.getLogger(__name__)


class LMAflashextent2dFileHandler(NetCDF4FileHandler):
    """NetCDF4 reader for LMA L2 (FED-2d)."""

    def __init__(self, filename, filename_info, filetype_info):
        """Initialize the reader via the shared NetCDF4 helper."""
        super().__init__(filename, filename_info, filetype_info)

    def available_datasets(self, configured_datasets=None):
        """Get datasets in file matching geolocation shape (lat/lon).

        Yields ``(is_available, ds_info)`` tuples.  First re-yields datasets
        previously configured (by YAML or other handlers), then offers every
        variable in the file whose shape matches the geolocation variables.
        """
        # NOTE(review): ``file_content.get('latitude')`` returns the variable
        # entry itself, not its shape -- the tuple comparison below only
        # matches if these behave like dimension sizes.  TODO confirm whether
        # 'latitude/shape' was intended.
        lat_shape = self.file_content.get('latitude')
        lon_shape = self.file_content.get('longitude')

        # Variables already claimed, so the discovery loop below skips them.
        handled_variables = set()

        # Update previously configured datasets.
        logger.debug("Starting previously configured variables loop...")
        for is_avail, ds_info in (configured_datasets or []):
            if is_avail is not None:
                # Some other file handler already knows how to load this;
                # yield it unchanged and move on.  Without this ``continue``
                # the dataset could be yielded a second time below.
                yield is_avail, ds_info
                continue

            var_name = ds_info.get('file_key', ds_info['name'])
            matches = self.file_type_matches(ds_info['file_type'])
            if matches and var_name in self:
                # We can confidently provide this dataset.
                logger.debug("Handling previously configured variable: %s", var_name)
                handled_variables.add(var_name)
                # Copy so we don't mutate the caller's ds_info.
                new_info = ds_info.copy()
                yield True, new_info
            else:
                # We don't know how to handle this dataset either; keep it
                # going down the chain for another handler to claim.
                yield is_avail, ds_info

        # Iterate over dataset contents and offer every 2d variable whose
        # shape matches the geolocation arrays.
        for var_name, val in self.file_content.items():
            # Only evaluate real netCDF variables (file_content also holds
            # attributes and shape entries).
            if not isinstance(val, netCDF4.Variable):
                continue
            logger.debug("Evaluating new variable: %s", var_name)
            var_shape = self[var_name + "/shape"]
            logger.debug("Dims:{}".format(var_shape))
            if var_shape != (lat_shape, lon_shape):
                continue
            logger.debug("Found valid additional dataset: %s", var_name)
            if var_name in handled_variables:
                # Skip anything we have already configured above.
                logger.debug("Already handled, skipping: %s", var_name)
                continue
            handled_variables.add(var_name)
            # Minimal ds_info for a dynamically discovered variable.
            new_info = {
                'name': var_name,
                'file_key': var_name,
                'file_type': self.filetype_info['file_type'],
            }
            logger.debug(var_name)
            yield True, new_info

    def get_dataset(self, ds_id, info):
        """Load the dataset designated by ``ds_id`` from the file.

        The data is flipped north-up (``flipud``) to match the area
        definition produced by :meth:`get_area_def`.
        """
        logger.debug("Getting data for: %s", ds_id['name'])
        file_key = info.get('file_key', ds_id['name'])
        data = np.flipud(self[file_key])
        data = xr.DataArray(data, dims=['y', 'x'])
        data.attrs = self.get_metadata(data, info)

        # BUGFIX: DataArray.rename is not in-place; the returned object must
        # be reassigned or the rename silently does nothing.
        if 'lon' in data.dims:
            data = data.rename({'lon': 'x'})
        if 'lat' in data.dims:
            data = data.rename({'lat': 'y'})
        return data

    def get_area_def(self, dsid):
        """Flip data up/down and define an equirectangular AreaDefinition.

        Builds the lon/lat corner points from the 1-d ``lonArr``/``latArr``
        coordinate variables and returns a WGS84 lon/lat AreaDefinition.
        """
        flip_lat = np.flipud(self['latArr'])
        latlon = np.meshgrid(self['lonArr'], flip_lat)

        width = self['lonArr/shape'][0]
        height = self['latArr/shape'][0]

        # Corners of the (flipped) grid: row height-1 is the southern edge,
        # row 0 the northern edge.
        lower_left_x = latlon[0][height - 1][0]
        lower_left_y = latlon[1][height - 1][0]

        upper_right_y = latlon[1][0][width - 1]
        upper_right_x = latlon[0][0][width - 1]

        area_extent = (lower_left_x, lower_left_y, upper_right_x, upper_right_y)
        description = "Trail data Projection"
        area_id = 'lma'
        proj_id = 'equirectangular'
        proj_dict = {'proj': 'longlat', 'datum': 'WGS84', 'ellps': 'WGS84', }
        area_def = AreaDefinition(area_id, description, proj_id, proj_dict,
                                  width, height, area_extent, )
        return area_def

    def get_metadata(self, data, info):
        """Merge variable attrs, dataset info, and file-level metadata."""
        metadata = {}
        metadata.update(data.attrs)
        metadata.update(info)
        # NOTE(review): 'aggregated microwave' looks like a copy-paste from a
        # microwave reader -- confirm the intended platform name for LMA.
        metadata.update({
            'platform_shortname': 'aggregated microwave',
            'sensor': 'lma_l2',
            'start_time': self.start_time,
            'end_time': self.end_time,
        })

        return metadata

    @property
    def start_time(self):
        """Start timestamp of the dataset determined from the filename."""
        return self.filename_info['start_time']

    @property
    def end_time(self):
        """End timestamp of the dataset; falls back to start_time."""
        return self.filename_info.get('end_time', self.start_time)

    @property
    def sensor_name(self):
        """Sensor name read from the file's ``sensor`` entry."""
        return self["sensor"]