Skip to content

Commit

Permalink
HIT L1B CDF Creation for SIT-3 (#616)
Browse files Browse the repository at this point in the history
* Create hit l1b processing for SIT-3. Will need to complete this work once sample data is available. Update fill_val for cdf attrs with float values.

* Add l1b processing to HIT section in cli.py

* Add tests for hit housekeeping l1b product

* Update imap_processing/hit/l1b/hit_l1b.py

simplify line of code

Co-authored-by: Greg Lucas <greg.m.lucas@gmail.com>

* Update imap_processing/hit/l1b/hit_l1b.py

simplify creating array of 1 values using np.ones()

Co-authored-by: Greg Lucas <greg.m.lucas@gmail.com>

* Remove path to cdf file and instead call l1a function to create the cdf file needed for l1b processing

* Update shape of adc_channels and leak_i. Use np.ones() to create arrays with one values.

---------

Co-authored-by: Greg Lucas <greg.m.lucas@gmail.com>
  • Loading branch information
vmartinez-cu and greglucas authored Jun 13, 2024
1 parent aeac599 commit d8d25d8
Show file tree
Hide file tree
Showing 4 changed files with 253 additions and 0 deletions.
12 changes: 12 additions & 0 deletions imap_processing/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@
from imap_processing.hi.l1a import hi_l1a
from imap_processing.hi.l1b import hi_l1b
from imap_processing.hit.l1a.hit_l1a import hit_l1a
from imap_processing.hit.l1b.hit_l1b import hit_l1b
from imap_processing.idex.idex_packet_parser import PacketParser
from imap_processing.lo.l1a import lo_l1a
from imap_processing.lo.l1b import lo_l1b
Expand Down Expand Up @@ -440,6 +441,17 @@ def do_processing(self, dependencies):
products = hit_l1a(dependencies[0], self.version)
return products

elif self.data_level == "l1b":
if len(dependencies) > 1:
raise ValueError(
f"Unexpected dependencies found for HIT L1B:"
f"{dependencies}. Expected only one dependency."
)
# process data and write all processed data to CDF files
l1a_dataset = load_cdf(dependencies[0])
products = hit_l1b(l1a_dataset)
return products


class Idex(ProcessInstrument):
"""Process IDEX."""
Expand Down
24 changes: 24 additions & 0 deletions imap_processing/hit/hit_cdf_attrs.py
Original file line number Diff line number Diff line change
Expand Up @@ -603,6 +603,7 @@
label_axis="Preamp L234A V",
units="V",
format="I4",
fill_val=GlobalConstants.DOUBLE_FILLVAL,
),
"preamp_l1a": replace(
hit_hk_base_attrs,
Expand All @@ -612,6 +613,7 @@
label_axis="Preamp L1A V",
units="V",
format="I4",
fill_val=GlobalConstants.DOUBLE_FILLVAL,
),
"preamp_l1b": replace(
hit_hk_base_attrs,
Expand All @@ -621,6 +623,7 @@
label_axis="Preamp L1B V",
units="V",
format="I4",
fill_val=GlobalConstants.DOUBLE_FILLVAL,
),
"preamp_l234b": replace(
hit_hk_base_attrs,
Expand All @@ -630,6 +633,7 @@
label_axis="Preamp L234B V",
units="V",
format="I2",
fill_val=GlobalConstants.DOUBLE_FILLVAL,
),
"temp0": replace(
hit_hk_base_attrs,
Expand All @@ -641,6 +645,7 @@
label_axis="Temp",
units="C",
format="I2",
fill_val=GlobalConstants.DOUBLE_FILLVAL,
),
"temp1": replace(
hit_hk_base_attrs,
Expand All @@ -652,6 +657,7 @@
label_axis="Temp",
units="C",
format="I2",
fill_val=GlobalConstants.DOUBLE_FILLVAL,
),
"temp2": replace(
hit_hk_base_attrs,
Expand All @@ -662,6 +668,7 @@
label_axis="Temp",
units="C",
format="I2",
fill_val=GlobalConstants.DOUBLE_FILLVAL,
),
"temp3": replace(
hit_hk_base_attrs,
Expand All @@ -672,6 +679,7 @@
label_axis="Temp",
units="C",
format="I2",
fill_val=GlobalConstants.DOUBLE_FILLVAL,
),
"analog_temp": replace(
hit_hk_base_attrs,
Expand All @@ -683,6 +691,7 @@
label_axis="Temp",
units="C",
format="I2",
fill_val=GlobalConstants.DOUBLE_FILLVAL,
),
"hvps_temp": replace(
hit_hk_base_attrs,
Expand All @@ -694,6 +703,7 @@
label_axis="Temp",
units="C",
format="I2",
fill_val=GlobalConstants.DOUBLE_FILLVAL,
),
"idpu_temp": replace(
hit_hk_base_attrs,
Expand All @@ -704,6 +714,7 @@
label_axis="Temp",
units="C",
format="I2",
fill_val=GlobalConstants.DOUBLE_FILLVAL,
),
"lvps_temp": replace(
hit_hk_base_attrs,
Expand All @@ -715,6 +726,7 @@
label_axis="Temp",
units="C",
format="I2",
fill_val=GlobalConstants.DOUBLE_FILLVAL,
),
"ebox_3d4vd": replace(
hit_hk_base_attrs,
Expand All @@ -725,6 +737,7 @@
label_axis="3.4VD Ebox",
units="V",
format="F2.1",
fill_val=GlobalConstants.DOUBLE_FILLVAL,
),
"ebox_5d1vd": replace(
hit_hk_base_attrs,
Expand All @@ -735,6 +748,7 @@
label_axis="5.1VD Ebox",
units="V",
format="F3.2",
fill_val=GlobalConstants.DOUBLE_FILLVAL,
),
"ebox_p12va": replace(
hit_hk_base_attrs,
Expand All @@ -745,6 +759,7 @@
label_axis="+12VA Ebox",
units="V",
format="F3.1",
fill_val=GlobalConstants.DOUBLE_FILLVAL,
),
"ebox_m12va": replace(
hit_hk_base_attrs,
Expand All @@ -755,6 +770,7 @@
label_axis="-12VA Ebox",
units="V",
format="F3.1",
fill_val=GlobalConstants.DOUBLE_FILLVAL,
),
"ebox_p5d7va": replace(
hit_hk_base_attrs,
Expand All @@ -765,6 +781,7 @@
label_axis="+5.7VA Ebox",
units="V",
format="F2.1",
fill_val=GlobalConstants.DOUBLE_FILLVAL,
),
"ebox_m5d7va": replace(
hit_hk_base_attrs,
Expand All @@ -775,6 +792,7 @@
label_axis="-5.7VA Ebox",
units="V",
format="F5.4",
fill_val=GlobalConstants.DOUBLE_FILLVAL,
),
"ref_p5v": replace(
hit_hk_base_attrs,
Expand All @@ -785,6 +803,7 @@
label_axis="+5V ref",
units="V",
format="I4",
fill_val=GlobalConstants.DOUBLE_FILLVAL,
),
"l1ab_bias": replace(
hit_hk_base_attrs,
Expand All @@ -795,6 +814,7 @@
label_axis="L1A/B Bias",
units="V",
format="I2",
fill_val=GlobalConstants.DOUBLE_FILLVAL,
),
"l2ab_bias": replace(
hit_hk_base_attrs,
Expand All @@ -805,6 +825,7 @@
label_axis="L2A/B Bias",
units="V",
format="I2",
fill_val=GlobalConstants.DOUBLE_FILLVAL,
),
"l34a_bias": replace(
hit_hk_base_attrs,
Expand All @@ -815,6 +836,7 @@
label_axis="L3/4A Bias",
units="V",
format="I3",
fill_val=GlobalConstants.DOUBLE_FILLVAL,
),
"l34b_bias": replace(
hit_hk_base_attrs,
Expand All @@ -825,6 +847,7 @@
label_axis="L3/4B Bias",
units="V",
format="I3",
fill_val=GlobalConstants.DOUBLE_FILLVAL,
),
"ebox_p2d0vd": replace(
hit_hk_base_attrs,
Expand All @@ -835,6 +858,7 @@
label_axis="+2.0VD Ebox",
units="V",
format="F3.2",
fill_val=GlobalConstants.DOUBLE_FILLVAL,
),
}

Expand Down
171 changes: 171 additions & 0 deletions imap_processing/hit/l1b/hit_l1b.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,171 @@
"""IMAP-HIT L1B data processing."""

import logging
from dataclasses import fields

import numpy as np
import xarray as xr

from imap_processing import utils
from imap_processing.cdf.global_attrs import ConstantCoordinates
from imap_processing.cdf.utils import write_cdf
from imap_processing.hit import hit_cdf_attrs
from imap_processing.hit.l0.data_classes.housekeeping import Housekeeping

logger = logging.getLogger(__name__)

# TODO review logging levels to use (debug vs. info)


def hit_l1b(l1a_dataset: xr.Dataset):
    """Process HIT data to L1B.

    Processes the L1A dataset into one or more L1B products and writes
    each product to its own CDF file.

    Parameters
    ----------
    l1a_dataset : xarray.Dataset
        L1A data.

    Returns
    -------
    list
        Paths to the L1B CDF files that were created.
    """
    # TODO: Check for type of L1A dataset and determine what L1B products to make
    # Need more info from instrument teams. Work with housekeeping data for now
    logical_source = "imap_hit_l1b_hk"

    # Create datasets
    datasets = []
    if "_hk" in logical_source:
        dataset = create_hk_dataset()
        datasets.append(dataset)
    elif "_sci" in logical_source:
        # process science data. placeholder for future code
        pass

    # Write each processed dataset to its own CDF file
    logger.info("Creating CDF files for HIT L1B data")
    cdf_filepaths = [write_cdf(dataset) for dataset in datasets]
    logger.info(f"L1B CDF files created: {cdf_filepaths}")
    return cdf_filepaths


# TODO: This is going to work differently when we have sample data
def create_hk_dataset():
    """Create a housekeeping dataset.

    Builds an L1B housekeeping dataset populated with placeholder values
    (ones) for every housekeeping data field, shaped by the epoch and ADC
    channel dimensions.

    Returns
    -------
    xr.dataset
        Dataset with all data product fields in xr.DataArray.
    """
    logger.info("Creating datasets for HIT L1B data")

    # TODO: TEMPORARY. Need to update to use the L1B data class once that exists.
    # Using l1a housekeeping data class for now since l1b housekeeping has the
    # same data fields
    data_fields = fields(Housekeeping)

    # TODO define keys to skip. This will change later.
    skip_keys = [
        "shcoarse",
        "ground_sw_version",
        "packet_file_name",
        "ccsds_header",
        "leak_i_raw",
    ]

    # Fields stored as per-epoch float arrays (analog readings converted
    # to engineering units); everything else defaults to per-epoch ints.
    float_fields = [
        "preamp_l234a",
        "preamp_l1a",
        "preamp_l1b",
        "preamp_l234b",
        "temp0",
        "temp1",
        "temp2",
        "temp3",
        "analog_temp",
        "hvps_temp",
        "idpu_temp",
        "lvps_temp",
        "ebox_3d4vd",
        "ebox_5d1vd",
        "ebox_p12va",
        "ebox_m12va",
        "ebox_p5d7va",
        "ebox_m5d7va",
        "ref_p5v",
        "l1ab_bias",
        "l2ab_bias",
        "l34a_bias",
        "l34b_bias",
        "ebox_p2d0vd",
    ]

    # Create fake data for now

    # Shape for dims (single source of truth for all arrays below)
    n_epoch = 3
    n_channels = 64

    # Convert integers into datetime64[s]
    epoch_converted_time = [utils.calc_start_time(time) for time in range(n_epoch)]

    # Create xarray data arrays for dependencies
    epoch_time = xr.DataArray(
        data=epoch_converted_time,
        name="epoch",
        dims=["epoch"],
        attrs=ConstantCoordinates.EPOCH,
    )

    adc_channels = xr.DataArray(
        np.arange(n_channels, dtype=np.uint16),
        name="adc_channels",
        dims=["adc_channels"],
        attrs=hit_cdf_attrs.l1b_hk_attrs["adc_channels"].output(),
    )

    # Create xarray dataset
    hk_dataset = xr.Dataset(
        coords={"epoch": epoch_time, "adc_channels": adc_channels},
        attrs=hit_cdf_attrs.hit_hk_l1b_attrs.output(),
    )

    # Create xarray data array for each data field
    for data_field in data_fields:
        field = data_field.name.lower()
        if field not in skip_keys:
            field_attrs = hit_cdf_attrs.l1b_hk_attrs[field].output()
            # Create a list of all the dimensions using the DEPEND_I keys in the
            # attributes
            dims = [value for key, value in field_attrs.items() if "DEPEND" in key]

            # TODO: This is temporary.
            # The data will be set in the data class when that's created
            if field == "leak_i":
                # 2D array - needs two dims
                data = np.ones((n_epoch, n_channels), dtype=np.uint16)
            elif field in float_fields:
                data = np.ones(n_epoch, dtype=np.float16)
            else:
                # Plain Python ints -> default integer dtype
                data = [1] * n_epoch
            hk_dataset[field] = xr.DataArray(data, dims=dims, attrs=field_attrs)

    logger.info("HIT L1B datasets created")
    return hk_dataset
Loading

0 comments on commit d8d25d8

Please sign in to comment.