diff --git a/imap_processing/cdf_utils.py b/imap_processing/cdf_utils.py
new file mode 100644
index 000000000..c71791d5b
--- /dev/null
+++ b/imap_processing/cdf_utils.py
@@ -0,0 +1,102 @@
+import os
+
+import numpy as np
+import xarray as xr
+from cdflib.xarray import xarray_to_cdf
+
+# Recommended FILLVAL for all integers
+INT_FILLVAL = np.iinfo(np.int64).min
+# Recommended FILLVALL for all floats
+DOUBLE_FILLVAL = np.float64(-1.0e31)
+# Recommended min/max Epoch based on MMS approved values
+MIN_EPOCH = -315575942816000000
+MAX_EPOCH = 946728069183000000
+
+global_base = {
+ "Project": "STP>Solar-Terrestrial Physics",
+ "Source_name": "IMAP>Interstellar Mapping and Acceleration Probe",
+ "Discipline": "Solar Physics>Heliospheric Physics",
+ "PI_name": "Dr. David J. McComas",
+ "PI_affiliation": [
+ "Princeton Plasma Physics Laboratory",
+ "100 Stellarator Road, Princeton, NJ 08540",
+ ],
+ "Instrument_type": "Particles (space)",
+ "Mission_group": "IMAP>Interstellar Mapping and Acceleration Probe",
+}
+
+epoch_attrs = {
+ "CATDESC": "Default time",
+ "FIELDNAM": "Epoch",
+ "FILLVAL": INT_FILLVAL,
+ "FORMAT": "a2",
+ "LABLAXIS": "Epoch",
+ "UNITS": "ns",
+ "VALIDMIN": MIN_EPOCH,
+ "VALIDMAX": MAX_EPOCH,
+ "VAR_TYPE": "support_data",
+ "SCALETYP": "linear",
+ "MONOTON": "INCREASE",
+ "TIME_BASE": "J2000",
+ "TIME_SCALE": "Terrestrial Time",
+ "REFERENCE_POSITION": "Rotating Earth Geoid",
+}
+
+
+def write_cdf(data: xr.Dataset, description: str = "", directory: str = ""):
+ """Write the contents of "data" to a CDF file using cdflib.xarray_to_cdf.
+
+ This function determines the file name to use from the global attributes,
+    fills in the final attributes, and converts the whole dataset to a CDF.
+ The date in the file name is determined by the time of the first Epoch in the
+ xarray Dataset. The first 3 file name fields (mission, instrument, level) are
+    determined by the "Logical_source" attribute. The version is determined from
+ "Data_version".
+
+ Parameters
+ ----------
+ data (xarray.Dataset): The dataset object to convert to a CDF
+ description (str): The description to insert into the file name after the
+ orbit, before the SPICE field. No underscores allowed.
+ directory (str): The directory to write the file to
+
+ Returns
+ -------
+ str
+ The name of the file created
+ """
+ # Determine the start date of the data in the file,
+ # based on the time of the first dust impact
+ file_start_date = data["Epoch"][0].data
+ date_string = np.datetime_as_string(file_start_date, unit="D").replace("-", "")
+
+ # Determine the optional "description" field
+ description = (
+ description
+ if (description.startswith("_") or not description)
+ else f"_{description}"
+ )
+
+ # Determine the file name based on the attributes in the xarray
+ filename = (
+ data.attrs["Logical_source"]
+ + "_"
+ + date_string
+ + description
+ + f"_v{data.attrs['Data_version']}.cdf"
+ )
+ filename_and_path = os.path.join(directory, filename)
+
+ # Insert the final attribute:
+ # The Logical_file_id is always the name of the file without the extension
+ data.attrs["Logical_file_id"] = filename.split(".")[0]
+
+ # Convert the xarray object to a CDF
+ xarray_to_cdf(
+ data,
+ filename_and_path,
+ datetime64_to_cdftt2000=True,
+ terminate_on_warning=True,
+ ) # Terminate if not ISTP compliant
+
+ return filename_and_path
diff --git a/imap_processing/idex/__init__.py b/imap_processing/idex/__init__.py
index e69de29bb..2c2ff0704 100644
--- a/imap_processing/idex/__init__.py
+++ b/imap_processing/idex/__init__.py
@@ -0,0 +1,2 @@
+# Set IDEX software version here
+__version__ = "01"
diff --git a/imap_processing/idex/idex_cdf_attrs.py b/imap_processing/idex/idex_cdf_attrs.py
new file mode 100644
index 000000000..fbd8136fa
--- /dev/null
+++ b/imap_processing/idex/idex_cdf_attrs.py
@@ -0,0 +1,173 @@
+from imap_processing import cdf_utils
+from imap_processing.idex import __version__
+
+# Valid min/maxes
+
+# Data is in a 12 bit unsigned INT
+DATA_MIN = 0 # It could go down to 0 in theory
+DATA_MAX = 4096 # It cannot exceed 4096 (2^12)
+
+# Samples span 130 microseconds at the most, and values are allowed to be negative
+SAMPLE_RATE_MIN = -130 # All might be negative
+SAMPLE_RATE_MAX = 130 # All might be positive
+
+# Global Attributes
+idex_global_base = {
+ "Data_type": "L1>Level-1",
+ "Data_version": __version__,
+ "Descriptor": "IDEX>Interstellar Dust Experiment",
+ "TEXT": (
+ "The Interstellar Dust Experiment (IDEX) is a time-of-flight (TOF) "
+ "dust impact ionization mass spectrometer on the IMAP mission that "
+ "provides the elemental composition, speed, and mass distributions "
+ "of interstellar dust and interplanetary dust particles. Each record "
+ "contains the data from a single dust impact. See "
+ "https://imap.princeton.edu/instruments/idex for more details."
+ ),
+ "Logical_file_id": "FILL ME IN AT FILE CREATION",
+} | cdf_utils.global_base
+
+idex_l1_global_attrs = idex_global_base | {
+    "Data_type": "L1>Level-1",
+    "Logical_source": "imap_idex_l1",
+    "Logical_source_description": "IMAP Mission IDEX Instrument Level-1 Data.",
+}
+
+idex_l2_global_attrs = idex_global_base | {
+    "Data_type": "L2>Level-2",
+    "Logical_source": "imap_idex_l2",
+    "Logical_source_description": "IMAP Mission IDEX Instrument Level-2 Data",
+}
+
+# L1 variables base dictionaries
+# (these need to be filled in by the variable dictionaries below)
+l1_data_base = {
+ "DEPEND_0": "Epoch",
+ "DISPLAY_TYPE": "spectrogram",
+ "FILLVAL": cdf_utils.INT_FILLVAL,
+ "FORMAT": "I12",
+ "UNITS": "dN",
+ "VALIDMIN": DATA_MIN,
+ "VALIDMAX": DATA_MAX,
+ "VAR_TYPE": "data",
+ "SCALETYP": "linear",
+ # "VARIABLE_PURPOSE" tells CDAWeb which variables are worth plotting
+ "VARIABLE_PURPOSE": "PRIMARY",
+}
+
+l1_tof_base = {"DEPEND_1": "Time_High_SR"} | l1_data_base
+
+l1_target_base = {"DEPEND_1": "Time_Low_SR"} | l1_data_base
+
+sample_rate_base = {
+ "DEPEND_0": "Epoch",
+ "FILLVAL": cdf_utils.DOUBLE_FILLVAL,
+ "FORMAT": "F12.5",
+ "LABLAXIS": "Time",
+ "UNITS": "microseconds",
+ "VALIDMIN": SAMPLE_RATE_MIN,
+ "VALIDMAX": SAMPLE_RATE_MAX,
+ "VAR_TYPE": "support_data",
+ "SCALETYP": "linear",
+ "VAR_NOTES": (
+ "The number of microseconds since the event. "
+ "0 is the start of data collection, negative "
+ "numbers represent data collected prior to a dust event"
+ ),
+}
+
+trigger_base = {
+ "DEPEND_0": "Epoch",
+ "FILLVAL": cdf_utils.INT_FILLVAL,
+ "FORMAT": "I12",
+ "VALIDMIN": 0, # All values are positive integers or 0 by design
+ "VAR_TYPE": "data",
+ "DISPLAY_TYPE": "no_plot",
+}
+
+# L1 Attribute Dictionaries
+low_sr_attrs = sample_rate_base | {
+    "CATDESC": "Low sample rate time steps for a dust event.",
+    "FIELDNAM": "Low Sample Rate Time",
+    "VAR_NOTES": (
+        "The low sample rate in microseconds. "
+        "Steps are approximately 1/4.025 microseconds in duration. "
+        "Used by the Ion_Grid, Target_Low, and Target_High variables."
+    ),
+}
+
+high_sr_attrs = sample_rate_base | {
+    "CATDESC": "High sample rate time steps for a dust event.",
+    "FIELDNAM": "High Sample Rate Time",
+    "VAR_NOTES": (
+        "The high sample rate in microseconds. "
+        "Steps are approximately 1/260 microseconds in duration. "
+        "Used by the TOF_High, TOF_Mid, and TOF_Low variables."
+    ),
+}
+
+tof_high_attrs = {
+ "CATDESC": "Time of flight waveform on the high-gain channel",
+ "FIELDNAM": "High Gain Time of Flight",
+ "LABLAXIS": "TOF High Ampl.",
+ "VAR_NOTES": (
+ "High gain channel of the time-of-flight signal. "
+ "Sampled at 260 Megasamples per second, with a 10-bit resolution. "
+ "Data is used to quantify dust composition."
+ ),
+} | l1_tof_base
+
+tof_mid_attrs = {
+ "CATDESC": "Time of flight waveform on the mid-gain channel",
+ "FIELDNAM": "Mid Gain Time of Flight",
+ "LABLAXIS": "TOF Mid Ampl.",
+ "VAR_NOTES": (
+ "Mid gain channel of the time-of-flight signal. "
+ "Sampled at 260 Megasamples per second, with a 10-bit resolution. "
+ "Data is used to quantify dust composition."
+ ),
+} | l1_tof_base
+
+tof_low_attrs = {
+ "CATDESC": "Time of flight waveform on the low-gain channel",
+ "FIELDNAM": "Low Gain Time of Flight",
+ "LABLAXIS": "TOF Low Ampl.",
+ "VAR_NOTES": (
+ "Low gain channel of the time-of-flight signal. "
+ "Sampled at 260 Megasamples per second, with a 10-bit resolution. "
+ "Data is used to quantify dust composition."
+ ),
+} | l1_tof_base
+
+target_low_attrs = {
+ "CATDESC": "Target low charge sensitive amplifier waveform",
+ "FIELDNAM": "Low Target Signal",
+ "LABLAXIS": "Low Target Ampl.",
+ "VAR_NOTES": (
+ "Low gain channel of IDEX's target signal. "
+ "Sampled at 3.75 Msps with 12-bit resolution. "
+ "Data is used to quantify dust charge. "
+ ),
+} | l1_target_base
+
+target_high_attrs = {
+ "CATDESC": "Ion grid charge sensitive amplifier waveform",
+ "FIELDNAM": "High Target Signal",
+ "LABLAXIS": "High Target Ampl.",
+ "VAR_NOTES": (
+ "High gain channel of IDEX's target signal. "
+ "Sampled at 3.75 Msps with 12-bit resolution. "
+ "Data is used to quantify dust charge."
+ ),
+} | l1_target_base
+
+ion_grid_attrs = {
+ "CATDESC": "Ion grid charge sensitive amplifier waveform data",
+ "FIELDNAM": "Ion Grid Signal",
+ "LABLAXIS": "Ion Grid Ampl.",
+ "VAR_NOTES": (
+ "This is the ion grid signal from IDEX. "
+ "Sampled at 3.75 Msps with 12-bit resolution. "
+ "Data is used to quantify dust charge."
+ ),
+} | l1_target_base
diff --git a/imap_processing/idex/idex_packet_parser.py b/imap_processing/idex/idex_packet_parser.py
index b1864ec53..aaea7a565 100644
--- a/imap_processing/idex/idex_packet_parser.py
+++ b/imap_processing/idex/idex_packet_parser.py
@@ -1,11 +1,13 @@
import logging
+from collections import namedtuple
import bitstring
import numpy as np
import xarray as xr
from space_packet_parser import parser, xtcedef
-from imap_processing import imap_module_directory
+from imap_processing import cdf_utils, imap_module_directory
+from imap_processing.idex import idex_cdf_attrs
SCITYPE_MAPPING_TO_NAMES = {
2: "TOF_High",
@@ -31,7 +33,6 @@ class PacketParser:
TODO
----
* Add method to generate quicklook
- * Add method to generate l1a CDF
Examples
--------
@@ -39,7 +40,7 @@ class PacketParser:
>>> from imap_processing.idex.idex_packet_parser import PacketParser
>>> l0_file = "imap_processing/idex/tests/imap_idex_l0_20230725_v01-00.pkts"
>>> l1_data = PacketParser(l0_file)
- >>> print(l1_data.data)
+ >>> l1_data.write_l1_cdf()
"""
@@ -88,6 +89,7 @@ def __init__(self, packet_file: str):
]
self.data = xr.concat(processed_dust_impact_list, dim="Epoch")
+ self.data.attrs = idex_cdf_attrs.idex_l1_global_attrs
class RawDustEvent:
@@ -142,12 +144,16 @@ def __init__(self, header_packet):
self.low_sample_trigger_time,
self.high_sample_trigger_time,
) = self._calc_sample_trigger_times(header_packet)
-
- self.trigger_values_dict, self.trigger_notes_dict = self._get_trigger_dicts(
- header_packet
- )
+ (
+ self.trigger_values,
+ self.trigger_notes,
+ self.trigger_fields,
+ self.trigger_maxes,
+ self.trigger_labels,
+ self.trigger_units,
+ ) = self._get_trigger_dicts(header_packet)
logging.debug(
- f"trigger_values_dict:\n{self.trigger_values_dict}"
+ f"trigger_values:\n{self.trigger_values}"
) # Log values here in case of error
# Initialize the binary data received from future packets
@@ -178,245 +184,527 @@ def _get_trigger_dicts(self, packet):
A dictionary of (CDF variable name : value) pairs
dict
A dictionary of (CDF variable name : variable notes) pairs
+ dict
+ A dictionary of (CDF variable name : variable fields) pairs
+ dict
+ A dictionary of (CDF variable name : variable maxes) pairs
+ dict
+ A dictionary of (CDF variable name : variable labels) pairs
+ dict
+ A dictionary of (CDF variable name : variable units) pairs
"""
- trigger_dict = {}
- trigger_notes_dict = {}
+ trigger_values = {}
+ trigger_notes = {}
+ trigger_maxes = {}
+ trigger_fields = {}
+ trigger_labels = {}
+ trigger_units = {}
+
+ TriggerDescription = namedtuple(
+ "TriggerDescription",
+ ["name", "packet_name", "num_bits", "field", "notes", "label", "units"],
+ )
+
+ def _insert_into_dicts(trigger_description):
+ # Cleans up inserting the values into the dictionaries
+ trigger_values[trigger_description.name] = packet.data[
+ trigger_description.packet_name
+ ].raw_value
+ trigger_notes[trigger_description.name] = trigger_description.notes
+ trigger_maxes[trigger_description.name] = (
+ 2**trigger_description.num_bits - 1
+ )
+ trigger_fields[trigger_description.name] = trigger_description.field
+ trigger_labels[trigger_description.name] = trigger_description.label
+ trigger_units[trigger_description.name] = trigger_description.units
# Get Event Number
- trigger_dict["event_number"] = packet.data["IDX__TXHDREVTNUM"].raw_value
- trigger_notes_dict[
- "event_number"
- ] = "The unique number assigned to the impact by the FPGA"
- # TOF High Trigger Info 1
- trigger_dict["tof_high_trigger_level"] = packet.data[
- "IDX__TXHDRHGTRIGLVL"
- ].raw_value
- trigger_notes_dict[
- "tof_high_trigger_level"
- ] = "Trigger level for the TOF High Channel"
- trigger_dict["tof_high_trigger_num_max_1_2"] = packet.data[
- "IDX__TXHDRHGTRIGNMAX12"
- ].raw_value
- trigger_notes_dict[
- "tof_high_trigger_num_max_1_2"
- ] = """Maximum number of samples between pulse 1 and 2 for TOF High double
- pulse triggering"""
- trigger_dict["tof_high_trigger_num_min_1_2"] = packet.data[
- "IDX__TXHDRHGTRIGNMIN12"
- ].raw_value
- trigger_notes_dict[
- "tof_high_trigger_num_min_1_2"
- ] = """Minimum number of samples between pulse 1 and 2 for TOF High double
- pulse triggering"""
- # TOF High Trigger Info 2
- trigger_dict["tof_high_trigger_num_min_1"] = packet.data[
- "IDX__TXHDRHGTRIGNMIN1"
- ].raw_value
- trigger_notes_dict[
- "tof_high_trigger_num_min_1"
- ] = """Minimum number of samples for pulse 1 for TOF High single and double
- pulse triggering"""
- trigger_dict["tof_high_trigger_num_max_1"] = packet.data[
- "IDX__TXHDRHGTRIGNMAX1"
- ].raw_value
- trigger_notes_dict[
- "tof_high_trigger_num_max_1"
- ] = """Maximum number of samples for pulse 1 for TOF High single and double
- pulse triggering"""
- trigger_dict["tof_high_trigger_num_min_2"] = packet.data[
- "IDX__TXHDRHGTRIGNMIN2"
- ].raw_value
- trigger_notes_dict[
- "tof_high_trigger_num_min_2"
- ] = """Minimum number of samples for pulse 2 for TOF High single and double
- pulse triggering"""
- trigger_dict["tof_high_trigger_num_max_2"] = packet.data[
- "IDX__TXHDRHGTRIGNMAX2"
- ].raw_value
- trigger_notes_dict[
- "tof_high_trigger_num_max_2"
- ] = """Maximum number of samples for pulse 2 for TOF High single and double
- pulse triggering"""
- trigger_dict["tof_low_trigger_level"] = packet.data[
- "IDX__TXHDRLGTRIGLVL"
- ].raw_value
- trigger_notes_dict[
- "tof_low_trigger_level"
- ] = "Trigger level for the TOF Low Channel"
- trigger_dict["tof_low_trigger_num_max_1_2"] = packet.data[
- "IDX__TXHDRLGTRIGNMAX12"
- ].raw_value
- trigger_notes_dict[
- "tof_low_trigger_num_max_1_2"
- ] = """Maximum number of samples between pulse 1 and 2 for TOF Low double
- pulse triggering"""
- trigger_dict["tof_low_trigger_num_min_1_2"] = packet.data[
- "IDX__TXHDRLGTRIGNMIN12"
- ].raw_value
- trigger_notes_dict[
- "tof_low_trigger_num_min_1_2"
- ] = """Minimum number of samples between pulse 1 and 2 for TOF Low double
- pulse triggering"""
- # TOF Low Trigger Info 2
- trigger_dict["tof_low_trigger_num_min_1"] = packet.data[
- "IDX__TXHDRLGTRIGNMIN1"
- ].raw_value
- trigger_notes_dict[
- "tof_low_trigger_num_min_1"
- ] = """Minimum number of samples for pulse 1 for TOF Low single and double
- pulse triggering"""
- trigger_dict["tof_low_trigger_num_max_1"] = packet.data[
- "IDX__TXHDRLGTRIGNMAX1"
- ].raw_value
- trigger_notes_dict[
- "tof_low_trigger_num_max_1"
- ] = """Maximum number of samples for pulse 1 for TOF Low single and double
- pulse triggering"""
- trigger_dict["tof_low_trigger_num_min_2"] = packet.data[
- "IDX__TXHDRLGTRIGNMIN2"
- ].raw_value
- trigger_notes_dict[
- "tof_low_trigger_num_min_2"
- ] = """Minimum number of samples for pulse 2 for TOF Low single and double
- pulse triggering"""
- trigger_dict["tof_low_trigger_num_max_2"] = packet.data[
- "IDX__TXHDRLGTRIGNMAX2"
- ].raw_value
- trigger_notes_dict[
- "tof_low_trigger_num_max_2"
- ] = """Maximum number of samples for pulse 2 for TOF Low single and double
- pulse triggering"""
- trigger_dict["tof_mid_trigger_level"] = packet.data[
- "IDX__TXHDRMGTRIGLVL"
- ].raw_value
- trigger_notes_dict[
- "tof_mid_trigger_level"
- ] = "Trigger level for the TOF Mid Channel"
- trigger_dict["tof_mid_trigger_num_max_1_2"] = packet.data[
- "IDX__TXHDRMGTRIGNMAX12"
- ].raw_value
- trigger_notes_dict[
- "tof_mid_trigger_num_max_1_2"
- ] = """Maximum number of samples between pulse 1 and 2 for TOF Mid double
- pulse triggering"""
- trigger_dict["tof_mid_trigger_num_min_1_2"] = packet.data[
- "IDX__TXHDRMGTRIGNMIN12"
- ].raw_value
- trigger_notes_dict[
- "tof_mid_trigger_num_min_1_2"
- ] = """Minimum number of samples between pulse 1 and 2 for TOF Mid double
- pulse triggering"""
- # TOF Mid Trigger Info 2
- trigger_dict["tof_mid_trigger_num_min_1"] = packet.data[
- "IDX__TXHDRMGTRIGNMIN1"
- ].raw_value
- trigger_notes_dict[
- "tof_mid_trigger_num_min_1"
- ] = """Minimum number of samples for pulse 1 for TOF Mid single and double
- pulse triggering"""
- trigger_dict["tof_mid_trigger_num_max_1"] = packet.data[
- "IDX__TXHDRMGTRIGNMAX1"
- ].raw_value
- trigger_notes_dict[
- "tof_mid_trigger_num_max_1"
- ] = """Maximum number of samples for pulse 1 for TOF Mid single and double
- pulse triggering"""
- trigger_dict["tof_mid_trigger_num_min_2"] = packet.data[
- "IDX__TXHDRMGTRIGNMIN2"
- ].raw_value
- trigger_notes_dict[
- "tof_mid_trigger_num_min_2"
- ] = """Minimum number of samples for pulse 2 for TOF Mid single and double
- pulse triggering"""
- trigger_dict["tof_mid_trigger_num_max_2"] = packet.data[
- "IDX__TXHDRMGTRIGNMAX2"
- ].raw_value
- trigger_notes_dict[
- "tof_mid_trigger_num_max_2"
- ] = """Maximum number of samples for pulse 2 for TOF Mid single and double
- pulse triggering"""
-
- # Low Sample Trigger Info
- trigger_dict["low_sample_coincidence_mode_blocks"] = packet.data[
- "IDX__TXHDRLSTRIGCMBLOCKS"
- ].raw_value
- trigger_notes_dict[
- "low_sample_coincidence_mode_blocks"
- ] = "Number of blocks coincidence window is enabled after low sample trigger"
- trigger_dict["low_sample_trigger_polarity"] = packet.data[
- "IDX__TXHDRLSTRIGPOL"
- ].raw_value
- trigger_notes_dict[
- "low_sample_trigger_polarity"
- ] = "The trigger polarity for low sample (0 = normal, 1 = inverted)"
- trigger_dict["low_sample_trigger_level"] = packet.data[
- "IDX__TXHDRLSTRIGLVL"
- ].raw_value
- trigger_notes_dict[
- "low_sample_trigger_level"
- ] = "Trigger level for the low sample"
- trigger_dict["low_sample_trigger_num_min"] = packet.data[
- "IDX__TXHDRLSTRIGNMIN"
- ].raw_value
- trigger_notes_dict[
- "low_sample_trigger_num_min"
- ] = """The minimum number of samples above/below the trigger level for
- triggering the low sample"""
- # Trigger modes
- trigger_dict["low_sample_trigger_mode"] = packet.data[
- "IDX__TXHDRLSTRIGMODE"
- ].raw_value
- trigger_notes_dict[
- "low_sample_trigger_mode"
- ] = "Low sample trigger mode (0=disabled, 1=enabled)"
- trigger_dict["tof_low_trigger_mode"] = packet.data[
- "IDX__TXHDRLSTRIGMODE"
- ].raw_value
- trigger_notes_dict[
- "tof_low_trigger_mode"
- ] = "TOF Low trigger mode (0=disabled, 1=enabled)"
- trigger_dict["tof_mid_trigger_mode"] = packet.data[
- "IDX__TXHDRMGTRIGMODE"
- ].raw_value
- trigger_notes_dict[
- "tof_mid_trigger_mode"
- ] = "TOF Mid trigger mode (0=disabled, 1=enabled)"
- trigger_dict["tof_high_trigger_mode"] = packet.data[
- "IDX__TXHDRHGTRIGMODE"
- ].raw_value
- trigger_notes_dict[
- "tof_high_trigger_mode"
- ] = """TOF Mid trigger mode (0=disabled, 1=threshold mode, 2=single pulse
- mode, 3=double pulse mode)"""
-
- trigger_dict["detector_voltage"] = packet.data["IDX__TXHDRHVPSHKCH0"].raw_value
- trigger_notes_dict[
- "detector_voltage"
- ] = "Last measurement in raw dN for processor board signal: Detector Voltage"
- trigger_dict["sensor_voltage"] = packet.data["IDX__TXHDRHVPSHKCH1"].raw_value
- trigger_notes_dict[
- "sensor_voltage"
- ] = "Last measurement in raw dN for processor board signal: Sensor Voltage"
- trigger_dict["target_voltage"] = packet.data["IDX__TXHDRHVPSHKCH2"].raw_value
- trigger_notes_dict[
- "target_voltage"
- ] = "Last measurement in raw dN for processor board signal: Target Voltage"
- trigger_dict["reflectron_voltage"] = packet.data[
- "IDX__TXHDRHVPSHKCH3"
- ].raw_value
- trigger_notes_dict[
- "reflectron_voltage"
- ] = "Last measurement in raw dN for processor board signal: Reflectron Voltage"
- trigger_dict["rejection_voltage"] = packet.data["IDX__TXHDRHVPSHKCH4"].raw_value
- trigger_notes_dict[
- "rejection_voltage"
- ] = "Last measurement in raw dN for processor board signal: Rejection Voltage"
- trigger_dict["detector_current"] = packet.data["IDX__TXHDRHVPSHKCH5"].raw_value
- trigger_notes_dict[
- "detector_current"
- ] = "Last measurement in raw dN for processor board signal: Detector Current"
-
- return trigger_dict, trigger_notes_dict
+ _insert_into_dicts(
+ TriggerDescription(
+ "event_number",
+ "IDX__TXHDREVTNUM",
+ 16,
+ "Event Number",
+ "The unique number assigned to the impact by the FPGA",
+ "Event #",
+ "",
+ )
+ )
+ _insert_into_dicts(
+ TriggerDescription(
+ "tof_high_trigger_level",
+ "IDX__TXHDRHGTRIGLVL",
+ 10,
+ "TOF High Trigger Level",
+ "Trigger level for the TOF High Channel",
+ "Level",
+ "",
+ )
+ )
+ _insert_into_dicts(
+ TriggerDescription(
+ "tof_high_trigger_num_max_1_2",
+ "IDX__TXHDRHGTRIGNMAX12",
+ 11,
+ "TOF High Double Pulse Max Samples",
+ (
+ "Maximum number of samples between pulse 1 and 2 for TOF "
+ "High double pulse triggering"
+ ),
+ "# Samples",
+ "samples",
+ )
+ )
+ _insert_into_dicts(
+ TriggerDescription(
+ "tof_high_trigger_num_min_1_2",
+ "IDX__TXHDRHGTRIGNMIN12",
+ 11,
+ "TOF High Double Pulse Min Samples",
+ (
+ "Minimum number of samples between pulse 1 and 2 for TOF High "
+ "double pulse triggering"
+ ),
+ "# Samples",
+ "samples",
+ )
+ )
+ _insert_into_dicts(
+ TriggerDescription(
+ "tof_high_trigger_num_min_1",
+ "IDX__TXHDRHGTRIGNMIN1",
+ 8,
+ "TOF High Pulse 1 Min Samples",
+ (
+ "Minimum number of samples for pulse 1 for TOF High single and "
+ "double pulse triggering"
+ ),
+ "# Samples",
+ "samples",
+ )
+ )
+ _insert_into_dicts(
+ TriggerDescription(
+ "tof_high_trigger_num_max_1",
+ "IDX__TXHDRHGTRIGNMAX1",
+ 8,
+ "TOF High Pulse 1 Max Samples",
+ (
+ "Maximum number of samples for pulse 1 for TOF High single and "
+ "double pulse triggering"
+ ),
+ "# Samples",
+ "samples",
+ )
+ )
+ _insert_into_dicts(
+ TriggerDescription(
+ "tof_high_trigger_num_min_2",
+ "IDX__TXHDRHGTRIGNMIN2",
+ 8,
+ "TOF High Pulse 2 Min Samples",
+ (
+ "Minimum number of samples for pulse 2 for TOF High single and "
+ "double pulse triggering"
+ ),
+ "# Samples",
+ "samples",
+ )
+ )
+ _insert_into_dicts(
+ TriggerDescription(
+ "tof_high_trigger_num_max_2",
+ "IDX__TXHDRHGTRIGNMAX2",
+ 8,
+ "TOF High Pulse 2 Max Samples",
+ (
+ "Maximum number of samples for pulse 2 for TOF High single and "
+ "double pulse triggering"
+ ),
+ "# Samples",
+ "samples",
+ )
+ )
+ _insert_into_dicts(
+ TriggerDescription(
+ "tof_low_trigger_level",
+ "IDX__TXHDRLGTRIGLVL",
+ 10,
+ "TOF Low Trigger Level",
+ "Trigger level for the TOF Low Channel",
+ "Level",
+            "",
+ )
+ )
+ _insert_into_dicts(
+ TriggerDescription(
+ "tof_low_trigger_num_max_1_2",
+ "IDX__TXHDRLGTRIGNMAX12",
+ 11,
+ "TOF Low Double Pulse Max Samples",
+ (
+ "Maximum number of samples between pulse 1 and 2 for TOF Low "
+ "double pulse triggering"
+ ),
+ "# Samples",
+ "samples",
+ )
+ )
+ _insert_into_dicts(
+ TriggerDescription(
+ "tof_low_trigger_num_min_1_2",
+ "IDX__TXHDRLGTRIGNMIN12",
+ 11,
+ "TOF Low Double Pulse Min Samples",
+ (
+ "Minimum number of samples between pulse 1 and 2 for TOF Low "
+ "double pulse triggering"
+ ),
+ "# Samples",
+ "samples",
+ )
+ )
+ _insert_into_dicts(
+ TriggerDescription(
+ "tof_low_trigger_num_min_1",
+ "IDX__TXHDRLGTRIGNMIN1",
+ 8,
+ "TOF Low Pulse 1 Min Samples",
+ (
+ "Minimum number of samples for pulse 1 for TOF Low single and "
+ "double pulse triggering"
+ ),
+ "# Samples",
+ "samples",
+ )
+ )
+ _insert_into_dicts(
+ TriggerDescription(
+ "tof_low_trigger_num_max_1",
+ "IDX__TXHDRLGTRIGNMAX1",
+ 8,
+ "TOF Low Pulse 1 Max Samples",
+ (
+ "Maximum number of samples for pulse 1 for TOF Low single and "
+ "double pulse triggering"
+ ),
+ "# Samples",
+ "samples",
+ )
+ )
+ _insert_into_dicts(
+ TriggerDescription(
+ "tof_low_trigger_num_min_2",
+ "IDX__TXHDRLGTRIGNMIN2",
+ 8,
+ "TOF Low Pulse 2 Min Samples",
+ (
+ "Minimum number of samples for pulse 2 for TOF Low single and "
+ "double pulse triggering"
+ ),
+ "# Samples",
+ "samples",
+ )
+ )
+ _insert_into_dicts(
+ TriggerDescription(
+ "tof_low_trigger_num_max_2",
+ "IDX__TXHDRLGTRIGNMAX2",
+            8,
+ "TOF Low Pulse 2 Max Samples",
+ (
+ "Maximum number of samples for pulse 2 for TOF Low single and "
+ "double pulse triggering"
+ ),
+ "# Samples",
+ "samples",
+ )
+ )
+ _insert_into_dicts(
+ TriggerDescription(
+ "tof_mid_trigger_level",
+ "IDX__TXHDRMGTRIGLVL",
+ 10,
+ "TOF Mid Trigger Level",
+ "Trigger level for the TOF Mid Channel",
+ "Level",
+            "",
+ )
+ )
+ _insert_into_dicts(
+ TriggerDescription(
+ "tof_mid_trigger_num_max_1_2",
+ "IDX__TXHDRMGTRIGNMAX12",
+ 11,
+ "TOF Mid Double Pulse Max Samples",
+ (
+ "Maximum number of samples between pulse 1 and 2 for TOF Mid "
+ "double pulse triggering"
+ ),
+ "# Samples",
+ "samples",
+ )
+ )
+ _insert_into_dicts(
+ TriggerDescription(
+ "tof_mid_trigger_num_min_1_2",
+ "IDX__TXHDRMGTRIGNMIN12",
+ 11,
+ "TOF Mid Double Pulse Min Samples",
+ (
+ "Minimum number of samples between pulse 1 and 2 for TOF Mid "
+ "double pulse triggering"
+ ),
+ "# Samples",
+ "samples",
+ )
+ )
+ _insert_into_dicts(
+ TriggerDescription(
+ "tof_mid_trigger_num_min_1",
+ "IDX__TXHDRMGTRIGNMIN1",
+ 8,
+ "TOF Mid Pulse 1 Min Samples",
+ (
+ "Minimum number of samples for pulse 1 for TOF Mid single and "
+ "double pulse triggering"
+ ),
+ "# Samples",
+ "samples",
+ )
+ )
+ _insert_into_dicts(
+ TriggerDescription(
+ "tof_mid_trigger_num_max_1",
+ "IDX__TXHDRMGTRIGNMAX1",
+ 8,
+ "TOF Mid Pulse 1 Max Samples",
+ (
+ "Maximum number of samples for pulse 1 for TOF Mid single and "
+ "double pulse triggering"
+ ),
+ "# Samples",
+ "samples",
+ )
+ )
+ _insert_into_dicts(
+ TriggerDescription(
+ "tof_mid_trigger_num_min_2",
+ "IDX__TXHDRMGTRIGNMIN2",
+ 8,
+ "TOF Mid Pulse 2 Min Samples",
+ (
+ "Minimum number of samples for pulse 2 for TOF Mid single and "
+ "double pulse triggering"
+ ),
+ "# Samples",
+ "samples",
+ )
+ )
+ _insert_into_dicts(
+ TriggerDescription(
+ "tof_mid_trigger_num_max_2",
+ "IDX__TXHDRMGTRIGNMAX2",
+ 8,
+ "TOF Mid Pulse 2 Max Samples",
+ (
+ "Maximum number of samples for pulse 2 for TOF Mid single and "
+ "double pulse triggering"
+ ),
+ "# Samples",
+ "samples",
+ )
+ )
+ _insert_into_dicts(
+ TriggerDescription(
+ "low_sample_coincidence_mode_blocks",
+ "IDX__TXHDRLSTRIGCMBLOCKS",
+ 3,
+ "LS Coincidence Mode Blocks",
+ (
+ "Number of blocks coincidence window is enabled after "
+ "low sample trigger"
+ ),
+ "# Blocks",
+ "Blocks",
+ )
+ )
+ _insert_into_dicts(
+ TriggerDescription(
+ "low_sample_trigger_polarity",
+ "IDX__TXHDRLSTRIGPOL",
+ 1,
+ "LS Trigger Polarity",
+ "The trigger polarity for low sample (0 = normal, 1 = inverted) ",
+ "Polarity",
+ "",
+ )
+ )
+ _insert_into_dicts(
+ TriggerDescription(
+ "low_sample_trigger_level",
+ "IDX__TXHDRLSTRIGLVL",
+ 12,
+ "LS Trigger Level",
+ "Trigger level for the low sample",
+ "Level",
+ "",
+ )
+ )
+ _insert_into_dicts(
+ TriggerDescription(
+ "low_sample_trigger_num_min",
+ "IDX__TXHDRLSTRIGNMIN",
+ 8,
+ "LS Trigger Min Num Samples",
+ (
+ "The minimum number of samples above/below the trigger level for "
+ "triggering the low sample"
+ ),
+ "# Samples",
+ "samples",
+ )
+ )
+ _insert_into_dicts(
+ TriggerDescription(
+ "low_sample_trigger_mode",
+ "IDX__TXHDRLSTRIGMODE",
+ 1,
+ "LS Trigger Mode Enabled",
+ "Low sample trigger mode (0=disabled, 1=enabled)",
+ "Mode",
+ "",
+ )
+ )
+ _insert_into_dicts(
+ TriggerDescription(
+ "tof_low_trigger_mode",
+            "IDX__TXHDRLGTRIGMODE",
+ 1,
+ "TOF Low Trigger Mode Enabled",
+ "TOF Low trigger mode (0=disabled, 1=enabled)",
+ "Mode",
+ "",
+ )
+ )
+ _insert_into_dicts(
+ TriggerDescription(
+ "tof_mid_trigger_mode",
+ "IDX__TXHDRMGTRIGMODE",
+ 1,
+ "TOF Mid Trigger Mode Enabled",
+ "TOF Mid trigger mode (0=disabled, 1=enabled)",
+ "Mode",
+ "",
+ )
+ )
+ _insert_into_dicts(
+ TriggerDescription(
+ "tof_high_trigger_mode",
+ "IDX__TXHDRHGTRIGMODE",
+ 2,
+ "TOF High Trigger Mode Enabled",
+ (
+ "TOF High trigger mode (0=disabled, 1=threshold mode, "
+ "2=single pulse mode, 3=double pulse mode)"
+ ),
+ "Mode",
+ "",
+ )
+ )
+ _insert_into_dicts(
+ TriggerDescription(
+ "detector_voltage",
+ "IDX__TXHDRHVPSHKCH0",
+ 12,
+ "Detector Voltage",
+ (
+ "Last measurement in raw dN for processor board signal: "
+ "Detector Voltage"
+ ),
+ "Voltage",
+ "dN",
+ )
+ )
+ _insert_into_dicts(
+ TriggerDescription(
+ "sensor_voltage",
+ "IDX__TXHDRHVPSHKCH1",
+ 12,
+ "Sensor Voltage",
+ (
+ "Last measurement in raw dN for processor board signal: "
+ "Sensor Voltage "
+ ),
+ "Voltage",
+ "dN",
+ )
+ )
+ _insert_into_dicts(
+ TriggerDescription(
+ "target_voltage",
+ "IDX__TXHDRHVPSHKCH2",
+ 12,
+ "Target Voltage",
+ (
+ "Last measurement in raw dN for processor board signal: "
+ "Target Voltage"
+ ),
+ "Voltage",
+ "dN",
+ )
+ )
+ _insert_into_dicts(
+ TriggerDescription(
+ "reflectron_voltage",
+ "IDX__TXHDRHVPSHKCH3",
+ 12,
+ "Reflectron Voltage",
+ (
+ "Last measurement in raw dN for processor board signal: "
+ "Reflectron Voltage"
+ ),
+ "Voltage",
+ "dN",
+ )
+ )
+ _insert_into_dicts(
+ TriggerDescription(
+ "rejection_voltage",
+ "IDX__TXHDRHVPSHKCH4",
+ 12,
+ "Rejection Voltage",
+ (
+ "Last measurement in raw dN for processor board signal: "
+ "Rejection Voltage"
+ ),
+ "Voltage",
+ "dN",
+ )
+ )
+ _insert_into_dicts(
+ TriggerDescription(
+ "detector_current",
+ "IDX__TXHDRHVPSHKCH5",
+ 12,
+ "Detector Current",
+ (
+ "Last measurement in raw dN for processor board signal: "
+ "Detector Current "
+ ),
+ "Current",
+ "dN",
+ )
+ )
+
+ return (
+ trigger_values,
+ trigger_notes,
+ trigger_fields,
+ trigger_maxes,
+ trigger_labels,
+ trigger_units,
+ )
def _calc_impact_time(self, packet):
"""Calculate the datetime64 from the FPGA header information.
@@ -446,7 +734,7 @@ def _calc_impact_time(self, packet):
# Number of microseconds since the last second
microseconds_since_last_second = 20 * num_of_20_microsecond_increments
# Get the datetime of Jan 1 2012 as the start date
- launch_time = np.datetime64("2012-01-01")
+ launch_time = np.datetime64("2012-01-01T00:00:00.000000000")
return (
launch_time
@@ -625,54 +913,80 @@ def process(self):
"""
# Gather the huge number of trigger info metadata
trigger_vars = {}
- for var, value in self.trigger_values_dict.items():
- trigger_vars[var] = xr.DataArray(name=var, data=[value], dims=("Epoch"))
+ for var, value in self.trigger_values.items():
+ trigger_vars[var] = xr.DataArray(
+ name=var,
+ data=[value],
+ dims=("Epoch"),
+ attrs={
+ "CATDESC": self.trigger_notes[var],
+ "FIELDNAM": self.trigger_fields[var],
+ "VAR_NOTES": self.trigger_notes[var],
+ "VALIDMAX": self.trigger_maxes[var],
+ "LABLAXIS": self.trigger_labels[var],
+ "UNITS": self.trigger_units[var],
+ }
+ | idex_cdf_attrs.trigger_base,
+ )
# Process the 6 primary data variables
tof_high_xr = xr.DataArray(
name="TOF_High",
data=[self._parse_high_sample_waveform(self.TOF_High_bits)],
dims=("Epoch", "Time_High_SR_dim"),
+ attrs=idex_cdf_attrs.tof_high_attrs,
)
tof_low_xr = xr.DataArray(
name="TOF_Low",
data=[self._parse_high_sample_waveform(self.TOF_Low_bits)],
dims=("Epoch", "Time_High_SR_dim"),
+ attrs=idex_cdf_attrs.tof_low_attrs,
)
tof_mid_xr = xr.DataArray(
name="TOF_Mid",
data=[self._parse_high_sample_waveform(self.TOF_Mid_bits)],
dims=("Epoch", "Time_High_SR_dim"),
+ attrs=idex_cdf_attrs.tof_mid_attrs,
)
target_high_xr = xr.DataArray(
name="Target_High",
data=[self._parse_low_sample_waveform(self.Target_High_bits)],
dims=("Epoch", "Time_Low_SR_dim"),
+ attrs=idex_cdf_attrs.target_high_attrs,
)
target_low_xr = xr.DataArray(
name="Target_Low",
data=[self._parse_low_sample_waveform(self.Target_Low_bits)],
dims=("Epoch", "Time_Low_SR_dim"),
+ attrs=idex_cdf_attrs.target_low_attrs,
)
ion_grid_xr = xr.DataArray(
name="Ion_Grid",
data=[self._parse_low_sample_waveform(self.Ion_Grid_bits)],
dims=("Epoch", "Time_Low_SR_dim"),
+ attrs=idex_cdf_attrs.ion_grid_attrs,
)
# Determine the 3 coordinate variables
- epoch_xr = xr.DataArray(name="Epoch", data=[self.impact_time], dims=("Epoch"))
+ epoch_xr = xr.DataArray(
+ name="Epoch",
+ data=[self.impact_time],
+ dims=("Epoch"),
+ attrs=cdf_utils.epoch_attrs,
+ )
time_low_sr_xr = xr.DataArray(
name="Time_Low_SR",
data=[self._calc_low_sample_resolution(len(target_low_xr[0]))],
dims=("Epoch", "Time_Low_SR_dim"),
+ attrs=idex_cdf_attrs.low_sr_attrs,
)
time_high_sr_xr = xr.DataArray(
name="Time_High_SR",
data=[self._calc_high_sample_resolution(len(tof_low_xr[0]))],
dims=("Epoch", "Time_High_SR_dim"),
+ attrs=idex_cdf_attrs.high_sr_attrs,
)
# Combine to return a dataset object
diff --git a/imap_processing/idex/packet_definitions/idex_packet_definition.xml b/imap_processing/idex/packet_definitions/idex_packet_definition.xml
index 09df23c8c..b66ec2c17 100644
--- a/imap_processing/idex/packet_definitions/idex_packet_definition.xml
+++ b/imap_processing/idex/packet_definitions/idex_packet_definition.xml
@@ -309,9 +309,17 @@
+
+
+
+
+
+
+
+
-
+
@@ -1530,6 +1538,13 @@
+
+
+
+
+
+
+
@@ -2859,41 +2874,42 @@
-
-
-
-
-
-
-
+
+
+
+
+
+
+
-
-
-
+
+
+
-
-
-
+
+
+
-
-
-
+
+
+
+
@@ -2912,9 +2928,9 @@
-
-
+
+
@@ -3136,4 +3152,4 @@
-
+
\ No newline at end of file
diff --git a/imap_processing/idex/tests/test_l1_cdfs.py b/imap_processing/idex/tests/test_l1_cdfs.py
new file mode 100644
index 000000000..ce676fb6c
--- /dev/null
+++ b/imap_processing/idex/tests/test_l1_cdfs.py
@@ -0,0 +1,95 @@
+import os
+from pathlib import Path
+
+import numpy as np
+import pytest
+import xarray as xr
+from cdflib.xarray import cdf_to_xarray
+from cdflib.xarray.xarray_to_cdf import ISTPError
+
+from imap_processing import idex
+from imap_processing.cdf_utils import write_cdf
+from imap_processing.idex.idex_packet_parser import PacketParser
+
+
+@pytest.fixture()
+def decom_test_data():
+ return PacketParser("imap_processing/idex/tests/imap_idex_l0_20230725_v01-00.pkts")
+
+
+@pytest.fixture()
+def temp_path(tmp_path_factory):
+ return tmp_path_factory.mktemp("data")
+
+
+def test_idex_cdf_file(decom_test_data, temp_path):
+ # Verify that a CDF file can be created with no errors thrown by xarray_to_cdf
+ file_name = write_cdf(decom_test_data.data, description="", directory=temp_path)
+ date_to_test = "20250724"
+ assert file_name == os.path.join(
+ temp_path,
+ f"{decom_test_data.data.attrs['Logical_source']}_{date_to_test}_v{idex.__version__}.cdf",
+ )
+ assert Path(file_name).exists()
+
+
+def test_bad_cdf_attributes(decom_test_data, temp_path):
+ # Deliberately mess up the attributes to verify that an ISTPError is raised
+ del decom_test_data.data["TOF_High"].attrs["DEPEND_1"]
+ with pytest.raises(ISTPError):
+ write_cdf(decom_test_data.data, description="", directory=temp_path)
+
+
+def test_bad_cdf_file_data(decom_test_data, temp_path):
+ # Deliberately mess up the data to verify that an ISTPError is raised
+ bad_data_attrs = {
+ "CATDESC": "Bad_Data",
+ "DEPEND_0": "Epoch",
+ "DISPLAY_TYPE": "no_plot",
+ "FIELDNAM": "Bad_Data",
+ "FILLVAL": "",
+ "FORMAT": "E12.2",
+ "LABLAXIS": "Bad_Data",
+ "UNITS": "",
+ "VALIDMIN": "1",
+ "VALIDMAX": "50",
+ "VAR_TYPE": "support_data",
+ "VAR_NOTES": """How did this data end up in here?
+ The CDF creation better fail.""",
+ }
+ bad_data_xr = xr.DataArray(
+ name="bad_data",
+ data=np.linspace(1, 50, 50),
+ dims=("bad_data"),
+ attrs=bad_data_attrs,
+ )
+ decom_test_data.data["Bad_data"] = bad_data_xr
+
+ with pytest.raises(ISTPError):
+ write_cdf(decom_test_data.data, description="", directory=temp_path)
+
+
+def test_descriptor_in_file_name(decom_test_data, temp_path):
+    # Verify that the supplied description is included in the generated CDF file name
+ file_name = write_cdf(
+ decom_test_data.data, description="impact-lab-test001", directory=temp_path
+ )
+ date_to_test = "20250724"
+ assert file_name == os.path.join(
+ temp_path,
+ f"{decom_test_data.data.attrs['Logical_source']}_{date_to_test}_impact-lab-test001_v{idex.__version__}.cdf",
+ )
+ assert Path(file_name).exists()
+
+
+def test_idex_tof_high_data_from_cdf(decom_test_data, temp_path):
+ # Verify that a sample of the data is correct inside the CDF file
+ # impact_14_tof_high_data.txt has been verified correct by the IDEX team
+ with open("imap_processing/idex/tests/impact_14_tof_high_data.txt") as f:
+ data = np.array([int(line.rstrip()) for line in f])
+
+ file_name = write_cdf(decom_test_data.data, description="", directory=temp_path)
+ l1_data = cdf_to_xarray(
+ file_name
+ ) # Read in the data from the CDF file to an xarray object
+ assert (l1_data["TOF_High"][13].data == data).all()
diff --git a/poetry.lock b/poetry.lock
index af2683631..a289bd96a 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -253,6 +253,24 @@ d = ["aiohttp (>=3.7.4)"]
jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
uvloop = ["uvloop (>=0.15.2)"]
+[[package]]
+name = "cdflib"
+version = "1.2.3"
+description = "A python CDF reader toolkit"
+optional = false
+python-versions = ">=3.6"
+files = [
+ {file = "cdflib-1.2.3-py3-none-any.whl", hash = "sha256:1d4ae2ae2c623357999f0187ea6d86bc636723e28d20dceed708e9a5e9d1568e"},
+ {file = "cdflib-1.2.3.tar.gz", hash = "sha256:199b8cbec273fefcd2d6d489b3836fb6537569629066cac0c498c0259fbb2795"},
+]
+
+[package.dependencies]
+numpy = "*"
+
+[package.extras]
+docs = ["astropy", "sphinx", "sphinx-automodapi", "sphinx-copybutton", "sphinx-rtd-theme", "xarray"]
+tests = ["astropy", "hypothesis", "pytest (>=3.9)", "pytest-cov", "pytest-remotedata", "xarray"]
+
[[package]]
name = "certifi"
version = "2023.7.22"
@@ -634,6 +652,16 @@ files = [
{file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"},
{file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"},
{file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"},
+ {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"},
{file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"},
{file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"},
{file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"},
@@ -1092,6 +1120,7 @@ files = [
{file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"},
{file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"},
{file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"},
+ {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"},
{file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"},
{file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"},
{file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"},
@@ -1099,8 +1128,15 @@ files = [
{file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"},
{file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"},
{file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"},
+ {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"},
{file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"},
{file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"},
+ {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"},
+ {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"},
+ {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"},
+ {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"},
+ {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"},
+ {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"},
{file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"},
{file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"},
{file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"},
@@ -1117,6 +1153,7 @@ files = [
{file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"},
{file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"},
{file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"},
+ {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"},
{file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"},
{file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"},
{file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"},
@@ -1124,6 +1161,7 @@ files = [
{file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"},
{file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"},
{file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"},
+ {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"},
{file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"},
{file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"},
{file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"},
@@ -1520,4 +1558,4 @@ tools = ["openpyxl", "pandas"]
[metadata]
lock-version = "2.0"
python-versions = ">=3.9,<3.12"
-content-hash = "cf25c6bf84c11625bf4c766631dcfc801078253972322ca6c941449929b40724"
+content-hash = "3b790d434201384518afe381fa164c39c2e57c5842061e9b5c490e4227334752"
diff --git a/pyproject.toml b/pyproject.toml
index c1662001e..8c69450af 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -32,6 +32,7 @@ python = ">=3.9,<3.12"
xarray = '>=2023.0.0'
space_packet_parser = ">=4.1.0,<5"
bitstring = ">=4.0.1,<5"
+cdflib = "^1.2.3"
# Optional dependencies
black = {version="^23.1.0", optional=true}