From 8f575a1deafbc51666ee29e03dbd9cefa1bcefa9 Mon Sep 17 00:00:00 2001 From: Tim Plummer Date: Fri, 4 Oct 2024 10:27:59 -0600 Subject: [PATCH 1/7] Add hi_l1b function for computing coincidence type and time deltas for direct events --- imap_processing/hi/l1b/hi_l1b.py | 132 ++++++++++++++++++++++++++++++- imap_processing/hi/utils.py | 33 ++++++++ 2 files changed, 161 insertions(+), 4 deletions(-) diff --git a/imap_processing/hi/l1b/hi_l1b.py b/imap_processing/hi/l1b/hi_l1b.py index 638fbbf79..fb2e15d6f 100644 --- a/imap_processing/hi/l1b/hi_l1b.py +++ b/imap_processing/hi/l1b/hi_l1b.py @@ -1,12 +1,14 @@ """IMAP-HI L1B processing module.""" import logging +from enum import IntEnum +import numpy as np import xarray as xr from imap_processing import imap_module_directory from imap_processing.cdf.imap_cdf_manager import ImapCdfAttributes -from imap_processing.hi.utils import HIAPID, create_dataset_variables +from imap_processing.hi.utils import HIAPID, HiConstants, create_dataset_variables from imap_processing.utils import convert_raw_to_eu logger = logging.getLogger(__name__) @@ -15,6 +17,23 @@ ATTR_MGR.load_variable_attributes("imap_hi_variable_attrs.yaml") +class TriggerId(IntEnum): + """Int Enum class for trigger id values.""" + + A = 1 + B = 2 + C = 3 + + +class CoincidenceBitmap(IntEnum): + """Int Enum class for coincidence type bitmap values.""" + + A = 2**3 + B = 2**2 + C1 = 2**1 + C2 = 2**0 + + def hi_l1b(l1a_dataset: xr.Dataset, data_version: str) -> xr.Dataset: """ High level IMAP-HI L1B processing function. @@ -49,9 +68,7 @@ def hi_l1b(l1a_dataset: xr.Dataset, data_version: str) -> xr.Dataset: l1a_dataset, conversion_table_path=conversion_table_path, packet_name=packet_enum.name, - comment="#", # type: ignore[arg-type] - # Todo error, Argument "comment" to "convert_raw_to_eu" has incompatible - # type "str"; expected "dict[Any, Any]" + comment="#", converters={"mnemonic": str.lower}, ) @@ -115,3 +132,110 @@ def annotate_direct_events(l1a_dataset: xr.Dataset) -> xr.Dataset: de_global_attrs = ATTR_MGR.get_global_attributes("imap_hi_l1b_de_attrs") l1b_dataset.attrs.update(**de_global_attrs) return l1b_dataset + + +def compute_coincidence_type_and_time_deltas(dataset: xr.Dataset) -> None: + """ + Compute coincidence type and time deltas. + + Modifies the input xarray.DataSet in place. Uses existing L1A variables: + `trigger_id`, `tof_1`, `tof_2`, `tof_3` to compute L1B variables: + `coincidence_type`, `delta_t_ab`, `delta_t_ac1`, `delta_t_bc1`, + `delta_t_c1c2`. + + Parameters + ---------- + dataset : xarray.Dataset + The L1A/B dataset that results from reading in the L1A CDF and + allocating the new L1B DataArrays. + + Returns + ------- + None + Input `dataset` is modified in-place. 
+ """ + a_mask = dataset.trigger_id.values == TriggerId.A + b_mask = dataset.trigger_id.values == TriggerId.B + c_mask = dataset.trigger_id.values == TriggerId.C + + tof_1_valid_mask = np.isin( + dataset.tof_1.values, HiConstants.TOF1_BAD_VALUES, invert=True + ) + tof_2_valid_mask = np.isin( + dataset.tof_1.values, HiConstants.TOF2_BAD_VALUES, invert=True + ) + tof_1and2_valid_mask = tof_1_valid_mask & tof_2_valid_mask + tof_3_valid_mask = np.isin( + dataset.tof_3.values, HiConstants.TOF3_BAD_VALUES, invert=True + ) + + # Table denoting how hit-first mask and valid TOF masks are used to set + # coincidence type bitmask + # ----------------------------------------------------------------------- + # | Trigger ID | Hit First | TOF 1 Valid | TOF 2 Valid | TOF 3 Valid | + # ----------------------------------------------------------------------- + # | 1 | A | B | C1 | C2 | + # | 2 | B | A | C1 | C2 | + # | 3 | C1 | A | B | C2 | + # Set coincidence type bitmask + dataset.coincidence_type[a_mask | tof_1_valid_mask] &= CoincidenceBitmap.A + dataset.coincidence_type[ + b_mask | (a_mask & tof_1_valid_mask) | (c_mask & tof_2_valid_mask) + ] &= CoincidenceBitmap.B + dataset.coincidence_type[ + c_mask | (a_mask & tof_1_valid_mask) | (b_mask & tof_2_valid_mask) + ] &= CoincidenceBitmap.C1 + dataset.coincidence_type[tof_3_valid_mask] &= CoincidenceBitmap.C2 + + # Table denoting how TOF is interpreted for each Trigger ID + # ----------------------------------------------------------------------- + # | Trigger ID | Hit First | TOF 1 | TOF 2 | TOF 3 | + # ----------------------------------------------------------------------- + # | 1 | A | t_b - t_a | t_c1 - t_a | t_c2 - t_c1 | + # | 2 | B | t_a - t_b | t_c1 - t_b | t_c2 - t_c1 | + # | 3 | C | t_a - t_c1 | t_b - t_c1 | t_c2 - t_c1 | + # delta_t_ab + dataset.delta_t_ab.values[a_mask & tof_1_valid_mask] = ( + dataset.tof_1.values[a_mask & tof_1_valid_mask].astype(np.float32) + * HiConstants.TOF1_TICK + ) + dataset.delta_t_ab.values[b_mask & tof_1_valid_mask] = ( + -dataset.tof_1.values[b_mask & tof_1_valid_mask].astype(np.float32) + * HiConstants.TOF1_TICK + ) + dataset.delta_t_ab.values[c_mask & tof_1and2_valid_mask] = ( + dataset.tof_2.values[c_mask & tof_1and2_valid_mask].astype(np.float32) + * HiConstants.TOF2_TICK + - dataset.tof_1.values[c_mask & tof_1and2_valid_mask].astype(np.float32) + * HiConstants.TOF1_TICK + ) + + # delta_t_ac1 + dataset.delta_t_ac1.values[a_mask & tof_2_valid_mask] = ( + dataset.tof_2.values[a_mask & tof_2_valid_mask].astype(np.float32) + * HiConstants.TOF2_TICK + ) + dataset.delta_t_ac1.values[b_mask & tof_1and2_valid_mask] = ( + dataset.tof_2.values[b_mask & tof_1and2_valid_mask] * HiConstants.TOF2_TICK + - dataset.tof_1.values[b_mask & tof_1and2_valid_mask] * HiConstants.TOF1_TICK + ) + dataset.delta_t_ac1.values[c_mask & tof_1_valid_mask] = ( + -dataset.tof_1.values[c_mask & tof_1_valid_mask] * HiConstants.TOF1_TICK + ) + + # delta_t_bc1 + dataset.delta_t_bc1.values[a_mask & tof_1_valid_mask & tof_2_valid_mask] = ( + dataset.tof_2.values[a_mask & tof_1and2_valid_mask] * HiConstants.TOF2_TICK + - dataset.tof_1.values[a_mask & tof_1and2_valid_mask] * HiConstants.TOF1_TICK + ) + dataset.delta_t_bc1.values[b_mask & tof_2_valid_mask] = ( + dataset.tof_2.values[b_mask & tof_2_valid_mask] * HiConstants.TOF2_TICK + ) + dataset.delta_t_bc1.values[c_mask & tof_2_valid_mask] = ( + -dataset.tof_2.values[c_mask & tof_2_valid_mask] * HiConstants.TOF2_TICK + ) + + # delta_tc1c2 + dataset.delta_t_c1c2.values[tof_3_valid_mask] = ( + 
dataset.tof_3.values[tof_3_valid_mask] * HiConstants.TOF3_TICK + ) diff --git a/imap_processing/hi/utils.py b/imap_processing/hi/utils.py index cbe61a996..21193540a 100644 --- a/imap_processing/hi/utils.py +++ b/imap_processing/hi/utils.py @@ -1,6 +1,7 @@ """IMAP-Hi utils functions.""" from collections.abc import Sequence +from dataclasses import dataclass from enum import IntEnum from typing import Optional, Union @@ -34,6 +35,38 @@ def sensor(self) -> str: return self.name[1:3] + "sensor" +@dataclass(frozen=True) +class HiConstants: + """ + Constants for Hi instrument. + + Attributes + ---------- + TOF1_TICK : float + Duration of Time-of-Flight 1 clock tick. + TOF2_TICK : float + Duration of Time-of-Flight 2 clock tick. + TOF3_TICK : float + Duration of Time-of-Flight 3 clock tick. + TOF1_BAD_VALUES : tuple[int] + Tuple of values indicating TOF1 does not contain a valid time. + TOF2_BAD_VALUES : tuple[int] + Tuple of values indicating TOF2 does not contain a valid time. + TOF3_BAD_VALUES : tuple[int] + Tuple of values indicating TOF3 does not contain a valid time. + """ + + TOF1_TICK = 1e-9 # 1 ns + TOF2_TICK = 1e-9 # 1 ns + TOF3_TICK = 5e-10 # 0.5 ns + + # These values are stored in the TOF telemetry when the TOF timer + # does not have valid data. + TOF1_BAD_VALUES = (511, 1023) + TOF2_BAD_VALUES = (1023,) + TOF3_BAD_VALUES = (1023,) + + def full_dataarray( name: str, attrs: dict, From 92e6bcc3445f7cf15b2f81d03d6ca715878135e7 Mon Sep 17 00:00:00 2001 From: Tim Plummer Date: Mon, 7 Oct 2024 10:41:13 -0600 Subject: [PATCH 2/7] Add test for compute coincidence type and time deltas funcion --- imap_processing/hi/l1b/hi_l1b.py | 108 +++++++++++++----------- imap_processing/tests/hi/conftest.py | 10 +++ imap_processing/tests/hi/test_hi_l1b.py | 21 ++++- 3 files changed, 89 insertions(+), 50 deletions(-) diff --git a/imap_processing/hi/l1b/hi_l1b.py b/imap_processing/hi/l1b/hi_l1b.py index fb2e15d6f..9e6ebde0d 100644 --- a/imap_processing/hi/l1b/hi_l1b.py +++ b/imap_processing/hi/l1b/hi_l1b.py @@ -107,14 +107,9 @@ def annotate_direct_events(l1a_dataset: xr.Dataset) -> xr.Dataset: l1b_dataset : xarray.Dataset L1B direct event data. """ - n_epoch = l1a_dataset["epoch"].size + l1b_dataset = compute_coincidence_type_and_time_deltas(l1a_dataset) l1b_de_var_names = [ - "coincidence_type", "esa_energy_step", - "delta_t_ab", - "delta_t_ac1", - "delta_t_bc1", - "delta_t_c1c2", "spin_phase", "hae_latitude", "hae_longitude", @@ -122,9 +117,9 @@ def annotate_direct_events(l1a_dataset: xr.Dataset) -> xr.Dataset: "nominal_bin", ] new_data_vars = create_dataset_variables( - l1b_de_var_names, (n_epoch,), att_manager_lookup_str="hi_de_{0}" + l1b_de_var_names, l1a_dataset["epoch"].size, att_manager_lookup_str="hi_de_{0}" ) - l1b_dataset = l1a_dataset.assign(new_data_vars) + l1b_dataset = l1b_dataset.assign(new_data_vars) l1b_dataset = l1b_dataset.drop_vars( ["tof_1", "tof_2", "tof_3", "de_tag", "ccsds_met", "meta_event_met"] ) @@ -134,14 +129,13 @@ def annotate_direct_events(l1a_dataset: xr.Dataset) -> xr.Dataset: return l1b_dataset -def compute_coincidence_type_and_time_deltas(dataset: xr.Dataset) -> None: +def compute_coincidence_type_and_time_deltas(dataset: xr.Dataset) -> xr.Dataset: """ Compute coincidence type and time deltas. - Modifies the input xarray.DataSet in place. Uses existing L1A variables: - `trigger_id`, `tof_1`, `tof_2`, `tof_3` to compute L1B variables: - `coincidence_type`, `delta_t_ab`, `delta_t_ac1`, `delta_t_bc1`, - `delta_t_c1c2`. 
+ Adds the new variables "coincidence_type", "delta_t_ab", "delta_t_ac1", + "delta_t_bc1", and "delta_t_c1c2" to the input xarray.Dataset and returns + the result. Parameters ---------- @@ -151,22 +145,36 @@ def compute_coincidence_type_and_time_deltas(dataset: xr.Dataset) -> None: Returns ------- - None + xr.Dataset Input `dataset` is modified in-place. """ - a_mask = dataset.trigger_id.values == TriggerId.A - b_mask = dataset.trigger_id.values == TriggerId.B - c_mask = dataset.trigger_id.values == TriggerId.C + new_data_vars = create_dataset_variables( + [ + "coincidence_type", + "delta_t_ab", + "delta_t_ac1", + "delta_t_bc1", + "delta_t_c1c2", + ], + len(dataset.epoch), + "hi_de_{0}", + ) + out_ds = dataset.assign(new_data_vars) + + # compute masks needed for coincidence type and delta t calculations + a_mask = out_ds.trigger_id.values == TriggerId.A + b_mask = out_ds.trigger_id.values == TriggerId.B + c_mask = out_ds.trigger_id.values == TriggerId.C tof_1_valid_mask = np.isin( - dataset.tof_1.values, HiConstants.TOF1_BAD_VALUES, invert=True + out_ds.tof_1.values, HiConstants.TOF1_BAD_VALUES, invert=True ) tof_2_valid_mask = np.isin( - dataset.tof_1.values, HiConstants.TOF2_BAD_VALUES, invert=True + out_ds.tof_1.values, HiConstants.TOF2_BAD_VALUES, invert=True ) tof_1and2_valid_mask = tof_1_valid_mask & tof_2_valid_mask tof_3_valid_mask = np.isin( - dataset.tof_3.values, HiConstants.TOF3_BAD_VALUES, invert=True + out_ds.tof_3.values, HiConstants.TOF3_BAD_VALUES, invert=True ) # Table denoting how hit-first mask and valid TOF masks are used to set @@ -178,14 +186,14 @@ def compute_coincidence_type_and_time_deltas(dataset: xr.Dataset) -> None: # | 2 | B | A | C1 | C2 | # | 3 | C1 | A | B | C2 | # Set coincidence type bitmask - dataset.coincidence_type[a_mask | tof_1_valid_mask] &= CoincidenceBitmap.A - dataset.coincidence_type[ + out_ds.coincidence_type[a_mask | tof_1_valid_mask] |= CoincidenceBitmap.A + out_ds.coincidence_type[ b_mask | (a_mask & tof_1_valid_mask) | (c_mask & tof_2_valid_mask) - ] &= CoincidenceBitmap.B - dataset.coincidence_type[ + ] |= CoincidenceBitmap.B + out_ds.coincidence_type[ c_mask | (a_mask & tof_1_valid_mask) | (b_mask & tof_2_valid_mask) - ] &= CoincidenceBitmap.C1 - dataset.coincidence_type[tof_3_valid_mask] &= CoincidenceBitmap.C2 + ] |= CoincidenceBitmap.C1 + out_ds.coincidence_type[tof_3_valid_mask] &= CoincidenceBitmap.C2 # Table denoting how TOF is interpreted for each Trigger ID # ----------------------------------------------------------------------- @@ -195,47 +203,49 @@ def compute_coincidence_type_and_time_deltas(dataset: xr.Dataset) -> None: # | 2 | B | t_a - t_b | t_c1 - t_b | t_c2 - t_c1 | # | 3 | C | t_a - t_c1 | t_b - t_c1 | t_c2 - t_c1 | # delta_t_ab - dataset.delta_t_ab.values[a_mask & tof_1_valid_mask] = ( - dataset.tof_1.values[a_mask & tof_1_valid_mask].astype(np.float32) + out_ds.delta_t_ab.values[a_mask & tof_1_valid_mask] = ( + out_ds.tof_1.values[a_mask & tof_1_valid_mask].astype(np.float32) * HiConstants.TOF1_TICK ) - dataset.delta_t_ab.values[b_mask & tof_1_valid_mask] = ( - -dataset.tof_1.values[b_mask & tof_1_valid_mask].astype(np.float32) + out_ds.delta_t_ab.values[b_mask & tof_1_valid_mask] = ( + -out_ds.tof_1.values[b_mask & tof_1_valid_mask].astype(np.float32) * HiConstants.TOF1_TICK ) - dataset.delta_t_ab.values[c_mask & tof_1and2_valid_mask] = ( - dataset.tof_2.values[c_mask & tof_1and2_valid_mask].astype(np.float32) + out_ds.delta_t_ab.values[c_mask & tof_1and2_valid_mask] = ( + out_ds.tof_2.values[c_mask & 
tof_1and2_valid_mask].astype(np.float32) * HiConstants.TOF2_TICK - - dataset.tof_1.values[c_mask & tof_1and2_valid_mask].astype(np.float32) + - out_ds.tof_1.values[c_mask & tof_1and2_valid_mask].astype(np.float32) * HiConstants.TOF1_TICK ) # delta_t_ac1 - dataset.delta_t_ac1.values[a_mask & tof_2_valid_mask] = ( - dataset.tof_2.values[a_mask & tof_2_valid_mask].astype(np.float32) + out_ds.delta_t_ac1.values[a_mask & tof_2_valid_mask] = ( + out_ds.tof_2.values[a_mask & tof_2_valid_mask].astype(np.float32) * HiConstants.TOF2_TICK ) - dataset.delta_t_ac1.values[b_mask & tof_1and2_valid_mask] = ( - dataset.tof_2.values[b_mask & tof_1and2_valid_mask] * HiConstants.TOF2_TICK - - dataset.tof_1.values[b_mask & tof_1and2_valid_mask] * HiConstants.TOF1_TICK + out_ds.delta_t_ac1.values[b_mask & tof_1and2_valid_mask] = ( + out_ds.tof_2.values[b_mask & tof_1and2_valid_mask] * HiConstants.TOF2_TICK + - out_ds.tof_1.values[b_mask & tof_1and2_valid_mask] * HiConstants.TOF1_TICK ) - dataset.delta_t_ac1.values[c_mask & tof_1_valid_mask] = ( - -dataset.tof_1.values[c_mask & tof_1_valid_mask] * HiConstants.TOF1_TICK + out_ds.delta_t_ac1.values[c_mask & tof_1_valid_mask] = ( + -out_ds.tof_1.values[c_mask & tof_1_valid_mask] * HiConstants.TOF1_TICK ) # delta_t_bc1 - dataset.delta_t_bc1.values[a_mask & tof_1_valid_mask & tof_2_valid_mask] = ( - dataset.tof_2.values[a_mask & tof_1and2_valid_mask] * HiConstants.TOF2_TICK - - dataset.tof_1.values[a_mask & tof_1and2_valid_mask] * HiConstants.TOF1_TICK + out_ds.delta_t_bc1.values[a_mask & tof_1_valid_mask & tof_2_valid_mask] = ( + out_ds.tof_2.values[a_mask & tof_1and2_valid_mask] * HiConstants.TOF2_TICK + - out_ds.tof_1.values[a_mask & tof_1and2_valid_mask] * HiConstants.TOF1_TICK ) - dataset.delta_t_bc1.values[b_mask & tof_2_valid_mask] = ( - dataset.tof_2.values[b_mask & tof_2_valid_mask] * HiConstants.TOF2_TICK + out_ds.delta_t_bc1.values[b_mask & tof_2_valid_mask] = ( + out_ds.tof_2.values[b_mask & tof_2_valid_mask] * HiConstants.TOF2_TICK ) - dataset.delta_t_bc1.values[c_mask & tof_2_valid_mask] = ( - -dataset.tof_2.values[c_mask & tof_2_valid_mask] * HiConstants.TOF2_TICK + out_ds.delta_t_bc1.values[c_mask & tof_2_valid_mask] = ( + -out_ds.tof_2.values[c_mask & tof_2_valid_mask] * HiConstants.TOF2_TICK ) # delta_tc1c2 - dataset.delta_t_c1c2.values[tof_3_valid_mask] = ( - dataset.tof_3.values[tof_3_valid_mask] * HiConstants.TOF3_TICK + out_ds.delta_t_c1c2.values[tof_3_valid_mask] = ( + out_ds.tof_3.values[tof_3_valid_mask] * HiConstants.TOF3_TICK ) + + return out_ds diff --git a/imap_processing/tests/hi/conftest.py b/imap_processing/tests/hi/conftest.py index 5c3ef0b56..c2de68a53 100644 --- a/imap_processing/tests/hi/conftest.py +++ b/imap_processing/tests/hi/conftest.py @@ -13,6 +13,16 @@ def hi_l0_test_data_path(hi_test_data_path): return hi_test_data_path / "l0" +@pytest.fixture(scope="session") +def hi_l1a_test_data_path(hi_test_data_path): + return hi_test_data_path / "l1a" + + +@pytest.fixture(scope="session") +def hi_l1a_test_file_path(hi_l1a_test_data_path): + return hi_l1a_test_data_path / "imap_hi_l1a_45sensor-de_20250415_v000.cdf" + + def create_metaevent(esa_step, met_subseconds, met_seconds): start_bitmask_data = 0 # META return ( diff --git a/imap_processing/tests/hi/test_hi_l1b.py b/imap_processing/tests/hi/test_hi_l1b.py index 143599952..8a3a5c8c0 100644 --- a/imap_processing/tests/hi/test_hi_l1b.py +++ b/imap_processing/tests/hi/test_hi_l1b.py @@ -1,7 +1,11 @@ """Test coverage for imap_processing.hi.l1b.hi_l1b.py""" +from 
imap_processing.cdf.utils import load_cdf from imap_processing.hi.l1a.hi_l1a import hi_l1a -from imap_processing.hi.l1b.hi_l1b import hi_l1b +from imap_processing.hi.l1b.hi_l1b import ( + compute_coincidence_type_and_time_deltas, + hi_l1b, +) from imap_processing.hi.utils import HIAPID @@ -29,3 +33,18 @@ def test_hi_l1b_de(create_de_data, tmp_path): l1b_dataset = hi_l1b(processed_data[0], data_version=data_version) assert l1b_dataset.attrs["Logical_source"] == "imap_hi_l1b_45sensor-de" assert len(l1b_dataset.data_vars) == 14 + + +def test_compute_coincidence_type_and_time_deltas(hi_l1a_test_file_path): + """Test coverage for + `imap_processing.hi.hi_l1b.compute_coincidence_type_and_time_deltas`.""" + l1a_dataset = load_cdf(hi_l1a_test_file_path) + updated_dataset = compute_coincidence_type_and_time_deltas(l1a_dataset) + for var_name in [ + "coincidence_type", + "delta_t_ab", + "delta_t_ac1", + "delta_t_bc1", + "delta_t_c1c2", + ]: + assert var_name in updated_dataset.data_vars From aa8d841e9e611500d52984ea4994493eac6328f4 Mon Sep 17 00:00:00 2001 From: Tim Plummer Date: Tue, 8 Oct 2024 14:07:53 -0600 Subject: [PATCH 3/7] Add test coverage for coincidence type and delta T computation --- imap_processing/hi/l1b/hi_l1b.py | 44 ++++----- imap_processing/hi/utils.py | 18 ++-- imap_processing/tests/hi/test_hi_l1b.py | 113 ++++++++++++++++++++++-- 3 files changed, 141 insertions(+), 34 deletions(-) diff --git a/imap_processing/hi/l1b/hi_l1b.py b/imap_processing/hi/l1b/hi_l1b.py index 9e6ebde0d..3d6f71d4c 100644 --- a/imap_processing/hi/l1b/hi_l1b.py +++ b/imap_processing/hi/l1b/hi_l1b.py @@ -170,7 +170,7 @@ def compute_coincidence_type_and_time_deltas(dataset: xr.Dataset) -> xr.Dataset: out_ds.tof_1.values, HiConstants.TOF1_BAD_VALUES, invert=True ) tof_2_valid_mask = np.isin( - out_ds.tof_1.values, HiConstants.TOF2_BAD_VALUES, invert=True + out_ds.tof_2.values, HiConstants.TOF2_BAD_VALUES, invert=True ) tof_1and2_valid_mask = tof_1_valid_mask & tof_2_valid_mask tof_3_valid_mask = np.isin( @@ -186,14 +186,14 @@ def compute_coincidence_type_and_time_deltas(dataset: xr.Dataset) -> xr.Dataset: # | 2 | B | A | C1 | C2 | # | 3 | C1 | A | B | C2 | # Set coincidence type bitmask - out_ds.coincidence_type[a_mask | tof_1_valid_mask] |= CoincidenceBitmap.A + out_ds.coincidence_type[a_mask | tof_1_valid_mask] |= CoincidenceBitmap.A.value out_ds.coincidence_type[ b_mask | (a_mask & tof_1_valid_mask) | (c_mask & tof_2_valid_mask) - ] |= CoincidenceBitmap.B + ] |= CoincidenceBitmap.B.value out_ds.coincidence_type[ - c_mask | (a_mask & tof_1_valid_mask) | (b_mask & tof_2_valid_mask) - ] |= CoincidenceBitmap.C1 - out_ds.coincidence_type[tof_3_valid_mask] &= CoincidenceBitmap.C2 + c_mask | (a_mask & tof_2_valid_mask) | (b_mask & tof_2_valid_mask) + ] |= CoincidenceBitmap.C1.value + out_ds.coincidence_type[tof_3_valid_mask] |= CoincidenceBitmap.C2.value # Table denoting how TOF is interpreted for each Trigger ID # ----------------------------------------------------------------------- @@ -205,47 +205,51 @@ def compute_coincidence_type_and_time_deltas(dataset: xr.Dataset) -> xr.Dataset: # delta_t_ab out_ds.delta_t_ab.values[a_mask & tof_1_valid_mask] = ( out_ds.tof_1.values[a_mask & tof_1_valid_mask].astype(np.float32) - * HiConstants.TOF1_TICK + * HiConstants.TOF1_TICK_PER_NS ) out_ds.delta_t_ab.values[b_mask & tof_1_valid_mask] = ( -out_ds.tof_1.values[b_mask & tof_1_valid_mask].astype(np.float32) - * HiConstants.TOF1_TICK + * HiConstants.TOF1_TICK_PER_NS ) out_ds.delta_t_ab.values[c_mask & tof_1and2_valid_mask] 
= ( out_ds.tof_2.values[c_mask & tof_1and2_valid_mask].astype(np.float32) - * HiConstants.TOF2_TICK + * HiConstants.TOF2_TICK_PER_NS - out_ds.tof_1.values[c_mask & tof_1and2_valid_mask].astype(np.float32) - * HiConstants.TOF1_TICK + * HiConstants.TOF1_TICK_PER_NS ) # delta_t_ac1 out_ds.delta_t_ac1.values[a_mask & tof_2_valid_mask] = ( out_ds.tof_2.values[a_mask & tof_2_valid_mask].astype(np.float32) - * HiConstants.TOF2_TICK + * HiConstants.TOF2_TICK_PER_NS ) out_ds.delta_t_ac1.values[b_mask & tof_1and2_valid_mask] = ( - out_ds.tof_2.values[b_mask & tof_1and2_valid_mask] * HiConstants.TOF2_TICK - - out_ds.tof_1.values[b_mask & tof_1and2_valid_mask] * HiConstants.TOF1_TICK + out_ds.tof_2.values[b_mask & tof_1and2_valid_mask] + * HiConstants.TOF2_TICK_PER_NS + - out_ds.tof_1.values[b_mask & tof_1and2_valid_mask] + * HiConstants.TOF1_TICK_PER_NS ) out_ds.delta_t_ac1.values[c_mask & tof_1_valid_mask] = ( - -out_ds.tof_1.values[c_mask & tof_1_valid_mask] * HiConstants.TOF1_TICK + -out_ds.tof_1.values[c_mask & tof_1_valid_mask] * HiConstants.TOF1_TICK_PER_NS ) # delta_t_bc1 out_ds.delta_t_bc1.values[a_mask & tof_1_valid_mask & tof_2_valid_mask] = ( - out_ds.tof_2.values[a_mask & tof_1and2_valid_mask] * HiConstants.TOF2_TICK - - out_ds.tof_1.values[a_mask & tof_1and2_valid_mask] * HiConstants.TOF1_TICK + out_ds.tof_2.values[a_mask & tof_1and2_valid_mask] + * HiConstants.TOF2_TICK_PER_NS + - out_ds.tof_1.values[a_mask & tof_1and2_valid_mask] + * HiConstants.TOF1_TICK_PER_NS ) out_ds.delta_t_bc1.values[b_mask & tof_2_valid_mask] = ( - out_ds.tof_2.values[b_mask & tof_2_valid_mask] * HiConstants.TOF2_TICK + out_ds.tof_2.values[b_mask & tof_2_valid_mask] * HiConstants.TOF2_TICK_PER_NS ) out_ds.delta_t_bc1.values[c_mask & tof_2_valid_mask] = ( - -out_ds.tof_2.values[c_mask & tof_2_valid_mask] * HiConstants.TOF2_TICK + -out_ds.tof_2.values[c_mask & tof_2_valid_mask] * HiConstants.TOF2_TICK_PER_NS ) - # delta_tc1c2 + # delta_t_c1c2 out_ds.delta_t_c1c2.values[tof_3_valid_mask] = ( - out_ds.tof_3.values[tof_3_valid_mask] * HiConstants.TOF3_TICK + out_ds.tof_3.values[tof_3_valid_mask] * HiConstants.TOF3_TICK_PER_NS ) return out_ds diff --git a/imap_processing/hi/utils.py b/imap_processing/hi/utils.py index 21193540a..1ba55df90 100644 --- a/imap_processing/hi/utils.py +++ b/imap_processing/hi/utils.py @@ -42,12 +42,12 @@ class HiConstants: Attributes ---------- - TOF1_TICK : float - Duration of Time-of-Flight 1 clock tick. - TOF2_TICK : float - Duration of Time-of-Flight 2 clock tick. - TOF3_TICK : float - Duration of Time-of-Flight 3 clock tick. + TOF1_TICK_PER_NS : int + Duration of Time-of-Flight 1 clock tick in nanoseconds. + TOF2_TICK_PER_NS : int + Duration of Time-of-Flight 2 clock tick in nanoseconds. + TOF3_TICK_PER_NS : int + Duration of Time-of-Flight 3 clock tick in nanoseconds. TOF1_BAD_VALUES : tuple[int] Tuple of values indicating TOF1 does not contain a valid time. TOF2_BAD_VALUES : tuple[int] @@ -56,9 +56,9 @@ class HiConstants: Tuple of values indicating TOF3 does not contain a valid time. """ - TOF1_TICK = 1e-9 # 1 ns - TOF2_TICK = 1e-9 # 1 ns - TOF3_TICK = 5e-10 # 0.5 ns + TOF1_TICK_PER_NS = 1 # 1 ns + TOF2_TICK_PER_NS = 1 # 1 ns + TOF3_TICK_PER_NS = 2 # 0.5 ns # These values are stored in the TOF telemetry when the TOF timer # does not have valid data. 
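
The sentinel-masking idiom that these `HiConstants` values support recurs
throughout the series. As a minimal, self-contained sketch of the pattern
(the constants are mirrored from the class above; the sample TOF array is
synthetic, not code from this repository):

    import numpy as np

    # Sentinel codes and tick duration, mirroring the constants above
    TOF1_BAD_VALUES = (511, 1023)
    TOF1_TICK_PER_NS = 1

    tof_1 = np.array([5, 511, 42, 1023], dtype=np.uint16)

    # invert=True flips set membership: True where tof_1 is NOT a sentinel
    tof_1_valid = np.isin(tof_1, TOF1_BAD_VALUES, invert=True)
    # -> array([ True, False,  True, False])

    # Convert valid tick counts to nanoseconds; invalid entries keep the fill
    delta_t_ns = np.full(tof_1.shape, np.nan, dtype=np.float32)
    delta_t_ns[tof_1_valid] = (
        tof_1[tof_1_valid].astype(np.float32) * TOF1_TICK_PER_NS
    )
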
diff --git a/imap_processing/tests/hi/test_hi_l1b.py b/imap_processing/tests/hi/test_hi_l1b.py index 8a3a5c8c0..1852966f7 100644 --- a/imap_processing/tests/hi/test_hi_l1b.py +++ b/imap_processing/tests/hi/test_hi_l1b.py @@ -1,12 +1,16 @@ """Test coverage for imap_processing.hi.l1b.hi_l1b.py""" -from imap_processing.cdf.utils import load_cdf +import numpy as np +import pytest +import xarray as xr + from imap_processing.hi.l1a.hi_l1a import hi_l1a from imap_processing.hi.l1b.hi_l1b import ( + CoincidenceBitmap, compute_coincidence_type_and_time_deltas, hi_l1b, ) -from imap_processing.hi.utils import HIAPID +from imap_processing.hi.utils import HIAPID, HiConstants def test_hi_l1b_hk(hi_l0_test_data_path): @@ -35,11 +39,69 @@ def test_hi_l1b_de(create_de_data, tmp_path): assert len(l1b_dataset.data_vars) == 14 -def test_compute_coincidence_type_and_time_deltas(hi_l1a_test_file_path): +@pytest.fixture() +def synthetic_trigger_id_and_tof_data(): + """Create synthetic minimum dataset for testing the + coincidence_type_and_time_deltas algorithm.""" + # The following coincidence type table shows possible values to consider + # Value| # Exp | Requirements to get this value + # -----|-------|------------------------------- + # 0 | 0 | Non-event not recorded + # 1 | 0 | Can't trigger c2 only + # 2 | 2 | trigger_id = 3, tof_3 invalid + # 3 | 2 | trigger_id = 3, tof_3 valid + # 4 | 2 | trigger_id = 2, no valid tofs + # 5 | 0 | B and C2 not possible? + # 6 | 4 | trigger_id = 2 OR 3, tof_2 valid + # 7 | 4 | trigger_id = 2 OR 3, tof_2/3 valid + # 8 | 2 | trigger_id = 3, no valid tofs + # 9 | 0 | A and C2 not possible? + # 10 | 3 | trigger_id = 1, tof_2 OR trigger_id = 3, tof_1 + # 11 | 3 | trigger_id = 1, tof_2/3, OR trigger_id = 3, tof_1/3 + # 12 | 2 | trigger_id = 1 OR 2, tof_1 + # 13 | 0 | A/B and C2 not possible? + # 14 | 3 | trigger_id = 1 OR 2 OR 3, tof_1/2 + # 15 | 3 | trigger_id = 1, 2, 3, tof_1/2/3 + + # Use meshgrid to get all combinations of trigger_id and tof valid/invalid + # Note: this generates 6 impossible occurrences where C1 is not triggered + # but C2 is. Those are manually removed below. 
+ ids = np.arange(3) + 1 + tof1s = np.array(np.concatenate((HiConstants.TOF1_BAD_VALUES, [1]))) + tof2s = np.array(np.concatenate((HiConstants.TOF2_BAD_VALUES, [2]))) + tof3s = np.array(np.concatenate((HiConstants.TOF3_BAD_VALUES, [3]))) + var_names = ["trigger_id", "tof_1", "tof_2", "tof_3"] + data = np.meshgrid(ids, tof1s, tof2s, tof3s) + data = [arr.flatten() for arr in data] + # Remove impossible combinations + good_inds = np.nonzero( + np.logical_not( + np.logical_and(data[0] != 3, ((data[2] >= 511) & (data[3] < 511))) + ) + ) + data = [arr[good_inds] for arr in data] + data_vars = { + n: xr.DataArray(arr, dims=["epoch"]) for n, arr in zip(var_names, data) + } + synthetic_l1a_ds = xr.Dataset( + coords={ + "epoch": xr.DataArray( + np.arange(data_vars["trigger_id"].size), name="epoch", dims=["epoch"] + ) + }, + data_vars=data_vars, + ) + expected_histogram = np.array([0, 0, 2, 2, 2, 0, 4, 4, 2, 0, 3, 3, 2, 0, 3, 3]) + return synthetic_l1a_ds, expected_histogram + + +def test_compute_coincidence_type_and_time_deltas(synthetic_trigger_id_and_tof_data): """Test coverage for `imap_processing.hi.hi_l1b.compute_coincidence_type_and_time_deltas`.""" - l1a_dataset = load_cdf(hi_l1a_test_file_path) - updated_dataset = compute_coincidence_type_and_time_deltas(l1a_dataset) + # l1a_dataset = load_cdf(hi_l1a_test_file_path) + updated_dataset = compute_coincidence_type_and_time_deltas( + synthetic_trigger_id_and_tof_data[0] + ) for var_name in [ "coincidence_type", "delta_t_ab", @@ -48,3 +110,44 @@ def test_compute_coincidence_type_and_time_deltas(hi_l1a_test_file_path): "delta_t_c1c2", ]: assert var_name in updated_dataset.data_vars + # verify coincidence type values + coincidence_hist, bins = np.histogram( + updated_dataset.coincidence_type, bins=np.arange(17) + ) + np.testing.assert_array_equal( + coincidence_hist, synthetic_trigger_id_and_tof_data[1] + ) + # verify delta_t values are valid in the correct locations + np.testing.assert_array_equal( + updated_dataset.delta_t_ab != updated_dataset.delta_t_ab.FILLVAL, + updated_dataset.coincidence_type >= 12, + ) + np.testing.assert_array_equal( + updated_dataset.delta_t_ac1 != updated_dataset.delta_t_ac1.FILLVAL, + np.logical_and( + np.bitwise_and(updated_dataset.coincidence_type, CoincidenceBitmap.A.value), + np.bitwise_and( + updated_dataset.coincidence_type, CoincidenceBitmap.C1.value + ), + ), + ) + np.testing.assert_array_equal( + updated_dataset.delta_t_bc1 != updated_dataset.delta_t_bc1.FILLVAL, + np.logical_and( + np.bitwise_and(updated_dataset.coincidence_type, CoincidenceBitmap.B.value), + np.bitwise_and( + updated_dataset.coincidence_type, CoincidenceBitmap.C1.value + ), + ), + ) + np.testing.assert_array_equal( + updated_dataset.delta_t_c1c2 != updated_dataset.delta_t_c1c2.FILLVAL, + np.logical_and( + np.bitwise_and( + updated_dataset.coincidence_type, CoincidenceBitmap.C1.value + ), + np.bitwise_and( + updated_dataset.coincidence_type, CoincidenceBitmap.C2.value + ), + ), + ) From 87215f8354a0e52088cfa9cf16ad66cf0755d164 Mon Sep 17 00:00:00 2001 From: Tim Plummer Date: Tue, 8 Oct 2024 14:18:40 -0600 Subject: [PATCH 4/7] Cleanup for PR --- imap_processing/hi/l1b/hi_l1b.py | 4 ++-- imap_processing/tests/hi/conftest.py | 10 ---------- 2 files changed, 2 insertions(+), 12 deletions(-) diff --git a/imap_processing/hi/l1b/hi_l1b.py b/imap_processing/hi/l1b/hi_l1b.py index 3d6f71d4c..518a71c79 100644 --- a/imap_processing/hi/l1b/hi_l1b.py +++ b/imap_processing/hi/l1b/hi_l1b.py @@ -135,7 +135,7 @@ def 
compute_coincidence_type_and_time_deltas(dataset: xr.Dataset) -> xr.Dataset: Adds the new variables "coincidence_type", "delta_t_ab", "delta_t_ac1", "delta_t_bc1", and "delta_t_c1c2" to the input xarray.Dataset and returns - the result. + the updated xarray.Dataset. Parameters ---------- @@ -146,7 +146,7 @@ def compute_coincidence_type_and_time_deltas(dataset: xr.Dataset) -> xr.Dataset: Returns ------- xr.Dataset - Input `dataset` is modified in-place. + Updated xarray.Dataset with 5 new variables added. """ new_data_vars = create_dataset_variables( [ diff --git a/imap_processing/tests/hi/conftest.py b/imap_processing/tests/hi/conftest.py index c2de68a53..5c3ef0b56 100644 --- a/imap_processing/tests/hi/conftest.py +++ b/imap_processing/tests/hi/conftest.py @@ -13,16 +13,6 @@ def hi_l0_test_data_path(hi_test_data_path): return hi_test_data_path / "l0" -@pytest.fixture(scope="session") -def hi_l1a_test_data_path(hi_test_data_path): - return hi_test_data_path / "l1a" - - -@pytest.fixture(scope="session") -def hi_l1a_test_file_path(hi_l1a_test_data_path): - return hi_l1a_test_data_path / "imap_hi_l1a_45sensor-de_20250415_v000.cdf" - - def create_metaevent(esa_step, met_subseconds, met_seconds): start_bitmask_data = 0 # META return ( From c240a7be1bbfe7a90067bc47aac10e94ff30d993 Mon Sep 17 00:00:00 2001 From: Tim Plummer Date: Wed, 9 Oct 2024 09:20:11 -0600 Subject: [PATCH 5/7] Fix broken doc build --- docs/source/conf.py | 2 ++ imap_processing/hi/l1b/hi_l1b.py | 27 +++++++++++++++------------ 2 files changed, 17 insertions(+), 12 deletions(-) diff --git a/docs/source/conf.py b/docs/source/conf.py index 0fff825e8..05a98d60a 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -107,6 +107,8 @@ (r"py:.*", r".*CoDICECompression.*"), (r"py:.*", r".*.lo.l0.utils.*"), (r"py:.*", r".*.lo.l0.data_classes.*"), + (r"py:.*", r".*.hi.l1b.hi_l1b.CoincidenceBitmap.*"), + (r"py:.*", r".*.hi.l1b.hi_l1b.TriggerId.*"), (r"py:.*", r".*.hit.l0.utils.*"), (r"py:.*", r".*.hit.l0.data_classes.*"), (r"py:.*", r".*.hit.l1a.*"), diff --git a/imap_processing/hi/l1b/hi_l1b.py b/imap_processing/hi/l1b/hi_l1b.py index 518a71c79..62f9ed326 100644 --- a/imap_processing/hi/l1b/hi_l1b.py +++ b/imap_processing/hi/l1b/hi_l1b.py @@ -11,14 +11,9 @@ from imap_processing.hi.utils import HIAPID, HiConstants, create_dataset_variables from imap_processing.utils import convert_raw_to_eu -logger = logging.getLogger(__name__) -ATTR_MGR = ImapCdfAttributes() -ATTR_MGR.add_instrument_global_attrs("hi") -ATTR_MGR.load_variable_attributes("imap_hi_variable_attrs.yaml") - class TriggerId(IntEnum): - """Int Enum class for trigger id values.""" + """IntEnum class for trigger id values.""" A = 1 B = 2 @@ -26,7 +21,7 @@ class TriggerId(IntEnum): class CoincidenceBitmap(IntEnum): - """Int Enum class for coincidence type bitmap values.""" + """IntEnum class for coincidence type bitmap values.""" A = 2**3 B = 2**2 @@ -34,6 +29,12 @@ class CoincidenceBitmap(IntEnum): C2 = 2**0 +logger = logging.getLogger(__name__) +ATTR_MGR = ImapCdfAttributes() +ATTR_MGR.add_instrument_global_attrs("hi") +ATTR_MGR.load_variable_attributes("imap_hi_variable_attrs.yaml") + + def hi_l1b(l1a_dataset: xr.Dataset, data_version: str) -> xr.Dataset: """ High level IMAP-HI L1B processing function. 
@@ -68,7 +69,9 @@ def hi_l1b(l1a_dataset: xr.Dataset, data_version: str) -> xr.Dataset: l1a_dataset, conversion_table_path=conversion_table_path, packet_name=packet_enum.name, - comment="#", + comment="#", # type: ignore[arg-type] + # Todo error, Argument "comment" to "convert_raw_to_eu" has incompatible + # type "str"; expected "dict[Any, Any]" converters={"mnemonic": str.lower}, ) @@ -145,7 +148,7 @@ def compute_coincidence_type_and_time_deltas(dataset: xr.Dataset) -> xr.Dataset: Returns ------- - xr.Dataset + xarray.Dataset Updated xarray.Dataset with 5 new variables added. """ new_data_vars = create_dataset_variables( @@ -162,9 +165,9 @@ def compute_coincidence_type_and_time_deltas(dataset: xr.Dataset) -> xr.Dataset: out_ds = dataset.assign(new_data_vars) # compute masks needed for coincidence type and delta t calculations - a_mask = out_ds.trigger_id.values == TriggerId.A - b_mask = out_ds.trigger_id.values == TriggerId.B - c_mask = out_ds.trigger_id.values == TriggerId.C + a_mask = out_ds.trigger_id.values == TriggerId.A.value + b_mask = out_ds.trigger_id.values == TriggerId.B.value + c_mask = out_ds.trigger_id.values == TriggerId.C.value tof_1_valid_mask = np.isin( out_ds.tof_1.values, HiConstants.TOF1_BAD_VALUES, invert=True From 0cae67f9e51ea33fad3459ac103b98b6b4c2cf44 Mon Sep 17 00:00:00 2001 From: Tim Plummer Date: Wed, 9 Oct 2024 14:38:47 -0600 Subject: [PATCH 6/7] Clearify coincidence type table Simplify C1 coincidence type mask logic Remove .value from IntEnums where not needed --- imap_processing/hi/l1b/hi_l1b.py | 24 ++++++++++++------------ imap_processing/tests/hi/test_hi_l1b.py | 16 ++++------------ 2 files changed, 16 insertions(+), 24 deletions(-) diff --git a/imap_processing/hi/l1b/hi_l1b.py b/imap_processing/hi/l1b/hi_l1b.py index 62f9ed326..46abd3ba3 100644 --- a/imap_processing/hi/l1b/hi_l1b.py +++ b/imap_processing/hi/l1b/hi_l1b.py @@ -165,9 +165,9 @@ def compute_coincidence_type_and_time_deltas(dataset: xr.Dataset) -> xr.Dataset: out_ds = dataset.assign(new_data_vars) # compute masks needed for coincidence type and delta t calculations - a_mask = out_ds.trigger_id.values == TriggerId.A.value - b_mask = out_ds.trigger_id.values == TriggerId.B.value - c_mask = out_ds.trigger_id.values == TriggerId.C.value + a_mask = out_ds.trigger_id.values == TriggerId.A + b_mask = out_ds.trigger_id.values == TriggerId.B + c_mask = out_ds.trigger_id.values == TriggerId.C tof_1_valid_mask = np.isin( out_ds.tof_1.values, HiConstants.TOF1_BAD_VALUES, invert=True @@ -185,18 +185,18 @@ def compute_coincidence_type_and_time_deltas(dataset: xr.Dataset) -> xr.Dataset: # ----------------------------------------------------------------------- # | Trigger ID | Hit First | TOF 1 Valid | TOF 2 Valid | TOF 3 Valid | # ----------------------------------------------------------------------- - # | 1 | A | B | C1 | C2 | - # | 2 | B | A | C1 | C2 | - # | 3 | C1 | A | B | C2 | + # | 1 | A | A,B | A,C1 | C1,C2 | + # | 2 | B | A,B | B,C1 | C1,C2 | + # | 3 | C1 | A,C1 | B,C1 | C1,C2 | # Set coincidence type bitmask - out_ds.coincidence_type[a_mask | tof_1_valid_mask] |= CoincidenceBitmap.A.value + out_ds.coincidence_type[a_mask | tof_1_valid_mask] |= CoincidenceBitmap.A out_ds.coincidence_type[ b_mask | (a_mask & tof_1_valid_mask) | (c_mask & tof_2_valid_mask) - ] |= CoincidenceBitmap.B.value - out_ds.coincidence_type[ - c_mask | (a_mask & tof_2_valid_mask) | (b_mask & tof_2_valid_mask) - ] |= CoincidenceBitmap.C1.value - out_ds.coincidence_type[tof_3_valid_mask] |= CoincidenceBitmap.C2.value + ] |= 
CoincidenceBitmap.B + out_ds.coincidence_type[c_mask | tof_2_valid_mask | tof_2_valid_mask] |= ( + CoincidenceBitmap.C1.value + ) + out_ds.coincidence_type[tof_3_valid_mask] |= CoincidenceBitmap.C2 # Table denoting how TOF is interpreted for each Trigger ID # ----------------------------------------------------------------------- diff --git a/imap_processing/tests/hi/test_hi_l1b.py b/imap_processing/tests/hi/test_hi_l1b.py index 1852966f7..3867f5cca 100644 --- a/imap_processing/tests/hi/test_hi_l1b.py +++ b/imap_processing/tests/hi/test_hi_l1b.py @@ -126,28 +126,20 @@ def test_compute_coincidence_type_and_time_deltas(synthetic_trigger_id_and_tof_d updated_dataset.delta_t_ac1 != updated_dataset.delta_t_ac1.FILLVAL, np.logical_and( np.bitwise_and(updated_dataset.coincidence_type, CoincidenceBitmap.A.value), - np.bitwise_and( - updated_dataset.coincidence_type, CoincidenceBitmap.C1.value - ), + np.bitwise_and(updated_dataset.coincidence_type, CoincidenceBitmap.C1), ), ) np.testing.assert_array_equal( updated_dataset.delta_t_bc1 != updated_dataset.delta_t_bc1.FILLVAL, np.logical_and( np.bitwise_and(updated_dataset.coincidence_type, CoincidenceBitmap.B.value), - np.bitwise_and( - updated_dataset.coincidence_type, CoincidenceBitmap.C1.value - ), + np.bitwise_and(updated_dataset.coincidence_type, CoincidenceBitmap.C1), ), ) np.testing.assert_array_equal( updated_dataset.delta_t_c1c2 != updated_dataset.delta_t_c1c2.FILLVAL, np.logical_and( - np.bitwise_and( - updated_dataset.coincidence_type, CoincidenceBitmap.C1.value - ), - np.bitwise_and( - updated_dataset.coincidence_type, CoincidenceBitmap.C2.value - ), + np.bitwise_and(updated_dataset.coincidence_type, CoincidenceBitmap.C1), + np.bitwise_and(updated_dataset.coincidence_type, CoincidenceBitmap.C2), ), ) From 5a9fea34809ce313ba56bcf76524077857dc8742 Mon Sep 17 00:00:00 2001 From: Tim Plummer Date: Fri, 11 Oct 2024 13:49:39 -0600 Subject: [PATCH 7/7] Address PR comments Rework some of the code to make it more readable --- imap_processing/hi/l1b/hi_l1b.py | 128 +++++++++++------------- imap_processing/hi/utils.py | 12 +-- imap_processing/tests/hi/test_hi_l1b.py | 1 - 3 files changed, 67 insertions(+), 74 deletions(-) diff --git a/imap_processing/hi/l1b/hi_l1b.py b/imap_processing/hi/l1b/hi_l1b.py index 46abd3ba3..77f1d2808 100644 --- a/imap_processing/hi/l1b/hi_l1b.py +++ b/imap_processing/hi/l1b/hi_l1b.py @@ -165,20 +165,14 @@ def compute_coincidence_type_and_time_deltas(dataset: xr.Dataset) -> xr.Dataset: out_ds = dataset.assign(new_data_vars) # compute masks needed for coincidence type and delta t calculations - a_mask = out_ds.trigger_id.values == TriggerId.A - b_mask = out_ds.trigger_id.values == TriggerId.B - c_mask = out_ds.trigger_id.values == TriggerId.C + a_first = out_ds.trigger_id.values == TriggerId.A + b_first = out_ds.trigger_id.values == TriggerId.B + c_first = out_ds.trigger_id.values == TriggerId.C - tof_1_valid_mask = np.isin( - out_ds.tof_1.values, HiConstants.TOF1_BAD_VALUES, invert=True - ) - tof_2_valid_mask = np.isin( - out_ds.tof_2.values, HiConstants.TOF2_BAD_VALUES, invert=True - ) - tof_1and2_valid_mask = tof_1_valid_mask & tof_2_valid_mask - tof_3_valid_mask = np.isin( - out_ds.tof_3.values, HiConstants.TOF3_BAD_VALUES, invert=True - ) + tof1_valid = np.isin(out_ds.tof_1.values, HiConstants.TOF1_BAD_VALUES, invert=True) + tof2_valid = np.isin(out_ds.tof_2.values, HiConstants.TOF2_BAD_VALUES, invert=True) + tof1and2_valid = tof1_valid & tof2_valid + tof3_valid = np.isin(out_ds.tof_3.values, 
HiConstants.TOF3_BAD_VALUES, invert=True) # Table denoting how hit-first mask and valid TOF masks are used to set # coincidence type bitmask @@ -189,14 +183,12 @@ def compute_coincidence_type_and_time_deltas(dataset: xr.Dataset) -> xr.Dataset: # | 2 | B | A,B | B,C1 | C1,C2 | # | 3 | C1 | A,C1 | B,C1 | C1,C2 | # Set coincidence type bitmask - out_ds.coincidence_type[a_mask | tof_1_valid_mask] |= CoincidenceBitmap.A + out_ds.coincidence_type[a_first | tof1_valid] |= CoincidenceBitmap.A out_ds.coincidence_type[ - b_mask | (a_mask & tof_1_valid_mask) | (c_mask & tof_2_valid_mask) + b_first | (a_first & tof1_valid) | (c_first & tof2_valid) ] |= CoincidenceBitmap.B - out_ds.coincidence_type[c_mask | tof_2_valid_mask | tof_2_valid_mask] |= ( - CoincidenceBitmap.C1.value - ) - out_ds.coincidence_type[tof_3_valid_mask] |= CoincidenceBitmap.C2 + out_ds.coincidence_type[c_first | tof2_valid] |= CoincidenceBitmap.C1 + out_ds.coincidence_type[tof3_valid] |= CoincidenceBitmap.C2 # Table denoting how TOF is interpreted for each Trigger ID # ----------------------------------------------------------------------- @@ -205,54 +197,56 @@ def compute_coincidence_type_and_time_deltas(dataset: xr.Dataset) -> xr.Dataset: # | 1 | A | t_b - t_a | t_c1 - t_a | t_c2 - t_c1 | # | 2 | B | t_a - t_b | t_c1 - t_b | t_c2 - t_c1 | # | 3 | C | t_a - t_c1 | t_b - t_c1 | t_c2 - t_c1 | - # delta_t_ab - out_ds.delta_t_ab.values[a_mask & tof_1_valid_mask] = ( - out_ds.tof_1.values[a_mask & tof_1_valid_mask].astype(np.float32) - * HiConstants.TOF1_TICK_PER_NS - ) - out_ds.delta_t_ab.values[b_mask & tof_1_valid_mask] = ( - -out_ds.tof_1.values[b_mask & tof_1_valid_mask].astype(np.float32) - * HiConstants.TOF1_TICK_PER_NS - ) - out_ds.delta_t_ab.values[c_mask & tof_1and2_valid_mask] = ( - out_ds.tof_2.values[c_mask & tof_1and2_valid_mask].astype(np.float32) - * HiConstants.TOF2_TICK_PER_NS - - out_ds.tof_1.values[c_mask & tof_1and2_valid_mask].astype(np.float32) - * HiConstants.TOF1_TICK_PER_NS - ) - - # delta_t_ac1 - out_ds.delta_t_ac1.values[a_mask & tof_2_valid_mask] = ( - out_ds.tof_2.values[a_mask & tof_2_valid_mask].astype(np.float32) - * HiConstants.TOF2_TICK_PER_NS - ) - out_ds.delta_t_ac1.values[b_mask & tof_1and2_valid_mask] = ( - out_ds.tof_2.values[b_mask & tof_1and2_valid_mask] - * HiConstants.TOF2_TICK_PER_NS - - out_ds.tof_1.values[b_mask & tof_1and2_valid_mask] - * HiConstants.TOF1_TICK_PER_NS - ) - out_ds.delta_t_ac1.values[c_mask & tof_1_valid_mask] = ( - -out_ds.tof_1.values[c_mask & tof_1_valid_mask] * HiConstants.TOF1_TICK_PER_NS - ) - - # delta_t_bc1 - out_ds.delta_t_bc1.values[a_mask & tof_1_valid_mask & tof_2_valid_mask] = ( - out_ds.tof_2.values[a_mask & tof_1and2_valid_mask] - * HiConstants.TOF2_TICK_PER_NS - - out_ds.tof_1.values[a_mask & tof_1and2_valid_mask] - * HiConstants.TOF1_TICK_PER_NS - ) - out_ds.delta_t_bc1.values[b_mask & tof_2_valid_mask] = ( - out_ds.tof_2.values[b_mask & tof_2_valid_mask] * HiConstants.TOF2_TICK_PER_NS - ) - out_ds.delta_t_bc1.values[c_mask & tof_2_valid_mask] = ( - -out_ds.tof_2.values[c_mask & tof_2_valid_mask] * HiConstants.TOF2_TICK_PER_NS - ) - # delta_t_c1c2 - out_ds.delta_t_c1c2.values[tof_3_valid_mask] = ( - out_ds.tof_3.values[tof_3_valid_mask] * HiConstants.TOF3_TICK_PER_NS - ) + # Prepare for delta_t calculations by converting TOF values to nanoseconds + tof_1_ns = (out_ds.tof_1.values * HiConstants.TOF1_TICK_DUR).astype(np.int32) + tof_2_ns = (out_ds.tof_2.values * HiConstants.TOF2_TICK_DUR).astype(np.int32) + tof_3_ns = (out_ds.tof_3.values * 
HiConstants.TOF3_TICK_DUR).astype(np.int32) + + # # ********** delta_t_ab = (t_b - t_a) ********** + # Table: row 1, column 1 + a_and_tof1 = a_first & tof1_valid + out_ds.delta_t_ab.values[a_and_tof1] = tof_1_ns[a_and_tof1] + # Table: row 2, column 1 + b_and_tof1 = b_first & tof1_valid + out_ds.delta_t_ab.values[b_and_tof1] = -1 * tof_1_ns[b_and_tof1] + # Table: row 3, column 1 and 2 + # delta_t_ab = (t_b - t_c1) - (t_a - t_c1) = (t_b - t_a) + c_and_tof1and2 = c_first & tof1and2_valid + out_ds.delta_t_ab.values[c_and_tof1and2] = ( + tof_2_ns[c_and_tof1and2] - tof_1_ns[c_and_tof1and2] + ) + + # ********** delta_t_ac1 = (t_c1 - t_a) ********** + # Table: row 1, column 2 + a_and_tof2 = a_first & tof2_valid + out_ds.delta_t_ac1.values[a_and_tof2] = tof_2_ns[a_and_tof2] + # Table: row 2, column 1 and 2 + # delta_t_ac1 = (t_c1 - t_b) - (t_a - t_b) = (t_c1 - t_a) + b_and_tof1and2 = b_first & tof1and2_valid + out_ds.delta_t_ac1.values[b_and_tof1and2] = ( + tof_2_ns[b_and_tof1and2] - tof_1_ns[b_and_tof1and2] + ) + # Table: row 3, column 1 + c_and_tof1 = c_first & tof1_valid + out_ds.delta_t_ac1.values[c_and_tof1] = -1 * tof_1_ns[c_and_tof1] + + # ********** delta_t_bc1 = (t_c1 - t_b) ********** + # Table: row 1, column 1 and 2 + # delta_t_bc1 = (t_c1 - t_a) - (t_b - t_a) => (t_c1 - t_b) + a_and_tof1and2 = a_first & tof1and2_valid + out_ds.delta_t_bc1.values[a_and_tof1and2] = ( + tof_2_ns[a_and_tof1and2] - tof_1_ns[a_and_tof1and2] + ) + # Table: row 2, column 2 + b_and_tof2 = b_first & tof2_valid + out_ds.delta_t_bc1.values[b_and_tof2] = tof_2_ns[b_and_tof2] + # Table: row 3, column 2 + c_and_tof2 = c_first & tof2_valid + out_ds.delta_t_bc1.values[c_and_tof2] = -1 * tof_2_ns[c_and_tof2] + + # ********** delta_t_c1c2 = (t_c2 - t_c1) ********** + # Table: all rows, column 3 + out_ds.delta_t_c1c2.values[tof3_valid] = tof_3_ns[tof3_valid] return out_ds diff --git a/imap_processing/hi/utils.py b/imap_processing/hi/utils.py index 1ba55df90..b67e065ca 100644 --- a/imap_processing/hi/utils.py +++ b/imap_processing/hi/utils.py @@ -42,11 +42,11 @@ class HiConstants: Attributes ---------- - TOF1_TICK_PER_NS : int + TOF1_TICK_DUR : int Duration of Time-of-Flight 1 clock tick in nanoseconds. - TOF2_TICK_PER_NS : int + TOF2_TICK_DUR : int Duration of Time-of-Flight 2 clock tick in nanoseconds. - TOF3_TICK_PER_NS : int + TOF3_TICK_DUR : int Duration of Time-of-Flight 3 clock tick in nanoseconds. TOF1_BAD_VALUES : tuple[int] Tuple of values indicating TOF1 does not contain a valid time. @@ -56,9 +56,9 @@ class HiConstants: Tuple of values indicating TOF3 does not contain a valid time. """ - TOF1_TICK_PER_NS = 1 # 1 ns - TOF2_TICK_PER_NS = 1 # 1 ns - TOF3_TICK_PER_NS = 2 # 0.5 ns + TOF1_TICK_DUR = 1 # 1 ns + TOF2_TICK_DUR = 1 # 1 ns + TOF3_TICK_DUR = 0.5 # 0.5 ns # These values are stored in the TOF telemetry when the TOF timer # does not have valid data. diff --git a/imap_processing/tests/hi/test_hi_l1b.py b/imap_processing/tests/hi/test_hi_l1b.py index 3867f5cca..4f09b247c 100644 --- a/imap_processing/tests/hi/test_hi_l1b.py +++ b/imap_processing/tests/hi/test_hi_l1b.py @@ -98,7 +98,6 @@ def synthetic_trigger_id_and_tof_data(): def test_compute_coincidence_type_and_time_deltas(synthetic_trigger_id_and_tof_data): """Test coverage for `imap_processing.hi.hi_l1b.compute_coincidence_type_and_time_deltas`.""" - # l1a_dataset = load_cdf(hi_l1a_test_file_path) updated_dataset = compute_coincidence_type_and_time_deltas( synthetic_trigger_id_and_tof_data[0] )
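
To make the final form of the algorithm concrete, the sketch below walks one
synthetic event through the bitmask and delta-t logic of the last revision. It
mirrors the `CoincidenceBitmap` weights and the 1 ns `TOF1_TICK_DUR` /
`TOF2_TICK_DUR` values defined in the series; the event values themselves are
invented and the snippet is illustrative only, not code from this repository:

    from enum import IntEnum

    class CoincidenceBitmap(IntEnum):
        """Bit weights matching the enum added in PATCH 1/7."""
        A = 2**3   # 8
        B = 2**2   # 4
        C1 = 2**1  # 2
        C2 = 2**0  # 1

    # Suppose detector A triggered first (trigger_id == 1), TOF1 and TOF2
    # hold valid tick counts, and TOF3 holds a sentinel (no C2 hit).
    tof_1_ticks, tof_2_ticks = 37, 50

    coincidence_type = 0
    coincidence_type |= CoincidenceBitmap.A   # A hit first
    coincidence_type |= CoincidenceBitmap.B   # valid TOF1 => B also hit
    coincidence_type |= CoincidenceBitmap.C1  # valid TOF2 => C1 also hit
    assert coincidence_type == 14             # 0b1110: A, B, C1 coincidence

    # Per the TOF-interpretation table, for an A-first event:
    #   delta_t_ab  = t_b - t_a  = TOF1 ticks
    #   delta_t_bc1 = (t_c1 - t_a) - (t_b - t_a) = TOF2 - TOF1 ticks
    # with TOF1 and TOF2 ticks each lasting 1 ns.
    delta_t_ab_ns = tof_1_ticks * 1                   # 37 ns
    delta_t_bc1_ns = (tof_2_ticks - tof_1_ticks) * 1  # 13 ns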