ENH/MNT: Change from datetime formats to integer epochs
We have decided to define epoch as integer nanoseconds since the
J2000 epoch. This means that the epoch variable will have datatype
int64, and the new conversion utility will convert MET in seconds to
j2000ns, with a default reference epoch of 2010-01-01 per APL's
documentation.
greglucas committed Jun 26, 2024
1 parent 0c67894 commit db88146
Showing 28 changed files with 155 additions and 166 deletions.
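For reference, a minimal sketch of the new convention (using the names this commit introduces in imap_processing/cdf/utils.py; the MET value is made up):

```python
import numpy as np

from imap_processing.cdf.utils import J2000_EPOCH, met_to_j2000ns

# MET of 100.5 s on the spacecraft clock, referenced to the default
# IMAP epoch (2010-01-01T00:00:00), becomes int64 nanoseconds since J2000.
epoch_ns = met_to_j2000ns(100.5)
print(epoch_ns.dtype)  # int64

# An absolute timestamp can be recovered by adding the J2000 epoch back.
print(J2000_EPOCH + epoch_ns.astype("timedelta64[ns]"))
```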
5 changes: 0 additions & 5 deletions imap_processing/__init__.py
@@ -14,8 +14,6 @@
# This directory is used by the imap_processing package to find the packet definitions.
from pathlib import Path

import numpy as np

from imap_processing._version import __version__, __version_tuple__ # noqa: F401

# Eg. imap_module_directory = /usr/local/lib/python3.11/site-packages/imap_processing
@@ -34,6 +32,3 @@
"swe": ["l0", "l1a", "l1b", "l2"],
"ultra": ["l0", "l1a", "l1b", "l1c", "l2"],
}

# Reference start time (launch time or epoch)
launch_time = np.datetime64("2010-01-01T00:01:06.184", "ns")
2 changes: 1 addition & 1 deletion imap_processing/cdf/config/imap_hi_variable_attrs.yaml
@@ -70,7 +70,7 @@ default_epoch: &default_epoch
TIME_BASE: J2000
TIME_SCALE: Terrestrial Time
REFERENCE_POSITION: Rotating Earth Geoid
dtype: datetime64[ns]
dtype: int64

# ------- L1A DE Section -------
hi_de_ccsds_met:
65 changes: 36 additions & 29 deletions imap_processing/cdf/utils.py
@@ -16,36 +16,49 @@
logger = logging.getLogger(__name__)


def convert_met_to_datetime64(
# Reference start time (launch time or epoch)
# DEFAULT_EPOCH = np.datetime64("2010-01-01T00:01:06.184", "ns")
IMAP_EPOCH = np.datetime64("2010-01-01T00:00:00", "ns")
J2000_EPOCH = np.datetime64("2000-01-01T11:58:55.816", "ns")


def met_to_j2000ns(
met: np.typing.ArrayLike,
launch_time: Optional[np.datetime64] = imap_processing.launch_time,
) -> np.datetime64:
"""Convert mission elapsed time (MET) to numpy datetime64.
reference_epoch: Optional[np.datetime64] = IMAP_EPOCH,
) -> np.typing.ArrayLike:
"""
Convert mission elapsed time (MET) to nanoseconds from J2000.
Parameters
----------
met: array_like
Number of seconds since epoch (nominally the launch time)
launch_time : np.datetime64
The time of launch to use as the baseline.
met : array_like
Number of seconds since epoch according to the spacecraft clock.
reference_epoch : np.datetime64
The time of reference for the mission elapsed time. The standard
reference time for IMAP is January 1, 2010 00:00:00 UTC, per APL's
IMAP Timekeeping System Design document.
Returns
-------
np.datetime64
Numpy datetime64 representation of the mission elapsed time
array_like or scalar, int64
The mission elapsed time converted to nanoseconds since the J2000 epoch.
Notes
-----
This conversion is temporary for now, and will need SPICE in the future.
Nick Dutton mentioned that s/c clock start epoch is
jan-1-2010-00:01:06.184 ET
We will use this for now.
This conversion is temporary and will need SPICE in the future to
account for spacecraft clock drift.
"""
# Convert MET to int, then to nanoseconds as datetime64 requires int input and
# some instruments are adding additional float precision to the MET
# Mission elapsed time is in seconds, convert to nanoseconds
# NOTE: We need to multiply the incoming met by 1e9 first because we could have
# float input and we want to keep ns precision in those floats
# NOTE: We need int64 here when running on 32bit systems as plain int will default
# to 32bit and overflow due to the nanosecond multiplication
time_array = (np.asarray(met) * 1e9).astype(np.int64).astype("timedelta64[ns]")
return launch_time + time_array
time_array = (np.asarray(met) * 1e9).astype(np.int64)
# Calculate the time difference between our reference system and J2000
j2000_offset = (
(reference_epoch - J2000_EPOCH).astype("timedelta64[ns]").astype(np.int64)
)
return j2000_offset + time_array
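A quick sanity check of the offset arithmetic above (a sketch, not part of the diff): an MET of zero should map back onto the reference epoch itself.

```python
import numpy as np

IMAP_EPOCH = np.datetime64("2010-01-01T00:00:00", "ns")
J2000_EPOCH = np.datetime64("2000-01-01T11:58:55.816", "ns")

# Same offset computation as in met_to_j2000ns.
offset_ns = (IMAP_EPOCH - J2000_EPOCH).astype("timedelta64[ns]").astype(np.int64)
# Adding the offset back onto J2000 must land exactly on the IMAP epoch.
assert J2000_EPOCH + np.timedelta64(offset_ns, "ns") == IMAP_EPOCH
```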


def load_cdf(
@@ -61,19 +74,14 @@ def load_cdf(
remove_xarray_attrs : bool
Whether to remove the xarray attributes that get injected by the
cdf_to_xarray function from the output xarray.Dataset. Default is True.
**kwargs : {dict} optional
Keyword arguments for ``cdf_to_xarray``. This function overrides the
``cdf_to_xarray`` default keyword value `to_datetime=False` with
``to_datetime=True`.
**kwargs : dict, optional
Keyword arguments for ``cdf_to_xarray``.
Returns
-------
dataset : xarray.Dataset
The ``xarray`` dataset for the CDF file.
"""
# TODO: remove this when cdflib is updated to version >1.3.0
if "to_datetime" not in kwargs:
kwargs["to_datetime"] = True
dataset = cdf_to_xarray(file_path, **kwargs)

# cdf_to_xarray converts single-value attributes to lists
@@ -118,9 +126,9 @@ def write_cdf(dataset: xr.Dataset):
# Create the filename from the global attributes
# Logical_source looks like "imap_swe_l2_counts-1min"
instrument, data_level, descriptor = dataset.attrs["Logical_source"].split("_")[1:]
start_time = np.datetime_as_string(dataset["epoch"].values[0], unit="D").replace(
"-", ""
)
# Convert J2000 epoch referenced data to datetime64
dt64 = J2000_EPOCH + dataset["epoch"].values[0].astype("timedelta64[ns]")
start_time = np.datetime_as_string(dt64, unit="D").replace("-", "")

# Will now accept vXXX or XXX formats, as batch starter sends versions as vXXX.
r = re.compile(r"v\d{3}")
@@ -156,7 +164,6 @@
xarray_to_cdf(
dataset,
str(file_path),
datetime64_to_cdftt2000=True,
terminate_on_warning=True,
) # Terminate if not ISTP compliant

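Since epoch values are now plain int64, write_cdf reconstructs a datetime only to build the filename. A sketch of that round trip (the epoch value below is hypothetical):

```python
import numpy as np

J2000_EPOCH = np.datetime64("2000-01-01T11:58:55.816", "ns")

epoch0 = np.int64(315_619_200_000_000_000)  # hypothetical first epoch value, ns since J2000
dt64 = J2000_EPOCH + epoch0.astype("timedelta64[ns]")
start_time = np.datetime_as_string(dt64, unit="D").replace("-", "")  # YYYYMMDD for the filename
```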
4 changes: 2 additions & 2 deletions imap_processing/cli.py
@@ -621,15 +621,15 @@ def do_processing(self, dependencies):
elif self.data_level == "l1b":
data_dict = {}
for dependency in dependencies:
dataset = load_cdf(dependency, to_datetime=True)
dataset = load_cdf(dependency)
data_dict[dataset.attrs["Logical_source"]] = dataset
output_file = lo_l1b.lo_l1b(data_dict, self.version)
return [output_file]

elif self.data_level == "l1c":
data_dict = {}
for dependency in dependencies:
dataset = load_cdf(dependency, to_datetime=True)
dataset = load_cdf(dependency)
data_dict[dataset.attrs["Logical_source"]] = dataset
output_file = lo_l1c.lo_l1c(data_dict, self.version)
return [output_file]
32 changes: 12 additions & 20 deletions imap_processing/codice/codice_l1a.py
@@ -22,7 +22,7 @@

from imap_processing import imap_module_directory
from imap_processing.cdf.imap_cdf_manager import ImapCdfAttributes
from imap_processing.cdf.utils import convert_met_to_datetime64, write_cdf
from imap_processing.cdf.utils import IMAP_EPOCH, met_to_j2000ns, write_cdf
from imap_processing.codice import constants
from imap_processing.codice.codice_l0 import decom_packets
from imap_processing.codice.utils import CODICEAPID, create_hskp_dataset
@@ -78,18 +78,16 @@ def __init__(self, table_id: int, plan_id: int, plan_step: int, view_id: int):
self.plan_step = plan_step
self.view_id = view_id

def create_science_dataset(
self, start_time: np.datetime64, data_version: str
) -> xr.Dataset:
def create_science_dataset(self, met: np.int64, data_version: str) -> xr.Dataset:
"""
Create an ``xarray`` dataset for the unpacked science data.
The dataset can then be written to a CDF file.
Parameters
----------
start_time : numpy.datetime64
The start time of the packet, used to determine epoch data variable.
met : numpy.int64
The mission elapsed time of the packet, used to determine epoch data.
data_version : str
Version of the data product being created.
@@ -106,10 +104,7 @@ def create_science_dataset(

# Define coordinates
epoch = xr.DataArray(
[
start_time,
start_time + np.timedelta64(1, "s"),
], # TODO: Fix after SIT-3 (see note below)
met_to_j2000ns(met), # TODO: Fix after SIT-3 (see note below)
name="epoch",
dims=["epoch"],
attrs=cdf_attrs.get_variable_attributes("epoch_attrs"),
@@ -380,11 +375,8 @@ def process_codice_l1a(file_path: Path | str, data_version: str) -> xr.Dataset:
packets = sort_by_time(grouped_data[apid], "SHCOARSE")

# Determine the start time of the packet
start_time = convert_met_to_datetime64(
packets[0].data["ACQ_START_SECONDS"].raw_value,
launch_time=np.datetime64("2010-01-01T00:01:06.184", "ns"),
)

met = packets[0].data["ACQ_START_SECONDS"].raw_value
met = [met, met + 1] # TODO: Remove after SIT-3
# Extract the data
science_values = packets[0].data["DATA"].raw_value

@@ -397,7 +389,7 @@ def process_codice_l1a(file_path: Path | str, data_version: str) -> xr.Dataset:
pipeline.get_acquisition_times()
pipeline.get_data_products(apid)
pipeline.unpack_science_data(science_values)
dataset = pipeline.create_science_dataset(start_time, data_version)
dataset = pipeline.create_science_dataset(met, data_version)

# TODO: Temporary workaround in order to create hi data products in absence
# of simulated data. This is essentially the same process as is for
@@ -417,15 +409,15 @@ def process_codice_l1a(file_path: Path | str, data_version: str) -> xr.Dataset:
apid = CODICEAPID.COD_HI_SECT_SPECIES_COUNTS
table_id, plan_id, plan_step, view_id = (1, 0, 0, 6)

start_time = np.datetime64(
"2024-04-29T00:00:00", "ns"
) # Using this to match the other data products
met0 = (np.datetime64("2024-04-29T00:00") - IMAP_EPOCH).astype("timedelta64[s]")
met0 = met0.astype(np.int64)
met = [met0, met0 + 1] # Using this to match the other data products
science_values = "" # Currently don't have simulated data for this

pipeline = CoDICEL1aPipeline(table_id, plan_id, plan_step, view_id)
pipeline.get_data_products(apid)
pipeline.unpack_science_data(science_values)
dataset = pipeline.create_science_dataset(start_time, data_version)
dataset = pipeline.create_science_dataset(met, data_version)

# Write dataset to CDF
logger.info(f"\nFinal data product:\n{dataset}\n")
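The hard-coded start time for the temporary hi products is now expressed as an MET rather than a datetime. A sketch of that derivation, mirroring the lines above:

```python
import numpy as np

IMAP_EPOCH = np.datetime64("2010-01-01T00:00:00", "ns")

# Seconds elapsed on the spacecraft clock at 2024-04-29T00:00 UTC.
met0 = (np.datetime64("2024-04-29T00:00") - IMAP_EPOCH).astype("timedelta64[s]")
met0 = met0.astype(np.int64)
met = [met0, met0 + 1]  # two samples, to match the other data products
```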
6 changes: 3 additions & 3 deletions imap_processing/codice/utils.py
@@ -13,7 +13,7 @@

from imap_processing.cdf.global_attrs import ConstantCoordinates
from imap_processing.cdf.imap_cdf_manager import ImapCdfAttributes
from imap_processing.cdf.utils import convert_met_to_datetime64
from imap_processing.cdf.utils import met_to_j2000ns


class CODICEAPID(IntEnum):
@@ -129,9 +129,9 @@ def create_hskp_dataset(packets, data_version: str) -> xr.Dataset:

# TODO: Is there a way to get the attrs from the YAML-based method?
epoch = xr.DataArray(
convert_met_to_datetime64(
met_to_j2000ns(
metadata_arrays["SHCOARSE"],
launch_time=np.datetime64("2010-01-01T00:01:06.184", "ns"),
reference_epoch=np.datetime64("2010-01-01T00:01:06.184", "ns"),
),
name="epoch",
dims=["epoch"],
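Here the housekeeping epoch keeps the launch-time reference by overriding reference_epoch instead of taking the default IMAP epoch. A usage sketch (the SHCOARSE values are made up):

```python
import numpy as np

from imap_processing.cdf.utils import met_to_j2000ns

shcoarse = [0, 1, 2]  # hypothetical SHCOARSE values, seconds on the spacecraft clock
epoch_ns = met_to_j2000ns(
    shcoarse,
    reference_epoch=np.datetime64("2010-01-01T00:01:06.184", "ns"),
)
```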
10 changes: 5 additions & 5 deletions imap_processing/glows/l1a/glows_l1a.py
@@ -8,7 +8,7 @@
import xarray as xr

from imap_processing.cdf.global_attrs import ConstantCoordinates
from imap_processing.cdf.utils import convert_met_to_datetime64
from imap_processing.cdf.utils import J2000_EPOCH, met_to_j2000ns
from imap_processing.glows import __version__, glows_cdf_attrs
from imap_processing.glows.l0.decom_glows import decom_packets
from imap_processing.glows.l0.glows_l0_data import DirectEventL0
@@ -46,7 +46,7 @@ def glows_l1a(packet_filepath: Path, data_version: str) -> list[xr.Dataset]:
hist_l1a = HistogramL1A(hist)
# Split by IMAP start time
# TODO: Should this be MET?
hist_day = convert_met_to_datetime64(hist.SEC).astype("datetime64[D]")
hist_day = (J2000_EPOCH + met_to_j2000ns(hist.SEC)).astype("datetime64[D]")
hists_by_day[hist_day].append(hist_l1a)

# Generate CDF files for each day
@@ -85,7 +85,7 @@ def process_de_l0(
de_by_day = dict()

for de in de_l0:
de_day = convert_met_to_datetime64(de.MET).astype("datetime64[D]")
de_day = (J2000_EPOCH + met_to_j2000ns(de.MET)).astype("datetime64[D]")
if de_day not in de_by_day:
de_by_day[de_day] = [DirectEventL1A(de)]
elif de.SEQ != 0:
@@ -163,7 +163,7 @@ def generate_de_dataset(

for index, de in enumerate(de_l1a_list):
# Set the timestamp to the first timestamp of the direct event list
epoch_time = convert_met_to_datetime64(de.l0.MET).astype("datetime64[ns]")
epoch_time = met_to_j2000ns(de.l0.MET).astype("datetime64[ns]")

# determine if the length of the direct_events numpy array is long enough,
# and extend the direct_events length dimension if necessary.
@@ -332,7 +332,7 @@ def generate_histogram_dataset(

for index, hist in enumerate(hist_l1a_list):
# TODO: Should this be MET?
epoch_time = convert_met_to_datetime64(hist.imap_start_time.to_seconds())
epoch_time = met_to_j2000ns(hist.imap_start_time.to_seconds())
hist_data[index] = hist.histograms

support_data["flags_set_onboard"].append(hist.flags["flags_set_onboard"])
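The day-binning pattern above shifts the int64 epoch back to an absolute time before truncating to day precision. A sketch of the same steps as a standalone helper (hypothetical function, not in the diff):

```python
import numpy as np

from imap_processing.cdf.utils import J2000_EPOCH, met_to_j2000ns

def met_to_day(met_seconds):
    """Truncate an MET (in seconds) to the UTC day it falls on."""
    ns_since_j2000 = np.asarray(met_to_j2000ns(met_seconds))
    return (J2000_EPOCH + ns_since_j2000.astype("timedelta64[ns]")).astype("datetime64[D]")
```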
6 changes: 2 additions & 4 deletions imap_processing/hi/l1a/histogram.py
@@ -7,7 +7,7 @@
from space_packet_parser.parser import Packet

from imap_processing.cdf.global_attrs import ConstantCoordinates
from imap_processing.cdf.utils import convert_met_to_datetime64
from imap_processing.cdf.utils import met_to_j2000ns
from imap_processing.hi import hi_cdf_attrs

# TODO: Verify that these names are OK for counter variables in the CDF
@@ -57,9 +57,7 @@ def create_dataset(packets: list[Packet]) -> xr.Dataset:

# unpack the packets data into the Dataset
for i_epoch, packet in enumerate(packets):
dataset.epoch.data[i_epoch] = convert_met_to_datetime64(
packet.data["CCSDS_MET"].raw_value
)
dataset.epoch.data[i_epoch] = met_to_j2000ns(packet.data["CCSDS_MET"].raw_value)
dataset.ccsds_met[i_epoch] = packet.data["CCSDS_MET"].raw_value
dataset.esa_step[i_epoch] = packet.data["ESA_STEP"].raw_value

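With epoch now int64, any preallocated epoch coordinate must be integer-typed before the element-wise assignment above; the allocation itself is outside this hunk, so the sketch below is an assumption:

```python
import numpy as np
import xarray as xr

n_packets = 4  # hypothetical packet count
epoch = xr.DataArray(
    np.empty(n_packets, dtype=np.int64),  # int64 now, not datetime64[ns]
    name="epoch",
    dims=["epoch"],
)
```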
24 changes: 3 additions & 21 deletions imap_processing/hi/l1a/science_direct_event.py
@@ -4,9 +4,10 @@
import xarray as xr
from space_packet_parser.parser import Packet

from imap_processing import imap_module_directory, launch_time
from imap_processing import imap_module_directory
from imap_processing.cdf.cdf_attribute_manager import CdfAttributeManager
from imap_processing.cdf.global_attrs import ConstantCoordinates
from imap_processing.cdf.utils import met_to_j2000ns

# TODO: read LOOKED_UP_DURATION_OF_TICK from
# instrument status summary later. This value
@@ -20,25 +21,6 @@
MICROSECOND_TO_NS = 1e3


def get_direct_event_time(time_in_ns: int) -> np.datetime64:
"""
Create MET(Mission Elapsed Time) time using input times.
Parameters
----------
time_in_ns : int
Time in nanoseconds.
Returns
-------
met_datetime : numpy.datetime64
Human-readable MET time.
"""
met_datetime = launch_time + np.timedelta64(int(time_in_ns), "ns")

return met_datetime


def parse_direct_event(event_data: str) -> dict:
"""
Parse event data.
@@ -267,7 +249,7 @@ def create_dataset(de_data_list: list, packet_met_time: list) -> xr.Dataset:
+ event["de_tag"] * LOOKED_UP_DURATION_OF_TICK * MICROSECOND_TO_NS
)
data_dict["event_met"].append(de_met_in_ns)
data_dict["epoch"].append(get_direct_event_time(de_met_in_ns))
data_dict["epoch"].append(met_to_j2000ns(de_met_in_ns / 1e9))
data_dict["esa_stepping_num"].append(current_esa_step)
# start_bitmask_data is 1, 2, 3 for detector A, B, C
# respectively. This is used to identify which detector
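Note the unit handling above: the direct-event time is accumulated in nanoseconds, while met_to_j2000ns expects seconds, hence the division by 1e9. A sketch with a made-up event time:

```python
from imap_processing.cdf.utils import met_to_j2000ns

de_met_in_ns = 123_456_789_000  # hypothetical direct-event MET, in nanoseconds
epoch_ns = met_to_j2000ns(de_met_in_ns / 1e9)  # convert ns -> s before calling
```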
4 changes: 1 addition & 3 deletions imap_processing/hit/l1a/hit_l1a.py
@@ -173,9 +173,7 @@ def create_datasets(data: dict, skip_keys=None):
metadata_arrays[data_key].append(field_value)

# Convert MET integers into nanoseconds since J2000
epoch_converted_times = utils.convert_met_to_datetime64(
metadata_arrays["shcoarse"]
)
epoch_converted_times = utils.met_to_j2000ns(metadata_arrays["shcoarse"])

# Create xarray data arrays for dependencies
epoch_time = xr.DataArray(
2 changes: 1 addition & 1 deletion imap_processing/hit/l1b/hit_l1b.py
@@ -86,7 +86,7 @@ def create_hk_dataset():
# Create fake data for now

# Convert MET integers into nanoseconds since J2000
epoch_converted_time = utils.convert_met_to_datetime64([0, 1, 2])
epoch_converted_time = utils.met_to_j2000ns([0, 1, 2])

# Shape for dims
n_epoch = 3