Skip to content

Commit

Permalink
MNT: Use module-level logger
Browse files Browse the repository at this point in the history
Rather than using the global root logger and updating the logging
configuration globally, each module that needs logging should obtain its
own logger instance (via logging.getLogger(__name__)) and submit log
messages to that specific logger.
  • Loading branch information
greglucas committed Mar 13, 2024
1 parent 9191a05 commit 35e65a3
Show file tree
Hide file tree
Showing 11 changed files with 27 additions and 22 deletions.
4 changes: 3 additions & 1 deletion imap_processing/cdf/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,8 @@
import xarray as xr
from cdflib.xarray import xarray_to_cdf

logger = logging.getLogger(__name__)


def calc_start_time(shcoarse_time: int):
"""Calculate the datetime64 from the CCSDS secondary header information.
Expand Down Expand Up @@ -59,7 +61,7 @@ def write_cdf(data: xr.Dataset, filepath: Path):
Path to the file created
"""
if not filepath.parent.exists():
logging.info("The directory does not exist, creating directory %s", filepath)
logger.info("The directory does not exist, creating directory %s", filepath)
filepath.parent.mkdir(parents=True)

# Insert the final attribute:
Expand Down
4 changes: 2 additions & 2 deletions imap_processing/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -328,10 +328,10 @@ def process(self):
# TODO: figure out data_dir, because now this fails.
# Should switch to using IMAP_DATA_DIR env var.
if filename_norm.exists():
logging.info(f"Uploading file: {filename_norm}")
logger.info(f"Uploading file: {filename_norm}")
imap_data_access.upload(filename_norm)
if filename_burst.exists():
logging.info(f"Uploading file: {filename_burst}")
logger.info(f"Uploading file: {filename_burst}")
imap_data_access.upload(filename_burst)


Expand Down
4 changes: 3 additions & 1 deletion imap_processing/codice/codice_l1a.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,8 @@
from imap_processing.codice.utils import CODICEAPID, create_dataset
from imap_processing.utils import group_by_apid, sort_by_time

logger = logging.getLogger(__name__)


def codice_l1a(packets: list[space_packet_parser.parser.Packet]) -> str:
"""Process CoDICE l0 data to create l1a data products.
Expand All @@ -43,7 +45,7 @@ def codice_l1a(packets: list[space_packet_parser.parser.Packet]) -> str:
sorted_packets = sort_by_time(grouped_data[apid], "SHCOARSE")
data = create_dataset(packets=sorted_packets)
else:
logging.debug(f"{apid} is currently not supported")
logger.debug(f"{apid} is currently not supported")

file = imap_data_access.ScienceFilePath.generate_from_inputs(
"codice", "l1a", "hk", "20210101", "20210102", "v01-01"
Expand Down
14 changes: 7 additions & 7 deletions imap_processing/hit/l0/hit_l1a_decom.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@

from imap_processing import decom

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


class HitAPID(IntEnum):
Expand Down Expand Up @@ -54,9 +54,9 @@ def decom_hit_packets(packet_file: str, xtce: str):
dictionary will be converted to a CDF.
"""
# TODO: XTCE Files need to be combined
logging.info(f"Unpacking {packet_file} using xtce definitions in {xtce}")
logger.info(f"Unpacking {packet_file} using xtce definitions in {xtce}")
packets = decom.decom_packets(packet_file, xtce)
logging.info(f"{packet_file} unpacked")
logger.info(f"{packet_file} unpacked")
# print(packets[0])
# sort all the packets in the list by their spacecraft time
sorted_packets = sorted(packets, key=lambda x: x.data["SHCOARSE"].derived_value)
Expand All @@ -67,16 +67,16 @@ def decom_hit_packets(packet_file: str, xtce: str):
unpacked_data = {}
for apid_name, apid in [(id.name, id.value) for id in HitAPID]:
# TODO: if science packet, do decompression
logging.info(f"Grouping packet values for {apid_name}:{apid}")
logger.info(f"Grouping packet values for {apid_name}:{apid}")
# get all the packets for this apid and groups them together in a
# dictionary
unpacked_data[apid_name] = group_apid_data(sorted_packets, apid)
logging.info(f"Finished grouping {apid_name}:{apid} packet values")
logger.info(f"Finished grouping {apid_name}:{apid} packet values")

# create datasets
logging.info("Creating a dataset for HIT L1A data")
logger.info("Creating a dataset for HIT L1A data")
dataset_dict = create_datasets(unpacked_data)
logging.info("HIT L1A dataset created")
logger.info("HIT L1A dataset created")
return dataset_dict


Expand Down
1 change: 0 additions & 1 deletion imap_processing/ialirt/l0/decom_ialirt.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,6 @@

from imap_processing.decom import decom_packets

logging.basicConfig(level=logging.ERROR)
logger = logging.getLogger(__name__)


Expand Down
8 changes: 5 additions & 3 deletions imap_processing/idex/idex_packet_parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,8 @@
from imap_processing.cdf.global_attrs import ConstantCoordinates
from imap_processing.idex import idex_cdf_attrs

logger = logging.getLogger(__name__)


class Scitype(IntEnum):
"""IDEX Science Type."""
Expand Down Expand Up @@ -515,7 +517,7 @@ def __init__(self, packet_file: str):
# Populate the IDEXRawDustEvent with 1's and 0's
dust_events[event_number].parse_packet(packet)
else:
logging.warning(f"Unhandled packet received: {packet}")
logger.warning(f"Unhandled packet received: {packet}")

processed_dust_impact_list = [
dust_event.process() for dust_event in dust_events.values()
Expand Down Expand Up @@ -584,7 +586,7 @@ def __init__(self, header_packet):
trigger.name: header_packet.data[trigger.packet_name].raw_value
for trigger in trigger_description_dict.values()
}
logging.debug(
logger.debug(
f"trigger_values:\n{self.trigger_values}"
) # Log values here in case of error

Expand Down Expand Up @@ -778,7 +780,7 @@ def _append_raw_data(self, scitype, bits):
elif scitype == Scitype.ION_GRID:
self.Ion_Grid_bits += bits
else:
logging.warning("Unknown science type received: [%s]", scitype)
logger.warning("Unknown science type received: [%s]", scitype)

def process(self):
"""Process the raw data into a xarray.Dataset.
Expand Down
4 changes: 2 additions & 2 deletions imap_processing/mag/l1a/mag_l1a.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,8 +27,8 @@ def mag_l1a(packet_filepath, output_filepath_norm, ouptput_filepath_burst):

if mag_norm is not None:
write_cdf(mag_norm, Path(output_filepath_norm))
logging.info(f"Created CDF file at {output_filepath_norm}")
logger.info(f"Created CDF file at {output_filepath_norm}")

if mag_burst is not None:
write_cdf(mag_burst, Path(ouptput_filepath_burst))
logging.info(f"Created CDF file at {output_filepath_norm}")
logger.info(f"Created CDF file at {output_filepath_norm}")
4 changes: 3 additions & 1 deletion imap_processing/swe/l1a/swe_l1a.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,8 @@
)
from imap_processing.utils import group_by_apid, sort_by_time

logger = logging.getLogger(__name__)


def swe_l1a(file_path):
"""Process SWE l0 data into l1a data.
Expand Down Expand Up @@ -40,7 +42,7 @@ def swe_l1a(file_path):
if apid == SWEAPID.SWE_SCIENCE:
# sort data by acquisition time
sorted_packets = sort_by_time(grouped_data[apid], "ACQ_START_COARSE")
logging.debug(
logger.debug(
"Processing science data for [%s] packets", len(sorted_packets)
)
data = swe_science(decom_data=sorted_packets)
Expand Down
3 changes: 1 addition & 2 deletions imap_processing/ultra/l0/decom_ultra.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,6 @@
)
from imap_processing.utils import group_by_apid, sort_by_time

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


Expand Down Expand Up @@ -104,7 +103,7 @@ def decom_ultra_apids(packet_file: str, xtce: str, apid: int):
append_params(decom_data, packet)
else:
for i in range(count):
logging.info(f"Appending image #{i}")
logger.info(f"Appending image #{i}")
append_params(decom_data, packet)

elif apid in ULTRA_AUX.apid:
Expand Down
2 changes: 1 addition & 1 deletion imap_processing/ultra/l1a/ultra_l1a.py
Original file line number Diff line number Diff line change
Expand Up @@ -109,4 +109,4 @@ def ultra_l1a(packet_file: Path, xtce: Path, output_filepath: Path):
if decom_ultra_aux:
dataset = xarray_aux(decom_ultra_aux)
write_cdf(dataset, Path(output_filepath))
logging.info(f"Created CDF file at {output_filepath}")
logger.info(f"Created CDF file at {output_filepath}")
1 change: 0 additions & 1 deletion tools/spice/spice_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,6 @@

import spiceypy as spice

logging.basicConfig(level=logging.ERROR)
logger = logging.getLogger(__name__)


Expand Down

0 comments on commit 35e65a3

Please sign in to comment.