Test infrastructure for external spice kernels #842

Merged
8 changes: 7 additions & 1 deletion .github/workflows/test.yml
@@ -39,9 +39,15 @@ jobs:

- name: Testing
id: test
env:
RUN_EXTERNAL_KERNEL_MARK: ${{ contains(matrix.os, 'ubuntu') && matrix.python-version == '3.9' }}
run: |
# Ignore the network marks from the remote test environment
poetry run pytest --color=yes --cov --cov-report=xml
if [ "$RUN_EXTERNAL_KERNEL_MARK" = "true" ]; then
poetry run pytest --color=yes --cov --cov-report=xml
else
poetry run pytest --color=yes --cov --cov-report=xml -m "not external_kernel"
fi

- name: Upload coverage reports to Codecov
uses: codecov/codecov-action@v4
4 changes: 4 additions & 0 deletions .gitignore
@@ -177,3 +177,7 @@ cython_debug/

# Data that is downloaded
data/

# Ignore specific SPICE kernels that get downloaded from NAIF automatically for tests
# marked with @pytest.mark.external_kernel
**/de440*.bsp
223 changes: 165 additions & 58 deletions imap_processing/tests/conftest.py
@@ -1,11 +1,14 @@
"""Global pytest configuration for the package."""

import logging
import os
import re
import time

import imap_data_access
import numpy as np
import pytest
import requests
import spiceypy as spice

from imap_processing import imap_module_directory
@@ -36,6 +39,60 @@ def _autoclear_spice():
spice.kclear()


@pytest.fixture(scope="session")
def _download_de440s(spice_test_data_path):
"""This fixture downloads the de440s.bsp kernel into the
tests/spice/test_data directory if it does not already exist there. The
fixture is not intended to be used directly. It is automatically added to
tests marked with "external_kernel" in the hook below."""
logger = logging.getLogger(__name__)
kernel_url = (
"https://naif.jpl.nasa.gov/pub/naif/generic_kernels/spk/planets/de440s.bsp"
)
kernel_name = kernel_url.split("/")[-1]
local_filepath = spice_test_data_path / kernel_name

Contributor (suggestion):
    if local_filepath.exists():
        logger.info("Kernel file already exists: %s", local_filepath)
        return
    allowed_attempts = 3
    for attempt_number in range(allowed_attempts):

etc.

if local_filepath.exists():
return
allowed_attempts = 3
for attempt_number in range(allowed_attempts):
try:
with requests.get(kernel_url, stream=True, timeout=30) as r:
r.raise_for_status()
with open(local_filepath, "wb") as f:
for chunk in r.iter_content(chunk_size=8192):
f.write(chunk)
logger.info("Cached kernel file to %s", local_filepath)
break
except requests.exceptions.RequestException as error:
logger.info(f"Request failed. {error}")
if attempt_number < allowed_attempts - 1:
logger.info(
f"Trying again, retries left "
f"{allowed_attempts - attempt_number - 1}, "
f"Exception: {error}"
)
time.sleep(1)
else:
logger.error(
f"Failed to download file after {allowed_attempts} "
f"attempts, Final Error: {error}"
)
raise


def pytest_collection_modifyitems(items):
Contributor:
What else do you envision will go here besides _download_de440s?

Contributor Author:
It is possible that certain tests could require other large external kernels be downloaded.

"""
The use of this hook allows modification of test `Items` after tests have
been collected. In this case, it automatically adds the _download_de440s
fixture to any test marked with the `external_kernel` marker.
https://docs.pytest.org/en/stable/reference/reference.html#pytest.hookspec.pytest_collection_modifyitems
"""
for item in items:
if item.get_closest_marker("external_kernel") is not None:
item.fixturenames.append("_download_de440s")
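
A rough, hypothetical sketch, following up on the review thread above: if other
large external kernels are ever needed, the hook could map each marker to its own
session-scoped download fixture (the extra marker and fixture names below are
invented for illustration and are not part of this change).

# Hypothetical extension of the hook above; only "external_kernel" exists today.
EXTERNAL_KERNEL_FIXTURES = {
    "external_kernel": "_download_de440s",
    # "external_ck": "_download_example_ck",  # invented future marker/fixture
}


def pytest_collection_modifyitems(items):
    for item in items:
        for marker_name, fixture_name in EXTERNAL_KERNEL_FIXTURES.items():
            if item.get_closest_marker(marker_name) is not None:
                item.fixturenames.append(fixture_name)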


@pytest.fixture(scope="session")
def spice_test_data_path(imap_tests_path):
return imap_tests_path / "spice/test_data"
@@ -71,8 +128,63 @@ def monkeypatch_session():
m.undo()


def make_metakernel_from_kernels(metakernel, kernels):
"""Helper function that writes a test metakernel from a list of filenames"""
with open(metakernel, "w") as mk:
mk.writelines(
[
"\n",
"\\begintext\n",
"\n",
"This is a temporary metakernel for imap_processing"
" unit and integration testing.\n",
"\n",
"\\begindata\n",
"\n",
"KERNELS_TO_LOAD = (\n",
]
)
# Put single quotes around every kernel name
kernels_with_quotes = [" '" + kern + "'" for kern in kernels]
# Add a comma and EOL to the end of each kernel path except the last.
formatted_kernels = [kern + ",\n" for kern in kernels_with_quotes[0:-1]]
# Add ')' to the last kernel
formatted_kernels.append(kernels_with_quotes[-1] + "\n)\n\n")
mk.writelines(formatted_kernels)


def get_test_kernels_to_load(template_path, kernel_dir_path):
"""
Helper function for grabbing a list of kernel filenames from the test
metakernel template. This is necessary in order to get absolute paths on
any system. Formats the absolute paths using the test data path fixture
value.
"""
kernels_to_load = []
max_line_length = 80
with open(template_path) as mk:
for k in mk:
kernel = k.rstrip("\n").format(
**{"SPICE_TEST_DATA_PATH": str(kernel_dir_path.absolute())}
)
while len(kernel) > 0:
if len(kernel) <= max_line_length:
kernels_to_load.append(kernel)
break
else:
slash_positions = np.array(
[m.start() for m in re.finditer("/", kernel)]
)
stop_idx = (
slash_positions[slash_positions < max_line_length - 1].max() + 1
)
kernels_to_load.append(kernel[0:stop_idx] + "+")
kernel = kernel[stop_idx:]
return kernels_to_load
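
The 80-character wrapping above exists because SPICE metakernels restrict line
length and allow a long file name to be continued onto the next line with a
trailing "+". A rough illustration of the helper's behavior (not part of this
change; assumes POSIX-style paths such as those on the CI runners):

# Hypothetical usage of get_test_kernels_to_load, for illustration only.
import tempfile
from pathlib import Path

with tempfile.TemporaryDirectory() as tmp:
    # Force a formatted kernel path longer than 80 characters.
    kernel_dir = Path(tmp) / ("x" * 70)
    template = Path(tmp) / "example_mk.template"
    template.write_text("{SPICE_TEST_DATA_PATH}/naif0012.tls\n")
    lines = get_test_kernels_to_load(template, kernel_dir)
    # Every returned segment fits within 80 characters; continued segments end
    # with "+" and the final segment carries the file name.
    assert all(len(line) <= 80 for line in lines)
    assert lines[0].endswith("+")
    assert lines[-1].endswith("naif0012.tls")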


@pytest.fixture(scope="session", autouse=True)
def use_test_metakernel(monkeypatch_session, tmpdir_factory, spice_test_data_path):
def session_test_metakernel(monkeypatch_session, tmpdir_factory, spice_test_data_path):
"""Generate a metakernel from the template metakernel by injecting the local
path into the metakernel and set the SPICE_METAKERNEL environment variable.

@@ -86,70 +198,65 @@ def use_test_metakernel(monkeypatch_session, tmpdir_factory, spice_test_data_pat
variable as needed. Use the `metakernel_path_not_set` fixture in tests that
need to override the environment variable.
"""

def make_metakernel_from_kernels(metakernel, kernels):
"""Helper function that writes a test metakernel from a list of filenames"""
with open(metakernel, "w") as mk:
mk.writelines(
[
"\n",
"\\begintext\n",
"\n",
"This is a temporary metakernel for imap_processing"
" unit and integration testing.\n",
"\n",
"\\begindata\n",
"\n",
"KERNELS_TO_LOAD = (\n",
]
)
# Put single quotes around every kernel name
kernels_with_quotes = [" '" + kern + "'" for kern in kernels]
# Add a comma and EOL to the end of each kernel path except the last.
formated_kernels = [kern + ",\n" for kern in kernels_with_quotes[0:-1]]
# Add ')' to the last kernel
formated_kernels.append(kernels_with_quotes[-1] + "\n)\n\n")
mk.writelines(formated_kernels)

def get_test_kernels_to_load():
"""
Helper function for grabbing a list of kernel filenames from the test
metakernel template. This is necessary in order to get absolute paths on
any system. Formats the absolute paths using the test data path fixture
value.
"""
test_metakernel = spice_test_data_path / "imap_test_metakernel.template"
kernels_to_load = []
max_line_length = 80
with open(test_metakernel) as mk:
for k in mk:
kernel = k.rstrip("\n").format(
**{"SPICE_TEST_DATA_PATH": str(spice_test_data_path.absolute())}
)
while len(kernel) > 0:
if len(kernel) <= max_line_length:
kernels_to_load.append(kernel)
break
else:
slash_positions = np.array(
[m.start() for m in re.finditer("/", kernel)]
)
stop_idx = (
slash_positions[slash_positions < max_line_length - 1].max()
+ 1
)
kernels_to_load.append(kernel[0:stop_idx] + "+")
kernel = kernel[stop_idx:]
return kernels_to_load

template_path = spice_test_data_path / "imap_simple_metakernel.template"
kernels_to_load = get_test_kernels_to_load(template_path, spice_test_data_path)
metakernel_path = tmpdir_factory.mktemp("spice") / "imap_2024_v001.tm"
kernels_to_load = get_test_kernels_to_load()
make_metakernel_from_kernels(metakernel_path, kernels_to_load)
monkeypatch_session.setenv("SPICE_METAKERNEL", str(metakernel_path))
yield str(metakernel_path)
spice.kclear()


@pytest.fixture()
def use_test_metakernel(
request, monkeypatch, spice_test_data_path, session_test_metakernel
):
"""
Generate a metakernel and set SPICE_METAKERNEL environment variable.

This fixture generates a metakernel in the directory pointed to by
`imap_data_access.config["DATA_DIR"]` and sets the SPICE_METAKERNEL
environment variable to point to it for use by the `@ensure_spice` decorator.
The default metakernel is built from the "imap_simple_metakernel.template"
template. Other metakernels can be specified by marking the test with the
`metakernel` marker. See examples below.

Parameters
----------
request : fixture
monkeypatch : fixture
spice_test_data_path : fixture
session_test_metakernel : fixture

Yields
------
metakernel_path : Path

Examples
--------
1. Use the default metakernel template
>>> def test_my_spicey_func(use_test_metakernel):
... pass

2. Specify a different metakernel template
>>> @pytest.mark.metakernel("other_template_mk.template")
... def test_my_spicey_func(use_test_metakernel):
... pass
"""
marker = request.node.get_closest_marker("metakernel")
if marker is None:
yield session_test_metakernel
else:
template_name = marker.args[0]
template_path = spice_test_data_path / template_name
metakernel_path = imap_data_access.config["DATA_DIR"] / "imap_2024_v001.tm"
kernels_to_load = get_test_kernels_to_load(template_path, spice_test_data_path)
make_metakernel_from_kernels(metakernel_path, kernels_to_load)
monkeypatch.setenv("SPICE_METAKERNEL", str(metakernel_path))
yield str(metakernel_path)
spice.kclear()


@pytest.fixture()
def _unset_metakernel_path(monkeypatch):
"""Temporarily unsets the SPICE_METAKERNEL environment variable"""
@@ -0,0 +1,4 @@
{SPICE_TEST_DATA_PATH}/imap_sclk_0000.tsc
{SPICE_TEST_DATA_PATH}/naif0012.tls
{SPICE_TEST_DATA_PATH}/imap_spk_demo.bsp
{SPICE_TEST_DATA_PATH}/de440s.bsp
8 changes: 8 additions & 0 deletions imap_processing/tests/spice/test_geometry.py
@@ -23,3 +23,11 @@ def test_imap_state(et, use_test_metakernel):
np.testing.assert_array_equal(state.shape, (len(et), 6))
else:
assert state.shape == (6,)


@pytest.mark.external_kernel()
@pytest.mark.metakernel("imap_ena_sim_metakernel.template")
Contributor:
Nice!

def test_imap_state_ecliptic(use_test_metakernel):
"""Tests retrieving IMAP state in the ECLIPJ2000 frame"""
state = imap_state(798033670)
assert state.shape == (6,)
6 changes: 3 additions & 3 deletions poetry.lock

Some generated files are not rendered by default.

6 changes: 5 additions & 1 deletion pyproject.toml
@@ -52,11 +52,12 @@ ruff = {version="==0.2.1", optional=true}
sphinx = {version="*", optional=true}
sphinxcontrib-openapi = {version="^0.8.3", optional=true}
mypy = {version="^1.10.1", optional=true}
requests = {version = "^2.32.3", optional = true}

[tool.poetry.extras]
dev = ["pre-commit", "ruff", "mypy"]
doc = ["numpydoc", "pydata-sphinx-theme", "sphinx", "sphinxcontrib-openapi"]
test = ["openpyxl", "pytest", "pytest-cov"]
test = ["openpyxl", "pytest", "pytest-cov", "requests"]
tools= ["openpyxl", "pandas"]

[project.urls]
@@ -72,6 +73,9 @@ filterwarnings = [
"ignore:Converting non-nanosecond:UserWarning:cdflib",
"ignore:datetime.datetime.utcfromtimestamp:DeprecationWarning:cdflib",
]
markers = [
Contributor:
I'm curious how this will affect codecov checks. For example, if I skip a test codecov complains that there weren't enough tests.

Contributor Author:
Good question. I don't know how codecov works. It looks like all of the various test runs upload results to the same commit in codecov and so my assumption is that it uses the latest upload. This would mean that the last one to run is the one that gets used in the final check?

Collaborator:
They all get combined together actually. So you can skip all but one runner for this. This is also useful for combining Windows-specific, Mac-specific, and Linux-specific behaviors to ensure all your code coverage for those specific platforms gets added together.

"external_kernel: marks tests as requiring external SPICE kernels (deselect with '-m \"not external_kernel\"')",
]


[tool.ruff]