Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions MODULE.bazel
Original file line number Diff line number Diff line change
Expand Up @@ -120,3 +120,5 @@ git_override(
commit = "3c65b223e9f516f95935bb4cd2e83d6088ca016f",
remote = "https://github.com/eclipse-score/baselibs.git",
)

bazel_dep(name = "score_itf", version = "0.1.0")
36 changes: 36 additions & 0 deletions tests/integration/BUILD
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
# *******************************************************************************
# Copyright (c) 2025 Contributors to the Eclipse Foundation
#
# See the NOTICE file(s) distributed with this work for additional
# information regarding copyright ownership.
#
# This program and the accompanying materials are made available under the
# terms of the Apache License Version 2.0 which is available at
# https://www.apache.org/licenses/LICENSE-2.0
#
# SPDX-License-Identifier: Apache-2.0
# *******************************************************************************

load("@score_itf//:defs.bzl", "py_itf_test")

package(default_testonly = True)

# Integration test that verifies DLT message capture from the datarouter
# running on a QNX QEMU target. See tests/integration/README.md for the
# required QEMU/network setup and how to pass the target IP at run time.
py_itf_test(
    name = "test_datarouter_dlt",
    srcs = [
        "__init__.py",
        "conftest.py",
        "test_datarouter_dlt.py",
    ],
    args = [
        # Target config entry describing the QEMU bridge-network ECU.
        "--ecu=s_core_ecu_qemu_bridge_network",
        "--target_config=$(location @score_itf//config)",
        "--hw",  # Required even if we run the test against a QEMU target
    ],
    data = [
        "@score_itf//config",
    ],
    plugins = [
        "itf.plugins.base.base_plugin",
    ],
)
69 changes: 69 additions & 0 deletions tests/integration/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,69 @@
# *******************************************************************************
# Copyright (c) 2025 Contributors to the Eclipse Foundation
#
# See the NOTICE file(s) distributed with this work for additional
# information regarding copyright ownership.
#
# This program and the accompanying materials are made available under the
# terms of the Apache License Version 2.0 which is available at
# https://www.apache.org/licenses/LICENSE-2.0
#
# SPDX-License-Identifier: Apache-2.0
# *******************************************************************************

# Integration Tests

Integration tests for the `score_logging` component that verify DLT (Diagnostic Log and Trace) message capture on a QNX 8.0 QEMU target.

## Prerequisites

Complete the QNX 8.0 QEMU environment setup as described in the [QNX QEMU documentation](score_reference_integration/qnx_qemu/README.md).

### Network Bridge Setup (virbr0)

The integration tests use QEMU with bridge networking, which requires the `virbr0` virtual bridge interface. For setup instructions, see the "Configure QEMU networking" section of `reference_integration/qnx_qemu/README.md`.

This creates:
- Bridge interface `virbr0` with IP `192.168.122.1/24` on the host. Reference: /usr/share/libvirt/networks/default.xml
- DHCP server assigning IPs `192.168.122.2-254` to VMs
- NAT rules for outbound internet access

## Usage

### 1. Start the QNX QEMU Target

Build and launch the QEMU virtual machine from the `score_reference_integration/qnx_qemu` directory:

```bash
bazel build --config=x86_64-qnx \
--credential_helper=*.qnx.com=$(pwd)/../toolchains_qnx/tools/qnx_credential_helper.py \
//build:init

bazel run --config=x86_64-qnx //:run_qemu
```

Once the QEMU VM boots, note the assigned IP address (e.g., `192.168.122.76`): the target's IP address is assigned dynamically via DHCP and therefore changes on every reboot. The tests use the `S_CORE_ECU_QEMU_BRIDGE_NETWORK_PP` target config.

### 2. Execute the Tests

Run the integration tests from the `score_logging` directory, specifying the target IP:

```bash
bazel test //tests/integration:test_datarouter_dlt \
--test_arg=--target-ip=<QEMU_VM_IP> \
--test_output=streamed \
--nocache_test_results
```

## Test Details

### test_datarouter_dlt

Validates DLT message capture from the datarouter by:

1. Connecting to the QNX target via SSH
2. Starting the datarouter process if not already running
3. Capturing DLT messages over UDP for approximately 10 seconds
4. Verifying receipt of messages with `APP_ID=DR` and `CTX_ID=STAT`

The datarouter emits a statistics message every 5 seconds, so the 10-second capture window is expected to contain at least one such message, making the test reliable.
12 changes: 12 additions & 0 deletions tests/integration/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
# *******************************************************************************
# Copyright (c) 2025 Contributors to the Eclipse Foundation
#
# See the NOTICE file(s) distributed with this work for additional
# information regarding copyright ownership.
#
# This program and the accompanying materials are made available under the
# terms of the Apache License Version 2.0 which is available at
# https://www.apache.org/licenses/LICENSE-2.0
#
# SPDX-License-Identifier: Apache-2.0
# *******************************************************************************
86 changes: 86 additions & 0 deletions tests/integration/conftest.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,86 @@
# *******************************************************************************
# Copyright (c) 2025 Contributors to the Eclipse Foundation
#
# See the NOTICE file(s) distributed with this work for additional
# information regarding copyright ownership.
#
# This program and the accompanying materials are made available under the
# terms of the Apache License Version 2.0 which is available at
# https://www.apache.org/licenses/LICENSE-2.0
#
# SPDX-License-Identifier: Apache-2.0
# *******************************************************************************

import logging
import time
import pytest

from itf.plugins.com.ssh import execute_command, execute_command_output

logger = logging.getLogger(__name__)


def pytest_addoption(parser):
    """Register the ``--target-ip`` command-line option.

    The option is mandatory: tests cannot run without knowing the
    DHCP-assigned address of the QEMU target.
    """
    option_kwargs = {
        "action": "store",
        "required": True,
        "help": "Target IP address for SSH connection to target (QEMU VM)",
    }
    parser.addoption("--target-ip", **option_kwargs)


@pytest.hookimpl(trylast=True)
def pytest_sessionstart(session):
    """Patch the stashed target config with the runtime target IP.

    The tests use the S_CORE_ECU_QEMU_BRIDGE_NETWORK_PP target config, but
    the QEMU VM's address is handed out by DHCP and changes per boot, so the
    statically configured IP is replaced with the ``--target-ip`` value
    before the session starts.
    """
    from itf.plugins.base.base_plugin import TARGET_CONFIG_KEY

    if TARGET_CONFIG_KEY not in session.stash:
        return
    ip_address = session.config.getoption("--target-ip")
    # Name-mangled private attribute of the ITF BaseProcessor; there is no
    # public setter for the configured IP, hence the direct override.
    session.stash[TARGET_CONFIG_KEY]._BaseProcessor__ip_address = ip_address
    logger.info("Connecting to target IP: %s", ip_address)


# Shell pipeline run on the QNX target to detect a running datarouter
# process (pidin lists processes; grep filters for the name).
_DATAROUTER_CHECK_CMD = "/proc/boot/pidin | /proc/boot/grep datarouter"

# pathspace ability provides the datarouter access to the `procnto` pathname prefix space
# required for mw/com message passing with mw::log frontend
_DATAROUTER_START_CMD = (
    "cd /usr/bin && on -A nonroot,allow,pathspace -u 1051:1091 "
    "./datarouter --no_adaptive_runtime &"
)

# Seconds to wait after launching the datarouter before re-checking that
# it came up (the start command returns immediately because of `&`).
_DATAROUTER_STARTUP_TIMEOUT_SEC = 2


@pytest.fixture(scope="module")
def datarouter_running(target_fixture):
    """Ensure a datarouter process is running on the target.

    Checks via SSH whether the datarouter is already up; if not, starts it
    in the background, waits briefly, and fails the test session when the
    process still cannot be found afterwards.
    """
    with target_fixture.sut.ssh() as ssh_session:
        _, check_lines, _ = execute_command_output(ssh_session, _DATAROUTER_CHECK_CMD)

        if "datarouter" in "\n".join(check_lines):
            logger.info("Datarouter already running!")
        else:
            logger.info("Datarouter not running. Starting Datarouter..")
            execute_command(ssh_session, _DATAROUTER_START_CMD)
            # The start command backgrounds the process; give it a moment.
            time.sleep(_DATAROUTER_STARTUP_TIMEOUT_SEC)

            _, recheck_lines, _ = execute_command_output(ssh_session, _DATAROUTER_CHECK_CMD)
            if "datarouter" not in "\n".join(recheck_lines):
                pytest.fail("Failed to start datarouter on target")
            logger.info("Datarouter started successfully..")
    yield


@pytest.fixture(scope="module")
def dlt_receiver_config(target_config_fixture):
    """Extract the DLT receiver network settings from the target config.

    Returns a dict with the ``vlan_address`` and ``multicast_addresses``
    entries needed to set up a DLT receiver.
    """
    router_cfg = target_config_fixture.data_router_config
    wanted_keys = ("vlan_address", "multicast_addresses")
    return {key: router_cfg[key] for key in wanted_keys}
65 changes: 65 additions & 0 deletions tests/integration/test_datarouter_dlt.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,65 @@
# *******************************************************************************
# Copyright (c) 2025 Contributors to the Eclipse Foundation
#
# See the NOTICE file(s) distributed with this work for additional
# information regarding copyright ownership.
#
# This program and the accompanying materials are made available under the
# terms of the Apache License Version 2.0 which is available at
# https://www.apache.org/licenses/LICENSE-2.0
#
# SPDX-License-Identifier: Apache-2.0
# *******************************************************************************

import logging
import os
import time

from itf.plugins.dlt.dlt_receive import DltReceive, Protocol


logger = logging.getLogger(__name__)

# Datarouter messages with Context ID "STAT" sent every ~5s hence 10s to reliably capture and verify
CAPTURE_DURATION_SECONDS = 10

# DLT message identifiers for datarouter statistics.
APP_ID = "DR"  # application ID; occupies 4 NUL-padded bytes on the wire
CTX_ID = "STAT"  # context ID; exactly 4 bytes, no padding needed


def _dlt_id_pattern(app_id, ctx_id):
    """Return the on-wire byte pattern for a DLT extended-header ID pair.

    In the DLT extended header the APP-ID and CTX-ID each occupy 4 bytes,
    NUL-padded when the identifier is shorter than 4 characters. Padding
    both IDs (instead of hard-coding two NULs) keeps the pattern correct
    for identifiers of any length up to 4.
    """
    return (app_id.ljust(4, "\x00") + ctx_id.ljust(4, "\x00")).encode()


def test_dlt_capture(datarouter_running, test_config_fixture, dlt_receiver_config):
    """Verify DLT messages can be captured from the datarouter.

    Captures DLT traffic over UDP for CAPTURE_DURATION_SECONDS and asserts
    that at least one datarouter statistics message (APP_ID=DR,
    CTX_ID=STAT) was received.
    """
    dlt_file = "/tmp/test_dlt_capture.dlt"

    vlan_address = dlt_receiver_config["vlan_address"]
    multicast_addresses = dlt_receiver_config["multicast_addresses"]

    # TODO: Replace with DltWindow when fixed in ITF.
    with DltReceive(
        target_ip=vlan_address,
        protocol=Protocol.UDP,
        file_name=dlt_file,
        binary_path=test_config_fixture.dlt_receive_path,
        data_router_config={
            "vlan_address": vlan_address,
            "multicast_addresses": multicast_addresses,
        },
    ):
        time.sleep(CAPTURE_DURATION_SECONDS)

    assert os.path.exists(dlt_file), f"DLT file not created: {dlt_file}"

    with open(dlt_file, "rb") as f:
        dlt_data = f.read()

    logger.info("DLT file size: %d bytes", len(dlt_data))

    message_count = dlt_data.count(_dlt_id_pattern(APP_ID, CTX_ID))

    logger.info("Found %d messages with app_id=%s, context_id=%s", message_count, APP_ID, CTX_ID)

    # Statistics are emitted every ~5 s, so a 10 s window reliably contains
    # at least one message but not necessarily two; requiring more than one
    # was flaky at the window boundary and contradicted the documented
    # expectation ("at least 1 message") in the README.
    assert message_count >= 1, f"Expected at least 1 message, but got {message_count}"
Loading