6 changes: 2 additions & 4 deletions .github/workflows/ci.yml
@@ -75,10 +75,8 @@ jobs:
         env:
           TARGET_BRANCH: "${{ steps.get-target-branch.outputs.target-branch }}"

-      # temporary ski : dev agent removed content-type header from traces response
-      # causing lot of trace to be dropped
-      # - name: Get agent artifact
-      #   run: ./utils/scripts/load-binary.sh agent
+      - name: Get agent artifact
+        run: ./utils/scripts/load-binary.sh agent

       # ### appsec-event-rules is now a private repo. The GH_TOKEN provided can't read private repos.
       # ### skipping this, waiting for a proper solution
2 changes: 1 addition & 1 deletion .github/workflows/run-end-to-end.yml
@@ -138,7 +138,7 @@ jobs:
         if: always() && steps.build.outcome == 'success' && contains(inputs.scenarios, '"IPV6"') && inputs.library != 'ruby'
         run: ./run.sh IPV6
       - name: Run CROSSED_TRACING_LIBRARIES scenario
-        if: always() && steps.build.outcome == 'success' && matrix.weblog != 'python3.12' && matrix.weblog != 'django-py3.13' && (matrix.weblog != 'spring-boot-payara' || inputs.ci_environment != 'prod') && contains(inputs.scenarios, '"CROSSED_TRACING_LIBRARIES"')
+        if: always() && steps.build.outcome == 'success' && matrix.weblog != 'python3.12' && matrix.weblog != 'django-py3.13' && matrix.weblog != 'spring-boot-payara' && contains(inputs.scenarios, '"CROSSED_TRACING_LIBRARIES"')
         # python 3.13 issue : APMAPI-1096
         run: ./run.sh CROSSED_TRACING_LIBRARIES
         env:
12 changes: 12 additions & 0 deletions tests/test_data_integrity.py
@@ -210,5 +210,17 @@ def validator(data):
         interfaces.library.validate(validator, success_by_default=True)


+@features.data_integrity
+class Test_Agent:
+    @missing_feature(library="cpp", reason="Traces are not reported")
+    def test_headers(self):
+        """All required headers are present in all responses sent by the agent"""
+        interfaces.library.assert_response_header(
+            path_filters=interfaces.library.trace_paths,
+            header_name_pattern="content-type",
+            header_value_pattern="application/json",
+        )
+
+
 def _empty_request(data):
     return "content" not in data["request"] or not data["request"]["content"]
34 changes: 26 additions & 8 deletions utils/interfaces/_core.py
@@ -119,15 +119,17 @@ def load_data_from_logs(self):
     def _append_data(self, data):
         self._data_list.append(data)

-    def get_data(self, path_filters=None):
+    def get_data(self, path_filters: list[str] | str | None = None):
         if path_filters is not None:
             if isinstance(path_filters, str):
                 path_filters = [path_filters]

-            path_filters = [re.compile(path) for path in path_filters]
+            path_regexes = [re.compile(path) for path in path_filters]
+        else:
+            path_regexes = None

         for data in self._data_list:
-            if path_filters is not None and all(path.fullmatch(data["path"]) is None for path in path_filters):
+            if path_regexes is not None and all(path.fullmatch(data["path"]) is None for path in path_regexes):
                 continue

             yield data
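
As an aside, a minimal usage sketch (not part of the diff) of the reworked get_data: path filters are treated as regular expressions matched with fullmatch, and a bare string is now wrapped into a list, so both call styles below are accepted. The print statements are illustrative only.

# Sketch only: both call styles are accepted after this change; paths are regex patterns.
for data in interfaces.library.get_data(path_filters="/v0.4/traces"):
    print(data["path"], data["log_filename"])

for data in interfaces.library.get_data(path_filters=["/v0.4/traces", "/v0.5/traces"]):
    print(data["path"])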
@@ -207,23 +209,39 @@ def assert_schema_points(self, excluded_points=None):

         assert not has_error, f"Schema validation failed for {self.name}"

-    def assert_request_header(self, path, header_name_pattern: str, header_value_pattern: str) -> None:
+    def assert_response_header(self, path_filters, header_name_pattern: str, header_value_pattern: str) -> None:
         """Assert that a header, and its value are present in all requests for a given path
         header_name_pattern: a regular expression to match the header name (lower case)
         header_value_pattern: a regular expression to match the header value
         """

+        self._assert_header(path_filters, "response", header_name_pattern, header_value_pattern)
+
+    def assert_request_header(self, path_filters, header_name_pattern: str, header_value_pattern: str) -> None:
+        """Assert that a header, and its value are present in all requests for a given path
+        header_name_pattern: a regular expression to match the header name (lower case)
+        header_value_pattern: a regular expression to match the header value
+        """
+
+        self._assert_header(path_filters, "request", header_name_pattern, header_value_pattern)
+
+    def _assert_header(
+        self, path_filters, request_or_response: str, header_name_pattern: str, header_value_pattern: str
+    ) -> None:
         data_found = False

-        for data in self.get_data(path):
+        for data in self.get_data(path_filters):
             data_found = True

             found = False

-            for header, value in data["request"]["headers"]:
+            for header, value in data[request_or_response]["headers"]:
                 if re.fullmatch(header_name_pattern, header.lower()):
                     if not re.fullmatch(header_value_pattern, value):
-                        logger.error(f"Header {header} found in {data['log_filename']}, but value is {value}")
+                        logger.error(
+                            f"{request_or_response} header {header} found in "
+                            f"{data['log_filename']}, but value is {value}"
+                        )
                     else:
                         found = True
                         continue
@@ -232,7 +250,7 @@ def assert_request_header(self, path, header_name_pattern: str, header_value_pat
                 raise ValueError(f"{header_name_pattern} not found (or incorrect) in {data['log_filename']}")

         if not data_found:
-            raise ValueError(f"No data found for {path}")
+            raise ValueError(f"No data found for {path_filters}")


 class ValidationError(Exception):
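
For reference, a minimal sketch (not part of this PR) of how the refactored request-side helper could be called from a test, mirroring the new Test_Agent test above. The class name and the header name/value patterns are illustrative assumptions; only assert_request_header, trace_paths and the features decorator come from the diff.

# Sketch only: hypothetical test, header patterns are illustrative.
@features.data_integrity
class Test_Library_Request_Headers:
    def test_content_type(self):
        """Library trace payloads advertise a content-type on every request"""
        interfaces.library.assert_request_header(
            path_filters=interfaces.library.trace_paths,
            header_name_pattern="content-type",
            header_value_pattern="application/msgpack",
        )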
6 changes: 3 additions & 3 deletions utils/interfaces/_library/core.py
@@ -24,6 +24,8 @@
 class LibraryInterfaceValidator(ProxyBasedInterfaceValidator):
     """Validate library/agent interface"""

+    trace_paths = ["/v0.4/traces", "/v0.5/traces"]
+
     def __init__(self, name):
         super().__init__(name)
         self.ready = threading.Event()
@@ -47,14 +49,12 @@ def wait_function(data):

     ############################################################
     def get_traces(self, request=None):
-        paths = ["/v0.4/traces", "/v0.5/traces"]
-
         rid = get_rid_from_request(request)

         if rid:
             logger.debug(f"Try to find traces related to request {rid}")

-        for data in self.get_data(path_filters=paths):
+        for data in self.get_data(path_filters=self.trace_paths):
             traces = data["request"]["content"]
             if not traces: # may be none
                 continue