Add E2E tests to aggregate analysis endpoint
hynnot committed Jan 15, 2025
1 parent bc064f3 commit a57e53f
Showing 2 changed files with 151 additions and 7 deletions.
@@ -5,7 +5,7 @@
 from fastapi import APIRouter, Depends, Query
 from pydantic import BaseModel

-from .utils import get_measurement_start_day_agg, TimeGrains
+from .utils import get_measurement_start_day_agg, TimeGrains, parse_probe_asn
 from ...dependencies import (
     get_clickhouse_session,
 )
@@ -30,6 +30,7 @@
     "probe_cc",
     "probe_asn",
     "test_name",
+    "input"
 ]


@@ -47,13 +48,15 @@ class AggregationEntry(BaseModel):
     ok_count: float
     measurement_count: float

-    measurement_start_day: date
+    measurement_start_day: Optional[datetime] = None
     outcome_label: str
     outcome_value: float
+
     domain: Optional[str] = None
     probe_cc: Optional[str] = None
     probe_asn: Optional[int] = None
     test_name: Optional[str] = None
+    input: Optional[str] = None


 class AggregationResponse(BaseModel):
@@ -67,7 +70,6 @@ class AggregationResponse(BaseModel):
 async def get_aggregation_analysis(
     axis_x: Annotated[AggregationKeys, Query()] = "measurement_start_day",
     axis_y: Annotated[Optional[AggregationKeys], Query()] = None,
-    category_code: Annotated[Optional[str], Query()] = None,
     test_name: Annotated[Optional[str], Query()] = None,
     domain: Annotated[Optional[str], Query()] = None,
     input: Annotated[Optional[str], Query()] = None,
@@ -97,8 +99,7 @@ async def get_aggregation_analysis(
         extra_cols[axis_x] = axis_x

     if probe_asn is not None:
-        if isinstance(probe_asn, str) and probe_asn.startswith("AS"):
-            probe_asn = int(probe_asn[2:])
+        probe_asn = parse_probe_asn(probe_asn)
         q_args["probe_asn"] = probe_asn
         and_clauses.append("probe_asn = %(probe_asn)d")
         extra_cols["probe_asn"] = "probe_asn"
@@ -259,7 +260,6 @@ async def get_aggregation_analysis(
     results: List[AggregationEntry] = []
     if rows and isinstance(rows, list):
         for row in rows:
-            print(row)
             d = dict(zip(list(extra_cols.keys()) + fixed_cols, row))
             outcome_value = d["outcome_value"]
             outcome_label = d["outcome_label"]
@@ -285,12 +285,14 @@
                 failure_count=failure_count,
                 ok_count=ok_count,
                 measurement_count=1.0,
-                measurement_start_day=d["measurement_start_day"],
+                measurement_start_day=d.get("measurement_start_day"),
                 outcome_label=outcome_label,
                 outcome_value=outcome_value,
                 domain=d.get("domain"),
                 probe_cc=d.get("probe_cc"),
                 probe_asn=d.get("probe_asn"),
                 test_name=d.get("test_name"),
+                input=d.get("input"),
             )
             results.append(entry)
     return AggregationResponse(
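The refactored branch above delegates ASN normalization to parse_probe_asn from .utils, whose definition is not part of this diff. Judging from the inline logic it replaces, a minimal sketch of such a helper (behaviour inferred, not taken from the repository) could be:

def parse_probe_asn(probe_asn):
    # Accept either an integer ASN or an "AS"-prefixed string such as "AS45758".
    if isinstance(probe_asn, str) and probe_asn.startswith("AS"):
        return int(probe_asn[2:])
    return int(probe_asn)

The new E2E test below that passes probe_asn as "AS45758" and expects 45758 back exercises exactly this normalization.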
@@ -0,0 +1,142 @@
import pytest

route = "api/v1/aggregation/analysis"
since = "2024-11-01"
until = "2024-11-10"


def test_oonidata_aggregation_analysis(client):
    response = client.get(route)

    json = response.json()
    assert isinstance(json["results"], list), json
    assert len(json["results"]) == 0


def test_oonidata_aggregation_analysis_with_since_and_until(client):
    response = client.get(f"{route}?since={since}&until={until}")

    json = response.json()
    assert isinstance(json["results"], list), json
    assert len(json["results"]) > 0

    for result in json["results"]:
        assert "anomaly_count" in result, result
        assert "domain" in result, result


@pytest.mark.parametrize(
    "filter_param, filter_value",
    [
        ("domain", "zh.wikipedia.org"),
        ("probe_cc", "IR"),
        ("probe_asn", 45758),
        ("test_name", "whatsapp"),
        ("input", "stun://stun.voys.nl:3478"),
    ]
)
def test_oonidata_aggregation_analysis_with_filters(client, filter_param, filter_value):
    params = {
        "since": since,
        "until": until
    }
    params[filter_param] = filter_value

    response = client.get(route, params=params)

    json = response.json()
    assert isinstance(json["results"], list), json
    assert len(json["results"]) > 0
    for result in json["results"]:
        assert result[filter_param] == filter_value, result


def test_oonidata_aggregation_analysis_filtering_by_probe_asn_as_a_string_with_since_and_until(client):
    probe_asn = 45758
    probe_asn_as_a_string = "AS" + str(probe_asn)

    response = client.get(f"{route}?probe_asn={probe_asn_as_a_string}&since={since}&until={until}")

    json = response.json()
    assert isinstance(json["results"], list), json
    assert len(json["results"]) > 0
    for result in json["results"]:
        assert result["probe_asn"] == probe_asn, result


@pytest.mark.parametrize(
    "field", [
        "measurement_start_day",
        "domain",
        "probe_cc",
        "probe_asn",
        "test_name",
        "input",
    ]
)
def test_oonidata_aggregation_analysis_with_axis_x(client, field):
    params = {
        "since": since,
        "until": until,
        "axis_x": field
    }

    response = client.get(route, params=params)

    json = response.json()
    assert isinstance(json["results"], list), json
    assert len(json["results"]) > 0
    for result in json["results"]:
        assert result[field] is not None, result


@pytest.mark.parametrize(
    "field", [
        "measurement_start_day",
        "domain",
        "probe_cc",
        "probe_asn",
        "test_name",
        "input",
    ]
)
def test_oonidata_aggregation_analysis_axis_y(client, field):
    params = {
        "since": since,
        "until": until,
        "axis_y": field
    }

    response = client.get(route, params=params)

    json = response.json()
    assert isinstance(json["results"], list), json
    assert len(json["results"]) > 0
    for result in json["results"]:
        assert result[field] is not None, result


@pytest.mark.parametrize(
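    # The expected totals below correspond to the time buckets in the 2024-11-01..2024-11-10
    # fixture window: presumably nine daily buckets, i.e. 9 * 24 = 216 hourly, 2 ISO-week,
    # 1 monthly and 1 yearly bucket, with "auto" resolving to the daily grain at this size.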
"time_grain, total",
[
("hour", 216),
("day", 9),
("week", 2),
("month", 1),
("year", 1),
("auto", 9),
]
)
def test_oonidata_aggregation_analysis_time_grain(client, time_grain, total):
params = {
"since": since,
"until": until,
"time_grain": time_grain
}

response = client.get(route, params=params)

json = response.json()
assert isinstance(json["results"], list), json
assert len(json["results"]) == total

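The tests above rely on a client fixture that is not included in this commit. Assuming it is a FastAPI TestClient wired to the application and backed by a ClickHouse instance seeded with measurements for the 2024-11-01..2024-11-10 window, a minimal conftest.py sketch (import path hypothetical) might look like:

import pytest
from fastapi.testclient import TestClient

from oonipipeline.api.main import app  # hypothetical import path for the FastAPI app


@pytest.fixture
def client():
    # The real fixture presumably also points the app at a seeded ClickHouse database.
    return TestClient(app)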