Commit
feat: add history, fix percent_diff rounding and flake aggregate time
ref: Replace setex with set in gql rate limit code (#866)
suejung-sentry committed Oct 8, 2024
1 parent b71cd91 commit c7bdcf1
Showing 8 changed files with 194 additions and 59 deletions.
14 changes: 7 additions & 7 deletions graphql_api/tests/test_flake_aggregates.py
@@ -17,8 +17,8 @@ def setUp(self):
self.owner = OwnerFactory(username="randomOwner")
self.repository = RepositoryFactory(author=self.owner, branch="main")

test = TestFactory(repository=self.repository)
for i in range(0, 30):
test = TestFactory(repository=self.repository)
_ = FlakeFactory(
repository=self.repository,
test=test,
@@ -31,27 +31,27 @@ def setUp(self):
latest_run=datetime.now() - timedelta(days=i),
fail_count=1,
skip_count=1,
pass_count=0,
pass_count=1,
flaky_fail_count=1 if i % 5 == 0 else 0,
branch="main",
)

for i in range(30, 60):
test = TestFactory(repository=self.repository)
if i % 2 == 0:
_ = FlakeFactory(
repository=self.repository,
test=test,
start_date=datetime.now() - timedelta(days=i + 1),
end_date=datetime.now() - timedelta(days=i),
)
_ = DailyTestRollupFactory(
test=test,
date=date.today() - timedelta(days=i),
avg_duration_seconds=float(i),
latest_run=datetime.now() - timedelta(days=i),
fail_count=1,
fail_count=3,
skip_count=1,
pass_count=0,
pass_count=1,
flaky_fail_count=3 if i % 5 == 0 else 0,
branch="main",
)
@@ -80,8 +80,8 @@ def test_fetch_test_result_total_runtime(self) -> None:

assert "errors" not in result
assert result["owner"]["repository"]["testAnalytics"]["flakeAggregates"] == {
"flakeRate": 0.2,
"flakeRate": 0.1,
"flakeCount": 30,
"flakeRatePercentChange": -66.66666666666666,
"flakeRatePercentChange": -33.33333,
"flakeCountPercentChange": 100.0,
}
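
The rounded expectations above (-33.33333 rather than -66.66666666666666, plus the -63.1068 and -50.84746 values in the next file) suggest the percent-change helper now rounds its output. A minimal sketch of that behaviour, assuming a hypothetical percent_diff that rounds to five decimal places (the helper itself is not shown in this diff), with inputs derived from the rollup data the test's setUp creates:

def percent_diff(current: float, past: float) -> float | None:
    # Hypothetical helper: percent change from `past` to `current`, rounded to 5 decimals.
    if past == 0:
        return None  # assumption; the real helper may handle this differently
    return round((current - past) / past * 100, 5)

# Consistent with the updated expectations above:
# current 30-day window:  flaky_fail = 6,  pass + fail = 60  -> flakeRate 0.1
# previous 30-day window: flaky_fail = 18, pass + fail = 120 -> flakeRate 0.15
assert percent_diff(0.1, 0.15) == -33.33333   # flakeRatePercentChange
assert percent_diff(30, 15) == 100.0          # flakeCountPercentChange (30 vs 15 flakes)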
77 changes: 71 additions & 6 deletions graphql_api/tests/test_test_analytics.py
@@ -669,8 +669,8 @@ def test_test_results_aggregates(self) -> None:
"slowestTestsDuration": 29.0,
"totalFails": 10,
"totalSkips": 5,
"totalDurationPercentChange": -63.10679611650486,
"slowestTestsDurationPercentChange": -50.847457627118644,
"totalDurationPercentChange": -63.1068,
"slowestTestsDurationPercentChange": -50.84746,
"totalFailsPercentChange": 100.0,
"totalSkipsPercentChange": -50.0,
}
@@ -732,19 +732,19 @@ def test_flake_aggregates(self) -> None:
_ = FlakeFactory(
repository=repo,
test=test,
start_date=datetime.datetime.now() - datetime.timedelta(days=70),
start_date=datetime.datetime.now() - datetime.timedelta(days=90),
end_date=datetime.datetime.now() - datetime.timedelta(days=30),
)
_ = FlakeFactory(
repository=repo,
test=test,
start_date=datetime.datetime.now() - datetime.timedelta(days=80),
start_date=datetime.datetime.now() - datetime.timedelta(days=90),
end_date=datetime.datetime.now() - datetime.timedelta(days=59),
)
_ = FlakeFactory(
repository=repo,
test=test,
start_date=datetime.datetime.now() - datetime.timedelta(days=70),
start_date=datetime.datetime.now() - datetime.timedelta(days=90),
end_date=datetime.datetime.now() - datetime.timedelta(days=61),
)

@@ -783,7 +783,7 @@ def test_flake_aggregates(self) -> None:
assert res["flakeAggregates"] == {
"flakeCount": 2,
"flakeRate": 0.125,
"flakeCountPercentChange": -50.0,
"flakeCountPercentChange": -33.33333,
"flakeRatePercentChange": -50.0,
}

@@ -825,3 +825,68 @@ def test_flake_aggregates_no_history(self) -> None:
"flakeCountPercentChange": None,
"flakeRatePercentChange": None,
}

def test_flake_aggregates_7_days(self) -> None:
repo = RepositoryFactory(
author=self.owner, active=True, private=True, branch="main"
)

test = TestFactory(repository=repo)

_ = FlakeFactory(
repository=repo,
test=test,
start_date=datetime.datetime.now() - datetime.timedelta(days=1),
end_date=None,
)
_ = FlakeFactory(
repository=repo,
test=test,
start_date=datetime.datetime.now() - datetime.timedelta(days=11),
end_date=datetime.datetime.now() - datetime.timedelta(days=8),
)
_ = FlakeFactory(
repository=repo,
test=test,
start_date=datetime.datetime.now() - datetime.timedelta(days=30),
end_date=datetime.datetime.now() - datetime.timedelta(days=10),
)

for i in range(0, 7):
_ = DailyTestRollupFactory(
test=test,
repoid=repo.repoid,
branch="main",
flaky_fail_count=1 if i % 7 == 0 else 0,
fail_count=1 if i % 7 == 0 else 0,
skip_count=0,
pass_count=1,
avg_duration_seconds=float(i),
last_duration_seconds=float(i),
date=datetime.date.today() - datetime.timedelta(days=i),
)
for i in range(7, 14):
_ = DailyTestRollupFactory(
test=test,
repoid=repo.repoid,
branch="main",
flaky_fail_count=1 if i % 3 == 0 else 0,
fail_count=1 if i % 3 == 0 else 0,
skip_count=0,
pass_count=1,
avg_duration_seconds=float(i),
last_duration_seconds=float(i),
date=datetime.date.today() - datetime.timedelta(days=i),
)

res = self.fetch_test_analytics(
repo.name,
"""flakeAggregates(history: INTERVAL_7_DAY) { flakeCount, flakeRate, flakeCountPercentChange, flakeRatePercentChange }""",
)

assert res["flakeAggregates"] == {
"flakeCount": 1,
"flakeRate": 0.125,
"flakeCountPercentChange": -50.0,
"flakeRatePercentChange": -43.75,
}
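
The expectations in test_flake_aggregates_7_days can be reproduced from the rollups the test creates, assuming the flake rate is the window's flaky_fail_count over (pass_count + fail_count) and the percent change compares that window with the seven days before it. A sketch of the arithmetic (not the production query):

# Rollups as created in the test above: one row per day for 14 days.
rollups = [
    {"days_ago": i, "flaky": 1 if i % 7 == 0 else 0, "fail": 1 if i % 7 == 0 else 0, "pass": 1}
    for i in range(0, 7)
] + [
    {"days_ago": i, "flaky": 1 if i % 3 == 0 else 0, "fail": 1 if i % 3 == 0 else 0, "pass": 1}
    for i in range(7, 14)
]

def flake_rate(rows):
    return sum(r["flaky"] for r in rows) / sum(r["pass"] + r["fail"] for r in rows)

current = [r for r in rollups if r["days_ago"] < 7]         # days 0-6
previous = [r for r in rollups if 7 <= r["days_ago"] < 14]  # days 7-13

curr_rate = flake_rate(current)    # 1 flaky fail over 8 runs  -> 0.125
prev_rate = flake_rate(previous)   # 2 flaky fails over 9 runs -> ~0.2222
assert curr_rate == 0.125
assert round((curr_rate - prev_rate) / prev_rate * 100, 5) == -43.75
# flakeCount: presumably counts flakes open during each window: 1 in the
# current 7 days vs 2 in the prior 7 days -> flakeCountPercentChange of -50.0.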
1 change: 1 addition & 0 deletions graphql_api/types/inputs/test_results_filters.graphql
@@ -3,6 +3,7 @@ input TestResultsFilters {
parameter: TestResultsFilterParameter
test_suites: [String!]
flags: [String!]
history: MeasurementInterval
}

input TestResultsOrdering {
61 changes: 46 additions & 15 deletions graphql_api/types/repository/repository.py
@@ -1,5 +1,5 @@
import logging
from datetime import datetime
from datetime import datetime, timedelta
from typing import List, Mapping, Optional

import shared.rate_limits as rate_limits
@@ -30,6 +30,7 @@
CoverageAnalyticsProps,
)
from graphql_api.types.enums import OrderingDirection, TestResultsFilterParameter
from graphql_api.types.enums.enum_types import MeasurementInterval
from graphql_api.types.errors.errors import NotFoundError, OwnerNotActivatedError
from services.components import ComponentMeasurements
from services.profiling import CriticalFile, ProfilingSummary
@@ -525,6 +526,38 @@ def resolve_is_github_rate_limited(repository: Repository, info) -> bool | None:
return None


# TODO - remove with #2291
def convert_history_to_timedelta(interval: MeasurementInterval | None) -> timedelta:
if interval is None:
return timedelta(days=30)

match interval:
case MeasurementInterval.INTERVAL_1_DAY:
return timedelta(days=1)
case MeasurementInterval.INTERVAL_7_DAY:
return timedelta(days=7)
case MeasurementInterval.INTERVAL_30_DAY:
return timedelta(days=30)

Codecov Notifications / codecov/patch warning (graphql_api/types/repository/repository.py#L534-L540): added lines #L534-L540 were not covered by tests.


# TODO - remove with #2291
def convert_test_results_filter_parameter(
parameter: TestResultsFilterParameter | None,
) -> GENERATE_TEST_RESULT_PARAM | None:
if parameter is None:
return None

match parameter:
case TestResultsFilterParameter.FLAKY_TESTS:
return GENERATE_TEST_RESULT_PARAM.FLAKY
case TestResultsFilterParameter.FAILED_TESTS:
return GENERATE_TEST_RESULT_PARAM.FAILED
case TestResultsFilterParameter.SLOWEST_TESTS:
return GENERATE_TEST_RESULT_PARAM.SLOWEST
case TestResultsFilterParameter.SKIPPED_TESTS:
return GENERATE_TEST_RESULT_PARAM.SKIPPED


# TODO - remove with #2291
@repository_bindable.field("testResults")
@convert_kwargs_to_snake_case
@@ -535,24 +568,22 @@ async def resolve_test_results(
filters=None,
**kwargs,
):
parameter = None
generate_test_results_param = None
if filters:
parameter = filters.get("parameter")
match parameter:
case TestResultsFilterParameter.FLAKY_TESTS:
generate_test_results_param = GENERATE_TEST_RESULT_PARAM.FLAKY
case TestResultsFilterParameter.FAILED_TESTS:
generate_test_results_param = GENERATE_TEST_RESULT_PARAM.FAILED
case TestResultsFilterParameter.SLOWEST_TESTS:
generate_test_results_param = GENERATE_TEST_RESULT_PARAM.SLOWEST
case TestResultsFilterParameter.SKIPPED_TESTS:
generate_test_results_param = GENERATE_TEST_RESULT_PARAM.SKIPPED
parameter = (
convert_test_results_filter_parameter(filters.get("parameter"))
if filters
else None
)
history = (
convert_history_to_timedelta(filters.get("history"))
if filters
else timedelta(days=30)
)

queryset = await sync_to_async(generate_test_results)(
repoid=repository.repoid,
history=history,
branch=filters.get("branch") if filters else None,
parameter=generate_test_results_param,
parameter=parameter,
testsuites=filters.get("test_suites") if filters else None,
flags=filters.get("flags") if filters else None,
)
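
With the TODO-marked helpers above, an omitted filters argument, or one without a history key, keeps the previous 30-day window, while an explicit interval narrows what is passed to generate_test_results. A self-contained sketch of that default behaviour, using a stand-in enum rather than importing the real MeasurementInterval:

from datetime import timedelta
from enum import Enum

class MeasurementInterval(Enum):  # stand-in for graphql_api.types.enums.enum_types.MeasurementInterval
    INTERVAL_1_DAY = 1
    INTERVAL_7_DAY = 7
    INTERVAL_30_DAY = 30

def convert_history_to_timedelta(interval: MeasurementInterval | None) -> timedelta:
    # Mirrors the helper added in this diff: None falls back to 30 days.
    if interval is None:
        return timedelta(days=30)
    return timedelta(days=interval.value)

# filters=None (or filters without "history") behaves exactly as before the change:
assert convert_history_to_timedelta(None) == timedelta(days=30)
# an explicit interval shortens the aggregation window:
assert convert_history_to_timedelta(MeasurementInterval.INTERVAL_7_DAY) == timedelta(days=7)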
4 changes: 2 additions & 2 deletions graphql_api/types/test_analytics/test_analytics.graphql
Expand Up @@ -13,10 +13,10 @@ type TestAnalytics {
): TestResultConnection! @cost(complexity: 10, multipliers: ["first", "last"])

"Results aggregates are analytics data totals across all tests"
resultsAggregates: TestResultsAggregates
resultsAggregates(history: MeasurementInterval): TestResultsAggregates

"Flake aggregates are flake totals across all tests"
flakeAggregates: FlakeAggregates
flakeAggregates(history: MeasurementInterval): FlakeAggregates
}

type TestResultConnection {
75 changes: 58 additions & 17 deletions graphql_api/types/test_analytics/test_analytics.py
@@ -1,4 +1,5 @@
import logging
from datetime import timedelta

from ariadne import ObjectType, convert_kwargs_to_snake_case
from graphql.type.definition import GraphQLResolveInfo
@@ -7,6 +8,7 @@
from core.models import Repository
from graphql_api.helpers.connection import queryset_to_connection
from graphql_api.types.enums import OrderingDirection, TestResultsFilterParameter
from graphql_api.types.enums.enum_types import MeasurementInterval
from utils.test_results import (
GENERATE_TEST_RESULT_PARAM,
generate_flake_aggregates,
@@ -28,24 +30,22 @@ async def resolve_results(
filters=None,
**kwargs,
):
parameter = None
generate_test_results_param = None
if filters:
parameter = filters.get("parameter")
match parameter:
case TestResultsFilterParameter.FLAKY_TESTS:
generate_test_results_param = GENERATE_TEST_RESULT_PARAM.FLAKY
case TestResultsFilterParameter.FAILED_TESTS:
generate_test_results_param = GENERATE_TEST_RESULT_PARAM.FAILED
case TestResultsFilterParameter.SLOWEST_TESTS:
generate_test_results_param = GENERATE_TEST_RESULT_PARAM.SLOWEST
case TestResultsFilterParameter.SKIPPED_TESTS:
generate_test_results_param = GENERATE_TEST_RESULT_PARAM.SKIPPED
parameter = (
convert_test_results_filter_parameter(filters.get("parameter"))
if filters
else None
)
history = (
convert_history_to_timedelta(filters.get("history"))
if filters
else timedelta(days=30)
)

queryset = await sync_to_async(generate_test_results)(
repoid=repository.repoid,
history=history,
branch=filters.get("branch") if filters else None,
parameter=generate_test_results_param,
parameter=parameter,
testsuites=filters.get("test_suites") if filters else None,
flags=filters.get("flags") if filters else None,
)
@@ -69,13 +69,54 @@ async def resolve_results(
async def resolve_results_aggregates(
repository: Repository,
info: GraphQLResolveInfo,
history: MeasurementInterval | None = None,
**_,
):
history = convert_history_to_timedelta(history)
return await sync_to_async(generate_test_results_aggregates)(
repoid=repository.repoid
repoid=repository.repoid, history=history
)


@test_analytics_bindable.field("flakeAggregates")
@convert_kwargs_to_snake_case
async def resolve_flake_aggregates(repository: Repository, info: GraphQLResolveInfo):
return await sync_to_async(generate_flake_aggregates)(repoid=repository.repoid)
async def resolve_flake_aggregates(
repository: Repository,
info: GraphQLResolveInfo,
history: MeasurementInterval | None = None,
**_,
):
history = convert_history_to_timedelta(history)
return await sync_to_async(generate_flake_aggregates)(
repoid=repository.repoid, history=history
)


def convert_history_to_timedelta(interval: MeasurementInterval | None) -> timedelta:
if interval is None:
return timedelta(days=30)

match interval:
case MeasurementInterval.INTERVAL_1_DAY:
return timedelta(days=1)

Codecov Notifications / codecov/patch warning (graphql_api/types/test_analytics/test_analytics.py#L101): added line #L101 was not covered by tests.
case MeasurementInterval.INTERVAL_7_DAY:
return timedelta(days=7)
case MeasurementInterval.INTERVAL_30_DAY:
return timedelta(days=30)

Codecov Notifications / codecov/patch warning (graphql_api/types/test_analytics/test_analytics.py#L104-L105): added lines #L104-L105 were not covered by tests.


def convert_test_results_filter_parameter(
parameter: TestResultsFilterParameter | None,
) -> GENERATE_TEST_RESULT_PARAM | None:
if parameter is None:
return None

match parameter:
case TestResultsFilterParameter.FLAKY_TESTS:
return GENERATE_TEST_RESULT_PARAM.FLAKY
case TestResultsFilterParameter.FAILED_TESTS:
return GENERATE_TEST_RESULT_PARAM.FAILED
case TestResultsFilterParameter.SLOWEST_TESTS:
return GENERATE_TEST_RESULT_PARAM.SLOWEST
case TestResultsFilterParameter.SKIPPED_TESTS:
return GENERATE_TEST_RESULT_PARAM.SKIPPED
2 changes: 1 addition & 1 deletion graphql_api/views.py
@@ -329,7 +329,7 @@ def _check_ratelimit(self, request):
"[GQL Rate Limit] - Setting new key",
extra=dict(key=key, user_id=user_id),
)
redis.setex(key, window, 1)
redis.set(name=key, ex=window, value=1)
elif int(current_count) >= limit:
log.warning(
"[GQL Rate Limit] - Rate limit reached for key",
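
For reference, the replaced call and its successor do the same thing in redis-py: write the value and attach a TTL. The new form just expresses the expiry through set's ex keyword. A quick sketch (assuming a locally reachable Redis and a hypothetical key, purely for illustration):

import redis  # redis-py

r = redis.Redis()  # assumes a local Redis on the default host/port
key, window = "rl:gql:anonymous", 300  # hypothetical rate-limit key and window in seconds

# Old form: SETEX key seconds value
r.setex(key, window, 1)

# New form from this commit: SET key value EX seconds
r.set(name=key, ex=window, value=1)

# In both cases the key holds "1" and expires after `window` seconds.
assert 0 < r.ttl(key) <= window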
