feat: #2955 Allow excluding some statistics from aggregation
tyge68 committed Oct 26, 2024
1 parent ab5faf3 commit 1426699
Showing 5 changed files with 992 additions and 1,658 deletions.
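
Based on the diff below, a minimal usage sketch of how the new exclusion might be wired up: a regular expression is carried by the Environment as exclude_from_aggregation, and RequestStats matches it (with re.search) against each request's method and name, keeping the per-entry statistics but leaving the sample out of the "Aggregated" total. The init listener, the HEALTHCHECK request type and the /status name below are illustrative assumptions, not part of this commit.

# Sketch of a locustfile using the new exclusion; assumed names are marked as such.
from locust import HttpUser, events, task


@events.init.add_listener
def configure_exclusion(environment, **kwargs):
    # Assumption: the pattern is set directly as an attribute on the Environment,
    # mirroring the new unit test below. Samples whose method or name matches it
    # keep their own stats entry but are skipped in the "Aggregated" row.
    environment.exclude_from_aggregation = r"^HEALTHCHECK$"


class MyUser(HttpUser):
    host = "http://localhost:8080"  # illustrative target

    @task
    def health_probe(self):
        # A manually fired request event; its request_type matches the pattern
        # above, so it is excluded from the aggregated totals.
        self.environment.events.request.fire(
            request_type="HEALTHCHECK",
            name="/status",
            response_time=5,
            response_length=0,
            exception=None,
            context={},
        )
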
5 changes: 3 additions & 2 deletions .vscode/settings.json
@@ -1,5 +1,5 @@
{
"editor.formatOnSave": true,
"editor.formatOnSave": false,
"files.exclude": {
".pytest_cache/**/*": true,
"**/*.pyc": true,
@@ -44,5 +44,6 @@
"editor.defaultFormatter": "charliermarsh.ruff"
},
"python.testing.pytestEnabled": false,
"python.testing.unittestEnabled": true
"python.testing.unittestEnabled": true,
"python.analysis.autoImportCompletions": false
}
6 changes: 4 additions & 2 deletions locust/env.py
@@ -1,6 +1,7 @@
from __future__ import annotations

from operator import methodcaller
+from re import Pattern
from typing import Callable, TypeVar

from configargparse import Namespace
@@ -27,6 +28,7 @@ def __init__(
tags: list[str] | None = None,
locustfile: str | None = None,
exclude_tags: list[str] | None = None,
+exclude_from_aggregation: str | Pattern[str] | None = None,
events: Events | None = None,
host: str | None = None,
reset_stats=False,
@@ -69,7 +71,7 @@ def __init__(
"""If set, only tasks that are tagged by tags in this list will be executed. Leave this as None to use the one from parsed_options"""
self.exclude_tags = exclude_tags
"""If set, only tasks that aren't tagged by tags in this list will be executed. Leave this as None to use the one from parsed_options"""
-self.stats = RequestStats()
+self.stats = RequestStats(environment=self)
"""Reference to RequestStats instance"""
self.host = host
"""Base URL of the target system"""
@@ -154,7 +156,7 @@ def create_worker_runner(self, master_host: str, master_port: int) -> WorkerRunn
"""
# Create a new RequestStats with use_response_times_cache set to False to save some memory
# and CPU cycles, since the response_times_cache is not needed for Worker nodes
-self.stats = RequestStats(use_response_times_cache=False)
+self.stats = RequestStats(use_response_times_cache=False, environment=self)
return self._create_runner(
WorkerRunner,
master_host=master_host,
18 changes: 15 additions & 3 deletions locust/stats.py
@@ -5,6 +5,7 @@
import json
import logging
import os
+import re
import signal
import time
from abc import abstractmethod
@@ -14,7 +15,7 @@
from html import escape
from itertools import chain
from types import FrameType
-from typing import TYPE_CHECKING, Any, Callable, NoReturn, Protocol, TypedDict, TypeVar, cast
+from typing import TYPE_CHECKING, Any, Callable, NoReturn, Optional, Protocol, TypedDict, TypeVar, cast

import gevent

@@ -184,17 +185,19 @@ class RequestStats:
Class that holds the request statistics. Accessible in a User from self.environment.stats
"""

-def __init__(self, use_response_times_cache=True):
+def __init__(self, use_response_times_cache=True, environment: Optional[Environment] = None):
"""
:param use_response_times_cache: The value of use_response_times_cache will be set for each StatsEntry()
when they are created. Setting it to False saves some memory and CPU
cycles which we can do on Worker nodes where the response_times_cache
is not needed.
+:param environment: The Environment these stats belong to; consulted for the exclude_from_aggregation pattern.
"""
self.use_response_times_cache = use_response_times_cache
self.entries: dict[tuple[str, str], StatsEntry] = EntriesDict(self)
self.errors: dict[str, StatsError] = {}
self.total = StatsEntry(self, "Aggregated", None, use_response_times_cache=self.use_response_times_cache)
+self.environment = environment
self.history = []

@property
@@ -217,8 +220,17 @@ def last_request_timestamp(self):
def start_time(self):
return self.total.start_time

+def exclude_from_total(self, method: str, name: str):
+    exclude_from_aggregation = getattr(getattr(self, "environment", None), "exclude_from_aggregation", None)
+    if exclude_from_aggregation:
+        found_in_method = re.search(exclude_from_aggregation, method)
+        found_in_name = re.search(exclude_from_aggregation, name)
+        return found_in_method or found_in_name
+    return False

def log_request(self, method: str, name: str, response_time: int, content_length: int) -> None:
-self.total.log(response_time, content_length)
+if not self.exclude_from_total(method, name):
+    self.total.log(response_time, content_length)
self.entries[(name, method)].log(response_time, content_length)

def log_error(self, method: str, name: str, error: Exception | str | None) -> None:
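
For reference, the check added in exclude_from_total above is a plain re.search against both the method and the name, so a substring match on either field is enough to drop the sample from the aggregated total. A small standalone illustration (the pattern and the sample values are made up):

import re

# Illustrative pattern; in the commit it would come from environment.exclude_from_aggregation.
pattern = r"DEBUG"


def excluded(method: str, name: str) -> bool:
    # Mirrors the logic of RequestStats.exclude_from_total: a match on either
    # the method or the name excludes the sample from the "Aggregated" row.
    return bool(re.search(pattern, method) or re.search(pattern, name))


print(excluded("GET", "/users"))          # False -> counted in the aggregated total
print(excluded("DEBUG", "/users"))        # True  -> method matches, excluded
print(excluded("GET", "/api/DEBUG/log"))  # True  -> substring match on the name, excluded
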
11 changes: 11 additions & 0 deletions locust/test/test_stats.py
@@ -98,6 +98,17 @@ def test_total_rps(self):
self.assertAlmostEqual(s2.total_rps, 1 / 5.0)
self.assertEqual(self.stats.total.total_rps, 10 / 5.0)

+def test_total_exclude_from_aggregation(self):
+    env = Environment()
+    self.stats.environment = env
+    # First without exclusion
+    self.stats.log_request("CUSTOM", "some_name", 1337, 1337)
+    self.assertEqual(self.stats.num_requests, 1)
+    # Second with exclusion
+    env.exclude_from_aggregation = r"CUSTOM"
+    self.stats.log_request("CUSTOM", "some_name", 1337, 1337)
+    self.assertEqual(self.stats.num_requests, 1)

def test_rps_less_than_one_second(self):
s = StatsEntry(self.stats, "percentile_test", "GET")
for i in range(10):