Commit

Reformat
kenodegard committed Mar 20, 2024
1 parent d75356f commit 11cf662
Showing 3 changed files with 32 additions and 37 deletions.
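
A minimal sketch of the annotation style this reformat adopts (an assumed example, not code taken from the commit): with from __future__ import annotations, annotations are stored as strings instead of being evaluated at import time, so quoted forward references and the typing aliases Dict, List, Optional, Tuple, and Union can become plain names, builtin generics, and X | None unions.

from __future__ import annotations  # postponed evaluation of annotations (PEP 563)

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Seen only by type checkers, never imported at runtime.
    # Hypothetical import path for this sketch; the real module uses a relative import.
    from pytest_codspeed._wrapper import LibType


def get_lib() -> LibType:                   # was: -> "LibType"
    ...


lib: LibType | None = None                  # was: Optional["LibType"]
disabled_plugins: tuple[str, ...] = ()      # was: Tuple[str, ...]
extra_info: dict = {}                       # was: Dict = {}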
4 changes: 3 additions & 1 deletion src/pytest_codspeed/_wrapper/__init__.py
@@ -1,3 +1,5 @@
from __future__ import annotations

import os
from typing import TYPE_CHECKING

@@ -22,7 +24,7 @@ def _get_ffi():
return ffi


def get_lib() -> "LibType":
def get_lib() -> LibType:
try:
ffi = _get_ffi()
build_lock = FileLock(f"{_wrapper_dir}/build.lock")
63 changes: 28 additions & 35 deletions src/pytest_codspeed/plugin.py
@@ -1,19 +1,11 @@
from __future__ import annotations

import gc
import os
import pkgutil
import sys
from dataclasses import dataclass, field
from typing import (
TYPE_CHECKING,
Any,
Callable,
Dict,
List,
Optional,
Tuple,
TypeVar,
Union,
)
from typing import TYPE_CHECKING

import pytest
from _pytest.fixtures import FixtureManager
@@ -24,15 +16,19 @@
from ._wrapper import get_lib

if TYPE_CHECKING:
from typing import Any, Callable, TypeVar

from ._wrapper import LibType

T = TypeVar("T")

IS_PYTEST_BENCHMARK_INSTALLED = pkgutil.find_loader("pytest_benchmark") is not None
SUPPORTS_PERF_TRAMPOLINE = sys.version_info >= (3, 12)
BEFORE_PYTEST_8_1_1 = pytest.version_tuple < (8, 1, 1)


@pytest.hookimpl(trylast=True)
def pytest_addoption(parser: "pytest.Parser"):
def pytest_addoption(parser: pytest.Parser):
group = parser.getgroup("CodSpeed benchmarking")
group.addoption(
"--codspeed",
@@ -46,20 +42,20 @@ def pytest_addoption(parser: "pytest.Parser"):
class CodSpeedPlugin:
is_codspeed_enabled: bool
should_measure: bool
lib: Optional["LibType"]
disabled_plugins: Tuple[str, ...]
lib: LibType | None
disabled_plugins: tuple[str, ...]
benchmark_count: int = field(default=0, hash=False, compare=False)


PLUGIN_NAME = "codspeed_plugin"


def get_plugin(config: "pytest.Config") -> "CodSpeedPlugin":
def get_plugin(config: pytest.Config) -> CodSpeedPlugin:
return config.pluginmanager.get_plugin(PLUGIN_NAME)


@pytest.hookimpl(tryfirst=True)
def pytest_configure(config: "pytest.Config"):
def pytest_configure(config: pytest.Config):
config.addinivalue_line(
"markers", "codspeed_benchmark: mark an entire test for codspeed benchmarking"
)
@@ -73,7 +69,7 @@ def pytest_configure(config: "pytest.Config"):
lib = get_lib() if should_measure else None
if lib is not None:
lib.dump_stats_at(f"Metadata: pytest-codspeed {__version__}".encode("ascii"))
disabled_plugins: List[str] = []
disabled_plugins: list[str] = []
# Disable pytest-benchmark if codspeed is enabled
if is_codspeed_enabled and IS_PYTEST_BENCHMARK_INSTALLED:
object.__setattr__(config.option, "benchmark_disable", True)
@@ -89,7 +85,7 @@ def pytest_configure(config: "pytest.Config"):
config.pluginmanager.register(plugin, PLUGIN_NAME)


def pytest_plugin_registered(plugin, manager: "pytest.PytestPluginManager"):
def pytest_plugin_registered(plugin, manager: pytest.PytestPluginManager):
"""Patch the benchmark fixture to use the codspeed one if codspeed is enabled"""
if IS_PYTEST_BENCHMARK_INSTALLED and isinstance(plugin, FixtureManager):
fixture_manager = plugin
@@ -111,7 +107,7 @@ def pytest_plugin_registered(plugin, manager: "pytest.PytestPluginManager"):


@pytest.hookimpl(trylast=True)
def pytest_report_header(config: "pytest.Config"):
def pytest_report_header(config: pytest.Config):
out = [
f"codspeed: {__version__} "
f"(callgraph: {'enabled' if SUPPORTS_PERF_TRAMPOLINE else 'not supported'})"
@@ -132,24 +128,24 @@ def pytest_report_header(config: "pytest.Config"):
return "\n".join(out)


def has_benchmark_fixture(item: "pytest.Item") -> bool:
def has_benchmark_fixture(item: pytest.Item) -> bool:
item_fixtures = getattr(item, "fixturenames", [])
return "benchmark" in item_fixtures or "codspeed_benchmark" in item_fixtures


def has_benchmark_marker(item: "pytest.Item") -> bool:
def has_benchmark_marker(item: pytest.Item) -> bool:
return (
item.get_closest_marker("codspeed_benchmark") is not None
or item.get_closest_marker("benchmark") is not None
)


def should_benchmark_item(item: "pytest.Item") -> bool:
def should_benchmark_item(item: pytest.Item) -> bool:
return has_benchmark_fixture(item) or has_benchmark_marker(item)


@pytest.hookimpl()
def pytest_sessionstart(session: "pytest.Session"):
def pytest_sessionstart(session: pytest.Session):
plugin = get_plugin(session.config)
if plugin.is_codspeed_enabled:
plugin.benchmark_count = 0
@@ -159,7 +155,7 @@ def pytest_sessionstart(session: "pytest.Session"):

@pytest.hookimpl(trylast=True)
def pytest_collection_modifyitems(
session: "pytest.Session", config: "pytest.Config", items: "List[pytest.Item]"
session: pytest.Session, config: pytest.Config, items: list[pytest.Item]
):
plugin = get_plugin(config)
if plugin.is_codspeed_enabled:
@@ -175,9 +171,9 @@ def pytest_collection_modifyitems(


def _run_with_instrumentation(
lib: "LibType",
lib: LibType,
nodeId: str,
config: "pytest.Config",
config: pytest.Config,
fn: Callable[..., Any],
*args,
**kwargs,
@@ -209,7 +205,7 @@ def __codspeed_root_frame__():


@pytest.hookimpl(tryfirst=True)
def pytest_runtest_protocol(item: "pytest.Item", nextitem: Union["pytest.Item", None]):
def pytest_runtest_protocol(item: pytest.Item, nextitem: pytest.Item | None):
plugin = get_plugin(item.config)
if not plugin.is_codspeed_enabled or not should_benchmark_item(item):
return (
@@ -258,14 +254,11 @@ def pytest_runtest_protocol(item: "pytest.Item",
return reports # Deny further protocol hooks execution


T = TypeVar("T")


class BenchmarkFixture:
"""The fixture that can be used to benchmark a function."""

def __init__(self, request: "pytest.FixtureRequest"):
self.extra_info: Dict = {}
def __init__(self, request: pytest.FixtureRequest):
self.extra_info: dict = {}

self._request = request

@@ -283,22 +276,22 @@ def __call__(self, func: Callable[..., T], *args: Any, **kwargs: Any) -> T:


@pytest.fixture(scope="function")
def codspeed_benchmark(request: "pytest.FixtureRequest") -> Callable:
def codspeed_benchmark(request: pytest.FixtureRequest) -> Callable:
return BenchmarkFixture(request)


if not IS_PYTEST_BENCHMARK_INSTALLED:

@pytest.fixture(scope="function")
def benchmark(codspeed_benchmark, request: "pytest.FixtureRequest"):
def benchmark(codspeed_benchmark, request: pytest.FixtureRequest):
"""
Compatibility with pytest-benchmark
"""
return codspeed_benchmark


@pytest.hookimpl()
def pytest_sessionfinish(session: "pytest.Session", exitstatus):
def pytest_sessionfinish(session: pytest.Session, exitstatus):
plugin = get_plugin(session.config)
if plugin.is_codspeed_enabled:
reporter = session.config.pluginmanager.get_plugin("terminalreporter")
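
Why the future import matters for the new pytest.Item | None style: the X | Y union syntax between classes only exists at runtime from Python 3.10, so without postponed evaluation the annotation would raise TypeError at function-definition time on 3.8/3.9. A small sketch (assumed example, not part of the commit):

from __future__ import annotations

class Item:                      # stand-in for pytest.Item in this sketch
    pass

# With postponed evaluation the annotation below is stored as the string
# "Item | None" and never evaluated, so this definition works on Python 3.8+.
# Without the future import, evaluating Item | None at definition time
# raises TypeError before Python 3.10.
def run_protocol(item: Item, nextitem: Item | None) -> None:
    pass

print(run_protocol.__annotations__)
# {'item': 'Item', 'nextitem': 'Item | None', 'return': 'None'}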
2 changes: 1 addition & 1 deletion tests/test_pytest_plugin.py
@@ -304,7 +304,7 @@ def fixtured_child():
perf_filepath = f"/tmp/perf-{current_pid}.map"
print(perf_filepath)

with open(perf_filepath, "r") as perf_file:
with open(perf_filepath) as perf_file:
lines = perf_file.readlines()
assert any(
"py::_run_with_instrumentation.<locals>.__codspeed_root_frame__" in line
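
The only change in the test file drops the explicit "r" argument, which is already the default mode of open(); a tiny sketch of the equivalence (assumed example, not from the commit):

from pathlib import Path

Path("perf-example.map").write_text("py::example_frame\n")  # hypothetical file for the sketch

# open() defaults to mode="r" (text read), so these two calls behave identically.
with open("perf-example.map") as perf_file:
    implicit = perf_file.readlines()
with open("perf-example.map", "r") as perf_file:
    explicit = perf_file.readlines()
assert implicit == explicit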
