Skip to content

Commit e3f4cb9

Browse files
committed
chore(config): Add benchmark markers and cache tracking
what: - Add performance and benchmark pytest markers to pyproject.toml - Add fixture cache hit/miss tracking hooks to conftest.py
1 parent 6b1cfb1 commit e3f4cb9

File tree

3 files changed

+194
-3
lines changed

3 files changed

+194
-3
lines changed

conftest.py

Lines changed: 158 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -10,16 +10,174 @@
1010

1111
from __future__ import annotations
1212

13+
import dataclasses
14+
import time
1315
import typing as t
16+
from collections import defaultdict
1417

1518
import pytest
1619

1720
if t.TYPE_CHECKING:
1821
import pathlib
1922

23+
from _pytest.fixtures import FixtureDef, SubRequest
24+
from _pytest.terminal import TerminalReporter
25+
2026
pytest_plugins = ["pytester"]
2127

2228

29+
@dataclasses.dataclass
class FixtureMetrics:
    """Metrics collected during fixture execution.

    Pairs a fixture's setup duration with an optional cache-hit flag.
    NOTE(review): not referenced by the profiling hooks visible in this
    file (they store into module-level dicts instead) — presumably
    consumed elsewhere; confirm before removing.
    """

    # Fixture name as registered with pytest (FixtureDef.argname).
    fixture_name: str
    # Wall-clock setup duration in seconds.
    duration: float
    # True/False for cache-aware repo fixtures; None if not applicable (non-repo fixture).
    cache_hit: bool | None = None
36+
37+
38+
# Fixture profiling storage.
# Module-level accumulators shared by pytest_fixture_setup (writer) and
# pytest_terminal_summary (reader); all keyed by fixture name (argname).
_fixture_timings: dict[str, list[float]] = defaultdict(list)  # per-call setup durations (seconds)
_fixture_call_counts: dict[str, int] = defaultdict(int)  # total setups per fixture
_fixture_cache_hits: dict[str, int] = defaultdict(int)  # results with from_cache == True
_fixture_cache_misses: dict[str, int] = defaultdict(int)  # results with from_cache == False
43+
44+
45+
def pytest_addoption(parser: pytest.Parser) -> None:
    """Register libvcs fixture-profiling command line options."""
    opt_group = parser.getgroup("libvcs", "libvcs fixture options")
    # (flag, addoption keyword arguments) pairs, registered in order.
    option_specs: list[tuple[str, dict[str, t.Any]]] = [
        (
            "--fixture-durations",
            {
                "action": "store",
                "type": int,
                "default": 0,
                "metavar": "N",
                "help": "Show N slowest fixture setup times (N=0 for all)",
            },
        ),
        (
            "--fixture-durations-min",
            {
                "action": "store",
                "type": float,
                "default": 0.005,
                "metavar": "SECONDS",
                "help": "Minimum duration to show in fixture timing report (default: 0.005)",
            },
        ),
        (
            "--run-performance",
            {
                "action": "store_true",
                "default": False,
                "help": "Run performance tests (marked with @pytest.mark.performance)",
            },
        ),
    ]
    for flag, kwargs in option_specs:
        opt_group.addoption(flag, **kwargs)
70+
71+
72+
def pytest_collection_modifyitems(
    config: pytest.Config,
    items: list[pytest.Item],
) -> None:
    """Skip tests marked ``performance`` unless ``--run-performance`` was given."""
    if config.getoption("--run-performance"):
        # Explicitly opted in: leave the collected items untouched.
        return

    skip_marker = pytest.mark.skip(reason="need --run-performance option to run")
    for perf_item in (item for item in items if "performance" in item.keywords):
        perf_item.add_marker(skip_marker)
85+
86+
87+
@pytest.hookimpl(wrapper=True)
def pytest_fixture_setup(
    fixturedef: FixtureDef[t.Any],
    request: SubRequest,
) -> t.Generator[None, t.Any, t.Any]:
    """Wrap fixture setup to measure timing and track cache hits.

    Duration is recorded for every setup — even one that raises — via the
    ``finally`` clause. Cache bookkeeping applies only to fixture results
    that expose a ``from_cache`` attribute (repo fixture results).
    """
    name = fixturedef.argname
    started = time.perf_counter()
    try:
        outcome = yield
        # Only cache-aware fixture results carry a ``from_cache`` flag.
        if hasattr(outcome, "from_cache"):
            counter = (
                _fixture_cache_hits if outcome.from_cache else _fixture_cache_misses
            )
            counter[name] += 1
        return outcome
    finally:
        # Always record timing, including failed setups.
        _fixture_timings[name].append(time.perf_counter() - started)
        _fixture_call_counts[name] += 1
109+
110+
111+
def _write_fixture_timing_table(
    terminalreporter: TerminalReporter,
    fixture_stats: list[tuple[str, float, int, float]],
) -> None:
    """Render the 'fixture setup times' table for the given stats rows."""
    terminalreporter.write_sep("=", "fixture setup times")
    terminalreporter.write_line("")
    terminalreporter.write_line(
        f"{'Fixture':<40} {'Total':>10} {'Calls':>8} {'Avg':>10}",
    )
    terminalreporter.write_line("-" * 70)
    for name, total, calls, avg in fixture_stats:
        terminalreporter.write_line(
            f"{name:<40} {total:>9.3f}s {calls:>8} {avg:>9.3f}s",
        )


def _write_fixture_cache_table(terminalreporter: TerminalReporter) -> None:
    """Render the 'fixture cache statistics' table from module-level counters."""
    terminalreporter.write_line("")
    terminalreporter.write_sep("=", "fixture cache statistics")
    terminalreporter.write_line("")
    terminalreporter.write_line(
        f"{'Fixture':<40} {'Hits':>8} {'Misses':>8} {'Hit Rate':>10}",
    )
    terminalreporter.write_line("-" * 70)

    # Union of all fixtures that recorded either a hit or a miss.
    all_cache_fixtures = set(_fixture_cache_hits) | set(_fixture_cache_misses)
    for name in sorted(all_cache_fixtures):
        hits = _fixture_cache_hits.get(name, 0)
        misses = _fixture_cache_misses.get(name, 0)
        total = hits + misses
        hit_rate = (hits / total * 100) if total > 0 else 0
        terminalreporter.write_line(
            f"{name:<40} {hits:>8} {misses:>8} {hit_rate:>9.1f}%",
        )


def pytest_terminal_summary(
    terminalreporter: TerminalReporter,
    exitstatus: int,
    config: pytest.Config,
) -> None:
    """Display fixture timing and cache statistics summary.

    Reporting is enabled by ``--fixture-durations N`` (N > 0) or by verbose
    mode. Fix over the previous version: the cache-statistics section is now
    rendered whenever any cache data exists, instead of being silently
    skipped when the timing table came out empty after the
    ``--fixture-durations-min`` filter.
    """
    durations_count = config.option.fixture_durations
    durations_min = config.option.fixture_durations_min

    # Skip if no timing requested (durations_count defaults to 0 meaning "off")
    if durations_count == 0 and not config.option.verbose:
        return

    # Build summary rows: (name, total_time, call_count, avg_time).
    fixture_stats: list[tuple[str, float, int, float]] = []
    for name, times in _fixture_timings.items():
        total_time = sum(times)
        call_count = len(times)
        avg_time = total_time / call_count if call_count > 0 else 0
        fixture_stats.append((name, total_time, call_count, avg_time))

    # Sort by total time descending, drop rows below the minimum duration,
    # then truncate to the N slowest when a positive N was requested.
    fixture_stats.sort(key=lambda s: s[1], reverse=True)
    fixture_stats = [s for s in fixture_stats if s[1] >= durations_min]
    if durations_count > 0:
        fixture_stats = fixture_stats[:durations_count]

    if fixture_stats:
        _write_fixture_timing_table(terminalreporter, fixture_stats)

    # Display cache statistics if any repo fixtures were used — independent
    # of whether the timing table had rows to show.
    if _fixture_cache_hits or _fixture_cache_misses:
        _write_fixture_cache_table(terminalreporter)
179+
180+
23181
@pytest.fixture(autouse=True)
24182
def add_doctest_fixtures(
25183
request: pytest.FixtureRequest,

pyproject.toml

Lines changed: 8 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -78,8 +78,9 @@ dev = [
7878
# Testing
7979
"gp-libs",
8080
"pytest",
81-
"pytest-rerunfailures",
81+
"pytest-asyncio",
8282
"pytest-mock",
83+
"pytest-rerunfailures",
8384
"pytest-watcher",
8485
# Coverage
8586
"codecov",
@@ -106,8 +107,9 @@ docs = [
106107
testing = [
107108
"gp-libs",
108109
"pytest",
109-
"pytest-rerunfailures",
110+
"pytest-asyncio",
110111
"pytest-mock",
112+
"pytest-rerunfailures",
111113
"pytest-watcher",
112114
]
113115
coverage =[
@@ -234,6 +236,10 @@ testpaths = [
234236
filterwarnings = [
235237
"ignore:The frontend.Option(Parser)? class.*:DeprecationWarning::",
236238
]
239+
markers = [
240+
"performance: marks tests as performance tests (deselect with '-m \"not performance\"')",
241+
"benchmark: marks tests as benchmark tests for comparing implementation methods",
242+
]
237243

238244
[tool.pytest-watcher]
239245
now = true

uv.lock

Lines changed: 28 additions & 1 deletion
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

0 commit comments

Comments
 (0)