feat: support callgraph generation #8

Merged: 15 commits, Jul 4, 2023
25 changes: 5 additions & 20 deletions .github/workflows/ci.yml
@@ -14,11 +14,11 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Set up Python 3.9
- name: Set up Python 3.11
uses: actions/setup-python@v2
with:
python-version: "3.9"
- uses: pre-commit/action@v2.0.0
python-version: "3.11"
- uses: pre-commit/action@v3.0.0
with:
extra_args: --all-files

@@ -37,6 +37,7 @@ jobs:
- "3.9"
- "3.10"
- "3.11"
- "3.12.0-beta.1"

steps:
- uses: actions/checkout@v2
@@ -51,22 +52,6 @@ jobs:
run: pip install .[dev]
- if: matrix.config == 'pytest-benchmark'
name: Install pytest-benchmark to test compatibility
run: pip install pytest-benchmark~=4.0.0 py
run: pip install pytest-benchmark~=4.0.0
- name: Run tests
run: pytest -vs

benchmarks:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Set up Python 3.12
uses: actions/setup-python@v2
with:
python-version: "3.11"
- name: Install local version of pytest-codspeed
run: pip install .

- name: Run benchmarks
uses: CodSpeedHQ/action@main
with:
run: pytest tests/benchmarks/ --codspeed
23 changes: 23 additions & 0 deletions .github/workflows/codspeed.yml
@@ -0,0 +1,23 @@
name: benchmarks
on:
push:
branches: [master]
pull_request:
branches: [master]
workflow_dispatch:

jobs:
benchmarks:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Set up Python 3.12
uses: actions/setup-python@v2
with:
python-version: "3.12.0-beta.3"
- name: Install local version of pytest-codspeed
run: pip install .
- name: Run benchmarks
uses: CodSpeedHQ/action@main
with:
run: pytest tests/benchmarks/ --codspeed
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
@@ -13,7 +13,7 @@ repos:
hooks:
- id: black
- repo: https://github.com/pre-commit/mirrors-mypy
rev: v0.961
rev: v1.3.0
hooks:
- id: mypy
- repo: https://github.com/charliermarsh/ruff-pre-commit
36 changes: 25 additions & 11 deletions pyproject.toml
@@ -28,33 +28,46 @@ classifiers = [
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
"Topic :: Software Development :: Testing",
"Topic :: System :: Benchmark",
"Topic :: Utilities",
"Typing :: Typed",
]
dependencies = ["cffi ~= 1.15.1", "pytest>=3.8"]
dependencies = [
"cffi ~= 1.15.1",
"pytest>=3.8",
"setuptools ~= 67.8.0; python_version >= '3.12'", # FIXME: remove when cffi supports directly python 3.12
]

[project.optional-dependencies]
dev = [
"hatchling ~= 1.11.1",
"black ~= 22.3.0",
"mypy ~= 0.961",
"ruff ~= 0.0.275",
"pytest ~= 7.0",
"pytest-cov ~= 4.0.0",
]
compatibility = ["pytest-benchmarks ~= 3.4.1"]
lint = ["black ~= 23.3.0", "isort ~=5.12.0", "mypy ~= 1.3.0", "ruff ~= 0.0.275"]
compat = ["pytest-benchmark ~= 4.0.0"]
test = ["pytest ~= 7.0", "pytest-cov ~= 4.0.0"]

[project.entry-points]
pytest11 = { codspeed = "pytest_codspeed.plugin" }

[tool.hatch.envs.default]
python = "3.12"
features = ["lint", "test", "compat"]

[tool.hatch.envs.test]
features = ["test"]

[[tool.hatch.envs.test.matrix]]
python = ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12"]
features = ["compat", "test"]

[tool.hatch.version]
path = "src/pytest_codspeed/__init__.py"

[tool.hatch.build.targets.sdist]
include = ["/src"]

[tool.mypy]
python_version = "3.12"

[tool.ruff]
line-length = 88
select = ["E", "F", "I", "C"]
@@ -68,7 +81,8 @@ force_grid_wrap = 0
float_to_top = true

[tool.pytest.ini_options]
addopts = "--ignore=tests/benchmarks"
addopts = "--ignore=tests/benchmarks --ignore=tests/examples"
filterwarnings = ["ignore::DeprecationWarning:pytest_benchmark.utils.*:"]

[tool.coverage.run]
branch = true
2 changes: 1 addition & 1 deletion src/pytest_codspeed/__init__.py
@@ -1 +1 @@
__version__ = "1.2.2"
__version__ = "1.3.0"
110 changes: 82 additions & 28 deletions src/pytest_codspeed/plugin.py
@@ -1,8 +1,9 @@
import gc
import os
import pkgutil
import sys
from dataclasses import dataclass, field
from typing import TYPE_CHECKING, Any, Callable, List, Optional, Tuple
from typing import TYPE_CHECKING, Any, Callable, List, Optional, Tuple, Union

import pytest
from _pytest.fixtures import FixtureManager
@@ -14,6 +15,7 @@
from ._wrapper import LibType

IS_PYTEST_BENCHMARK_INSTALLED = pkgutil.find_loader("pytest_benchmark") is not None
SUPPORTS_PERF_TRAMPOLINE = sys.version_info >= (3, 12)


@pytest.hookimpl(trylast=True)
@@ -94,7 +96,10 @@ def pytest_plugin_registered(plugin, manager: "pytest.PytestPluginManager"):

@pytest.hookimpl(trylast=True)
def pytest_report_header(config: "pytest.Config"):
out = [f"codspeed: {__version__}"]
out = [
f"codspeed: {__version__} "
f"(callgraph: {'enabled' if SUPPORTS_PERF_TRAMPOLINE else 'not supported'})"
]
plugin = get_plugin(config)
if plugin.is_codspeed_enabled and not plugin.should_measure:
out.append(
@@ -111,19 +116,29 @@ def pytest_report_header(config: "pytest.Config"):
return "\n".join(out)


def should_benchmark_item(item: "pytest.Item") -> bool:
def has_benchmark_fixture(item: "pytest.Item") -> bool:
item_fixtures = getattr(item, "fixturenames", [])
return "benchmark" in item_fixtures or "codspeed_benchmark" in item_fixtures


def has_benchmark_marker(item: "pytest.Item") -> bool:
return (
item.get_closest_marker("codspeed_benchmark") is not None
or item.get_closest_marker("benchmark") is not None
or "benchmark" in getattr(item, "fixturenames", [])
)


def should_benchmark_item(item: "pytest.Item") -> bool:
return has_benchmark_fixture(item) or has_benchmark_marker(item)


@pytest.hookimpl()
def pytest_sessionstart(session: "pytest.Session"):
plugin = get_plugin(session.config)
if plugin.is_codspeed_enabled:
plugin.benchmark_count = 0
if plugin.should_measure and SUPPORTS_PERF_TRAMPOLINE:
sys.activate_stack_trampoline("perf") # type: ignore


@pytest.hookimpl(trylast=True)
@@ -150,50 +165,77 @@ def _run_with_instrumentation(
if is_gc_enabled:
gc.collect()
gc.disable()

def __codspeed_root_frame__():
fn(*args, **kwargs)

if SUPPORTS_PERF_TRAMPOLINE:
# Warmup CPython performance map cache
__codspeed_root_frame__()

lib.zero_stats()
lib.start_instrumentation()
fn(*args, **kwargs)
__codspeed_root_frame__()
lib.stop_instrumentation()
lib.dump_stats_at(f"{nodeId}".encode("ascii"))
if is_gc_enabled:
gc.enable()


@pytest.hookimpl(trylast=True)
def pytest_runtest_call(item: "pytest.Item"):
@pytest.hookimpl(tryfirst=True)
def pytest_runtest_protocol(item: "pytest.Item", nextitem: Union["pytest.Item", None]):
plugin = get_plugin(item.config)

if not plugin.is_codspeed_enabled or not should_benchmark_item(item):
return # Avoid running the test multiple times when codspeed is disabled
else:
plugin.benchmark_count += 1
if "benchmark" in getattr(item, "fixturenames", []):
# This is a benchmark fixture, so the measurement is done by the fixture
item.runtest()
elif not plugin.should_measure:
item.runtest()
else:
assert plugin.lib is not None
_run_with_instrumentation(plugin.lib, item.nodeid, item.runtest)
return (
None # Defer to the default test protocol since no benchmarking is needed
)

if has_benchmark_fixture(item):
return None # Instrumentation is handled by the fixture

@pytest.hookimpl()
def pytest_sessionfinish(session: "pytest.Session", exitstatus):
plugin = get_plugin(session.config)
if plugin.is_codspeed_enabled:
reporter = session.config.pluginmanager.get_plugin("terminalreporter")
count_suffix = "benchmarked" if plugin.should_measure else "benchmark tested"
reporter.write_sep(
"=",
f"{plugin.benchmark_count} {count_suffix}",
plugin.benchmark_count += 1
if not plugin.should_measure:
return None # Benchmark counted but will be run in the default protocol

# Setup phase
reports = []
ihook = item.ihook
ihook.pytest_runtest_logstart(nodeid=item.nodeid, location=item.location)
setup_call = pytest.CallInfo.from_call(
lambda: ihook.pytest_runtest_setup(item=item, nextitem=nextitem), "setup"
)
setup_report = ihook.pytest_runtest_makereport(item=item, call=setup_call)
ihook.pytest_runtest_logreport(report=setup_report)
reports.append(setup_report)
# Run phase
if setup_report.passed and not item.config.getoption("setuponly"):
assert plugin.lib is not None
runtest_call = pytest.CallInfo.from_call(
lambda: _run_with_instrumentation(plugin.lib, item.nodeid, item.runtest),
"call",
)
runtest_report = ihook.pytest_runtest_makereport(item=item, call=runtest_call)
ihook.pytest_runtest_logreport(report=runtest_report)
reports.append(runtest_report)

# Teardown phase
teardown_call = pytest.CallInfo.from_call(
lambda: ihook.pytest_runtest_teardown(item=item, nextitem=nextitem), "teardown"
)
teardown_report = ihook.pytest_runtest_makereport(item=item, call=teardown_call)
ihook.pytest_runtest_logreport(report=teardown_report)
reports.append(teardown_report)
ihook.pytest_runtest_logfinish(nodeid=item.nodeid, location=item.location)

return reports # Deny further protocol hooks execution


@pytest.fixture(scope="function")
def codspeed_benchmark(request: "pytest.FixtureRequest") -> Callable:
plugin = get_plugin(request.config)

def run(func: Callable[..., Any], *args: Any, **kwargs: Any) -> Any:
plugin.benchmark_count += 1
if plugin.is_codspeed_enabled and plugin.should_measure:
assert plugin.lib is not None
_run_with_instrumentation(
@@ -213,3 +255,15 @@ def benchmark(codspeed_benchmark, request: "pytest.FixtureRequest"):
Compatibility with pytest-benchmark
"""
return codspeed_benchmark


@pytest.hookimpl()
def pytest_sessionfinish(session: "pytest.Session", exitstatus):
plugin = get_plugin(session.config)
if plugin.is_codspeed_enabled:
reporter = session.config.pluginmanager.get_plugin("terminalreporter")
count_suffix = "benchmarked" if plugin.should_measure else "benchmark tested"
reporter.write_sep(
"=",
f"{plugin.benchmark_count} {count_suffix}",
)
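
The plugin changes above implement callgraph support in two steps: at session start on CPython 3.12+ the perf stack trampoline is activated so Python frames become visible to native profiling, and each measured call is wrapped in a named __codspeed_root_frame__ function that is executed once before instrumentation starts to warm up CPython's perf map cache. A minimal standalone sketch of that pattern follows; the measure helper and the workload function are illustrative only, not part of the plugin.

import sys

SUPPORTS_PERF_TRAMPOLINE = sys.version_info >= (3, 12)

if SUPPORTS_PERF_TRAMPOLINE:
    # Expose Python frames to the native profiler (CPython 3.12+ only).
    sys.activate_stack_trampoline("perf")


def measure(fn, *args, **kwargs):
    # A named wrapper gives the generated callgraph a stable root frame.
    def __codspeed_root_frame__():
        return fn(*args, **kwargs)

    if SUPPORTS_PERF_TRAMPOLINE:
        __codspeed_root_frame__()  # warmup: populate the perf map cache
    # ... instrumentation would start here in the real plugin ...
    return __codspeed_root_frame__()


def workload():
    return sum(range(1_000))


measure(workload)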
13 changes: 11 additions & 2 deletions tests/conftest.py
@@ -1,12 +1,12 @@
import pkgutil
import importlib.util
import shutil
import sys

import pytest

pytest_plugins = ["pytester"]

IS_PYTEST_BENCHMARK_INSTALLED = pkgutil.find_loader("pytest_benchmark") is not None
IS_PYTEST_BENCHMARK_INSTALLED = importlib.util.find_spec("pytest_benchmark") is not None
skip_without_pytest_benchmark = pytest.mark.skipif(
not IS_PYTEST_BENCHMARK_INSTALLED, reason="pytest_benchmark not installed"
)
@@ -25,3 +25,12 @@

if IS_VALGRIND_INSTALLED:
print("NOTICE: Testing with valgrind compatibility", file=sys.stderr, flush=True)

IS_PERF_TRAMPOLINE_SUPPORTED = sys.version_info >= (3, 12)
skip_without_perf_trampoline = pytest.mark.skipif(
not IS_PERF_TRAMPOLINE_SUPPORTED, reason="perf trampoline is not supported"
)

skip_with_perf_trampoline = pytest.mark.skipif(
IS_PERF_TRAMPOLINE_SUPPORTED, reason="perf trampoline is supported"
)
4 changes: 4 additions & 0 deletions tests/examples/test_addition_fixture.py
@@ -0,0 +1,4 @@
def test_some_addition_performance(benchmark):
@benchmark
def _():
return 1 + 1
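
This example uses the decorator form of the benchmark fixture. Judging from the run(func, *args, **kwargs) callable returned by the codspeed_benchmark fixture in the plugin diff above, the fixture should also accept a plain callable with forwarded arguments; a hypothetical variant of the same test, not part of this PR:

def test_some_addition_performance_callable(benchmark):
    # Invoke the fixture directly with the callable to measure;
    # extra positional/keyword arguments would be forwarded to it.
    benchmark(lambda: 1 + 1)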