Merge pull request #134 from rhoban13/exitfirst_working
Support -x to fail fast
nicoddemus authored Jul 7, 2024
2 parents 89e0434 + f03ed7a commit 96c8408
Showing 3 changed files with 154 additions and 61 deletions.
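
As a quick illustration of the behaviour this commit adds, here is a minimal sketch that mirrors the test_exitfirst test added below; the module and test names are only illustrative:

# test_fail_fast.py (illustrative module name)
def test_foo(subtests):
    with subtests.test("sub1"):
        assert False  # reported as SUBFAIL; with -x the session stops here
    with subtests.test("sub2"):
        pass  # with -x/--exitfirst this subtest is never reached

Running pytest -x (shorthand for --exitfirst) against such a file now ends the session after the first failing subtest, printing pytest's usual "stopping after 1 failures" summary instead of continuing to sub2.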
2 changes: 2 additions & 0 deletions CHANGELOG.rst
@@ -4,9 +4,11 @@ CHANGELOG
UNRELEASED
----------

* Support ``-x/--exitfirst`` (`#134`_).
* Hide the traceback inside the ``SubTests.test()`` method (`#131`_).

.. _#131: https://github.com/pytest-dev/pytest-subtests/pull/131
.. _#134: https://github.com/pytest-dev/pytest-subtests/pull/134

0.12.1 (2024-03-07)
-------------------
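The plugin.py diff below is the heart of the change: SubTests.test() is no longer implemented as a generator-based @contextmanager; it now returns an explicit _SubTestContextManager, whose __exit__ can decide, per exception, whether to suppress the subtest failure (return True, the normal case) or let it propagate (return False) once the session is already in fail-fast mode. A minimal, self-contained sketch of that protocol (hypothetical names, not the plugin's actual class):

# Hypothetical stand-in illustrating the __exit__ protocol the refactor relies on;
# the real _SubTestContextManager additionally builds and logs a SubTestReport.
class FailFastGuard:
    def __init__(self, shouldfail: bool) -> None:
        self.shouldfail = shouldfail  # stands in for request.session.shouldfail

    def __enter__(self) -> "FailFastGuard":
        return self

    def __exit__(self, exc_type, exc_val, exc_tb) -> bool:
        if exc_val is None:
            return True   # subtest body passed; nothing to report
        if self.shouldfail:
            return False  # fail-fast already triggered: let the error propagate
        # (the real plugin records exc_val as a subtest failure here)
        return True       # otherwise swallow the failure and keep running

Per the note in the new class docstring, the previous @contextmanager implementation could not fully control the output when exiting due to an exception in --exitfirst mode, which is why the logic moved into an explicit class (#134).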
187 changes: 126 additions & 61 deletions src/pytest_subtests/plugin.py
@@ -3,11 +3,13 @@
import sys
import time
from contextlib import contextmanager
from contextlib import ExitStack
from contextlib import nullcontext
from typing import Any
from typing import Callable
from typing import ContextManager
from typing import Generator
from typing import Iterator
from typing import Mapping
from typing import TYPE_CHECKING
from unittest import TestCase
@@ -174,91 +176,107 @@ class SubTests:
def item(self) -> pytest.Item:
return self.request.node

@contextmanager
def _capturing_output(self) -> Generator[Captured, None, None]:
option = self.request.config.getoption("capture", None)
def test(
self,
msg: str | None = None,
**kwargs: Any,
) -> _SubTestContextManager:
"""
Context manager for subtests, capturing exceptions raised inside the subtest scope and handling
them through the pytest machinery.
Usage:
.. code-block:: python
with subtests.test(msg="subtest"):
assert 1 == 1
"""
return _SubTestContextManager(
self.ihook,
msg,
kwargs,
request=self.request,
suspend_capture_ctx=self.suspend_capture_ctx,
)

# capsys or capfd are active, subtest should not capture

capman = self.request.config.pluginmanager.getplugin("capturemanager")
capture_fixture_active = getattr(capman, "_capture_fixture", None)
@attr.s(auto_attribs=True)
class _SubTestContextManager:
"""
Context manager for subtests, capturing exceptions raised inside the subtest scope and handling
them through the pytest machinery.
if option == "sys" and not capture_fixture_active:
with ignore_pytest_private_warning():
fixture = CaptureFixture(SysCapture, self.request)
elif option == "fd" and not capture_fixture_active:
with ignore_pytest_private_warning():
fixture = CaptureFixture(FDCapture, self.request)
else:
fixture = None
Note: initially this logic was implemented directly in SubTests.test() as a @contextmanager, however
it is not possible to control the output fully when exiting from it due to an exception when
in --exitfirst mode, so this was refactored into an explicit context manager class (#134).
"""

if fixture is not None:
fixture._start()
ihook: pluggy.HookRelay
msg: str | None
kwargs: dict[str, Any]
suspend_capture_ctx: Callable[[], ContextManager]
request: SubRequest

captured = Captured()
try:
yield captured
finally:
if fixture is not None:
out, err = fixture.readouterr()
fixture.close()
captured.out = out
captured.err = err

@contextmanager
def _capturing_logs(self) -> Generator[CapturedLogs | NullCapturedLogs, None, None]:
logging_plugin = self.request.config.pluginmanager.getplugin("logging-plugin")
if logging_plugin is None:
yield NullCapturedLogs()
else:
handler = LogCaptureHandler()
handler.setFormatter(logging_plugin.formatter)

captured_logs = CapturedLogs(handler)
with catching_logs(handler):
yield captured_logs

@contextmanager
def test(
self,
msg: str | None = None,
**kwargs: Any,
) -> Generator[None, None, None]:
# Hide from tracebacks.
def __enter__(self) -> None:
__tracebackhide__ = True

start = time.time()
precise_start = time.perf_counter()
exc_info = None
self._start = time.time()
self._precise_start = time.perf_counter()
self._exc_info = None

self._exit_stack = ExitStack()
self._captured_output = self._exit_stack.enter_context(
capturing_output(self.request)
)
self._captured_logs = self._exit_stack.enter_context(
capturing_logs(self.request)
)

def __exit__(
self,
exc_type: type[Exception] | None,
exc_val: Exception | None,
exc_tb: TracebackType | None,
) -> bool:
__tracebackhide__ = True
try:
if exc_val is not None:
if self.request.session.shouldfail:
return False

with self._capturing_output() as captured_output, self._capturing_logs() as captured_logs:
try:
yield
except (Exception, OutcomeException):
exc_info = ExceptionInfo.from_current()
exc_info = ExceptionInfo.from_exception(exc_val)
else:
exc_info = None
finally:
self._exit_stack.close()

precise_stop = time.perf_counter()
duration = precise_stop - precise_start
duration = precise_stop - self._precise_start
stop = time.time()

call_info = make_call_info(
exc_info, start=start, stop=stop, duration=duration, when="call"
exc_info, start=self._start, stop=stop, duration=duration, when="call"
)
report = self.ihook.pytest_runtest_makereport(
item=self.request.node, call=call_info
)
report = self.ihook.pytest_runtest_makereport(item=self.item, call=call_info)
sub_report = SubTestReport._from_test_report(report)
sub_report.context = SubTestContext(msg, kwargs.copy())
sub_report.context = SubTestContext(self.msg, self.kwargs.copy())

captured_output.update_report(sub_report)
captured_logs.update_report(sub_report)
self._captured_output.update_report(sub_report)
self._captured_logs.update_report(sub_report)

with self.suspend_capture_ctx():
self.ihook.pytest_runtest_logreport(report=sub_report)

if check_interactive_exception(call_info, sub_report):
self.ihook.pytest_exception_interact(
node=self.item, call=call_info, report=sub_report
node=self.request.node, call=call_info, report=sub_report
)

return True


def make_call_info(
exc_info: ExceptionInfo[BaseException] | None,
@@ -279,6 +297,53 @@ def make_call_info(
)


@contextmanager
def capturing_output(request: SubRequest) -> Iterator[Captured]:
option = request.config.getoption("capture", None)

# capsys or capfd are active, subtest should not capture.
capman = request.config.pluginmanager.getplugin("capturemanager")
capture_fixture_active = getattr(capman, "_capture_fixture", None)

if option == "sys" and not capture_fixture_active:
with ignore_pytest_private_warning():
fixture = CaptureFixture(SysCapture, request)
elif option == "fd" and not capture_fixture_active:
with ignore_pytest_private_warning():
fixture = CaptureFixture(FDCapture, request)
else:
fixture = None

if fixture is not None:
fixture._start()

captured = Captured()
try:
yield captured
finally:
if fixture is not None:
out, err = fixture.readouterr()
fixture.close()
captured.out = out
captured.err = err


@contextmanager
def capturing_logs(
request: SubRequest,
) -> Iterator[CapturedLogs | NullCapturedLogs]:
logging_plugin = request.config.pluginmanager.getplugin("logging-plugin")
if logging_plugin is None:
yield NullCapturedLogs()
else:
handler = LogCaptureHandler()
handler.setFormatter(logging_plugin.formatter)

captured_logs = CapturedLogs(handler)
with catching_logs(handler):
yield captured_logs


@contextmanager
def ignore_pytest_private_warning() -> Generator[None, None, None]:
import warnings
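Two helpers that used to be SubTests methods (_capturing_output, _capturing_logs) are now the module-level generators capturing_output() and capturing_logs() shown above; __enter__ stacks them on an ExitStack and __exit__ closes that stack in a finally block, so output and log capture are torn down even when the subtest body raised. A generic, self-contained sketch of that ExitStack pattern (not the plugin's code):

from contextlib import ExitStack, contextmanager

@contextmanager
def capture(name):
    print(f"start {name}")
    try:
        yield name
    finally:
        print(f"stop {name}")

stack = ExitStack()
stack.enter_context(capture("output"))  # plays the role of capturing_output(request)
stack.enter_context(capture("logs"))    # plays the role of capturing_logs(request)
try:
    pass  # the subtest body runs while both captures are active
finally:
    stack.close()  # unwinds both captures in reverse order, even on error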
26 changes: 26 additions & 0 deletions tests/test_subtests.py
@@ -580,3 +580,29 @@ def runpytest_and_check_pdb(
# assert.
result.stdout.fnmatch_lines("*entering PDB*")
assert self._FakePdb.calls == ["init", "reset", "interaction"]


def test_exitfirst(pytester: pytest.Pytester) -> None:
"""
Validate that when passing --exitfirst the test exits after the first failed subtest.
"""
pytester.makepyfile(
"""
def test_foo(subtests):
with subtests.test("sub1"):
assert False
with subtests.test("sub2"):
pass
"""
)
result = pytester.runpytest("--exitfirst")
assert result.parseoutcomes()["failed"] == 1
result.stdout.fnmatch_lines(
[
"*[[]sub1[]] SUBFAIL test_exitfirst.py::test_foo - assert False*",
"* stopping after 1 failures*",
],
consecutive=True,
)
result.stdout.no_fnmatch_line("*sub2*") # sub2 not executed.
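
The "stopping after 1 failures" line that test_exitfirst matches is pytest's own session-level message: with -x (equivalent to --maxfail=1), each failed report, including a SUBFAIL report logged from _SubTestContextManager.__exit__, bumps a failure counter, and once it reaches maxfail the session's shouldfail flag is set; that is the same flag __exit__ consults to stop suppressing subtest exceptions. A simplified paraphrase (not verbatim pytest source):

from types import SimpleNamespace

def on_test_report(session, report_failed: bool, maxfail: int = 1) -> None:
    # Roughly what pytest's Session does for each logged test report.
    if report_failed:
        session.testsfailed += 1
        if maxfail and session.testsfailed >= maxfail:
            session.shouldfail = f"stopping after {session.testsfailed} failures"

session = SimpleNamespace(testsfailed=0, shouldfail=False)
on_test_report(session, report_failed=True)  # the SUBFAIL report for "sub1"
print(session.shouldfail)                    # -> stopping after 1 failures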
