Fail on subtest failure #27

Closed

Make a test fail when any of its subtests fail, instead of reporting it as passed (or skipped, when `pytest.skip()` runs after a failed subtest).
82 changes: 81 additions & 1 deletion — pytest_subtests.py

```diff
@@ -9,6 +9,7 @@
 from _pytest.capture import FDCapture
 from _pytest.capture import SysCapture
 from _pytest.outcomes import OutcomeException
+from _pytest.outcomes import Skipped
 from _pytest.reports import TestReport
 from _pytest.runner import CallInfo
 from _pytest.unittest import TestCaseFunction
@@ -23,6 +24,20 @@ def nullcontext():
 else:
     from contextlib import nullcontext
 
+_ATTR_COUNTER = "_subtests_failed"
+
+
+class SubTestFailed(OutcomeException):
+    """Exception used to report failures caused by subtest errors.
+
+    Raised to change the outcome of an otherwise passed or skipped test
+    that has failed subtests.
+    """
+
+    @classmethod
+    def from_count(cls, count):
+        return cls("Failed subtests: {}".format(count))
+
+
 @attr.s
 class SubTestContext(object):
@@ -74,10 +89,12 @@ def _from_json(cls, reportdict):
 def _addSubTest(self, test_case, test, exc_info):
     if exc_info is not None:
         msg = test._message if isinstance(test._message, str) else None
-        call_info = CallInfo(None, ExceptionInfo(exc_info), 0, 0, when="call")
+        exc_info = ExceptionInfo(exc_info)
+        call_info = CallInfo(None, exc_info, 0, 0, when="call")
         sub_report = SubTestReport.from_item_and_call(item=self, call=call_info)
         sub_report.context = SubTestContext(msg, dict(test.params))
         self.ihook.pytest_runtest_logreport(report=sub_report)
+        _increment_failed_subtests(exc_info, self)
 
 
 def pytest_configure(config):
@@ -161,6 +178,7 @@ def test(self, msg=None, **kwargs):
         call_info = CallInfo(None, exc_info, start, stop, when="call")
         sub_report = SubTestReport.from_item_and_call(item=self.item, call=call_info)
         sub_report.context = SubTestContext(msg, kwargs.copy())
+        _increment_failed_subtests(exc_info, self.item)
 
         captured.update_report(sub_report)
@@ -188,3 +206,65 @@ def pytest_report_to_serializable(report):
 def pytest_report_from_serializable(data):
     if data.get("_report_type") == "SubTestReport":
         return SubTestReport._from_json(data)
+    elif data.get("$report_type") == "SubTestFailureReport":
+        return SubTestFailureReport._from_json(data)
+
+
+def _increment_failed_subtests(exc_info, item):
+    # For unittest.TestCase.subTest, skipped tests are processed
+    # through TestResult.addSkip, not here.
+    if exc_info is None or exc_info.errisinstance(Skipped):
+        return
+    setattr(item, _ATTR_COUNTER, getattr(item, _ATTR_COUNTER, 0) + 1)
+
+
+# A function to be shown in the traceback instead of the less informative hook name.
+def check_failed_subtests(item):
+    failed = getattr(item, _ATTR_COUNTER, 0)
+    if failed > 0:
+        raise SubTestFailed.from_count(failed)
+
+
+# A trickier alternative is the pytest_runtest_makereport hook.
+@pytest.hookimpl(hookwrapper=True)
+def pytest_runtest_call(item):
+    """Wrapper to avoid counting tests with failed subtests as passed.
+
+    Forces a ``SubTestFailed`` exception if some subtests failed
+    but the test outcome falls into the passed or skipped categories.
+    """
+    __tracebackhide__ = True
+    try:
+        outcome = yield
+        try:
+            outcome.get_result()
+        except Skipped:
+            pass
+    except (Exception, OutcomeException):
+        return
+
+    # Due to pluggy#244 we cannot simply raise the exception here.
+    try:
+        check_failed_subtests(item)
+    except SubTestFailed:
+        outcome._excinfo = sys.exc_info()
+    finally:
+        if hasattr(item, _ATTR_COUNTER):
+            delattr(item, _ATTR_COUNTER)
+
+
+class SubTestFailureReport(TestReport):
+    @property
+    def count_towards_summary(self):
+        return False
+
+
+@pytest.hookimpl(tryfirst=True)
+def pytest_runtest_makereport(item, call):
+    excinfo = call.excinfo
+    if excinfo is None or not isinstance(excinfo.value, SubTestFailed):
+        return
+
+    report = SubTestFailureReport.from_item_and_call(item=item, call=call)
+    report.longrepr = str(excinfo.value)
+    return report
```
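For illustration, a minimal sketch of the behavior this change enforces. The file name `test_demo.py` is made up; the expected outcome mirrors the `test_skip_after_failed_subtest` case added to the test suite below.

```python
# test_demo.py: uses the `subtests` fixture provided by pytest-subtests.
import pytest


def test_demo(subtests):
    with subtests.test("failing subtest"):
        assert 2 * 2 == 5  # this subtest fails and is reported individually

    # Before this change, the skip put the whole test into the "skipped"
    # category despite the failed subtest; now the failed-subtest counter
    # forces a SubTestFailed outcome ("Failed subtests: 1") instead.
    pytest.skip("Try to skip it")
```

Running `pytest test_demo.py` should now end with `1 failed` rather than `1 skipped`.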
68 changes: 61 additions & 7 deletions — tests/test_subtests.py

```diff
@@ -32,7 +32,7 @@ def test_simple_terminal_normal(self, simple_script, testdir, mode):
         expected_lines += [
             "* test_foo [[]custom[]] (i=1) *",
             "* test_foo [[]custom[]] (i=3) *",
-            "* 2 failed, 1 passed in *",
+            "* 2 failed, 0 passed in *",
         ]
         result.stdout.fnmatch_lines(expected_lines)
@@ -46,7 +46,7 @@ def test_simple_terminal_verbose(self, simple_script, testdir, mode):
                 "test_simple_terminal_verbose.py::test_foo PASSED *100%*",
                 "test_simple_terminal_verbose.py::test_foo FAILED *100%*",
                 "test_simple_terminal_verbose.py::test_foo PASSED *100%*",
-                "test_simple_terminal_verbose.py::test_foo PASSED *100%*",
+                "test_simple_terminal_verbose.py::test_foo FAILED *100%*",
             ]
         else:
             pytest.importorskip("xdist")
@@ -64,7 +64,7 @@ def test_simple_terminal_verbose(self, simple_script, testdir, mode):
         expected_lines += [
             "* test_foo [[]custom[]] (i=1) *",
             "* test_foo [[]custom[]] (i=3) *",
-            "* 2 failed, 1 passed in *",
+            "* 2 failed, 0 passed in *",
         ]
         result.stdout.fnmatch_lines(expected_lines)
@@ -89,6 +89,60 @@ def test_foo(subtests):
         expected_lines += ["* 1 passed, 3 skipped in *"]
         result.stdout.fnmatch_lines(expected_lines)
 
+    def test_skip_after_failed_subtest(self, testdir, mode):
+        testdir.makepyfile(
+            """
+            import pytest
+
+            def test_skip_after_failed_subtest_py(subtests):
+                with subtests.test("failing subtest"):
+                    assert 2*2 == 5
+
+                pytest.skip("Try to skip it")
+            """
+        )
+        if mode == "normal":
+            result = testdir.runpytest()
+            expected_lines = ["collected 1 item"]
+        else:
+            pytest.importorskip("xdist")
+            result = testdir.runpytest("-n1")
+            expected_lines = ["gw0 [1]"]
+
+        expected_lines += [
+            "*_ test_skip_after_failed_subtest_py [[]failing subtest[]] _*",
+            "E*assert (2 * 2) == 5",
+            "*: AssertionError",
+            "*_ test_skip_after_failed_subtest_py _*",
+            "Failed subtests: 1",
+            # TODO: consider reporting the skip as well
+            "* 1 failed in *",
+        ]
+        result.stdout.fnmatch_lines(expected_lines)
+
+    def test_skip_after_passed_subtest(self, testdir, mode):
+        testdir.makepyfile(
+            """
+            import pytest
+
+            def test_skip_after_passed_subtest_py(subtests):
+                with subtests.test("successful subtest"):
+                    assert 2*2 == 4
+
+                pytest.skip("Try to skip it")
+            """
+        )
+        if mode == "normal":
+            result = testdir.runpytest()
+            expected_lines = ["collected 1 item"]
+        else:
+            pytest.importorskip("xdist")
+            result = testdir.runpytest("-n1")
+            expected_lines = ["gw0 [1]"]
+
+        expected_lines += ["* 0 passed, 1 skipped in *"]
+        result.stdout.fnmatch_lines(expected_lines)
+
 
 class TestSubTest:
     """
@@ -143,7 +197,7 @@ def test_simple_terminal_normal(self, simple_script, testdir, runner):
                 "E * AssertionError: 1 != 0",
                 "* T.test_foo [[]custom[]] (i=3) *",
                 "E * AssertionError: 1 != 0",
-                "* 2 failed, 1 passed in *",
+                "* 2 failed in *",
             ]
         )
@@ -170,7 +224,7 @@ def test_simple_terminal_verbose(self, simple_script, testdir, runner):
                 "*collected 1 item",
                 "test_simple_terminal_verbose.py::T::test_foo FAILED *100%*",
                 "test_simple_terminal_verbose.py::T::test_foo FAILED *100%*",
-                "test_simple_terminal_verbose.py::T::test_foo PASSED *100%*",
+                "test_simple_terminal_verbose.py::T::test_foo FAILED *100%*",
            ]
         else:
             pytest.importorskip("xdist")
@@ -179,7 +233,7 @@ def test_simple_terminal_verbose(self, simple_script, testdir, runner):
                 "gw0 [1]",
                 "*gw0*100%* FAILED test_simple_terminal_verbose.py::T::test_foo*",
                 "*gw0*100%* FAILED test_simple_terminal_verbose.py::T::test_foo*",
-                "*gw0*100%* PASSED test_simple_terminal_verbose.py::T::test_foo*",
+                "*gw0*100%* FAILED test_simple_terminal_verbose.py::T::test_foo*",
             ]
         result.stdout.fnmatch_lines(
             expected_lines
@@ -188,7 +242,7 @@ def test_simple_terminal_verbose(self, simple_script, testdir, runner):
                 "E * AssertionError: 1 != 0",
                 "* T.test_foo [[]custom[]] (i=3) *",
                 "E * AssertionError: 1 != 0",
-                "* 2 failed, 1 passed in *",
+                "* 2 failed in *",
             ]
         )
```
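The changed summary expectations fall out of how pytest builds its summary line: a category is printed when it has any reports at all, but each report can opt out of the count via its `count_towards_summary` property. Below is a simplified sketch of that logic, not the actual pytest source (which lives in `_pytest/terminal.py`), assuming, as the expectations above imply, that passed subtest reports also opt out of the count.

```python
def build_summary_line(stats):
    # `stats` maps a category name ("failed", "passed", ...) to its reports.
    parts = []
    for category, reports in stats.items():
        if reports:
            # SubTestFailureReport and passed subtest reports return False
            # from count_towards_summary, so a non-empty category can still
            # count zero reports. That is how "0 passed" can be printed.
            count = sum(
                1 for report in reports
                if getattr(report, "count_towards_summary", True)
            )
            parts.append("{} {}".format(count, category))
    return ", ".join(parts)
```

In the fixture-based tests the passing subtests keep the `passed` category non-empty while contributing zero to it, hence `2 failed, 0 passed`. The unittest runner creates no reports for passing subtests at all (`_addSubTest` only reports when `exc_info` is not None), hence plain `2 failed`.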