Add summary for xfails with -rxX option (#11574)

Co-authored-by: Brian Okken <1568356+okken@users.noreply.github.com>
Fabian Sturm 2024-01-05 13:59:19 +01:00 committed by GitHub
parent c2a4a8d518
commit 13eacdad8a
5 changed files with 166 additions and 19 deletions


@@ -138,6 +138,7 @@ Erik Hasse
Erik M. Bray
Evan Kepner
Evgeny Seliverstov
Fabian Sturm
Fabien Zarifian
Fabio Zadrozny
Felix Hofstätter


@@ -0,0 +1,5 @@
Improvements to how ``-r`` reports xfailures and xpasses:
* Report tracebacks for xfailures when ``-rx`` is set.
* Report captured output for xpasses when ``-rX`` is set.
* For xpasses, add ``-`` in summary between test name and reason, to match how xfail is displayed.
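
As a usage sketch of the behavior described above (the module name, test name, and reason string here are illustrative and not part of this commit), a module with an xfailing test now gets a traceback section under ``-rx``, and xpasses get captured output under ``-rX``:

# demo_xfail.py -- illustrative example only
import pytest

@pytest.mark.xfail(reason="known bug")
def test_known_bug():
    assert 1 == 2

# Assumed invocation: pytest -rx demo_xfail.py
# The run now prints an "XFAILURES" section containing the assertion traceback,
# and the short summary line reads:
#   XFAIL demo_xfail.py::test_known_bug - known bug
# Likewise, running with -rX prints an "XPASSES" section with captured output for
# xpassed tests, and the short summary shows "XPASS <nodeid> - <reason>".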


@@ -878,8 +878,10 @@ class TerminalReporter:
def pytest_terminal_summary(self) -> Generator[None, None, None]:
self.summary_errors()
self.summary_failures()
self.summary_xfailures()
self.summary_warnings()
self.summary_passes()
self.summary_xpasses()
try:
return (yield)
finally:
@@ -1009,12 +1011,20 @@
)
def summary_passes(self) -> None:
self.summary_passes_combined("passed", "PASSES", "P")
def summary_xpasses(self) -> None:
self.summary_passes_combined("xpassed", "XPASSES", "X")
def summary_passes_combined(
self, which_reports: str, sep_title: str, needed_opt: str
) -> None:
if self.config.option.tbstyle != "no":
if self.hasopt("P"):
reports: List[TestReport] = self.getreports("passed")
if self.hasopt(needed_opt):
reports: List[TestReport] = self.getreports(which_reports)
if not reports:
return
self.write_sep("=", "PASSES")
self.write_sep("=", sep_title)
for rep in reports:
if rep.sections:
msg = self._getfailureheadline(rep)
@@ -1048,11 +1058,20 @@
self._tw.line(content)
def summary_failures(self) -> None:
self.summary_failures_combined("failed", "FAILURES")
def summary_xfailures(self) -> None:
self.summary_failures_combined("xfailed", "XFAILURES", "x")
def summary_failures_combined(
self, which_reports: str, sep_title: str, needed_opt: Optional[str] = None
) -> None:
if self.config.option.tbstyle != "no":
reports: List[BaseReport] = self.getreports("failed")
if not needed_opt or self.hasopt(needed_opt):
reports: List[BaseReport] = self.getreports(which_reports)
if not reports:
return
self.write_sep("=", "FAILURES")
self.write_sep("=", sep_title)
if self.config.option.tbstyle == "line":
for rep in reports:
line = self._getcrashline(rep)
@@ -1168,8 +1187,11 @@
verbose_word, **{_color_for_type["warnings"]: True}
)
nodeid = _get_node_id_with_markup(self._tw, self.config, rep)
line = f"{markup_word} {nodeid}"
reason = rep.wasxfail
lines.append(f"{markup_word} {nodeid} {reason}")
if reason:
line += " - " + str(reason)
lines.append(line)
def show_skipped(lines: List[str]) -> None:
skipped: List[CollectReport] = self.stats.get("skipped", [])


@@ -649,7 +649,7 @@ class TestXFail:
        result.stdout.fnmatch_lines(
            [
                "*test_strict_xfail*",
                "XPASS test_strict_xfail.py::test_foo unsupported feature",
                "XPASS test_strict_xfail.py::test_foo - unsupported feature",
            ]
        )
        assert result.ret == (1 if strict else 0)


@@ -2619,3 +2619,122 @@ def test_format_trimmed() -> None:
    assert _format_trimmed(" ({}) ", msg, len(msg) + 4) == " (unconditional skip) "
    assert _format_trimmed(" ({}) ", msg, len(msg) + 3) == " (unconditional ...) "


def test_summary_xfail_reason(pytester: Pytester) -> None:
    pytester.makepyfile(
        """
        import pytest

        @pytest.mark.xfail
        def test_xfail():
            assert False

        @pytest.mark.xfail(reason="foo")
        def test_xfail_reason():
            assert False
        """
    )
    result = pytester.runpytest("-rx")
    expect1 = "XFAIL test_summary_xfail_reason.py::test_xfail"
    expect2 = "XFAIL test_summary_xfail_reason.py::test_xfail_reason - foo"
    result.stdout.fnmatch_lines([expect1, expect2])
    assert result.stdout.lines.count(expect1) == 1
    assert result.stdout.lines.count(expect2) == 1


def test_summary_xfail_tb(pytester: Pytester) -> None:
    pytester.makepyfile(
        """
        import pytest

        @pytest.mark.xfail
        def test_xfail():
            a, b = 1, 2
            assert a == b
        """
    )
    result = pytester.runpytest("-rx")
    result.stdout.fnmatch_lines(
        [
            "*= XFAILURES =*",
            "*_ test_xfail _*",
            "* @pytest.mark.xfail*",
            "* def test_xfail():*",
            "* a, b = 1, 2*",
            "> *assert a == b*",
            "E *assert 1 == 2*",
            "test_summary_xfail_tb.py:6: AssertionError*",
            "*= short test summary info =*",
            "XFAIL test_summary_xfail_tb.py::test_xfail",
            "*= 1 xfailed in * =*",
        ]
    )


def test_xfail_tb_line(pytester: Pytester) -> None:
    pytester.makepyfile(
        """
        import pytest

        @pytest.mark.xfail
        def test_xfail():
            a, b = 1, 2
            assert a == b
        """
    )
    result = pytester.runpytest("-rx", "--tb=line")
    result.stdout.fnmatch_lines(
        [
            "*= XFAILURES =*",
            "*test_xfail_tb_line.py:6: assert 1 == 2",
            "*= short test summary info =*",
            "XFAIL test_xfail_tb_line.py::test_xfail",
            "*= 1 xfailed in * =*",
        ]
    )


def test_summary_xpass_reason(pytester: Pytester) -> None:
    pytester.makepyfile(
        """
        import pytest

        @pytest.mark.xfail
        def test_pass():
            ...

        @pytest.mark.xfail(reason="foo")
        def test_reason():
            ...
        """
    )
    result = pytester.runpytest("-rX")
    expect1 = "XPASS test_summary_xpass_reason.py::test_pass"
    expect2 = "XPASS test_summary_xpass_reason.py::test_reason - foo"
    result.stdout.fnmatch_lines([expect1, expect2])
    assert result.stdout.lines.count(expect1) == 1
    assert result.stdout.lines.count(expect2) == 1


def test_xpass_output(pytester: Pytester) -> None:
    pytester.makepyfile(
        """
        import pytest

        @pytest.mark.xfail
        def test_pass():
            print('hi there')
        """
    )
    result = pytester.runpytest("-rX")
    result.stdout.fnmatch_lines(
        [
            "*= XPASSES =*",
            "*_ test_pass _*",
            "*- Captured stdout call -*",
            "*= short test summary info =*",
            "XPASS test_xpass_output.py::test_pass*",
            "*= 1 xpassed in * =*",
        ]
    )