Show "short test summary info" after tracebacks and warnings
parent da3f4045e7
commit 4e405dd9f9
@@ -489,8 +489,16 @@ def pytest_report_teststatus(report):
     Stops at first non-None result, see :ref:`firstresult` """


-def pytest_terminal_summary(terminalreporter, exitstatus):
-    """ add additional section in terminal summary reporting. """
+def pytest_terminal_summary(config, terminalreporter, exitstatus):
+    """Add a section to terminal summary reporting.
+
+    :param _pytest.config.Config config: pytest config object
+    :param _pytest.terminal.TerminalReporter terminalreporter: the internal terminal reporter object
+    :param int exitstatus: the exit status that will be reported back to the OS
+
+    .. versionadded:: 3.5
+        The ``config`` parameter.
+    """


 @hookspec(historic=True)
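For orientation: a plugin or conftest.py implements this hook by declaring any of the spec's parameters by name (pluggy matches hook arguments by keyword, so the parameter order above is not binding). A minimal sketch under that assumption; the section title and messages are illustrative, not part of this commit:

    # conftest.py -- hypothetical implementation of the updated hook
    def pytest_terminal_summary(terminalreporter, exitstatus, config):
        # TerminalReporter exposes write_sep()/write_line() for custom sections
        terminalreporter.write_sep("-", "my plugin summary")
        terminalreporter.write_line("session exit status: %d" % exitstatus)
        # the new ``config`` argument gives access to CLI/ini configuration
        if config.getini("xfail_strict"):
            terminalreporter.write_line("xfail_strict is enabled")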
@@ -1,7 +1,6 @@
 """ support for skip/xfail functions and markers. """
 from __future__ import absolute_import, division, print_function

-
 from _pytest.config import hookimpl
 from _pytest.mark import MarkInfo, MarkDecorator
 from _pytest.mark.evaluate import MarkEvaluator
@@ -14,11 +13,11 @@ def pytest_addoption(parser):
                     action="store_true", dest="runxfail", default=False,
                     help="run tests even if they are marked xfail")

-    parser.addini("xfail_strict", "default for the strict parameter of xfail "
-                                  "markers when not given explicitly (default: "
-                                  "False)",
+    parser.addini("xfail_strict",
+                  "default for the strict parameter of xfail "
+                  "markers when not given explicitly (default: False)",
                   default=False,
                   type="bool")


 def pytest_configure(config):
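For reference, the reflowed help text above documents the xfail_strict ini option; a minimal sketch of how a project would set it (standard pytest.ini location assumed):

    # pytest.ini
    [pytest]
    xfail_strict = true

With this enabled, an xfail-marked test that unexpectedly passes is reported as a failure rather than as XPASS.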
@@ -130,7 +129,7 @@ def pytest_runtest_makereport(item, call):
             rep.outcome = "passed"
             rep.wasxfail = rep.longrepr
     elif item.config.option.runxfail:
         pass   # don't interefere
     elif call.excinfo and call.excinfo.errisinstance(xfail.Exception):
         rep.wasxfail = "reason: " + call.excinfo.value.msg
         rep.outcome = "skipped"
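The last elif above handles imperative xfails: when a test raises xfail.Exception, the report outcome becomes "skipped" and a wasxfail reason string is attached. A minimal sketch of a test that exercises this path (the test name and body are illustrative):

    import pytest

    def test_backend_roundtrip():
        # raises xfail.Exception; the makereport hook above converts it to
        # outcome "skipped" with rep.wasxfail = "reason: backend not available"
        pytest.xfail("backend not available")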
@@ -160,6 +159,7 @@ def pytest_runtest_makereport(item, call):
         filename, line = item.location[:2]
         rep.longrepr = filename, line, reason

+
 # called by terminalreporter progress reporting


@@ -170,6 +170,7 @@ def pytest_report_teststatus(report):
         elif report.passed:
             return "xpassed", "X", ("XPASS", {'yellow': True})

+
 # called by the terminalreporter instance/plugin


@@ -233,7 +234,7 @@ def folded_skips(skipped):
         # TODO: revisit after marks scope would be fixed
         when = getattr(event, 'when', None)
         if when == 'setup' and 'skip' in keywords and 'pytestmark' not in keywords:
-            key = (key[0], None, key[2], )
+            key = (key[0], None, key[2])
         d.setdefault(key, []).append(event)
     values = []
     for key, events in d.items():
@@ -269,6 +270,7 @@ def show_skipped(terminalreporter, lines):
 def shower(stat, format):
     def show_(terminalreporter, lines):
         return show_simple(terminalreporter, lines, stat, format)

     return show_

+
@@ -480,16 +480,21 @@ class TerminalReporter(object):
                                  EXIT_NOTESTSCOLLECTED)
         if exitstatus in summary_exit_codes:
             self.config.hook.pytest_terminal_summary(terminalreporter=self,
+                                                     config=self.config,
                                                      exitstatus=exitstatus)
-            self.summary_errors()
-            self.summary_failures()
-            self.summary_warnings()
-            self.summary_passes()
         if exitstatus == EXIT_INTERRUPTED:
             self._report_keyboardinterrupt()
             del self._keyboardinterrupt_memo
         self.summary_stats()

+    @pytest.hookimpl(hookwrapper=True)
+    def pytest_terminal_summary(self):
+        self.summary_errors()
+        self.summary_failures()
+        yield
+        self.summary_warnings()
+        self.summary_passes()
+
     def pytest_keyboard_interrupt(self, excinfo):
         self._keyboardinterrupt_memo = excinfo.getrepr(funcargs=True)

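This hunk is the heart of the change: the four summary calls move out of the inline call sequence into a hookwrapper implementation of pytest_terminal_summary, so errors and failures print before the sections contributed by other hook implementations (such as the short test summary emitted by skipping.py), while warnings and passes print after the yield. A standalone pluggy sketch of that wrapper ordering (all names here are illustrative, not pytest's own):

    # summary_order.py -- demonstrates hookwrapper ordering with pluggy
    import pluggy

    hookspec = pluggy.HookspecMarker("demo")
    hookimpl = pluggy.HookimplMarker("demo")


    class Spec(object):
        @hookspec
        def terminal_summary(self):
            """Emit terminal summary sections."""


    class CoreReporter(object):
        @hookimpl(hookwrapper=True)
        def terminal_summary(self):
            print("FAILURES (tracebacks)")    # before other implementations
            yield
            print("warnings summary")         # after other implementations
            print("passes summary")


    class SkippingPlugin(object):
        @hookimpl
        def terminal_summary(self):
            print("short test summary info")  # lands between the two halves


    pm = pluggy.PluginManager("demo")
    pm.add_hookspecs(Spec)
    pm.register(CoreReporter())
    pm.register(SkippingPlugin())
    pm.hook.terminal_summary()
    # Output order: FAILURES (tracebacks), short test summary info,
    # warnings summary, passes summary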
@@ -0,0 +1 @@
+The *short test summary info* section now is displayed after tracebacks and warnings in the terminal.
@@ -1065,3 +1065,18 @@ def test_mark_xfail_item(testdir):
     assert not failed
     xfailed = [r for r in skipped if hasattr(r, 'wasxfail')]
     assert xfailed
+
+
+def test_summary_list_after_errors(testdir):
+    """Ensure the list of errors/fails/xfails/skips appear after tracebacks in terminal reporting."""
+    testdir.makepyfile("""
+        import pytest
+        def test_fail():
+            assert 0
+    """)
+    result = testdir.runpytest('-ra')
+    result.stdout.fnmatch_lines([
+        '=* FAILURES *=',
+        '*= short test summary info =*',
+        'FAIL test_summary_list_after_errors.py::test_fail',
+    ])
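Note the assertion order in the new test: with -ra (show extra summary info for all outcomes except passed), the FAILURES traceback block must match before the short test summary info header, which is precisely the reordering this commit introduces.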