add --report cmdline option, shift refined xfailed and skipped reporting to skipping plugin
--HG-- branch : trunk
This commit is contained in:
parent
eab7e039eb
commit
3795b08e95
|
@ -132,9 +132,19 @@ def pytest_report_teststatus(report):
|
|||
|
||||
# called by the terminalreporter instance/plugin
def pytest_terminal_summary(terminalreporter):
    """Emit the xfailed and skipped report sections at the end of a run."""
    for show_section in (show_xfailed, show_skipped):
        show_section(terminalreporter)
|
||||
|
||||
def show_xfailed(terminalreporter):
|
||||
tr = terminalreporter
|
||||
xfailed = tr.stats.get("xfailed")
|
||||
if xfailed:
|
||||
if not tr.hasopt('xfailed'):
|
||||
if tr.config.getvalue("verbose"):
|
||||
tr.write_line(
|
||||
"%d expected failures, use --report=xfailed for more info" %
|
||||
len(xfailed))
|
||||
return
|
||||
tr.write_sep("_", "expected failures")
|
||||
for rep in xfailed:
|
||||
entry = rep.longrepr.reprcrash
|
||||
|
@ -178,3 +188,29 @@ def evalexpression(item, keyword):
|
|||
result = expr
|
||||
return expr, result
|
||||
|
||||
def folded_skips(skipped):
    """Collapse skip reports that share one crash location.

    Reports are grouped by the ``(path, lineno, message)`` of their
    ``longrepr.reprcrash``; the result is a list of
    ``(count, path, lineno, message)`` tuples, one per distinct location.
    """
    by_location = {}
    for report in skipped:
        crash = report.longrepr.reprcrash
        location = (crash.path, crash.lineno, crash.message)
        by_location.setdefault(location, []).append(report)
    return [(len(reports),) + location
            for location, reports in by_location.items()]
|
||||
|
||||
def show_skipped(terminalreporter):
    """Write the skipped-test summary section to the terminal.

    Without ``--report=skipped`` the full section is suppressed; verbose
    runs then only get a one-line hint on how to enable it.
    """
    tr = terminalreporter
    skipped = tr.stats.get('skipped', [])
    if not skipped:
        return
    if not tr.hasopt('skipped'):
        # Section not requested: at most print a hint, and only in verbose mode.
        if tr.config.getvalue("verbose"):
            tr.write_line(
                "%d skipped tests, use --report=skipped for more info" %
                len(skipped))
        return
    fskips = folded_skips(skipped)
    if fskips:
        tr.write_sep("_", "skipped test summary")
        for num, fspath, lineno, reason in fskips:
            tr._tw.line("%s:%d: [%d] %s" % (fspath, lineno, num, reason))
|
||||
|
|
|
@ -23,6 +23,10 @@ def pytest_addoption(parser):
|
|||
group.addoption('--debug',
|
||||
action="store_true", dest="debug", default=False,
|
||||
help="generate and show debugging information.")
|
||||
group.addoption('--report',
|
||||
action="store", dest="report", default=None, metavar="opts",
|
||||
help="comma separated reporting options")
|
||||
|
||||
|
||||
def pytest_configure(config):
|
||||
if config.option.collectonly:
|
||||
|
@ -38,6 +42,18 @@ def pytest_configure(config):
|
|||
setattr(reporter._tw, name, getattr(config, attr))
|
||||
config.pluginmanager.register(reporter, 'terminalreporter')
|
||||
|
||||
def getreportopt(optvalue):
    """Parse a comma-separated ``--report`` value into a name->bool dict.

    A ``no`` prefix negates an option, e.g. ``"skipped,noxfailed"``
    yields ``{'skipped': True, 'xfailed': False}``.  ``None`` or an
    empty string yields an empty dict.
    """
    parsed = {}
    if not optvalue:
        return parsed
    for raw in optvalue.split(","):
        name = raw.strip()
        enabled = not name.startswith("no")
        if not enabled:
            name = name[2:]
        parsed[name] = enabled
    return parsed
|
||||
|
||||
class TerminalReporter:
|
||||
def __init__(self, config, file=None):
|
||||
self.config = config
|
||||
|
@ -48,6 +64,10 @@ class TerminalReporter:
|
|||
self._tw = py.io.TerminalWriter(file)
|
||||
self.currentfspath = None
|
||||
self.gateway2info = {}
|
||||
self._reportopt = getreportopt(config.getvalue('report'))
|
||||
|
||||
def hasopt(self, name):
|
||||
return self._reportopt.get(name, False)
|
||||
|
||||
def write_fspath_result(self, fspath, res):
|
||||
fspath = self.curdir.bestrelpath(fspath)
|
||||
|
@ -254,7 +274,6 @@ class TerminalReporter:
|
|||
if exitstatus in (0, 1, 2):
|
||||
self.summary_errors()
|
||||
self.summary_failures()
|
||||
self.summary_skips()
|
||||
self.config.hook.pytest_terminal_summary(terminalreporter=self)
|
||||
if exitstatus == 2:
|
||||
self._report_keyboardinterrupt()
|
||||
|
@ -389,14 +408,6 @@ class TerminalReporter:
|
|||
self.write_sep("=", "%d tests deselected by %r" %(
|
||||
len(self.stats['deselected']), self.config.option.keyword), bold=True)
|
||||
|
||||
def summary_skips(self):
|
||||
if 'skipped' in self.stats:
|
||||
if 'failed' not in self.stats: # or self.config.option.showskipsummary:
|
||||
fskips = folded_skips(self.stats['skipped'])
|
||||
if fskips:
|
||||
self.write_sep("_", "skipped test summary")
|
||||
for num, fspath, lineno, reason in fskips:
|
||||
self._tw.line("%s:%d: [%d] %s" %(fspath, lineno, num, reason))
|
||||
|
||||
class CollectonlyReporter:
|
||||
INDENT = " "
|
||||
|
@ -435,16 +446,6 @@ class CollectonlyReporter:
|
|||
for rep in self._failed:
|
||||
rep.toterminal(self.out)
|
||||
|
||||
def folded_skips(skipped):
    """Group skip reports by their crash location.

    Returns ``(count, path, lineno, message)`` tuples, one per distinct
    ``longrepr.reprcrash`` location among the given reports.
    """
    grouped = {}
    for report in skipped:
        crash = report.longrepr.reprcrash
        key = (crash.path, crash.lineno, crash.message)
        grouped.setdefault(key, []).append(report)
    folded = []
    for key, reports in grouped.items():
        folded.append((len(reports),) + key)
    return folded
|
||||
|
||||
def repr_pythonversion(v=None):
|
||||
if v is None:
|
||||
|
|
|
@ -1,6 +1,8 @@
|
|||
Changes between 1.0.2 and '1.1.0b1'
|
||||
=====================================
|
||||
|
||||
* add --report command line option that allows controlling the display of skipped/xfailed sections
|
||||
|
||||
* generalized skipping: a new way to mark python functions with skipif or xfail
|
||||
at function, class and module level based on platform or sys-module attributes.
|
||||
|
||||
|
|
|
@ -1,5 +1,28 @@
|
|||
import py
|
||||
|
||||
def test_xfail_not_report_default(testdir):
    """Without --report=xfailed, verbose runs only show a short hint."""
    testfile = testdir.makepyfile(test_one="""
        import py
        @py.test.mark.xfail
        def test_this():
            assert 0
    """)
    result = testdir.runpytest(testfile, '-v')
    result.stdout.fnmatch_lines([
        "*1 expected failures*--report=xfailed*",
    ])
|
||||
|
||||
def test_skip_not_report_default(testdir):
    """Without --report=skipped, verbose runs only show a short hint."""
    testfile = testdir.makepyfile(test_one="""
        import py
        def test_this():
            py.test.skip("hello")
    """)
    result = testdir.runpytest(testfile, '-v')
    result.stdout.fnmatch_lines([
        "*1 skipped*--report=skipped*",
    ])
|
||||
|
||||
def test_xfail_decorator(testdir):
|
||||
p = testdir.makepyfile(test_one="""
|
||||
import py
|
||||
|
@ -11,7 +34,7 @@ def test_xfail_decorator(testdir):
|
|||
def test_that():
|
||||
assert 1
|
||||
""")
|
||||
result = testdir.runpytest(p)
|
||||
result = testdir.runpytest(p, '--report=xfailed')
|
||||
extra = result.stdout.fnmatch_lines([
|
||||
"*expected failures*",
|
||||
"*test_one.test_this*test_one.py:4*",
|
||||
|
@ -28,7 +51,7 @@ def test_xfail_at_module(testdir):
|
|||
def test_intentional_xfail():
|
||||
assert 0
|
||||
""")
|
||||
result = testdir.runpytest(p)
|
||||
result = testdir.runpytest(p, '--report=xfailed')
|
||||
extra = result.stdout.fnmatch_lines([
|
||||
"*expected failures*",
|
||||
"*test_intentional_xfail*:4*",
|
||||
|
@ -43,7 +66,7 @@ def test_skipif_decorator(testdir):
|
|||
def test_that():
|
||||
assert 0
|
||||
""")
|
||||
result = testdir.runpytest(p)
|
||||
result = testdir.runpytest(p, '--report=skipped')
|
||||
extra = result.stdout.fnmatch_lines([
|
||||
"*Skipped*platform*",
|
||||
"*1 skipped*"
|
||||
|
@ -99,3 +122,60 @@ def test_evalexpression_cls_config_example(testdir):
|
|||
x, y = evalexpression(item, 'skipif')
|
||||
assert x == 'config._hackxyz'
|
||||
assert y == 3
|
||||
|
||||
def test_skip_reasons_folding():
    """folded_skips merges reports that share a single crash location."""
    from _py.test.plugin import pytest_runner as runner
    from _py.test.plugin.pytest_skipping import folded_skips

    class longrepr:
        class reprcrash:
            path = 'xyz'
            lineno = 3
            message = "justso"

    collect_report = runner.CollectReport(None, None)
    collect_report.when = "execute"
    collect_report.skipped = True
    collect_report.longrepr = longrepr

    item_report = runner.ItemTestReport(None, excinfo=longrepr)
    item_report.skipped = True

    folded = folded_skips([collect_report, item_report])
    assert len(folded) == 1
    num, fspath, lineno, reason = folded[0]
    assert num == 2
    assert fspath == longrepr.reprcrash.path
    assert lineno == longrepr.reprcrash.lineno
    assert reason == longrepr.reprcrash.message
|
||||
|
||||
def test_skipped_reasons_functional(testdir):
    """--report=skipped prints one folded summary line per skip location."""
    test_one_source = """
        from conftest import doskip
        def setup_function(func):
            doskip()
        def test_func():
            pass
        class TestClass:
            def test_method(self):
                doskip()
    """
    test_two_source = """
        from conftest import doskip
        doskip()
    """
    conftest_source = """
        import py
        def doskip():
            py.test.skip('test')
    """
    testdir.makepyfile(test_one=test_one_source,
                       test_two=test_two_source,
                       conftest=conftest_source)
    result = testdir.runpytest('--report=skipped')
    result.stdout.fnmatch_lines([
        "*test_one.py ss",
        "*test_two.py S",
        "___* skipped test summary *_",
        "*conftest.py:3: *3* Skipped: 'test'",
    ])
    assert result.ret == 0
|
||||
|
||||
|
|
|
@ -14,7 +14,7 @@ except ImportError:
|
|||
# ===============================================================================
|
||||
|
||||
from _py.test.plugin.pytest_terminal import TerminalReporter, \
|
||||
CollectonlyReporter, repr_pythonversion, folded_skips
|
||||
CollectonlyReporter, repr_pythonversion, getreportopt
|
||||
from _py.test.plugin import pytest_runner as runner
|
||||
|
||||
def basic_run_report(item):
|
||||
|
@ -289,28 +289,6 @@ class TestTerminal:
|
|||
])
|
||||
result.stdout.fnmatch_lines(['*KEYBOARD INTERRUPT*'])
|
||||
|
||||
def test_skip_reasons_folding(self):
|
||||
class longrepr:
|
||||
class reprcrash:
|
||||
path = 'xyz'
|
||||
lineno = 3
|
||||
message = "justso"
|
||||
|
||||
ev1 = runner.CollectReport(None, None)
|
||||
ev1.when = "execute"
|
||||
ev1.skipped = True
|
||||
ev1.longrepr = longrepr
|
||||
|
||||
ev2 = runner.ItemTestReport(None, excinfo=longrepr)
|
||||
ev2.skipped = True
|
||||
|
||||
l = folded_skips([ev1, ev2])
|
||||
assert len(l) == 1
|
||||
num, fspath, lineno, reason = l[0]
|
||||
assert num == 2
|
||||
assert fspath == longrepr.reprcrash.path
|
||||
assert lineno == longrepr.reprcrash.lineno
|
||||
assert reason == longrepr.reprcrash.message
|
||||
|
||||
class TestCollectonly:
|
||||
def test_collectonly_basic(self, testdir, linecomp):
|
||||
|
@ -473,37 +451,6 @@ class TestFixtureReporting:
|
|||
])
|
||||
|
||||
class TestTerminalFunctional:
|
||||
def test_skipped_reasons(self, testdir):
|
||||
testdir.makepyfile(
|
||||
test_one="""
|
||||
from conftest import doskip
|
||||
def setup_function(func):
|
||||
doskip()
|
||||
def test_func():
|
||||
pass
|
||||
class TestClass:
|
||||
def test_method(self):
|
||||
doskip()
|
||||
""",
|
||||
test_two = """
|
||||
from conftest import doskip
|
||||
doskip()
|
||||
""",
|
||||
conftest = """
|
||||
import py
|
||||
def doskip():
|
||||
py.test.skip('test')
|
||||
"""
|
||||
)
|
||||
result = testdir.runpytest()
|
||||
extra = result.stdout.fnmatch_lines([
|
||||
"*test_one.py ss",
|
||||
"*test_two.py S",
|
||||
"___* skipped test summary *_",
|
||||
"*conftest.py:3: *3* Skipped: 'test'",
|
||||
])
|
||||
assert result.ret == 0
|
||||
|
||||
def test_deselected(self, testdir):
|
||||
testpath = testdir.makepyfile("""
|
||||
def test_one():
|
||||
|
@ -613,6 +560,27 @@ class TestTerminalFunctional:
|
|||
])
|
||||
assert result.ret == 1
|
||||
|
||||
|
||||
def test_getreportopt():
    """getreportopt parses comma-separated values and the 'no' prefix."""
    assert getreportopt(None) == {}
    assert getreportopt("hello") == {"hello": True}
    assert getreportopt("hello, world") == {"hello": True, "world": True}
    assert getreportopt("nohello") == {"hello": False}
|
||||
|
||||
def test_terminalreporter_reportopt_conftestsetting(testdir):
    """A conftest-level option_report setting feeds TerminalReporter.hasopt."""
    testdir.makeconftest("option_report = 'skipped'")
    testdir.makepyfile("""
        def pytest_funcarg__tr(request):
            tr = request.config.pluginmanager.getplugin("terminalreporter")
            return tr
        def test_opt(tr):
            assert tr.hasopt('skipped')
            assert not tr.hasopt('qwe')
    """)
    result = testdir.runpytest()
    assert result.stdout.fnmatch_lines([
        "*1 passed*"
    ])
|
||||
def test_trace_reporting(self, testdir):
|
||||
result = testdir.runpytest("--trace")
|
||||
assert result.stdout.fnmatch_lines([
|
||||
|
|
Loading…
Reference in New Issue