fix issue102 by introducing a --maxfail=NUM option;
also print an informative line about "stopped/interrupted" test runs near the end.

--HG--
branch : trunk
parent fbcf9ec543
commit c953c7d313
@@ -12,6 +12,12 @@ New features
   declarative approach with the @py.test.mark.xfail cannot
   be used as it would mark all configurations as xfail.
 
+- issue102: introduce new --maxfail=NUM option to stop
+  test runs after NUM failures. This is a generalization
+  of the '-x' or '--exitfirst' option which is now equivalent
+  to '--maxfail=1'. Both '-x' and '--maxfail' will
+  now also print a line near the end indicating the Interruption.
+
 - issue89: allow py.test.mark decorators to be used on classes
   (class decorators were introduced with python2.6) and
   also allow to have multiple markers applied at class/module level
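A minimal usage sketch for the changelog entry above (not part of this commit): it assumes the programmatic py.test.cmdline.main() entry point of the py 1.x series, and the test file name is hypothetical.

    import py

    if __name__ == "__main__":
        # stop the run after two failing tests; '-x' is now simply '--maxfail=1'
        py.test.cmdline.main(["--maxfail=2", "test_example.py"])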
@@ -41,6 +47,7 @@ Fixes / Maintenance
 - improved and unified reporting for "--tb=short" option
 - Errors during test module imports are much shorter, (using --tb=short style)
 - raises shows shorter more relevant tracebacks
+- --fulltrace now more systematically makes traces longer / inhibits cutting
 
 - improve support for raises and other dynamically compiled code by
   manipulating python's linecache.cache instead of the previous
@@ -62,9 +62,12 @@ def pytest_report_iteminfo(item):
 
 def pytest_addoption(parser):
     group = parser.getgroup("general", "running and selection options")
-    group._addoption('-x', '--exitfirst',
-               action="store_true", dest="exitfirst", default=False,
+    group._addoption('-x', '--exitfirst', action="store_true", default=False,
+               dest="exitfirst",
                help="exit instantly on first error or failed test."),
+    group._addoption('--maxfail', metavar="num",
+               action="store", type="int", dest="maxfail", default=0,
+               help="exit after first num failures or errors.")
     group._addoption('-k',
                action="store", dest="keyword", default='',
                help="only run test items matching the given "
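A side note on the option wiring above: other plugins can read the configured limit through the config object. A small sketch, using only config.getvalue() and the pytest_configure hook as they appear in this diff; the printing is purely illustrative.

    def pytest_configure(config):
        maxfail = config.getvalue("maxfail")  # 0 (the default) means: never stop early
        if maxfail:
            print("this run will stop after %d failures" % maxfail)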
@@ -89,6 +92,9 @@ def pytest_addoption(parser):
 
 def pytest_configure(config):
     setsession(config)
+    # compat
+    if config.getvalue("exitfirst"):
+        config.option.maxfail = 1
 
 def setsession(config):
     val = config.getvalue
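The compat block above turns '-x' into a plain alias for '--maxfail=1'. A sketch of how that equivalence could be exercised with the same testdir/inline_runsource helpers used elsewhere in this diff; this test is not part of the commit.

    def test_exitfirst_is_maxfail_one(testdir):
        # with '-x' the session should stop after the first failing test
        reprec = testdir.inline_runsource("""
            def test_one(): assert 0
            def test_two(): assert 0
        """, '-x')
        passed, skipped, failed = reprec.countoutcomes()
        assert failed == 1
        assert passed == skipped == 0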
@@ -45,7 +45,7 @@ def pytest_configure(__multicall__, config):
     options = [opt for opt in options if opt._long_opts]
     options.sort(key=lambda x: x._long_opts)
     for opt in options:
-        if not opt._long_opts:
+        if not opt._long_opts or not opt.dest:
             continue
         optstrings = list(opt._long_opts) # + list(opt._short_opts)
         optstrings = filter(None, optstrings)
@@ -312,9 +312,11 @@ class TerminalReporter:
         self._keyboardinterrupt_memo = excinfo.getrepr(funcargs=True)
 
     def _report_keyboardinterrupt(self):
-        self.write_sep("!", "KEYBOARD INTERRUPT")
         excrepr = self._keyboardinterrupt_memo
-        if self.config.option.verbose:
+        msg = excrepr.reprcrash.message
+        self.write_sep("!", msg)
+        if "KeyboardInterrupt" in msg:
+            if self.config.getvalue("fulltrace"):
                 excrepr.toterminal(self._tw)
             else:
                 excrepr.reprcrash.toterminal(self._tw)
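Distilled from the hunk above: the separator line is now taken from the crash message itself, so a --maxfail stop and a real Ctrl-C share one reporting path, and the full traceback is only rendered for an actual KeyboardInterrupt when --fulltrace is given. A standalone sketch of that branch; excrepr stands for the object returned by excinfo.getrepr(funcargs=True), the other names are illustrative.

    def report_interrupt(excrepr, fulltrace, write_sep, tw):
        msg = excrepr.reprcrash.message
        write_sep("!", msg)
        if "KeyboardInterrupt" in msg:
            if fulltrace:
                excrepr.toterminal(tw)            # long form: full traceback
            else:
                excrepr.reprcrash.toterminal(tw)  # short form: one-line crash info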
@@ -20,11 +20,14 @@ Collector = py.test.collect.Collector
 
 class Session(object):
     nodeid = ""
+    class Interrupted(KeyboardInterrupt):
+        """ signals an interrupted test run. """
+
     def __init__(self, config):
         self.config = config
         self.pluginmanager = config.pluginmanager # shortcut
         self.pluginmanager.register(self)
-        self._testsfailed = False
+        self._testsfailed = 0
         self._nomatch = False
         self.shouldstop = False
 
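Because the nested Interrupted class derives from KeyboardInterrupt, existing interrupt handling (including the reporter path shown earlier) also catches a --maxfail stop without extra plumbing. A tiny self-contained sketch of that relationship; the message string mirrors the one built further down in the session code.

    class Interrupted(KeyboardInterrupt):
        """ signals an interrupted test run. """

    try:
        raise Interrupted("stopping after 2 failures")
    except KeyboardInterrupt as exc:
        # an Interrupted stop is caught by plain KeyboardInterrupt handlers
        print("! %s !" % exc.args[0])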
@@ -52,7 +55,7 @@ class Session(object):
             yield x
         self.config.hook.pytest_collectreport(report=rep)
         if self.shouldstop:
-            break
+            raise self.Interrupted(self.shouldstop)
 
     def filteritems(self, colitems):
         """ return items to process (some may be deselected)"""
@@ -86,9 +89,11 @@ class Session(object):
 
     def pytest_runtest_logreport(self, report):
         if report.failed:
-            self._testsfailed = True
-            if self.config.option.exitfirst:
-                self.shouldstop = True
+            self._testsfailed += 1
+            maxfail = self.config.getvalue("maxfail")
+            if maxfail and self._testsfailed >= maxfail:
+                self.shouldstop = "stopping after %d failures" % (
+                    self._testsfailed)
     pytest_collectreport = pytest_runtest_logreport
 
     def sessionfinishes(self, exitstatus):
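The hunk above is the heart of the feature: every failing report increments a counter and, once the configured limit is reached, shouldstop carries a human-readable reason. The same logic distilled into a plain function, for illustration only.

    def stop_reason(testsfailed, maxfail):
        # maxfail == 0 (the default) disables the limit entirely
        if maxfail and testsfailed >= maxfail:
            return "stopping after %d failures" % testsfailed
        return False

    assert stop_reason(1, 0) is False      # no limit configured
    assert stop_reason(1, 2) is False      # limit not yet reached
    assert stop_reason(2, 2) == "stopping after 2 failures"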
@@ -122,7 +127,8 @@ class Session(object):
 
     def _mainloop(self, colitems):
         for item in self.collect(colitems):
-            if self.shouldstop:
-                break
             if not self.config.option.collectonly:
                 item.config.hook.pytest_runtest_protocol(item=item)
+            if self.shouldstop:
+                raise self.Interrupted(self.shouldstop)
+
@@ -347,7 +347,7 @@ class TestCaptureFuncarg:
         """)
         result = testdir.runpytest(p)
         result.stdout.fnmatch_lines([
-            "*KEYBOARD INTERRUPT*"
+            "*KeyboardInterrupt*"
         ])
         assert result.ret == 2
 
@@ -17,9 +17,10 @@ def basic_run_report(item):
     return runner.call_and_report(item, "call", log=False)
 
 class Option:
-    def __init__(self, verbose=False, dist=None):
+    def __init__(self, verbose=False, dist=None, fulltrace=False):
         self.verbose = verbose
         self.dist = dist
+        self.fulltrace = fulltrace
     def _getcmdargs(self):
         l = []
         if self.verbose:
@@ -27,6 +28,8 @@ class Option:
         if self.dist:
             l.append('--dist=%s' % self.dist)
             l.append('--tx=popen')
+        if self.fulltrace:
+            l.append('--fulltrace')
         return l
     def _getcmdstring(self):
         return " ".join(self._getcmdargs())
@@ -35,6 +38,7 @@ def pytest_generate_tests(metafunc):
     if "option" in metafunc.funcargnames:
         metafunc.addcall(id="default", param=Option(verbose=False))
         metafunc.addcall(id="verbose", param=Option(verbose=True))
+        metafunc.addcall(id="fulltrace", param=Option(fulltrace=True))
         if not getattr(metafunc.function, 'nodist', False):
             metafunc.addcall(id="verbose-dist",
                 param=Option(dist='each', verbose=True))
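The new 'fulltrace' addcall above means every test that accepts the option argument also runs with --fulltrace on the command line. A stripped-down stand-in for the conftest helper, re-declared here purely to illustrate how the flag expands into arguments; the real class also handles verbose and dist.

    class Option:
        def __init__(self, verbose=False, dist=None, fulltrace=False):
            self.verbose = verbose
            self.dist = dist
            self.fulltrace = fulltrace
        def _getcmdargs(self):
            l = []
            if self.fulltrace:
                l.append('--fulltrace')
            return l

    assert Option(fulltrace=True)._getcmdargs() == ['--fulltrace']
    assert Option()._getcmdargs() == []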
@@ -284,11 +288,28 @@ class TestTerminal:
             "E assert 0",
             "*_keyboard_interrupt.py:6: KeyboardInterrupt*",
         ])
-        if option.verbose:
+        if option.fulltrace:
             result.stdout.fnmatch_lines([
                 "*raise KeyboardInterrupt # simulating the user*",
             ])
-        result.stdout.fnmatch_lines(['*KEYBOARD INTERRUPT*'])
+        result.stdout.fnmatch_lines(['*KeyboardInterrupt*'])
 
+    def test_maxfailures(self, testdir, option):
+        p = testdir.makepyfile("""
+            def test_1():
+                assert 0
+            def test_2():
+                assert 0
+            def test_3():
+                assert 0
+        """)
+        result = testdir.runpytest("--maxfail=2", *option._getcmdargs())
+        result.stdout.fnmatch_lines([
+            "*def test_1():*",
+            "*def test_2():*",
+            "*!! Interrupted: stopping after 2 failures*!!*",
+            "*2 failed*",
+        ])
+
     def test_pytest_report_header(self, testdir):
         testdir.makeconftest("""
@@ -86,6 +86,16 @@ class SessionTests:
         assert failed == 1
         assert passed == skipped == 0
 
+    def test_maxfail(self, testdir):
+        reprec = testdir.inline_runsource("""
+            def test_one(): assert 0
+            def test_two(): assert 0
+            def test_three(): assert 0
+        """, '--maxfail=2')
+        passed, skipped, failed = reprec.countoutcomes()
+        assert failed == 2
+        assert passed == skipped == 0
+
     def test_broken_repr(self, testdir):
         p = testdir.makepyfile("""
             import py
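A complementary sketch of the default behaviour, not part of the commit: with maxfail left at 0 the run is not cut short and all failures are reported. It assumes inline_runsource may also be called without extra command-line options.

    def test_no_maxfail_runs_everything(testdir):
        reprec = testdir.inline_runsource("""
            def test_one(): assert 0
            def test_two(): assert 0
            def test_three(): assert 0
        """)
        passed, skipped, failed = reprec.countoutcomes()
        assert failed == 3
        assert passed == skipped == 0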