fix issue91: introduce new py.test.xfail(reason) helper
to imperatively mark a test as expected to fail. Can be used from within
setup and test functions. This is especially useful for parametrized tests
when certain configurations are expected to fail; in that case the
declarative @py.test.mark.xfail approach cannot be used, as it would mark
all configurations as xfail.

--HG--
branch : trunk
This commit is contained in:
parent eac0345689
commit 925f75088d
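For illustration of the use case described in the commit message, an imperatively xfailed configuration inside a parametrized test might look like the sketch below. This example is not part of the commit; the "serializer" funcarg and its attributes are hypothetical.

    # Illustrative sketch only -- "serializer" is a hypothetical parametrized funcarg.
    import py

    def test_roundtrip(serializer):
        if serializer.name == "legacy":
            # only this configuration is known to fail; mark it imperatively
            py.test.xfail("legacy serializer cannot roundtrip unicode yet")
        assert serializer.loads(serializer.dumps(u"hello")) == u"hello"

Unlike a @py.test.mark.xfail decorator on the test function, the call only affects the configuration that actually reaches it.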
@@ -9,6 +9,14 @@ Changes between 1.3.0 and 1.3.1
- fix issue95: late-import zlib so that it's not required
  for general py.test startup.

- fix issue91: introduce new py.test.xfail(reason) helper
  to imperatively mark a test as expected to fail. Can
  be used from within setup and test functions. This is
  useful especially for parametrized tests when certain
  configurations are expected-to-fail. In this case the
  declarative approach with the @py.test.mark.xfail cannot
  be used as it would mark all configurations as xfail.

- make py.test.cmdline.main() return the exitstatus
  instead of raising (which is still done by py.cmdline.pytest())
  and make it so that py.test.cmdline.main() can be called
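The changelog also notes that py.test.cmdline.main() now returns the exit status instead of raising. A minimal sketch of calling it programmatically follows; the argument list is an assumption for illustration and is not taken from this diff.

    # Sketch: run an in-process test session and act on the returned exit status.
    import sys
    import py

    exitstatus = py.test.cmdline.main(["-x", "testing/"])  # returns the status instead of raising
    sys.exit(exitstatus)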
@@ -10,6 +10,7 @@ def pytest_namespace():
        'skip' : skip,
        'importorskip' : importorskip,
        'fail' : fail,
        'xfail' : xfail,
        'exit' : exit,
    }

@@ -295,6 +296,10 @@ class Failed(OutcomeException):
    """ raised from an explicit call to py.test.fail() """
    __module__ = 'builtins'

class XFailed(OutcomeException):
    """ raised from an explicit call to py.test.xfail() """
    __module__ = 'builtins'

class ExceptionFailure(Failed):
    """ raised by py.test.raises on an exception-assertion mismatch. """
    def __init__(self, expr, expected, msg=None, excinfo=None):
@@ -335,6 +340,14 @@ def fail(msg=""):

fail.Exception = Failed

def xfail(reason=""):
    """ xfail an executing test or setup functions, taking an optional
    reason string.
    """
    __tracebackhide__ = True
    raise XFailed(reason)
xfail.Exception = XFailed

def raises(ExpectedException, *args, **kwargs):
    """ if args[0] is callable: raise AssertionError if calling it with
    the remaining arguments does not raise the expected exception.
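Like py.test.fail, the new helper exposes its outcome exception as an attribute (xfail.Exception = XFailed); the report hook in the next hunk relies on this via call.excinfo.errisinstance(py.test.xfail.Exception). As an illustration only (not part of the commit), that attribute also lets a test assert that some code path triggers an imperative xfail:

    # Illustrative sketch: py.test.raises checks that calling py.test.xfail
    # raises the XFailed outcome, reachable as py.test.xfail.Exception.
    import py

    def test_xfail_exception_attribute():
        py.test.raises(py.test.xfail.Exception, py.test.xfail, "demo reason")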
@@ -185,9 +185,17 @@ def pytest_runtest_setup(item):
def pytest_runtest_makereport(__multicall__, item, call):
    if not isinstance(item, py.test.collect.Function):
        return
    if not (call.excinfo and
        call.excinfo.errisinstance(py.test.xfail.Exception)):
        evalxfail = getattr(item, '_evalxfail', None)
        if not evalxfail:
            return
    if call.excinfo and call.excinfo.errisinstance(py.test.xfail.Exception):
        rep = __multicall__.execute()
        rep.keywords['xfail'] = "reason: " + call.excinfo.value.msg
        rep.skipped = True
        rep.failed = False
        return rep
    if call.when == "setup":
        rep = __multicall__.execute()
        if rep.skipped and evalxfail.istrue():
@@ -169,6 +169,43 @@ class TestXFail:
        ])
        assert result.ret == 1

    def test_xfail_imperative(self, testdir):
        p = testdir.makepyfile("""
            import py
            def test_this():
                py.test.xfail("hello")
        """)
        result = testdir.runpytest(p)
        result.stdout.fnmatch_lines([
            "*1 xfailed*",
        ])
        result = testdir.runpytest(p, "-rx")
        result.stdout.fnmatch_lines([
            "*XFAIL*test_this*reason:*hello*",
        ])

    def test_xfail_imperative_in_setup_function(self, testdir):
        p = testdir.makepyfile("""
            import py
            def setup_function(function):
                py.test.xfail("hello")

            def test_this():
                assert 0
        """)
        result = testdir.runpytest(p)
        result.stdout.fnmatch_lines([
            "*1 xfailed*",
        ])
        result = testdir.runpytest(p, "-rx")
        result.stdout.fnmatch_lines([
            "*XFAIL*test_this*reason:*hello*",
        ])


class TestSkipif:
    def test_skipif_conditional(self, testdir):
        item = testdir.getitem("""