unify handling of report characters across resultlog/junitxml plugins

--HG--
branch : trunk
parent 925f75088d
commit 93f91c9607

--- a/CHANGELOG
+++ b/CHANGELOG
@@ -17,6 +17,17 @@ Changes between 1.3.0 and 1.3.1
   declarative approach with the @py.test.mark.xfail cannot
   be used as it would mark all configurations as xfail.
 
+- improve and refine letter reporting in the progress bar:
+  .  pass
+  f  failed test
+  s  skipped tests (reminder: use for dependency/platform mismatch only)
+  x  xfailed test (test that was expected to fail)
+  X  xpassed test (test that was expected to fail but passed)
+
+  You can use any combination of 'fsxX' with the '-r' extended
+  reporting option. The xfail/xpass results will show up as
+  skipped tests in the junitxml output.
+
 - make py.test.cmdline.main() return the exitstatus
   instead of raising (which is still done by py.cmdline.pytest())
   and make it so that py.test.cmdline.main() can be called
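A quick usage sketch (the test file name is made up, and it assumes py.test.cmdline.main() accepts an argument list): combining the letters above with '-r' requests extra summary lines for those outcomes, and main() now hands back the exit status instead of raising:

    import py
    # ask for summary lines on (f)ailed, (s)kipped, (x)failed and (X)passed tests
    exitstatus = py.test.cmdline.main(["-rfsxX", "test_example.py"])
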
@@ -56,10 +56,15 @@ class LogXML(object):
     def append_failure(self, report):
         self._opentestcase(report)
         #msg = str(report.longrepr.reprtraceback.extraline)
-        self.appendlog('<failure message="test failure">%s</failure>',
-            report.longrepr)
-        self._closetestcase()
-        self.failed += 1
+        if "xfail" in report.keywords:
+            self.appendlog(
+                '<skipped message="xfail-marked test passes unexpectedly"/>')
+            self.skipped += 1
+        else:
+            self.appendlog('<failure message="test failure">%s</failure>',
+                report.longrepr)
+            self.failed += 1
+        self._closetestcase()
 
     def _opentestcase_collectfailure(self, report):
         node = report.collector
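With this change an xfail-marked test that passes unexpectedly is recorded as a skip rather than a failure in the junitxml file; the resulting entry looks roughly like this (classname and name are placeholders):

    <testcase classname="..." name="test_xpass">
        <skipped message="xfail-marked test passes unexpectedly"/>
    </testcase>
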
@@ -95,6 +100,11 @@ class LogXML(object):
 
     def append_skipped(self, report):
         self._opentestcase(report)
-        self.appendlog("<skipped/>")
+        if "xfail" in report.keywords:
+            self.appendlog(
+                '<skipped message="expected test failure">%s</skipped>',
+                report.keywords['xfail'])
+        else:
+            self.appendlog("<skipped/>")
         self._closetestcase()
         self.skipped += 1
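An expected failure likewise ends up as a skip, with the xfail reason carried as the element text; roughly like this (using the "42" reason from the test further down):

    <testcase classname="..." name="test_xfail">
        <skipped message="expected test failure">42</skipped>
    </testcase>
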
@@ -73,7 +73,7 @@ class ResultLog(object):
         code = report.shortrepr
         if code == 'x':
             longrepr = str(report.longrepr)
-        elif code == 'P':
+        elif code == 'X':
             longrepr = ''
         elif report.passed:
             longrepr = ""
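In the resultlog output an unexpectedly passing test is therefore now written with the status letter 'X' instead of 'P', i.e. a line of the form below (the test id is illustrative and its exact format may differ):

    X test_example.py:test_xpass
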
@@ -225,7 +225,7 @@ def pytest_report_teststatus(report):
         if report.skipped:
             return "xfailed", "x", "xfail"
         elif report.failed:
-            return "xpassed", "P", "XPASS"
+            return "xpassed", "X", "XPASS"
 
 # called by the terminalreporter instance/plugin
 def pytest_terminal_summary(terminalreporter):
@@ -242,7 +242,7 @@ def pytest_terminal_summary(terminalreporter):
     for char in tr.reportchars:
         if char == "x":
             show_xfailed(terminalreporter, lines)
-        elif char == "P":
+        elif char == "X":
             show_xpassed(terminalreporter, lines)
         elif char == "f":
             show_failed(terminalreporter, lines)
@@ -25,11 +25,17 @@ class TestPython:
                 assert 0
             def test_skip():
                 py.test.skip("")
+            @py.test.mark.xfail
+            def test_xfail():
+                assert 0
+            @py.test.mark.xfail
+            def test_xpass():
+                assert 1
         """)
         result, dom = runandparse(testdir)
         assert result.ret
         node = dom.getElementsByTagName("testsuite")[0]
-        assert_attr(node, errors=0, failures=1, skips=1, tests=2)
+        assert_attr(node, errors=0, failures=1, skips=3, tests=2)
 
     def test_setup_error(self, testdir):
         testdir.makepyfile("""
@@ -92,6 +98,43 @@ class TestPython:
         assert_attr(fnode, message="test failure")
         assert "ValueError" in fnode.toxml()
 
+    def test_xfailure_function(self, testdir):
+        testdir.makepyfile("""
+            import py
+            def test_xfail():
+                py.test.xfail("42")
+        """)
+        result, dom = runandparse(testdir)
+        assert not result.ret
+        node = dom.getElementsByTagName("testsuite")[0]
+        assert_attr(node, skips=1, tests=0)
+        tnode = node.getElementsByTagName("testcase")[0]
+        assert_attr(tnode,
+            classname="test_xfailure_function.test_xfailure_function",
+            name="test_xfail")
+        fnode = tnode.getElementsByTagName("skipped")[0]
+        assert_attr(fnode, message="expected test failure")
+        #assert "ValueError" in fnode.toxml()
+
+    def test_xfailure_xpass(self, testdir):
+        testdir.makepyfile("""
+            import py
+            @py.test.mark.xfail
+            def test_xpass():
+                pass
+        """)
+        result, dom = runandparse(testdir)
+        #assert result.ret
+        node = dom.getElementsByTagName("testsuite")[0]
+        assert_attr(node, skips=1, tests=0)
+        tnode = node.getElementsByTagName("testcase")[0]
+        assert_attr(tnode,
+            classname="test_xfailure_xpass.test_xfailure_xpass",
+            name="test_xpass")
+        fnode = tnode.getElementsByTagName("skipped")[0]
+        assert_attr(fnode, message="xfail-marked test passes unexpectedly")
+        #assert "ValueError" in fnode.toxml()
+
     def test_collect_error(self, testdir):
         testdir.makepyfile("syntax error")
         result, dom = runandparse(testdir)
@@ -126,7 +126,7 @@ class TestWithFunctionIntegration:
         tb = "".join(lines[8:14])
         assert tb.find('raise ValueError("XFAIL")') != -1
 
-        assert lines[14].startswith('P ')
+        assert lines[14].startswith('X ')
         assert len(lines) == 15
 
     def test_internal_exception(self):
@@ -162,7 +162,7 @@ class TestXFail:
             def test_that():
                 assert 1
         """)
-        result = testdir.runpytest(p, '-rP')
+        result = testdir.runpytest(p, '-rX')
         result.stdout.fnmatch_lines([
             "*XPASS*test_that*",
             "*1 xpassed*"
@@ -331,7 +331,7 @@ def test_reportchars(testdir):
         def test_4():
             py.test.skip("four")
     """)
-    result = testdir.runpytest("-rfxPs")
+    result = testdir.runpytest("-rfxXs")
     result.stdout.fnmatch_lines([
         "FAIL*test_1*",
         "XFAIL*test_2*",