Merge pull request #3838 from wimglenn/runresult_xfail
Support xfailed and xpassed outcomes in RunResult.
commit 044d2b8e6e
AUTHORS (1 addition):

@@ -214,6 +214,7 @@ Vitaly Lashmanov
 Vlad Dragos
 Wil Cooley
 William Lee
+Wim Glenn
 Wouter van Ackooy
 Xuan Luong
 Xuecong Liao
New file (changelog entry):

@@ -0,0 +1 @@
+Added support for 'xfailed' and 'xpassed' outcomes to the ``pytester.RunResult.assert_outcomes`` signature.
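A minimal usage sketch of the extended signature, assuming a plugin test suite that enables the pytester plugin and uses the testdir fixture; the test name and the file body below are illustrative, not part of this commit:

# Illustrative plugin test exercising the new keywords (not from this commit).
# Requires the pytester plugin, e.g. pytest_plugins = "pytester" in conftest.py.
def test_mixed_outcomes(testdir):
    testdir.makepyfile(
        """
        import pytest

        @pytest.mark.xfail
        def test_expected_failure():
            assert False

        def test_ok():
            assert True
        """
    )
    result = testdir.runpytest()
    # xfailed and xpassed counts can now be asserted alongside the existing
    # outcomes; any outcome left unspecified must be zero.
    result.assert_outcomes(passed=1, xfailed=1)

This mirrors the tests added further down in this commit.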
@@ -406,7 +406,9 @@ class RunResult(object):
                     return d
         raise ValueError("Pytest terminal report not found")

-    def assert_outcomes(self, passed=0, skipped=0, failed=0, error=0):
+    def assert_outcomes(
+        self, passed=0, skipped=0, failed=0, error=0, xpassed=0, xfailed=0
+    ):
         """Assert that the specified outcomes appear with the respective
         numbers (0 means it didn't occur) in the text output from a test run.

@@ -417,10 +419,18 @@ class RunResult(object):
             "skipped": d.get("skipped", 0),
             "failed": d.get("failed", 0),
             "error": d.get("error", 0),
+            "xpassed": d.get("xpassed", 0),
+            "xfailed": d.get("xfailed", 0),
         }
-        assert obtained == dict(
-            passed=passed, skipped=skipped, failed=failed, error=error
-        )
+        expected = {
+            "passed": passed,
+            "skipped": skipped,
+            "failed": failed,
+            "error": error,
+            "xpassed": xpassed,
+            "xfailed": xfailed,
+        }
+        assert obtained == expected


 class CwdSnapshot(object):
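For readers skimming the hunks above, a self-contained sketch of the comparison that assert_outcomes now performs; check_outcomes and the literal counts are illustrative stand-ins, with parsed_counts playing the role of the dictionary returned by self.parseoutcomes():

# Standalone distillation of the new obtained/expected comparison (illustrative).
def check_outcomes(parsed_counts, passed=0, skipped=0, failed=0,
                   error=0, xpassed=0, xfailed=0):
    categories = ("passed", "skipped", "failed", "error", "xpassed", "xfailed")
    # Every category defaults to zero on both sides, so an undeclared
    # xpass or xfail in the run makes the assertion fail.
    obtained = {cat: parsed_counts.get(cat, 0) for cat in categories}
    expected = dict(passed=passed, skipped=skipped, failed=failed,
                    error=error, xpassed=xpassed, xfailed=xfailed)
    # Comparing two plain dicts gives a readable per-key diff on failure.
    assert obtained == expected

check_outcomes({"passed": 1, "xfailed": 1}, passed=1, xfailed=1)

One effect of building an explicit expected dict is that the assertion itself stays on a single short line.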
@@ -83,6 +83,57 @@ def test_testdir_runs_with_plugin(testdir):
     result.assert_outcomes(passed=1)


+def test_runresult_assertion_on_xfail(testdir):
+    testdir.makepyfile(
+        """
+        import pytest
+
+        pytest_plugins = "pytester"
+
+        @pytest.mark.xfail
+        def test_potato():
+            assert False
+        """
+    )
+    result = testdir.runpytest()
+    result.assert_outcomes(xfailed=1)
+    assert result.ret == 0
+
+
+def test_runresult_assertion_on_xpassed(testdir):
+    testdir.makepyfile(
+        """
+        import pytest
+
+        pytest_plugins = "pytester"
+
+        @pytest.mark.xfail
+        def test_potato():
+            assert True
+        """
+    )
+    result = testdir.runpytest()
+    result.assert_outcomes(xpassed=1)
+    assert result.ret == 0
+
+
+def test_xpassed_with_strict_is_considered_a_failure(testdir):
+    testdir.makepyfile(
+        """
+        import pytest
+
+        pytest_plugins = "pytester"
+
+        @pytest.mark.xfail(strict=True)
+        def test_potato():
+            assert True
+        """
+    )
+    result = testdir.runpytest()
+    result.assert_outcomes(failed=1)
+    assert result.ret != 0
+
+
 def make_holder():
     class apiclass(object):
         def pytest_xyz(self, arg):