Merge pull request #1472 from pquentin/master
Fix pytest.mark.skip mark when used in strict mode
commit 6ddfd60ce7
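For context before the diffs: under `--strict`, pytest rejects any marker that was not registered through the `markers` ini-value, and prior to this commit `skip` itself was never registered, so an unconditionally skipped test aborted the run instead of being reported as skipped. A minimal sketch of the failing scenario (the file name is made up for illustration):

    # test_repro.py -- illustrative reproduction, not part of the commit
    import pytest

    @pytest.mark.skip          # before this fix, rejected by `pytest --strict`
    def test_hello():
        pass

Once the change below registers `skip` in `pytest_configure`, the same invocation simply reports the test as skipped.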
AUTHORS
@@ -69,6 +69,7 @@ Nicolas Delaby
 Pieter Mulder
 Piotr Banaszkiewicz
 Punyashloka Biswal
+Quentin Pradet
 Ralf Schmitt
 Raphael Pierzina
 Ronny Pfannschmidt
CHANGELOG
@@ -7,7 +7,9 @@
 *
 
-*
+* Fix ``pytest.mark.skip`` mark when used in strict mode.
+  Thanks `@pquentin`_ for the PR and `@RonnyPfannschmidt`_ for
+  showing how to fix the bug.
 
 * Minor improvements and fixes to the documentation.
   Thanks `@omarkohl`_ for the PR.
@@ -165,6 +167,7 @@
 .. _@rabbbit: https://github.com/rabbbit
 .. _@hackebrot: https://github.com/hackebrot
 .. _@omarkohl: https://github.com/omarkohl
+.. _@pquentin: https://github.com/pquentin
 
 2.8.7
 =====
_pytest/skipping.py
@@ -30,6 +30,11 @@ def pytest_configure(config):
         nop.Exception = XFailed
         setattr(pytest, "xfail", nop)
 
+    config.addinivalue_line("markers",
+        "skip(reason=None): skip the given test function with an optional reason. "
+        "Example: skip(reason=\"no way of currently testing this\") skips the "
+        "test."
+    )
     config.addinivalue_line("markers",
         "skipif(condition): skip the given test function if eval(condition) "
         "results in a True value. Evaluation happens within the "
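The hunk above is the actual fix: `skip` is now registered exactly like `skipif` and `xfail` below it, which makes it acceptable to strict mode. For comparison, a sketch of how a project's own conftest.py can register a custom marker so that `--strict` accepts it; the `slow` marker here is an invented example, not part of this commit:

    # conftest.py -- illustrative sketch; the "slow" marker is made up
    def pytest_configure(config):
        # Each "markers" ini-value line both documents the marker in the
        # `pytest --markers` listing and whitelists it under --strict.
        config.addinivalue_line(
            "markers",
            "slow: mark the test as slow so it can be deselected with -m 'not slow'")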
@@ -38,13 +43,13 @@ def pytest_configure(config):
         "http://pytest.org/latest/skipping.html"
     )
     config.addinivalue_line("markers",
-        "xfail(condition, reason=None, run=True, raises=None): mark the test function "
-        "as an expected failure if eval(condition) has a True value. "
-        "Optionally specify a reason for better reporting and run=False if "
-        "you don't even want to execute the test function. If only specific "
-        "exception(s) are expected, you can list them in raises, and if the test fails "
-        "in other ways, it will be reported as a true failure. "
-        "See http://pytest.org/latest/skipping.html"
+        "xfail(condition, reason=None, run=True, raises=None, strict=False): "
+        "mark the test function as an expected failure if eval(condition) "
+        "has a True value. Optionally specify a reason for better reporting "
+        "and run=False if you don't even want to execute the test function. "
+        "If only specific exception(s) are expected, you can list them in "
+        "raises, and if the test fails in other ways, it will be reported as "
+        "a true failure. See http://pytest.org/latest/skipping.html"
     )
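One clarification on the rewrapped text above: the `strict=False` keyword in the `xfail` signature is the xfail marker's own strict mode and is unrelated to the `--strict` command-line option that this merge fixes. A short illustration of what the keyword controls (test names made up):

    import pytest

    @pytest.mark.xfail(strict=False)   # default: an unexpected pass is just XPASS
    def test_tolerant():
        assert True

    @pytest.mark.xfail(strict=True)    # an unexpected pass fails the run
    def test_unforgiving():
        assert True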
testing/test_skipping.py
@@ -539,6 +539,19 @@ class TestSkip:
             "*1 passed*2 skipped*",
         ])
 
+    def test_strict_and_skip(self, testdir):
+        testdir.makepyfile("""
+            import pytest
+            @pytest.mark.skip
+            def test_hello():
+                pass
+        """)
+        result = testdir.runpytest("-rs", "--strict")
+        result.stdout.fnmatch_lines([
+            "*unconditional skip*",
+            "*1 skipped*",
+        ])
+
 class TestSkipif:
     def test_skipif_conditional(self, testdir):
         item = testdir.getitem("""
@@ -812,7 +825,7 @@ def test_default_markers(testdir):
     result = testdir.runpytest("--markers")
     result.stdout.fnmatch_lines([
         "*skipif(*condition)*skip*",
-        "*xfail(*condition, reason=None, run=True, raises=None)*expected failure*",
+        "*xfail(*condition, reason=None, run=True, raises=None, strict=False)*expected failure*",
     ])
 
 def test_xfail_test_setup_exception(testdir):
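As a usage note, the same `--markers` listing that `test_default_markers` checks can be used to confirm the newly registered `skip` marker; a minimal sketch written against the same `testdir` fixture (the test file and function name are made up):

    # test_markers_listing.py -- illustrative, not part of the commit
    def test_skip_marker_is_listed(testdir):
        result = testdir.runpytest("--markers")
        # After this commit, skip appears among the registered markers:
        result.stdout.fnmatch_lines([
            "*skip(reason=None)*skip the given test function*",
        ])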