Merge pull request #1413 from nicoddemus/skip-if-parametrize
Fix skip/xfail markers in parametrized arguments
commit cbb5d48fdd
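This change makes skip, skipif, and xfail marks work when they are applied
to a single argument set of a parametrized test by calling the mark
decorator on a value. A minimal sketch of the usage the PR enables (the
names skip_big and test_small are illustrative, not taken from the diff;
the string-condition form of skipif is the pytest 2.x API used throughout
this commit):

    import pytest

    skip_big = pytest.mark.skipif('True')  # condition string, pytest 2.x style

    @pytest.mark.parametrize('n', [1, 2, skip_big(3)])
    def test_small(n):
        # the third parameter set is collected, then skipped at setup
        assert n < 3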
--- a/_pytest/skipping.py
+++ b/_pytest/skipping.py
@@ -5,7 +5,7 @@ import traceback
 import py
 import pytest
-from _pytest.mark import MarkInfo
+from _pytest.mark import MarkInfo, MarkDecorator


 def pytest_addoption(parser):
@@ -163,14 +163,14 @@ def pytest_runtest_setup(item):
     # Check if skip or skipif are specified as pytest marks

     skipif_info = item.keywords.get('skipif')
-    if isinstance(skipif_info, MarkInfo):
+    if isinstance(skipif_info, (MarkInfo, MarkDecorator)):
         eval_skipif = MarkEvaluator(item, 'skipif')
         if eval_skipif.istrue():
             item._evalskip = eval_skipif
             pytest.skip(eval_skipif.getexplanation())

     skip_info = item.keywords.get('skip')
-    if isinstance(skip_info, MarkInfo):
+    if isinstance(skip_info, (MarkInfo, MarkDecorator)):
         item._evalskip = True
         if 'reason' in skip_info.kwargs:
             pytest.skip(skip_info.kwargs['reason'])
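Why the isinstance checks widen: a mark applied directly to a test function
reaches item.keywords as a MarkInfo, but a mark attached to a single
parametrize value is stored as the still-unapplied MarkDecorator, which the
old MarkInfo-only check silently ignored, so the skip never fired. A quick
way to see the decorator type (a sketch against the _pytest.mark internals
of this pytest line, not part of the diff):

    import pytest
    from _pytest.mark import MarkDecorator

    m = pytest.mark.skipif('True')
    assert isinstance(m, MarkDecorator)  # unapplied decorator, as stored for m(2)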
--- a/testing/python/collect.py
+++ b/testing/python/collect.py
@@ -531,6 +531,84 @@ class TestFunction:
         assert colitems[2].name == 'test2[a-c]'
         assert colitems[3].name == 'test2[b-c]'

+    def test_parametrize_skipif(self, testdir):
+        testdir.makepyfile("""
+            import pytest
+
+            m = pytest.mark.skipif('True')
+
+            @pytest.mark.parametrize('x', [0, 1, m(2)])
+            def test_skip_if(x):
+                assert x < 2
+        """)
+        result = testdir.runpytest()
+        result.stdout.fnmatch_lines('* 2 passed, 1 skipped in *')
+
+    def test_parametrize_skip(self, testdir):
+        testdir.makepyfile("""
+            import pytest
+
+            m = pytest.mark.skip('')
+
+            @pytest.mark.parametrize('x', [0, 1, m(2)])
+            def test_skip(x):
+                assert x < 2
+        """)
+        result = testdir.runpytest()
+        result.stdout.fnmatch_lines('* 2 passed, 1 skipped in *')
+
+    def test_parametrize_skipif_no_skip(self, testdir):
+        testdir.makepyfile("""
+            import pytest
+
+            m = pytest.mark.skipif('False')
+
+            @pytest.mark.parametrize('x', [0, 1, m(2)])
+            def test_skipif_no_skip(x):
+                assert x < 2
+        """)
+        result = testdir.runpytest()
+        result.stdout.fnmatch_lines('* 1 failed, 2 passed in *')
+
+    def test_parametrize_xfail(self, testdir):
+        testdir.makepyfile("""
+            import pytest
+
+            m = pytest.mark.xfail('True')
+
+            @pytest.mark.parametrize('x', [0, 1, m(2)])
+            def test_xfail(x):
+                assert x < 2
+        """)
+        result = testdir.runpytest()
+        result.stdout.fnmatch_lines('* 2 passed, 1 xfailed in *')
+
+    def test_parametrize_passed(self, testdir):
+        testdir.makepyfile("""
+            import pytest
+
+            m = pytest.mark.xfail('True')
+
+            @pytest.mark.parametrize('x', [0, 1, m(2)])
+            def test_xfail(x):
+                pass
+        """)
+        result = testdir.runpytest()
+        result.stdout.fnmatch_lines('* 2 passed, 1 xpassed in *')
+
+    def test_parametrize_xfail_passed(self, testdir):
+        testdir.makepyfile("""
+            import pytest
+
+            m = pytest.mark.xfail('False')
+
+            @pytest.mark.parametrize('x', [0, 1, m(2)])
+            def test_passed(x):
+                pass
+        """)
+        result = testdir.runpytest()
+        result.stdout.fnmatch_lines('* 3 passed in *')
+

 class TestSorting:
     def test_check_equality(self, testdir):
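The m(2) spelling in these tests works because calling a MarkDecorator with
a plain value returns a new decorator with that value appended to its args;
parametrize later unwraps the value and transfers the mark to the generated
test item. A small sketch of that behavior (assuming the MarkDecorator
semantics of this pytest line; the variable names are illustrative):

    import pytest

    m = pytest.mark.skipif('True')
    marked = m(2)          # still a MarkDecorator, not the integer 2
    print(marked.args)     # ('True', 2): the condition plus the wrapped value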
--- a/testing/test_skipping.py
+++ b/testing/test_skipping.py
@@ -551,14 +551,17 @@ class TestSkipif:
                 pytest_runtest_setup(item))
         assert x.value.msg == "condition: hasattr(os, 'sep')"

-    def test_skipif_reporting(self, testdir):
+    @pytest.mark.parametrize('params', [
+        '"hasattr(sys, \'platform\')"',
+        'True, reason="invalid platform"',
+    ])
+    def test_skipif_reporting(self, testdir, params):
         p = testdir.makepyfile(test_foo="""
             import pytest
-            @pytest.mark.skipif("hasattr(sys, 'platform')")
+            @pytest.mark.skipif(%(params)s)
             def test_that():
                 assert 0
-        """)
+        """ % dict(params=params))
         result = testdir.runpytest(p, '-s', '-rs')
         result.stdout.fnmatch_lines([
             "*SKIP*1*test_foo.py*platform*",