support boolean condition expressions in skipif/xfail
change documentation to prefer them over string expressions
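As a rough sketch of the usage this commit enables (an editorial illustration, not text from the commit; the test names are made up), compare the pre-2.4 string form with the new boolean form::

    import sys
    import pytest

    # pre-2.4 style: the condition is a string, eval()'d by pytest in a namespace
    @pytest.mark.skipif("sys.version_info >= (3,3)")
    def test_old_style():
        ...

    # style introduced by this commit: a plain boolean, with reason= now required
    @pytest.mark.skipif(sys.version_info >= (3,3),
                        reason="requires python3.3")
    def test_new_style():
        ...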
parent 3279cfa28b
commit bbd265184d
CHANGELOG (10 changed lines)
@@ -1,9 +1,15 @@
-Changes between 2.3.5 and DEV
+Changes between 2.3.5 and 2.4.DEV
 -----------------------------------
 
 - (experimental) allow fixture functions to be
   implemented as context managers. Thanks Andreas Pelme,
-  ladimir Keleshev.
+  Vladimir Keleshev.
 
+- (experimental) allow boolean expression directly with skipif/xfail
+  if a "reason" is also specified. Rework skipping documentation
+  to recommend "condition as booleans" because it prevents surprises
+  when importing markers between modules. Specifying conditions
+  as strings will remain fully supported.
+
 - fix issue245 by depending on the released py-1.4.14
   which fixes py.io.dupfile to work with files with no
@@ -1,2 +1,2 @@
 #
-__version__ = '2.3.6.dev3'
+__version__ = '2.4.0.dev1'
@@ -89,7 +89,11 @@ class MarkEvaluator:
             if isinstance(expr, py.builtin._basestring):
                 result = cached_eval(self.item.config, expr, d)
             else:
-                pytest.fail("expression is not a string")
+                if self.get("reason") is None:
+                    # XXX better be checked at collection time
+                    pytest.fail("you need to specify reason=STRING "
+                                "when using booleans as conditions.")
+                result = bool(expr)
             if result:
                 self.result = True
                 self.expr = expr
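For illustration only (a sketch based on the branch added above and on the new tests at the end of this commit; the file and test names are invented), the behaviour this enables and enforces looks like::

    # content of test_bool_condition.py  -- hypothetical example
    import sys
    import pytest

    # boolean condition plus reason: checked via bool(expr), no eval() of a string
    @pytest.mark.skipif(sys.platform == 'win32', reason="does not run on windows")
    def test_posix_only():
        assert True

    # a boolean condition without reason= now hits the new pytest.fail(...) branch
    # during setup and is reported as an error rather than a skip
    @pytest.mark.skipif(True)
    def test_missing_reason():
        assert True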
@@ -9,86 +9,110 @@ If you have test functions that cannot be run on certain platforms
 or that you expect to fail you can mark them accordingly or you
 may call helper functions during execution of setup or test functions.
 
-A *skip* means that you expect your test to pass unless a certain
-configuration or condition (e.g. wrong Python interpreter, missing
-dependency) prevents it to run. And *xfail* means that your test
-can run but you expect it to fail because there is an implementation problem.
+A *skip* means that you expect your test to pass unless the environment
+(e.g. wrong Python interpreter, missing dependency) prevents it from running.
+And *xfail* means that your test can run but you expect it to fail
+because there is an implementation problem.
 
-py.test counts and lists *skip* and *xfail* tests separately. However,
-detailed information about skipped/xfailed tests is not shown by default
-to avoid cluttering the output. You can use the ``-r`` option to see
-details corresponding to the "short" letters shown in the test
-progress::
+py.test counts and lists *skip* and *xfail* tests separately. Detailed
+information about skipped/xfailed tests is not shown by default to avoid
+cluttering the output. You can use the ``-r`` option to see details
+corresponding to the "short" letters shown in the test progress::
 
     py.test -rxs  # show extra info on skips and xfails
 
 (See :ref:`how to change command line options defaults`)
 
 .. _skipif:
+.. _`condition booleans`:
 
 Marking a test function to be skipped
 -------------------------------------------
 
+.. versionadded:: 2.4
+
 Here is an example of marking a test function to be skipped
-when run on a Python3 interpreter::
+when run on a Python3.3 interpreter::
 
     import sys
-    @pytest.mark.skipif("sys.version_info >= (3,0)")
+    @pytest.mark.skipif(sys.version_info >= (3,3),
+                        reason="requires python3.3")
     def test_function():
         ...
 
-During test function setup the skipif condition is
-evaluated by calling ``eval('sys.version_info >= (3,0)', namespace)``.
-(*New in version 2.0.2*) The namespace contains all the module globals of the test function so that
-you can for example check for versions of a module you are using::
+During test function setup the condition ("sys.version_info >= (3,3)") is
+checked. If it evaluates to True, the test function will be skipped
+with the specified reason. Note that pytest enforces specifying a reason
+in order to report meaningful "skip reasons" (e.g. when using ``-rs``).
+
+You can share skipif markers between modules. Consider this test module::
 
     # content of test_mymodule.py
 
     import mymodule
-    @pytest.mark.skipif("mymodule.__version__ < '1.2'")
-    def test_function():
-        ...
-
-The test function will not be run ("skipped") if
-``mymodule`` is below the specified version. The reason
-for specifying the condition as a string is mainly that
-py.test can report a summary of skip conditions.
-For information on the construction of the ``namespace``
-see `evaluation of skipif/xfail conditions`_.
-
-You can of course create a shortcut for your conditional skip
-decorator at module level like this::
-
-    win32only = pytest.mark.skipif("sys.platform != 'win32'")
-
-    @win32only
+    minversion = pytest.mark.skipif(mymodule.__versioninfo__ < (1,1),
+                                    reason="at least mymodule-1.1 required")
+    @minversion
     def test_function():
         ...
 
-Skip all test functions of a class
---------------------------------------
+You can import it from another test module::
+
+    # test_myothermodule.py
+    from test_mymodule import minversion
+
+    @minversion
+    def test_anotherfunction():
+        ...
+
+For larger test suites it's usually a good idea to have one file
+where you define the markers which you then consistently apply
+throughout your test suite.
+
+Alternatively, the pre pytest-2.4 way to specify `condition strings <string conditions>`_ instead of booleans will remain fully supported in future
+versions of pytest. It couldn't be easily used for importing markers
+between test modules so it's no longer advertised as the primary method.
+
+
+Skip all test functions of a class or module
+---------------------------------------------
 
 As with all function :ref:`marking <mark>` you can skip test functions at the
-`whole class- or module level`_. Here is an example
-for skipping all methods of a test class based on the platform::
+`whole class- or module level`_. If your code targets python2.6 or above you
+can use the skipif decorator (and any other marker) on classes::
 
-    class TestPosixCalls:
-        pytestmark = pytest.mark.skipif("sys.platform == 'win32'")
-
-        def test_function(self):
-            "will not be setup or run under 'win32' platform"
-
-The ``pytestmark`` special name tells py.test to apply it to each test
-function in the class. If your code targets python2.6 or above you can
-more naturally use the skipif decorator (and any other marker) on
-classes::
-
-    @pytest.mark.skipif("sys.platform == 'win32'")
+    @pytest.mark.skipif(sys.platform == 'win32',
+                        reason="does not run on windows")
     class TestPosixCalls:
 
         def test_function(self):
             "will not be setup or run under 'win32' platform"
 
-Using multiple "skipif" decorators on a single function is generally fine - it means that if any of the conditions apply the function execution will be skipped.
+If the condition is true, this marker will produce a skip result for
+each of the test methods.
+
+If your code targets python2.5 where class-decorators are not available,
+you can set the ``pytestmark`` attribute of a class::
+
+    class TestPosixCalls:
+        pytestmark = pytest.mark.skipif(sys.platform == 'win32',
+                                        reason="does not run on windows")
+
+        def test_function(self):
+            "will not be setup or run under 'win32' platform"
+
+As with the class-decorator, the ``pytestmark`` special name tells
+py.test to apply it to each test function in the class.
+
+If you want to skip all test functions of a module, you must use
+the ``pytestmark`` name on the global level::
+
+    # test_module.py
+
+    pytestmark = pytest.mark.skipif(...)
+
+If multiple "skipif" decorators are applied to a test function, it
+will be skipped if any of the skip conditions is true.
 
 .. _`whole class- or module level`: mark.html#scoped-marking
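As an editorial aside (not part of the diff): a filled-in version of the module-level ``pytestmark`` pattern shown above could look like this, with an assumed condition::

    # content of test_module.py  -- hypothetical example
    import sys
    import pytest

    # applies to every test function collected from this module
    pytestmark = pytest.mark.skipif(sys.platform == 'win32',
                                    reason="does not run on windows")

    def test_uses_posix_only_feature():
        ...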
@@ -118,7 +142,8 @@ as if it weren't marked at all.
 As with skipif_ you can also mark your expectation of a failure
 on a particular platform::
 
-    @pytest.mark.xfail("sys.version_info >= (3,0)")
+    @pytest.mark.xfail(sys.version_info >= (3,3),
+                       reason="python3.3 api changes")
     def test_function():
         ...
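The import-and-reuse pattern recommended above for skipif works the same way for xfail; a sketch (the marker name and module are invented)::

    # content of test_py33_api.py  -- hypothetical example
    import sys
    import pytest

    # a reusable boolean xfail marker that other test modules can import
    xfail_py33 = pytest.mark.xfail(sys.version_info >= (3,3),
                                   reason="python3.3 api changes")

    @xfail_py33
    def test_function():
        ...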
@@ -151,41 +176,19 @@ Running it with the report-on-xfail option gives this output::
 
     ======================== 6 xfailed in 0.05 seconds =========================
 
-.. _`evaluation of skipif/xfail conditions`:
-
-Evaluation of skipif/xfail expressions
-----------------------------------------------------
-
-.. versionadded:: 2.0.2
-
-The evaluation of a condition string in ``pytest.mark.skipif(conditionstring)``
-or ``pytest.mark.xfail(conditionstring)`` takes place in a namespace
-dictionary which is constructed as follows:
-
-* the namespace is initialized by putting the ``sys`` and ``os`` modules
-  and the pytest ``config`` object into it.
-
-* updated with the module globals of the test function for which the
-  expression is applied.
-
-The pytest ``config`` object allows you to skip based on a test configuration value
-which you might have added::
-
-    @pytest.mark.skipif("not config.getvalue('db')")
-    def test_function(...):
-        ...
-
 Imperative xfail from within a test or setup function
 ------------------------------------------------------
 
-If you cannot declare xfail-conditions at import time
-you can also imperatively produce an XFail-outcome from
-within test or setup code. Example::
+If you cannot declare xfail- or skipif conditions at import
+time you can also imperatively produce an according outcome
+from within test or setup code::
 
     def test_function():
         if not valid_config():
-            pytest.xfail("unsupported configuration")
+            pytest.xfail("failing configuration (but should work)")
+            # or
+            pytest.skip("unsupported configuration")
 
 
 Skipping on a missing import dependency
@@ -202,16 +205,61 @@ version number of a library::
 
     docutils = pytest.importorskip("docutils", minversion="0.3")
 
-The version will be read from the specified module's ``__version__`` attribute.
+The version will be read from the specified
+module's ``__version__`` attribute.
 
-Imperative skip from within a test or setup function
-------------------------------------------------------
+.. _`string conditions`:
 
-If for some reason you cannot declare skip-conditions
-you can also imperatively produce a skip-outcome from
-within test or setup code. Example::
+specifying conditions as strings versus booleans
+----------------------------------------------------------
+
+Prior to pytest-2.4 the only way to specify skipif/xfail conditions was
+to use strings::
 
+    import sys
+    @pytest.mark.skipif("sys.version_info >= (3,3)")
     def test_function():
-        if not valid_config():
-            pytest.skip("unsupported configuration")
+        ...
+
+During test function setup the skipif condition is evaluated by calling
+``eval('sys.version_info >= (3,3)', namespace)``. The namespace contains
+all the module globals, and ``os`` and ``sys`` as a minimum.
+
+Since pytest-2.4 `condition booleans`_ are considered preferable
+because markers can then be freely imported between test modules.
+With strings you need to import not only the marker but all variables
+used by the marker, which violates encapsulation.
+
+The reason for specifying the condition as a string was that py.test can
+report a summary of skip conditions based purely on the condition string.
+With conditions as booleans you are required to specify a ``reason`` string.
+
+Note that string conditions will remain fully supported and you are free
+to use them if you have no need for cross-importing markers.
+
+The evaluation of a condition string in ``pytest.mark.skipif(conditionstring)``
+or ``pytest.mark.xfail(conditionstring)`` takes place in a namespace
+dictionary which is constructed as follows:
+
+* the namespace is initialized by putting the ``sys`` and ``os`` modules
+  and the pytest ``config`` object into it.
+
+* updated with the module globals of the test function for which the
+  expression is applied.
+
+The pytest ``config`` object allows you to skip based on a test
+configuration value which you might have added::
+
+    @pytest.mark.skipif("not config.getvalue('db')")
+    def test_function(...):
+        ...
+
+The equivalent with "boolean conditions" is::
+
+    @pytest.mark.skipif(not pytest.config.getvalue("db"),
+                        reason="--db was not specified")
+    def test_function(...):
+        pass
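To make the string-evaluation rules above concrete, here is a rough sketch of how such a namespace can be assembled and the condition string evaluated; it mirrors the description, not pytest's exact internals (the function name is invented)::

    import os
    import sys

    def evaluate_condition_string(condition, config, module_globals):
        # the namespace starts with the os and sys modules and the config object ...
        namespace = dict(os=os, sys=sys, config=config)
        # ... and is then updated with the globals of the test function's module
        namespace.update(module_globals)
        return bool(eval(condition, namespace))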
|
||||
|
||||
|
||||
|
|
setup.py (2 changed lines)
@@ -12,7 +12,7 @@ def main():
         name='pytest',
         description='py.test: simple powerful testing with Python',
         long_description = long_description,
-        version='2.3.6.dev3',
+        version='2.4.0.dev1',
         url='http://pytest.org',
         license='MIT license',
         platforms=['unix', 'linux', 'osx', 'cygwin', 'win32'],
@@ -569,7 +569,6 @@ def test_default_markers(testdir):
         "*xfail(*condition, reason=None, run=True)*expected failure*",
     ])
 
-
 def test_xfail_test_setup_exception(testdir):
     testdir.makeconftest("""
         def pytest_runtest_setup():
@@ -610,3 +609,44 @@ def test_imperativeskip_on_xfail_test(testdir):
     """)
 
 
+class TestBooleanCondition:
+    def test_skipif(self, testdir):
+        testdir.makepyfile("""
+            import pytest
+            @pytest.mark.skipif(True, reason="True123")
+            def test_func1():
+                pass
+            @pytest.mark.skipif(False, reason="True123")
+            def test_func2():
+                pass
+        """)
+        result = testdir.runpytest()
+        result.stdout.fnmatch_lines("""
+            *1 passed*1 skipped*
+        """)
+
+    def test_skipif_noreason(self, testdir):
+        testdir.makepyfile("""
+            import pytest
+            @pytest.mark.skipif(True)
+            def test_func():
+                pass
+        """)
+        result = testdir.runpytest("-rs")
+        result.stdout.fnmatch_lines("""
+            *1 error*
+        """)
+
+    def test_xfail(self, testdir):
+        testdir.makepyfile("""
+            import pytest
+            @pytest.mark.xfail(True, reason="True123")
+            def test_func():
+                assert 0
+        """)
+        result = testdir.runpytest("-rxs")
+        result.stdout.fnmatch_lines("""
+            *XFAIL*
+            *True123*
+            *1 xfail*
+        """)