- make importorskip static at py.test.importorskip because it's
  used for conditional plugin loading
- fix case where xfail is defined at module/class level
- fixes and improvements to docs, correct links to plugins
- use new skip facilities here and there

--HG--
branch : trunk
holger krekel 2009-10-15 20:10:06 +02:00
parent 3ca770b420
commit d8b9b5f1c8
17 changed files with 148 additions and 148 deletions

View File

@@ -94,6 +94,25 @@ def raises(ExpectedException, *args, **kwargs):
        raise ExceptionFailure(msg="DID NOT RAISE",
                               expr=args, expected=ExpectedException)

def importorskip(modname, minversion=None):
    """ return imported module or perform a dynamic skip() """
    compile(modname, '', 'eval') # to catch syntaxerrors
    try:
        mod = __import__(modname, None, None, ['__doc__'])
    except ImportError:
        py.test.skip("could not import %r" %(modname,))
    if minversion is None:
        return mod
    verattr = getattr(mod, '__version__', None)
    if isinstance(minversion, str):
        minver = minversion.split(".")
    else:
        minver = list(minversion)
    if verattr is None or verattr.split(".") < minver:
        py.test.skip("module %r has __version__ %r, required is: %r" %(
            modname, verattr, minversion))
    return mod

# exitcodes for the command line
EXIT_OK = 0
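
One caveat worth flagging in the version check above (an editorial note, not part of the diff): the version strings are split into lists of strings, so components compare lexicographically rather than numerically:

# Sketch: component strings compare lexicographically, so "10" < "3".
# A module at version 0.10 would wrongly be treated as older than minversion="0.3".
assert "0.10".split(".") < "0.3".split(".")    # ['0', '10'] < ['0', '3'] -> True
assert not ([0, 10] < [0, 3])                  # numeric components compare as expected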

View File

@@ -4,7 +4,8 @@ write and report coverage data with 'figleaf'.
"""
import py

figleaf = py.test.importorskip("figleaf.annotate_html")
py.test.importorskip("figleaf.annotate_html")
import figleaf

def pytest_addoption(parser):
    group = parser.addgroup('figleaf options')
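
A likely reason for the two-step import above (my reading of the diff, consistent with the test_importorskip_imports_last_module_part test added at the end of this changeset): the new outcome.py implementation passes a fromlist to __import__ and therefore returns the last dotted component, not the top-level figleaf package:

# __import__ without a fromlist returns the top-level package;
# with a non-empty fromlist it returns the submodule itself.
import os
assert __import__("os.path") is os
assert __import__("os.path", None, None, ['__doc__']) is os.path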

View File

@@ -1,13 +1,12 @@
"""
mark python test functions, classes or modules for conditional
skipping (skipif) or as expected-to-fail (xfail). Both declarations
lead to special reporting and both can be systematically associated
with functions, whole classes or modules. The difference between
the two is that 'xfail' will still execute test functions
but it will revert the outcome. A passing test is now
a failure and failing test is expected. All skip conditions
are reported at the end of test run through the terminal
reporter.
advanced conditional skipping for python test functions, classes or modules.
You can mark functions, classes or modules for conditional
skipping (skipif) or as expected-to-fail (xfail). The difference
between the two is that 'xfail' will still execute test functions
but it will invert the outcome: a passing test becomes a failure and
a failing test is a semi-passing one. All skip conditions are
reported at the end of the test run through the terminal reporter.
.. _skipif:
@@ -20,15 +19,18 @@ Here is an example for skipping a test function on Python3::

    def test_function():
        ...

Conditions are specified as python expressions
and can access the ``sys`` module. They can also
access the config object and thus depend on command
line or conftest options::
The 'skipif' marker accepts an **arbitrary python expression**
as a condition. When setting up the test function the condition
is evaluated by calling ``eval(expr, namespace)``. The namespace
contains the ``sys`` and ``os`` modules as well as the
test ``config`` object. The latter allows you to skip based
on a test configuration value, for example::

    @py.test.mark.skipif("config.getvalue('db') is None")
    @py.test.mark.skipif("not config.getvalue('db')")
    def test_function(...):
        ...
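
As an illustration (an editorial sketch, not part of this changeset; the test name is hypothetical), a condition may combine any of the provided names::

    @py.test.mark.skipif("sys.platform == 'win32' or not os.environ.get('DISPLAY')")
    def test_requires_display(...):
        ...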
conditionally mark a function as "expected to fail"
-------------------------------------------------------
@@ -53,7 +55,7 @@ skip/xfail a whole test class or module
-------------------------------------------
Instead of marking single functions you can skip
a whole class of tests when runnign on a specific
a whole class of tests when running on a specific
platform::
    class TestSomething:
@@ -75,13 +77,12 @@ You can use a helper to skip on a failing import::
You can use this helper at module level or within
a test or setup function.
You can aslo skip if a library does not have the right version::
You can also skip if a library does not come with a high enough version::
    docutils = py.test.importorskip("docutils", minversion="0.3")
The version will be read from the specified module's ``__version__`` attribute.
dynamically skip from within a test or setup
-------------------------------------------------
@@ -96,16 +97,11 @@ If you want to skip the execution of a test you can call
.. _`funcarg factory`: ../funcargs.html#factory
"""
# XXX not all skip-related code is contained in
# this plugin yet, some remains in outcome.py and
# the Skipped Exception is imported here and there.
# XXX py.test.skip, .importorskip and the Skipped class
# should also be defined in this plugin, requires thought/changes
import py
def pytest_namespace():
    return {'importorskip': importorskip}

def pytest_runtest_setup(item):
    expr, result = evalexpression(item, 'skipif')
    if result:

@@ -117,14 +113,15 @@ def pytest_runtest_makereport(__multicall__, item, call):
    if hasattr(item, 'obj'):
        expr, result = evalexpression(item, 'xfail')
        if result:
            res = __multicall__.execute()
            rep = __multicall__.execute()
            if call.excinfo:
                res.skipped = True
                res.failed = res.passed = False
                rep.skipped = True
                rep.failed = rep.passed = False
            else:
                res.skipped = res.passed = False
                res.failed = True
            return res
                rep.skipped = rep.passed = False
                rep.failed = True
            rep.keywords['xfail'] = True # expr
            return rep
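
The inversion above reads as a small truth table (an editorial restatement, not plugin code): a failing xfail'ed test is reported as skipped ("expected failure"), a passing one as failed ("unexpectedly passing"):

# Sketch of the outcome inversion performed above:
def invert_outcome(test_failed):
    return {'skipped': test_failed,        # expected failure
            'failed': not test_failed,     # unexpectedly passing
            'passed': False}               # an xfail'ed test never plain-passes

assert invert_outcome(True) == {'skipped': True, 'failed': False, 'passed': False}
assert invert_outcome(False) == {'skipped': False, 'failed': True, 'passed': False}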
def pytest_report_teststatus(report):
    if 'xfail' in report.keywords:

@@ -157,24 +154,6 @@ def pytest_terminal_summary(terminalreporter):
        pos = "%s %s:%d: unexpectedly passing" %(modpath, fspath, lineno)
        tr._tw.line(pos)

def importorskip(modname, minversion=None):
    """ return imported module or perform a dynamic skip() """
    compile(modname, '', 'eval') # to catch syntaxerrors
    try:
        mod = __import__(modname)
    except ImportError:
        py.test.skip("could not import %r" %(modname,))
    if minversion is None:
        return mod
    verattr = getattr(mod, '__version__', None)
    if isinstance(minversion, str):
        minver = minversion.split(".")
    else:
        minver = list(minversion)
    if verattr is None or verattr.split(".") < minver:
        py.test.skip("module %r has __version__ %r, required is: %r" %(
            modname, verattr, minversion))
    return mod

def getexpression(item, keyword):
    if isinstance(item, py.test.collect.Function):

@@ -193,7 +172,7 @@ def evalexpression(item, keyword):
    result = None
    if expr:
        if isinstance(expr, str):
            d = {'sys': py.std.sys, 'config': item.config}
            d = {'os': py.std.os, 'sys': py.std.sys, 'config': item.config}
            result = eval(expr, d)
        else:
            result = expr
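
For clarity (a minimal standalone sketch of the evaluation step; None stands in for the real config object): the string form of a condition is evaluated against exactly this namespace:

# Illustration of how a skipif/xfail string is evaluated:
import os, sys
expr = "sys.version_info >= (3,0) and os.name == 'posix'"
namespace = {'os': os, 'sys': sys, 'config': None}   # 'config' is item.config in the plugin
result = eval(expr, namespace)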

View File

@@ -21,11 +21,13 @@ plugins = [
]

externals = {
    'oejskit': 'run javascript tests in real life browsers',
    'django': 'support for testing django applications',
    'oejskit': "run javascript tests in real life browsers",
    'django': "support for testing django applications",
    # 'coverage': "support for using Ned's coverage module",
    # 'xmlresult': "support for generating xml reports "
    #              "and CruiseControl integration",
}

def warn(*args):
    msg = " ".join(map(str, args))
    print >>sys.stderr, "WARN:", msg
@@ -123,7 +125,7 @@ class RestWriter:
        self.out.close()
        print "wrote", self.target
        del self.out

class PluginOverview(RestWriter):
    def makerest(self, config):
        plugindir = py.path.local(py.__file__).dirpath("test", "plugin")

@@ -145,7 +147,6 @@ class PluginOverview(RestWriter):
        self.Print()

class HookSpec(RestWriter):
    def makerest(self, config):
        module = config.pluginmanager.hook._hookspecs
        source = py.code.Source(module)

@@ -212,7 +213,7 @@ class PluginDoc(RestWriter):
        #     "py/test/plugin/%s" %(hg_changeset, basename)))
        self.links.append((basename,
            "http://bitbucket.org/hpk42/py-trunk/raw/%s/"
            "py/test/plugin/%s" %(pyversion, basename)))
            "_py/test/plugin/%s" %(pyversion, basename)))
        self.links.append(('customize', '../customize.html'))
        self.links.append(('plugins', 'index.html'))
        self.links.append(('get in contact', '../../contact.html'))
self.links.append(('get in contact', '../../contact.html'))

View File

@@ -123,14 +123,14 @@ command line. Using the ``--pdb`` option you can automatically activate
a PDB `Python debugger`_ when a test fails.
advanced skipping of tests
-------------------------------
======================================
py.test has builtin support for skipping tests or expecting
py.test has `advanced support for skipping tests`_ or expecting
failures on tests on certain platforms. Apart from the
minimal py.test style also unittest- and nose-style tests
can make use of this feature.
.. _`advanced support for skipping tests`: plugin/skipping.html
.. _`funcargs mechanism`: funcargs.html
.. _`unittest.py`: http://docs.python.org/library/unittest.html
.. _`doctest.py`: http://docs.python.org/library/doctest.html

View File

@@ -2,7 +2,7 @@
plugins for Python test functions
=================================
skipping_ mark python test functions, classes or modules for conditional
skipping_ advanced conditional skipping for python test functions, classes or modules.
figleaf_ write and report coverage data with 'figleaf'.

View File

@@ -1,38 +1,38 @@
.. _`helpconfig`: helpconfig.html
.. _`terminal`: terminal.html
.. _`pytest_recwarn.py`: http://bitbucket.org/hpk42/py-trunk/raw/trunk/py/test/plugin/pytest_recwarn.py
.. _`pytest_recwarn.py`: http://bitbucket.org/hpk42/py-trunk/raw/trunk/_py/test/plugin/pytest_recwarn.py
.. _`unittest`: unittest.html
.. _`pytest_monkeypatch.py`: http://bitbucket.org/hpk42/py-trunk/raw/trunk/py/test/plugin/pytest_monkeypatch.py
.. _`pytest_keyword.py`: http://bitbucket.org/hpk42/py-trunk/raw/trunk/py/test/plugin/pytest_keyword.py
.. _`pytest_monkeypatch.py`: http://bitbucket.org/hpk42/py-trunk/raw/trunk/_py/test/plugin/pytest_monkeypatch.py
.. _`pytest_keyword.py`: http://bitbucket.org/hpk42/py-trunk/raw/trunk/_py/test/plugin/pytest_keyword.py
.. _`pastebin`: pastebin.html
.. _`skipping`: skipping.html
.. _`plugins`: index.html
.. _`pytest_doctest.py`: http://bitbucket.org/hpk42/py-trunk/raw/trunk/py/test/plugin/pytest_doctest.py
.. _`pytest_doctest.py`: http://bitbucket.org/hpk42/py-trunk/raw/trunk/_py/test/plugin/pytest_doctest.py
.. _`capture`: capture.html
.. _`pytest_nose.py`: http://bitbucket.org/hpk42/py-trunk/raw/trunk/py/test/plugin/pytest_nose.py
.. _`pytest_restdoc.py`: http://bitbucket.org/hpk42/py-trunk/raw/trunk/py/test/plugin/pytest_restdoc.py
.. _`pytest_nose.py`: http://bitbucket.org/hpk42/py-trunk/raw/trunk/_py/test/plugin/pytest_nose.py
.. _`pytest_restdoc.py`: http://bitbucket.org/hpk42/py-trunk/raw/trunk/_py/test/plugin/pytest_restdoc.py
.. _`restdoc`: restdoc.html
.. _`pytest_pastebin.py`: http://bitbucket.org/hpk42/py-trunk/raw/trunk/py/test/plugin/pytest_pastebin.py
.. _`pytest_figleaf.py`: http://bitbucket.org/hpk42/py-trunk/raw/trunk/py/test/plugin/pytest_figleaf.py
.. _`pytest_hooklog.py`: http://bitbucket.org/hpk42/py-trunk/raw/trunk/py/test/plugin/pytest_hooklog.py
.. _`pytest_skipping.py`: http://bitbucket.org/hpk42/py-trunk/raw/trunk/py/test/plugin/pytest_skipping.py
.. _`pytest_pastebin.py`: http://bitbucket.org/hpk42/py-trunk/raw/trunk/_py/test/plugin/pytest_pastebin.py
.. _`pytest_figleaf.py`: http://bitbucket.org/hpk42/py-trunk/raw/trunk/_py/test/plugin/pytest_figleaf.py
.. _`pytest_hooklog.py`: http://bitbucket.org/hpk42/py-trunk/raw/trunk/_py/test/plugin/pytest_hooklog.py
.. _`pytest_skipping.py`: http://bitbucket.org/hpk42/py-trunk/raw/trunk/_py/test/plugin/pytest_skipping.py
.. _`checkout the py.test development version`: ../../download.html#checkout
.. _`pytest_helpconfig.py`: http://bitbucket.org/hpk42/py-trunk/raw/trunk/py/test/plugin/pytest_helpconfig.py
.. _`pytest_helpconfig.py`: http://bitbucket.org/hpk42/py-trunk/raw/trunk/_py/test/plugin/pytest_helpconfig.py
.. _`oejskit`: oejskit.html
.. _`doctest`: doctest.html
.. _`get in contact`: ../../contact.html
.. _`pytest_capture.py`: http://bitbucket.org/hpk42/py-trunk/raw/trunk/py/test/plugin/pytest_capture.py
.. _`pytest_capture.py`: http://bitbucket.org/hpk42/py-trunk/raw/trunk/_py/test/plugin/pytest_capture.py
.. _`figleaf`: figleaf.html
.. _`customize`: ../customize.html
.. _`hooklog`: hooklog.html
.. _`pytest_terminal.py`: http://bitbucket.org/hpk42/py-trunk/raw/trunk/py/test/plugin/pytest_terminal.py
.. _`pytest_terminal.py`: http://bitbucket.org/hpk42/py-trunk/raw/trunk/_py/test/plugin/pytest_terminal.py
.. _`recwarn`: recwarn.html
.. _`pytest_pdb.py`: http://bitbucket.org/hpk42/py-trunk/raw/trunk/py/test/plugin/pytest_pdb.py
.. _`pytest_pdb.py`: http://bitbucket.org/hpk42/py-trunk/raw/trunk/_py/test/plugin/pytest_pdb.py
.. _`monkeypatch`: monkeypatch.html
.. _`resultlog`: resultlog.html
.. _`keyword`: keyword.html
.. _`django`: django.html
.. _`pytest_unittest.py`: http://bitbucket.org/hpk42/py-trunk/raw/trunk/py/test/plugin/pytest_unittest.py
.. _`pytest_unittest.py`: http://bitbucket.org/hpk42/py-trunk/raw/trunk/_py/test/plugin/pytest_unittest.py
.. _`nose`: nose.html
.. _`pytest_resultlog.py`: http://bitbucket.org/hpk42/py-trunk/raw/trunk/py/test/plugin/pytest_resultlog.py
.. _`pytest_resultlog.py`: http://bitbucket.org/hpk42/py-trunk/raw/trunk/_py/test/plugin/pytest_resultlog.py
.. _`pdb`: pdb.html

View File

@@ -2,19 +2,17 @@
pytest_skipping plugin
======================
mark python test functions, classes or modules for conditional
advanced conditional skipping for python test functions, classes or modules.
.. contents::
   :local:
skipping (skipif) or as expected-to-fail (xfail). Both declarations
lead to special reporting and both can be systematically associated
with functions, whole classes or modules. The difference between
the two is that 'xfail' will still execute test functions
but it will revert the outcome. A passing test is now
a failure and failing test is expected. All skip conditions
are reported at the end of test run through the terminal
reporter.
You can mark functions, classes or modules for conditional
skipping (skipif) or as expected-to-fail (xfail). The difference
between the two is that 'xfail' will still execute test functions
but it will invert the outcome: a passing test becomes a failure and
a failing test is a semi-passing one. All skip conditions are
reported at the end of the test run through the terminal reporter.
.. _skipif:
@@ -27,15 +25,18 @@ Here is an example for skipping a test function on Python3::

    def test_function():
        ...

Conditions are specified as python expressions
and can access the ``sys`` module. They can also
access the config object and thus depend on command
line or conftest options::
The 'skipif' marker accepts an **arbitrary python expression**
as a condition. When setting up the test function the condition
is evaluated by calling ``eval(expr, namespace)``. The namespace
contains the ``sys`` and ``os`` modules as well as the
test ``config`` object. The latter allows you to skip based
on a test configuration value, for example::

    @py.test.mark.skipif("config.getvalue('db') is None")
    @py.test.mark.skipif("not config.getvalue('db')")
    def test_function(...):
        ...
conditionally mark a function as "expected to fail"
-------------------------------------------------------
@@ -60,7 +61,7 @@ skip/xfail a whole test class or module
-------------------------------------------
Instead of marking single functions you can skip
a whole class of tests when runnign on a specific
a whole class of tests when running on a specific
platform::
    class TestSomething:
@@ -82,13 +83,12 @@ You can use a helper to skip on a failing import::
You can use this helper at module level or within
a test or setup function.
You can aslo skip if a library does not have the right version::
You can also skip if a library does not come with a high enough version::
    docutils = py.test.importorskip("docutils", minversion="0.3")
The version will be read from the specified module's ``__version__`` attribute.
dynamically skip from within a test or setup
-------------------------------------------------

View File

@@ -53,6 +53,7 @@ _py.apipkg.initpkg(__name__, dict(
        '_PluginManager' : '_py.test.pluginmanager:PluginManager',
        'raises' : '_py.test.outcome:raises',
        'skip' : '_py.test.outcome:skip',
        'importorskip' : '_py.test.outcome:importorskip',
        'fail' : '_py.test.outcome:fail',
        'exit' : '_py.test.outcome:exit',
        # configuration/initialization related test api
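
A minimal usage sketch of the now-static export (mirroring the assertions in the tests at the end of this changeset): the function resolves as a plain py.test attribute without the skipping plugin being loaded first, which is what makes it usable for conditional plugin loading:

import py
sysmod = py.test.importorskip("sys")   # resolves via _py.test.outcome:importorskip
assert sysmod is py.std.sys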

View File

@@ -135,11 +135,7 @@ def test_assert_with_brokenrepr_arg():

class TestView:
    def setup_class(cls):
        try:
            from _py.code._assertionold import View
        except ImportError:
            py.test.skip("requires the compile package")
        cls.View = View
        cls.View = py.test.importorskip("_py.code._assertionold").View

    def test_class_dispatch(self):
        ### Use a custom class hierarchy with existing instances

View File

@@ -191,9 +191,8 @@ class TestSourceParsingAndCompiling:
        assert len(source) == 9
        assert source.getstatementrange(5) == (0, 9)

    @py.test.mark.skipif("sys.version_info < (2,6)")
    def test_compile_to_ast(self):
        if sys.version_info < (2, 6):
            py.test.skip("requires Python 2.6")
        import ast
        source = Source("x = 4")
        mod = source.compile(flag=ast.PyCF_ONLY_AST)

@@ -257,7 +256,6 @@ def test_getstartingblock_multiline():
    assert len(l) == 4

def test_getline_finally():
    #py.test.skip("inner statements cannot be located yet.")
    def c(): pass
    excinfo = py.test.raises(TypeError, """
        teardown = None

View File

@@ -2,13 +2,6 @@ import py
import os, sys
from _py.io import terminalwriter

def skip_win32():
    if sys.platform == 'win32':
        py.test.skip('Not relevant on win32')

import os
import py

def test_terminal_width_COLUMNS(monkeypatch):
    """ Dummy test for get_terminal_width
    """

@@ -82,14 +75,14 @@ class BaseTests:
        assert len(l) == 1
        assert l[0] == "-" * 26 + " hello " + "-" * 27 + "\n"

    @py.test.mark.skipif("sys.platform == 'win32'")
    def test__escaped(self):
        skip_win32()
        tw = self.getwriter()
        text2 = tw._escaped("hello", (31))
        assert text2.find("hello") != -1

    @py.test.mark.skipif("sys.platform == 'win32'")
    def test_markup(self):
        skip_win32()
        tw = self.getwriter()
        for bold in (True, False):
            for color in ("red", "green"):

@@ -104,9 +97,9 @@
        tw.line("x", bold=True)
        tw.write("x\n", red=True)
        l = self.getlines()
        skip_win32()
        assert len(l[0]) > 2, l
        assert len(l[1]) > 2, l
        if sys.platform != "win32":
            assert len(l[0]) > 2, l
            assert len(l[1]) > 2, l

    def test_attr_fullwidth(self):
        tw = self.getwriter()

View File

@@ -1,9 +1,6 @@
import py, sys, os

def setup_module(mod):
    if not hasattr(os, 'fork'):
        py.test.skip("forkedfunc requires os.fork")
    mod.tmpdir = py.test.ensuretemp(mod.__file__)

skipif = "not hasattr(os, 'fork')"

def test_waitfinish_removes_tempdir():
    ff = py.process.ForkedFunc(boxf1)

@@ -56,7 +53,7 @@ def test_forkedfunc_on_fds():
def test_forkedfunc_signal():
    result = py.process.ForkedFunc(boxseg).waitfinish()
    assert result.retval is None
    if py.std.sys.version_info < (2,4):
    if sys.version_info < (2,4):
        py.test.skip("signal detection does not work with python prior 2.4")
    assert result.signal == 11

View File

@@ -13,5 +13,4 @@ def test_kill():
    if sys.platform == "win32" and ret == 0:
        py.test.skip("XXX on win32, subprocess.Popen().wait() on a killed "
                     "process does not yield return value != 0")
    assert ret != 0

View File

@@ -218,9 +218,8 @@ class TestExecutionNonForked(BaseFunctionalTests):
            py.test.fail("did not raise")

class TestExecutionForked(BaseFunctionalTests):
    skipif = "not hasattr(os, 'fork')"

    def getrunner(self):
        if not hasattr(py.std.os, 'fork'):
            py.test.skip("no os.fork available")
        return runner.forked_run_report

    def test_suicide(self, testdir):

@@ -262,10 +261,8 @@ class TestCollectionReports:
        assert not rep.passed
        assert rep.skipped

@py.test.mark.skipif("not hasattr(os, 'fork')")
def test_functional_boxed(testdir):
    if not hasattr(py.std.os, 'fork'):
        py.test.skip("needs os.fork")
    p1 = testdir.makepyfile("""
        import os
        def test_function():

View File

@@ -21,6 +21,21 @@ def test_xfail_decorator(testdir):
    ])
    assert result.ret == 1

def test_xfail_at_module(testdir):
    p = testdir.makepyfile("""
        xfail = 'True'
        def test_intentional_xfail():
            assert 0
    """)
    result = testdir.runpytest(p)
    extra = result.stdout.fnmatch_lines([
        "*expected failures*",
        "*test_intentional_xfail*:4*",
        "*1 xfailed*"
    ])
    assert result.ret == 0
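
The commit message also mentions the class level; by analogy with the module-level test above (an assumed spelling, not covered by a test in this diff), that would look like:

class TestSomething:
    xfail = "True"              # evaluated like the module-level attribute above
    def test_reported_as_xfail(self):
        assert 0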
def test_skipif_decorator(testdir):
    p = testdir.makepyfile("""
        import py
@@ -84,26 +99,3 @@ def test_evalexpression_cls_config_example(testdir):
    x, y = evalexpression(item, 'skipif')
    assert x == 'config._hackxyz'
    assert y == 3

def test_importorskip():
    from _py.test.outcome import Skipped
    from _py.test.plugin.pytest_skipping import importorskip
    assert importorskip == py.test.importorskip
    try:
        sys = importorskip("sys")
        assert sys == py.std.sys
        #path = py.test.importorskip("os.path")
        #assert path == py.std.os.path
        py.test.raises(Skipped, "py.test.importorskip('alskdj')")
        py.test.raises(SyntaxError, "py.test.importorskip('x y z')")
        py.test.raises(SyntaxError, "py.test.importorskip('x=y')")
        path = importorskip("py", minversion=".".join(py.__version__))
        mod = py.std.types.ModuleType("hello123")
        mod.__version__ = "1.3"
        py.test.raises(Skipped, """
            py.test.importorskip("hello123", minversion="5.0")
        """)
    except Skipped:
        print(py.code.ExceptionInfo())
        py.test.fail("spurious skip")

View File

@@ -29,3 +29,30 @@ def test_exception_printing_skip():
    excinfo = py.code.ExceptionInfo()
    s = excinfo.exconly(tryshort=True)
    assert s.startswith("Skipped")

def test_importorskip():
    from _py.test.outcome import Skipped, importorskip
    assert importorskip == py.test.importorskip
    try:
        sys = importorskip("sys")
        assert sys == py.std.sys
        #path = py.test.importorskip("os.path")
        #assert path == py.std.os.path
        py.test.raises(Skipped, "py.test.importorskip('alskdj')")
        py.test.raises(SyntaxError, "py.test.importorskip('x y z')")
        py.test.raises(SyntaxError, "py.test.importorskip('x=y')")
        path = importorskip("py", minversion=".".join(py.__version__))
        mod = py.std.types.ModuleType("hello123")
        mod.__version__ = "1.3"
        py.test.raises(Skipped, """
            py.test.importorskip("hello123", minversion="5.0")
        """)
    except Skipped:
        print(py.code.ExceptionInfo())
        py.test.fail("spurious skip")

def test_importorskip_imports_last_module_part():
    import os
    ospath = py.test.importorskip("os.path")
    assert os.path == ospath