document and refine py.test.fail helper and strike superfluous ExceptionFailure class

refine builtin organisation and start a new doc
This commit is contained in:
holger krekel 2010-11-05 23:37:31 +01:00
parent 49319ba729
commit b1e4301457
16 changed files with 286 additions and 332 deletions

View File

@@ -6,9 +6,10 @@ py.test reference documentation
.. toctree::
:maxdepth: 2
builtin.txt
customize.txt
assert.txt
assert.txt
funcargs.txt
xunit_setup.txt
capture.txt
@@ -16,7 +17,7 @@ py.test reference documentation
tmpdir.txt
skipping.txt
mark.txt
recwarn.txt
recwarn.txt
unittest.txt
doctest.txt

70
doc/builtin.txt Normal file
View File

@@ -0,0 +1,70 @@
pytest builtin helpers
================================================
builtin function arguments
-----------------------------------------------------
You can ask for available builtin or project-custom
:ref:`function arguments` by typing::
$ py.test --funcargs
pytestconfig
the pytest config object with access to command line opts.
capsys
captures writes to sys.stdout/sys.stderr and makes
them available successively via a ``capsys.readouterr()`` method
which returns a ``(out, err)`` tuple of captured snapshot strings.
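For example, a small sketch of asserting on printed output (the test
name is illustrative)::

    def test_greeting_output(capsys):
        print ("hello")
        out, err = capsys.readouterr()
        assert out.startswith("hello")
        assert err == ""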
capfd
captures writes to file descriptors 1 and 2 and makes
snapshotted ``(out, err)`` string tuples available
via the ``capfd.readouterr()`` method. If the underlying
platform does not have ``os.dup`` (e.g. Jython) tests using
this funcarg will automatically skip.
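A sketch of checking output written at the file descriptor level
(test name illustrative)::

    import os

    def test_fd_output(capfd):
        # write straight to file descriptor 1, bypassing sys.stdout
        os.write(1, "42\n".encode("ascii"))
        out, err = capfd.readouterr()
        assert out.startswith("42")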
tmpdir
return a temporary directory path object
unique to each test function invocation,
created as a subdirectory of the base temporary
directory. The returned object is a `py.path.local`_
path object.
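A small usage sketch with the ``py.path.local`` API (the file name is
illustrative)::

    def test_create_file(tmpdir):
        p = tmpdir.join("hello.txt")
        p.write("content")
        assert p.read() == "content"
        assert len(tmpdir.listdir()) == 1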
monkeypatch
The returned ``monkeypatch`` funcarg provides these
helper methods to modify objects, dictionaries or os.environ::
monkeypatch.setattr(obj, name, value, raising=True)
monkeypatch.delattr(obj, name, raising=True)
monkeypatch.setitem(mapping, name, value)
monkeypatch.delitem(obj, name, raising=True)
monkeypatch.setenv(name, value, prepend=False)
monkeypatch.delenv(name, raising=True)
monkeypatch.syspath_prepend(path)
All modifications will be undone when the requesting
test function finishes its execution. The ``raising``
parameter determines if a KeyError or AttributeError
will be raised if the set/deletion operation has no target.
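A minimal sketch, assuming a test that depends on an environment
variable (the variable and value are illustrative)::

    import os

    def test_patched_env(monkeypatch):
        monkeypatch.setenv("USER", "testuser")
        assert os.environ["USER"] == "testuser"
        # the modification is undone automatically after this test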
recwarn
Return a WarningsRecorder instance that provides these methods:
* ``pop(category=None)``: return last warning matching the category.
* ``clear()``: clear list of warnings
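A sketch of checking a deprecation warning (names illustrative)::

    import warnings

    def test_deprecation_warning(recwarn):
        warnings.warn("use new_f() instead", DeprecationWarning)
        w = recwarn.pop(DeprecationWarning)
        assert issubclass(w.category, DeprecationWarning)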
builtin py.test.* helpers
-----------------------------------------------------
You can always use an interactive Python prompt and type::
import pytest
help(pytest)
to get an overview of the globally available helpers.
.. automodule:: pytest
:members:

36
doc/example/builtin.txt Normal file
View File

@@ -0,0 +1,36 @@
writing well integrated assertion helpers
========================================================
If you have a test helper function called from a test, you can
use the ``pytest.fail`` builtin to cleanly fail the test with a message.
The helper function itself will never show up in the traceback.
Example::
# content of test_checkconfig.py
import pytest
def checkconfig(x):
__tracebackhide__ = True
if not hasattr(x, "config"):
pytest.fail("not configured: %s" %(x,))
def test_something():
checkconfig(42)
The ``__tracebackhide__`` setting influences how py.test shows
tracebacks: the ``checkconfig`` function will not appear in them
unless the ``--fulltrace`` command line option is specified.
Let's run our little test module::
$ py.test -q
F
================================= FAILURES =================================
______________________________ test_something ______________________________
def test_something():
> checkconfig(42)
E Failed: not configured: 42
test_checkconfig.py:8: Failed
1 failed in 0.02 seconds
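Because ``pytest.fail`` raises an exception, framework-level code can
also catch it explicitly through the ``pytest.fail.Exception``
attribute. A minimal sketch (the wrapper function below is
illustrative, not part of py.test)::

    import pytest

    def call_checker(func, arg):
        # run a checking helper and detect an explicit test failure
        try:
            func(arg)
        except pytest.fail.Exception:
            return "helper signalled failure"
        return "ok"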

View File

@@ -7,6 +7,7 @@ Usages and Examples
.. toctree::
:maxdepth: 2
builtin.txt
pythoncollection.txt
controlskip.txt
mysetup.txt

View File

@@ -0,0 +1,4 @@
[pytest]
testfilepatterns =
${topdir}/tests/unit/test_${basename}
${topdir}/tests/functional/*.py

View File

@@ -1,5 +1,5 @@
"""
py.test / pytest API for unit and functional testing with Python.
unit and functional testing with Python.
see http://pytest.org for documentation and details
@@ -13,4 +13,4 @@ from pytest import _core as cmdline
UsageError = cmdline.UsageError
def __main__():
raise SystemExit(cmdline.main())
raise SystemExit(cmdline.main())

View File

@@ -7,7 +7,7 @@ assert py.__version__.split(".")[:2] >= ['2', '0'], ("installation problem: "
"%s is too old, remove or upgrade 'py'" % (py.__version__))
default_plugins = (
"config session terminal python runner pdb capture unittest mark skipping "
"config session terminal runner python pdb capture unittest mark skipping "
"tmpdir monkeypatch recwarn pastebin helpconfig nose assertion genscript "
"junitxml doctest").split()

View File

@@ -7,7 +7,6 @@ import sys
import pytest
from py._code.code import TerminalRepr
import pytest
cutdir = py.path.local(pytest.__file__).dirpath()
@@ -22,11 +21,16 @@ def pytest_cmdline_main(config):
showfuncargs(config)
return 0
def pytest_namespace():
return {'collect': {
def pytest_namespace(__multicall__):
__multicall__.execute()
raises.Exception = pytest.fail.Exception
return {
'raises' : raises,
'collect': {
'Module': Module, 'Class': Class, 'Instance': Instance,
'Function': Function, 'Generator': Generator,
'_fillfuncargs': fillfuncargs}}
'_fillfuncargs': fillfuncargs}
}
def pytest_funcarg__pytestconfig(request):
""" the pytest config object with access to command line opts."""
@@ -300,17 +304,17 @@ class FunctionMixin(PyobjMixin):
if teardown_func_or_meth is not None:
teardown_func_or_meth(self.obj)
def _prunetraceback(self, traceback):
def _prunetraceback(self, excinfo):
if hasattr(self, '_obj') and not self.config.option.fulltrace:
code = py.code.Code(self.obj)
path, firstlineno = code.path, code.firstlineno
traceback = excinfo.traceback
ntraceback = traceback.cut(path=path, firstlineno=firstlineno)
if ntraceback == traceback:
ntraceback = ntraceback.cut(path=path)
if ntraceback == traceback:
ntraceback = ntraceback.cut(excludepath=cutdir)
traceback = ntraceback.filter()
return traceback
excinfo.traceback = ntraceback.filter()
def _repr_failure_py(self, excinfo, style="long"):
if excinfo.errisinstance(FuncargRequest.LookupError):
@@ -746,3 +750,71 @@ def getlocation(function, curdir):
if fn.relto(curdir):
fn = fn.relto(curdir)
return "%s:%d" %(fn, lineno+1)
# builtin pytest.raises helper
def raises(ExpectedException, *args, **kwargs):
""" assert that a code block/function call raises an exception.
If using Python 2.5 or above, you may use this function as a
context manager::
>>> with raises(ZeroDivisionError):
... 1/0
Or you can use one of two forms:
if args[0] is callable: raise AssertionError if calling it with
the remaining arguments does not raise the expected exception.
if args[0] is a string: raise AssertionError if executing
the string in the calling scope does not raise the expected exception.
examples:
>>> x = 5
>>> raises(TypeError, lambda x: x + 'hello', x=x)
>>> raises(TypeError, "x + 'hello'")
"""
__tracebackhide__ = True
if not args:
return RaisesContext(ExpectedException)
elif isinstance(args[0], str):
code, = args
assert isinstance(code, str)
frame = sys._getframe(1)
loc = frame.f_locals.copy()
loc.update(kwargs)
#print "raises frame scope: %r" % frame.f_locals
try:
code = py.code.Source(code).compile()
py.builtin.exec_(code, frame.f_globals, loc)
# XXX didn't f_globals == f_locals mean something special?
# this is destroyed here ...
except ExpectedException:
return py.code.ExceptionInfo()
else:
func = args[0]
try:
func(*args[1:], **kwargs)
except ExpectedException:
return py.code.ExceptionInfo()
k = ", ".join(["%s=%r" % x for x in kwargs.items()])
if k:
k = ', ' + k
expr = '%s(%r%s)' %(getattr(func, '__name__', func), args, k)
pytest.fail("DID NOT RAISE")
class RaisesContext(object):
def __init__(self, ExpectedException):
self.ExpectedException = ExpectedException
self.excinfo = None
def __enter__(self):
self.excinfo = object.__new__(py.code.ExceptionInfo)
return self.excinfo
def __exit__(self, *tp):
__tracebackhide__ = True
if tp[0] is None:
pytest.fail("DID NOT RAISE")
self.excinfo.__init__(tp)
return issubclass(self.excinfo.type, self.ExpectedException)

View File

@@ -7,11 +7,9 @@ from py._code.code import TerminalRepr
def pytest_namespace():
return {
'raises' : raises,
'fail' : fail,
'skip' : skip,
'importorskip' : importorskip,
'fail' : fail,
'xfail' : xfail,
'exit' : exit,
}
@@ -337,13 +335,12 @@ class OutcomeException(Exception):
""" OutcomeException and its subclass instances indicate and
contain info about test and collection outcomes.
"""
def __init__(self, msg=None, excinfo=None):
def __init__(self, msg=None):
self.msg = msg
self.excinfo = excinfo
def __repr__(self):
if self.msg:
return repr(self.msg)
return str(self.msg)
return "<%s instance>" %(self.__class__.__name__,)
__str__ = __repr__
@@ -356,19 +353,8 @@ class Failed(OutcomeException):
""" raised from an explicit call to py.test.fail() """
__module__ = 'builtins'
class XFailed(OutcomeException):
""" raised from an explicit call to py.test.xfail() """
__module__ = 'builtins'
class ExceptionFailure(Failed):
""" raised by py.test.raises on an exception-assertion mismatch. """
def __init__(self, expr, expected, msg=None, excinfo=None):
Failed.__init__(self, msg=msg, excinfo=excinfo)
self.expr = expr
self.expected = expected
class Exit(KeyboardInterrupt):
""" raised by py.test.exit for immediate program exits without tracebacks and reporter/summary. """
""" raised for immediate program exits (no tracebacks/summaries)"""
def __init__(self, msg="unknown reason"):
self.msg = msg
KeyboardInterrupt.__init__(self, msg)
@@ -384,103 +370,20 @@ exit.Exception = Exit
def skip(msg=""):
""" skip an executing test with the given message. Note: it's usually
better use the py.test.mark.skipif marker to declare a test to be
better to use the py.test.mark.skipif marker to declare a test to be
skipped under certain conditions like mismatching platforms or
dependencies. See the pytest_skipping plugin for details.
"""
__tracebackhide__ = True
raise Skipped(msg=msg)
skip.Exception = Skipped
def fail(msg=""):
""" explicitly fail the currently-executing test with the given message. """
__tracebackhide__ = True
raise Failed(msg=msg)
fail.Exception = Failed
def xfail(reason=""):
""" xfail an executing test or setup functions, taking an optional
reason string.
"""
__tracebackhide__ = True
raise XFailed(reason)
xfail.Exception = XFailed
def raises(ExpectedException, *args, **kwargs):
""" assert that a code block/function call raises an exception.
If using Python 2.5 or above, you may use this function as a
context manager::
>>> with raises(ZeroDivisionError):
... 1/0
Or you can use one of two forms:
if args[0] is callable: raise AssertionError if calling it with
the remaining arguments does not raise the expected exception.
if args[0] is a string: raise AssertionError if executing
the string in the calling scope does not raise the expected exception.
examples:
>>> x = 5
>>> raises(TypeError, lambda x: x + 'hello', x=x)
>>> raises(TypeError, "x + 'hello'")
"""
__tracebackhide__ = True
if not args:
return RaisesContext(ExpectedException)
elif isinstance(args[0], str):
code, = args
assert isinstance(code, str)
frame = sys._getframe(1)
loc = frame.f_locals.copy()
loc.update(kwargs)
#print "raises frame scope: %r" % frame.f_locals
try:
code = py.code.Source(code).compile()
py.builtin.exec_(code, frame.f_globals, loc)
# XXX didn't f_globals == f_locals mean something special?
# this is destroyed here ...
except ExpectedException:
return py.code.ExceptionInfo()
else:
func = args[0]
try:
func(*args[1:], **kwargs)
except ExpectedException:
return py.code.ExceptionInfo()
k = ", ".join(["%s=%r" % x for x in kwargs.items()])
if k:
k = ', ' + k
expr = '%s(%r%s)' %(getattr(func, '__name__', func), args, k)
raise ExceptionFailure(msg="DID NOT RAISE",
expr=args, expected=ExpectedException)
class RaisesContext(object):
def __init__(self, ExpectedException):
self.ExpectedException = ExpectedException
self.excinfo = None
def __enter__(self):
self.excinfo = object.__new__(py.code.ExceptionInfo)
return self.excinfo
def __exit__(self, *tp):
__tracebackhide__ = True
if tp[0] is None:
raise ExceptionFailure(msg="DID NOT RAISE",
expr=(),
expected=self.ExpectedException)
self.excinfo.__init__(tp)
return issubclass(self.excinfo.type, self.ExpectedException)
raises.Exception = ExceptionFailure
def importorskip(modname, minversion=None):
""" return imported module if it has a higher __version__ than the
@@ -503,5 +406,3 @@ def importorskip(modname, minversion=None):
py.test.skip("module %r has __version__ %r, required is: %r" %(
modname, verattr, minversion))
return mod

View File

@@ -7,6 +7,7 @@
import py
import pytest
import os, sys
tracebackcutdir = py.path.local(pytest.__file__).dirpath()
# exitcodes for the command line
EXIT_OK = 0
@@ -403,14 +404,14 @@ class Node(object):
current = current.parent
return current
def _prunetraceback(self, traceback):
return traceback
def _prunetraceback(self, excinfo):
pass
def _repr_failure_py(self, excinfo, style=None):
if self.config.option.fulltrace:
style="long"
else:
excinfo.traceback = self._prunetraceback(excinfo.traceback)
self._prunetraceback(excinfo)
# XXX should excinfo.getrepr record all data and toterminal()
# process it?
if style is None:
@@ -448,14 +449,14 @@ class Collector(Node):
""" internal helper method to cache results of calling collect(). """
return self._memoizedcall('_collected', self.collect)
def _prunetraceback(self, traceback):
def _prunetraceback(self, excinfo):
if hasattr(self, 'fspath'):
path = self.fspath
traceback = excinfo.traceback
ntraceback = traceback.cut(path=self.fspath)
if ntraceback == traceback:
ntraceback = ntraceback.cut(excludepath=py._pydir)
traceback = ntraceback.filter()
return traceback
ntraceback = ntraceback.cut(excludepath=tracebackcutdir)
excinfo.traceback = ntraceback.filter()
class FSCollector(Collector):
def __init__(self, fspath, parent=None, config=None, collection=None):

View File

@@ -1,154 +1,8 @@
"""
advanced skipping for python test functions, classes or modules.
With this plugin you can mark test functions for conditional skipping
or as "xfail", expected-to-fail. Skipping a test will avoid running it
while xfail-marked tests will run and result in an inverted outcome:
a pass becomes a failure and a fail becomes a semi-passing one.
The need for skipping a test is usually connected to a condition.
If a test fails under all conditions then it's probably better
to mark your test as 'xfail'.
By passing ``-rxs`` to the terminal reporter you will see extra
summary information on skips and xfail-run tests at the end of a test run.
.. _skipif:
Skipping a single function
-------------------------------------------
Here is an example for marking a test function to be skipped
when run on a Python3 interpreter::
@py.test.mark.skipif("sys.version_info >= (3,0)")
def test_function():
...
During test function setup the skipif condition is
evaluated by calling ``eval(expr, namespace)``. The namespace
contains the ``sys`` and ``os`` modules and the test
``config`` object. The latter allows you to skip based
on a test configuration value e.g. like this::
@py.test.mark.skipif("not config.getvalue('db')")
def test_function(...):
...
Create a shortcut for your conditional skip decorator
at module level like this::
win32only = py.test.mark.skipif("sys.platform != 'win32'")
@win32only
def test_function():
...
skip groups of test functions
--------------------------------------
As with all metadata function marking you can do it at
`whole class- or module level`_. Here is an example
for skipping all methods of a test class based on platform::
class TestPosixCalls:
pytestmark = py.test.mark.skipif("sys.platform == 'win32'")
def test_function(self):
# will not be setup or run under 'win32' platform
#
The ``pytestmark`` decorator will be applied to each test function.
If your code targets python2.6 or above you can equivalently use
the skipif decorator on classes::
@py.test.mark.skipif("sys.platform == 'win32'")
class TestPosixCalls:
def test_function(self):
# will not be setup or run under 'win32' platform
#
It is fine in general to apply multiple "skipif" decorators
on a single function - this means that if any of the conditions
apply the function will be skipped.
.. _`whole class- or module level`: mark.html#scoped-marking
.. _xfail:
mark a test function as **expected to fail**
-------------------------------------------------------
You can use the ``xfail`` marker to indicate that you
expect the test to fail::
@py.test.mark.xfail
def test_function():
...
This test will be run but no traceback will be reported
when it fails. Instead terminal reporting will list it in the
"expected to fail" or "unexpectedly passing" sections.
Same as with skipif_ you can also selectively expect a failure
depending on platform::
@py.test.mark.xfail("sys.version_info >= (3,0)")
def test_function():
...
To not run a test and still regard it as "xfailed"::
@py.test.mark.xfail(..., run=False)
To specify an explicit reason to be shown with xfailure detail::
@py.test.mark.xfail(..., reason="my reason")
imperative xfail from within a test or setup function
------------------------------------------------------
If you cannot declare xfail-conditions at import time
you can also imperatively produce an XFail-outcome from
within test or setup code. Example::
def test_function():
if not valid_config():
py.test.xfail("unsupported configuration")
skipping on a missing import dependency
--------------------------------------------------
You can use the following import helper at module level
or within a test or test setup function::
docutils = py.test.importorskip("docutils")
If ``docutils`` cannot be imported here, this will lead to a
skip outcome of the test. You can also skip if a library
does not come with a high enough version::
docutils = py.test.importorskip("docutils", minversion="0.3")
The version will be read from the specified module's ``__version__`` attribute.
imperative skip from within a test or setup function
------------------------------------------------------
If for some reason you cannot declare skip-conditions
you can also imperatively produce a Skip-outcome from
within test or setup code. Example::
def test_function():
if not valid_config():
py.test.skip("unsupported configuration")
plugin providing skip and xfail functionality.
"""
import py
import py, pytest
def pytest_addoption(parser):
group = parser.getgroup("general")
@@ -156,6 +10,18 @@ def pytest_addoption(parser):
action="store_true", dest="runxfail", default=False,
help="run tests even if they are marked xfail")
def pytest_namespace():
return dict(xfail=xfail)
class XFailed(pytest.fail.Exception):
""" raised from an explicit call to py.test.xfail() """
def xfail(reason=""):
""" xfail an executing test or setup function with the given reason."""
__tracebackhide__ = True
raise XFailed(reason)
xfail.Exception = XFailed
class MarkEvaluator:
def __init__(self, item, name):
self.item = item

View File

@@ -1,4 +1,4 @@
import py, sys
import pytest, py, sys
from pytest.plugin import python as funcargs
class TestModule:
@@ -1118,3 +1118,60 @@ def test_show_funcarg(testdir):
"*temporary directory*",
]
)
class TestRaises:
def test_raises(self):
source = "int('qwe')"
excinfo = py.test.raises(ValueError, source)
code = excinfo.traceback[-1].frame.code
s = str(code.fullsource)
assert s == source
def test_raises_exec(self):
py.test.raises(ValueError, "a,x = []")
def test_raises_syntax_error(self):
py.test.raises(SyntaxError, "qwe qwe qwe")
def test_raises_function(self):
py.test.raises(ValueError, int, 'hello')
def test_raises_callable_no_exception(self):
class A:
def __call__(self):
pass
try:
py.test.raises(ValueError, A())
except py.test.raises.Exception:
pass
@py.test.mark.skipif('sys.version < "2.5"')
def test_raises_as_contextmanager(self, testdir):
testdir.makepyfile("""
from __future__ import with_statement
import py
def test_simple():
with py.test.raises(ZeroDivisionError) as excinfo:
assert isinstance(excinfo, py.code.ExceptionInfo)
1/0
print (excinfo)
assert excinfo.type == ZeroDivisionError
def test_noraise():
with py.test.raises(py.test.raises.Exception):
with py.test.raises(ValueError):
int()
def test_raise_wrong_exception_passes_by():
with py.test.raises(ZeroDivisionError):
with py.test.raises(ValueError):
1/0
""")
result = testdir.runpytest()
result.stdout.fnmatch_lines([
'*3 passed*',
])

View File

@@ -89,7 +89,7 @@ class TestWithFunctionIntegration:
assert lines[0].startswith("S ")
assert lines[0].endswith("test_collection_skip.py")
assert lines[1].startswith(" ")
assert lines[1].endswith("test_collection_skip.py:1: Skipped: 'hello'")
assert lines[1].endswith("test_collection_skip.py:1: Skipped: hello")
lines = self.getresultlog(testdir, fail)
assert lines

View File

@@ -319,61 +319,6 @@ def test_runtest_in_module_ordering(testdir):
"*2 passed*"
])
class TestRaises:
def test_raises(self):
source = "int('qwe')"
excinfo = py.test.raises(ValueError, source)
code = excinfo.traceback[-1].frame.code
s = str(code.fullsource)
assert s == source
def test_raises_exec(self):
py.test.raises(ValueError, "a,x = []")
def test_raises_syntax_error(self):
py.test.raises(SyntaxError, "qwe qwe qwe")
def test_raises_function(self):
py.test.raises(ValueError, int, 'hello')
def test_raises_callable_no_exception(self):
class A:
def __call__(self):
pass
try:
py.test.raises(ValueError, A())
except py.test.raises.Exception:
pass
@py.test.mark.skipif('sys.version < "2.5"')
def test_raises_as_contextmanager(self, testdir):
testdir.makepyfile("""
from __future__ import with_statement
import py
def test_simple():
with py.test.raises(ZeroDivisionError) as excinfo:
assert isinstance(excinfo, py.code.ExceptionInfo)
1/0
print (excinfo)
assert excinfo.type == ZeroDivisionError
def test_noraise():
with py.test.raises(py.test.raises.Exception):
with py.test.raises(ValueError):
int()
def test_raise_wrong_exception_passes_by():
with py.test.raises(ZeroDivisionError):
with py.test.raises(ValueError):
1/0
""")
result = testdir.runpytest()
result.stdout.fnmatch_lines([
'*3 passed*',
])
def test_pytest_exit():
try:

View File

@@ -417,7 +417,7 @@ def test_skipped_reasons_functional(testdir):
result.stdout.fnmatch_lines([
"*test_two.py S",
"*test_one.py ss",
"*SKIP*3*conftest.py:3: 'test'",
"*SKIP*3*conftest.py:3: test",
])
assert result.ret == 0

View File

@@ -195,7 +195,7 @@ class TestCollectonly:
assert len(cols) == 0
linecomp.assert_contains_lines("""
<Module 'test_collectonly_skipped_module.py'>
!!! Skipped: 'nomod' !!!
!!! Skipped: nomod !!!
""")
def test_collectonly_failed_module(self, testdir, linecomp):