Merge pull request #2490 from RonnyPfannschmidt/fix-580

Test Outcomes as BaseException - fix #580
Bruno Oliveira 2017-07-30 17:38:42 -03:00 committed by GitHub
commit e1aed8cb17
13 changed files with 175 additions and 160 deletions

View File

@@ -16,7 +16,7 @@ from _pytest.compat import (
getlocation, getfuncargnames,
safe_getattr,
)
from _pytest.runner import fail
from _pytest.outcomes import fail, TEST_OUTCOME
from _pytest.compat import FuncargnamesCompatAttr
if sys.version_info[:2] == (2, 6):
@@ -126,7 +126,7 @@ def getfixturemarker(obj):
exceptions."""
try:
return getattr(obj, "_pytestfixturefunction", None)
except Exception:
except TEST_OUTCOME:
# some objects raise errors like request (from flask import request)
# we don't expect them to be fixture functions
return None
@@ -816,7 +816,7 @@ def pytest_fixture_setup(fixturedef, request):
my_cache_key = request.param_index
try:
result = call_fixture_func(fixturefunc, request, kwargs)
except Exception:
except TEST_OUTCOME:
fixturedef.cached_result = (None, my_cache_key, sys.exc_info())
raise
fixturedef.cached_result = (result, my_cache_key, None)
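Since OutcomeException now derives from BaseException, the old "except Exception:" here would no longer see a skip or fail raised inside a fixture, and the outcome would never be cached for later requests of the same fixture. A minimal sketch of the caching pattern, using stand-in names rather than the actual pytest internals:

import sys

class OutcomeException(BaseException):
    """Stand-in for _pytest.outcomes.OutcomeException."""

# catches regular errors and outcome exceptions (skip/fail) alike
TEST_OUTCOME = (OutcomeException, Exception)

def call_and_cache(fixturefunc, cache_key, cache):
    # either kind of failure is cached and then re-raised, so a later
    # request for the same fixture replays the recorded outcome instead
    # of silently re-running the fixture function
    try:
        result = fixturefunc()
    except TEST_OUTCOME:
        cache['result'] = (None, cache_key, sys.exc_info())
        raise
    cache['result'] = (result, cache_key, None)
    return result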

View File

@@ -14,7 +14,8 @@ except ImportError:
from UserDict import DictMixin as MappingMixin
from _pytest.config import directory_arg, UsageError, hookimpl
from _pytest.runner import collect_one_node, exit
from _pytest.runner import collect_one_node
from _pytest.outcomes import exit
tracebackcutdir = py.path.local(_pytest.__file__).dirpath()

_pytest/outcomes.py (new file, 140 lines)
View File

@@ -0,0 +1,140 @@
"""
exception classes and constants for handling test outcomes,
as well as the functions that create them
"""
from __future__ import absolute_import, division, print_function
import py
import sys
class OutcomeException(BaseException):
""" OutcomeException and its subclass instances indicate and
contain info about test and collection outcomes.
"""
def __init__(self, msg=None, pytrace=True):
BaseException.__init__(self, msg)
self.msg = msg
self.pytrace = pytrace
def __repr__(self):
if self.msg:
val = self.msg
if isinstance(val, bytes):
val = py._builtin._totext(val, errors='replace')
return val
return "<%s instance>" % (self.__class__.__name__,)
__str__ = __repr__
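# combined tuple of outcome exceptions and regular errors; except clauses
# that must not let a skip/fail slip through catch this instead of Exception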
TEST_OUTCOME = (OutcomeException, Exception)
class Skipped(OutcomeException):
# XXX hackish: on 3k we fake to live in the builtins
# in order to have Skipped exception printing shorter/nicer
__module__ = 'builtins'
def __init__(self, msg=None, pytrace=True, allow_module_level=False):
OutcomeException.__init__(self, msg=msg, pytrace=pytrace)
self.allow_module_level = allow_module_level
class Failed(OutcomeException):
""" raised from an explicit call to pytest.fail() """
__module__ = 'builtins'
class Exit(KeyboardInterrupt):
""" raised for immediate program exits (no tracebacks/summaries)"""
def __init__(self, msg="unknown reason"):
self.msg = msg
KeyboardInterrupt.__init__(self, msg)
# exposed helper methods
def exit(msg):
""" exit testing process as if KeyboardInterrupt was triggered. """
__tracebackhide__ = True
raise Exit(msg)
exit.Exception = Exit
def skip(msg=""):
""" skip an executing test with the given message. Note: it's usually
better to use the pytest.mark.skipif marker to declare a test to be
skipped under certain conditions like mismatching platforms or
dependencies. See the pytest_skipping plugin for details.
"""
__tracebackhide__ = True
raise Skipped(msg=msg)
skip.Exception = Skipped
def fail(msg="", pytrace=True):
""" explicitly fail an currently-executing test with the given Message.
:arg pytrace: if false the msg represents the full failure information
and no python traceback will be reported.
"""
__tracebackhide__ = True
raise Failed(msg=msg, pytrace=pytrace)
fail.Exception = Failed
class XFailed(fail.Exception):
""" raised from an explicit call to pytest.xfail() """
def xfail(reason=""):
""" xfail an executing test or setup functions with the given reason."""
__tracebackhide__ = True
raise XFailed(reason)
xfail.Exception = XFailed
def importorskip(modname, minversion=None):
""" return imported module if it has at least "minversion" as its
__version__ attribute. If no minversion is specified the a skip
is only triggered if the module can not be imported.
"""
import warnings
__tracebackhide__ = True
compile(modname, '', 'eval')  # to catch syntax errors
should_skip = False
with warnings.catch_warnings():
# make sure to ignore ImportWarnings that might happen because
# of existing directories with the same name we're trying to
# import but without a __init__.py file
warnings.simplefilter('ignore')
try:
__import__(modname)
except ImportError:
# Do not raise chained exception here (#1485)
should_skip = True
if should_skip:
raise Skipped("could not import %r" % (modname,), allow_module_level=True)
mod = sys.modules[modname]
if minversion is None:
return mod
verattr = getattr(mod, '__version__', None)
if minversion is not None:
try:
from pkg_resources import parse_version as pv
except ImportError:
raise Skipped("we have a required version for %r but can not import "
"pkg_resources to parse version strings." % (modname,),
allow_module_level=True)
if verattr is None or pv(verattr) < pv(minversion):
raise Skipped("module %r has __version__ %r, required is: %r" % (
modname, verattr, minversion), allow_module_level=True)
return mod
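The key change in this new module is the base class: OutcomeException derives from BaseException instead of Exception (and Exit from KeyboardInterrupt), so a broad "except Exception:" in test or application code can no longer swallow an outcome by accident. A minimal standalone sketch of the resulting behavior, with a stand-in class:

class OutcomeException(BaseException):
    pass

try:
    raise OutcomeException("skipped")
except Exception:
    print("swallowed")              # never reached: not an Exception subclass
except BaseException as exc:
    print("outcome propagates: %s" % exc)  # this branch runs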

View File

@@ -23,7 +23,7 @@ from _pytest.compat import (
get_real_func, getfslineno, safe_getattr,
safe_str, getlocation, enum,
)
from _pytest.runner import fail
from _pytest.outcomes import fail
from _pytest.mark import transfer_markers
cutdir1 = py.path.local(pluggy.__file__.rstrip("oc"))

View File

@@ -4,7 +4,7 @@ import sys
import py
from _pytest.compat import isclass, izip
from _pytest.runner import fail
from _pytest.outcomes import fail
import _pytest._code
# builtin pytest.approx helper

View File

@@ -7,7 +7,9 @@ import _pytest._code
import py
import sys
import warnings
from _pytest.fixtures import yield_fixture
from _pytest.outcomes import fail
@yield_fixture
@@ -197,7 +199,6 @@ class WarningsChecker(WarningsRecorder):
if not any(issubclass(r.category, self.expected_warning)
for r in self):
__tracebackhide__ = True
from _pytest.runner import fail
fail("DID NOT WARN. No warnings of type {0} was emitted. "
"The list of emitted warnings is: {1}.".format(
self.expected_warning,

View File

@@ -8,11 +8,12 @@ from time import time
import py
from _pytest._code.code import TerminalRepr, ExceptionInfo
from _pytest.outcomes import skip, Skipped, TEST_OUTCOME
#
# pytest plugin hooks
def pytest_addoption(parser):
group = parser.getgroup("terminal reporting", "reporting", after="general")
group.addoption('--durations',
@@ -445,7 +446,7 @@ class SetupState(object):
fin = finalizers.pop()
try:
fin()
except Exception:
except TEST_OUTCOME:
# XXX Only first exception will be seen by user,
# ideally all should be reported.
if exc is None:
@@ -492,7 +493,7 @@ class SetupState(object):
self.stack.append(col)
try:
col.setup()
except Exception:
except TEST_OUTCOME:
col._prepare_exc = sys.exc_info()
raise
@@ -505,126 +506,3 @@ def collect_one_node(collector):
if call and check_interactive_exception(call, rep):
ihook.pytest_exception_interact(node=collector, call=call, report=rep)
return rep
# =============================================================
# Test OutcomeExceptions and helpers for creating them.
class OutcomeException(Exception):
""" OutcomeException and its subclass instances indicate and
contain info about test and collection outcomes.
"""
def __init__(self, msg=None, pytrace=True):
Exception.__init__(self, msg)
self.msg = msg
self.pytrace = pytrace
def __repr__(self):
if self.msg:
val = self.msg
if isinstance(val, bytes):
val = py._builtin._totext(val, errors='replace')
return val
return "<%s instance>" % (self.__class__.__name__,)
__str__ = __repr__
class Skipped(OutcomeException):
# XXX hackish: on 3k we fake to live in the builtins
# in order to have Skipped exception printing shorter/nicer
__module__ = 'builtins'
def __init__(self, msg=None, pytrace=True, allow_module_level=False):
OutcomeException.__init__(self, msg=msg, pytrace=pytrace)
self.allow_module_level = allow_module_level
class Failed(OutcomeException):
""" raised from an explicit call to pytest.fail() """
__module__ = 'builtins'
class Exit(KeyboardInterrupt):
""" raised for immediate program exits (no tracebacks/summaries)"""
def __init__(self, msg="unknown reason"):
self.msg = msg
KeyboardInterrupt.__init__(self, msg)
# exposed helper methods
def exit(msg):
""" exit testing process as if KeyboardInterrupt was triggered. """
__tracebackhide__ = True
raise Exit(msg)
exit.Exception = Exit
def skip(msg=""):
""" skip an executing test with the given message. Note: it's usually
better to use the pytest.mark.skipif marker to declare a test to be
skipped under certain conditions like mismatching platforms or
dependencies. See the pytest_skipping plugin for details.
"""
__tracebackhide__ = True
raise Skipped(msg=msg)
skip.Exception = Skipped
def fail(msg="", pytrace=True):
""" explicitly fail an currently-executing test with the given Message.
:arg pytrace: if false the msg represents the full failure information
and no python traceback will be reported.
"""
__tracebackhide__ = True
raise Failed(msg=msg, pytrace=pytrace)
fail.Exception = Failed
def importorskip(modname, minversion=None):
""" return imported module if it has at least "minversion" as its
__version__ attribute. If no minversion is specified the a skip
is only triggered if the module can not be imported.
"""
import warnings
__tracebackhide__ = True
compile(modname, '', 'eval')  # to catch syntax errors
should_skip = False
with warnings.catch_warnings():
# make sure to ignore ImportWarnings that might happen because
# of existing directories with the same name we're trying to
# import but without a __init__.py file
warnings.simplefilter('ignore')
try:
__import__(modname)
except ImportError:
# Do not raise chained exception here (#1485)
should_skip = True
if should_skip:
raise Skipped("could not import %r" % (modname,), allow_module_level=True)
mod = sys.modules[modname]
if minversion is None:
return mod
verattr = getattr(mod, '__version__', None)
if minversion is not None:
try:
from pkg_resources import parse_version as pv
except ImportError:
raise Skipped("we have a required version for %r but can not import "
"pkg_resources to parse version strings." % (modname,),
allow_module_level=True)
if verattr is None or pv(verattr) < pv(minversion):
raise Skipped("module %r has __version__ %r, required is: %r" % (
modname, verattr, minversion), allow_module_level=True)
return mod

View File

@@ -8,7 +8,7 @@ import traceback
import py
from _pytest.config import hookimpl
from _pytest.mark import MarkInfo, MarkDecorator
from _pytest.runner import fail, skip
from _pytest.outcomes import fail, skip, xfail, TEST_OUTCOME
def pytest_addoption(parser):
@@ -34,7 +34,7 @@ def pytest_configure(config):
def nop(*args, **kwargs):
pass
nop.Exception = XFailed
nop.Exception = xfail.Exception
setattr(pytest, "xfail", nop)
config.addinivalue_line("markers",
@@ -60,19 +60,6 @@ def pytest_configure(config):
)
class XFailed(fail.Exception):
""" raised from an explicit call to pytest.xfail() """
def xfail(reason=""):
""" xfail an executing test or setup functions with the given reason."""
__tracebackhide__ = True
raise XFailed(reason)
xfail.Exception = XFailed
class MarkEvaluator:
def __init__(self, item, name):
self.item = item
@@ -98,7 +85,7 @@ class MarkEvaluator:
def istrue(self):
try:
return self._istrue()
except Exception:
except TEST_OUTCOME:
self.exc = sys.exc_info()
if isinstance(self.exc[1], SyntaxError):
msg = [" " * (self.exc[1].offset + 4) + "^", ]

View File

@@ -7,9 +7,9 @@ import traceback
# for transferring markers
import _pytest._code
from _pytest.config import hookimpl
from _pytest.runner import fail, skip
from _pytest.outcomes import fail, skip, xfail
from _pytest.python import transfer_markers, Class, Module, Function
from _pytest.skipping import MarkEvaluator, xfail
from _pytest.skipping import MarkEvaluator
def pytest_pycollect_makeitem(collector, name, obj):

changelog/580.feature (new file, 1 line)
View File

@@ -0,0 +1 @@
Exceptions raised by ``pytest.fail``, ``pytest.skip`` and ``pytest.xfail`` now subclass BaseException, making them harder to catch unintentionally in normal code.
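For example, a test that wraps a helper call in a broad try/except no longer converts a skip into a silent pass. A hedged illustration of the new behavior, not a test from this PR:

import pytest

def test_example():
    try:
        pytest.skip("missing dependency")
    except Exception:
        pass  # before this change, the skip was swallowed here
    # Skipped now derives from BaseException, so it propagates past the
    # handler and the test is reported as skipped rather than passed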

View File

@@ -47,11 +47,11 @@ You can use the following functions in your test, fixture or setup
functions to force a certain test outcome. Note that most often
you can use declarative marks instead, see :ref:`skipping`.
.. autofunction:: _pytest.runner.fail
.. autofunction:: _pytest.runner.skip
.. autofunction:: _pytest.runner.importorskip
.. autofunction:: _pytest.skipping.xfail
.. autofunction:: _pytest.runner.exit
.. autofunction:: _pytest.outcomes.fail
.. autofunction:: _pytest.outcomes.skip
.. autofunction:: _pytest.outcomes.importorskip
.. autofunction:: _pytest.outcomes.xfail
.. autofunction:: _pytest.outcomes.exit
Fixtures and requests
-----------------------------------------------------

View File

@@ -16,9 +16,8 @@ from _pytest.freeze_support import freeze_includes
from _pytest import __version__
from _pytest.debugging import pytestPDB as __pytestPDB
from _pytest.recwarn import warns, deprecated_call
from _pytest.runner import fail, skip, importorskip, exit
from _pytest.outcomes import fail, skip, importorskip, exit, xfail
from _pytest.mark import MARK_GEN as mark, param
from _pytest.skipping import xfail
from _pytest.main import Item, Collector, File, Session
from _pytest.fixtures import fillfixtures as _fillfuncargs
from _pytest.python import (

View File

@@ -6,7 +6,7 @@ import os
import py
import pytest
import sys
from _pytest import runner, main
from _pytest import runner, main, outcomes
class TestSetupState(object):
@@ -449,10 +449,18 @@ def test_runtest_in_module_ordering(testdir):
def test_outcomeexception_exceptionattributes():
outcome = runner.OutcomeException('test')
outcome = outcomes.OutcomeException('test')
assert outcome.args[0] == outcome.msg
def test_outcomeexception_passes_except_Exception():
with pytest.raises(outcomes.OutcomeException):
try:
raise outcomes.OutcomeException('test')
except Exception:
pass
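The same guarantee extends to pytest.exit: Exit subclasses KeyboardInterrupt, so it too escapes "except Exception:" blocks. A sketch in the style of the test above, assumed rather than taken from this diff:

import pytest

def test_exit_passes_except_Exception():
    with pytest.raises(pytest.exit.Exception):
        try:
            pytest.exit("stop")
        except Exception:
            pass  # Exit is a KeyboardInterrupt subclass, not caught here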
def test_pytest_exit():
try:
pytest.exit("hello")