Merge pull request #2490 from RonnyPfannschmidt/fix-580
Test Outcomes as BaseException - fix #580
commit e1aed8cb17
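Why the change matters in practice: pytest signals skip/fail/xfail by raising exceptions, and code under test that catches Exception too broadly used to swallow those outcomes (#580). A minimal sketch of the effect this PR produces; fragile_io is a hypothetical function under test, not code from the diff:

    import pytest

    def fragile_io():
        try:
            pytest.skip("service unavailable")  # raises outcomes.Skipped
        except Exception:
            pass  # before this PR, the skip outcome was silently eaten here

    def test_skip_survives_broad_except():
        # Skipped now derives from BaseException, so it escapes the
        # `except Exception` above and the test is reported as skipped.
        fragile_io()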
@@ -16,7 +16,7 @@ from _pytest.compat import (
     getlocation, getfuncargnames,
     safe_getattr,
 )
-from _pytest.runner import fail
+from _pytest.outcomes import fail, TEST_OUTCOME
 from _pytest.compat import FuncargnamesCompatAttr
 
 if sys.version_info[:2] == (2, 6):
@@ -126,7 +126,7 @@ def getfixturemarker(obj):
     exceptions."""
     try:
         return getattr(obj, "_pytestfixturefunction", None)
-    except Exception:
+    except TEST_OUTCOME:
         # some objects raise errors like request (from flask import request)
         # we don't expect them to be fixture functions
         return None
@@ -816,7 +816,7 @@ def pytest_fixture_setup(fixturedef, request):
     my_cache_key = request.param_index
     try:
         result = call_fixture_func(fixturefunc, request, kwargs)
-    except Exception:
+    except TEST_OUTCOME:
         fixturedef.cached_result = (None, my_cache_key, sys.exc_info())
         raise
     fixturedef.cached_result = (result, my_cache_key, None)
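These hunks show the internal counterpart of the change: now that OutcomeException no longer derives from Exception, every handler that must trap outcomes as well as ordinary errors catches the TEST_OUTCOME tuple instead of bare Exception. A simplified sketch of the pattern; cached_call is illustrative, not pytest code:

    from _pytest.outcomes import TEST_OUTCOME  # (OutcomeException, Exception)

    def cached_call(func, cache):
        # record either the result or the failure, then let outcome
        # exceptions and ordinary errors keep propagating
        try:
            cache['result'] = func()
        except TEST_OUTCOME:
            cache['failed'] = True
            raise
        return cache['result']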
@@ -14,7 +14,8 @@ except ImportError:
     from UserDict import DictMixin as MappingMixin
 
 from _pytest.config import directory_arg, UsageError, hookimpl
-from _pytest.runner import collect_one_node, exit
+from _pytest.runner import collect_one_node
+from _pytest.outcomes import exit
 
 tracebackcutdir = py.path.local(_pytest.__file__).dirpath()
 
@@ -0,0 +1,140 @@
+"""
+exception classes and constants handling test outcomes
+as well as functions creating them
+"""
+from __future__ import absolute_import, division, print_function
+import py
+import sys
+
+
+class OutcomeException(BaseException):
+    """ OutcomeException and its subclass instances indicate and
+    contain info about test and collection outcomes.
+    """
+    def __init__(self, msg=None, pytrace=True):
+        BaseException.__init__(self, msg)
+        self.msg = msg
+        self.pytrace = pytrace
+
+    def __repr__(self):
+        if self.msg:
+            val = self.msg
+            if isinstance(val, bytes):
+                val = py._builtin._totext(val, errors='replace')
+            return val
+        return "<%s instance>" % (self.__class__.__name__,)
+    __str__ = __repr__
+
+
+TEST_OUTCOME = (OutcomeException, Exception)
+
+
+class Skipped(OutcomeException):
+    # XXX hackish: on 3k we fake to live in the builtins
+    # in order to have Skipped exception printing shorter/nicer
+    __module__ = 'builtins'
+
+    def __init__(self, msg=None, pytrace=True, allow_module_level=False):
+        OutcomeException.__init__(self, msg=msg, pytrace=pytrace)
+        self.allow_module_level = allow_module_level
+
+
+class Failed(OutcomeException):
+    """ raised from an explicit call to pytest.fail() """
+    __module__ = 'builtins'
+
+
+class Exit(KeyboardInterrupt):
+    """ raised for immediate program exits (no tracebacks/summaries)"""
+    def __init__(self, msg="unknown reason"):
+        self.msg = msg
+        KeyboardInterrupt.__init__(self, msg)
+
+
+# exposed helper methods
+
+
+def exit(msg):
+    """ exit testing process as if KeyboardInterrupt was triggered. """
+    __tracebackhide__ = True
+    raise Exit(msg)
+
+
+exit.Exception = Exit
+
+
+def skip(msg=""):
+    """ skip an executing test with the given message. Note: it's usually
+    better to use the pytest.mark.skipif marker to declare a test to be
+    skipped under certain conditions like mismatching platforms or
+    dependencies. See the pytest_skipping plugin for details.
+    """
+    __tracebackhide__ = True
+    raise Skipped(msg=msg)
+
+
+skip.Exception = Skipped
+
+
+def fail(msg="", pytrace=True):
+    """ explicitly fail an currently-executing test with the given Message.
+
+    :arg pytrace: if false the msg represents the full failure information
+                  and no python traceback will be reported.
+    """
+    __tracebackhide__ = True
+    raise Failed(msg=msg, pytrace=pytrace)
+
+
+fail.Exception = Failed
+
+
+class XFailed(fail.Exception):
+    """ raised from an explicit call to pytest.xfail() """
+
+
+def xfail(reason=""):
+    """ xfail an executing test or setup functions with the given reason."""
+    __tracebackhide__ = True
+    raise XFailed(reason)
+
+
+xfail.Exception = XFailed
+
+
+def importorskip(modname, minversion=None):
+    """ return imported module if it has at least "minversion" as its
+    __version__ attribute. If no minversion is specified the a skip
+    is only triggered if the module can not be imported.
+    """
+    import warnings
+    __tracebackhide__ = True
+    compile(modname, '', 'eval')  # to catch syntaxerrors
+    should_skip = False
+
+    with warnings.catch_warnings():
+        # make sure to ignore ImportWarnings that might happen because
+        # of existing directories with the same name we're trying to
+        # import but without a __init__.py file
+        warnings.simplefilter('ignore')
+        try:
+            __import__(modname)
+        except ImportError:
+            # Do not raise chained exception here(#1485)
+            should_skip = True
+    if should_skip:
+        raise Skipped("could not import %r" % (modname,), allow_module_level=True)
+    mod = sys.modules[modname]
+    if minversion is None:
+        return mod
+    verattr = getattr(mod, '__version__', None)
+    if minversion is not None:
+        try:
+            from pkg_resources import parse_version as pv
+        except ImportError:
+            raise Skipped("we have a required version for %r but can not import "
+                          "pkg_resources to parse version strings." % (modname,),
+                          allow_module_level=True)
+        if verattr is None or pv(verattr) < pv(minversion):
+            raise Skipped("module %r has __version__ %r, required is: %r" % (
+                          modname, verattr, minversion), allow_module_level=True)
+    return mod
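Each helper in the new module publishes its exception class as an attribute (skip.Exception is Skipped, fail.Exception is Failed, and so on), so callers can catch a specific outcome without importing the private classes. A hedged sketch of that usage; run_quietly is illustrative, not pytest API:

    import pytest

    def run_quietly(testfunc):
        try:
            testfunc()
        except pytest.skip.Exception as e:   # Skipped
            return 'skipped: %s' % e.msg
        except pytest.fail.Exception as e:   # Failed
            return 'failed: %s' % e.msg
        return 'passed'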
@@ -23,7 +23,7 @@ from _pytest.compat import (
     get_real_func, getfslineno, safe_getattr,
     safe_str, getlocation, enum,
 )
-from _pytest.runner import fail
+from _pytest.outcomes import fail
 from _pytest.mark import transfer_markers
 
 cutdir1 = py.path.local(pluggy.__file__.rstrip("oc"))
@@ -4,7 +4,7 @@ import sys
 import py
 
 from _pytest.compat import isclass, izip
-from _pytest.runner import fail
+from _pytest.outcomes import fail
 import _pytest._code
 
 # builtin pytest.approx helper
@@ -7,7 +7,9 @@ import _pytest._code
 import py
 import sys
 import warnings
 
 from _pytest.fixtures import yield_fixture
+from _pytest.outcomes import fail
+
 
 @yield_fixture
@@ -197,7 +199,6 @@ class WarningsChecker(WarningsRecorder):
             if not any(issubclass(r.category, self.expected_warning)
                        for r in self):
                 __tracebackhide__ = True
-                from _pytest.runner import fail
                 fail("DID NOT WARN. No warnings of type {0} was emitted. "
                      "The list of emitted warnings is: {1}.".format(
                         self.expected_warning,
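With fail imported at module level in the hunk above, the user-visible behaviour of pytest.warns is unchanged: a "DID NOT WARN" still surfaces as a Failed outcome, which can be asserted as in this small illustrative test (not part of the diff):

    import pytest

    def test_warns_reports_missing_warning():
        # leaving the warns block without a matching warning calls fail(),
        # which raises the BaseException-derived Failed
        with pytest.raises(pytest.fail.Exception):
            with pytest.warns(UserWarning):
                pass  # nothing warned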
@@ -8,11 +8,12 @@ from time import time
 
 import py
 from _pytest._code.code import TerminalRepr, ExceptionInfo
+from _pytest.outcomes import skip, Skipped, TEST_OUTCOME
 
 #
 # pytest plugin hooks
 
 
 def pytest_addoption(parser):
     group = parser.getgroup("terminal reporting", "reporting", after="general")
     group.addoption('--durations',
@@ -445,7 +446,7 @@ class SetupState(object):
             fin = finalizers.pop()
             try:
                 fin()
-            except Exception:
+            except TEST_OUTCOME:
                 # XXX Only first exception will be seen by user,
                 # ideally all should be reported.
                 if exc is None:
@@ -492,7 +493,7 @@ class SetupState(object):
         self.stack.append(col)
         try:
             col.setup()
-        except Exception:
+        except TEST_OUTCOME:
             col._prepare_exc = sys.exc_info()
             raise
 
@@ -505,126 +506,3 @@ def collect_one_node(collector):
     if call and check_interactive_exception(call, rep):
         ihook.pytest_exception_interact(node=collector, call=call, report=rep)
     return rep
-
-
-# =============================================================
-# Test OutcomeExceptions and helpers for creating them.
-
-
-class OutcomeException(Exception):
-    """ OutcomeException and its subclass instances indicate and
-    contain info about test and collection outcomes.
-    """
-
-    def __init__(self, msg=None, pytrace=True):
-        Exception.__init__(self, msg)
-        self.msg = msg
-        self.pytrace = pytrace
-
-    def __repr__(self):
-        if self.msg:
-            val = self.msg
-            if isinstance(val, bytes):
-                val = py._builtin._totext(val, errors='replace')
-            return val
-        return "<%s instance>" % (self.__class__.__name__,)
-    __str__ = __repr__
-
-
-class Skipped(OutcomeException):
-    # XXX hackish: on 3k we fake to live in the builtins
-    # in order to have Skipped exception printing shorter/nicer
-    __module__ = 'builtins'
-
-    def __init__(self, msg=None, pytrace=True, allow_module_level=False):
-        OutcomeException.__init__(self, msg=msg, pytrace=pytrace)
-        self.allow_module_level = allow_module_level
-
-
-class Failed(OutcomeException):
-    """ raised from an explicit call to pytest.fail() """
-    __module__ = 'builtins'
-
-
-class Exit(KeyboardInterrupt):
-    """ raised for immediate program exits (no tracebacks/summaries)"""
-
-    def __init__(self, msg="unknown reason"):
-        self.msg = msg
-        KeyboardInterrupt.__init__(self, msg)
-
-
-# exposed helper methods
-
-
-def exit(msg):
-    """ exit testing process as if KeyboardInterrupt was triggered. """
-    __tracebackhide__ = True
-    raise Exit(msg)
-
-
-exit.Exception = Exit
-
-
-def skip(msg=""):
-    """ skip an executing test with the given message. Note: it's usually
-    better to use the pytest.mark.skipif marker to declare a test to be
-    skipped under certain conditions like mismatching platforms or
-    dependencies. See the pytest_skipping plugin for details.
-    """
-    __tracebackhide__ = True
-    raise Skipped(msg=msg)
-
-
-skip.Exception = Skipped
-
-
-def fail(msg="", pytrace=True):
-    """ explicitly fail an currently-executing test with the given Message.
-
-    :arg pytrace: if false the msg represents the full failure information
-                  and no python traceback will be reported.
-    """
-    __tracebackhide__ = True
-    raise Failed(msg=msg, pytrace=pytrace)
-
-
-fail.Exception = Failed
-
-
-def importorskip(modname, minversion=None):
-    """ return imported module if it has at least "minversion" as its
-    __version__ attribute. If no minversion is specified the a skip
-    is only triggered if the module can not be imported.
-    """
-    import warnings
-    __tracebackhide__ = True
-    compile(modname, '', 'eval')  # to catch syntaxerrors
-    should_skip = False
-
-    with warnings.catch_warnings():
-        # make sure to ignore ImportWarnings that might happen because
-        # of existing directories with the same name we're trying to
-        # import but without a __init__.py file
-        warnings.simplefilter('ignore')
-        try:
-            __import__(modname)
-        except ImportError:
-            # Do not raise chained exception here(#1485)
-            should_skip = True
-    if should_skip:
-        raise Skipped("could not import %r" % (modname,), allow_module_level=True)
-    mod = sys.modules[modname]
-    if minversion is None:
-        return mod
-    verattr = getattr(mod, '__version__', None)
-    if minversion is not None:
-        try:
-            from pkg_resources import parse_version as pv
-        except ImportError:
-            raise Skipped("we have a required version for %r but can not import "
-                          "pkg_resources to parse version strings." % (modname,),
-                          allow_module_level=True)
-        if verattr is None or pv(verattr) < pv(minversion):
-            raise Skipped("module %r has __version__ %r, required is: %r" % (
-                          modname, verattr, minversion), allow_module_level=True)
-    return mod
@@ -8,7 +8,7 @@ import traceback
 import py
 from _pytest.config import hookimpl
 from _pytest.mark import MarkInfo, MarkDecorator
-from _pytest.runner import fail, skip
+from _pytest.outcomes import fail, skip, xfail, TEST_OUTCOME
 
 
 def pytest_addoption(parser):
@@ -34,7 +34,7 @@ def pytest_configure(config):
         def nop(*args, **kwargs):
             pass
 
-        nop.Exception = XFailed
+        nop.Exception = xfail.Exception
        setattr(pytest, "xfail", nop)
 
        config.addinivalue_line("markers",
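The nop installed by --runxfail keeps an Exception attribute (now resolved via xfail.Exception instead of the class that this PR removes from skipping.py below), so code that refers to pytest.xfail.Exception still resolves when xfail calls are disabled. An illustrative check; is_xfail_outcome is not pytest API:

    import pytest

    def is_xfail_outcome(exc):
        # resolves whether or not --runxfail swapped in the nop, since
        # both the real helper and the nop expose .Exception
        return isinstance(exc, pytest.xfail.Exception)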
@@ -60,19 +60,6 @@ def pytest_configure(config):
     )
 
 
-class XFailed(fail.Exception):
-    """ raised from an explicit call to pytest.xfail() """
-
-
-def xfail(reason=""):
-    """ xfail an executing test or setup functions with the given reason."""
-    __tracebackhide__ = True
-    raise XFailed(reason)
-
-
-xfail.Exception = XFailed
-
-
 class MarkEvaluator:
     def __init__(self, item, name):
         self.item = item
@@ -98,7 +85,7 @@ class MarkEvaluator:
     def istrue(self):
         try:
             return self._istrue()
-        except Exception:
+        except TEST_OUTCOME:
             self.exc = sys.exc_info()
             if isinstance(self.exc[1], SyntaxError):
                 msg = [" " * (self.exc[1].offset + 4) + "^", ]
@@ -7,9 +7,9 @@ import traceback
 # for transferring markers
 import _pytest._code
 from _pytest.config import hookimpl
-from _pytest.runner import fail, skip
+from _pytest.outcomes import fail, skip, xfail
 from _pytest.python import transfer_markers, Class, Module, Function
-from _pytest.skipping import MarkEvaluator, xfail
+from _pytest.skipping import MarkEvaluator
 
 
 def pytest_pycollect_makeitem(collector, name, obj):
@@ -0,0 +1 @@
+Exceptions raised by ``pytest.fail``, ``pytest.skip`` and ``pytest.xfail`` now subclass BaseException, making them harder to be caught unintentionally by normal code.
@@ -47,11 +47,11 @@ You can use the following functions in your test, fixture or setup
 functions to force a certain test outcome. Note that most often
 you can rather use declarative marks, see :ref:`skipping`.
 
-.. autofunction:: _pytest.runner.fail
-.. autofunction:: _pytest.runner.skip
-.. autofunction:: _pytest.runner.importorskip
-.. autofunction:: _pytest.skipping.xfail
-.. autofunction:: _pytest.runner.exit
+.. autofunction:: _pytest.outcomes.fail
+.. autofunction:: _pytest.outcomes.skip
+.. autofunction:: _pytest.outcomes.importorskip
+.. autofunction:: _pytest.outcomes.xfail
+.. autofunction:: _pytest.outcomes.exit
 
 Fixtures and requests
 -----------------------------------------------------
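Only the autodoc targets move in the docs hunk above; the public entry points are unchanged. For example, importorskip (now documented from _pytest.outcomes) is still used as below, where "fancymodule" is a placeholder dependency name:

    import pytest

    # skip this module's tests when the dependency is missing or too old
    fancy = pytest.importorskip("fancymodule", minversion="1.5")

    def test_uses_fancy():
        assert fancy.__version__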
@@ -16,9 +16,8 @@ from _pytest.freeze_support import freeze_includes
 from _pytest import __version__
 from _pytest.debugging import pytestPDB as __pytestPDB
 from _pytest.recwarn import warns, deprecated_call
-from _pytest.runner import fail, skip, importorskip, exit
+from _pytest.outcomes import fail, skip, importorskip, exit, xfail
 from _pytest.mark import MARK_GEN as mark, param
-from _pytest.skipping import xfail
 from _pytest.main import Item, Collector, File, Session
 from _pytest.fixtures import fillfixtures as _fillfuncargs
 from _pytest.python import (
@@ -6,7 +6,7 @@ import os
 import py
 import pytest
 import sys
-from _pytest import runner, main
+from _pytest import runner, main, outcomes
 
 
 class TestSetupState(object):
@@ -449,10 +449,18 @@ def test_runtest_in_module_ordering(testdir):
 
 
 def test_outcomeexception_exceptionattributes():
-    outcome = runner.OutcomeException('test')
+    outcome = outcomes.OutcomeException('test')
     assert outcome.args[0] == outcome.msg
 
 
+def test_outcomeexception_passes_except_Exception():
+    with pytest.raises(outcomes.OutcomeException):
+        try:
+            raise outcomes.OutcomeException('test')
+        except Exception:
+            pass
+
+
 def test_pytest_exit():
     try:
         pytest.exit("hello")