Merge remote-tracking branch 'upstream/features' into integrate-pytest-warnings

commit 0baed781fe
.travis.yml
45
.travis.yml
|
@ -8,34 +8,37 @@ install: "pip install -U tox"
|
||||||
env:
|
env:
|
||||||
matrix:
|
matrix:
|
||||||
# coveralls is not listed in tox's envlist, but should run in travis
|
# coveralls is not listed in tox's envlist, but should run in travis
|
||||||
- TESTENV=coveralls
|
- TOXENV=coveralls
|
||||||
# note: please use "tox --listenvs" to populate the build matrix below
|
# note: please use "tox --listenvs" to populate the build matrix below
|
||||||
- TESTENV=linting
|
- TOXENV=linting
|
||||||
- TESTENV=py26
|
- TOXENV=py26
|
||||||
- TESTENV=py27
|
- TOXENV=py27
|
||||||
- TESTENV=py33
|
- TOXENV=py33
|
||||||
- TESTENV=py34
|
- TOXENV=py34
|
||||||
- TESTENV=py35
|
- TOXENV=py35
|
||||||
- TESTENV=pypy
|
- TOXENV=pypy
|
||||||
- TESTENV=py27-pexpect
|
- TOXENV=py27-pexpect
|
||||||
- TESTENV=py27-xdist
|
- TOXENV=py27-xdist
|
||||||
- TESTENV=py27-trial
|
- TOXENV=py27-trial
|
||||||
- TESTENV=py35-pexpect
|
- TOXENV=py35-pexpect
|
||||||
- TESTENV=py35-xdist
|
- TOXENV=py35-xdist
|
||||||
- TESTENV=py35-trial
|
- TOXENV=py35-trial
|
||||||
- TESTENV=py27-nobyte
|
- TOXENV=py27-nobyte
|
||||||
- TESTENV=doctesting
|
- TOXENV=doctesting
|
||||||
- TESTENV=freeze
|
- TOXENV=freeze
|
||||||
- TESTENV=docs
|
- TOXENV=docs
|
||||||
|
|
||||||
matrix:
|
matrix:
|
||||||
include:
|
include:
|
||||||
- env: TESTENV=py36
|
- env: TOXENV=py36
|
||||||
python: '3.6-dev'
|
python: '3.6-dev'
|
||||||
- env: TESTENV=py37
|
- env: TOXENV=py37
|
||||||
|
python: 'nightly'
|
||||||
|
allow_failures:
|
||||||
|
- env: TOXENV=py37
|
||||||
python: 'nightly'
|
python: 'nightly'
|
||||||
|
|
||||||
script: tox --recreate -e $TESTENV
|
script: tox --recreate
|
||||||
|
|
||||||
notifications:
|
notifications:
|
||||||
irc:
|
irc:
|
||||||
|
|
AUTHORS (7 changed lines)

@@ -13,9 +13,11 @@ Andrzej Ostrowski
 Andy Freeland
 Anthon van der Neut
 Antony Lee
+Anthony Sottile
 Armin Rigo
 Aron Curzon
 Aviv Palivoda
+Barney Gale
 Ben Webb
 Benjamin Peterson
 Bernard Pratz

@@ -42,6 +44,7 @@ Dave Hunt
 David Díaz-Barquero
 David Mohr
 David Vierra
+Denis Kirisov
 Diego Russo
 Dmitry Dygalo
 Duncan Betts

@@ -116,11 +119,14 @@ Nicolas Delaby
 Oleg Pidsadnyi
 Oliver Bestwalter
 Omar Kohl
+Omer Hadari
+Patrick Hayes
 Pieter Mulder
 Piotr Banaszkiewicz
 Punyashloka Biswal
 Quentin Pradet
 Ralf Schmitt
+Ran Benita
 Raphael Pierzina
 Raquel Alegre
 Ravi Chandra

@@ -147,5 +153,6 @@ Tyler Goodlet
 Vasily Kuznetsov
 Victor Uriarte
 Vlad Dragos
+Vidar T. Fauske
 Wouter van Ackooy
 Xuecong Liao

@@ -27,6 +27,11 @@ Changes
 ``__test__`` attribute to ``False`` in the class body (`#2007`_). Thanks
 to `@syre`_ for the report and `@lwm`_ for the PR.

+* Change junitxml.py to produce reports that comply with Junitxml schema.
+  If the same test fails with failure in call and then errors in teardown
+  we split testcase element into two, one containing the error and the other
+  the failure. (`#2228`_) Thanks to `@kkoukiou`_ for the PR.
+
 * Testcase reports with a ``url`` attribute will now properly write this to junitxml.
   Thanks `@fushi`_ for the PR (`#1874`_).

@@ -55,6 +60,14 @@ Changes
   Thanks `@The-Compiler`_ for the PR.

+
+Bug Fixes
+---------
+
+* Fix ``AttributeError`` on ``sys.stdout.buffer`` / ``sys.stderr.buffer``
+  while using ``capsys`` fixture in python 3. (`#1407`_).
+  Thanks to `@asottile`_.
+
 .. _@davidszotten: https://github.com/davidszotten
 .. _@fushi: https://github.com/fushi
 .. _@mattduck: https://github.com/mattduck

@@ -65,6 +78,8 @@ Changes
 .. _@unsignedint: https://github.com/unsignedint
 .. _@Kriechi: https://github.com/Kriechi

+.. _#1407: https://github.com/pytest-dev/pytest/issues/1407
 .. _#1512: https://github.com/pytest-dev/pytest/issues/1512
 .. _#1874: https://github.com/pytest-dev/pytest/pull/1874
 .. _#1952: https://github.com/pytest-dev/pytest/pull/1952

@@ -74,9 +89,11 @@ Changes
 .. _#2166: https://github.com/pytest-dev/pytest/pull/2166
 .. _#2147: https://github.com/pytest-dev/pytest/issues/2147
 .. _#2208: https://github.com/pytest-dev/pytest/issues/2208
+.. _#2228: https://github.com/pytest-dev/pytest/issues/2228

-3.0.7 (unreleased)
-=======================
+3.0.8 (unreleased)
+==================

 *

@@ -86,6 +103,66 @@ Changes

 *

+*
+
+
+3.0.7 (2017-03-14)
+==================
+
+
+* Fix issue in assertion rewriting breaking due to modules silently discarding
+  other modules when importing fails
+  Notably, importing the `anydbm` module is fixed. (`#2248`_).
+  Thanks `@pfhayes`_ for the PR.
+
+* junitxml: Fix problematic case where system-out tag occured twice per testcase
+  element in the XML report. Thanks `@kkoukiou`_ for the PR.
+
+* Fix regression, pytest now skips unittest correctly if run with ``--pdb``
+  (`#2137`_). Thanks to `@gst`_ for the report and `@mbyt`_ for the PR.
+
+* Ignore exceptions raised from descriptors (e.g. properties) during Python test collection (`#2234`_).
+  Thanks to `@bluetech`_.
+
+* ``--override-ini`` now correctly overrides some fundamental options like ``python_files`` (`#2238`_).
+  Thanks `@sirex`_ for the report and `@nicoddemus`_ for the PR.
+
+* Replace ``raise StopIteration`` usages in the code by simple ``returns`` to finish generators, in accordance to `PEP-479`_ (`#2160`_).
+  Thanks `@tgoodlet`_ for the report and `@nicoddemus`_ for the PR.
+
+* Fix internal errors when an unprintable ``AssertionError`` is raised inside a test.
+  Thanks `@omerhadari`_ for the PR.
+
+* Skipping plugin now also works with test items generated by custom collectors (`#2231`_).
+  Thanks to `@vidartf`_.
+
+* Fix trailing whitespace in console output if no .ini file presented (`#2281`_). Thanks `@fbjorn`_ for the PR.
+
+* Conditionless ``xfail`` markers no longer rely on the underlying test item
+  being an instance of ``PyobjMixin``, and can therefore apply to tests not
+  collected by the built-in python test collector. Thanks `@barneygale`_ for the
+  PR.
+
+
+.. _@pfhayes: https://github.com/pfhayes
+.. _@bluetech: https://github.com/bluetech
+.. _@gst: https://github.com/gst
+.. _@sirex: https://github.com/sirex
+.. _@vidartf: https://github.com/vidartf
+.. _@kkoukiou: https://github.com/KKoukiou
+.. _@omerhadari: https://github.com/omerhadari
+.. _@fbjorn: https://github.com/fbjorn
+
+.. _#2248: https://github.com/pytest-dev/pytest/issues/2248
+.. _#2137: https://github.com/pytest-dev/pytest/issues/2137
+.. _#2160: https://github.com/pytest-dev/pytest/issues/2160
+.. _#2231: https://github.com/pytest-dev/pytest/issues/2231
+.. _#2234: https://github.com/pytest-dev/pytest/issues/2234
+.. _#2238: https://github.com/pytest-dev/pytest/issues/2238
+.. _#2281: https://github.com/pytest-dev/pytest/issues/2281
+
+.. _PEP-479: https://www.python.org/dev/peps/pep-0479/
+
+
 3.0.6 (2017-01-22)
 ==================

@@ -119,6 +196,7 @@ Changes
 terminal output it relies on is missing. Thanks to `@eli-b`_ for the PR.

+.. _@barneygale: https://github.com/barneygale
 .. _@lesteve: https://github.com/lesteve
 .. _@malinoff: https://github.com/malinoff
 .. _@pelme: https://github.com/pelme

@@ -2451,7 +2529,7 @@ Bug fixes:
   teardown function are called earlier.
 - add an all-powerful metafunc.parametrize function which allows to
   parametrize test function arguments in multiple steps and therefore
-  from indepdenent plugins and palces.
+  from independent plugins and places.
 - add a @pytest.mark.parametrize helper which allows to easily
   call a test function with different argument values
 - Add examples to the "parametrize" example page, including a quick port

@@ -352,6 +352,8 @@ class ExceptionInfo(object):
     help for navigating the traceback.
     """
     _striptext = ''
+    _assert_start_repr = "AssertionError(u\'assert " if sys.version_info[0] < 3 else "AssertionError(\'assert "
+
     def __init__(self, tup=None, exprinfo=None):
         import _pytest._code
         if tup is None:

@@ -359,8 +361,8 @@ class ExceptionInfo(object):
             if exprinfo is None and isinstance(tup[1], AssertionError):
                 exprinfo = getattr(tup[1], 'msg', None)
                 if exprinfo is None:
-                    exprinfo = py._builtin._totext(tup[1])
-                if exprinfo and exprinfo.startswith('assert '):
+                    exprinfo = py.io.saferepr(tup[1])
+                if exprinfo and exprinfo.startswith(self._assert_start_repr):
                     self._striptext = 'AssertionError: '
         self._excinfo = tup
         #: the exception class

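The switch to ``py.io.saferepr`` protects pytest when the ``AssertionError`` itself cannot be rendered because its own ``__repr__`` raises. A standalone sketch of why a guarded repr is needed, using a hypothetical ``safe_repr`` helper rather than the real ``py`` function::

    class Unprintable(AssertionError):
        def __repr__(self):
            raise RuntimeError("cannot render this exception")

    def safe_repr(obj):
        # Fall back to a generic description instead of letting the repr
        # failure propagate into the assertion machinery.
        try:
            return repr(obj)
        except Exception as exc:
            return "<unpresentable %s (repr raised %s)>" % (
                type(obj).__name__, type(exc).__name__)

    print(safe_repr(Unprintable("boom")))
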
@@ -215,7 +215,8 @@ class AssertionRewritingHook(object):
             mod.__loader__ = self
             py.builtin.exec_(co, mod.__dict__)
         except:
-            del sys.modules[name]
+            if name in sys.modules:
+                del sys.modules[name]
             raise
         return sys.modules[name]

@@ -1,7 +1,7 @@
 """
 merged implementation of the cache provider

-the name cache was not choosen to ensure pluggy automatically
+the name cache was not chosen to ensure pluggy automatically
 ignores the external pytest-cache
 """

@@ -12,8 +12,8 @@ from tempfile import TemporaryFile

 import py
 import pytest
+from _pytest.compat import CaptureIO

-from py.io import TextIO
 unicode = py.builtin.text

 patchsysdict = {0: 'stdin', 1: 'stdout', 2: 'stderr'}

@@ -403,7 +403,7 @@ class SysCapture(object):
         if name == "stdin":
             tmpfile = DontReadFromInput()
         else:
-            tmpfile = TextIO()
+            tmpfile = CaptureIO()
         self.tmpfile = tmpfile

     def start(self):

@@ -251,3 +251,19 @@ else:
         except UnicodeError:
             errors = 'replace'
         return v.encode('ascii', errors)
+
+
+if _PY2:
+    from py.io import TextIO as CaptureIO
+else:
+    import io
+
+    class CaptureIO(io.TextIOWrapper):
+        def __init__(self):
+            super(CaptureIO, self).__init__(
+                io.BytesIO(),
+                encoding='UTF-8', newline='', write_through=True,
+            )
+
+        def getvalue(self):
+            return self.buffer.getvalue().decode('UTF-8')

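The new ``CaptureIO`` wraps an in-memory ``BytesIO`` in a ``TextIOWrapper`` so captured text is stored as UTF-8 bytes and decoded back on demand. A standalone sketch of the same idea (illustrative only, not part of the diff)::

    import io

    class CaptureIO(io.TextIOWrapper):
        """Text stream backed by an in-memory bytes buffer (UTF-8)."""

        def __init__(self):
            # write_through=True makes writes reach the BytesIO immediately,
            # so getvalue() never needs an explicit flush.
            super(CaptureIO, self).__init__(
                io.BytesIO(), encoding='UTF-8', newline='', write_through=True)

        def getvalue(self):
            # Decode the raw captured bytes back into text.
            return self.buffer.getvalue().decode('UTF-8')

    stream = CaptureIO()
    stream.write(u'grüß gott\n')  # non-ASCII text round-trips through UTF-8
    assert stream.getvalue() == u'grüß gott\n'
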
@@ -877,6 +877,7 @@ class Config(object):
         self.trace = self.pluginmanager.trace.root.get("config")
         self.hook = self.pluginmanager.hook
         self._inicache = {}
+        self._override_ini = ()
         self._opt2dest = {}
         self._cleanup = []
         self._warn = self.pluginmanager._warn

@@ -977,6 +978,7 @@ class Config(object):
         self.invocation_dir = py.path.local()
         self._parser.addini('addopts', 'extra command line options', 'args')
         self._parser.addini('minversion', 'minimally required pytest version')
+        self._override_ini = ns.override_ini or ()

     def _consider_importhook(self, args, entrypoint_name):
         """Install the PEP 302 import hook if using assertion re-writing.

@@ -1159,15 +1161,14 @@ class Config(object):
         # and -o foo1=bar1 -o foo2=bar2 options
         # always use the last item if multiple value set for same ini-name,
         # e.g. -o foo=bar1 -o foo=bar2 will set foo to bar2
-        if self.getoption("override_ini", None):
-            for ini_config_list in self.option.override_ini:
-                for ini_config in ini_config_list:
-                    try:
-                        (key, user_ini_value) = ini_config.split("=", 1)
-                    except ValueError:
-                        raise UsageError("-o/--override-ini expects option=value style.")
-                    if key == name:
-                        value = user_ini_value
+        for ini_config_list in self._override_ini:
+            for ini_config in ini_config_list:
+                try:
+                    (key, user_ini_value) = ini_config.split("=", 1)
+                except ValueError:
+                    raise UsageError("-o/--override-ini expects option=value style.")
+                if key == name:
+                    value = user_ini_value
         return value

     def getoption(self, name, default=notset, skip=False):

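The rewritten lookup walks the stored ``-o name=value`` overrides directly, so the last occurrence of a key wins even for options such as ``python_files`` that are consulted very early. A self-contained sketch of that resolution rule (the helper name and sample values below are made up for illustration)::

    def resolve_override(override_ini, name, default=None):
        # override_ini mirrors pytest's storage: one inner list per "-o" flag.
        value = default
        for ini_config_list in override_ini:
            for ini_config in ini_config_list:
                try:
                    key, user_ini_value = ini_config.split("=", 1)
                except ValueError:
                    raise ValueError(
                        "-o/--override-ini expects option=value style.")
                if key == name:
                    value = user_ini_value  # later occurrences win
        return value

    overrides = [["python_files=check_*.py"],
                 ["addopts=-q", "python_files=test_*.py"]]
    assert resolve_override(overrides, "python_files") == "test_*.py"
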
@@ -14,6 +14,7 @@ from _pytest.compat import (
     getfslineno, get_real_func,
     is_generator, isclass, getimfunc,
     getlocation, getfuncargnames,
+    safe_getattr,
 )

 def pytest_sessionstart(session):

@@ -124,8 +125,6 @@ def getfixturemarker(obj):
     exceptions."""
     try:
         return getattr(obj, "_pytestfixturefunction", None)
-    except KeyboardInterrupt:
-        raise
     except Exception:
         # some objects raise errors like request (from flask import request)
         # we don't expect them to be fixture functions

@@ -1068,7 +1067,9 @@ class FixtureManager(object):
         self._holderobjseen.add(holderobj)
         autousenames = []
         for name in dir(holderobj):
-            obj = getattr(holderobj, name, None)
+            # The attribute can be an arbitrary descriptor, so the attribute
+            # access below can raise. safe_getatt() ignores such exceptions.
+            obj = safe_getattr(holderobj, name, None)
             # fixture functions have a pytest_funcarg__ prefix (pre-2.3 style)
             # or are "@pytest.fixture" marked
             marker = getfixturemarker(obj)

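The switch from ``getattr`` to ``safe_getattr`` is what makes collection ignore properties and other descriptors that raise when touched (the descriptor fix noted in the changelog above). A minimal sketch of the idea, with a hypothetical stand-in for the real helper in ``_pytest.compat``::

    def safe_getattr(obj, name, default):
        # Like getattr(), but a raising descriptor counts as "attribute absent".
        try:
            return getattr(obj, name, default)
        except Exception:
            return default

    class Holder(object):
        @property
        def broken(self):
            raise RuntimeError("touching this attribute explodes")

    assert safe_getattr(Holder(), "broken", None) is None
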
@@ -247,7 +247,7 @@ def pytest_unconfigure(config):


 # -------------------------------------------------------------------------
-# hooks for customising the assert methods
+# hooks for customizing the assert methods
 # -------------------------------------------------------------------------

 def pytest_assertrepr_compare(config, op, left, right):

@@ -256,7 +256,7 @@ def pytest_assertrepr_compare(config, op, left, right):
     Return None for no custom explanation, otherwise return a list
     of strings. The strings will be joined by newlines but any newlines
     *in* a string will be escaped. Note that all but the first line will
-    be indented sligthly, the intention is for the first line to be a summary.
+    be indented slightly, the intention is for the first line to be a summary.
     """

 # -------------------------------------------------------------------------

@@ -264,7 +264,14 @@ def pytest_assertrepr_compare(config, op, left, right):
 # -------------------------------------------------------------------------

 def pytest_report_header(config, startdir):
-    """ return a string to be displayed as header info for terminal reporting."""
+    """ return a string to be displayed as header info for terminal reporting.
+
+    .. note::
+
+        This function should be implemented only in plugins or ``conftest.py``
+        files situated at the tests root directory due to how pytest
+        :ref:`discovers plugins during startup <pluginorder>`.
+    """

 @hookspec(firstresult=True)
 def pytest_report_teststatus(report):

@@ -121,7 +121,7 @@ class _NodeReporter(object):
         node = kind(data, message=message)
         self.append(node)

-    def _write_captured_output(self, report):
+    def write_captured_output(self, report):
         for capname in ('out', 'err'):
             content = getattr(report, 'capstd' + capname)
             if content:

@@ -130,7 +130,6 @@ class _NodeReporter(object):

     def append_pass(self, report):
         self.add_stats('passed')
-        self._write_captured_output(report)

     def append_failure(self, report):
         # msg = str(report.longrepr.reprtraceback.extraline)

@@ -149,7 +148,6 @@ class _NodeReporter(object):
             fail = Junit.failure(message=message)
             fail.append(bin_xml_escape(report.longrepr))
             self.append(fail)
-        self._write_captured_output(report)

     def append_collect_error(self, report):
         # msg = str(report.longrepr.reprtraceback.extraline)

@@ -167,7 +165,6 @@ class _NodeReporter(object):
         msg = "test setup failure"
         self._add_simple(
             Junit.error, msg, report.longrepr)
-        self._write_captured_output(report)

     def append_skipped(self, report):
         if hasattr(report, "wasxfail"):

@@ -182,7 +179,7 @@ class _NodeReporter(object):
                 Junit.skipped("%s:%s: %s" % (filename, lineno, skipreason),
                               type="pytest.skip",
                               message=skipreason))
-        self._write_captured_output(report)
+        self.write_captured_output(report)

     def finalize(self):
         data = self.to_xml().unicode(indent=0)

@@ -273,6 +270,9 @@ class LogXML(object):
         self.node_reporters = {}  # nodeid -> _NodeReporter
         self.node_reporters_ordered = []
         self.global_properties = []
+        # List of reports that failed on call but teardown is pending.
+        self.open_reports = []
+        self.cnt_double_fail_tests = 0

     def finalize(self, report):
         nodeid = getattr(report, 'nodeid', report)

@@ -332,14 +332,33 @@ class LogXML(object):
             -> teardown node2
             -> teardown node1
         """
+        close_report = None
         if report.passed:
             if report.when == "call":  # ignore setup/teardown
                 reporter = self._opentestcase(report)
                 reporter.append_pass(report)
         elif report.failed:
+            if report.when == "teardown":
+                # The following vars are needed when xdist plugin is used
+                report_wid = getattr(report, "worker_id", None)
+                report_ii = getattr(report, "item_index", None)
+                close_report = next(
+                    (rep for rep in self.open_reports
+                     if (rep.nodeid == report.nodeid and
+                         getattr(rep, "item_index", None) == report_ii and
+                         getattr(rep, "worker_id", None) == report_wid
+                         )
+                     ), None)
+                if close_report:
+                    # We need to open new testcase in case we have failure in
+                    # call and error in teardown in order to follow junit
+                    # schema
+                    self.finalize(close_report)
+                    self.cnt_double_fail_tests += 1
             reporter = self._opentestcase(report)
             if report.when == "call":
                 reporter.append_failure(report)
+                self.open_reports.append(report)
             else:
                 reporter.append_error(report)
         elif report.skipped:

@@ -347,7 +366,20 @@ class LogXML(object):
             reporter.append_skipped(report)
         self.update_testcase_duration(report)
         if report.when == "teardown":
+            reporter = self._opentestcase(report)
+            reporter.write_captured_output(report)
             self.finalize(report)
+            report_wid = getattr(report, "worker_id", None)
+            report_ii = getattr(report, "item_index", None)
+            close_report = next(
+                (rep for rep in self.open_reports
+                 if (rep.nodeid == report.nodeid and
+                     getattr(rep, "item_index", None) == report_ii and
+                     getattr(rep, "worker_id", None) == report_wid
+                     )
+                 ), None)
+            if close_report:
+                self.open_reports.remove(close_report)

     def update_testcase_duration(self, report):
         """accumulates total duration for nodeid from given report and updates

@@ -380,8 +412,9 @@ class LogXML(object):
         suite_stop_time = time.time()
         suite_time_delta = suite_stop_time - self.suite_start_time

-        numtests = self.stats['passed'] + self.stats['failure'] + self.stats['skipped'] + self.stats['error']
+        numtests = (self.stats['passed'] + self.stats['failure'] +
+                    self.stats['skipped'] + self.stats['error'] -
+                    self.cnt_double_fail_tests)
         logfile.write('<?xml version="1.0" encoding="utf-8"?>')

         logfile.write(Junit.testsuite(

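The two new lookups above pair a teardown report with the call-phase report that already failed for the same test, so the XML can contain one testcase for the failure and one for the error, as the junit schema expects. A self-contained sketch of that matching step (the fake report class is only for illustration)::

    def find_open_report(open_reports, report):
        # Match on nodeid plus the xdist-specific attributes when present.
        report_wid = getattr(report, "worker_id", None)
        report_ii = getattr(report, "item_index", None)
        return next(
            (rep for rep in open_reports
             if (rep.nodeid == report.nodeid and
                 getattr(rep, "item_index", None) == report_ii and
                 getattr(rep, "worker_id", None) == report_wid)),
            None)

    class FakeReport(object):
        def __init__(self, nodeid):
            self.nodeid = nodeid

    call_failure = FakeReport("test_a.py::test_x")
    teardown_error = FakeReport("test_a.py::test_x")
    assert find_open_report([call_failure], teardown_error) is call_failure
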
|
@ -81,7 +81,7 @@ def pytest_namespace():
|
||||||
|
|
||||||
|
|
||||||
def pytest_configure(config):
|
def pytest_configure(config):
|
||||||
pytest.config = config # compatibiltiy
|
pytest.config = config # compatibility
|
||||||
|
|
||||||
|
|
||||||
def wrap_session(config, doit):
|
def wrap_session(config, doit):
|
||||||
|
|
|
@@ -72,7 +72,7 @@ def pytest_collection_modifyitems(items, config):
         return
     # pytest used to allow "-" for negating
     # but today we just allow "-" at the beginning, use "not" instead
-    # we probably remove "-" alltogether soon
+    # we probably remove "-" altogether soon
     if keywordexpr.startswith("-"):
         keywordexpr = "not " + keywordexpr[1:]
     selectuntil = False

@@ -14,6 +14,7 @@ from weakref import WeakKeyDictionary

 from py.builtin import print_

+from _pytest.capture import MultiCapture, SysCapture
 from _pytest._code import Source
 import py
 import pytest

@@ -334,7 +335,7 @@ def testdir(request, tmpdir_factory):
     return Testdir(request, tmpdir_factory)


-rex_outcome = re.compile("(\d+) ([\w-]+)")
+rex_outcome = re.compile(r"(\d+) ([\w-]+)")
 class RunResult(object):
     """The result of running a command.

@@ -569,7 +570,7 @@ class Testdir(object):
     def mkpydir(self, name):
         """Create a new python package.

-        This creates a (sub)direcotry with an empty ``__init__.py``
+        This creates a (sub)directory with an empty ``__init__.py``
         file so that is recognised as a python package.

         """

@@ -664,7 +665,7 @@ class Testdir(object):
     def inline_genitems(self, *args):
         """Run ``pytest.main(['--collectonly'])`` in-process.

-        Retuns a tuple of the collected items and a
+        Returns a tuple of the collected items and a
         :py:class:`HookRecorder` instance.

         This runs the :py:func:`pytest.main` function to run all of

@@ -737,7 +738,8 @@ class Testdir(object):
         if kwargs.get("syspathinsert"):
             self.syspathinsert()
         now = time.time()
-        capture = py.io.StdCapture()
+        capture = MultiCapture(Capture=SysCapture)
+        capture.start_capturing()
         try:
             try:
                 reprec = self.inline_run(*args, **kwargs)

@@ -752,7 +754,8 @@ class Testdir(object):
                 class reprec(object):
                     ret = 3
         finally:
-            out, err = capture.reset()
+            out, err = capture.readouterr()
+            capture.stop_capturing()
             sys.stdout.write(out)
             sys.stderr.write(err)

@@ -860,7 +863,7 @@ class Testdir(object):
             :py:meth:`parseconfigure`.

         :param withinit: Whether to also write a ``__init__.py`` file
-            to the temporarly directory to ensure it is a package.
+            to the temporary directory to ensure it is a package.

         """
         kw = {self.request.function.__name__: Source(source).strip()}

@@ -174,7 +174,7 @@ def pytest_pycollect_makeitem(collector, name, obj):
     outcome = yield
     res = outcome.get_result()
     if res is not None:
-        raise StopIteration
+        return
     # nothing was collected elsewhere, let's do it here
     if isclass(obj):
         if collector.istestclass(obj, name):

@@ -632,7 +632,7 @@ class Generator(FunctionMixin, PyCollector):
     def getcallargs(self, obj):
         if not isinstance(obj, (tuple, list)):
             obj = (obj,)
-        # explict naming
+        # explicit naming
         if isinstance(obj[0], py.builtin._basestring):
             name = obj[0]
             obj = obj[1:]

@@ -6,11 +6,10 @@ import py
 import sys
 import warnings
 import pytest
-from collections import namedtuple


 @pytest.yield_fixture
-def recwarn(request):
+def recwarn():
     """Return a WarningsRecorder instance that provides these methods:

     * ``pop(category=None)``: return last warning matching the category.

@@ -115,19 +114,14 @@ def warns(expected_warning, *args, **kwargs):
         return func(*args[1:], **kwargs)


-RecordedWarning = namedtuple('RecordedWarning', (
-    'message', 'category', 'filename', 'lineno', 'file', 'line',
-))
-
-
-class WarningsRecorder(object):
+class WarningsRecorder(warnings.catch_warnings):
     """A context manager to record raised warnings.

     Adapted from `warnings.catch_warnings`.
     """

-    def __init__(self, module=None):
-        self._module = sys.modules['warnings'] if module is None else module
+    def __init__(self):
+        super(WarningsRecorder, self).__init__(record=True)
         self._entered = False
         self._list = []

@@ -164,38 +158,20 @@ class WarningsRecorder(object):
         if self._entered:
             __tracebackhide__ = True
             raise RuntimeError("Cannot enter %r twice" % self)
-        self._entered = True
-        self._filters = self._module.filters
-        self._module.filters = self._filters[:]
-        self._showwarning = self._module.showwarning
-
-        def showwarning(message, category, filename, lineno,
-                        file=None, line=None):
-            self._list.append(RecordedWarning(
-                message, category, filename, lineno, file, line))
-
-            # still perform old showwarning functionality
-            self._showwarning(
-                message, category, filename, lineno, file=file, line=line)
-
-        self._module.showwarning = showwarning
-
-        # allow the same warning to be raised more than once
-
-        self._module.simplefilter('always')
+        self._list = super(WarningsRecorder, self).__enter__()
+        warnings.simplefilter('always')
         return self

     def __exit__(self, *exc_info):
         if not self._entered:
             __tracebackhide__ = True
             raise RuntimeError("Cannot exit %r without entering first" % self)
-        self._module.filters = self._filters
-        self._module.showwarning = self._showwarning
+        super(WarningsRecorder, self).__exit__(*exc_info)


 class WarningsChecker(WarningsRecorder):
-    def __init__(self, expected_warning=None, module=None):
-        super(WarningsChecker, self).__init__(module=module)
+    def __init__(self, expected_warning=None):
+        super(WarningsChecker, self).__init__()

         msg = ("exceptions must be old-style classes or "
                "derived from Warning, not %s")

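Instead of monkeypatching ``warnings.showwarning`` by hand, the recorder now subclasses ``warnings.catch_warnings`` in recording mode and lets the standard library collect the warnings. A minimal standalone sketch of that approach (the class name here is hypothetical, not pytest's)::

    import warnings

    class Recorder(warnings.catch_warnings):
        def __init__(self):
            super(Recorder, self).__init__(record=True)

        def __enter__(self):
            # catch_warnings(record=True) returns the list it appends to.
            self._recorded = super(Recorder, self).__enter__()
            warnings.simplefilter('always')  # record duplicates as well
            return self

        @property
        def list(self):
            return self._recorded

    with Recorder() as rec:
        warnings.warn("this API is deprecated", DeprecationWarning)

    assert rec.list[0].category is DeprecationWarning
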
@@ -112,14 +112,14 @@ class MarkEvaluator(object):

     def _getglobals(self):
         d = {'os': os, 'sys': sys, 'config': self.item.config}
-        d.update(self.item.obj.__globals__)
+        if hasattr(self.item, 'obj'):
+            d.update(self.item.obj.__globals__)
         return d

     def _istrue(self):
         if hasattr(self, 'result'):
             return self.result
         if self.holder:
-            d = self._getglobals()
             if self.holder.args or 'condition' in self.holder.kwargs:
                 self.result = False
                 # "holder" might be a MarkInfo or a MarkDecorator; only

@@ -133,6 +133,7 @@ class MarkEvaluator(object):
                 for expr in args:
                     self.expr = expr
                     if isinstance(expr, py.builtin._basestring):
+                        d = self._getglobals()
                         result = cached_eval(self.item.config, expr, d)
                     else:
                         if "reason" not in kwargs:

@@ -298,8 +298,8 @@ class TerminalReporter(object):
     def pytest_report_header(self, config):
         inifile = ""
         if config.inifile:
-            inifile = config.rootdir.bestrelpath(config.inifile)
-        lines = ["rootdir: %s, inifile: %s" %(config.rootdir, inifile)]
+            inifile = " " + config.rootdir.bestrelpath(config.inifile)
+        lines = ["rootdir: %s, inifile:%s" % (config.rootdir, inifile)]

         plugininfo = config.pluginmanager.list_plugin_distinfo()
         if plugininfo:

@@ -116,7 +116,7 @@ def tmpdir(request, tmpdir_factory):
     path object.
     """
     name = request.node.name
-    name = re.sub("[\W]", "_", name)
+    name = re.sub(r"[\W]", "_", name)
     MAXVAL = 30
     if len(name) > MAXVAL:
         name = name[:MAXVAL]

@@ -5,7 +5,7 @@ import sys
 import traceback

 import pytest
-# for transfering markers
+# for transferring markers
 import _pytest._code
 from _pytest.python import transfer_markers
 from _pytest.skipping import MarkEvaluator

@@ -65,7 +65,6 @@ class UnitTestCase(pytest.Class):
             yield TestCaseFunction('runTest', parent=self)


-
 class TestCaseFunction(pytest.Function):
     _excinfo = None

@@ -152,14 +151,33 @@ class TestCaseFunction(pytest.Function):
     def stopTest(self, testcase):
         pass

+    def _handle_skip(self):
+        # implements the skipping machinery (see #2137)
+        # analog to pythons Lib/unittest/case.py:run
+        testMethod = getattr(self._testcase, self._testcase._testMethodName)
+        if (getattr(self._testcase.__class__, "__unittest_skip__", False) or
+                getattr(testMethod, "__unittest_skip__", False)):
+            # If the class or method was skipped.
+            skip_why = (getattr(self._testcase.__class__, '__unittest_skip_why__', '') or
+                        getattr(testMethod, '__unittest_skip_why__', ''))
+            try:  # PY3, unittest2 on PY2
+                self._testcase._addSkip(self, self._testcase, skip_why)
+            except TypeError:  # PY2
+                if sys.version_info[0] != 2:
+                    raise
+                self._testcase._addSkip(self, skip_why)
+            return True
+        return False
+
     def runtest(self):
         if self.config.pluginmanager.get_plugin("pdbinvoke") is None:
             self._testcase(result=self)
         else:
             # disables tearDown and cleanups for post mortem debugging (see #1890)
+            if self._handle_skip():
+                return
             self._testcase.debug()


     def _prunetraceback(self, excinfo):
         pytest.Function._prunetraceback(self, excinfo)
         traceback = excinfo.traceback.filter(

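``_handle_skip`` reproduces the check ``unittest`` itself performs: the ``unittest.skip`` decorators leave ``__unittest_skip__`` / ``__unittest_skip_why__`` markers on the class or the test method, and under ``--pdb`` pytest now consults them before calling ``debug()``. A small standalone sketch of that check (the test class is invented for the example)::

    import unittest

    class Example(unittest.TestCase):
        @unittest.skip("not ready yet")
        def test_pending(self):
            pass

    case = Example("test_pending")
    method = getattr(case, case._testMethodName)
    skipped = (getattr(Example, "__unittest_skip__", False) or
               getattr(method, "__unittest_skip__", False))
    why = (getattr(Example, "__unittest_skip_why__", "") or
           getattr(method, "__unittest_skip_why__", ""))
    assert skipped and why == "not ready yet"
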
@@ -75,7 +75,7 @@ __all__ = ["PluginManager", "PluginValidationError", "HookCallError",
 _py3 = sys.version_info > (3, 0)


-class HookspecMarker(object):
+class HookspecMarker:
     """ Decorator helper class for marking functions as hook specifications.

     You can instantiate it with a project_name to get a decorator.

@@ -113,7 +113,7 @@ class HookspecMarker(object):
         return setattr_hookspec_opts


-class HookimplMarker(object):
+class HookimplMarker:
     """ Decorator helper class for marking functions as hook implementations.

     You can instantiate with a project_name to get a decorator.

@@ -167,7 +167,7 @@ def normalize_hookimpl_opts(opts):
     opts.setdefault("optionalhook", False)


-class _TagTracer(object):
+class _TagTracer:
     def __init__(self):
         self._tag2proc = {}
         self.writer = None

@@ -214,7 +214,7 @@ class _TagTracer(object):
         self._tag2proc[tags] = processor


-class _TagTracerSub(object):
+class _TagTracerSub:
     def __init__(self, root, tags):
         self.root = root
         self.tags = tags

@@ -254,7 +254,7 @@ def _wrapped_call(wrap_controller, func):
     return call_outcome.get_result()


-class _CallOutcome(object):
+class _CallOutcome:
     """ Outcome of a function call, either an exception or a proper result.
     Calling the ``get_result`` method will return the result or reraise
     the exception raised when the function was called. """

@@ -286,7 +286,7 @@ def _reraise(cls, val, tb):
 """)


-class _TracedHookExecution(object):
+class _TracedHookExecution:
     def __init__(self, pluginmanager, before, after):
         self.pluginmanager = pluginmanager
         self.before = before

@@ -580,7 +580,7 @@ class PluginManager(object):
         return orig


-class _MultiCall(object):
+class _MultiCall:
     """ execute a call into multiple python functions/methods. """

     # XXX note that the __multicall__ argument is supported only

@@ -673,7 +673,7 @@ def varnames(func, startindex=None):
     return x


-class _HookRelay(object):
+class _HookRelay:
     """ hook holder object for performing 1:N hook calls where N is the number
     of registered plugins.

@@ -770,7 +770,7 @@ class _HookCaller(object):
             proc(res[0])


-class HookImpl(object):
+class HookImpl:
     def __init__(self, plugin, plugin_name, function, hook_impl_opts):
         self.function = function
         self.argnames = varnames(self.function)

@@ -6,6 +6,7 @@ Release announcements
    :maxdepth: 2

+   release-3.0.7
    release-3.0.6
    release-3.0.5
    release-3.0.4

@@ -0,0 +1,33 @@
+pytest-3.0.7
+============
+
+pytest 3.0.7 has just been released to PyPI.
+
+This is a bug-fix release, being a drop-in replacement. To upgrade::
+
+  pip install --upgrade pytest
+
+The full changelog is available at http://doc.pytest.org/en/latest/changelog.html.
+
+Thanks to all who contributed to this release, among them:
+
+* Anthony Sottile
+* Barney Gale
+* Bruno Oliveira
+* Florian Bruhin
+* Floris Bruynooghe
+* Ionel Cristian Mărieș
+* Katerina Koukiou
+* NODA, Kai
+* Omer Hadari
+* Patrick Hayes
+* Ran Benita
+* Ronny Pfannschmidt
+* Victor Uriarte
+* Vidar Tonaas Fauske
+* Ville Skyttä
+* fbjorn
+* mbyt
+
+Happy testing,
+The pytest Development Team

@@ -26,8 +26,8 @@ you will see the return value of the function call::

    $ pytest test_assert1.py
    ======= test session starts ========
-   platform linux -- Python 3.5.2, pytest-3.0.6, py-1.4.33, pluggy-0.4.0
+   platform linux -- Python 3.5.2, pytest-3.0.7, py-1.4.32, pluggy-0.4.0
    rootdir: $REGENDOC_TMPDIR, inifile:
    collected 1 items

    test_assert1.py F

@@ -170,8 +170,8 @@ if you run this module::

    $ pytest test_assert2.py
    ======= test session starts ========
-   platform linux -- Python 3.5.2, pytest-3.0.6, py-1.4.33, pluggy-0.4.0
+   platform linux -- Python 3.5.2, pytest-3.0.7, py-1.4.32, pluggy-0.4.0
    rootdir: $REGENDOC_TMPDIR, inifile:
    collected 1 items

    test_assert2.py F

@@ -183,7 +183,7 @@ if you run this module::
        set1 = set("1308")
        set2 = set("8035")
    >   assert set1 == set2
-   E       assert {'0', '1', '3', '8'} == {'0', '3', '5', '8'}
+   E       AssertionError: assert {'0', '1', '3', '8'} == {'0', '3', '5', '8'}
    E       Extra items in the left set:
    E       '1'
    E       Extra items in the right set:

@@ -287,5 +287,5 @@ For further information, Benjamin Peterson wrote up `Behind the scenes of pytest
    ``--nomagic``.

 .. versionchanged:: 3.0
-   Removes the ``--no-assert`` and``--nomagic`` options.
+   Removes the ``--no-assert`` and ``--nomagic`` options.
    Removes the ``--assert=reinterp`` option.

@@ -80,9 +80,9 @@ If you then run it with ``--lf``::

    $ pytest --lf
    ======= test session starts ========
-   platform linux -- Python 3.5.2, pytest-3.0.6, py-1.4.33, pluggy-0.4.0
+   platform linux -- Python 3.5.2, pytest-3.0.7, py-1.4.32, pluggy-0.4.0
    run-last-failure: rerun last 2 failures
    rootdir: $REGENDOC_TMPDIR, inifile:
    collected 50 items

    test_50.py FF

@@ -122,9 +122,9 @@ of ``FF`` and dots)::

    $ pytest --ff
    ======= test session starts ========
-   platform linux -- Python 3.5.2, pytest-3.0.6, py-1.4.33, pluggy-0.4.0
+   platform linux -- Python 3.5.2, pytest-3.0.7, py-1.4.32, pluggy-0.4.0
    run-last-failure: rerun last 2 failures first
    rootdir: $REGENDOC_TMPDIR, inifile:
    collected 50 items

    test_50.py FF................................................

@@ -227,14 +227,14 @@ You can always peek at the content of the cache using the

    $ py.test --cache-show
    ======= test session starts ========
-   platform linux -- Python 3.5.2, pytest-3.0.6, py-1.4.33, pluggy-0.4.0
+   platform linux -- Python 3.5.2, pytest-3.0.7, py-1.4.32, pluggy-0.4.0
    rootdir: $REGENDOC_TMPDIR, inifile:
    cachedir: $REGENDOC_TMPDIR/.cache
    ------------------------------- cache values -------------------------------
-   example/value contains:
-     42
    cache/lastfailed contains:
      {'test_caching.py::test_function': True}
+   example/value contains:
+     42

    ======= no tests ran in 0.12 seconds ========

@@ -64,8 +64,8 @@ of the failing function and hide the other one::

    $ pytest
    ======= test session starts ========
-   platform linux -- Python 3.5.2, pytest-3.0.6, py-1.4.33, pluggy-0.4.0
+   platform linux -- Python 3.5.2, pytest-3.0.7, py-1.4.32, pluggy-0.4.0
    rootdir: $REGENDOC_TMPDIR, inifile:
    collected 2 items

    test_module.py .F

@@ -62,7 +62,7 @@ then you can just invoke ``pytest`` without command line options::

    $ pytest
    ======= test session starts ========
-   platform linux -- Python 3.5.2, pytest-3.0.6, py-1.4.33, pluggy-0.4.0
+   platform linux -- Python 3.5.2, pytest-3.0.7, py-1.4.32, pluggy-0.4.0
    rootdir: $REGENDOC_TMPDIR, inifile: pytest.ini
    collected 1 items

@@ -31,9 +31,9 @@ You can then restrict a test run to only run tests marked with ``webtest``::

    $ pytest -v -m webtest
    ======= test session starts ========
-   platform linux -- Python 3.5.2, pytest-3.0.6, py-1.4.33, pluggy-0.4.0 -- $PYTHON_PREFIX/bin/python3.5m
+   platform linux -- Python 3.5.2, pytest-3.0.7, py-1.4.32, pluggy-0.4.0 -- $PYTHON_PREFIX/bin/python3.5
    cachedir: .cache
    rootdir: $REGENDOC_TMPDIR, inifile:
    collecting ... collected 4 items

    test_server.py::test_send_http PASSED

@@ -45,9 +45,9 @@ Or the inverse, running all tests except the webtest ones::

    $ pytest -v -m "not webtest"
    ======= test session starts ========
-   platform linux -- Python 3.5.2, pytest-3.0.6, py-1.4.33, pluggy-0.4.0 -- $PYTHON_PREFIX/bin/python3.5m
+   platform linux -- Python 3.5.2, pytest-3.0.7, py-1.4.32, pluggy-0.4.0 -- $PYTHON_PREFIX/bin/python3.5
    cachedir: .cache
    rootdir: $REGENDOC_TMPDIR, inifile:
    collecting ... collected 4 items

    test_server.py::test_something_quick PASSED

@@ -66,9 +66,9 @@ tests based on their module, class, method, or function name::

    $ pytest -v test_server.py::TestClass::test_method
    ======= test session starts ========
-   platform linux -- Python 3.5.2, pytest-3.0.6, py-1.4.33, pluggy-0.4.0 -- $PYTHON_PREFIX/bin/python3.5m
+   platform linux -- Python 3.5.2, pytest-3.0.7, py-1.4.32, pluggy-0.4.0 -- $PYTHON_PREFIX/bin/python3.5
    cachedir: .cache
    rootdir: $REGENDOC_TMPDIR, inifile:
    collecting ... collected 5 items

    test_server.py::TestClass::test_method PASSED

@@ -79,9 +79,9 @@ You can also select on the class::

    $ pytest -v test_server.py::TestClass
    ======= test session starts ========
-   platform linux -- Python 3.5.2, pytest-3.0.6, py-1.4.33, pluggy-0.4.0 -- $PYTHON_PREFIX/bin/python3.5m
+   platform linux -- Python 3.5.2, pytest-3.0.7, py-1.4.32, pluggy-0.4.0 -- $PYTHON_PREFIX/bin/python3.5
    cachedir: .cache
    rootdir: $REGENDOC_TMPDIR, inifile:
    collecting ... collected 4 items

    test_server.py::TestClass::test_method PASSED

@@ -92,9 +92,9 @@ Or select multiple nodes::

    $ pytest -v test_server.py::TestClass test_server.py::test_send_http
    ======= test session starts ========
-   platform linux -- Python 3.5.2, pytest-3.0.6, py-1.4.33, pluggy-0.4.0 -- $PYTHON_PREFIX/bin/python3.5m
+   platform linux -- Python 3.5.2, pytest-3.0.7, py-1.4.32, pluggy-0.4.0 -- $PYTHON_PREFIX/bin/python3.5
    cachedir: .cache
    rootdir: $REGENDOC_TMPDIR, inifile:
    collecting ... collected 8 items

    test_server.py::TestClass::test_method PASSED

@@ -130,9 +130,9 @@ select tests based on their names::

    $ pytest -v -k http # running with the above defined example module
    ======= test session starts ========
-   platform linux -- Python 3.5.2, pytest-3.0.6, py-1.4.33, pluggy-0.4.0 -- $PYTHON_PREFIX/bin/python3.5m
+   platform linux -- Python 3.5.2, pytest-3.0.7, py-1.4.32, pluggy-0.4.0 -- $PYTHON_PREFIX/bin/python3.5
    cachedir: .cache
    rootdir: $REGENDOC_TMPDIR, inifile:
    collecting ... collected 4 items

    test_server.py::test_send_http PASSED

@@ -144,9 +144,9 @@ And you can also run all tests except the ones that match the keyword::

    $ pytest -k "not send_http" -v
    ======= test session starts ========
-   platform linux -- Python 3.5.2, pytest-3.0.6, py-1.4.33, pluggy-0.4.0 -- $PYTHON_PREFIX/bin/python3.5m
+   platform linux -- Python 3.5.2, pytest-3.0.7, py-1.4.32, pluggy-0.4.0 -- $PYTHON_PREFIX/bin/python3.5
    cachedir: .cache
    rootdir: $REGENDOC_TMPDIR, inifile:
    collecting ... collected 4 items

    test_server.py::test_something_quick PASSED

@@ -160,9 +160,9 @@ Or to select "http" and "quick" tests::

    $ pytest -k "http or quick" -v
    ======= test session starts ========
-   platform linux -- Python 3.5.2, pytest-3.0.6, py-1.4.33, pluggy-0.4.0 -- $PYTHON_PREFIX/bin/python3.5m
+   platform linux -- Python 3.5.2, pytest-3.0.7, py-1.4.32, pluggy-0.4.0 -- $PYTHON_PREFIX/bin/python3.5
    cachedir: .cache
    rootdir: $REGENDOC_TMPDIR, inifile:
    collecting ... collected 4 items

    test_server.py::test_send_http PASSED

@@ -352,8 +352,8 @@ the test needs::

    $ pytest -E stage2
    ======= test session starts ========
|
======= test session starts ========
|
||||||
platform linux -- Python 3.5.2, pytest-3.0.6, py-1.4.33, pluggy-0.4.0
|
platform linux -- Python 3.5.2, pytest-3.0.7, py-1.4.32, pluggy-0.4.0
|
||||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||||
collected 1 items
|
collected 1 items
|
||||||
|
|
||||||
test_someenv.py s
|
test_someenv.py s
|
||||||
|
@ -364,8 +364,8 @@ and here is one that specifies exactly the environment needed::
|
||||||
|
|
||||||
$ pytest -E stage1
|
$ pytest -E stage1
|
||||||
======= test session starts ========
|
======= test session starts ========
|
||||||
platform linux -- Python 3.5.2, pytest-3.0.6, py-1.4.33, pluggy-0.4.0
|
platform linux -- Python 3.5.2, pytest-3.0.7, py-1.4.32, pluggy-0.4.0
|
||||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||||
collected 1 items
|
collected 1 items
|
||||||
|
|
||||||
test_someenv.py .
|
test_someenv.py .
|
||||||
|
@ -485,8 +485,8 @@ then you will see two tests skipped and two executed tests as expected::
|
||||||
|
|
||||||
$ pytest -rs # this option reports skip reasons
|
$ pytest -rs # this option reports skip reasons
|
||||||
======= test session starts ========
|
======= test session starts ========
|
||||||
platform linux -- Python 3.5.2, pytest-3.0.6, py-1.4.33, pluggy-0.4.0
|
platform linux -- Python 3.5.2, pytest-3.0.7, py-1.4.32, pluggy-0.4.0
|
||||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||||
collected 4 items
|
collected 4 items
|
||||||
|
|
||||||
test_plat.py s.s.
|
test_plat.py s.s.
|
||||||
|
@ -499,8 +499,8 @@ Note that if you specify a platform via the marker-command line option like this
|
||||||
|
|
||||||
$ pytest -m linux
|
$ pytest -m linux
|
||||||
======= test session starts ========
|
======= test session starts ========
|
||||||
platform linux -- Python 3.5.2, pytest-3.0.6, py-1.4.33, pluggy-0.4.0
|
platform linux -- Python 3.5.2, pytest-3.0.7, py-1.4.32, pluggy-0.4.0
|
||||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||||
collected 4 items
|
collected 4 items
|
||||||
|
|
||||||
test_plat.py .
|
test_plat.py .
|
||||||
|
@ -551,8 +551,8 @@ We can now use the ``-m option`` to select one set::
|
||||||
|
|
||||||
$ pytest -m interface --tb=short
|
$ pytest -m interface --tb=short
|
||||||
======= test session starts ========
|
======= test session starts ========
|
||||||
platform linux -- Python 3.5.2, pytest-3.0.6, py-1.4.33, pluggy-0.4.0
|
platform linux -- Python 3.5.2, pytest-3.0.7, py-1.4.32, pluggy-0.4.0
|
||||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||||
collected 4 items
|
collected 4 items
|
||||||
|
|
||||||
test_module.py FF
|
test_module.py FF
|
||||||
|
@ -573,8 +573,8 @@ or to select both "event" and "interface" tests::
|
||||||
|
|
||||||
$ pytest -m "interface or event" --tb=short
|
$ pytest -m "interface or event" --tb=short
|
||||||
======= test session starts ========
|
======= test session starts ========
|
||||||
platform linux -- Python 3.5.2, pytest-3.0.6, py-1.4.33, pluggy-0.4.0
|
platform linux -- Python 3.5.2, pytest-3.0.7, py-1.4.32, pluggy-0.4.0
|
||||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||||
collected 4 items
|
collected 4 items
|
||||||
|
|
||||||
test_module.py FFF
|
test_module.py FFF
|
||||||
|
|
|
@ -27,8 +27,8 @@ now execute the test specification::
|
||||||
|
|
||||||
nonpython $ pytest test_simple.yml
|
nonpython $ pytest test_simple.yml
|
||||||
======= test session starts ========
|
======= test session starts ========
|
||||||
platform linux -- Python 3.5.2, pytest-3.0.6, py-1.4.33, pluggy-0.4.0
|
platform linux -- Python 3.5.2, pytest-3.0.7, py-1.4.32, pluggy-0.4.0
|
||||||
rootdir: $REGENDOC_TMPDIR/nonpython, inifile:
|
rootdir: $REGENDOC_TMPDIR/nonpython, inifile:
|
||||||
collected 2 items
|
collected 2 items
|
||||||
|
|
||||||
test_simple.yml F.
|
test_simple.yml F.
|
||||||
|
@ -59,9 +59,9 @@ consulted when reporting in ``verbose`` mode::
|
||||||
|
|
||||||
nonpython $ pytest -v
|
nonpython $ pytest -v
|
||||||
======= test session starts ========
|
======= test session starts ========
|
||||||
platform linux -- Python 3.5.2, pytest-3.0.6, py-1.4.33, pluggy-0.4.0 -- $PYTHON_PREFIX/bin/python3.5m
|
platform linux -- Python 3.5.2, pytest-3.0.7, py-1.4.32, pluggy-0.4.0 -- $PYTHON_PREFIX/bin/python3.5
|
||||||
cachedir: .cache
|
cachedir: .cache
|
||||||
rootdir: $REGENDOC_TMPDIR/nonpython, inifile:
|
rootdir: $REGENDOC_TMPDIR/nonpython, inifile:
|
||||||
collecting ... collected 2 items
|
collecting ... collected 2 items
|
||||||
|
|
||||||
test_simple.yml::hello FAILED
|
test_simple.yml::hello FAILED
|
||||||
|
@ -81,8 +81,8 @@ interesting to just look at the collection tree::
|
||||||
|
|
||||||
nonpython $ pytest --collect-only
|
nonpython $ pytest --collect-only
|
||||||
======= test session starts ========
|
======= test session starts ========
|
||||||
platform linux -- Python 3.5.2, pytest-3.0.6, py-1.4.33, pluggy-0.4.0
|
platform linux -- Python 3.5.2, pytest-3.0.7, py-1.4.32, pluggy-0.4.0
|
||||||
rootdir: $REGENDOC_TMPDIR/nonpython, inifile:
|
rootdir: $REGENDOC_TMPDIR/nonpython, inifile:
|
||||||
collected 2 items
|
collected 2 items
|
||||||
<YamlFile 'test_simple.yml'>
|
<YamlFile 'test_simple.yml'>
|
||||||
<YamlItem 'hello'>
|
<YamlItem 'hello'>
|
||||||
|
|
|
@ -130,8 +130,8 @@ objects, they are still using the default pytest representation::
|
||||||
|
|
||||||
$ pytest test_time.py --collect-only
|
$ pytest test_time.py --collect-only
|
||||||
======= test session starts ========
|
======= test session starts ========
|
||||||
platform linux -- Python 3.5.2, pytest-3.0.6, py-1.4.33, pluggy-0.4.0
|
platform linux -- Python 3.5.2, pytest-3.0.7, py-1.4.32, pluggy-0.4.0
|
||||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||||
collected 6 items
|
collected 6 items
|
||||||
<Module 'test_time.py'>
|
<Module 'test_time.py'>
|
||||||
<Function 'test_timedistance_v0[a0-b0-expected0]'>
|
<Function 'test_timedistance_v0[a0-b0-expected0]'>
|
||||||
|
@ -181,8 +181,8 @@ this is a fully self-contained example which you can run with::
|
||||||
|
|
||||||
$ pytest test_scenarios.py
|
$ pytest test_scenarios.py
|
||||||
======= test session starts ========
|
======= test session starts ========
|
||||||
platform linux -- Python 3.5.2, pytest-3.0.6, py-1.4.33, pluggy-0.4.0
|
platform linux -- Python 3.5.2, pytest-3.0.7, py-1.4.32, pluggy-0.4.0
|
||||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||||
collected 4 items
|
collected 4 items
|
||||||
|
|
||||||
test_scenarios.py ....
|
test_scenarios.py ....
|
||||||
|
@ -194,8 +194,8 @@ If you just collect tests you'll also nicely see 'advanced' and 'basic' as varia
|
||||||
|
|
||||||
$ pytest --collect-only test_scenarios.py
|
$ pytest --collect-only test_scenarios.py
|
||||||
======= test session starts ========
|
======= test session starts ========
|
||||||
platform linux -- Python 3.5.2, pytest-3.0.6, py-1.4.33, pluggy-0.4.0
|
platform linux -- Python 3.5.2, pytest-3.0.7, py-1.4.32, pluggy-0.4.0
|
||||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||||
collected 4 items
|
collected 4 items
|
||||||
<Module 'test_scenarios.py'>
|
<Module 'test_scenarios.py'>
|
||||||
<Class 'TestSampleWithScenarios'>
|
<Class 'TestSampleWithScenarios'>
|
||||||
|
@ -259,8 +259,8 @@ Let's first see how it looks like at collection time::
|
||||||
|
|
||||||
$ pytest test_backends.py --collect-only
|
$ pytest test_backends.py --collect-only
|
||||||
======= test session starts ========
|
======= test session starts ========
|
||||||
platform linux -- Python 3.5.2, pytest-3.0.6, py-1.4.33, pluggy-0.4.0
|
platform linux -- Python 3.5.2, pytest-3.0.7, py-1.4.32, pluggy-0.4.0
|
||||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||||
collected 2 items
|
collected 2 items
|
||||||
<Module 'test_backends.py'>
|
<Module 'test_backends.py'>
|
||||||
<Function 'test_db_initialized[d1]'>
|
<Function 'test_db_initialized[d1]'>
|
||||||
|
@ -320,8 +320,8 @@ The result of this test will be successful::
|
||||||
|
|
||||||
$ pytest test_indirect_list.py --collect-only
|
$ pytest test_indirect_list.py --collect-only
|
||||||
======= test session starts ========
|
======= test session starts ========
|
||||||
platform linux -- Python 3.5.2, pytest-3.0.6, py-1.4.33, pluggy-0.4.0
|
platform linux -- Python 3.5.2, pytest-3.0.7, py-1.4.32, pluggy-0.4.0
|
||||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||||
collected 1 items
|
collected 1 items
|
||||||
<Module 'test_indirect_list.py'>
|
<Module 'test_indirect_list.py'>
|
||||||
<Function 'test_indirect[a-b]'>
|
<Function 'test_indirect[a-b]'>
|
||||||
|
@@ -397,12 +397,10 @@ is to be run with different sets of arguments for its three arguments:
 Running it results in some skips if we don't have all the python interpreters installed and otherwise runs all combinations (5 interpreters times 5 interpreters times 3 objects to serialize/deserialize)::

 . $ pytest -rs -q multipython.py
-sssssssssssssssssssssssssssssssssssssssssssss...
+sssssssssssssss.........sss.........sss.........
 ======= short test summary info ========
-SKIP [15] $REGENDOC_TMPDIR/CWD/multipython.py:23: 'python2.6' not found
-SKIP [15] $REGENDOC_TMPDIR/CWD/multipython.py:23: 'python3.4' not found
-SKIP [15] $REGENDOC_TMPDIR/CWD/multipython.py:23: 'python2.7' not found
-3 passed, 45 skipped in 0.12 seconds
+SKIP [21] $REGENDOC_TMPDIR/CWD/multipython.py:23: 'python2.6' not found
+27 passed, 21 skipped in 0.12 seconds

 Indirect parametrization of optional implementations/imports
 --------------------------------------------------------------------
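The skip summary above reflects interpreters that are missing on the machine regenerating the docs; the same "skip when an optional dependency is absent" pattern is usually written with ``pytest.importorskip``. A minimal sketch, with hypothetical module names:

.. code-block:: python

    # sketch of a conftest.py fixture that skips when an optional backend is missing
    import pytest

    @pytest.fixture(params=["opt1", "opt2"])
    def basemod(request):
        # importorskip() returns the module, or skips the test if it cannot be imported
        return pytest.importorskip(request.param)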
@ -449,8 +447,8 @@ If you run this with reporting for skips enabled::
|
||||||
|
|
||||||
$ pytest -rs test_module.py
|
$ pytest -rs test_module.py
|
||||||
======= test session starts ========
|
======= test session starts ========
|
||||||
platform linux -- Python 3.5.2, pytest-3.0.6, py-1.4.33, pluggy-0.4.0
|
platform linux -- Python 3.5.2, pytest-3.0.7, py-1.4.32, pluggy-0.4.0
|
||||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||||
collected 2 items
|
collected 2 items
|
||||||
|
|
||||||
test_module.py .s
|
test_module.py .s
|
||||||
|
|
|
@ -117,7 +117,7 @@ then the test collection looks like this::
|
||||||
|
|
||||||
$ pytest --collect-only
|
$ pytest --collect-only
|
||||||
======= test session starts ========
|
======= test session starts ========
|
||||||
platform linux -- Python 3.5.2, pytest-3.0.6, py-1.4.33, pluggy-0.4.0
|
platform linux -- Python 3.5.2, pytest-3.0.7, py-1.4.32, pluggy-0.4.0
|
||||||
rootdir: $REGENDOC_TMPDIR, inifile: pytest.ini
|
rootdir: $REGENDOC_TMPDIR, inifile: pytest.ini
|
||||||
collected 2 items
|
collected 2 items
|
||||||
<Module 'check_myapp.py'>
|
<Module 'check_myapp.py'>
|
||||||
|
@ -163,7 +163,7 @@ You can always peek at the collection tree without running tests like this::
|
||||||
|
|
||||||
. $ pytest --collect-only pythoncollection.py
|
. $ pytest --collect-only pythoncollection.py
|
||||||
======= test session starts ========
|
======= test session starts ========
|
||||||
platform linux -- Python 3.5.2, pytest-3.0.6, py-1.4.33, pluggy-0.4.0
|
platform linux -- Python 3.5.2, pytest-3.0.7, py-1.4.32, pluggy-0.4.0
|
||||||
rootdir: $REGENDOC_TMPDIR, inifile: pytest.ini
|
rootdir: $REGENDOC_TMPDIR, inifile: pytest.ini
|
||||||
collected 3 items
|
collected 3 items
|
||||||
<Module 'CWD/pythoncollection.py'>
|
<Module 'CWD/pythoncollection.py'>
|
||||||
|
@ -230,7 +230,7 @@ will be left out::
|
||||||
|
|
||||||
$ pytest --collect-only
|
$ pytest --collect-only
|
||||||
======= test session starts ========
|
======= test session starts ========
|
||||||
platform linux -- Python 3.5.2, pytest-3.0.6, py-1.4.33, pluggy-0.4.0
|
platform linux -- Python 3.5.2, pytest-3.0.7, py-1.4.32, pluggy-0.4.0
|
||||||
rootdir: $REGENDOC_TMPDIR, inifile: pytest.ini
|
rootdir: $REGENDOC_TMPDIR, inifile: pytest.ini
|
||||||
collected 0 items
|
collected 0 items
|
||||||
|
|
||||||
|
|
|
@ -11,8 +11,8 @@ get on the terminal - we are working on that)::
|
||||||
|
|
||||||
assertion $ pytest failure_demo.py
|
assertion $ pytest failure_demo.py
|
||||||
======= test session starts ========
|
======= test session starts ========
|
||||||
platform linux -- Python 3.5.2, pytest-3.0.6, py-1.4.33, pluggy-0.4.0
|
platform linux -- Python 3.5.2, pytest-3.0.7, py-1.4.32, pluggy-0.4.0
|
||||||
rootdir: $REGENDOC_TMPDIR/assertion, inifile:
|
rootdir: $REGENDOC_TMPDIR/assertion, inifile:
|
||||||
collected 42 items
|
collected 42 items
|
||||||
|
|
||||||
failure_demo.py FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF
|
failure_demo.py FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF
|
||||||
|
@ -81,7 +81,7 @@ get on the terminal - we are working on that)::
|
||||||
|
|
||||||
def test_eq_text(self):
|
def test_eq_text(self):
|
||||||
> assert 'spam' == 'eggs'
|
> assert 'spam' == 'eggs'
|
||||||
E assert 'spam' == 'eggs'
|
E AssertionError: assert 'spam' == 'eggs'
|
||||||
E - spam
|
E - spam
|
||||||
E + eggs
|
E + eggs
|
||||||
|
|
||||||
|
@ -92,7 +92,7 @@ get on the terminal - we are working on that)::
|
||||||
|
|
||||||
def test_eq_similar_text(self):
|
def test_eq_similar_text(self):
|
||||||
> assert 'foo 1 bar' == 'foo 2 bar'
|
> assert 'foo 1 bar' == 'foo 2 bar'
|
||||||
E assert 'foo 1 bar' == 'foo 2 bar'
|
E AssertionError: assert 'foo 1 bar' == 'foo 2 bar'
|
||||||
E - foo 1 bar
|
E - foo 1 bar
|
||||||
E ? ^
|
E ? ^
|
||||||
E + foo 2 bar
|
E + foo 2 bar
|
||||||
|
@ -105,7 +105,7 @@ get on the terminal - we are working on that)::
|
||||||
|
|
||||||
def test_eq_multiline_text(self):
|
def test_eq_multiline_text(self):
|
||||||
> assert 'foo\nspam\nbar' == 'foo\neggs\nbar'
|
> assert 'foo\nspam\nbar' == 'foo\neggs\nbar'
|
||||||
E assert 'foo\nspam\nbar' == 'foo\neggs\nbar'
|
E AssertionError: assert 'foo\nspam\nbar' == 'foo\neggs\nbar'
|
||||||
E foo
|
E foo
|
||||||
E - spam
|
E - spam
|
||||||
E + eggs
|
E + eggs
|
||||||
|
@ -120,7 +120,7 @@ get on the terminal - we are working on that)::
|
||||||
a = '1'*100 + 'a' + '2'*100
|
a = '1'*100 + 'a' + '2'*100
|
||||||
b = '1'*100 + 'b' + '2'*100
|
b = '1'*100 + 'b' + '2'*100
|
||||||
> assert a == b
|
> assert a == b
|
||||||
E assert '111111111111...2222222222222' == '1111111111111...2222222222222'
|
E AssertionError: assert '111111111111...2222222222222' == '1111111111111...2222222222222'
|
||||||
E Skipping 90 identical leading characters in diff, use -v to show
|
E Skipping 90 identical leading characters in diff, use -v to show
|
||||||
E Skipping 91 identical trailing characters in diff, use -v to show
|
E Skipping 91 identical trailing characters in diff, use -v to show
|
||||||
E - 1111111111a222222222
|
E - 1111111111a222222222
|
||||||
|
@ -137,7 +137,7 @@ get on the terminal - we are working on that)::
|
||||||
a = '1\n'*100 + 'a' + '2\n'*100
|
a = '1\n'*100 + 'a' + '2\n'*100
|
||||||
b = '1\n'*100 + 'b' + '2\n'*100
|
b = '1\n'*100 + 'b' + '2\n'*100
|
||||||
> assert a == b
|
> assert a == b
|
||||||
E assert '1\n1\n1\n1\n...n2\n2\n2\n2\n' == '1\n1\n1\n1\n1...n2\n2\n2\n2\n'
|
E AssertionError: assert '1\n1\n1\n1\n...n2\n2\n2\n2\n' == '1\n1\n1\n1\n1...n2\n2\n2\n2\n'
|
||||||
E Skipping 190 identical leading characters in diff, use -v to show
|
E Skipping 190 identical leading characters in diff, use -v to show
|
||||||
E Skipping 191 identical trailing characters in diff, use -v to show
|
E Skipping 191 identical trailing characters in diff, use -v to show
|
||||||
E 1
|
E 1
|
||||||
|
@ -183,7 +183,7 @@ get on the terminal - we are working on that)::
|
||||||
|
|
||||||
def test_eq_dict(self):
|
def test_eq_dict(self):
|
||||||
> assert {'a': 0, 'b': 1, 'c': 0} == {'a': 0, 'b': 2, 'd': 0}
|
> assert {'a': 0, 'b': 1, 'c': 0} == {'a': 0, 'b': 2, 'd': 0}
|
||||||
E assert {'a': 0, 'b': 1, 'c': 0} == {'a': 0, 'b': 2, 'd': 0}
|
E AssertionError: assert {'a': 0, 'b': 1, 'c': 0} == {'a': 0, 'b': 2, 'd': 0}
|
||||||
E Omitting 1 identical items, use -v to show
|
E Omitting 1 identical items, use -v to show
|
||||||
E Differing items:
|
E Differing items:
|
||||||
E {'b': 1} != {'b': 2}
|
E {'b': 1} != {'b': 2}
|
||||||
|
@ -238,7 +238,7 @@ get on the terminal - we are working on that)::
|
||||||
def test_not_in_text_multiline(self):
|
def test_not_in_text_multiline(self):
|
||||||
text = 'some multiline\ntext\nwhich\nincludes foo\nand a\ntail'
|
text = 'some multiline\ntext\nwhich\nincludes foo\nand a\ntail'
|
||||||
> assert 'foo' not in text
|
> assert 'foo' not in text
|
||||||
E assert 'foo' not in 'some multiline\ntext\nw...ncludes foo\nand a\ntail'
|
E AssertionError: assert 'foo' not in 'some multiline\ntext\nw...ncludes foo\nand a\ntail'
|
||||||
E 'foo' is contained here:
|
E 'foo' is contained here:
|
||||||
E some multiline
|
E some multiline
|
||||||
E text
|
E text
|
||||||
|
@ -256,7 +256,7 @@ get on the terminal - we are working on that)::
|
||||||
def test_not_in_text_single(self):
|
def test_not_in_text_single(self):
|
||||||
text = 'single foo line'
|
text = 'single foo line'
|
||||||
> assert 'foo' not in text
|
> assert 'foo' not in text
|
||||||
E assert 'foo' not in 'single foo line'
|
E AssertionError: assert 'foo' not in 'single foo line'
|
||||||
E 'foo' is contained here:
|
E 'foo' is contained here:
|
||||||
E single foo line
|
E single foo line
|
||||||
E ? +++
|
E ? +++
|
||||||
|
@ -269,7 +269,7 @@ get on the terminal - we are working on that)::
|
||||||
def test_not_in_text_single_long(self):
|
def test_not_in_text_single_long(self):
|
||||||
text = 'head ' * 50 + 'foo ' + 'tail ' * 20
|
text = 'head ' * 50 + 'foo ' + 'tail ' * 20
|
||||||
> assert 'foo' not in text
|
> assert 'foo' not in text
|
||||||
E assert 'foo' not in 'head head head head hea...ail tail tail tail tail '
|
E AssertionError: assert 'foo' not in 'head head head head hea...ail tail tail tail tail '
|
||||||
E 'foo' is contained here:
|
E 'foo' is contained here:
|
||||||
E head head foo tail tail tail tail tail tail tail tail tail tail tail tail tail tail tail tail tail tail tail tail
|
E head head foo tail tail tail tail tail tail tail tail tail tail tail tail tail tail tail tail tail tail tail tail
|
||||||
E ? +++
|
E ? +++
|
||||||
|
@ -282,7 +282,7 @@ get on the terminal - we are working on that)::
|
||||||
def test_not_in_text_single_long_term(self):
|
def test_not_in_text_single_long_term(self):
|
||||||
text = 'head ' * 50 + 'f'*70 + 'tail ' * 20
|
text = 'head ' * 50 + 'f'*70 + 'tail ' * 20
|
||||||
> assert 'f'*70 not in text
|
> assert 'f'*70 not in text
|
||||||
E assert 'fffffffffff...ffffffffffff' not in 'head head he...l tail tail '
|
E AssertionError: assert 'fffffffffff...ffffffffffff' not in 'head head he...l tail tail '
|
||||||
E 'ffffffffffffffffff...fffffffffffffffffff' is contained here:
|
E 'ffffffffffffffffff...fffffffffffffffffff' is contained here:
|
||||||
E head head fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffftail tail tail tail tail tail tail tail tail tail tail tail tail tail tail tail tail tail tail tail
|
E head head fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffftail tail tail tail tail tail tail tail tail tail tail tail tail tail tail tail tail tail tail tail
|
||||||
E ? ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
|
E ? ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
|
||||||
|
@ -305,7 +305,7 @@ get on the terminal - we are working on that)::
|
||||||
class Foo(object):
|
class Foo(object):
|
||||||
b = 1
|
b = 1
|
||||||
> assert Foo().b == 2
|
> assert Foo().b == 2
|
||||||
E assert 1 == 2
|
E AssertionError: assert 1 == 2
|
||||||
E + where 1 = <failure_demo.test_attribute_instance.<locals>.Foo object at 0xdeadbeef>.b
|
E + where 1 = <failure_demo.test_attribute_instance.<locals>.Foo object at 0xdeadbeef>.b
|
||||||
E + where <failure_demo.test_attribute_instance.<locals>.Foo object at 0xdeadbeef> = <class 'failure_demo.test_attribute_instance.<locals>.Foo'>()
|
E + where <failure_demo.test_attribute_instance.<locals>.Foo object at 0xdeadbeef> = <class 'failure_demo.test_attribute_instance.<locals>.Foo'>()
|
||||||
|
|
||||||
|
@ -338,7 +338,7 @@ get on the terminal - we are working on that)::
|
||||||
class Bar(object):
|
class Bar(object):
|
||||||
b = 2
|
b = 2
|
||||||
> assert Foo().b == Bar().b
|
> assert Foo().b == Bar().b
|
||||||
E assert 1 == 2
|
E AssertionError: assert 1 == 2
|
||||||
E + where 1 = <failure_demo.test_attribute_multiple.<locals>.Foo object at 0xdeadbeef>.b
|
E + where 1 = <failure_demo.test_attribute_multiple.<locals>.Foo object at 0xdeadbeef>.b
|
||||||
E + where <failure_demo.test_attribute_multiple.<locals>.Foo object at 0xdeadbeef> = <class 'failure_demo.test_attribute_multiple.<locals>.Foo'>()
|
E + where <failure_demo.test_attribute_multiple.<locals>.Foo object at 0xdeadbeef> = <class 'failure_demo.test_attribute_multiple.<locals>.Foo'>()
|
||||||
E + and 2 = <failure_demo.test_attribute_multiple.<locals>.Bar object at 0xdeadbeef>.b
|
E + and 2 = <failure_demo.test_attribute_multiple.<locals>.Bar object at 0xdeadbeef>.b
|
||||||
|
@ -480,7 +480,7 @@ get on the terminal - we are working on that)::
|
||||||
s = "123"
|
s = "123"
|
||||||
g = "456"
|
g = "456"
|
||||||
> assert s.startswith(g)
|
> assert s.startswith(g)
|
||||||
E assert False
|
E AssertionError: assert False
|
||||||
E + where False = <built-in method startswith of str object at 0xdeadbeef>('456')
|
E + where False = <built-in method startswith of str object at 0xdeadbeef>('456')
|
||||||
E + where <built-in method startswith of str object at 0xdeadbeef> = '123'.startswith
|
E + where <built-in method startswith of str object at 0xdeadbeef> = '123'.startswith
|
||||||
|
|
||||||
|
@ -495,7 +495,7 @@ get on the terminal - we are working on that)::
|
||||||
def g():
|
def g():
|
||||||
return "456"
|
return "456"
|
||||||
> assert f().startswith(g())
|
> assert f().startswith(g())
|
||||||
E assert False
|
E AssertionError: assert False
|
||||||
E + where False = <built-in method startswith of str object at 0xdeadbeef>('456')
|
E + where False = <built-in method startswith of str object at 0xdeadbeef>('456')
|
||||||
E + where <built-in method startswith of str object at 0xdeadbeef> = '123'.startswith
|
E + where <built-in method startswith of str object at 0xdeadbeef> = '123'.startswith
|
||||||
E + where '123' = <function TestMoreErrors.test_startswith_nested.<locals>.f at 0xdeadbeef>()
|
E + where '123' = <function TestMoreErrors.test_startswith_nested.<locals>.f at 0xdeadbeef>()
|
||||||
|
|
|
@ -113,8 +113,8 @@ directory with the above conftest.py::
|
||||||
|
|
||||||
$ pytest
|
$ pytest
|
||||||
======= test session starts ========
|
======= test session starts ========
|
||||||
platform linux -- Python 3.5.2, pytest-3.0.6, py-1.4.33, pluggy-0.4.0
|
platform linux -- Python 3.5.2, pytest-3.0.7, py-1.4.32, pluggy-0.4.0
|
||||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||||
collected 0 items
|
collected 0 items
|
||||||
|
|
||||||
======= no tests ran in 0.12 seconds ========
|
======= no tests ran in 0.12 seconds ========
|
||||||
|
@ -164,8 +164,8 @@ and when running it will see a skipped "slow" test::
|
||||||
|
|
||||||
$ pytest -rs # "-rs" means report details on the little 's'
|
$ pytest -rs # "-rs" means report details on the little 's'
|
||||||
======= test session starts ========
|
======= test session starts ========
|
||||||
platform linux -- Python 3.5.2, pytest-3.0.6, py-1.4.33, pluggy-0.4.0
|
platform linux -- Python 3.5.2, pytest-3.0.7, py-1.4.32, pluggy-0.4.0
|
||||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||||
collected 2 items
|
collected 2 items
|
||||||
|
|
||||||
test_module.py .s
|
test_module.py .s
|
||||||
|
@ -178,8 +178,8 @@ Or run it including the ``slow`` marked test::
|
||||||
|
|
||||||
$ pytest --runslow
|
$ pytest --runslow
|
||||||
======= test session starts ========
|
======= test session starts ========
|
||||||
platform linux -- Python 3.5.2, pytest-3.0.6, py-1.4.33, pluggy-0.4.0
|
platform linux -- Python 3.5.2, pytest-3.0.7, py-1.4.32, pluggy-0.4.0
|
||||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||||
collected 2 items
|
collected 2 items
|
||||||
|
|
||||||
test_module.py ..
|
test_module.py ..
|
||||||
|
@ -302,9 +302,9 @@ which will add the string to the test header accordingly::
|
||||||
|
|
||||||
$ pytest
|
$ pytest
|
||||||
======= test session starts ========
|
======= test session starts ========
|
||||||
platform linux -- Python 3.5.2, pytest-3.0.6, py-1.4.33, pluggy-0.4.0
|
platform linux -- Python 3.5.2, pytest-3.0.7, py-1.4.32, pluggy-0.4.0
|
||||||
project deps: mylib-1.1
|
project deps: mylib-1.1
|
||||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||||
collected 0 items
|
collected 0 items
|
||||||
|
|
||||||
======= no tests ran in 0.12 seconds ========
|
======= no tests ran in 0.12 seconds ========
|
||||||
|
@ -327,11 +327,11 @@ which will add info only when run with "--v"::
|
||||||
|
|
||||||
$ pytest -v
|
$ pytest -v
|
||||||
======= test session starts ========
|
======= test session starts ========
|
||||||
platform linux -- Python 3.5.2, pytest-3.0.6, py-1.4.33, pluggy-0.4.0 -- $PYTHON_PREFIX/bin/python3.5m
|
platform linux -- Python 3.5.2, pytest-3.0.7, py-1.4.32, pluggy-0.4.0 -- $PYTHON_PREFIX/bin/python3.5
|
||||||
cachedir: .cache
|
cachedir: .cache
|
||||||
info1: did you know that ...
|
info1: did you know that ...
|
||||||
did you?
|
did you?
|
||||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||||
collecting ... collected 0 items
|
collecting ... collected 0 items
|
||||||
|
|
||||||
======= no tests ran in 0.12 seconds ========
|
======= no tests ran in 0.12 seconds ========
|
||||||
|
@ -340,8 +340,8 @@ and nothing when run plainly::
|
||||||
|
|
||||||
$ pytest
|
$ pytest
|
||||||
======= test session starts ========
|
======= test session starts ========
|
||||||
platform linux -- Python 3.5.2, pytest-3.0.6, py-1.4.33, pluggy-0.4.0
|
platform linux -- Python 3.5.2, pytest-3.0.7, py-1.4.32, pluggy-0.4.0
|
||||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||||
collected 0 items
|
collected 0 items
|
||||||
|
|
||||||
======= no tests ran in 0.12 seconds ========
|
======= no tests ran in 0.12 seconds ========
|
||||||
|
@ -374,8 +374,8 @@ Now we can profile which test functions execute the slowest::
|
||||||
|
|
||||||
$ pytest --durations=3
|
$ pytest --durations=3
|
||||||
======= test session starts ========
|
======= test session starts ========
|
||||||
platform linux -- Python 3.5.2, pytest-3.0.6, py-1.4.33, pluggy-0.4.0
|
platform linux -- Python 3.5.2, pytest-3.0.7, py-1.4.32, pluggy-0.4.0
|
||||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||||
collected 3 items
|
collected 3 items
|
||||||
|
|
||||||
test_some_are_slow.py ...
|
test_some_are_slow.py ...
|
||||||
|
@ -440,8 +440,8 @@ If we run this::
|
||||||
|
|
||||||
$ pytest -rx
|
$ pytest -rx
|
||||||
======= test session starts ========
|
======= test session starts ========
|
||||||
platform linux -- Python 3.5.2, pytest-3.0.6, py-1.4.33, pluggy-0.4.0
|
platform linux -- Python 3.5.2, pytest-3.0.7, py-1.4.32, pluggy-0.4.0
|
||||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||||
collected 4 items
|
collected 4 items
|
||||||
|
|
||||||
test_step.py .Fx.
|
test_step.py .Fx.
|
||||||
|
@ -519,8 +519,8 @@ We can run this::
|
||||||
|
|
||||||
$ pytest
|
$ pytest
|
||||||
======= test session starts ========
|
======= test session starts ========
|
||||||
platform linux -- Python 3.5.2, pytest-3.0.6, py-1.4.33, pluggy-0.4.0
|
platform linux -- Python 3.5.2, pytest-3.0.7, py-1.4.32, pluggy-0.4.0
|
||||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||||
collected 7 items
|
collected 7 items
|
||||||
|
|
||||||
test_step.py .Fx.
|
test_step.py .Fx.
|
||||||
|
@ -627,8 +627,8 @@ and run them::
|
||||||
|
|
||||||
$ pytest test_module.py
|
$ pytest test_module.py
|
||||||
======= test session starts ========
|
======= test session starts ========
|
||||||
platform linux -- Python 3.5.2, pytest-3.0.6, py-1.4.33, pluggy-0.4.0
|
platform linux -- Python 3.5.2, pytest-3.0.7, py-1.4.32, pluggy-0.4.0
|
||||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||||
collected 2 items
|
collected 2 items
|
||||||
|
|
||||||
test_module.py FF
|
test_module.py FF
|
||||||
|
@ -721,8 +721,8 @@ and run it::
|
||||||
|
|
||||||
$ pytest -s test_module.py
|
$ pytest -s test_module.py
|
||||||
======= test session starts ========
|
======= test session starts ========
|
||||||
platform linux -- Python 3.5.2, pytest-3.0.6, py-1.4.33, pluggy-0.4.0
|
platform linux -- Python 3.5.2, pytest-3.0.7, py-1.4.32, pluggy-0.4.0
|
||||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||||
collected 3 items
|
collected 3 items
|
||||||
|
|
||||||
test_module.py Esetting up a test failed! test_module.py::test_setup_fails
|
test_module.py Esetting up a test failed! test_module.py::test_setup_fails
|
||||||
|
|
|
@@ -70,8 +70,8 @@ marked ``smtp`` fixture function. Running the test looks like this::

 $ pytest test_smtpsimple.py
 ======= test session starts ========
-platform linux -- Python 3.5.2, pytest-3.0.6, py-1.4.33, pluggy-0.4.0
+platform linux -- Python 3.5.2, pytest-3.0.7, py-1.4.32, pluggy-0.4.0
 rootdir: $REGENDOC_TMPDIR, inifile:
 collected 1 items

 test_smtpsimple.py F
@@ -188,8 +188,8 @@ inspect what is going on and can now run the tests::

 $ pytest test_module.py
 ======= test session starts ========
-platform linux -- Python 3.5.2, pytest-3.0.6, py-1.4.33, pluggy-0.4.0
+platform linux -- Python 3.5.2, pytest-3.0.7, py-1.4.32, pluggy-0.4.0
 rootdir: $REGENDOC_TMPDIR, inifile:
 collected 2 items

 test_module.py FF
@@ -243,7 +243,9 @@ Fixture finalization / executing teardown code

 pytest supports execution of fixture specific finalization code
 when the fixture goes out of scope. By using a ``yield`` statement instead of ``return``, all
-the code after the *yield* statement serves as the teardown code.::
+the code after the *yield* statement serves as the teardown code:

+.. code-block:: python
+
     # content of conftest.py

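As a rough illustration of the teardown-after-``yield`` idea described in this hunk (a sketch only, not the conftest.py content of the documentation chapter):

.. code-block:: python

    # hypothetical conftest.py
    import smtplib
    import pytest

    @pytest.fixture(scope="module")
    def smtp():
        server = smtplib.SMTP("smtp.gmail.com")
        yield server      # everything before the yield is setup
        server.close()    # everything after the yield runs as teardown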
@@ -275,22 +277,23 @@ occur around each single test. In either case the test
 module itself does not need to change or know about these details
 of fixture setup.

-Note that we can also seamlessly use the ``yield`` syntax with ``with`` statements::
+Note that we can also seamlessly use the ``yield`` syntax with ``with`` statements:

+.. code-block:: python
+
     # content of test_yield2.py

+    import smtplib
     import pytest

-    @pytest.fixture
-    def passwd():
-        with open("/etc/passwd") as f:
-            yield f.readlines()
+    @pytest.fixture(scope="module")
+    def smtp(request):
+        with smtplib.SMTP("smtp.gmail.com") as smtp:
+            yield smtp  # provide the fixture value

-    def test_has_lines(passwd):
-        assert len(passwd) >= 1

-The file ``f`` will be closed after the test finished execution
-because the Python ``file`` object supports finalization when
+The ``smtp`` connection will be closed after the test finished execution
+because the ``smtp`` object automatically closes when
 the ``with`` statement ends.
@@ -352,8 +355,8 @@ again, nothing much has changed::

 $ pytest -s -q --tb=no
 FFfinalizing <smtplib.SMTP object at 0xdeadbeef> (smtp.gmail.com)
-.
-2 failed, 1 passed in 0.12 seconds
+2 failed in 0.12 seconds

 Let's quickly create another test module that actually sets the
 server URL in its module namespace::
@@ -450,7 +453,7 @@ So let's just do another run::
         response, msg = smtp.ehlo()
         assert response == 250
 >       assert b"smtp.gmail.com" in msg
-E       assert b'smtp.gmail.com' in b'mail.python.org\nSIZE 51200000\nETRN\nSTARTTLS\nENHANCEDSTATUSCODES\n8BITMIME\nDSN\nSMTPUTF8'
+E       AssertionError: assert b'smtp.gmail.com' in b'mail.python.org\nSIZE 51200000\nETRN\nSTARTTLS\nENHANCEDSTATUSCODES\n8BITMIME\nDSN\nSMTPUTF8'

 test_module.py:5: AssertionError
 -------------------------- Captured stdout setup ---------------------------
@@ -520,9 +523,9 @@ Running the above tests results in the following test IDs being used::

 $ pytest --collect-only
 ======= test session starts ========
-platform linux -- Python 3.5.2, pytest-3.0.6, py-1.4.33, pluggy-0.4.0
+platform linux -- Python 3.5.2, pytest-3.0.7, py-1.4.32, pluggy-0.4.0
 rootdir: $REGENDOC_TMPDIR, inifile:
-collected 11 items
+collected 10 items
 <Module 'test_anothersmtp.py'>
   <Function 'test_showhelo[smtp.gmail.com]'>
   <Function 'test_showhelo[mail.python.org]'>
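The bracketed IDs in this collection listing come from fixture parametrization; a minimal sketch that produces IDs of this shape (the parameter values are copied from the output above, the fixture body is an assumption):

.. code-block:: python

    import pytest

    @pytest.fixture(scope="module", params=["smtp.gmail.com", "mail.python.org"])
    def smtp(request):
        # each param becomes part of the test ID, e.g. test_noop[smtp.gmail.com]
        return request.param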
@@ -536,8 +539,6 @@ Running the above tests results in the following test IDs being used::
   <Function 'test_noop[smtp.gmail.com]'>
   <Function 'test_ehlo[mail.python.org]'>
   <Function 'test_noop[mail.python.org]'>
-<Module 'test_yield2.py'>
-  <Function 'test_has_lines'>

 ======= no tests ran in 0.12 seconds ========
@ -573,9 +574,9 @@ Here we declare an ``app`` fixture which receives the previously defined
|
||||||
|
|
||||||
$ pytest -v test_appsetup.py
|
$ pytest -v test_appsetup.py
|
||||||
======= test session starts ========
|
======= test session starts ========
|
||||||
platform linux -- Python 3.5.2, pytest-3.0.6, py-1.4.33, pluggy-0.4.0 -- $PYTHON_PREFIX/bin/python3.5m
|
platform linux -- Python 3.5.2, pytest-3.0.7, py-1.4.32, pluggy-0.4.0 -- $PYTHON_PREFIX/bin/python3.5
|
||||||
cachedir: .cache
|
cachedir: .cache
|
||||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||||
collecting ... collected 2 items
|
collecting ... collected 2 items
|
||||||
|
|
||||||
test_appsetup.py::test_smtp_exists[smtp.gmail.com] PASSED
|
test_appsetup.py::test_smtp_exists[smtp.gmail.com] PASSED
|
||||||
|
@ -642,9 +643,9 @@ Let's run the tests in verbose mode and with looking at the print-output::
|
||||||
|
|
||||||
$ pytest -v -s test_module.py
|
$ pytest -v -s test_module.py
|
||||||
======= test session starts ========
|
======= test session starts ========
|
||||||
platform linux -- Python 3.5.2, pytest-3.0.6, py-1.4.33, pluggy-0.4.0 -- $PYTHON_PREFIX/bin/python3.5m
|
platform linux -- Python 3.5.2, pytest-3.0.7, py-1.4.32, pluggy-0.4.0 -- $PYTHON_PREFIX/bin/python3.5
|
||||||
cachedir: .cache
|
cachedir: .cache
|
||||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||||
collecting ... collected 8 items
|
collecting ... collected 8 items
|
||||||
|
|
||||||
test_module.py::test_0[1] SETUP otherarg 1
|
test_module.py::test_0[1] SETUP otherarg 1
|
||||||
|
|
|
@ -26,7 +26,7 @@ Installation::
|
||||||
To check your installation has installed the correct version::
|
To check your installation has installed the correct version::
|
||||||
|
|
||||||
$ pytest --version
|
$ pytest --version
|
||||||
This is pytest version 3.0.6, imported from $PYTHON_PREFIX/lib/python3.5/site-packages/pytest.py
|
This is pytest version 3.0.7, imported from $PYTHON_PREFIX/lib/python3.5/site-packages/pytest.py
|
||||||
|
|
||||||
.. _`simpletest`:
|
.. _`simpletest`:
|
||||||
|
|
||||||
|
@ -46,8 +46,8 @@ That's it. You can execute the test function now::
|
||||||
|
|
||||||
$ pytest
|
$ pytest
|
||||||
======= test session starts ========
|
======= test session starts ========
|
||||||
platform linux -- Python 3.5.2, pytest-3.0.6, py-1.4.33, pluggy-0.4.0
|
platform linux -- Python 3.5.2, pytest-3.0.7, py-1.4.32, pluggy-0.4.0
|
||||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||||
collected 1 items
|
collected 1 items
|
||||||
|
|
||||||
test_sample.py F
|
test_sample.py F
|
||||||
|
@ -134,7 +134,7 @@ run the module by passing its filename::
|
||||||
def test_two(self):
|
def test_two(self):
|
||||||
x = "hello"
|
x = "hello"
|
||||||
> assert hasattr(x, 'check')
|
> assert hasattr(x, 'check')
|
||||||
E assert False
|
E AssertionError: assert False
|
||||||
E + where False = hasattr('hello', 'check')
|
E + where False = hasattr('hello', 'check')
|
||||||
|
|
||||||
test_class.py:8: AssertionError
|
test_class.py:8: AssertionError
|
||||||
|
|
|
@ -25,8 +25,8 @@ To execute it::
|
||||||
|
|
||||||
$ pytest
|
$ pytest
|
||||||
======= test session starts ========
|
======= test session starts ========
|
||||||
platform linux -- Python 3.5.2, pytest-3.0.6, py-1.4.33, pluggy-0.4.0
|
platform linux -- Python 3.5.2, pytest-3.0.7, py-1.4.32, pluggy-0.4.0
|
||||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||||
collected 1 items
|
collected 1 items
|
||||||
|
|
||||||
test_sample.py F
|
test_sample.py F
|
||||||
|
|
|
@ -55,8 +55,8 @@ them in turn::
|
||||||
|
|
||||||
$ pytest
|
$ pytest
|
||||||
======= test session starts ========
|
======= test session starts ========
|
||||||
platform linux -- Python 3.5.2, pytest-3.0.6, py-1.4.33, pluggy-0.4.0
|
platform linux -- Python 3.5.2, pytest-3.0.7, py-1.4.32, pluggy-0.4.0
|
||||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||||
collected 3 items
|
collected 3 items
|
||||||
|
|
||||||
test_expectation.py ..F
|
test_expectation.py ..F
|
||||||
|
@ -73,7 +73,7 @@ them in turn::
|
||||||
])
|
])
|
||||||
def test_eval(test_input, expected):
|
def test_eval(test_input, expected):
|
||||||
> assert eval(test_input) == expected
|
> assert eval(test_input) == expected
|
||||||
E assert 54 == 42
|
E AssertionError: assert 54 == 42
|
||||||
E + where 54 = eval('6*9')
|
E + where 54 = eval('6*9')
|
||||||
|
|
||||||
test_expectation.py:8: AssertionError
|
test_expectation.py:8: AssertionError
|
||||||
|
@ -103,8 +103,8 @@ Let's run this::
|
||||||
|
|
||||||
$ pytest
|
$ pytest
|
||||||
======= test session starts ========
|
======= test session starts ========
|
||||||
platform linux -- Python 3.5.2, pytest-3.0.6, py-1.4.33, pluggy-0.4.0
|
platform linux -- Python 3.5.2, pytest-3.0.7, py-1.4.32, pluggy-0.4.0
|
||||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||||
collected 3 items
|
collected 3 items
|
||||||
|
|
||||||
test_expectation.py ..x
|
test_expectation.py ..x
|
||||||
|
@ -186,7 +186,7 @@ Let's also run with a stringinput that will lead to a failing test::
|
||||||
|
|
||||||
def test_valid_string(stringinput):
|
def test_valid_string(stringinput):
|
||||||
> assert stringinput.isalpha()
|
> assert stringinput.isalpha()
|
||||||
E assert False
|
E AssertionError: assert False
|
||||||
E + where False = <built-in method isalpha of str object at 0xdeadbeef>()
|
E + where False = <built-in method isalpha of str object at 0xdeadbeef>()
|
||||||
E + where <built-in method isalpha of str object at 0xdeadbeef> = '!'.isalpha
|
E + where <built-in method isalpha of str object at 0xdeadbeef> = '!'.isalpha
|
||||||
|
|
||||||
|
|
|
@ -224,8 +224,8 @@ Running it with the report-on-xfail option gives this output::
|
||||||
|
|
||||||
example $ pytest -rx xfail_demo.py
|
example $ pytest -rx xfail_demo.py
|
||||||
======= test session starts ========
|
======= test session starts ========
|
||||||
platform linux -- Python 3.5.2, pytest-3.0.6, py-1.4.33, pluggy-0.4.0
|
platform linux -- Python 3.5.2, pytest-3.0.7, py-1.4.32, pluggy-0.4.0
|
||||||
rootdir: $REGENDOC_TMPDIR/example, inifile:
|
rootdir: $REGENDOC_TMPDIR/example, inifile:
|
||||||
collected 7 items
|
collected 7 items
|
||||||
|
|
||||||
xfail_demo.py xxxxxxx
|
xfail_demo.py xxxxxxx
|
||||||
|
|
|
@@ -4,7 +4,9 @@ Talks and Tutorials

 .. sidebar:: Next Open Trainings

-   `pytest workshop <http://www.meetup.com/Python-Django-User-Group-Bern/events/235151115/>`_, 8th December 2016, Bern, Switzerland
+   `Professional Testing with Python
+   <http://www.python-academy.com/courses/specialtopics/python_course_testing.html>`_,
+   26-28 April 2017, Leipzig, Germany.

 .. _`funcargs`: funcargs.html
@@ -29,8 +29,8 @@ Running this would result in a passed test except for the last

 $ pytest test_tmpdir.py
 ======= test session starts ========
-platform linux -- Python 3.5.2, pytest-3.0.6, py-1.4.33, pluggy-0.4.0
+platform linux -- Python 3.5.2, pytest-3.0.7, py-1.4.32, pluggy-0.4.0
 rootdir: $REGENDOC_TMPDIR, inifile:
 collected 1 items

 test_tmpdir.py F
@@ -108,8 +108,8 @@ the ``self.db`` values in the traceback::

 $ pytest test_unittest_db.py
 ======= test session starts ========
-platform linux -- Python 3.5.2, pytest-3.0.6, py-1.4.33, pluggy-0.4.0
+platform linux -- Python 3.5.2, pytest-3.0.7, py-1.4.32, pluggy-0.4.0
 rootdir: $REGENDOC_TMPDIR, inifile:
 collected 2 items

 test_unittest_db.py FF
@@ -192,7 +192,7 @@ This will add an extra property ``example_key="1"`` to the generated

 .. warning::

-    This is an experimental feature, and its interface might be replaced
+    ``record_xml_property`` is an experimental feature, and its interface might be replaced
     by something more powerful and general in future versions. The
     functionality per-se will be kept, however.
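For orientation, the fixture named in this warning is requested directly as a test argument; a minimal sketch (the property key and value are arbitrary):

.. code-block:: python

    def test_function(record_xml_property):
        record_xml_property("example_key", 1)
        assert True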
@@ -236,20 +236,31 @@ import ``helper.py`` normally. The contents of
 Requiring/Loading plugins in a test module or conftest file
 -----------------------------------------------------------

-You can require plugins in a test module or a conftest file like this::
+You can require plugins in a test module or a ``conftest.py`` file like this:

-    pytest_plugins = "name1", "name2",
+.. code-block:: python
+
+    pytest_plugins = ["name1", "name2"]

 When the test module or conftest plugin is loaded the specified plugins
-will be loaded as well. You can also use dotted path like this::
+will be loaded as well. Any module can be blessed as a plugin, including internal
+application modules:
+
+.. code-block:: python

     pytest_plugins = "myapp.testsupport.myplugin"

-which will import the specified module as a ``pytest`` plugin.
+``pytest_plugins`` variables are processed recursively, so note that in the example above
+if ``myapp.testsupport.myplugin`` also declares ``pytest_plugins``, the contents
+of the variable will also be loaded as plugins, and so on.

-Plugins imported like this will automatically be marked to require
-assertion rewriting using the :func:`pytest.register_assert_rewrite`
-mechanism. However for this to have any effect the module must not be
+This mechanism makes it easy to share fixtures within applications or even
+external applications without the need to create external plugins using
+the ``setuptools``'s entry point technique.
+
+Plugins imported by ``pytest_plugins`` will also automatically be marked
+for assertion rewriting (see :func:`pytest.register_assert_rewrite`).
+However for this to have any effect the module must not be
 imported already; if it was already imported at the time the
 ``pytest_plugins`` statement is processed, a warning will result and
 assertions inside the plugin will not be re-written. To fix this you
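A sketch of what such an internal plugin module could contain; the dotted path matches the example above, but the fixture it provides here is hypothetical:

.. code-block:: python

    # content of myapp/testsupport/myplugin.py (illustrative)
    import pytest

    @pytest.fixture
    def app_config():
        # a shared fixture made available to any test suite that lists this
        # module in pytest_plugins
        return {"db_url": "sqlite://"}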
@@ -166,6 +166,16 @@ class TestClass(object):
             "because it has a __new__ constructor*"
         )

+    def test_issue2234_property(self, testdir):
+        testdir.makepyfile("""
+            class TestCase(object):
+                @property
+                def prop(self):
+                    raise NotImplementedError()
+        """)
+        result = testdir.runpytest()
+        assert result.ret == EXIT_NOTESTSCOLLECTED
+

 class TestGenerator(object):
     def test_generative_functions(self, testdir):
@@ -365,7 +365,7 @@ class TestAssert_reprcompare(object):
         expl = '\n'.join(callequal(left, right, verbose=True))
         assert expl.endswith(textwrap.dedent(expected).strip())

-    def test_list_different_lenghts(self):
+    def test_list_different_lengths(self):
         expl = callequal([0, 1], [0, 1, 2])
         assert len(expl) > 1
         expl = callequal([0, 1, 2], [0, 1])
@@ -996,6 +996,25 @@ def test_assert_with_unicode(monkeypatch, testdir):
     result = testdir.runpytest()
     result.stdout.fnmatch_lines(['*AssertionError*'])

+def test_raise_unprintable_assertion_error(testdir):
+    testdir.makepyfile(r"""
+        def test_raise_assertion_error():
+            raise AssertionError('\xff')
+    """)
+    result = testdir.runpytest()
+    result.stdout.fnmatch_lines([r"> raise AssertionError('\xff')", 'E AssertionError: *'])
+
+def test_raise_assertion_error_raisin_repr(testdir):
+    testdir.makepyfile(u"""
+        class RaisingRepr(object):
+            def __repr__(self):
+                raise Exception()
+        def test_raising_repr():
+            raise AssertionError(RaisingRepr())
+    """)
+    result = testdir.runpytest()
+    result.stdout.fnmatch_lines(['E AssertionError: <unprintable AssertionError object>'])
+

 def test_issue_1944(testdir):
     testdir.makepyfile("""
         def f():
@@ -271,7 +271,7 @@ class TestAssertionRewrite(object):

         getmsg(f, must_pass=True)

-    def test_short_circut_evaluation(self):
+    def test_short_circuit_evaluation(self):
         def f():
             assert True or explode # noqa
@@ -281,7 +281,7 @@ class TestLoggingInteraction(object):
             def test_logging():
                 import logging
                 import pytest
-                stream = capture.TextIO()
+                stream = capture.CaptureIO()
                 logging.basicConfig(stream=stream)
                 stream.close() # to free memory/release resources
         """)
@@ -604,7 +604,7 @@ def test_capture_binary_output(testdir):


 def test_error_during_readouterr(testdir):
-    """Make sure we suspend capturing if errors occurr during readouterr"""
+    """Make sure we suspend capturing if errors occur during readouterr"""
     testdir.makepyfile(pytest_xyz="""
         from _pytest.capture import FDCapture
         def bad_snap(self):
@@ -622,16 +622,16 @@ def test_error_during_readouterr(testdir):
     ])


-class TestTextIO(object):
+class TestCaptureIO(object):
     def test_text(self):
-        f = capture.TextIO()
+        f = capture.CaptureIO()
         f.write("hello")
         s = f.getvalue()
         assert s == "hello"
         f.close()

     def test_unicode_and_str_mixture(self):
-        f = capture.TextIO()
+        f = capture.CaptureIO()
         if sys.version_info >= (3, 0):
             f.write("\u00f6")
             pytest.raises(TypeError, "f.write(bytes('hello', 'UTF-8'))")
@@ -642,6 +642,18 @@ class TestTextIO(object):
         f.close()
         assert isinstance(s, unicode)

+    @pytest.mark.skipif(
+        sys.version_info[0] == 2,
+        reason='python 3 only behaviour',
+    )
+    def test_write_bytes_to_buffer(self):
+        """In python3, stdout / stderr are text io wrappers (exposing a buffer
+        property of the underlying bytestream). See issue #1407
+        """
+        f = capture.CaptureIO()
+        f.buffer.write(b'foo\r\n')
+        assert f.getvalue() == 'foo\r\n'
+

 def test_bytes_io():
     f = py.io.BytesIO()
@@ -900,8 +912,8 @@ class TestStdCapture(object):
         with self.getcapture() as cap:
             sys.stdout.write("hello")
             sys.stderr.write("world")
-            sys.stdout = capture.TextIO()
-            sys.stderr = capture.TextIO()
+            sys.stdout = capture.CaptureIO()
+            sys.stderr = capture.CaptureIO()
             print ("not seen")
             sys.stderr.write("not seen\n")
         out, err = cap.readouterr()
@@ -519,7 +519,7 @@ def test_consider_args_after_options_for_rootdir_and_inifile(testdir, args):
             args[i] = d2
     with root.as_cwd():
         result = testdir.runpytest(*args)
-        result.stdout.fnmatch_lines(['*rootdir: *myroot, inifile: '])
+        result.stdout.fnmatch_lines(['*rootdir: *myroot, inifile:'])
 
 
 @pytest.mark.skipif("sys.platform == 'win32'")
@@ -779,6 +779,21 @@ class TestOverrideIniArgs(object):
         result = testdir.runpytest("--override-ini", 'xdist_strict True', "-s")
         result.stderr.fnmatch_lines(["*ERROR* *expects option=value*"])
 
+    @pytest.mark.parametrize('with_ini', [True, False])
+    def test_override_ini_handled_asap(self, testdir, with_ini):
+        """-o should be handled as soon as possible and always override what's in ini files (#2238)"""
+        if with_ini:
+            testdir.makeini("""
+                [pytest]
+                python_files=test_*.py
+            """)
+        testdir.makepyfile(unittest_ini_handle="""
+            def test():
+                pass
+        """)
+        result = testdir.runpytest("--override-ini", 'python_files=unittest_*.py')
+        result.stdout.fnmatch_lines(["*1 passed in*"])
+
     def test_with_arg_outside_cwd_without_inifile(self, tmpdir, monkeypatch):
         monkeypatch.chdir(str(tmpdir))
         a = tmpdir.mkdir("a")
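The parametrized test added above pins down the intended precedence: a value passed with -o/--override-ini wins over the same key in an ini file, and a malformed override (no "=") is rejected. A tiny standalone sketch of that precedence rule (the function effective_ini and the dicts are hypothetical, not pytest APIs):

def effective_ini(ini_values, overrides):
    # Apply "-o name=value" style overrides on top of ini-file values.
    merged = dict(ini_values)
    for item in overrides:
        if "=" not in item:
            # corresponds to the "*expects option=value*" error asserted above
            raise ValueError("-o/--override-ini expects option=value style")
        name, value = item.split("=", 1)
        merged[name] = value
    return merged

ini = {"python_files": "test_*.py"}
assert effective_ini(ini, ["python_files=unittest_*.py"]) == {"python_files": "unittest_*.py"}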
@@ -189,6 +189,29 @@ class TestPython(object):
         fnode.assert_attr(message="test teardown failure")
         assert "ValueError" in fnode.toxml()
 
+    def test_call_failure_teardown_error(self, testdir):
+        testdir.makepyfile("""
+            import pytest
+
+            @pytest.fixture
+            def arg():
+                yield
+                raise Exception("Teardown Exception")
+            def test_function(arg):
+                raise Exception("Call Exception")
+        """)
+        result, dom = runandparse(testdir)
+        assert result.ret
+        node = dom.find_first_by_tag("testsuite")
+        node.assert_attr(errors=1, failures=1, tests=1)
+        first, second = dom.find_by_tag("testcase")
+        if not first or not second or first == second:
+            assert 0
+        fnode = first.find_first_by_tag("failure")
+        fnode.assert_attr(message="Exception: Call Exception")
+        snode = second.find_first_by_tag("error")
+        snode.assert_attr(message="test teardown failure")
+
     def test_skip_contains_name_reason(self, testdir):
         testdir.makepyfile("""
             import pytest
@@ -557,6 +580,25 @@ class TestPython(object):
         systemout = pnode.find_first_by_tag("system-err")
         assert "hello-stderr" in systemout.toxml()
 
+    def test_avoid_double_stdout(self, testdir):
+        testdir.makepyfile("""
+            import sys
+            import pytest
+
+            @pytest.fixture
+            def arg(request):
+                yield
+                sys.stdout.write('hello-stdout teardown')
+                raise ValueError()
+            def test_function(arg):
+                sys.stdout.write('hello-stdout call')
+        """)
+        result, dom = runandparse(testdir)
+        node = dom.find_first_by_tag("testsuite")
+        pnode = node.find_first_by_tag("testcase")
+        systemout = pnode.find_first_by_tag("system-out")
+        assert "hello-stdout call" in systemout.toxml()
+        assert "hello-stdout teardown" in systemout.toxml()
 
 def test_mangle_test_address():
     from _pytest.junitxml import mangle_test_address
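Both junitxml tests added above assert against the parsed report rather than raw text. The kind of document they expect can be illustrated with a hand-written junit-style snippet (hypothetical XML, not captured pytest output):

import xml.etree.ElementTree as ET

# One test whose call phase fails and whose fixture teardown errors shows up
# as a <failure> on one <testcase> entry and an <error> on another.
report = ET.fromstring("""
<testsuite errors="1" failures="1" tests="1">
  <testcase name="test_function"><failure message="Exception: Call Exception"/></testcase>
  <testcase name="test_function"><error message="test teardown failure"/></testcase>
</testsuite>
""")

assert report.get("errors") == "1" and report.get("failures") == "1"
first, second = report.findall("testcase")
assert first.find("failure") is not None
assert second.find("error") is not None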
@@ -126,6 +126,21 @@ class TestPDB(object):
         assert 'debug.me' in rest
         self.flush(child)
 
+    def test_pdb_unittest_skip(self, testdir):
+        """Test for issue #2137"""
+        p1 = testdir.makepyfile("""
+            import unittest
+            @unittest.skipIf(True, 'Skipping also with pdb active')
+            class MyTestCase(unittest.TestCase):
+                def test_one(self):
+                    assert 0
+        """)
+        child = testdir.spawn_pytest("-rs --pdb %s" % p1)
+        child.expect('Skipping also with pdb active')
+        child.expect('1 skipped in')
+        child.sendeof()
+        self.flush(child)
+
     def test_pdb_interaction_capture(self, testdir):
         p1 = testdir.makepyfile("""
             def test_1():
@@ -8,25 +8,19 @@ from _pytest.recwarn import WarningsRecorder
 def test_recwarn_functional(testdir):
     reprec = testdir.inline_runsource("""
         import warnings
-        oldwarn = warnings.showwarning
         def test_method(recwarn):
-            assert warnings.showwarning != oldwarn
             warnings.warn("hello")
             warn = recwarn.pop()
             assert isinstance(warn.message, UserWarning)
-        def test_finalized():
-            assert warnings.showwarning == oldwarn
     """)
     res = reprec.countoutcomes()
-    assert tuple(res) == (2, 0, 0), res
+    assert tuple(res) == (1, 0, 0), res
 
 
 class TestWarningsRecorderChecker(object):
-    def test_recording(self, recwarn):
-        showwarning = py.std.warnings.showwarning
+    def test_recording(self):
         rec = WarningsRecorder()
         with rec:
-            assert py.std.warnings.showwarning != showwarning
             assert not rec.list
             py.std.warnings.warn_explicit("hello", UserWarning, "xyz", 13)
             assert len(rec.list) == 1
@@ -40,8 +34,6 @@ class TestWarningsRecorderChecker(object):
         assert l is rec.list
         pytest.raises(AssertionError, "rec.pop()")
 
-        assert showwarning == py.std.warnings.showwarning
-
     def test_typechecking(self):
         from _pytest.recwarn import WarningsChecker
         with pytest.raises(TypeError):
@@ -217,7 +209,6 @@ class TestWarns(object):
         excinfo.match(re.escape(message_template.format(warning_classes,
                                                          [each.message for each in warninfo])))
 
-
     def test_record(self):
         with pytest.warns(UserWarning) as record:
             warnings.warn("user", UserWarning)
@@ -225,9 +216,6 @@ class TestWarns(object):
         assert len(record) == 1
         assert str(record[0].message) == "user"
 
-        print(repr(record[0]))
-        assert str(record[0].message) in repr(record[0])
-
     def test_record_only(self):
         with pytest.warns(None) as record:
             warnings.warn("user", UserWarning)
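With the assertions about warnings.showwarning gone, what these tests rely on is plain in-memory recording of warnings. The standard-library analogue of that recording behaviour, which recwarn and pytest.warns are conceptually built around, looks like this (a sketch using only the warnings module, not pytest internals):

import warnings

with warnings.catch_warnings(record=True) as record:
    warnings.simplefilter("always")   # ensure the warning is not filtered away
    warnings.warn("hello", UserWarning)

assert len(record) == 1
assert isinstance(record[0].message, UserWarning)
assert str(record[0].message) == "hello"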
|
@ -197,7 +197,7 @@ class TestNewSession(SessionTests):
|
||||||
colfail = [x for x in finished if x.failed]
|
colfail = [x for x in finished if x.failed]
|
||||||
assert len(colfail) == 1
|
assert len(colfail) == 1
|
||||||
|
|
||||||
def test_minus_x_overriden_by_maxfail(self, testdir):
|
def test_minus_x_overridden_by_maxfail(self, testdir):
|
||||||
testdir.makepyfile(__init__="")
|
testdir.makepyfile(__init__="")
|
||||||
testdir.makepyfile(test_one="xxxx", test_two="yyyy", test_third="zzz")
|
testdir.makepyfile(test_one="xxxx", test_two="yyyy", test_third="zzz")
|
||||||
reprec = testdir.inline_run("-x", "--maxfail=2", testdir.tmpdir)
|
reprec = testdir.inline_run("-x", "--maxfail=2", testdir.tmpdir)
|
||||||
|
|
|
@@ -969,3 +969,26 @@ def test_module_level_skip_error(testdir):
     result.stdout.fnmatch_lines(
         "*Using pytest.skip outside of a test is not allowed*"
     )
+
+
+def test_mark_xfail_item(testdir):
+    # Ensure pytest.mark.xfail works with non-Python Item
+    testdir.makeconftest("""
+        import pytest
+
+        class MyItem(pytest.Item):
+            nodeid = 'foo'
+            def setup(self):
+                marker = pytest.mark.xfail(True, reason="Expected failure")
+                self.add_marker(marker)
+            def runtest(self):
+                assert False
+
+        def pytest_collect_file(path, parent):
+            return MyItem("foo", parent)
+    """)
+    result = testdir.inline_run()
+    passed, skipped, failed = result.listoutcomes()
+    assert not failed
+    xfailed = [r for r in skipped if hasattr(r, 'wasxfail')]
+    assert xfailed
@@ -906,3 +906,12 @@ def test_summary_stats(exp_line, exp_color, stats_arg):
     print("Actually got: \"%s\"; with color \"%s\"" % (line, color))
     assert line == exp_line
     assert color == exp_color
+
+
+def test_no_trailing_whitespace_after_inifile_word(testdir):
+    result = testdir.runpytest('')
+    assert 'inifile:\n' in result.stdout.str()
+
+    testdir.makeini('[pytest]')
+    result = testdir.runpytest('')
+    assert 'inifile: tox.ini\n' in result.stdout.str()