Merge remote-tracking branch 'upstream/features' into features
commit 1f46015de5
@@ -2,7 +2,7 @@ Thanks for submitting an issue!

Here's a quick checklist in what to include:

[ ] Include a detailed description of the bug or suggestion
[ ] `pip list` of the virtual environment you are using
[ ] py.test and operating system versions
[ ] Minimal example if possible
- [ ] Include a detailed description of the bug or suggestion
- [ ] `pip list` of the virtual environment you are using
- [ ] py.test and operating system versions
- [ ] Minimal example if possible
@@ -2,7 +2,7 @@ Thanks for submitting a PR, your contribution is really appreciated!

Here's a quick checklist that should be present in PRs:

[ ] Target: for bug or doc fixes, target `master`; for new features, target `features`
[ ] Make sure to include one or more tests for your change
[ ] Add yourself to `AUTHORS`
[ ] Add a new entry to the `CHANGELOG` (choose any open position to avoid merge conflicts with other PRs)
- [ ] Target: for bug or doc fixes, target `master`; for new features, target `features`
- [ ] Make sure to include one or more tests for your change
- [ ] Add yourself to `AUTHORS`
- [ ] Add a new entry to the `CHANGELOG` (choose any open position to avoid merge conflicts with other PRs)

AUTHORS

@@ -10,6 +10,7 @@ Andy Freeland
Anthon van der Neut
Armin Rigo
Aron Curzon
Aviv Palivoda
Benjamin Peterson
Bob Ippolito
Brian Dorsey

@@ -23,6 +24,7 @@ Christian Theunert
Christian Tismer
Christopher Gilling
Daniel Grana
Daniel Hahler
Daniel Nuri
Dave Hunt
David Mohr

@@ -60,6 +62,7 @@ Marc Schlaich
Mark Abramowitz
Markus Unterwaditzer
Martijn Faassen
Matt Bachmann
Matt Williams
Michael Aquilina
Michael Birtwell
@@ -27,6 +27,9 @@
  explicitly passed parametrize ids do not get escaped to ascii.
  Thanks `@ceridwen`_ for the PR.

* parametrize ids can accept None as specific test id. The
  automatically generated id for that argument will be used.

*

*

@@ -42,17 +45,54 @@
.. _#1454: https://github.com/pytest-dev/pytest/pull/1454


2.9.1.dev1
2.9.2.dev1
==========

**Bug Fixes**

*
* When receiving identical test ids in parametrize we generate unique test ids.

*

*

*


2.9.1
=====

**Bug Fixes**

* Improve error message when a plugin fails to load.
  Thanks `@nicoddemus`_ for the PR.

* Fix (`#1178 <https://github.com/pytest-dev/pytest/issues/1178>`_):
  ``pytest.fail`` with non-ascii characters raises an internal pytest error.
  Thanks `@nicoddemus`_ for the PR.

* Fix (`#469`_): junit parses report.nodeid incorrectly, when params IDs
  contain ``::``. Thanks `@tomviner`_ for the PR (`#1431`_).

* Fix (`#578 <https://github.com/pytest-dev/pytest/issues/578>`_): SyntaxErrors
  containing non-ascii lines at the point of failure generated an internal
  py.test error.
  Thanks `@asottile`_ for the report and `@nicoddemus`_ for the PR.

* Fix (`#1437`_): When passing in a bytestring regex pattern to parameterize
  attempt to decode it as utf-8 ignoring errors.

* Fix (`#649`_): parametrized test nodes cannot be specified to run on the command line.


.. _#1437: https://github.com/pytest-dev/pytest/issues/1437
.. _#469: https://github.com/pytest-dev/pytest/issues/469
.. _#1431: https://github.com/pytest-dev/pytest/pull/1431
.. _#649: https://github.com/pytest-dev/pytest/issues/649

.. _@asottile: https://github.com/asottile


2.9.0
=====
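The two parametrize id changes noted above (``None`` entries and duplicate ids) can be exercised with a short example; this is a sketch based on the new tests further down in this diff, with illustrative test names::

    import pytest

    @pytest.mark.parametrize(("a", "b"), [(1, 1), (1, 1), (1, 2)],
                             ids=["basic", None, "advanced"])
    def test_none_id_falls_back(a, b):
        # the None entry gets the auto-generated id, here "1-1"
        assert a <= b

    @pytest.mark.parametrize("value", [1, 2], ids=["dup", "dup"])
    def test_duplicate_ids_made_unique(value):
        # identical ids are disambiguated with an index prefix: "0dup", "1dup"
        assert value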

@@ -1,2 +1,3 @@
#
__version__ = '2.10.0.dev1'

@@ -47,7 +47,9 @@ def format_exception_only(etype, value):
        filename = filename or "<string>"
        lines.append(' File "%s", line %d\n' % (filename, lineno))
        if badline is not None:
            lines.append(' %s\n' % badline.strip())
            if isinstance(badline, bytes): # python 2 only
                badline = badline.decode('utf-8', 'replace')
            lines.append(u' %s\n' % badline.strip())
            if offset is not None:
                caretspace = badline.rstrip('\n')[:offset].lstrip()
                # non-space whitespace (likes tabs) must be kept for alignment
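The added lines make SyntaxError formatting tolerant of source lines that arrive as byte strings with non-ascii content (Python 2); a minimal standalone sketch of the same decoding step, with an invented sample line::

    badline = b'print "\xc4\x85\xc4\x87"'                 # a source line read as bytes
    if isinstance(badline, bytes):                         # python 2 only in the real code
        badline = badline.decode('utf-8', 'replace')       # never raises; bad bytes become U+FFFD
    formatted = u'    %s\n' % badline.strip()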

@@ -149,7 +149,9 @@ class LFPlugin:
        config = self.config
        if config.getvalue("cacheshow") or hasattr(config, "slaveinput"):
            return
        config.cache.set("cache/lastfailed", self.lastfailed)
        prev_failed = config.cache.get("cache/lastfailed", None) is not None
        if (session.testscollected and prev_failed) or self.lastfailed:
            config.cache.set("cache/lastfailed", self.lastfailed)


def pytest_addoption(parser):

@@ -383,8 +383,13 @@ class PytestPluginManager(PluginManager):
            importspec = modname
        try:
            __import__(importspec)
        except ImportError:
            raise
        except ImportError as e:
            new_exc = ImportError('Error importing plugin "%s": %s' % (modname, e))
            # copy over name and path attributes
            for attr in ('name', 'path'):
                if hasattr(e, attr):
                    setattr(new_exc, attr, getattr(e, attr))
            raise new_exc
        except Exception as e:
            import pytest
            if not hasattr(pytest, 'skip') or not isinstance(e, pytest.skip.Exception):
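The new except branch re-raises import failures with the plugin name in the message while keeping the original ``name``/``path`` attributes; a minimal sketch of the same pattern outside pytest (the helper name is hypothetical)::

    def import_plugin(modname):
        try:
            __import__(modname)
        except ImportError as e:
            new_exc = ImportError('Error importing plugin "%s": %s' % (modname, e))
            for attr in ('name', 'path'):          # preserved for debuggability
                if hasattr(e, attr):
                    setattr(new_exc, attr, getattr(e, attr))
            raise new_exc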

@@ -28,17 +28,14 @@ def pytest_plugin_registered(plugin, manager):

@hookspec(historic=True)
def pytest_addoption(parser):
    """register argparse-style options and ini-style config values.
    """register argparse-style options and ini-style config values,
    called once at the beginning of a test run.

    .. warning::
    .. note::

        This function must be implemented in a :ref:`plugin <pluginorder>`
        and is called once at the beginning of a test run.

        Implementing this hook from ``conftest.py`` files is **strongly**
        discouraged because ``conftest.py`` files are lazily loaded and
        may give strange *unknown option* errors depending on the directory
        ``py.test`` is invoked from.
        This function should be implemented only in plugins or ``conftest.py``
        files situated at the tests root directory due to how py.test
        :ref:`discovers plugins during startup <pluginorder>`.

    :arg parser: To add command line options, call
        :py:func:`parser.addoption(...) <_pytest.config.Parser.addoption>`.

@@ -84,7 +81,7 @@ def pytest_cmdline_main(config):
    """ called for performing the main command line action. The default
    implementation will invoke the configure hooks and runtest_mainloop. """

def pytest_load_initial_conftests(args, early_config, parser):
def pytest_load_initial_conftests(early_config, parser, args):
    """ implements the loading of initial conftest files ahead
    of command line option parsing. """
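For reference, a typical implementation of the hook documented above, placed in a plugin module or a root ``conftest.py`` (option name and default are illustrative, borrowed from the docs examples elsewhere in this diff)::

    def pytest_addoption(parser):
        parser.addoption("--runslow", action="store_true", default=False,
                         help="run tests marked as slow")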
|
@ -46,6 +46,8 @@ del _legal_chars
|
|||
del _legal_ranges
|
||||
del _legal_xml_re
|
||||
|
||||
_py_ext_re = re.compile(r"\.py$")
|
||||
|
||||
|
||||
def bin_xml_escape(arg):
|
||||
def repl(matchobj):
|
||||
|
@ -89,7 +91,7 @@ class _NodeReporter(object):
|
|||
|
||||
def record_testreport(self, testreport):
|
||||
assert not self.testcase
|
||||
names = mangle_testnames(testreport.nodeid.split("::"))
|
||||
names = mangle_test_address(testreport.nodeid)
|
||||
classnames = names[:-1]
|
||||
if self.xml.prefix:
|
||||
classnames.insert(0, self.xml.prefix)
|
||||
|
@ -235,9 +237,18 @@ def pytest_unconfigure(config):
|
|||
config.pluginmanager.unregister(xml)
|
||||
|
||||
|
||||
def mangle_testnames(names):
|
||||
names = [x.replace(".py", "") for x in names if x != '()']
|
||||
def mangle_test_address(address):
|
||||
path, possible_open_bracket, params = address.partition('[')
|
||||
names = path.split("::")
|
||||
try:
|
||||
names.remove('()')
|
||||
except ValueError:
|
||||
pass
|
||||
# convert file path to dotted path
|
||||
names[0] = names[0].replace("/", '.')
|
||||
names[0] = _py_ext_re.sub("", names[0])
|
||||
# put any params back
|
||||
names[-1] += possible_open_bracket + params
|
||||
return names
|
||||
|
||||
|
||||
|
|
|
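What the new helper does to a node id, taken from the accompanying test later in this diff (``_pytest.junitxml`` is a private module, so this is for illustration only)::

    from _pytest.junitxml import mangle_test_address

    mangle_test_address("a/my.py.thing.py::Class::()::method::[a-1-::]")
    # -> ['a.my.py.thing', 'Class', 'method', '[a-1-::]']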

@@ -718,7 +718,8 @@ class Session(FSCollector):
            if rep.passed:
                has_matched = False
                for x in rep.result:
                    if x.name == name:
                    # TODO: remove parametrized workaround once collection structure contains parametrization
                    if x.name == name or x.name.split("[")[0] == name:
                        resultnodes.extend(self.matchnodes([x], nextnames))
                        has_matched = True
                # XXX accept IDs that don't have "()" for class instances

@@ -751,7 +751,7 @@ class FunctionMixin(PyobjMixin):
    def _repr_failure_py(self, excinfo, style="long"):
        if excinfo.errisinstance(pytest.fail.Exception):
            if not excinfo.value.pytrace:
                return str(excinfo.value)
                return py._builtin._totext(excinfo.value)
        return super(FunctionMixin, self)._repr_failure_py(excinfo,
                                                           style=style)
@@ -967,7 +967,8 @@ class Metafunc(FuncargnamesCompatAttr):

        :arg ids: list of string ids, or a callable.
            If strings, each is corresponding to the argvalues so that they are
            part of the test id.
            part of the test id. If None is given as id of specific test, the
            automatically generated id for that argument will be used.
            If callable, it should take one argument (a single argvalue) and return
            a string or return None. If None, the automatically generated id for that
            argument will be used.

@@ -1025,14 +1026,10 @@ class Metafunc(FuncargnamesCompatAttr):
        if callable(ids):
            idfn = ids
            ids = None
        if ids:
            if len(ids) != len(argvalues):
                raise ValueError('%d tests specified with %d ids' %(
                    len(argvalues), len(ids)))
            else:
                ids = [_escape_strings(i) for i in ids]
        if not ids:
            ids = idmaker(argnames, argvalues, idfn)
        if ids and len(ids) != len(argvalues):
            raise ValueError('%d tests specified with %d ids' %(
                len(argvalues), len(ids)))
        ids = idmaker(argnames, argvalues, idfn, ids)
        newcalls = []
        for callspec in self._calls or [CallSpec2(self)]:
            for param_index, valset in enumerate(argvalues):
@@ -1153,13 +1150,16 @@ def _idval(val, argname, idx, idfn):
        return val.__name__
    return str(argname)+str(idx)

def _idvalset(idx, valset, argnames, idfn):
    this_id = [_idval(val, argname, idx, idfn)
               for val, argname in zip(valset, argnames)]
    return "-".join(this_id)
def _idvalset(idx, valset, argnames, idfn, ids):
    if ids is None or ids[idx] is None:
        this_id = [_idval(val, argname, idx, idfn)
                   for val, argname in zip(valset, argnames)]
        return "-".join(this_id)
    else:
        return _escape_strings(ids[idx])

def idmaker(argnames, argvalues, idfn=None):
    ids = [_idvalset(valindex, valset, argnames, idfn)
def idmaker(argnames, argvalues, idfn=None, ids=None):
    ids = [_idvalset(valindex, valset, argnames, idfn, ids)
           for valindex, valset in enumerate(argvalues)]
    if len(set(ids)) < len(ids):
        # user may have provided a bad idfn which means the ids are not unique
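Behaviour of the extended ``idmaker`` with explicit ``ids`` (mirrors the new tests added below; again a private module, shown for illustration)::

    from _pytest.python import idmaker

    idmaker(("a", "b"), [(1, 2), (3, 4)], ids=["a", None])   # -> ["a", "3-4"]
    idmaker(("a", "b"), [(1, 2), (3, 4)], ids=["a", "a"])    # -> ["0a", "1a"]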

@@ -435,7 +435,10 @@ class OutcomeException(Exception):

    def __repr__(self):
        if self.msg:
            return str(self.msg)
            val = self.msg
            if isinstance(val, bytes):
                val = py._builtin._totext(val, errors='replace')
            return val
        return "<%s instance>" %(self.__class__.__name__,)
    __str__ = __repr__
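With ``__repr__`` returning text instead of forcing ``str()``, a non-ascii failure message no longer triggers an internal error on Python 2; a sketch of the call exercised by the test added later in this diff::

    import pytest

    def test_hello():
        pytest.fail(u'oh oh: \u263a', pytrace=False)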

@@ -0,0 +1,65 @@
pytest-2.9.1
============

pytest is a mature Python testing tool with more than a 1100 tests
against itself, passing on many different interpreters and platforms.

See below for the changes and see docs at:

    http://pytest.org

As usual, you can upgrade from pypi via::

    pip install -U pytest

Thanks to all who contributed to this release, among them:

Bruno Oliveira
Daniel Hahler
Dmitry Malinovsky
Florian Bruhin
Floris Bruynooghe
Matt Bachmann
Ronny Pfannschmidt
TomV
Vladimir Bolshakov
Zearin
palaviv


Happy testing,
The py.test Development Team


2.9.1 (compared to 2.9.0)
-------------------------

**Bug Fixes**

* Improve error message when a plugin fails to load.
  Thanks `@nicoddemus`_ for the PR.

* Fix (`#1178 <https://github.com/pytest-dev/pytest/issues/1178>`_):
  ``pytest.fail`` with non-ascii characters raises an internal pytest error.
  Thanks `@nicoddemus`_ for the PR.

* Fix (`#469`_): junit parses report.nodeid incorrectly, when params IDs
  contain ``::``. Thanks `@tomviner`_ for the PR (`#1431`_).

* Fix (`#578 <https://github.com/pytest-dev/pytest/issues/578>`_): SyntaxErrors
  containing non-ascii lines at the point of failure generated an internal
  py.test error.
  Thanks `@asottile`_ for the report and `@nicoddemus`_ for the PR.

* Fix (`#1437`_): When passing in a bytestring regex pattern to parameterize
  attempt to decode it as utf-8 ignoring errors.

* Fix (`#649`_): parametrized test nodes cannot be specified to run on the command line.


.. _#1437: https://github.com/pytest-dev/pytest/issues/1437
.. _#469: https://github.com/pytest-dev/pytest/issues/469
.. _#1431: https://github.com/pytest-dev/pytest/pull/1431
.. _#649: https://github.com/pytest-dev/pytest/issues/649

.. _@asottile: https://github.com/asottile
@ -26,7 +26,7 @@ you will see the return value of the function call::
|
|||
|
||||
$ py.test test_assert1.py
|
||||
======= test session starts ========
|
||||
platform linux -- Python 3.4.0, pytest-2.9.0, py-1.4.31, pluggy-0.3.1
|
||||
platform linux -- Python 3.4.0, pytest-2.9.1, py-1.4.31, pluggy-0.3.1
|
||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||
collected 1 items
|
||||
|
||||
|
@ -143,7 +143,7 @@ if you run this module::
|
|||
|
||||
$ py.test test_assert2.py
|
||||
======= test session starts ========
|
||||
platform linux -- Python 3.4.0, pytest-2.9.0, py-1.4.31, pluggy-0.3.1
|
||||
platform linux -- Python 3.4.0, pytest-2.9.1, py-1.4.31, pluggy-0.3.1
|
||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||
collected 1 items
|
||||
|
||||
|
|
|
@ -80,7 +80,7 @@ If you then run it with ``--lf``::
|
|||
|
||||
$ py.test --lf
|
||||
======= test session starts ========
|
||||
platform linux -- Python 3.4.0, pytest-2.9.0, py-1.4.31, pluggy-0.3.1
|
||||
platform linux -- Python 3.4.0, pytest-2.9.1, py-1.4.31, pluggy-0.3.1
|
||||
run-last-failure: rerun last 2 failures
|
||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||
collected 50 items
|
||||
|
@ -121,7 +121,7 @@ of ``FF`` and dots)::
|
|||
|
||||
$ py.test --ff
|
||||
======= test session starts ========
|
||||
platform linux -- Python 3.4.0, pytest-2.9.0, py-1.4.31, pluggy-0.3.1
|
||||
platform linux -- Python 3.4.0, pytest-2.9.1, py-1.4.31, pluggy-0.3.1
|
||||
run-last-failure: rerun last 2 failures first
|
||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||
collected 50 items
|
||||
|
@ -226,7 +226,7 @@ You can always peek at the content of the cache using the
|
|||
|
||||
$ py.test --cache-clear
|
||||
======= test session starts ========
|
||||
platform linux -- Python 3.4.0, pytest-2.9.0, py-1.4.31, pluggy-0.3.1
|
||||
platform linux -- Python 3.4.0, pytest-2.9.1, py-1.4.31, pluggy-0.3.1
|
||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||
collected 1 items
|
||||
|
||||
|
|
|
@ -64,7 +64,7 @@ of the failing function and hide the other one::
|
|||
|
||||
$ py.test
|
||||
======= test session starts ========
|
||||
platform linux -- Python 3.4.0, pytest-2.9.0, py-1.4.31, pluggy-0.3.1
|
||||
platform linux -- Python 3.4.0, pytest-2.9.1, py-1.4.31, pluggy-0.3.1
|
||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||
collected 2 items
|
||||
|
||||
|
|
|
@ -49,7 +49,7 @@ then you can just invoke ``py.test`` without command line options::
|
|||
|
||||
$ py.test
|
||||
======= test session starts ========
|
||||
platform linux -- Python 3.4.0, pytest-2.9.0, py-1.4.31, pluggy-0.3.1
|
||||
platform linux -- Python 3.4.0, pytest-2.9.1, py-1.4.31, pluggy-0.3.1
|
||||
rootdir: $REGENDOC_TMPDIR, inifile: pytest.ini
|
||||
collected 1 items
|
||||
|
||||
|
|
|
@ -31,7 +31,7 @@ You can then restrict a test run to only run tests marked with ``webtest``::
|
|||
|
||||
$ py.test -v -m webtest
|
||||
======= test session starts ========
|
||||
platform linux -- Python 3.4.0, pytest-2.9.0, py-1.4.31, pluggy-0.3.1 -- $PYTHON_PREFIX/bin/python3.4
|
||||
platform linux -- Python 3.4.0, pytest-2.9.1, py-1.4.31, pluggy-0.3.1 -- $PYTHON_PREFIX/bin/python3.4
|
||||
cachedir: .cache
|
||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||
collecting ... collected 4 items
|
||||
|
@ -45,7 +45,7 @@ Or the inverse, running all tests except the webtest ones::
|
|||
|
||||
$ py.test -v -m "not webtest"
|
||||
======= test session starts ========
|
||||
platform linux -- Python 3.4.0, pytest-2.9.0, py-1.4.31, pluggy-0.3.1 -- $PYTHON_PREFIX/bin/python3.4
|
||||
platform linux -- Python 3.4.0, pytest-2.9.1, py-1.4.31, pluggy-0.3.1 -- $PYTHON_PREFIX/bin/python3.4
|
||||
cachedir: .cache
|
||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||
collecting ... collected 4 items
|
||||
|
@ -66,7 +66,7 @@ tests based on their module, class, method, or function name::
|
|||
|
||||
$ py.test -v test_server.py::TestClass::test_method
|
||||
======= test session starts ========
|
||||
platform linux -- Python 3.4.0, pytest-2.9.0, py-1.4.31, pluggy-0.3.1 -- $PYTHON_PREFIX/bin/python3.4
|
||||
platform linux -- Python 3.4.0, pytest-2.9.1, py-1.4.31, pluggy-0.3.1 -- $PYTHON_PREFIX/bin/python3.4
|
||||
cachedir: .cache
|
||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||
collecting ... collected 5 items
|
||||
|
@ -79,7 +79,7 @@ You can also select on the class::
|
|||
|
||||
$ py.test -v test_server.py::TestClass
|
||||
======= test session starts ========
|
||||
platform linux -- Python 3.4.0, pytest-2.9.0, py-1.4.31, pluggy-0.3.1 -- $PYTHON_PREFIX/bin/python3.4
|
||||
platform linux -- Python 3.4.0, pytest-2.9.1, py-1.4.31, pluggy-0.3.1 -- $PYTHON_PREFIX/bin/python3.4
|
||||
cachedir: .cache
|
||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||
collecting ... collected 4 items
|
||||
|
@ -92,7 +92,7 @@ Or select multiple nodes::
|
|||
|
||||
$ py.test -v test_server.py::TestClass test_server.py::test_send_http
|
||||
======= test session starts ========
|
||||
platform linux -- Python 3.4.0, pytest-2.9.0, py-1.4.31, pluggy-0.3.1 -- $PYTHON_PREFIX/bin/python3.4
|
||||
platform linux -- Python 3.4.0, pytest-2.9.1, py-1.4.31, pluggy-0.3.1 -- $PYTHON_PREFIX/bin/python3.4
|
||||
cachedir: .cache
|
||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||
collecting ... collected 8 items
|
||||
|
@ -130,7 +130,7 @@ select tests based on their names::
|
|||
|
||||
$ py.test -v -k http # running with the above defined example module
|
||||
======= test session starts ========
|
||||
platform linux -- Python 3.4.0, pytest-2.9.0, py-1.4.31, pluggy-0.3.1 -- $PYTHON_PREFIX/bin/python3.4
|
||||
platform linux -- Python 3.4.0, pytest-2.9.1, py-1.4.31, pluggy-0.3.1 -- $PYTHON_PREFIX/bin/python3.4
|
||||
cachedir: .cache
|
||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||
collecting ... collected 4 items
|
||||
|
@ -144,7 +144,7 @@ And you can also run all tests except the ones that match the keyword::
|
|||
|
||||
$ py.test -k "not send_http" -v
|
||||
======= test session starts ========
|
||||
platform linux -- Python 3.4.0, pytest-2.9.0, py-1.4.31, pluggy-0.3.1 -- $PYTHON_PREFIX/bin/python3.4
|
||||
platform linux -- Python 3.4.0, pytest-2.9.1, py-1.4.31, pluggy-0.3.1 -- $PYTHON_PREFIX/bin/python3.4
|
||||
cachedir: .cache
|
||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||
collecting ... collected 4 items
|
||||
|
@ -160,7 +160,7 @@ Or to select "http" and "quick" tests::
|
|||
|
||||
$ py.test -k "http or quick" -v
|
||||
======= test session starts ========
|
||||
platform linux -- Python 3.4.0, pytest-2.9.0, py-1.4.31, pluggy-0.3.1 -- $PYTHON_PREFIX/bin/python3.4
|
||||
platform linux -- Python 3.4.0, pytest-2.9.1, py-1.4.31, pluggy-0.3.1 -- $PYTHON_PREFIX/bin/python3.4
|
||||
cachedir: .cache
|
||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||
collecting ... collected 4 items
|
||||
|
@ -350,7 +350,7 @@ the test needs::
|
|||
|
||||
$ py.test -E stage2
|
||||
======= test session starts ========
|
||||
platform linux -- Python 3.4.0, pytest-2.9.0, py-1.4.31, pluggy-0.3.1
|
||||
platform linux -- Python 3.4.0, pytest-2.9.1, py-1.4.31, pluggy-0.3.1
|
||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||
collected 1 items
|
||||
|
||||
|
@ -362,7 +362,7 @@ and here is one that specifies exactly the environment needed::
|
|||
|
||||
$ py.test -E stage1
|
||||
======= test session starts ========
|
||||
platform linux -- Python 3.4.0, pytest-2.9.0, py-1.4.31, pluggy-0.3.1
|
||||
platform linux -- Python 3.4.0, pytest-2.9.1, py-1.4.31, pluggy-0.3.1
|
||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||
collected 1 items
|
||||
|
||||
|
@ -481,7 +481,7 @@ then you will see two test skipped and two executed tests as expected::
|
|||
|
||||
$ py.test -rs # this option reports skip reasons
|
||||
======= test session starts ========
|
||||
platform linux -- Python 3.4.0, pytest-2.9.0, py-1.4.31, pluggy-0.3.1
|
||||
platform linux -- Python 3.4.0, pytest-2.9.1, py-1.4.31, pluggy-0.3.1
|
||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||
collected 4 items
|
||||
|
||||
|
@ -495,7 +495,7 @@ Note that if you specify a platform via the marker-command line option like this
|
|||
|
||||
$ py.test -m linux2
|
||||
======= test session starts ========
|
||||
platform linux -- Python 3.4.0, pytest-2.9.0, py-1.4.31, pluggy-0.3.1
|
||||
platform linux -- Python 3.4.0, pytest-2.9.1, py-1.4.31, pluggy-0.3.1
|
||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||
collected 4 items
|
||||
|
||||
|
@ -547,7 +547,7 @@ We can now use the ``-m option`` to select one set::
|
|||
|
||||
$ py.test -m interface --tb=short
|
||||
======= test session starts ========
|
||||
platform linux -- Python 3.4.0, pytest-2.9.0, py-1.4.31, pluggy-0.3.1
|
||||
platform linux -- Python 3.4.0, pytest-2.9.1, py-1.4.31, pluggy-0.3.1
|
||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||
collected 4 items
|
||||
|
||||
|
@ -569,7 +569,7 @@ or to select both "event" and "interface" tests::
|
|||
|
||||
$ py.test -m "interface or event" --tb=short
|
||||
======= test session starts ========
|
||||
platform linux -- Python 3.4.0, pytest-2.9.0, py-1.4.31, pluggy-0.3.1
|
||||
platform linux -- Python 3.4.0, pytest-2.9.1, py-1.4.31, pluggy-0.3.1
|
||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||
collected 4 items
|
||||
|
||||
|
|
|
@ -27,7 +27,7 @@ now execute the test specification::
|
|||
|
||||
nonpython $ py.test test_simple.yml
|
||||
======= test session starts ========
|
||||
platform linux -- Python 3.4.0, pytest-2.9.0, py-1.4.31, pluggy-0.3.1
|
||||
platform linux -- Python 3.4.0, pytest-2.9.1, py-1.4.31, pluggy-0.3.1
|
||||
rootdir: $REGENDOC_TMPDIR/nonpython, inifile:
|
||||
collected 2 items
|
||||
|
||||
|
@ -59,7 +59,7 @@ consulted when reporting in ``verbose`` mode::
|
|||
|
||||
nonpython $ py.test -v
|
||||
======= test session starts ========
|
||||
platform linux -- Python 3.4.0, pytest-2.9.0, py-1.4.31, pluggy-0.3.1 -- $PYTHON_PREFIX/bin/python3.4
|
||||
platform linux -- Python 3.4.0, pytest-2.9.1, py-1.4.31, pluggy-0.3.1 -- $PYTHON_PREFIX/bin/python3.4
|
||||
cachedir: .cache
|
||||
rootdir: $REGENDOC_TMPDIR/nonpython, inifile:
|
||||
collecting ... collected 2 items
|
||||
|
@ -81,7 +81,7 @@ interesting to just look at the collection tree::
|
|||
|
||||
nonpython $ py.test --collect-only
|
||||
======= test session starts ========
|
||||
platform linux -- Python 3.4.0, pytest-2.9.0, py-1.4.31, pluggy-0.3.1
|
||||
platform linux -- Python 3.4.0, pytest-2.9.1, py-1.4.31, pluggy-0.3.1
|
||||
rootdir: $REGENDOC_TMPDIR/nonpython, inifile:
|
||||
collected 2 items
|
||||
<YamlFile 'test_simple.yml'>
|
||||
|
|
|
@ -130,7 +130,7 @@ objects, they are still using the default pytest representation::
|
|||
|
||||
$ py.test test_time.py --collect-only
|
||||
======= test session starts ========
|
||||
platform linux -- Python 3.4.0, pytest-2.9.0, py-1.4.31, pluggy-0.3.1
|
||||
platform linux -- Python 3.4.0, pytest-2.9.1, py-1.4.31, pluggy-0.3.1
|
||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||
collected 6 items
|
||||
<Module 'test_time.py'>
|
||||
|
@ -181,7 +181,7 @@ this is a fully self-contained example which you can run with::
|
|||
|
||||
$ py.test test_scenarios.py
|
||||
======= test session starts ========
|
||||
platform linux -- Python 3.4.0, pytest-2.9.0, py-1.4.31, pluggy-0.3.1
|
||||
platform linux -- Python 3.4.0, pytest-2.9.1, py-1.4.31, pluggy-0.3.1
|
||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||
collected 4 items
|
||||
|
||||
|
@ -194,7 +194,7 @@ If you just collect tests you'll also nicely see 'advanced' and 'basic' as varia
|
|||
|
||||
$ py.test --collect-only test_scenarios.py
|
||||
======= test session starts ========
|
||||
platform linux -- Python 3.4.0, pytest-2.9.0, py-1.4.31, pluggy-0.3.1
|
||||
platform linux -- Python 3.4.0, pytest-2.9.1, py-1.4.31, pluggy-0.3.1
|
||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||
collected 4 items
|
||||
<Module 'test_scenarios.py'>
|
||||
|
@ -259,7 +259,7 @@ Let's first see how it looks like at collection time::
|
|||
|
||||
$ py.test test_backends.py --collect-only
|
||||
======= test session starts ========
|
||||
platform linux -- Python 3.4.0, pytest-2.9.0, py-1.4.31, pluggy-0.3.1
|
||||
platform linux -- Python 3.4.0, pytest-2.9.1, py-1.4.31, pluggy-0.3.1
|
||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||
collected 2 items
|
||||
<Module 'test_backends.py'>
|
||||
|
@ -320,7 +320,7 @@ The result of this test will be successful::
|
|||
|
||||
$ py.test test_indirect_list.py --collect-only
|
||||
======= test session starts ========
|
||||
platform linux -- Python 3.4.0, pytest-2.9.0, py-1.4.31, pluggy-0.3.1
|
||||
platform linux -- Python 3.4.0, pytest-2.9.1, py-1.4.31, pluggy-0.3.1
|
||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||
collected 1 items
|
||||
<Module 'test_indirect_list.py'>
|
||||
|
@ -399,8 +399,8 @@ Running it results in some skips if we don't have all the python interpreters in
|
|||
. $ py.test -rs -q multipython.py
|
||||
ssssssssssss...ssssssssssss
|
||||
======= short test summary info ========
|
||||
SKIP [12] $REGENDOC_TMPDIR/CWD/multipython.py:23: 'python2.6' not found
|
||||
SKIP [12] $REGENDOC_TMPDIR/CWD/multipython.py:23: 'python3.3' not found
|
||||
SKIP [12] $REGENDOC_TMPDIR/CWD/multipython.py:23: 'python2.6' not found
|
||||
3 passed, 24 skipped in 0.12 seconds
|
||||
|
||||
Indirect parametrization of optional implementations/imports
|
||||
|
@ -448,7 +448,7 @@ If you run this with reporting for skips enabled::
|
|||
|
||||
$ py.test -rs test_module.py
|
||||
======= test session starts ========
|
||||
platform linux -- Python 3.4.0, pytest-2.9.0, py-1.4.31, pluggy-0.3.1
|
||||
platform linux -- Python 3.4.0, pytest-2.9.1, py-1.4.31, pluggy-0.3.1
|
||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||
collected 2 items
|
||||
|
||||
|
|
|
@ -82,7 +82,7 @@ then the test collection looks like this::
|
|||
|
||||
$ py.test --collect-only
|
||||
======= test session starts ========
|
||||
platform linux -- Python 3.4.0, pytest-2.9.0, py-1.4.31, pluggy-0.3.1
|
||||
platform linux -- Python 3.4.0, pytest-2.9.1, py-1.4.31, pluggy-0.3.1
|
||||
rootdir: $REGENDOC_TMPDIR, inifile: setup.cfg
|
||||
collected 2 items
|
||||
<Module 'check_myapp.py'>
|
||||
|
@ -128,7 +128,7 @@ You can always peek at the collection tree without running tests like this::
|
|||
|
||||
. $ py.test --collect-only pythoncollection.py
|
||||
======= test session starts ========
|
||||
platform linux -- Python 3.4.0, pytest-2.9.0, py-1.4.31, pluggy-0.3.1
|
||||
platform linux -- Python 3.4.0, pytest-2.9.1, py-1.4.31, pluggy-0.3.1
|
||||
rootdir: $REGENDOC_TMPDIR, inifile: pytest.ini
|
||||
collected 3 items
|
||||
<Module 'CWD/pythoncollection.py'>
|
||||
|
@ -182,7 +182,7 @@ interpreters and will leave out the setup.py file::
|
|||
|
||||
$ py.test --collect-only
|
||||
======= test session starts ========
|
||||
platform linux -- Python 3.4.0, pytest-2.9.0, py-1.4.31, pluggy-0.3.1
|
||||
platform linux -- Python 3.4.0, pytest-2.9.1, py-1.4.31, pluggy-0.3.1
|
||||
rootdir: $REGENDOC_TMPDIR, inifile: pytest.ini
|
||||
collected 0 items
|
||||
|
||||
|
|
|
@ -13,7 +13,7 @@ get on the terminal - we are working on that):
|
|||
|
||||
assertion $ py.test failure_demo.py
|
||||
======= test session starts ========
|
||||
platform linux -- Python 3.4.0, pytest-2.9.0, py-1.4.31, pluggy-0.3.1
|
||||
platform linux -- Python 3.4.0, pytest-2.9.1, py-1.4.31, pluggy-0.3.1
|
||||
rootdir: $REGENDOC_TMPDIR/assertion, inifile:
|
||||
collected 42 items
|
||||
|
||||
|
|
|
@ -108,7 +108,7 @@ directory with the above conftest.py::
|
|||
|
||||
$ py.test
|
||||
======= test session starts ========
|
||||
platform linux -- Python 3.4.0, pytest-2.9.0, py-1.4.31, pluggy-0.3.1
|
||||
platform linux -- Python 3.4.0, pytest-2.9.1, py-1.4.31, pluggy-0.3.1
|
||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||
collected 0 items
|
||||
|
||||
|
@ -156,7 +156,7 @@ and when running it will see a skipped "slow" test::
|
|||
|
||||
$ py.test -rs # "-rs" means report details on the little 's'
|
||||
======= test session starts ========
|
||||
platform linux -- Python 3.4.0, pytest-2.9.0, py-1.4.31, pluggy-0.3.1
|
||||
platform linux -- Python 3.4.0, pytest-2.9.1, py-1.4.31, pluggy-0.3.1
|
||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||
collected 2 items
|
||||
|
||||
|
@ -170,7 +170,7 @@ Or run it including the ``slow`` marked test::
|
|||
|
||||
$ py.test --runslow
|
||||
======= test session starts ========
|
||||
platform linux -- Python 3.4.0, pytest-2.9.0, py-1.4.31, pluggy-0.3.1
|
||||
platform linux -- Python 3.4.0, pytest-2.9.1, py-1.4.31, pluggy-0.3.1
|
||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||
collected 2 items
|
||||
|
||||
|
@ -262,7 +262,7 @@ which will add the string to the test header accordingly::
|
|||
|
||||
$ py.test
|
||||
======= test session starts ========
|
||||
platform linux -- Python 3.4.0, pytest-2.9.0, py-1.4.31, pluggy-0.3.1
|
||||
platform linux -- Python 3.4.0, pytest-2.9.1, py-1.4.31, pluggy-0.3.1
|
||||
project deps: mylib-1.1
|
||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||
collected 0 items
|
||||
|
@ -286,7 +286,7 @@ which will add info only when run with "--v"::
|
|||
|
||||
$ py.test -v
|
||||
======= test session starts ========
|
||||
platform linux -- Python 3.4.0, pytest-2.9.0, py-1.4.31, pluggy-0.3.1 -- $PYTHON_PREFIX/bin/python3.4
|
||||
platform linux -- Python 3.4.0, pytest-2.9.1, py-1.4.31, pluggy-0.3.1 -- $PYTHON_PREFIX/bin/python3.4
|
||||
cachedir: .cache
|
||||
info1: did you know that ...
|
||||
did you?
|
||||
|
@ -299,7 +299,7 @@ and nothing when run plainly::
|
|||
|
||||
$ py.test
|
||||
======= test session starts ========
|
||||
platform linux -- Python 3.4.0, pytest-2.9.0, py-1.4.31, pluggy-0.3.1
|
||||
platform linux -- Python 3.4.0, pytest-2.9.1, py-1.4.31, pluggy-0.3.1
|
||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||
collected 0 items
|
||||
|
||||
|
@ -332,7 +332,7 @@ Now we can profile which test functions execute the slowest::
|
|||
|
||||
$ py.test --durations=3
|
||||
======= test session starts ========
|
||||
platform linux -- Python 3.4.0, pytest-2.9.0, py-1.4.31, pluggy-0.3.1
|
||||
platform linux -- Python 3.4.0, pytest-2.9.1, py-1.4.31, pluggy-0.3.1
|
||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||
collected 3 items
|
||||
|
||||
|
@ -394,7 +394,7 @@ If we run this::
|
|||
|
||||
$ py.test -rx
|
||||
======= test session starts ========
|
||||
platform linux -- Python 3.4.0, pytest-2.9.0, py-1.4.31, pluggy-0.3.1
|
||||
platform linux -- Python 3.4.0, pytest-2.9.1, py-1.4.31, pluggy-0.3.1
|
||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||
collected 4 items
|
||||
|
||||
|
@ -465,7 +465,7 @@ We can run this::
|
|||
|
||||
$ py.test
|
||||
======= test session starts ========
|
||||
platform linux -- Python 3.4.0, pytest-2.9.0, py-1.4.31, pluggy-0.3.1
|
||||
platform linux -- Python 3.4.0, pytest-2.9.1, py-1.4.31, pluggy-0.3.1
|
||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||
collected 7 items
|
||||
|
||||
|
@ -479,7 +479,7 @@ We can run this::
|
|||
file $REGENDOC_TMPDIR/b/test_error.py, line 1
|
||||
def test_root(db): # no db here, will error out
|
||||
fixture 'db' not found
|
||||
available fixtures: cache, tmpdir_factory, capsys, pytestconfig, capfd, record_xml_property, recwarn, tmpdir, monkeypatch
|
||||
available fixtures: record_xml_property, recwarn, cache, capsys, pytestconfig, tmpdir_factory, capfd, monkeypatch, tmpdir
|
||||
use 'py.test --fixtures [testpath]' for help on them.
|
||||
|
||||
$REGENDOC_TMPDIR/b/test_error.py:1
|
||||
|
@ -569,7 +569,7 @@ and run them::
|
|||
|
||||
$ py.test test_module.py
|
||||
======= test session starts ========
|
||||
platform linux -- Python 3.4.0, pytest-2.9.0, py-1.4.31, pluggy-0.3.1
|
||||
platform linux -- Python 3.4.0, pytest-2.9.1, py-1.4.31, pluggy-0.3.1
|
||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||
collected 2 items
|
||||
|
||||
|
@ -660,7 +660,7 @@ and run it::
|
|||
|
||||
$ py.test -s test_module.py
|
||||
======= test session starts ========
|
||||
platform linux -- Python 3.4.0, pytest-2.9.0, py-1.4.31, pluggy-0.3.1
|
||||
platform linux -- Python 3.4.0, pytest-2.9.1, py-1.4.31, pluggy-0.3.1
|
||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||
collected 3 items
|
||||
|
||||
|
|
|
@ -75,7 +75,7 @@ marked ``smtp`` fixture function. Running the test looks like this::
|
|||
|
||||
$ py.test test_smtpsimple.py
|
||||
======= test session starts ========
|
||||
platform linux -- Python 3.4.0, pytest-2.9.0, py-1.4.31, pluggy-0.3.1
|
||||
platform linux -- Python 3.4.0, pytest-2.9.1, py-1.4.31, pluggy-0.3.1
|
||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||
collected 1 items
|
||||
|
||||
|
@ -193,7 +193,7 @@ inspect what is going on and can now run the tests::
|
|||
|
||||
$ py.test test_module.py
|
||||
======= test session starts ========
|
||||
platform linux -- Python 3.4.0, pytest-2.9.0, py-1.4.31, pluggy-0.3.1
|
||||
platform linux -- Python 3.4.0, pytest-2.9.1, py-1.4.31, pluggy-0.3.1
|
||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||
collected 2 items
|
||||
|
||||
|
@ -480,7 +480,7 @@ Running the above tests results in the following test IDs being used::
|
|||
|
||||
$ py.test --collect-only
|
||||
======= test session starts ========
|
||||
platform linux -- Python 3.4.0, pytest-2.9.0, py-1.4.31, pluggy-0.3.1
|
||||
platform linux -- Python 3.4.0, pytest-2.9.1, py-1.4.31, pluggy-0.3.1
|
||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||
collected 10 items
|
||||
<Module 'test_anothersmtp.py'>
|
||||
|
@ -531,7 +531,7 @@ Here we declare an ``app`` fixture which receives the previously defined
|
|||
|
||||
$ py.test -v test_appsetup.py
|
||||
======= test session starts ========
|
||||
platform linux -- Python 3.4.0, pytest-2.9.0, py-1.4.31, pluggy-0.3.1 -- $PYTHON_PREFIX/bin/python3.4
|
||||
platform linux -- Python 3.4.0, pytest-2.9.1, py-1.4.31, pluggy-0.3.1 -- $PYTHON_PREFIX/bin/python3.4
|
||||
cachedir: .cache
|
||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||
collecting ... collected 2 items
|
||||
|
@ -597,7 +597,7 @@ Let's run the tests in verbose mode and with looking at the print-output::
|
|||
|
||||
$ py.test -v -s test_module.py
|
||||
======= test session starts ========
|
||||
platform linux -- Python 3.4.0, pytest-2.9.0, py-1.4.31, pluggy-0.3.1 -- $PYTHON_PREFIX/bin/python3.4
|
||||
platform linux -- Python 3.4.0, pytest-2.9.1, py-1.4.31, pluggy-0.3.1 -- $PYTHON_PREFIX/bin/python3.4
|
||||
cachedir: .cache
|
||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||
collecting ... collected 8 items
|
||||
|
|
|
@ -27,7 +27,7 @@ Installation options::
|
|||
To check your installation has installed the correct version::
|
||||
|
||||
$ py.test --version
|
||||
This is pytest version 2.9.0, imported from $PYTHON_PREFIX/lib/python3.4/site-packages/pytest.py
|
||||
This is pytest version 2.9.1, imported from $PYTHON_PREFIX/lib/python3.4/site-packages/pytest.py
|
||||
|
||||
If you get an error checkout :ref:`installation issues`.
|
||||
|
||||
|
@ -49,7 +49,7 @@ That's it. You can execute the test function now::
|
|||
|
||||
$ py.test
|
||||
======= test session starts ========
|
||||
platform linux -- Python 3.4.0, pytest-2.9.0, py-1.4.31, pluggy-0.3.1
|
||||
platform linux -- Python 3.4.0, pytest-2.9.1, py-1.4.31, pluggy-0.3.1
|
||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||
collected 1 items
|
||||
|
||||
|
|
|
@ -55,7 +55,7 @@ them in turn::
|
|||
|
||||
$ py.test
|
||||
======= test session starts ========
|
||||
platform linux -- Python 3.4.0, pytest-2.9.0, py-1.4.31, pluggy-0.3.1
|
||||
platform linux -- Python 3.4.0, pytest-2.9.1, py-1.4.31, pluggy-0.3.1
|
||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||
collected 3 items
|
||||
|
||||
|
@ -103,7 +103,7 @@ Let's run this::
|
|||
|
||||
$ py.test
|
||||
======= test session starts ========
|
||||
platform linux -- Python 3.4.0, pytest-2.9.0, py-1.4.31, pluggy-0.3.1
|
||||
platform linux -- Python 3.4.0, pytest-2.9.1, py-1.4.31, pluggy-0.3.1
|
||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||
collected 3 items
|
||||
|
||||
|
|
|
@ -224,7 +224,7 @@ Running it with the report-on-xfail option gives this output::
|
|||
|
||||
example $ py.test -rx xfail_demo.py
|
||||
======= test session starts ========
|
||||
platform linux -- Python 3.4.0, pytest-2.9.0, py-1.4.31, pluggy-0.3.1
|
||||
platform linux -- Python 3.4.0, pytest-2.9.1, py-1.4.31, pluggy-0.3.1
|
||||
rootdir: $REGENDOC_TMPDIR/example, inifile:
|
||||
collected 7 items
|
||||
|
||||
|
|
|
@ -4,7 +4,7 @@ Talks and Tutorials
|
|||
|
||||
.. sidebar:: Next Open Trainings
|
||||
|
||||
`professional testing with pytest and tox <http://www.python-academy.com/courses/specialtopics/python_course_testing.html>`_, 24-26th November 2014, Freiburg, Germany
|
||||
`professional testing with pytest and tox <http://www.python-academy.com/courses/specialtopics/python_course_testing.html>`_, 27-29th June 2016, Freiburg, Germany
|
||||
|
||||
.. _`funcargs`: funcargs.html
|
||||
|
||||
|
@ -14,6 +14,9 @@ Talks and blog postings
|
|||
.. _`tutorial1 repository`: http://bitbucket.org/pytest-dev/pytest-tutorial1/
|
||||
.. _`pycon 2010 tutorial PDF`: http://bitbucket.org/pytest-dev/pytest-tutorial1/raw/tip/pytest-basic.pdf
|
||||
|
||||
- `pytest - Rapid Simple Testing, Florian Bruhin, Swiss Python Summit 2016
|
||||
<https://www.youtube.com/watch?v=rCBHkQ_LVIs>`_.
|
||||
|
||||
- `Improve your testing with Pytest and Mock, Gabe Hollombe, PyCon SG 2015
|
||||
<https://www.youtube.com/watch?v=RcN26hznmk4>`_.
|
||||
|
||||
|
|
|
@ -29,7 +29,7 @@ Running this would result in a passed test except for the last
|
|||
|
||||
$ py.test test_tmpdir.py
|
||||
======= test session starts ========
|
||||
platform linux -- Python 3.4.0, pytest-2.9.0, py-1.4.31, pluggy-0.3.1
|
||||
platform linux -- Python 3.4.0, pytest-2.9.1, py-1.4.31, pluggy-0.3.1
|
||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||
collected 1 items
|
||||
|
||||
|
|
|
@ -88,7 +88,7 @@ the ``self.db`` values in the traceback::
|
|||
|
||||
$ py.test test_unittest_db.py
|
||||
======= test session starts ========
|
||||
platform linux -- Python 3.4.0, pytest-2.9.0, py-1.4.31, pluggy-0.3.1
|
||||
platform linux -- Python 3.4.0, pytest-2.9.1, py-1.4.31, pluggy-0.3.1
|
||||
rootdir: $REGENDOC_TMPDIR, inifile:
|
||||
collected 2 items
|
||||
|
||||
|
|
|
@ -392,6 +392,20 @@ class TestGeneralUsage:
|
|||
monkeypatch.setitem(sys.modules, 'myplugin', mod)
|
||||
assert pytest.main(args=[str(tmpdir)], plugins=['myplugin']) == 0
|
||||
|
||||
def test_parameterized_with_bytes_regex(self, testdir):
|
||||
p = testdir.makepyfile("""
|
||||
import re
|
||||
import pytest
|
||||
@pytest.mark.parametrize('r', [re.compile(b'foo')])
|
||||
def test_stuff(r):
|
||||
pass
|
||||
"""
|
||||
)
|
||||
res = testdir.runpytest(p)
|
||||
res.stdout.fnmatch_lines([
|
||||
'*1 passed*'
|
||||
])
|
||||
|
||||
|
||||
class TestInvocationVariants:
|
||||
def test_earlyinit(self, testdir):
|
||||
|
|
|
@ -93,6 +93,17 @@ def test_unicode_handling():
|
|||
if sys.version_info[0] < 3:
|
||||
unicode(excinfo)
|
||||
|
||||
|
||||
@pytest.mark.skipif(sys.version_info[0] >= 3, reason='python 2 only issue')
|
||||
def test_unicode_handling_syntax_error():
|
||||
value = py.builtin._totext('\xc4\x85\xc4\x87\n', 'utf-8').encode('utf8')
|
||||
def f():
|
||||
raise SyntaxError('invalid syntax', (None, 1, 3, value))
|
||||
excinfo = pytest.raises(Exception, f)
|
||||
str(excinfo)
|
||||
if sys.version_info[0] < 3:
|
||||
unicode(excinfo)
|
||||
|
||||
def test_code_getargs():
|
||||
def f1(x):
|
||||
pass
|
||||
|
|
|
@ -1,3 +1,4 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
import sys
|
||||
from textwrap import dedent
|
||||
|
||||
|
@ -1181,3 +1182,19 @@ def test_class_injection_does_not_break_collection(testdir):
|
|||
result = testdir.runpytest()
|
||||
assert "RuntimeError: dictionary changed size during iteration" not in result.stdout.str()
|
||||
result.stdout.fnmatch_lines(['*1 passed*'])
|
||||
|
||||
|
||||
def test_syntax_error_with_non_ascii_chars(testdir):
|
||||
"""Fix decoding issue while formatting SyntaxErrors during collection (#578)
|
||||
"""
|
||||
testdir.makepyfile(u"""
|
||||
# -*- coding: UTF-8 -*-
|
||||
|
||||
☃
|
||||
""")
|
||||
result = testdir.runpytest()
|
||||
result.stdout.fnmatch_lines([
|
||||
'*ERROR collecting*',
|
||||
'*SyntaxError*',
|
||||
'*1 error in*',
|
||||
])
|
||||
|
|
|
@ -170,6 +170,11 @@ class TestMetafunc:
|
|||
result = idmaker((py.builtin._totext("a"), "b"), [({}, b'\xc3\xb4')])
|
||||
assert result == ['a0-\\xc3\\xb4']
|
||||
|
||||
def test_idmaker_with_bytes_regex(self):
|
||||
from _pytest.python import idmaker
|
||||
result = idmaker(("a"), [(re.compile(b'foo'), 1.0)])
|
||||
assert result == ["foo"]
|
||||
|
||||
def test_idmaker_native_strings(self):
|
||||
from _pytest.python import idmaker
|
||||
totext = py.builtin._totext
|
||||
|
@ -253,6 +258,20 @@ class TestMetafunc:
|
|||
"three-b2",
|
||||
]
|
||||
|
||||
def test_idmaker_with_ids(self):
|
||||
from _pytest.python import idmaker
|
||||
result = idmaker(("a", "b"), [(1, 2),
|
||||
(3, 4)],
|
||||
ids=["a", None])
|
||||
assert result == ["a", "3-4"]
|
||||
|
||||
def test_idmaker_with_ids_unique_names(self):
|
||||
from _pytest.python import idmaker
|
||||
result = idmaker(("a", "b"), [(1, 2),
|
||||
(3, 4)],
|
||||
ids=["a", "a"])
|
||||
assert result == ["0a", "1a"]
|
||||
|
||||
def test_addcall_and_parametrize(self):
|
||||
def func(x, y): pass
|
||||
metafunc = self.Metafunc(func)
|
||||
|
@ -784,6 +803,41 @@ class TestMetafuncFunctional:
|
|||
*test_function*1.3-b1*
|
||||
""")
|
||||
|
||||
def test_parametrize_with_None_in_ids(self, testdir):
|
||||
testdir.makepyfile("""
|
||||
import pytest
|
||||
def pytest_generate_tests(metafunc):
|
||||
metafunc.parametrize(("a", "b"), [(1,1), (1,1), (1,2)],
|
||||
ids=["basic", None, "advanced"])
|
||||
|
||||
def test_function(a, b):
|
||||
assert a == b
|
||||
""")
|
||||
result = testdir.runpytest("-v")
|
||||
assert result.ret == 1
|
||||
result.stdout.fnmatch_lines_random([
|
||||
"*test_function*basic*PASSED",
|
||||
"*test_function*1-1*PASSED",
|
||||
"*test_function*advanced*FAILED",
|
||||
])
|
||||
|
||||
def test_parametrize_with_identical_ids_get_unique_names(self, testdir):
|
||||
testdir.makepyfile("""
|
||||
import pytest
|
||||
def pytest_generate_tests(metafunc):
|
||||
metafunc.parametrize(("a", "b"), [(1,1), (1,2)],
|
||||
ids=["a", "a"])
|
||||
|
||||
def test_function(a, b):
|
||||
assert a == b
|
||||
""")
|
||||
result = testdir.runpytest("-v")
|
||||
assert result.ret == 1
|
||||
result.stdout.fnmatch_lines_random([
|
||||
"*test_function*0a*PASSED",
|
||||
"*test_function*1a*FAILED"
|
||||
])
|
||||
|
||||
@pytest.mark.parametrize(("scope", "length"),
|
||||
[("module", 2), ("function", 4)])
|
||||
def test_parametrize_scope_overrides(self, testdir, scope, length):
|
||||
|
|
|
@ -46,12 +46,12 @@ class TestNewAPI:
|
|||
def test_cache_failure_warns(self, testdir):
|
||||
testdir.tmpdir.ensure_dir('.cache').chmod(0)
|
||||
testdir.makepyfile("""
|
||||
def test_pass():
|
||||
pass
|
||||
def test_error():
|
||||
raise Exception
|
||||
|
||||
""")
|
||||
result = testdir.runpytest('-rw')
|
||||
assert result.ret == 0
|
||||
assert result.ret == 1
|
||||
result.stdout.fnmatch_lines([
|
||||
"*could not create cache path*",
|
||||
"*1 pytest-warnings*",
|
||||
|
@ -266,7 +266,7 @@ class TestLastFailed:
|
|||
""")
|
||||
config = testdir.parseconfigure()
|
||||
lastfailed = config.cache.get("cache/lastfailed", -1)
|
||||
assert not lastfailed
|
||||
assert lastfailed == -1
|
||||
|
||||
def test_non_serializable_parametrize(self, testdir):
|
||||
"""Test that failed parametrized tests with unmarshable parameters
|
||||
|
@ -305,7 +305,7 @@ class TestLastFailed:
|
|||
return lastfailed
|
||||
|
||||
lastfailed = rlf(fail_import=0, fail_run=0)
|
||||
assert not lastfailed
|
||||
assert lastfailed == -1
|
||||
|
||||
lastfailed = rlf(fail_import=1, fail_run=0)
|
||||
assert list(lastfailed) == ['test_maybe.py']
|
||||
|
@ -347,7 +347,7 @@ class TestLastFailed:
|
|||
return result, lastfailed
|
||||
|
||||
result, lastfailed = rlf(fail_import=0, fail_run=0)
|
||||
assert not lastfailed
|
||||
assert lastfailed == -1
|
||||
result.stdout.fnmatch_lines([
|
||||
'*3 passed*',
|
||||
])
|
||||
|
@ -370,3 +370,17 @@ class TestLastFailed:
|
|||
result.stdout.fnmatch_lines([
|
||||
'*2 passed*',
|
||||
])
|
||||
|
||||
def test_lastfailed_creates_cache_when_needed(self, testdir):
|
||||
# Issue #1342
|
||||
testdir.makepyfile(test_empty='')
|
||||
testdir.runpytest('-q', '--lf')
|
||||
assert not os.path.exists('.cache')
|
||||
|
||||
testdir.makepyfile(test_successful='def test_success():\n assert True')
|
||||
testdir.runpytest('-q', '--lf')
|
||||
assert not os.path.exists('.cache')
|
||||
|
||||
testdir.makepyfile(test_errored='def test_error():\n assert False')
|
||||
testdir.runpytest('-q', '--lf')
|
||||
assert os.path.exists('.cache')
|
||||
|
|
|
@ -449,11 +449,12 @@ class TestPython:
|
|||
assert "hello-stderr" in systemout.toxml()
|
||||
|
||||
|
||||
def test_mangle_testnames():
|
||||
from _pytest.junitxml import mangle_testnames
|
||||
names = ["a/pything.py", "Class", "()", "method"]
|
||||
newnames = mangle_testnames(names)
|
||||
assert newnames == ["a.pything", "Class", "method"]
|
||||
def test_mangle_test_address():
|
||||
from _pytest.junitxml import mangle_test_address
|
||||
address = '::'.join(
|
||||
["a/my.py.thing.py", "Class", "()", "method", "[a-1-::]"])
|
||||
newnames = mangle_test_address(address)
|
||||
assert newnames == ["a.my.py.thing", "Class", "method", "[a-1-::]"]
|
||||
|
||||
|
||||
def test_dont_configure_on_slaves(tmpdir):
|
||||
|
@ -619,6 +620,36 @@ def test_escaped_parametrized_names_xml(testdir):
|
|||
node.assert_attr(name="test_func[#x00]")
|
||||
|
||||
|
||||
def test_double_colon_split_function_issue469(testdir):
|
||||
testdir.makepyfile("""
|
||||
import pytest
|
||||
@pytest.mark.parametrize('param', ["double::colon"])
|
||||
def test_func(param):
|
||||
pass
|
||||
""")
|
||||
result, dom = runandparse(testdir)
|
||||
assert result.ret == 0
|
||||
node = dom.find_first_by_tag("testcase")
|
||||
node.assert_attr(classname="test_double_colon_split_function_issue469")
|
||||
node.assert_attr(name='test_func[double::colon]')
|
||||
|
||||
|
||||
def test_double_colon_split_method_issue469(testdir):
|
||||
testdir.makepyfile("""
|
||||
import pytest
|
||||
class TestClass:
|
||||
@pytest.mark.parametrize('param', ["double::colon"])
|
||||
def test_func(self, param):
|
||||
pass
|
||||
""")
|
||||
result, dom = runandparse(testdir)
|
||||
assert result.ret == 0
|
||||
node = dom.find_first_by_tag("testcase")
|
||||
node.assert_attr(
|
||||
classname="test_double_colon_split_method_issue469.TestClass")
|
||||
node.assert_attr(name='test_func[double::colon]')
|
||||
|
||||
|
||||
def test_unicode_issue368(testdir):
|
||||
path = testdir.tmpdir.join("test.xml")
|
||||
log = LogXML(str(path), None)
|
||||
|
|
|
@ -269,6 +269,22 @@ def test_keyword_option_parametrize(spec, testdir):
|
|||
assert len(passed) == len(passed_result)
|
||||
assert list(passed) == list(passed_result)
|
||||
|
||||
|
||||
def test_parametrized_collected_from_command_line(testdir):
|
||||
"""Parametrized test not collected if test named specified
|
||||
in command line issue#649.
|
||||
"""
|
||||
py_file = testdir.makepyfile("""
|
||||
import pytest
|
||||
@pytest.mark.parametrize("arg", [None, 1.3, "2-3"])
|
||||
def test_func(arg):
|
||||
pass
|
||||
""")
|
||||
file_name = os.path.basename(py_file.strpath)
|
||||
rec = testdir.inline_run(file_name + "::" + "test_func")
|
||||
rec.assertoutcome(passed=3)
|
||||
|
||||
|
||||
class TestFunctional:
|
||||
|
||||
def test_mark_per_function(self, testdir):
|
||||
|
|
|
@ -178,13 +178,17 @@ def test_default_markers(testdir):
|
|||
"*trylast*last*",
|
||||
])
|
||||
|
||||
|
||||
def test_importplugin_issue375(testdir, pytestpm):
|
||||
"""Don't hide import errors when importing plugins and provide
|
||||
an easy to debug message.
|
||||
"""
|
||||
testdir.syspathinsert(testdir.tmpdir)
|
||||
testdir.makepyfile(qwe="import aaaa")
|
||||
with pytest.raises(ImportError) as excinfo:
|
||||
pytestpm.import_plugin("qwe")
|
||||
assert "qwe" not in str(excinfo.value)
|
||||
assert "aaaa" in str(excinfo.value)
|
||||
expected = '.*Error importing plugin "qwe": No module named \'?aaaa\'?'
|
||||
assert py.std.re.match(expected, str(excinfo.value))
|
||||
|
||||
|
||||
class TestPytestPluginManager:
|
||||
|
|
|
@ -1,3 +1,4 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
from __future__ import with_statement
|
||||
|
||||
import _pytest._code
|
||||
|
@ -439,6 +440,27 @@ def test_pytest_fail_notrace(testdir):
|
|||
assert 'def teardown_function' not in result.stdout.str()
|
||||
|
||||
|
||||
@pytest.mark.parametrize('str_prefix', ['u', ''])
|
||||
def test_pytest_fail_notrace_non_ascii(testdir, str_prefix):
|
||||
"""Fix pytest.fail with pytrace=False with non-ascii characters (#1178).
|
||||
|
||||
This tests with native and unicode strings containing non-ascii chars.
|
||||
"""
|
||||
testdir.makepyfile(u"""
|
||||
# coding: utf-8
|
||||
import pytest
|
||||
|
||||
def test_hello():
|
||||
pytest.fail(%s'oh oh: ☺', pytrace=False)
|
||||
""" % str_prefix)
|
||||
result = testdir.runpytest()
|
||||
if sys.version_info[0] >= 3:
|
||||
result.stdout.fnmatch_lines(['*test_hello*', "oh oh: ☺"])
|
||||
else:
|
||||
result.stdout.fnmatch_lines(['*test_hello*', "oh oh: *"])
|
||||
assert 'def test_hello' not in result.stdout.str()
|
||||
|
||||
|
||||
def test_pytest_no_tests_collected_exit_status(testdir):
|
||||
result = testdir.runpytest()
|
||||
result.stdout.fnmatch_lines('*collected 0 items*')
|
||||
|
|