commit 7dcd9bf5ad
AUTHORS
@@ -120,6 +120,7 @@ Jonas Obrist
Jordan Guymon
Jordan Moldow
Jordan Speicher
Joseph Hunkeler
Joshua Bronson
Jurko Gospodnetić
Justyna Janczyszyn
@@ -129,6 +130,7 @@ Katerina Koukiou
Kevin Cox
Kodi B. Arfer
Kostis Anagnostopoulos
Kristoffer Nordström
Kyle Altendorf
Lawrence Mitchell
Lee Kamentsky
@@ -18,6 +18,84 @@ with advance notice in the **Deprecations** section of releases.
.. towncrier release notes start

pytest 4.2.0 (2019-01-30)
=========================

Features
--------

- `#3094 <https://github.com/pytest-dev/pytest/issues/3094>`_: `Class xunit-style <https://docs.pytest.org/en/latest/xunit_setup.html>`__ functions and methods
  now obey the scope of *autouse* fixtures.

  This fixes a number of surprising issues like ``setup_method`` being called before session-scoped
  autouse fixtures (see `#517 <https://github.com/pytest-dev/pytest/issues/517>`__ for an example).
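
  A minimal sketch of the new ordering (hypothetical module; with 4.2 the
  session-scoped autouse fixture below runs before ``setup_method``)::

      import pytest

      @pytest.fixture(autouse=True, scope="session")
      def session_fixture():
          # runs first as of pytest 4.2
          yield

      class TestOrdering(object):
          def setup_method(self, method):
              # now runs after the session-scoped autouse fixture
              pass

          def test_it(self):
              pass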

- `#4627 <https://github.com/pytest-dev/pytest/issues/4627>`_: Display a message at the end of the test session when running under Python 2.7 or 3.4,
  noting that pytest 5.0 will no longer support those Python versions.

- `#4660 <https://github.com/pytest-dev/pytest/issues/4660>`_: The number of *selected* tests is now also displayed when the ``-k`` or ``-m`` flags are used.

- `#4688 <https://github.com/pytest-dev/pytest/issues/4688>`_: The ``pytest_report_teststatus`` hook can now also receive a ``config`` parameter.

- `#4691 <https://github.com/pytest-dev/pytest/issues/4691>`_: The ``pytest_terminal_summary`` hook can now also receive a ``config`` parameter.
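
  A ``conftest.py`` sketch of the extended signature (the customization shown
  is illustrative)::

      def pytest_report_teststatus(report, config):
          # report passing test calls with a capital letter and word
          if report.when == "call" and report.passed:
              return "passed", "P", "PASSED"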


Bug Fixes
---------

- `#3547 <https://github.com/pytest-dev/pytest/issues/3547>`_: ``--junitxml`` can emit XML compatible with Jenkins xUnit.
  The ``junit_family`` INI option accepts ``legacy|xunit1``, which produces old-style output, and ``xunit2``, which conforms more strictly to https://github.com/jenkinsci/xunit-plugin/blob/xunit-2.3.2/src/main/resources/org/jenkinsci/plugins/xunit/types/model/xsd/junit-10.xsd
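
  To opt into the stricter schema, set the option in your INI file (a sketch;
  ``pytest.ini`` is an assumed location)::

      [pytest]
      junit_family = xunit2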

- `#4280 <https://github.com/pytest-dev/pytest/issues/4280>`_: Improve quitting from pdb, especially with ``--trace``.

  Using ``q`` (``quit``) after ``pdb.set_trace()`` now also quits pytest.
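
  Illustration (hypothetical test; typing ``q`` at the Pdb prompt now ends the
  whole pytest run, not just the current test)::

      def test_debug_me():
          import pdb
          pdb.set_trace()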

- `#4402 <https://github.com/pytest-dev/pytest/issues/4402>`_: Warning summary now groups warnings by message instead of by test id.

  This makes the output more compact and better conveys the general idea of how much code is
  actually generating warnings, instead of how many tests call that code.

- `#4536 <https://github.com/pytest-dev/pytest/issues/4536>`_: ``monkeypatch.delattr`` handles class descriptors like ``staticmethod``/``classmethod``.
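
  Sketch (hypothetical class)::

      class API(object):
          @staticmethod
          def helper():
              return 1

      def test_delattr(monkeypatch):
          monkeypatch.delattr(API, "helper")
          assert not hasattr(API, "helper")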

- `#4649 <https://github.com/pytest-dev/pytest/issues/4649>`_: Restore marks being considered keywords for keyword expressions.
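
  Sketch (the marker name is illustrative)::

      import pytest

      @pytest.mark.slow
      def test_heavy():
          pass

  ``pytest -k slow`` once again selects ``test_heavy`` by its mark name.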

- `#4653 <https://github.com/pytest-dev/pytest/issues/4653>`_: The ``tmp_path`` fixture and other related fixtures now provide a resolved path (a.k.a. real path).

- `#4667 <https://github.com/pytest-dev/pytest/issues/4667>`_: ``pytest_terminal_summary`` uses result from ``pytest_report_teststatus`` hook, rather than hardcoded strings.

- `#4669 <https://github.com/pytest-dev/pytest/issues/4669>`_: Correctly handle ``unittest.SkipTest`` exception containing non-ASCII characters on Python 2.

- `#4680 <https://github.com/pytest-dev/pytest/issues/4680>`_: Ensure the ``tmpdir`` and the ``tmp_path`` fixtures are the same folder.

- `#4681 <https://github.com/pytest-dev/pytest/issues/4681>`_: Ensure ``tmp_path`` is always a real path.
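
  Sketch covering both guarantees::

      def test_tmp_fixtures_agree(tmp_path, tmpdir):
          assert str(tmp_path) == str(tmpdir)    # same folder (#4680)
          assert tmp_path == tmp_path.resolve()  # already a real path (#4681)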


Trivial/Internal Changes
------------------------

- `#4643 <https://github.com/pytest-dev/pytest/issues/4643>`_: Use ``a.item()`` instead of the deprecated ``np.asscalar(a)`` in ``pytest.approx``.

  ``np.asscalar`` has been `deprecated <https://github.com/numpy/numpy/blob/master/doc/release/1.16.0-notes.rst#new-deprecations>`__ in ``numpy 1.16``.
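
  Equivalent call, for illustration::

      import numpy as np

      x = np.float64(1.5)
      assert x.item() == 1.5  # replaces the deprecated np.asscalar(x)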

- `#4657 <https://github.com/pytest-dev/pytest/issues/4657>`_: Copy ``saferepr`` from pylib.


pytest 4.1.1 (2019-01-12)
=========================

@@ -1,4 +0,0 @@
Warning summary now groups warnings by message instead of by test id.

This makes the output more compact and better conveys the general idea of how much code is
actually generating warnings, instead of how many tests call that code.
@@ -1 +0,0 @@
``monkeypatch.delattr`` handles class descriptors like ``staticmethod``/``classmethod``.
@@ -1,3 +0,0 @@
Use ``a.item()`` instead of the deprecated ``np.asscalar(a)`` in ``pytest.approx``.

``np.asscalar`` has been `deprecated <https://github.com/numpy/numpy/blob/master/doc/release/1.16.0-notes.rst#new-deprecations>`__ in ``numpy 1.16``.
@@ -1 +0,0 @@
Restore marks being considered keywords for keyword expressions.
@@ -1 +0,0 @@
The ``tmp_path`` fixture and other related fixtures now provide a resolved path (a.k.a. real path).
@@ -1 +0,0 @@
Copy ``saferepr`` from pylib.
@@ -1 +0,0 @@
``pytest_terminal_summary`` uses result from ``pytest_report_teststatus`` hook, rather than hardcoded strings.
@@ -1 +0,0 @@
Correctly handle ``unittest.SkipTest`` exception containing non-ASCII characters on Python 2.
@@ -6,6 +6,7 @@ Release announcements
   :maxdepth: 2

   release-4.2.0
   release-4.1.1
   release-4.1.0
   release-4.0.2
@@ -0,0 +1,37 @@
pytest-4.2.0
=======================================

The pytest team is proud to announce the 4.2.0 release!

pytest is a mature Python testing tool with more than 2000 tests
against itself, passing on many different interpreters and platforms.

This release contains a number of bug fixes and improvements, so users are encouraged
to take a look at the CHANGELOG:

https://docs.pytest.org/en/latest/changelog.html

For complete documentation, please visit:

https://docs.pytest.org/en/latest/

As usual, you can upgrade from PyPI via:

    pip install -U pytest

Thanks to all who contributed to this release, among them:

* Adam Uhlir
* Anthony Sottile
* Bruno Oliveira
* Christopher Dignam
* Daniel Hahler
* Joseph Hunkeler
* Kristoffer Nordstroem
* Ronny Pfannschmidt
* Thomas Hisch
* wim glenn


Happy testing,
The Pytest Development Team
@@ -29,6 +29,7 @@ you will see the return value of the function call:

$ pytest test_assert1.py
=========================== test session starts ============================
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y
cachedir: $PYTHON_PREFIX/.pytest_cache
rootdir: $REGENDOC_TMPDIR, inifile:
collected 1 item

@@ -173,6 +174,7 @@ if you run this module:

$ pytest test_assert2.py
=========================== test session starts ============================
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y
cachedir: $PYTHON_PREFIX/.pytest_cache
rootdir: $REGENDOC_TMPDIR, inifile:
collected 1 item

@@ -81,8 +81,9 @@ If you then run it with ``--lf``:

$ pytest --lf
=========================== test session starts ============================
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y
cachedir: $PYTHON_PREFIX/.pytest_cache
rootdir: $REGENDOC_TMPDIR, inifile:
collected 50 items / 48 deselected
collected 50 items / 48 deselected / 2 selected
run-last-failure: rerun previous 2 failures

test_50.py FF [100%]
@@ -124,6 +125,7 @@ of ``FF`` and dots):

$ pytest --ff
=========================== test session starts ============================
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y
cachedir: $PYTHON_PREFIX/.pytest_cache
rootdir: $REGENDOC_TMPDIR, inifile:
collected 50 items
run-last-failure: rerun previous 2 failures first
@@ -257,11 +259,17 @@ You can always peek at the content of the cache using the

$ pytest --cache-show
=========================== test session starts ============================
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y
cachedir: $PYTHON_PREFIX/.pytest_cache
rootdir: $REGENDOC_TMPDIR, inifile:
cachedir: $REGENDOC_TMPDIR/.pytest_cache
cachedir: $PYTHON_PREFIX/.pytest_cache
------------------------------- cache values -------------------------------
cache/lastfailed contains:
{'test_caching.py::test_function': True}
{'test_50.py::test_num[17]': True,
 'test_50.py::test_num[25]': True,
 'test_assert1.py::test_function': True,
 'test_assert2.py::test_set_comparison': True,
 'test_caching.py::test_function': True,
 'test_foocompare.py::test_compare': True}
cache/nodeids contains:
['test_caching.py::test_function']
cache/stepwise contains:
@@ -68,6 +68,7 @@ of the failing function and hide the other one:

$ pytest
=========================== test session starts ============================
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y
cachedir: $PYTHON_PREFIX/.pytest_cache
rootdir: $REGENDOC_TMPDIR, inifile:
collected 2 items

@@ -65,6 +65,7 @@ then you can just invoke ``pytest`` without command line options:

$ pytest
=========================== test session starts ============================
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y
cachedir: $PYTHON_PREFIX/.pytest_cache
rootdir: $REGENDOC_TMPDIR, inifile: pytest.ini
collected 1 item

@@ -33,10 +33,10 @@ You can then restrict a test run to only run tests marked with ``webtest``:

$ pytest -v -m webtest
=========================== test session starts ============================
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y -- $PYTHON_PREFIX/bin/python3.6
cachedir: .pytest_cache
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y -- $PYTHON_PREFIX/bin/python
cachedir: $PYTHON_PREFIX/.pytest_cache
rootdir: $REGENDOC_TMPDIR, inifile:
collecting ... collected 4 items / 3 deselected
collecting ... collected 4 items / 3 deselected / 1 selected

test_server.py::test_send_http PASSED [100%]

@@ -48,10 +48,10 @@ Or the inverse, running all tests except the webtest ones:

$ pytest -v -m "not webtest"
=========================== test session starts ============================
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y -- $PYTHON_PREFIX/bin/python3.6
cachedir: .pytest_cache
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y -- $PYTHON_PREFIX/bin/python
cachedir: $PYTHON_PREFIX/.pytest_cache
rootdir: $REGENDOC_TMPDIR, inifile:
collecting ... collected 4 items / 1 deselected
collecting ... collected 4 items / 1 deselected / 3 selected

test_server.py::test_something_quick PASSED [ 33%]
test_server.py::test_another PASSED [ 66%]
@@ -70,8 +70,8 @@ tests based on their module, class, method, or function name:

$ pytest -v test_server.py::TestClass::test_method
=========================== test session starts ============================
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y -- $PYTHON_PREFIX/bin/python3.6
cachedir: .pytest_cache
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y -- $PYTHON_PREFIX/bin/python
cachedir: $PYTHON_PREFIX/.pytest_cache
rootdir: $REGENDOC_TMPDIR, inifile:
collecting ... collected 1 item

@@ -85,8 +85,8 @@ You can also select on the class:

$ pytest -v test_server.py::TestClass
=========================== test session starts ============================
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y -- $PYTHON_PREFIX/bin/python3.6
cachedir: .pytest_cache
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y -- $PYTHON_PREFIX/bin/python
cachedir: $PYTHON_PREFIX/.pytest_cache
rootdir: $REGENDOC_TMPDIR, inifile:
collecting ... collected 1 item

@@ -100,8 +100,8 @@ Or select multiple nodes:

$ pytest -v test_server.py::TestClass test_server.py::test_send_http
=========================== test session starts ============================
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y -- $PYTHON_PREFIX/bin/python3.6
cachedir: .pytest_cache
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y -- $PYTHON_PREFIX/bin/python
cachedir: $PYTHON_PREFIX/.pytest_cache
rootdir: $REGENDOC_TMPDIR, inifile:
collecting ... collected 2 items

@@ -140,10 +140,10 @@ select tests based on their names:

$ pytest -v -k http # running with the above defined example module
=========================== test session starts ============================
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y -- $PYTHON_PREFIX/bin/python3.6
cachedir: .pytest_cache
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y -- $PYTHON_PREFIX/bin/python
cachedir: $PYTHON_PREFIX/.pytest_cache
rootdir: $REGENDOC_TMPDIR, inifile:
collecting ... collected 4 items / 3 deselected
collecting ... collected 4 items / 3 deselected / 1 selected

test_server.py::test_send_http PASSED [100%]

@@ -155,10 +155,10 @@ And you can also run all tests except the ones that match the keyword:

$ pytest -k "not send_http" -v
=========================== test session starts ============================
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y -- $PYTHON_PREFIX/bin/python3.6
cachedir: .pytest_cache
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y -- $PYTHON_PREFIX/bin/python
cachedir: $PYTHON_PREFIX/.pytest_cache
rootdir: $REGENDOC_TMPDIR, inifile:
collecting ... collected 4 items / 1 deselected
collecting ... collected 4 items / 1 deselected / 3 selected

test_server.py::test_something_quick PASSED [ 33%]
test_server.py::test_another PASSED [ 66%]
@@ -172,10 +172,10 @@ Or to select "http" and "quick" tests:

$ pytest -k "http or quick" -v
=========================== test session starts ============================
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y -- $PYTHON_PREFIX/bin/python3.6
cachedir: .pytest_cache
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y -- $PYTHON_PREFIX/bin/python
cachedir: $PYTHON_PREFIX/.pytest_cache
rootdir: $REGENDOC_TMPDIR, inifile:
collecting ... collected 4 items / 2 deselected
collecting ... collected 4 items / 2 deselected / 2 selected

test_server.py::test_send_http PASSED [ 50%]
test_server.py::test_something_quick PASSED [100%]
@@ -365,6 +365,7 @@ the test needs:

$ pytest -E stage2
=========================== test session starts ============================
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y
cachedir: $PYTHON_PREFIX/.pytest_cache
rootdir: $REGENDOC_TMPDIR, inifile:
collected 1 item

@@ -379,6 +380,7 @@ and here is one that specifies exactly the environment needed:

$ pytest -E stage1
=========================== test session starts ============================
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y
cachedir: $PYTHON_PREFIX/.pytest_cache
rootdir: $REGENDOC_TMPDIR, inifile:
collected 1 item

@@ -546,12 +548,13 @@ then you will see two tests skipped and two executed tests as expected:

$ pytest -rs # this option reports skip reasons
=========================== test session starts ============================
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y
cachedir: $PYTHON_PREFIX/.pytest_cache
rootdir: $REGENDOC_TMPDIR, inifile:
collected 4 items

test_plat.py s.s. [100%]
========================= short test summary info ==========================
SKIP [2] $REGENDOC_TMPDIR/conftest.py:12: cannot run on platform linux
SKIPPED [2] $REGENDOC_TMPDIR/conftest.py:12: cannot run on platform linux

=================== 2 passed, 2 skipped in 0.12 seconds ====================

@@ -562,8 +565,9 @@ Note that if you specify a platform via the marker-command line option like this:

$ pytest -m linux
=========================== test session starts ============================
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y
cachedir: $PYTHON_PREFIX/.pytest_cache
rootdir: $REGENDOC_TMPDIR, inifile:
collected 4 items / 3 deselected
collected 4 items / 3 deselected / 1 selected

test_plat.py . [100%]

@@ -615,8 +619,9 @@ We can now use the ``-m option`` to select one set:

$ pytest -m interface --tb=short
=========================== test session starts ============================
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y
cachedir: $PYTHON_PREFIX/.pytest_cache
rootdir: $REGENDOC_TMPDIR, inifile:
collected 4 items / 2 deselected
collected 4 items / 2 deselected / 2 selected

test_module.py FF [100%]

@@ -638,8 +643,9 @@ or to select both "event" and "interface" tests:

$ pytest -m "interface or event" --tb=short
=========================== test session starts ============================
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y
cachedir: $PYTHON_PREFIX/.pytest_cache
rootdir: $REGENDOC_TMPDIR, inifile:
collected 4 items / 1 deselected
collected 4 items / 1 deselected / 3 selected

test_module.py FFF [100%]

@@ -30,6 +30,7 @@ now execute the test specification:

nonpython $ pytest test_simple.yml
=========================== test session starts ============================
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y
cachedir: $PYTHON_PREFIX/.pytest_cache
rootdir: $REGENDOC_TMPDIR/nonpython, inifile:
collected 2 items

@@ -63,8 +64,8 @@ consulted when reporting in ``verbose`` mode:

nonpython $ pytest -v
=========================== test session starts ============================
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y -- $PYTHON_PREFIX/bin/python3.6
cachedir: .pytest_cache
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y -- $PYTHON_PREFIX/bin/python
cachedir: $PYTHON_PREFIX/.pytest_cache
rootdir: $REGENDOC_TMPDIR/nonpython, inifile:
collecting ... collected 2 items

@@ -88,6 +89,7 @@ interesting to just look at the collection tree:

nonpython $ pytest --collect-only
=========================== test session starts ============================
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y
cachedir: $PYTHON_PREFIX/.pytest_cache
rootdir: $REGENDOC_TMPDIR/nonpython, inifile:
collected 2 items
<Package $REGENDOC_TMPDIR/nonpython>
@@ -145,6 +145,7 @@ objects, they are still using the default pytest representation:

$ pytest test_time.py --collect-only
=========================== test session starts ============================
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y
cachedir: $PYTHON_PREFIX/.pytest_cache
rootdir: $REGENDOC_TMPDIR, inifile:
collected 8 items
<Module test_time.py>
@@ -203,6 +204,7 @@ this is a fully self-contained example which you can run with:

$ pytest test_scenarios.py
=========================== test session starts ============================
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y
cachedir: $PYTHON_PREFIX/.pytest_cache
rootdir: $REGENDOC_TMPDIR, inifile:
collected 4 items

@@ -217,6 +219,7 @@ If you just collect tests you'll also nicely see 'advanced' and 'basic' as variants:

$ pytest --collect-only test_scenarios.py
=========================== test session starts ============================
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y
cachedir: $PYTHON_PREFIX/.pytest_cache
rootdir: $REGENDOC_TMPDIR, inifile:
collected 4 items
<Module test_scenarios.py>
@@ -283,6 +286,7 @@ Let's first see how it looks like at collection time:

$ pytest test_backends.py --collect-only
=========================== test session starts ============================
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y
cachedir: $PYTHON_PREFIX/.pytest_cache
rootdir: $REGENDOC_TMPDIR, inifile:
collected 2 items
<Module test_backends.py>
@@ -348,6 +352,7 @@ The result of this test will be successful:

$ pytest test_indirect_list.py --collect-only
=========================== test session starts ============================
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y
cachedir: $PYTHON_PREFIX/.pytest_cache
rootdir: $REGENDOC_TMPDIR, inifile:
collected 1 item
<Module test_indirect_list.py>
@@ -433,7 +438,7 @@ Running it results in some skips if we don't have all the python interpreters installed:

. $ pytest -rs -q multipython.py
...sss...sssssssss...sss... [100%]
========================= short test summary info ==========================
SKIP [15] $REGENDOC_TMPDIR/CWD/multipython.py:30: 'python3.4' not found
SKIPPED [15] $REGENDOC_TMPDIR/CWD/multipython.py:30: 'python3.4' not found
12 passed, 15 skipped in 0.12 seconds

Indirect parametrization of optional implementations/imports
@@ -484,12 +489,13 @@ If you run this with reporting for skips enabled:

$ pytest -rs test_module.py
=========================== test session starts ============================
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y
cachedir: $PYTHON_PREFIX/.pytest_cache
rootdir: $REGENDOC_TMPDIR, inifile:
collected 2 items

test_module.py .s [100%]
========================= short test summary info ==========================
SKIP [1] $REGENDOC_TMPDIR/conftest.py:11: could not import 'opt2'
SKIPPED [1] $REGENDOC_TMPDIR/conftest.py:11: could not import 'opt2'

=================== 1 passed, 1 skipped in 0.12 seconds ====================

@@ -540,14 +546,14 @@ Then run ``pytest`` with verbose mode and with only the ``basic`` marker:

$ pytest -v -m basic
=========================== test session starts ============================
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y -- $PYTHON_PREFIX/bin/python3.6
cachedir: .pytest_cache
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y -- $PYTHON_PREFIX/bin/python
cachedir: $PYTHON_PREFIX/.pytest_cache
rootdir: $REGENDOC_TMPDIR, inifile:
collecting ... collected 17 items / 14 deselected
collecting ... collected 17 items / 14 deselected / 3 selected

test_pytest_param_example.py::test_eval[1+7-8] PASSED [ 33%]
test_pytest_param_example.py::test_eval[basic_2+4] PASSED [ 66%]
test_pytest_param_example.py::test_eval[basic_6*9] xfail [100%]
test_pytest_param_example.py::test_eval[basic_6*9] XFAIL [100%]

============ 2 passed, 14 deselected, 1 xfailed in 0.12 seconds ============

@@ -132,6 +132,7 @@ The test collection would look like this:

$ pytest --collect-only
=========================== test session starts ============================
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y
cachedir: $PYTHON_PREFIX/.pytest_cache
rootdir: $REGENDOC_TMPDIR, inifile: pytest.ini
collected 2 items
<Module check_myapp.py>
@@ -187,6 +188,7 @@ You can always peek at the collection tree without running tests like this:

. $ pytest --collect-only pythoncollection.py
=========================== test session starts ============================
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y
cachedir: $PYTHON_PREFIX/.pytest_cache
rootdir: $REGENDOC_TMPDIR, inifile: pytest.ini
collected 3 items
<Module CWD/pythoncollection.py>
@@ -259,6 +261,7 @@ file will be left out:

$ pytest --collect-only
=========================== test session starts ============================
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y
cachedir: $PYTHON_PREFIX/.pytest_cache
rootdir: $REGENDOC_TMPDIR, inifile: pytest.ini
collected 0 items

@@ -14,6 +14,7 @@ get on the terminal - we are working on that):

assertion $ pytest failure_demo.py
=========================== test session starts ============================
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y
cachedir: $PYTHON_PREFIX/.pytest_cache
rootdir: $REGENDOC_TMPDIR/assertion, inifile:
collected 44 items

@@ -128,6 +128,7 @@ directory with the above conftest.py:

$ pytest
=========================== test session starts ============================
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y
cachedir: $PYTHON_PREFIX/.pytest_cache
rootdir: $REGENDOC_TMPDIR, inifile:
collected 0 items

@@ -188,12 +189,13 @@ and when running it will see a skipped "slow" test:

$ pytest -rs # "-rs" means report details on the little 's'
=========================== test session starts ============================
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y
cachedir: $PYTHON_PREFIX/.pytest_cache
rootdir: $REGENDOC_TMPDIR, inifile:
collected 2 items

test_module.py .s [100%]
========================= short test summary info ==========================
SKIP [1] test_module.py:8: need --runslow option to run
SKIPPED [1] test_module.py:8: need --runslow option to run

=================== 1 passed, 1 skipped in 0.12 seconds ====================

@@ -204,6 +206,7 @@ Or run it including the ``slow`` marked test:

$ pytest --runslow
=========================== test session starts ============================
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y
cachedir: $PYTHON_PREFIX/.pytest_cache
rootdir: $REGENDOC_TMPDIR, inifile:
collected 2 items

@@ -346,6 +349,7 @@ which will add the string to the test header accordingly:

$ pytest
=========================== test session starts ============================
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y
cachedir: $PYTHON_PREFIX/.pytest_cache
project deps: mylib-1.1
rootdir: $REGENDOC_TMPDIR, inifile:
collected 0 items

@@ -373,8 +377,8 @@ which will add info only when run with "--v":

$ pytest -v
=========================== test session starts ============================
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y -- $PYTHON_PREFIX/bin/python3.6
cachedir: .pytest_cache
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y -- $PYTHON_PREFIX/bin/python
cachedir: $PYTHON_PREFIX/.pytest_cache
info1: did you know that ...
did you?
rootdir: $REGENDOC_TMPDIR, inifile:

@@ -389,6 +393,7 @@ and nothing when run plainly:

$ pytest
=========================== test session starts ============================
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y
cachedir: $PYTHON_PREFIX/.pytest_cache
rootdir: $REGENDOC_TMPDIR, inifile:
collected 0 items

@@ -428,6 +433,7 @@ Now we can profile which test functions execute the slowest:

$ pytest --durations=3
=========================== test session starts ============================
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y
cachedir: $PYTHON_PREFIX/.pytest_cache
rootdir: $REGENDOC_TMPDIR, inifile:
collected 3 items

@@ -502,6 +508,7 @@ If we run this:

$ pytest -rx
=========================== test session starts ============================
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y
cachedir: $PYTHON_PREFIX/.pytest_cache
rootdir: $REGENDOC_TMPDIR, inifile:
collected 4 items

@@ -585,6 +592,7 @@ We can run this:

$ pytest
=========================== test session starts ============================
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y
cachedir: $PYTHON_PREFIX/.pytest_cache
rootdir: $REGENDOC_TMPDIR, inifile:
collected 7 items

@@ -698,6 +706,7 @@ and run them:

$ pytest test_module.py
=========================== test session starts ============================
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y
cachedir: $PYTHON_PREFIX/.pytest_cache
rootdir: $REGENDOC_TMPDIR, inifile:
collected 2 items

@@ -799,6 +808,7 @@ and run it:

$ pytest -s test_module.py
=========================== test session starts ============================
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y
cachedir: $PYTHON_PREFIX/.pytest_cache
rootdir: $REGENDOC_TMPDIR, inifile:
collected 3 items

@@ -73,6 +73,7 @@ marked ``smtp_connection`` fixture function. Running the test looks like this:

$ pytest test_smtpsimple.py
=========================== test session starts ============================
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y
cachedir: $PYTHON_PREFIX/.pytest_cache
rootdir: $REGENDOC_TMPDIR, inifile:
collected 1 item

@@ -213,6 +214,7 @@ inspect what is going on and can now run the tests:

$ pytest test_module.py
=========================== test session starts ============================
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y
cachedir: $PYTHON_PREFIX/.pytest_cache
rootdir: $REGENDOC_TMPDIR, inifile:
collected 2 items

@@ -701,6 +703,7 @@ Running the above tests results in the following test IDs being used:

$ pytest --collect-only
=========================== test session starts ============================
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y
cachedir: $PYTHON_PREFIX/.pytest_cache
rootdir: $REGENDOC_TMPDIR, inifile:
collected 10 items
<Module test_anothersmtp.py>
@@ -744,8 +747,8 @@ Running this test will *skip* the invocation of ``data_set`` with value ``2``:

$ pytest test_fixture_marks.py -v
=========================== test session starts ============================
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y -- $PYTHON_PREFIX/bin/python3.6
cachedir: .pytest_cache
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y -- $PYTHON_PREFIX/bin/python
cachedir: $PYTHON_PREFIX/.pytest_cache
rootdir: $REGENDOC_TMPDIR, inifile:
collecting ... collected 3 items

@@ -789,8 +792,8 @@ Here we declare an ``app`` fixture which receives the previously defined

$ pytest -v test_appsetup.py
=========================== test session starts ============================
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y -- $PYTHON_PREFIX/bin/python3.6
cachedir: .pytest_cache
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y -- $PYTHON_PREFIX/bin/python
cachedir: $PYTHON_PREFIX/.pytest_cache
rootdir: $REGENDOC_TMPDIR, inifile:
collecting ... collected 2 items

@@ -860,8 +863,8 @@ Let's run the tests in verbose mode and with looking at the print-output:

$ pytest -v -s test_module.py
=========================== test session starts ============================
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y -- $PYTHON_PREFIX/bin/python3.6
cachedir: .pytest_cache
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y -- $PYTHON_PREFIX/bin/python
cachedir: $PYTHON_PREFIX/.pytest_cache
rootdir: $REGENDOC_TMPDIR, inifile:
collecting ... collected 8 items

@@ -47,6 +47,7 @@ That’s it. You can now execute the test function:

$ pytest
=========================== test session starts ============================
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y
cachedir: $PYTHON_PREFIX/.pytest_cache
rootdir: $REGENDOC_TMPDIR, inifile:
collected 1 item

@@ -29,6 +29,7 @@ To execute it:

$ pytest
=========================== test session starts ============================
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y
cachedir: $PYTHON_PREFIX/.pytest_cache
rootdir: $REGENDOC_TMPDIR, inifile:
collected 1 item

@@ -57,6 +57,7 @@ them in turn:

$ pytest
=========================== test session starts ============================
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y
cachedir: $PYTHON_PREFIX/.pytest_cache
rootdir: $REGENDOC_TMPDIR, inifile:
collected 3 items

@@ -108,6 +109,7 @@ Let's run this:

$ pytest
=========================== test session starts ============================
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y
cachedir: $PYTHON_PREFIX/.pytest_cache
rootdir: $REGENDOC_TMPDIR, inifile:
collected 3 items

@@ -207,7 +209,7 @@ list:

$ pytest -q -rs test_strings.py
s [100%]
========================= short test summary info ==========================
SKIP [1] test_strings.py: got empty parameter set ['stringinput'], function test_valid_string at $REGENDOC_TMPDIR/test_strings.py:1
SKIPPED [1] test_strings.py: got empty parameter set ['stringinput'], function test_valid_string at $REGENDOC_TMPDIR/test_strings.py:1
1 skipped in 0.12 seconds

Note that when calling ``metafunc.parametrize`` multiple times with different parameter sets, all parameter names across
@@ -330,6 +330,7 @@ Running it with the report-on-xfail option gives this output:

example $ pytest -rx xfail_demo.py
=========================== test session starts ============================
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y
cachedir: $PYTHON_PREFIX/.pytest_cache
rootdir: $REGENDOC_TMPDIR/example, inifile:
collected 7 items

@@ -42,6 +42,7 @@ Running this would result in a passed test except for the last

$ pytest test_tmp_path.py
=========================== test session starts ============================
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y
cachedir: $PYTHON_PREFIX/.pytest_cache
rootdir: $REGENDOC_TMPDIR, inifile:
collected 1 item

@@ -104,6 +105,7 @@ Running this would result in a passed test except for the last

$ pytest test_tmpdir.py
=========================== test session starts ============================
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y
cachedir: $PYTHON_PREFIX/.pytest_cache
rootdir: $REGENDOC_TMPDIR, inifile:
collected 1 item

@@ -129,6 +129,7 @@ the ``self.db`` values in the traceback:

$ pytest test_unittest_db.py
=========================== test session starts ============================
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y
cachedir: $PYTHON_PREFIX/.pytest_cache
rootdir: $REGENDOC_TMPDIR, inifile:
collected 2 items

@@ -193,6 +193,7 @@ Example:

$ pytest -ra
=========================== test session starts ============================
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y
cachedir: $PYTHON_PREFIX/.pytest_cache
rootdir: $REGENDOC_TMPDIR, inifile:
collected 6 items

@@ -216,12 +217,12 @@ Example:

test_example.py:14: AssertionError
========================= short test summary info ==========================
SKIP [1] $REGENDOC_TMPDIR/test_example.py:23: skipping this test
SKIPPED [1] $REGENDOC_TMPDIR/test_example.py:23: skipping this test
XFAIL test_example.py::test_xfail
reason: xfailing this test
XPASS test_example.py::test_xpass always xfail
ERROR test_example.py::test_error
FAIL test_example.py::test_fail
FAILED test_example.py::test_fail
1 failed, 1 passed, 1 skipped, 1 xfailed, 1 xpassed, 1 error in 0.12 seconds

The ``-r`` option accepts a number of characters after it, with ``a`` used above meaning "all except passes".
@@ -244,6 +245,7 @@ More than one character can be used, so for example to only see failed and skipp

$ pytest -rfs
=========================== test session starts ============================
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y
cachedir: $PYTHON_PREFIX/.pytest_cache
rootdir: $REGENDOC_TMPDIR, inifile:
collected 6 items

@@ -267,8 +269,8 @@ More than one character can be used, so for example to only see failed and skipp

test_example.py:14: AssertionError
========================= short test summary info ==========================
FAIL test_example.py::test_fail
SKIP [1] $REGENDOC_TMPDIR/test_example.py:23: skipping this test
FAILED test_example.py::test_fail
SKIPPED [1] $REGENDOC_TMPDIR/test_example.py:23: skipping this test
1 failed, 1 passed, 1 skipped, 1 xfailed, 1 xpassed, 1 error in 0.12 seconds

Using ``p`` lists the passing tests, whilst ``P`` adds an extra section "PASSES" with those tests that passed but had
@@ -279,6 +281,7 @@ captured output:

$ pytest -rpP
=========================== test session starts ============================
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y
cachedir: $PYTHON_PREFIX/.pytest_cache
rootdir: $REGENDOC_TMPDIR, inifile:
collected 6 items

@@ -25,6 +25,7 @@ Running pytest now produces this output:

$ pytest test_show_warnings.py
=========================== test session starts ============================
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y
cachedir: $PYTHON_PREFIX/.pytest_cache
rootdir: $REGENDOC_TMPDIR, inifile:
collected 1 item

@@ -413,6 +413,7 @@ additionally it is possible to copy examples for an example folder before running:

$ pytest
=========================== test session starts ============================
platform linux -- Python 3.x.y, pytest-4.x.y, py-1.x.y, pluggy-0.x.y
cachedir: $PYTHON_PREFIX/.pytest_cache
rootdir: $REGENDOC_TMPDIR, inifile: pytest.ini
collected 2 items

@@ -93,7 +93,15 @@ Remarks:

* It is possible for setup/teardown pairs to be invoked multiple times
  per testing process.

* teardown functions are not called if the corresponding setup function existed
  and failed/was skipped.

* Prior to pytest-4.2, xunit-style functions did not obey the scope rules of fixtures, so
  it was possible, for example, for a ``setup_method`` to be called before a
  session-scoped autouse fixture.

  Now the xunit-style functions are integrated with the fixture mechanism and obey the proper
  scope rules of fixtures involved in the call.

.. _`unittest.py module`: http://docs.python.org/library/unittest.html
@@ -24,8 +24,6 @@ from _pytest.compat import _PY3
from _pytest.compat import PY35
from _pytest.compat import safe_str

builtin_repr = repr

if _PY3:
    from traceback import format_exception_only
else:
@@ -948,8 +946,6 @@ class ReprEntryNative(TerminalRepr):


class ReprEntry(TerminalRepr):
    localssep = "_ "

    def __init__(self, lines, reprfuncargs, reprlocals, filelocrepr, style):
        self.lines = lines
        self.reprfuncargs = reprfuncargs
@@ -971,7 +967,6 @@ class ReprEntry(TerminalRepr):
            red = line.startswith("E   ")
            tw.line(line, bold=True, red=red)
        if self.reprlocals:
            # tw.sep(self.localssep, "Locals")
            tw.line("")
            self.reprlocals.toterminal(tw)
        if self.reprfileloc:
@@ -52,19 +52,6 @@ else:
        return ast.Call(a, b, c, None, None)


def ast_Call_helper(func_name, *args, **kwargs):
    """
    func_name: str
    args: Iterable[ast.expr]
    kwargs: Dict[str,ast.expr]
    """
    return ast.Call(
        ast.Name(func_name, ast.Load()),
        list(args),
        [ast.keyword(key, val) for key, val in kwargs.items()],
    )


class AssertionRewritingHook(object):
    """PEP302 Import hook which rewrites asserts."""
@@ -118,6 +118,10 @@ class pytestPDB(object):

            do_c = do_cont = do_continue

            def set_quit(self):
                super(_PdbWrapper, self).set_quit()
                outcomes.exit("Quitting debugger")

            def setup(self, f, tb):
                """Suspend on setup().

@@ -210,8 +214,7 @@ def _enter_pdb(node, excinfo, rep):
    tw.sep(">", "entering PDB")
    tb = _postmortem_traceback(excinfo)
    rep._pdbshown = True
    if post_mortem(tb):
        outcomes.exit("Quitting debugger")
    post_mortem(tb)
    return rep


@@ -242,4 +245,5 @@ def post_mortem(t):
    p = Pdb()
    p.reset()
    p.interaction(None, t)
    return p.quitting
    if p.quitting:
        outcomes.exit("Quitting debugger")
@@ -660,12 +660,6 @@ class SubRequest(FixtureRequest):
        self._fixturedef.addfinalizer(finalizer)


class ScopeMismatchError(Exception):
    """ A fixture function tries to use a different fixture function
    which has a lower scope (e.g. a Session one calls a function one)
    """


scopes = "session package module class function".split()
scopenum_function = scopes.index("function")
@@ -481,19 +481,22 @@ def pytest_report_collectionfinish(config, startdir, items):


@hookspec(firstresult=True)
def pytest_report_teststatus(report):
def pytest_report_teststatus(report, config):
    """ return result-category, shortletter and verbose word for reporting.

    :param _pytest.config.Config config: pytest config object

    Stops at first non-None result, see :ref:`firstresult` """


def pytest_terminal_summary(terminalreporter, exitstatus):
def pytest_terminal_summary(terminalreporter, exitstatus, config):
    """Add a section to terminal summary reporting.

    :param _pytest.terminal.TerminalReporter terminalreporter: the internal terminal reporter object
    :param int exitstatus: the exit status that will be reported back to the OS
    :param _pytest.config.Config config: pytest config object

    .. versionadded:: 3.5
    .. versionadded:: 4.2
       The ``config`` parameter.
    """
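
# Sketch of a plugin using the extended signature (illustrative conftest.py,
# not part of this patch):
#
#     def pytest_terminal_summary(terminalreporter, exitstatus, config):
#         if config.getoption("verbose") > 0:
#             terminalreporter.section("session info")
#             terminalreporter.line("exit status: %d" % exitstatus)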
@@ -19,6 +19,7 @@ import sys
import time

import py
import six

import pytest
from _pytest import nodes
@@ -27,10 +28,6 @@ from _pytest.config import filename_arg

# Python 2.X and 3.X compatibility
if sys.version_info[0] < 3:
    from codecs import open
else:
    unichr = chr
    unicode = str
    long = int


class Junit(py.xml.Namespace):
@@ -45,12 +42,12 @@ class Junit(py.xml.Namespace):
_legal_chars = (0x09, 0x0A, 0x0D)
_legal_ranges = ((0x20, 0x7E), (0x80, 0xD7FF), (0xE000, 0xFFFD), (0x10000, 0x10FFFF))
_legal_xml_re = [
    unicode("%s-%s") % (unichr(low), unichr(high))
    u"%s-%s" % (six.unichr(low), six.unichr(high))
    for (low, high) in _legal_ranges
    if low < sys.maxunicode
]
_legal_xml_re = [unichr(x) for x in _legal_chars] + _legal_xml_re
illegal_xml_re = re.compile(unicode("[^%s]") % unicode("").join(_legal_xml_re))
_legal_xml_re = [six.unichr(x) for x in _legal_chars] + _legal_xml_re
illegal_xml_re = re.compile(u"[^%s]" % u"".join(_legal_xml_re))
del _legal_chars
del _legal_ranges
del _legal_xml_re
@@ -62,19 +59,41 @@ def bin_xml_escape(arg):
    def repl(matchobj):
        i = ord(matchobj.group())
        if i <= 0xFF:
            return unicode("#x%02X") % i
            return u"#x%02X" % i
        else:
            return unicode("#x%04X") % i
            return u"#x%04X" % i

    return py.xml.raw(illegal_xml_re.sub(repl, py.xml.escape(arg)))


def merge_family(left, right):
    result = {}
    for kl, vl in left.items():
        for kr, vr in right.items():
            if not isinstance(vl, list):
                raise TypeError(type(vl))
            result[kl] = vl + vr
    left.update(result)


families = {}
families["_base"] = {"testcase": ["classname", "name"]}
families["_base_legacy"] = {"testcase": ["file", "line", "url"]}

# xUnit 1.x inherits legacy attributes
families["xunit1"] = families["_base"].copy()
merge_family(families["xunit1"], families["_base_legacy"])

# xUnit 2.x uses strict base attributes
families["xunit2"] = families["_base"]
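
# Resulting attribute tables, for illustration (not part of the patch):
#   families["xunit1"]["testcase"] == ["classname", "name", "file", "line", "url"]
#   families["xunit2"]["testcase"] == ["classname", "name"]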


class _NodeReporter(object):
    def __init__(self, nodeid, xml):
        self.id = nodeid
        self.xml = xml
        self.add_stats = self.xml.add_stats
        self.family = self.xml.family
        self.duration = 0
        self.properties = []
        self.nodes = []
@@ -122,8 +141,20 @@ class _NodeReporter(object):
        self.attrs = attrs
        self.attrs.update(existing_attrs)  # restore any user-defined attributes

        # Preserve legacy testcase behavior
        if self.family == "xunit1":
            return

        # Filter out attributes not permitted by this test family.
        # Including custom attributes because they are not valid here.
        temp_attrs = {}
        for key in self.attrs.keys():
            if key in families[self.family]["testcase"]:
                temp_attrs[key] = self.attrs[key]
        self.attrs = temp_attrs

    def to_xml(self):
        testcase = Junit.testcase(time=self.duration, **self.attrs)
        testcase = Junit.testcase(time="%.3f" % self.duration, **self.attrs)
        testcase.append(self.make_properties_node())
        for node in self.nodes:
            testcase.append(node)
@@ -194,7 +225,7 @@ class _NodeReporter(object):
        else:
            if hasattr(report.longrepr, "reprcrash"):
                message = report.longrepr.reprcrash.message
            elif isinstance(report.longrepr, (unicode, str)):
            elif isinstance(report.longrepr, six.string_types):
                message = report.longrepr
            else:
                message = str(report.longrepr)
@@ -272,16 +303,26 @@ def record_xml_attribute(request):
    from _pytest.warning_types import PytestWarning

    request.node.warn(PytestWarning("record_xml_attribute is an experimental feature"))

    # Declare noop
    def add_attr_noop(name, value):
        pass

    attr_func = add_attr_noop
    xml = getattr(request.config, "_xml", None)
    if xml is not None:
    if xml is not None and xml.family != "xunit1":
        request.node.warn(
            PytestWarning(
                "record_xml_attribute is incompatible with junit_family: "
                "%s (use: legacy|xunit1)" % xml.family
            )
        )
    elif xml is not None:
        node_reporter = xml.node_reporter(request.node.nodeid)
        return node_reporter.add_attribute
    else:
        attr_func = node_reporter.add_attribute

    def add_attr_noop(name, value):
        pass

    return add_attr_noop
    return attr_func


def pytest_addoption(parser):
@@ -318,6 +359,11 @@ def pytest_addoption(parser):
        "Duration time to report: one of total|call",
        default="total",
    )  # choices=['total', 'call'])
    parser.addini(
        "junit_family",
        "Emit XML for schema: one of legacy|xunit1|xunit2",
        default="xunit1",
    )


def pytest_configure(config):
@@ -330,6 +376,7 @@ def pytest_configure(config):
        config.getini("junit_suite_name"),
        config.getini("junit_logging"),
        config.getini("junit_duration_report"),
        config.getini("junit_family"),
    )
    config.pluginmanager.register(config._xml)


@@ -364,6 +411,7 @@ class LogXML(object):
        suite_name="pytest",
        logging="no",
        report_duration="total",
        family="xunit1",
    ):
        logfile = os.path.expanduser(os.path.expandvars(logfile))
        self.logfile = os.path.normpath(os.path.abspath(logfile))
@@ -371,6 +419,7 @@ class LogXML(object):
        self.suite_name = suite_name
        self.logging = logging
        self.report_duration = report_duration
        self.family = family
        self.stats = dict.fromkeys(["error", "passed", "failure", "skipped"], 0)
        self.node_reporters = {}  # nodeid -> _NodeReporter
        self.node_reporters_ordered = []
@@ -379,6 +428,10 @@ class LogXML(object):
        self.open_reports = []
        self.cnt_double_fail_tests = 0

        # Replaces convenience family with real family
        if self.family == "legacy":
            self.family = "xunit1"

    def finalize(self, report):
        nodeid = getattr(report, "nodeid", report)
        # local hack to handle xdist report order
@@ -548,7 +601,7 @@ class LogXML(object):
            name=self.suite_name,
            errors=self.stats["error"],
            failures=self.stats["failure"],
            skips=self.stats["skipped"],
            skipped=self.stats["skipped"],
            tests=numtests,
            time="%.3f" % suite_time_delta,
        ).unicode(indent=0)
@@ -49,13 +49,13 @@ class Failed(OutcomeException):
    __module__ = "builtins"


class Exit(SystemExit):
class Exit(Exception):
    """ raised for immediate program exits (no tracebacks/summaries)"""

    def __init__(self, msg="unknown reason", returncode=None):
        self.msg = msg
        self.returncode = returncode
        SystemExit.__init__(self, msg)
        super(Exit, self).__init__(msg)


# exposed helper methods
@@ -63,7 +63,7 @@ class Exit(SystemExit):

def exit(msg, returncode=None):
    """
    Exit testing process as if SystemExit was triggered.
    Exit testing process.

    :param str msg: message to display upon exit.
    :param int returncode: return code to be used when exiting pytest.
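
# Usage sketch (illustrative): pytest.exit() raises Exit, now a plain Exception
# subclass instead of SystemExit, so handlers that catch SystemExit no longer
# swallow it:
#
#     import pytest
#
#     def test_abort_everything():
#         pytest.exit("stopping the session", returncode=3)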
@ -9,6 +9,7 @@ import inspect
|
|||
import os
|
||||
import sys
|
||||
import warnings
|
||||
from functools import partial
|
||||
from textwrap import dedent
|
||||
|
||||
import py
|
||||
|
@ -283,9 +284,6 @@ class PyobjMixin(PyobjContext):
|
|||
s = ".".join(parts)
|
||||
return s.replace(".[", "[")
|
||||
|
||||
def _getfslineno(self):
|
||||
return getfslineno(self.obj)
|
||||
|
||||
def reportinfo(self):
|
||||
# XXX caching?
|
||||
obj = self.obj
|
||||
|
@ -435,9 +433,66 @@ class Module(nodes.File, PyCollector):
         return self._importtestmodule()
 
     def collect(self):
+        self._inject_setup_module_fixture()
+        self._inject_setup_function_fixture()
         self.session._fixturemanager.parsefactories(self)
         return super(Module, self).collect()
 
+    def _inject_setup_module_fixture(self):
+        """Injects a hidden autouse, module scoped fixture into the collected module object
+        that invokes setUpModule/tearDownModule if either or both are available.
+
+        Using a fixture to invoke this methods ensures we play nicely and unsurprisingly with
+        other fixtures (#517).
+        """
+        setup_module = _get_non_fixture_func(self.obj, "setUpModule")
+        if setup_module is None:
+            setup_module = _get_non_fixture_func(self.obj, "setup_module")
+
+        teardown_module = _get_non_fixture_func(self.obj, "tearDownModule")
+        if teardown_module is None:
+            teardown_module = _get_non_fixture_func(self.obj, "teardown_module")
+
+        if setup_module is None and teardown_module is None:
+            return
+
+        @fixtures.fixture(autouse=True, scope="module")
+        def xunit_setup_module_fixture(request):
+            if setup_module is not None:
+                _call_with_optional_argument(setup_module, request.module)
+            yield
+            if teardown_module is not None:
+                _call_with_optional_argument(teardown_module, request.module)
+
+        self.obj.__pytest_setup_module = xunit_setup_module_fixture
+
+    def _inject_setup_function_fixture(self):
+        """Injects a hidden autouse, function scoped fixture into the collected module object
+        that invokes setup_function/teardown_function if either or both are available.
+
+        Using a fixture to invoke this methods ensures we play nicely and unsurprisingly with
+        other fixtures (#517).
+        """
+        setup_function = _get_non_fixture_func(self.obj, "setup_function")
+        teardown_function = _get_non_fixture_func(self.obj, "teardown_function")
+        if setup_function is None and teardown_function is None:
+            return
+
+        @fixtures.fixture(autouse=True, scope="function")
+        def xunit_setup_function_fixture(request):
+            if request.instance is not None:
+                # in this case we are bound to an instance, so we need to let
+                # setup_method handle this
+                yield
+                return
+            if setup_function is not None:
+                _call_with_optional_argument(setup_function, request.function)
+            yield
+            if teardown_function is not None:
+                _call_with_optional_argument(teardown_function, request.function)
+
+        self.obj.__pytest_setup_function = xunit_setup_function_fixture
+
     def _importtestmodule(self):
         # we assume we are only called once per module
         importmode = self.config.getoption("--import-mode")
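
Because the xunit functions now run from a hidden autouse fixture, they obey normal fixture ordering: a session-scoped autouse fixture is set up before ``setup_module`` and torn down after ``teardown_module`` (#517, #3094). A minimal sketch of a test module relying on that ordering (names invented for illustration)::

    import pytest

    READY = False

    @pytest.fixture(scope="session", autouse=True)
    def prepare():
        global READY
        READY = True
        yield
        READY = False

    def setup_module():
        # before this change, setup_module could run first and see READY == False
        assert READY
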
@ -488,19 +543,6 @@ class Module(nodes.File, PyCollector):
         self.config.pluginmanager.consider_module(mod)
         return mod
 
-    def setup(self):
-        setup_module = _get_xunit_setup_teardown(self.obj, "setUpModule")
-        if setup_module is None:
-            setup_module = _get_xunit_setup_teardown(self.obj, "setup_module")
-        if setup_module is not None:
-            setup_module()
-
-        teardown_module = _get_xunit_setup_teardown(self.obj, "tearDownModule")
-        if teardown_module is None:
-            teardown_module = _get_xunit_setup_teardown(self.obj, "teardown_module")
-        if teardown_module is not None:
-            self.addfinalizer(teardown_module)
-
 
 class Package(Module):
     def __init__(self, fspath, parent=None, config=None, session=None, nodeid=None):
@ -513,6 +555,22 @@ class Package(Module):
         self._norecursepatterns = session._norecursepatterns
         self.fspath = fspath
 
+    def setup(self):
+        # not using fixtures to call setup_module here because autouse fixtures
+        # from packages are not called automatically (#4085)
+        setup_module = _get_non_fixture_func(self.obj, "setUpModule")
+        if setup_module is None:
+            setup_module = _get_non_fixture_func(self.obj, "setup_module")
+        if setup_module is not None:
+            _call_with_optional_argument(setup_module, self.obj)
+
+        teardown_module = _get_non_fixture_func(self.obj, "tearDownModule")
+        if teardown_module is None:
+            teardown_module = _get_non_fixture_func(self.obj, "teardown_module")
+        if teardown_module is not None:
+            func = partial(_call_with_optional_argument, teardown_module, self.obj)
+            self.addfinalizer(func)
+
     def _recurse(self, dirpath):
         if dirpath.basename == "__pycache__":
             return False
@ -599,8 +657,9 @@ def _get_xunit_setup_teardown(holder, attr_name, param_obj=None):
     when the callable is called without arguments, defaults to the ``holder`` object.
     Return ``None`` if a suitable callable is not found.
     """
+    # TODO: only needed because of Package!
     param_obj = param_obj if param_obj is not None else holder
-    result = _get_xunit_func(holder, attr_name)
+    result = _get_non_fixture_func(holder, attr_name)
     if result is not None:
         arg_count = result.__code__.co_argcount
         if inspect.ismethod(result):
@ -611,7 +670,19 @@ def _get_xunit_setup_teardown(holder, attr_name, param_obj=None):
     return result
 
 
-def _get_xunit_func(obj, name):
+def _call_with_optional_argument(func, arg):
+    """Call the given function with the given argument if func accepts one argument, otherwise
+    calls func without arguments"""
+    arg_count = func.__code__.co_argcount
+    if inspect.ismethod(func):
+        arg_count -= 1
+    if arg_count:
+        func(arg)
+    else:
+        func()
+
+
+def _get_non_fixture_func(obj, name):
     """Return the attribute from the given object to be used as a setup/teardown
     xunit-style function, but only if not marked as a fixture to
     avoid calling it twice.
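
``_call_with_optional_argument`` keeps both historical xunit signatures working by inspecting the function's declared argument count. In user code either form below is accepted (a sketch, not taken from this diff)::

    # either signature works; pick one per module:

    def setup_module(module):
        # called as setup_module(request.module)
        print("preparing", module.__name__)

    def teardown_module():
        # a zero-argument variant is simply called with no arguments
        print("cleaning up")
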
@ -643,18 +714,60 @@ class Class(PyCollector):
             )
         )
         return []
 
+        self._inject_setup_class_fixture()
+        self._inject_setup_method_fixture()
+
         return [Instance(name="()", parent=self)]
 
-    def setup(self):
-        setup_class = _get_xunit_func(self.obj, "setup_class")
-        if setup_class is not None:
-            setup_class = getimfunc(setup_class)
-            setup_class(self.obj)
-
-        fin_class = getattr(self.obj, "teardown_class", None)
-        if fin_class is not None:
-            fin_class = getimfunc(fin_class)
-            self.addfinalizer(lambda: fin_class(self.obj))
+    def _inject_setup_class_fixture(self):
+        """Injects a hidden autouse, class scoped fixture into the collected class object
+        that invokes setup_class/teardown_class if either or both are available.
+
+        Using a fixture to invoke this methods ensures we play nicely and unsurprisingly with
+        other fixtures (#517).
+        """
+        setup_class = _get_non_fixture_func(self.obj, "setup_class")
+        teardown_class = getattr(self.obj, "teardown_class", None)
+        if setup_class is None and teardown_class is None:
+            return
+
+        @fixtures.fixture(autouse=True, scope="class")
+        def xunit_setup_class_fixture(cls):
+            if setup_class is not None:
+                func = getimfunc(setup_class)
+                _call_with_optional_argument(func, self.obj)
+            yield
+            if teardown_class is not None:
+                func = getimfunc(teardown_class)
+                _call_with_optional_argument(func, self.obj)
+
+        self.obj.__pytest_setup_class = xunit_setup_class_fixture
+
+    def _inject_setup_method_fixture(self):
+        """Injects a hidden autouse, function scoped fixture into the collected class object
+        that invokes setup_method/teardown_method if either or both are available.
+
+        Using a fixture to invoke this methods ensures we play nicely and unsurprisingly with
+        other fixtures (#517).
+        """
+        setup_method = _get_non_fixture_func(self.obj, "setup_method")
+        teardown_method = getattr(self.obj, "teardown_method", None)
+        if setup_method is None and teardown_method is None:
+            return
+
+        @fixtures.fixture(autouse=True, scope="function")
+        def xunit_setup_method_fixture(self, request):
+            method = request.function
+            if setup_method is not None:
+                func = getattr(self, "setup_method")
+                _call_with_optional_argument(func, method)
+            yield
+            if teardown_method is not None:
+                func = getattr(self, "teardown_method")
+                _call_with_optional_argument(func, method)
+
+        self.obj.__pytest_setup_method = xunit_setup_method_fixture
 
 
 class Instance(PyCollector):
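
The same pattern applied at class level: ``setup_class``/``teardown_class`` become a class-scoped fixture, and ``setup_method``/``teardown_method`` a function-scoped one, looked up via ``getattr`` on the instance so subclass overrides are honoured. An illustrative class using the supported hooks (example invented)::

    class TestDatabase(object):
        @classmethod
        def setup_class(cls):
            cls.conn = {"open": True}   # shared state, runs once per class

        def setup_method(self, method):
            self.records = []           # per-test state, receives the test method

        def test_insert(self):
            assert self.conn["open"]
            self.records.append(1)
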
@ -681,29 +794,9 @@ class FunctionMixin(PyobjMixin):
 
     def setup(self):
         """ perform setup for this test function. """
-        if hasattr(self, "_preservedparent"):
-            obj = self._preservedparent
-        elif isinstance(self.parent, Instance):
-            obj = self.parent.newinstance()
+        if isinstance(self.parent, Instance):
+            self.parent.newinstance()
             self.obj = self._getobj()
-        else:
-            obj = self.parent.obj
-        if inspect.ismethod(self.obj):
-            setup_name = "setup_method"
-            teardown_name = "teardown_method"
-        else:
-            setup_name = "setup_function"
-            teardown_name = "teardown_function"
-        setup_func_or_method = _get_xunit_setup_teardown(
-            obj, setup_name, param_obj=self.obj
-        )
-        if setup_func_or_method is not None:
-            setup_func_or_method()
-        teardown_func_or_method = _get_xunit_setup_teardown(
-            obj, teardown_name, param_obj=self.obj
-        )
-        if teardown_func_or_method is not None:
-            self.addfinalizer(teardown_func_or_method)
 
     def _prunetraceback(self, excinfo):
         if hasattr(self, "_obj") and not self.config.option.fulltrace:
@ -1252,7 +1345,6 @@ class Function(FunctionMixin, nodes.Item, fixtures.FuncargnamesCompatAttr):
     Python test function.
     """
 
-    _genid = None
     # disable since functions handle it themselves
     _ALLOW_MARKERS = False
 
@ -1327,7 +1419,6 @@ class Function(FunctionMixin, nodes.Item, fixtures.FuncargnamesCompatAttr):
         if hasattr(self, "callspec"):
             callspec = self.callspec
             assert not callspec.funcargs
-            self._genid = callspec.id
             if hasattr(callspec, "param"):
                 self.param = callspec.param
         self._request = fixtures.FixtureRequest(self)
@ -160,16 +160,6 @@ class TestReport(BaseReport):
         )
 
 
-class TeardownErrorReport(BaseReport):
-    outcome = "failed"
-    when = "teardown"
-
-    def __init__(self, longrepr, **extra):
-        self.longrepr = longrepr
-        self.sections = []
-        self.__dict__.update(extra)
-
-
 class CollectReport(BaseReport):
     when = "collect"
 
@ -47,30 +47,6 @@ def pytest_unconfigure(config):
     config.pluginmanager.unregister(resultlog)
 
 
-def generic_path(item):
-    chain = item.listchain()
-    gpath = [chain[0].name]
-    fspath = chain[0].fspath
-    fspart = False
-    for node in chain[1:]:
-        newfspath = node.fspath
-        if newfspath == fspath:
-            if fspart:
-                gpath.append(":")
-                fspart = False
-            else:
-                gpath.append(".")
-        else:
-            gpath.append("/")
-            fspart = True
-        name = node.name
-        if name[0] in "([":
-            gpath.pop()
-        gpath.append(name)
-        fspath = newfspath
-    return "".join(gpath)
-
-
 class ResultLog(object):
     def __init__(self, config, logfile):
         self.config = config
@ -90,7 +66,9 @@ class ResultLog(object):
     def pytest_runtest_logreport(self, report):
         if report.when != "call" and report.passed:
             return
-        res = self.config.hook.pytest_report_teststatus(report=report)
+        res = self.config.hook.pytest_report_teststatus(
+            report=report, config=self.config
+        )
         code = res[1]
         if code == "x":
             longrepr = str(report.longrepr)
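
Third-party plugins can opt into the new parameter simply by declaring it; pluggy passes it when the hook accepts it (#4688). A hedged sketch of a conftest hook using ``config`` (the condition and strings are invented)::

    def pytest_report_teststatus(report, config):
        if report.when == "call" and report.failed and config.option.verbose:
            return "failed", "F", "FAILED (verbose)"
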
@ -285,7 +285,7 @@ def shower(stat):
 
 def _get_report_str(terminalreporter, report):
     _category, _short, verbose = terminalreporter.config.hook.pytest_report_teststatus(
-        report=report
+        report=report, config=terminalreporter.config
     )
     return verbose
 
@ -363,7 +363,7 @@ class TerminalReporter(object):
 
     def pytest_runtest_logreport(self, report):
         rep = report
-        res = self.config.hook.pytest_report_teststatus(report=rep)
+        res = self.config.hook.pytest_report_teststatus(report=rep, config=self.config)
         category, letter, word = res
         if isinstance(word, tuple):
             word, markup = word
@ -500,6 +500,7 @@ class TerminalReporter(object):
         errors = len(self.stats.get("error", []))
         skipped = len(self.stats.get("skipped", []))
         deselected = len(self.stats.get("deselected", []))
+        selected = self._numcollected - errors - skipped - deselected
         if final:
             line = "collected "
         else:
@ -513,6 +514,8 @@ class TerminalReporter(object):
             line += " / %d deselected" % deselected
         if skipped:
             line += " / %d skipped" % skipped
+        if self._numcollected > selected > 0:
+            line += " / %d selected" % selected
         if self.isatty:
             self.rewrite(line, bold=True, erase=True)
             if final:
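
With ``-k``/``-m`` filtering active, the collection line now also reports how many tests remain selected (#4660), e.g.::

    collected 4 items / 2 deselected / 2 selected
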
@ -633,7 +636,7 @@ class TerminalReporter(object):
         )
         if exitstatus in summary_exit_codes:
             self.config.hook.pytest_terminal_summary(
-                terminalreporter=self, exitstatus=exitstatus
+                terminalreporter=self, exitstatus=exitstatus, config=self.config
             )
         if exitstatus == EXIT_INTERRUPTED:
             self._report_keyboardinterrupt()
@ -649,6 +652,7 @@ class TerminalReporter(object):
             self.summary_passes()
         # Display any extra warnings from teardown here (if any).
         self.summary_warnings()
+        self.summary_deprecated_python()
 
     def pytest_keyboard_interrupt(self, excinfo):
         self._keyboardinterrupt_memo = excinfo.getrepr(funcargs=True)
@ -770,6 +774,20 @@ class TerminalReporter(object):
             self.write_sep("_", msg)
             self._outrep_summary(rep)
 
+    def summary_deprecated_python(self):
+        if sys.version_info[:2] <= (3, 4) and self.verbosity >= 0:
+            self.write_sep("=", "deprecated python version", yellow=True, bold=False)
+            using_version = ".".join(str(x) for x in sys.version_info[:3])
+            self.line(
+                "You are using Python {}, which will no longer be supported in pytest 5.0".format(
+                    using_version
+                ),
+                yellow=True,
+                bold=False,
+            )
+            self.line("For more information, please read:")
+            self.line(" https://docs.pytest.org/en/latest/py27-py34-deprecation.html")
+
     def print_teardown_sections(self, rep):
         showcapture = self.config.option.showcapture
         if showcapture == "no":
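
On Python 2.7 or 3.4 the end of the session therefore gains a block roughly like the following (separator width and version string vary by terminal and interpreter)::

    ===================== deprecated python version =====================
    You are using Python 3.4.9, which will no longer be supported in pytest 5.0
    For more information, please read:
     https://docs.pytest.org/en/latest/py27-py34-deprecation.html
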
@ -843,15 +861,6 @@ class TerminalReporter(object):
             self.write_line(msg, **markup)
 
 
-def repr_pythonversion(v=None):
-    if v is None:
-        v = sys.version_info
-    try:
-        return "%s.%s.%s-%s-%s" % v
-    except (TypeError, ValueError):
-        return str(v)
-
-
 def build_summary_stats_line(stats):
     keys = ("failed passed skipped deselected xfailed xpassed warnings error").split()
     unknown_key_seen = False
@ -63,6 +63,7 @@ class TempPathFactory(object):
         if self._given_basetemp is not None:
             basetemp = self._given_basetemp
             ensure_reset_dir(basetemp)
+            basetemp = basetemp.resolve()
         else:
             from_env = os.environ.get("PYTEST_DEBUG_TEMPROOT")
             temproot = Path(from_env or tempfile.gettempdir()).resolve()
@ -167,7 +168,7 @@ def _mk_tmp(request, factory):
 
 
 @pytest.fixture
-def tmpdir(request, tmpdir_factory):
+def tmpdir(tmp_path):
     """Return a temporary directory path object
     which is unique to each test function invocation,
     created as a sub directory of the base temporary
@ -176,7 +177,7 @@ def tmpdir(request, tmpdir_factory):
 
     .. _`py.path.local`: https://py.readthedocs.io/en/latest/path.html
     """
-    return _mk_tmp(request, tmpdir_factory)
+    return py.path.local(tmp_path)
 
 
 @pytest.fixture
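
Since ``tmpdir`` is now just a ``py.path.local`` wrapper over ``tmp_path``, both fixtures point at the same resolved directory (#4653); the new ``test_tmpdir_equals_tmp_path`` at the end of this diff exercises exactly this. In user terms::

    def test_paths_agree(tmpdir, tmp_path):
        assert str(tmpdir) == str(tmp_path)
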
@ -7,6 +7,7 @@ import sys
 import traceback
 
 import _pytest._code
+import pytest
 from _pytest.compat import getimfunc
 from _pytest.config import hookimpl
 from _pytest.outcomes import fail
@ -32,24 +33,18 @@ class UnitTestCase(Class):
     # to declare that our children do not support funcargs
     nofuncargs = True
 
-    def setup(self):
-        cls = self.obj
-        if getattr(cls, "__unittest_skip__", False):
-            return  # skipped
-        setup = getattr(cls, "setUpClass", None)
-        if setup is not None:
-            setup()
-        teardown = getattr(cls, "tearDownClass", None)
-        if teardown is not None:
-            self.addfinalizer(teardown)
-        super(UnitTestCase, self).setup()
-
     def collect(self):
         from unittest import TestLoader
 
         cls = self.obj
         if not getattr(cls, "__test__", True):
             return
+
+        skipped = getattr(cls, "__unittest_skip__", False)
+        if not skipped:
+            self._inject_setup_teardown_fixtures(cls)
+            self._inject_setup_class_fixture()
+
         self.session._fixturemanager.parsefactories(self, unittest=True)
         loader = TestLoader()
         foundsomething = False
@ -68,6 +63,44 @@ class UnitTestCase(Class):
         if ut is None or runtest != ut.TestCase.runTest:
             yield TestCaseFunction("runTest", parent=self)
 
+    def _inject_setup_teardown_fixtures(self, cls):
+        """Injects a hidden auto-use fixture to invoke setUpClass/setup_method and corresponding
+        teardown functions (#517)"""
+        class_fixture = _make_xunit_fixture(
+            cls, "setUpClass", "tearDownClass", scope="class", pass_self=False
+        )
+        if class_fixture:
+            cls.__pytest_class_setup = class_fixture
+
+        method_fixture = _make_xunit_fixture(
+            cls, "setup_method", "teardown_method", scope="function", pass_self=True
+        )
+        if method_fixture:
+            cls.__pytest_method_setup = method_fixture
+
+
+def _make_xunit_fixture(obj, setup_name, teardown_name, scope, pass_self):
+    setup = getattr(obj, setup_name, None)
+    teardown = getattr(obj, teardown_name, None)
+    if setup is None and teardown is None:
+        return None
+
+    @pytest.fixture(scope=scope, autouse=True)
+    def fixture(self, request):
+        if setup is not None:
+            if pass_self:
+                setup(self, request.function)
+            else:
+                setup()
+        yield
+        if teardown is not None:
+            if pass_self:
+                teardown(self, request.function)
+            else:
+                teardown()
+
+    return fixture
+
+
 class TestCaseFunction(Function):
     nofuncargs = True
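
For a ``unittest.TestCase`` subclass this means ``setUpClass``/``tearDownClass`` now run from a class-scoped autouse fixture rather than from ``UnitTestCase.setup``, so they order correctly against other fixtures. Illustrative (standard unittest API, example invented)::

    import unittest

    class MyTest(unittest.TestCase):
        @classmethod
        def setUpClass(cls):
            cls.resource = "ready"   # invoked via the injected class fixture

        def test_uses_resource(self):
            self.assertEqual(self.resource, "ready")
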
@ -77,9 +110,6 @@ class TestCaseFunction(Function):
     def setup(self):
         self._testcase = self.parent.obj(self.name)
         self._fix_unittest_skip_decorator()
         self._obj = getattr(self._testcase, self.name)
-        if hasattr(self._testcase, "setup_method"):
-            self._testcase.setup_method(self._obj)
         if hasattr(self, "_request"):
             self._request._fillfixtures()
 
@ -97,11 +127,7 @@ class TestCaseFunction(Function):
         setattr(self._testcase, "__name__", self.name)
 
     def teardown(self):
-        if hasattr(self._testcase, "teardown_method"):
-            self._testcase.teardown_method(self._obj)
+        # Allow garbage collection on TestCase instance attributes.
         self._testcase = None
         self._obj = None
 
     def startTest(self, testcase):
         pass
@ -854,7 +854,9 @@ class TestDurations(object):
         result = testdir.runpytest("--durations=2")
         assert result.ret == 0
         lines = result.stdout.get_lines_after("*slowest*durations*")
-        assert "4 passed" in lines[2]
+        # account for the "deprecated python version" header
+        index = 2 if sys.version_info[:2] > (3, 4) else 6
+        assert "4 passed" in lines[index]
 
     def test_calls_showall(self, testdir):
         testdir.makepyfile(self.source)
@ -3,6 +3,7 @@ from __future__ import division
 from __future__ import print_function
 
 import os
+import sys
 
 import pytest
 from _pytest.warnings import SHOW_PYTEST_WARNINGS_ARG
@ -219,3 +220,21 @@ def test_fixture_named_request(testdir):
             "*'request' is a reserved name for fixtures and will raise an error in future versions"
         ]
     )
+
+
+def test_python_deprecation(testdir):
+    result = testdir.runpytest()
+    python_ver = ".".join(str(x) for x in sys.version_info[:3])
+    msg = "You are using Python {}, which will no longer be supported in pytest 5.0".format(
+        python_ver
+    )
+    if sys.version_info[:2] <= (3, 4):
+        result.stdout.fnmatch_lines(
+            [
+                msg,
+                "For more information, please read:",
+                " https://docs.pytest.org/en/latest/py27-py34-deprecation.html",
+            ]
+        )
+    else:
+        assert msg not in result.stdout.str()
@ -240,9 +240,6 @@ class TestClass(object):
         assert result.ret == EXIT_NOTESTSCOLLECTED
 
 
-@pytest.mark.filterwarnings(
-    "ignore:usage of Generator.Function is deprecated, please use pytest.Function instead"
-)
 class TestFunction(object):
     def test_getmodulecollector(self, testdir):
         item = testdir.getitem("def test_func(): pass")
@ -739,7 +739,7 @@ class TestRequestBasic(object):
             def test_function(request, farg):
                 assert set(get_public_names(request.fixturenames)) == \
                        set(["tmpdir", "sarg", "arg1", "request", "farg",
-                            "tmpdir_factory"])
+                            "tmp_path", "tmp_path_factory"])
             """
         )
         reprec = testdir.inline_run()
@ -1226,6 +1226,45 @@ class TestFixtureUsages(object):
         values = reprec.getcalls("pytest_runtest_call")[0].item.module.values
         assert values == [1, 2, 10, 20]
 
+    def test_setup_functions_as_fixtures(self, testdir):
+        """Ensure setup_* methods obey fixture scope rules (#517, #3094)."""
+        testdir.makepyfile(
+            """
+            import pytest
+
+            DB_INITIALIZED = None
+
+            @pytest.yield_fixture(scope="session", autouse=True)
+            def db():
+                global DB_INITIALIZED
+                DB_INITIALIZED = True
+                yield
+                DB_INITIALIZED = False
+
+            def setup_module():
+                assert DB_INITIALIZED
+
+            def teardown_module():
+                assert DB_INITIALIZED
+
+            class TestClass(object):
+
+                def setup_method(self, method):
+                    assert DB_INITIALIZED
+
+                def teardown_method(self, method):
+                    assert DB_INITIALIZED
+
+                def test_printer_1(self):
+                    pass
+
+                def test_printer_2(self):
+                    pass
+            """
+        )
+        result = testdir.runpytest()
+        result.stdout.fnmatch_lines(["* 2 passed in *"])
+
 
 class TestFixtureManagerParseFactories(object):
     @pytest.fixture
@ -784,7 +784,6 @@ class TestMetafuncFunctional(object):
 
             @pytest.fixture
             def metafunc(request):
-                assert request._pyfuncitem._genid == "0"
                 return request.param
 
             def test_function(metafunc, pytestconfig):
@ -418,7 +418,7 @@ class TestLastFailed(object):
         result = testdir.runpytest("--lf")
         result.stdout.fnmatch_lines(
             [
-                "collected 4 items / 2 deselected",
+                "collected 4 items / 2 deselected / 2 selected",
                 "run-last-failure: rerun previous 2 failures",
                 "*2 failed, 2 deselected in*",
             ]
@ -107,7 +107,7 @@ class TestPython(object):
         result, dom = runandparse(testdir)
         assert result.ret
         node = dom.find_first_by_tag("testsuite")
-        node.assert_attr(name="pytest", errors=0, failures=1, skips=2, tests=5)
+        node.assert_attr(name="pytest", errors=0, failures=1, skipped=2, tests=5)
 
     def test_summing_simple_with_errors(self, testdir):
         testdir.makepyfile(
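
The rename follows the xunit2 schema referenced in the changelog entry for #3547: the ``<testsuite>`` element counts skipped tests in a ``skipped`` attribute instead of the legacy ``skips``. For the suite above the emitted element now looks roughly like::

    <testsuite name="pytest" errors="0" failures="1" skipped="2" tests="5">
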
@ -133,7 +133,7 @@ class TestPython(object):
         result, dom = runandparse(testdir)
         assert result.ret
         node = dom.find_first_by_tag("testsuite")
-        node.assert_attr(name="pytest", errors=1, failures=2, skips=1, tests=5)
+        node.assert_attr(name="pytest", errors=1, failures=2, skipped=1, tests=5)
 
     def test_timing_function(self, testdir):
         testdir.makepyfile(
@ -201,12 +201,7 @@ class TestPython(object):
         node = dom.find_first_by_tag("testsuite")
         node.assert_attr(errors=1, tests=1)
         tnode = node.find_first_by_tag("testcase")
-        tnode.assert_attr(
-            file="test_setup_error.py",
-            line="5",
-            classname="test_setup_error",
-            name="test_function",
-        )
+        tnode.assert_attr(classname="test_setup_error", name="test_function")
         fnode = tnode.find_first_by_tag("error")
         fnode.assert_attr(message="test setup failure")
         assert "ValueError" in fnode.toxml()
@ -228,12 +223,7 @@ class TestPython(object):
         assert result.ret
         node = dom.find_first_by_tag("testsuite")
         tnode = node.find_first_by_tag("testcase")
-        tnode.assert_attr(
-            file="test_teardown_error.py",
-            line="6",
-            classname="test_teardown_error",
-            name="test_function",
-        )
+        tnode.assert_attr(classname="test_teardown_error", name="test_function")
         fnode = tnode.find_first_by_tag("error")
         fnode.assert_attr(message="test teardown failure")
         assert "ValueError" in fnode.toxml()
@ -274,14 +264,9 @@ class TestPython(object):
         result, dom = runandparse(testdir)
         assert result.ret == 0
         node = dom.find_first_by_tag("testsuite")
-        node.assert_attr(skips=1)
+        node.assert_attr(skipped=1)
         tnode = node.find_first_by_tag("testcase")
-        tnode.assert_attr(
-            file="test_skip_contains_name_reason.py",
-            line="1",
-            classname="test_skip_contains_name_reason",
-            name="test_skip",
-        )
+        tnode.assert_attr(classname="test_skip_contains_name_reason", name="test_skip")
         snode = tnode.find_first_by_tag("skipped")
         snode.assert_attr(type="pytest.skip", message="hello23")
 
@ -297,13 +282,10 @@ class TestPython(object):
         result, dom = runandparse(testdir)
         assert result.ret == 0
         node = dom.find_first_by_tag("testsuite")
-        node.assert_attr(skips=1)
+        node.assert_attr(skipped=1)
         tnode = node.find_first_by_tag("testcase")
         tnode.assert_attr(
-            file="test_mark_skip_contains_name_reason.py",
-            line="1",
-            classname="test_mark_skip_contains_name_reason",
-            name="test_skip",
+            classname="test_mark_skip_contains_name_reason", name="test_skip"
         )
         snode = tnode.find_first_by_tag("skipped")
         snode.assert_attr(type="pytest.skip", message="hello24")
@ -321,13 +303,10 @@ class TestPython(object):
         result, dom = runandparse(testdir)
         assert result.ret == 0
         node = dom.find_first_by_tag("testsuite")
-        node.assert_attr(skips=1)
+        node.assert_attr(skipped=1)
         tnode = node.find_first_by_tag("testcase")
         tnode.assert_attr(
-            file="test_mark_skipif_contains_name_reason.py",
-            line="2",
-            classname="test_mark_skipif_contains_name_reason",
-            name="test_skip",
+            classname="test_mark_skipif_contains_name_reason", name="test_skip"
         )
         snode = tnode.find_first_by_tag("skipped")
         snode.assert_attr(type="pytest.skip", message="hello25")
@ -360,10 +339,7 @@ class TestPython(object):
         node.assert_attr(failures=1)
         tnode = node.find_first_by_tag("testcase")
         tnode.assert_attr(
-            file="test_classname_instance.py",
-            line="1",
-            classname="test_classname_instance.TestClass",
-            name="test_method",
+            classname="test_classname_instance.TestClass", name="test_method"
         )
 
     def test_classname_nested_dir(self, testdir):
@ -374,12 +350,7 @@ class TestPython(object):
         node = dom.find_first_by_tag("testsuite")
         node.assert_attr(failures=1)
         tnode = node.find_first_by_tag("testcase")
-        tnode.assert_attr(
-            file=os.path.join("sub", "test_hello.py"),
-            line="0",
-            classname="sub.test_hello",
-            name="test_func",
-        )
+        tnode.assert_attr(classname="sub.test_hello", name="test_func")
 
     def test_internal_error(self, testdir):
         testdir.makeconftest("def pytest_runtest_protocol(): 0 / 0")
@ -415,12 +386,7 @@ class TestPython(object):
         node = dom.find_first_by_tag("testsuite")
         node.assert_attr(failures=1, tests=1)
         tnode = node.find_first_by_tag("testcase")
-        tnode.assert_attr(
-            file="test_failure_function.py",
-            line="3",
-            classname="test_failure_function",
-            name="test_fail",
-        )
+        tnode.assert_attr(classname="test_failure_function", name="test_fail")
         fnode = tnode.find_first_by_tag("failure")
         fnode.assert_attr(message="ValueError: 42")
         assert "ValueError" in fnode.toxml()
@ -477,10 +443,7 @@ class TestPython(object):
 
         tnode = node.find_nth_by_tag("testcase", index)
         tnode.assert_attr(
-            file="test_failure_escape.py",
-            line="1",
-            classname="test_failure_escape",
-            name="test_func[%s]" % char,
+            classname="test_failure_escape", name="test_func[%s]" % char
         )
         sysout = tnode.find_first_by_tag("system-out")
         text = sysout.text
@ -501,18 +464,10 @@ class TestPython(object):
         node = dom.find_first_by_tag("testsuite")
         node.assert_attr(failures=1, tests=2)
         tnode = node.find_first_by_tag("testcase")
-        tnode.assert_attr(
-            file="test_junit_prefixing.py",
-            line="0",
-            classname="xyz.test_junit_prefixing",
-            name="test_func",
-        )
+        tnode.assert_attr(classname="xyz.test_junit_prefixing", name="test_func")
         tnode = node.find_nth_by_tag("testcase", 1)
         tnode.assert_attr(
-            file="test_junit_prefixing.py",
-            line="3",
-            classname="xyz.test_junit_prefixing.TestHello",
-            name="test_hello",
+            classname="xyz.test_junit_prefixing.TestHello", name="test_hello"
         )
 
     def test_xfailure_function(self, testdir):
@ -526,14 +481,9 @@ class TestPython(object):
         result, dom = runandparse(testdir)
         assert not result.ret
         node = dom.find_first_by_tag("testsuite")
-        node.assert_attr(skips=1, tests=1)
+        node.assert_attr(skipped=1, tests=1)
         tnode = node.find_first_by_tag("testcase")
-        tnode.assert_attr(
-            file="test_xfailure_function.py",
-            line="1",
-            classname="test_xfailure_function",
-            name="test_xfail",
-        )
+        tnode.assert_attr(classname="test_xfailure_function", name="test_xfail")
         fnode = tnode.find_first_by_tag("skipped")
         fnode.assert_attr(message="expected test failure")
         # assert "ValueError" in fnode.toxml()
@ -569,14 +519,9 @@ class TestPython(object):
         result, dom = runandparse(testdir)
         # assert result.ret
         node = dom.find_first_by_tag("testsuite")
-        node.assert_attr(skips=0, tests=1)
+        node.assert_attr(skipped=0, tests=1)
         tnode = node.find_first_by_tag("testcase")
-        tnode.assert_attr(
-            file="test_xfailure_xpass.py",
-            line="1",
-            classname="test_xfailure_xpass",
-            name="test_xpass",
-        )
+        tnode.assert_attr(classname="test_xfailure_xpass", name="test_xpass")
 
     def test_xfailure_xpass_strict(self, testdir):
         testdir.makepyfile(
@ -590,14 +535,9 @@ class TestPython(object):
         result, dom = runandparse(testdir)
         # assert result.ret
         node = dom.find_first_by_tag("testsuite")
-        node.assert_attr(skips=0, tests=1)
+        node.assert_attr(skipped=0, tests=1)
         tnode = node.find_first_by_tag("testcase")
-        tnode.assert_attr(
-            file="test_xfailure_xpass_strict.py",
-            line="1",
-            classname="test_xfailure_xpass_strict",
-            name="test_xpass",
-        )
+        tnode.assert_attr(classname="test_xfailure_xpass_strict", name="test_xpass")
         fnode = tnode.find_first_by_tag("failure")
         fnode.assert_attr(message="[XPASS(strict)] This needs to fail!")
 
@ -608,8 +548,6 @@ class TestPython(object):
         node = dom.find_first_by_tag("testsuite")
         node.assert_attr(errors=1, tests=1)
         tnode = node.find_first_by_tag("testcase")
-        tnode.assert_attr(file="test_collect_error.py", name="test_collect_error")
-        assert tnode["line"] is None
         fnode = tnode.find_first_by_tag("error")
         fnode.assert_attr(message="collection failure")
         assert "SyntaxError" in fnode.toxml()
@ -792,7 +730,7 @@ class TestNonPython(object):
         result, dom = runandparse(testdir)
         assert result.ret
         node = dom.find_first_by_tag("testsuite")
-        node.assert_attr(errors=0, failures=1, skips=0, tests=1)
+        node.assert_attr(errors=0, failures=1, skipped=0, tests=1)
         tnode = node.find_first_by_tag("testcase")
         tnode.assert_attr(name="myfile.xyz")
         fnode = tnode.find_first_by_tag("failure")
@ -1042,6 +980,12 @@ def test_record_property_same_name(testdir):
 
 @pytest.mark.filterwarnings("default")
 def test_record_attribute(testdir):
+    testdir.makeini(
+        """
+        [pytest]
+        junit_family = xunit1
+    """
+    )
     testdir.makepyfile(
         """
         import pytest
@ -1063,6 +1007,38 @@ def test_record_attribute(testdir):
     )
 
 
+@pytest.mark.filterwarnings("default")
+def test_record_attribute_xunit2(testdir):
+    """Ensure record_xml_attribute drops values when outside of legacy family
+    """
+    testdir.makeini(
+        """
+        [pytest]
+        junit_family = xunit2
+    """
+    )
+    testdir.makepyfile(
+        """
+        import pytest
+
+        @pytest.fixture
+        def other(record_xml_attribute):
+            record_xml_attribute("bar", 1)
+        def test_record(record_xml_attribute, other):
+            record_xml_attribute("foo", "<1");
+        """
+    )
+
+    result, dom = runandparse(testdir, "-rw")
+    result.stdout.fnmatch_lines(
+        [
+            "*test_record_attribute_xunit2.py:6:*record_xml_attribute is an experimental feature",
+            "*test_record_attribute_xunit2.py:6:*record_xml_attribute is incompatible with "
+            "junit_family: xunit2 (use: legacy|xunit1)",
+        ]
+    )
+
+
 def test_random_report_log_xdist(testdir, monkeypatch):
     """xdist calls pytest_runtest_logreport as they are executed by the slaves,
     with nodes from several nodes overlapping, so junitxml must cope with that
@ -1155,20 +1131,18 @@ def test_fancy_items_regression(testdir):
 
     assert "INTERNALERROR" not in result.stdout.str()
 
-    items = sorted(
-        "%(classname)s %(name)s %(file)s" % x for x in dom.find_by_tag("testcase")
-    )
+    items = sorted("%(classname)s %(name)s" % x for x in dom.find_by_tag("testcase"))
     import pprint
 
     pprint.pprint(items)
     assert items == [
-        u"conftest a conftest.py",
-        u"conftest a conftest.py",
-        u"conftest b conftest.py",
-        u"test_fancy_items_regression a test_fancy_items_regression.py",
-        u"test_fancy_items_regression a test_fancy_items_regression.py",
-        u"test_fancy_items_regression b test_fancy_items_regression.py",
-        u"test_fancy_items_regression test_pass" u" test_fancy_items_regression.py",
+        u"conftest a",
+        u"conftest a",
+        u"conftest b",
+        u"test_fancy_items_regression a",
+        u"test_fancy_items_regression a",
+        u"test_fancy_items_regression b",
+        u"test_fancy_items_regression test_pass",
     ]
 
@ -147,29 +147,6 @@ class TestPDB(object):
         assert rep.failed
         assert len(pdblist) == 1
 
     def test_pdb_interaction(self, testdir):
         p1 = testdir.makepyfile(
             """
             def test_1():
                 i = 0
                 assert i == 1
+
+            def test_not_called_due_to_quit():
+                pass
             """
         )
         child = testdir.spawn_pytest("--pdb %s" % p1)
         child.expect(".*def test_1")
         child.expect(".*i = 0")
         child.expect("Pdb")
         child.sendeof()
         rest = child.read().decode("utf8")
-        assert "1 failed" in rest
+        assert "= 1 failed in" in rest
         assert "def test_1" not in rest
+        assert "Exit: Quitting debugger" in rest
+        assert "PDB continue (IO-capturing resumed)" not in rest
         self.flush(child)
 
     @staticmethod
     def flush(child):
         if platform.system() == "Darwin":
@ -214,40 +191,32 @@ class TestPDB(object):
         child.sendeof()
         self.flush(child)
 
-    def test_pdb_print_captured_stdout(self, testdir):
-        p1 = testdir.makepyfile(
-            """
-            def test_1():
-                print("get\\x20rekt")
-                assert False
-            """
-        )
-        child = testdir.spawn_pytest("--pdb %s" % p1)
-        child.expect("captured stdout")
-        child.expect("get rekt")
-        child.expect("Pdb")
-        child.sendeof()
-        rest = child.read().decode("utf8")
-        assert "1 failed" in rest
-        assert "get rekt" not in rest
-        self.flush(child)
-
-    def test_pdb_print_captured_stderr(self, testdir):
+    def test_pdb_print_captured_stdout_and_stderr(self, testdir):
         p1 = testdir.makepyfile(
             """
             def test_1():
                 import sys
                 sys.stderr.write("get\\x20rekt")
+                print("get\\x20rekt")
                 assert False
+
+            def test_not_called_due_to_quit():
+                pass
             """
         )
         child = testdir.spawn_pytest("--pdb %s" % p1)
+        child.expect("captured stdout")
+        child.expect("get rekt")
         child.expect("captured stderr")
         child.expect("get rekt")
+        child.expect("traceback")
+        child.expect("def test_1")
         child.expect("Pdb")
         child.sendeof()
         rest = child.read().decode("utf8")
-        assert "1 failed" in rest
+        assert "Exit: Quitting debugger" in rest
+        assert "= 1 failed in" in rest
+        assert "def test_1" not in rest
         assert "get rekt" not in rest
         self.flush(child)
@ -375,15 +344,17 @@ class TestPDB(object):
                 i = 0
                 print("hello17")
                 pytest.set_trace()
-                x = 3
+                i == 1
+                assert 0
             """
         )
         child = testdir.spawn_pytest(str(p1))
         child.expect("test_1")
-        child.expect("x = 3")
+        child.expect(r"test_1\(\)")
+        child.expect("i == 1")
         child.expect("Pdb")
-        child.sendeof()
+        child.sendline("c")
         rest = child.read().decode("utf-8")
+        assert "AssertionError" in rest
         assert "1 failed" in rest
         assert "def test_1" in rest
         assert "hello17" in rest  # out is captured
@ -398,13 +369,14 @@ class TestPDB(object):
                 print("hello17")
                 pytest.set_trace(header="== my_header ==")
                 x = 3
+                assert 0
             """
         )
         child = testdir.spawn_pytest(str(p1))
         child.expect("== my_header ==")
         assert "PDB set_trace" not in child.before.decode()
         child.expect("Pdb")
-        child.sendeof()
+        child.sendline("c")
         rest = child.read().decode("utf-8")
         assert "1 failed" in rest
         assert "def test_1" in rest
@ -424,9 +396,9 @@ class TestPDB(object):
         child.expect("Pdb")
         child.sendeof()
         rest = child.read().decode("utf8")
-        assert "1 failed" in rest
+        assert "no tests ran" in rest
         assert "reading from stdin while output" not in rest
-        assert "BdbQuit" in rest
+        assert "BdbQuit" not in rest
         self.flush(child)
 
     def test_pdb_and_capsys(self, testdir):
@ -518,6 +490,7 @@ class TestPDB(object):
                 print("hello18")
                 pytest.set_trace()
                 x = 4
+                assert 0
             """
         )
         child = testdir.spawn_pytest(str(p1))
@ -530,11 +503,11 @@ class TestPDB(object):
         child.expect(r"PDB set_trace \(IO-capturing turned off\)")
         child.expect("x = 4")
         child.expect("Pdb")
-        child.sendeof()
+        child.sendline("c")
+        child.expect("_ test_1 _")
+        child.expect("def test_1")
         child.expect("Captured stdout call")
         rest = child.read().decode("utf8")
-        assert "Captured stdout call" in rest
         assert "hello17" in rest  # out is captured
         assert "hello18" in rest  # out is captured
         assert "1 failed" in rest
@ -795,7 +768,7 @@ class TestDebuggingBreakpoints(object):
         child.expect("Pdb")
         child.sendeof()
         rest = child.read().decode("utf8")
-        assert "1 failed" in rest
+        assert "Quitting debugger" in rest
         assert "reading from stdin while output" not in rest
         TestPDB.flush(child)
 
@ -808,12 +781,13 @@ class TestDebuggingBreakpoints(object):
             import pdb
             def test_1():
                 pdb.set_trace()
+                assert 0
             """
         )
         child = testdir.spawn_pytest(str(p1))
         child.expect("test_1")
         child.expect("Pdb")
-        child.sendeof()
+        child.sendline("c")
         rest = child.read().decode("utf8")
         assert "1 failed" in rest
         assert "reading from stdin while output" not in rest
@ -826,15 +800,29 @@ class TestTraceOption:
             """
             def test_1():
                 assert True
+
+            def test_2():
+                pass
+
+            def test_3():
+                pass
             """
         )
         child = testdir.spawn_pytest("--trace " + str(p1))
         child.expect("test_1")
         child.expect("Pdb")
-        child.sendeof()
+        child.sendline("c")
+        child.expect("test_2")
+        child.expect("Pdb")
+        child.sendline("c")
+        child.expect("test_3")
+        child.expect("Pdb")
+        child.sendline("q")
+        child.expect_exact("Exit: Quitting debugger")
         rest = child.read().decode("utf8")
-        assert "1 passed" in rest
+        assert "2 passed in" in rest
         assert "reading from stdin while output" not in rest
+        assert "Exit: Quitting debugger" in child.before.decode("utf8")
         TestPDB.flush(child)
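
The behaviour being exercised here: under ``--trace`` every test stops in pdb, ``c`` continues into the next test, and ``q`` now aborts the whole session cleanly (#4280). An abbreviated interactive transcript (output trimmed for illustration)::

    $ pytest --trace test_trace.py
    ...
    (Pdb) c
    ...
    (Pdb) q
    Exit: Quitting debugger
    ===== 2 passed in 0.12 seconds =====
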
@ -863,3 +851,31 @@ def test_trace_after_runpytest(testdir):
     rest = child.read().decode("utf8")
     TestPDB.flush(child)
     assert child.exitstatus == 0, rest
+
+
+def test_quit_with_swallowed_SystemExit(testdir):
+    """Test that debugging's pytest_configure is re-entrant."""
+    p1 = testdir.makepyfile(
+        """
+        def call_pdb_set_trace():
+            __import__('pdb').set_trace()
+
+
+        def test_1():
+            try:
+                call_pdb_set_trace()
+            except SystemExit:
+                pass
+
+
+        def test_2():
+            pass
+        """
+    )
+    child = testdir.spawn_pytest(str(p1))
+    child.expect("Pdb")
+    child.sendline("q")
+    child.expect_exact("Exit: Quitting debugger")
+    rest = child.read().decode("utf8")
+    assert "no tests ran" in rest
+    TestPDB.flush(child)
@ -8,10 +8,6 @@ import py
 import _pytest._code
 import pytest
-from _pytest.nodes import FSCollector
-from _pytest.nodes import Item
-from _pytest.nodes import Node
-from _pytest.resultlog import generic_path
 from _pytest.resultlog import pytest_configure
 from _pytest.resultlog import pytest_unconfigure
 from _pytest.resultlog import ResultLog
@ -20,31 +16,6 @@ from _pytest.resultlog import ResultLog
 pytestmark = pytest.mark.filterwarnings("ignore:--result-log is deprecated")
 
 
-def test_generic_path(testdir):
-    from _pytest.main import Session
-
-    config = testdir.parseconfig()
-    session = Session(config)
-    p1 = Node("a", config=config, session=session, nodeid="a")
-    # assert p1.fspath is None
-    p2 = Node("B", parent=p1)
-    p3 = Node("()", parent=p2)
-    item = Item("c", parent=p3)
-
-    res = generic_path(item)
-    assert res == "a.B().c"
-
-    p0 = FSCollector("proj/test", config=config, session=session)
-    p1 = FSCollector("proj/test/a", parent=p0)
-    p2 = Node("B", parent=p1)
-    p3 = Node("()", parent=p2)
-    p4 = Node("c", parent=p3)
-    item = Item("[1]", parent=p4)
-
-    res = generic_path(item)
-    assert res == "test/a:B().c[1]"
-
-
 def test_write_log_entry():
     reslog = ResultLog(None, None)
     reslog.logfile = py.io.TextIO()
@ -465,12 +465,7 @@ class TestSessionReports(object):
         assert res[1].name == "TestClass"
 
 
-reporttypes = [
-    reports.BaseReport,
-    reports.TestReport,
-    reports.TeardownErrorReport,
-    reports.CollectReport,
-]
+reporttypes = [reports.BaseReport, reports.TestReport, reports.CollectReport]
 
 
 @pytest.mark.parametrize(
@ -181,7 +181,6 @@ class TestNewSession(SessionTests):
         passed, skipped, failed = reprec.countoutcomes()
         assert failed == skipped == 0
         assert passed == 7
-        # also test listnames() here ...
 
     def test_collect_only_with_various_situations(self, testdir):
         p = testdir.makepyfile(
@ -18,7 +18,6 @@ from _pytest.main import EXIT_NOTESTSCOLLECTED
 from _pytest.terminal import _plugin_nameversions
 from _pytest.terminal import build_summary_stats_line
 from _pytest.terminal import getreportopt
-from _pytest.terminal import repr_pythonversion
 from _pytest.terminal import TerminalReporter
 
 DistInfo = collections.namedtuple("DistInfo", ["project_name", "version"])
@ -361,16 +360,6 @@ class TestCollectonly(object):
     result.stdout.fnmatch_lines(["*test_fun.py: 1*"])
 
 
-def test_repr_python_version(monkeypatch):
-    try:
-        monkeypatch.setattr(sys, "version_info", (2, 5, 1, "final", 0))
-        assert repr_pythonversion() == "2.5.1-final-0"
-        sys.version_info = x = (2, 3)
-        assert repr_pythonversion() == str(x)
-    finally:
-        monkeypatch.undo()  # do this early as pytest can get confused
-
-
 class TestFixtureReporting(object):
     def test_setup_fixture_error(self, testdir):
         testdir.makepyfile(
@ -485,7 +474,7 @@ class TestTerminalFunctional(object):
         )
         result = testdir.runpytest("-k", "test_two:", testpath)
         result.stdout.fnmatch_lines(
-            ["collected 3 items / 1 deselected", "*test_deselected.py ..*"]
+            ["collected 3 items / 1 deselected / 2 selected", "*test_deselected.py ..*"]
         )
         assert result.ret == 0
 
@ -509,7 +498,7 @@ class TestTerminalFunctional(object):
         result = testdir.runpytest("-m", "not foo")
         result.stdout.fnmatch_lines(
             [
-                "collected 3 items / 1 deselected",
+                "collected 3 items / 1 deselected / 2 selected",
                 "*test_show_deselected.py ..*",
                 "*= 2 passed, 1 deselected in * =*",
             ]
@ -353,3 +353,7 @@ def attempt_symlink_to(path, to_path):
         Path(path).symlink_to(Path(to_path))
     except OSError:
         pytest.skip("could not create symbolic link")
+
+
+def test_tmpdir_equals_tmp_path(tmpdir, tmp_path):
+    assert Path(tmpdir) == tmp_path