commit 253419316c
@@ -41,7 +41,9 @@ jobs:
     - env: TOXENV=py36-freeze
       python: '3.6'
     - env: TOXENV=py37
-      python: 'nightly'
+      python: '3.7'
+      sudo: required
+      dist: xenial
 
     - stage: deploy
       python: '3.6'
@@ -8,6 +8,70 @@
 
 .. towncrier release notes start
 
+pytest 3.7.0 (2018-07-30)
+=========================
+
+Deprecations and Removals
+-------------------------
+
+- `#2639 <https://github.com/pytest-dev/pytest/issues/2639>`_: ``pytest_namespace`` has been deprecated.
+
+  See the documentation for the ``pytest_namespace`` hook for suggestions on how to deal
+  with this in plugins which use this functionality.
+
+
+- `#3661 <https://github.com/pytest-dev/pytest/issues/3661>`_: Calling a fixture function directly, as opposed to requesting it in a test function, now issues a ``RemovedInPytest4Warning``. This will become an error in pytest ``4.0``.
+
+  This is a great source of confusion for new users, who will often call fixture functions and request them from test functions interchangeably, which breaks the fixture resolution model.
+
+
+Features
+--------
+
+- `#2283 <https://github.com/pytest-dev/pytest/issues/2283>`_: New ``package`` fixture scope: fixtures are finalized when the last test of a *package* finishes. This feature is considered **experimental**, so use it sparingly.
+
+
+- `#3576 <https://github.com/pytest-dev/pytest/issues/3576>`_: ``Node.add_marker`` now supports an ``append=True/False`` parameter to determine whether the mark comes last (default) or first.
+
+
+- `#3579 <https://github.com/pytest-dev/pytest/issues/3579>`_: Fixture ``caplog`` now has a ``messages`` property, providing convenient access to the format-interpolated log messages without the extra data provided by the formatter/handler.
+
+
+- `#3610 <https://github.com/pytest-dev/pytest/issues/3610>`_: New ``--trace`` option to enter the debugger at the start of a test.
+
+
+- `#3623 <https://github.com/pytest-dev/pytest/issues/3623>`_: Introduce ``pytester.copy_example`` as a helper to run acceptance tests against examples from the project.
+
+
+Bug Fixes
+---------
+
+- `#2220 <https://github.com/pytest-dev/pytest/issues/2220>`_: Fix a bug where fixtures overridden by direct parameters (for example parametrization) were being instantiated even if they were not being used by a test.
+
+
+- `#3695 <https://github.com/pytest-dev/pytest/issues/3695>`_: Fix ``ApproxNumpy`` initialisation argument mix-up: the ``abs`` and ``rel`` tolerances were flipped, causing strange comparison results.
+  Added tests to check ``abs`` and ``rel`` tolerances for ``np.array`` and a test for expecting ``nan`` with ``np.array()``.
+
+
+- `#980 <https://github.com/pytest-dev/pytest/issues/980>`_: Fix truncated locals output in verbose mode.
+
+
+Improved Documentation
+----------------------
+
+- `#3295 <https://github.com/pytest-dev/pytest/issues/3295>`_: Correct the usage documentation of ``--last-failed-no-failures`` by adding the missing ``--last-failed`` argument to the presented examples, which were misleading and suggested that the argument is not needed.
+
+
+Trivial/Internal Changes
+------------------------
+
+- `#3519 <https://github.com/pytest-dev/pytest/issues/3519>`_: Now a ``README.md`` file is created in ``.pytest_cache`` to make it clear why the directory exists.
+
+
 pytest 3.6.4 (2018-07-28)
 =========================
 
@@ -14,6 +14,7 @@ environment:
     - TOXENV: "py34"
     - TOXENV: "py35"
     - TOXENV: "py36"
+    - TOXENV: "py37"
    # - TOXENV: "pypy" reenable when we are able to provide a scandir wheel or build scandir
     - TOXENV: "py27-pexpect"
     - TOXENV: "py27-xdist"
@@ -1 +0,0 @@
-Fix a bug where fixtures overriden by direct parameters (for example parametrization) were being instantiated even if they were not being used by a test.
@@ -1 +0,0 @@
-New ``package`` fixture scope: fixtures are finalized when the last test of a *package* finishes. This feature is considered **experimental**, so use it sparingly.
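A minimal sketch of how the experimental ``package`` scope described above might be used; the fixture name and the resource it manages are hypothetical::

    # conftest.py inside a test package (hypothetical)
    import pytest

    @pytest.fixture(scope="package")
    def shared_resource():
        resource = {"connected": True}   # stand-in for something expensive to set up
        yield resource
        # Teardown runs once, after the last test in this package has finished.
        resource["connected"] = False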
@@ -1,4 +0,0 @@
-``pytest_namespace`` has been deprecated.
-
-See the documentation for ``pytest_namespace`` hook for suggestions on how to deal
-with this in plugins which use this functionality.
@@ -1 +0,0 @@
-Now a ``README.md`` file is created in ``.pytest_cache`` to make it clear why the directory exists.
@@ -1 +0,0 @@
-``Node.add_marker`` now supports an ``append=True/False`` parameter to determine whether the mark comes last (default) or first.
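A sketch of the new ``append`` parameter described above, used from a hypothetical ``conftest.py`` hook; the mark names are purely illustrative::

    # conftest.py (hypothetical)
    import pytest

    def pytest_collection_modifyitems(items):
        for item in items:
            item.add_marker(pytest.mark.slow)                    # default: mark is appended, comes last
            item.add_marker(pytest.mark.critical, append=False)  # mark is prepended, comes first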
@@ -1 +0,0 @@
-Fixture ``caplog`` now has a ``messages`` property, providing convenient access to the format-interpolated log messages without the extra data provided by the formatter/handler.
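A sketch of the new ``messages`` property described above; the logger name and message are illustrative::

    import logging

    def test_messages(caplog):
        logging.getLogger("app").warning("%d items left", 3)
        # ``caplog.messages`` holds only the interpolated message strings,
        # without the level/name decoration a formatter would add.
        assert caplog.messages == ["3 items left"]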
@@ -1 +0,0 @@
-New ``--trace`` option to enter the debugger at the start of a test.
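The new flag is normally used on the command line; as a sketch, the same option can also be passed programmatically (the test path here is hypothetical)::

    import pytest

    # Roughly equivalent to running "pytest --trace test_app.py":
    # the debugger is entered at the start of each test rather than only on failure.
    pytest.main(["--trace", "test_app.py"])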
@@ -1 +0,0 @@
-Introduce ``pytester.copy_example`` as helper to do acceptance tests against examples from the project.
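A hedged sketch of an acceptance test using the new helper through the ``testdir`` fixture; the ``acceptance`` directory name, the ini option used to locate examples, and the expected outcome count are assumptions for illustration::

    # test_acceptance.py (hypothetical)
    pytest_plugins = "pytester"   # enables the ``testdir`` fixture

    def test_examples_still_pass(testdir):
        # copy_example copies the named path from the project's example
        # directory into testdir's temporary directory.
        testdir.copy_example("acceptance")
        result = testdir.runpytest()
        result.assert_outcomes(passed=1)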
@@ -1,3 +0,0 @@
-Calling a fixture function directly, as opposed to request them in a test function, now issues a ``RemovedInPytest4Warning``. It will be changed into an error in pytest ``4.0``.
-
-This is a great source of confusion to new users, which will often call the fixture functions and request them from test functions interchangeably, which breaks the fixture resolution model.
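To illustrate the deprecation above, a minimal hypothetical sketch of the pattern that now triggers the warning, next to the supported way of getting the fixture value::

    import pytest

    @pytest.fixture
    def numbers():
        return [1, 2, 3]

    def test_direct_call():
        # Deprecated: calling the fixture function directly now issues
        # RemovedInPytest4Warning (and becomes an error in pytest 4.0).
        numbers()

    def test_requested(numbers):
        # Supported: request the fixture by naming it as a parameter.
        assert numbers == [1, 2, 3]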
@@ -1 +0,0 @@
-Fix truncated locals output in verbose mode.
@@ -6,6 +6,7 @@ Release announcements
    :maxdepth: 2
 
+   release-3.7.0
    release-3.6.4
    release-3.6.3
    release-3.6.2
@@ -0,0 +1,41 @@
+pytest-3.7.0
+=======================================
+
+The pytest team is proud to announce the 3.7.0 release!
+
+pytest is a mature Python testing tool with more than 2000 tests
+against itself, passing on many different interpreters and platforms.
+
+This release contains a number of bug fixes and improvements, so users are encouraged
+to take a look at the CHANGELOG:
+
+    http://doc.pytest.org/en/latest/changelog.html
+
+For complete documentation, please visit:
+
+    http://docs.pytest.org
+
+As usual, you can upgrade from pypi via:
+
+    pip install -U pytest
+
+Thanks to all who contributed to this release, among them:
+
+* Alan
+* Alan Brammer
+* Ammar Najjar
+* Anthony Sottile
+* Bruno Oliveira
+* Jeffrey Rackauckas
+* Kale Kundert
+* Ronny Pfannschmidt
+* Serhii Mozghovyi
+* Tadek Teleżyński
+* Wil Cooley
+* abrammer
+* avirlrma
+* turturica
+
+
+Happy testing,
+The Pytest Development Team
@@ -162,8 +162,8 @@ When no tests failed in the last run, or when no cached ``lastfailed`` data was
 found, ``pytest`` can be configured either to run all of the tests or no tests,
 using the ``--last-failed-no-failures`` option, which takes one of the following values::
 
-    pytest --last-failed-no-failures all    # run all tests (default behavior)
-    pytest --last-failed-no-failures none   # run no tests and exit
+    pytest --last-failed --last-failed-no-failures all    # run all tests (default behavior)
+    pytest --last-failed --last-failed-no-failures none   # run no tests and exit
 
 The new config.cache object
 --------------------------------
@@ -31,7 +31,7 @@ You can then restrict a test run to only run tests marked with ``webtest``::
 
     $ pytest -v -m webtest
     =========================== test session starts ============================
-    platform linux -- Python 3.x.y, pytest-3.x.y, py-1.x.y, pluggy-0.x.y -- $PYTHON_PREFIX/bin/python3.5
+    platform linux -- Python 3.x.y, pytest-3.x.y, py-1.x.y, pluggy-0.x.y -- $PYTHON_PREFIX/bin/python3.6
     cachedir: .pytest_cache
     rootdir: $REGENDOC_TMPDIR, inifile:
     collecting ... collected 4 items / 3 deselected
@@ -44,7 +44,7 @@ Or the inverse, running all tests except the webtest ones::
 
     $ pytest -v -m "not webtest"
     =========================== test session starts ============================
-    platform linux -- Python 3.x.y, pytest-3.x.y, py-1.x.y, pluggy-0.x.y -- $PYTHON_PREFIX/bin/python3.5
+    platform linux -- Python 3.x.y, pytest-3.x.y, py-1.x.y, pluggy-0.x.y -- $PYTHON_PREFIX/bin/python3.6
     cachedir: .pytest_cache
     rootdir: $REGENDOC_TMPDIR, inifile:
     collecting ... collected 4 items / 1 deselected
@@ -64,7 +64,7 @@ tests based on their module, class, method, or function name::
 
     $ pytest -v test_server.py::TestClass::test_method
     =========================== test session starts ============================
-    platform linux -- Python 3.x.y, pytest-3.x.y, py-1.x.y, pluggy-0.x.y -- $PYTHON_PREFIX/bin/python3.5
+    platform linux -- Python 3.x.y, pytest-3.x.y, py-1.x.y, pluggy-0.x.y -- $PYTHON_PREFIX/bin/python3.6
     cachedir: .pytest_cache
     rootdir: $REGENDOC_TMPDIR, inifile:
     collecting ... collected 1 item
@@ -77,7 +77,7 @@ You can also select on the class::
 
     $ pytest -v test_server.py::TestClass
     =========================== test session starts ============================
-    platform linux -- Python 3.x.y, pytest-3.x.y, py-1.x.y, pluggy-0.x.y -- $PYTHON_PREFIX/bin/python3.5
+    platform linux -- Python 3.x.y, pytest-3.x.y, py-1.x.y, pluggy-0.x.y -- $PYTHON_PREFIX/bin/python3.6
     cachedir: .pytest_cache
     rootdir: $REGENDOC_TMPDIR, inifile:
     collecting ... collected 1 item
@@ -90,7 +90,7 @@ Or select multiple nodes::
 
     $ pytest -v test_server.py::TestClass test_server.py::test_send_http
     =========================== test session starts ============================
-    platform linux -- Python 3.x.y, pytest-3.x.y, py-1.x.y, pluggy-0.x.y -- $PYTHON_PREFIX/bin/python3.5
+    platform linux -- Python 3.x.y, pytest-3.x.y, py-1.x.y, pluggy-0.x.y -- $PYTHON_PREFIX/bin/python3.6
     cachedir: .pytest_cache
     rootdir: $REGENDOC_TMPDIR, inifile:
     collecting ... collected 2 items
@@ -128,7 +128,7 @@ select tests based on their names::
 
     $ pytest -v -k http # running with the above defined example module
     =========================== test session starts ============================
-    platform linux -- Python 3.x.y, pytest-3.x.y, py-1.x.y, pluggy-0.x.y -- $PYTHON_PREFIX/bin/python3.5
+    platform linux -- Python 3.x.y, pytest-3.x.y, py-1.x.y, pluggy-0.x.y -- $PYTHON_PREFIX/bin/python3.6
     cachedir: .pytest_cache
     rootdir: $REGENDOC_TMPDIR, inifile:
     collecting ... collected 4 items / 3 deselected
@@ -141,7 +141,7 @@ And you can also run all tests except the ones that match the keyword::
 
     $ pytest -k "not send_http" -v
     =========================== test session starts ============================
-    platform linux -- Python 3.x.y, pytest-3.x.y, py-1.x.y, pluggy-0.x.y -- $PYTHON_PREFIX/bin/python3.5
+    platform linux -- Python 3.x.y, pytest-3.x.y, py-1.x.y, pluggy-0.x.y -- $PYTHON_PREFIX/bin/python3.6
     cachedir: .pytest_cache
     rootdir: $REGENDOC_TMPDIR, inifile:
     collecting ... collected 4 items / 1 deselected
@@ -156,7 +156,7 @@ Or to select "http" and "quick" tests::
 
     $ pytest -k "http or quick" -v
     =========================== test session starts ============================
-    platform linux -- Python 3.x.y, pytest-3.x.y, py-1.x.y, pluggy-0.x.y -- $PYTHON_PREFIX/bin/python3.5
+    platform linux -- Python 3.x.y, pytest-3.x.y, py-1.x.y, pluggy-0.x.y -- $PYTHON_PREFIX/bin/python3.6
     cachedir: .pytest_cache
     rootdir: $REGENDOC_TMPDIR, inifile:
     collecting ... collected 4 items / 2 deselected
@@ -59,7 +59,7 @@ consulted when reporting in ``verbose`` mode::
 
     nonpython $ pytest -v
     =========================== test session starts ============================
-    platform linux -- Python 3.x.y, pytest-3.x.y, py-1.x.y, pluggy-0.x.y -- $PYTHON_PREFIX/bin/python3.5
+    platform linux -- Python 3.x.y, pytest-3.x.y, py-1.x.y, pluggy-0.x.y -- $PYTHON_PREFIX/bin/python3.6
     cachedir: .pytest_cache
     rootdir: $REGENDOC_TMPDIR/nonpython, inifile:
     collecting ... collected 2 items
@@ -84,6 +84,7 @@ interesting to just look at the collection tree::
     platform linux -- Python 3.x.y, pytest-3.x.y, py-1.x.y, pluggy-0.x.y
     rootdir: $REGENDOC_TMPDIR/nonpython, inifile:
     collected 2 items
+    <Package '$REGENDOC_TMPDIR/nonpython'>
       <YamlFile 'test_simple.yml'>
         <YamlItem 'hello'>
         <YamlItem 'ok'>
@@ -363,7 +363,7 @@ get on the terminal - we are working on that)::
     >       int(s)
     E       ValueError: invalid literal for int() with base 10: 'qwe'
 
-    <0-codegen $PYTHON_PREFIX/lib/python3.5/site-packages/_pytest/python_api.py:635>:1: ValueError
+    <0-codegen $PYTHON_PREFIX/lib/python3.6/site-packages/_pytest/python_api.py:635>:1: ValueError
     ______________________ TestRaises.test_raises_doesnt _______________________
 
     self = <failure_demo.TestRaises object at 0xdeadbeef>
@@ -357,7 +357,7 @@ which will add info only when run with "--v"::
 
     $ pytest -v
     =========================== test session starts ============================
-    platform linux -- Python 3.x.y, pytest-3.x.y, py-1.x.y, pluggy-0.x.y -- $PYTHON_PREFIX/bin/python3.5
+    platform linux -- Python 3.x.y, pytest-3.x.y, py-1.x.y, pluggy-0.x.y -- $PYTHON_PREFIX/bin/python3.6
     cachedir: .pytest_cache
     info1: did you know that ...
     did you?
@@ -726,7 +726,7 @@ Running this test will *skip* the invocation of ``data_set`` with value ``2``::
 
     $ pytest test_fixture_marks.py -v
     =========================== test session starts ============================
-    platform linux -- Python 3.x.y, pytest-3.x.y, py-1.x.y, pluggy-0.x.y -- $PYTHON_PREFIX/bin/python3.5
+    platform linux -- Python 3.x.y, pytest-3.x.y, py-1.x.y, pluggy-0.x.y -- $PYTHON_PREFIX/bin/python3.6
     cachedir: .pytest_cache
     rootdir: $REGENDOC_TMPDIR, inifile:
     collecting ... collected 3 items
@@ -769,7 +769,7 @@ Here we declare an ``app`` fixture which receives the previously defined
 
     $ pytest -v test_appsetup.py
     =========================== test session starts ============================
-    platform linux -- Python 3.x.y, pytest-3.x.y, py-1.x.y, pluggy-0.x.y -- $PYTHON_PREFIX/bin/python3.5
+    platform linux -- Python 3.x.y, pytest-3.x.y, py-1.x.y, pluggy-0.x.y -- $PYTHON_PREFIX/bin/python3.6
     cachedir: .pytest_cache
     rootdir: $REGENDOC_TMPDIR, inifile:
     collecting ... collected 2 items
@@ -838,7 +838,7 @@ Let's run the tests in verbose mode and with looking at the print-output::
 
     $ pytest -v -s test_module.py
     =========================== test session starts ============================
-    platform linux -- Python 3.x.y, pytest-3.x.y, py-1.x.y, pluggy-0.x.y -- $PYTHON_PREFIX/bin/python3.5
+    platform linux -- Python 3.x.y, pytest-3.x.y, py-1.x.y, pluggy-0.x.y -- $PYTHON_PREFIX/bin/python3.6
     cachedir: .pytest_cache
     rootdir: $REGENDOC_TMPDIR, inifile:
     collecting ... collected 8 items
@@ -27,7 +27,7 @@ Install ``pytest``
 2. Check that you installed the correct version::
 
     $ pytest --version
-    This is pytest version 3.x.y, imported from $PYTHON_PREFIX/lib/python3.5/site-packages/pytest.py
+    This is pytest version 3.x.y, imported from $PYTHON_PREFIX/lib/python3.6/site-packages/pytest.py
 
 .. _`simpletest`:
 
@@ -211,7 +211,7 @@ class ApproxScalar(ApproxBase):
         the pre-specified tolerance.
         """
         if _is_numpy_array(actual):
-            return ApproxNumpy(actual, self.abs, self.rel, self.nan_ok) == self.expected
+            return all(a == self for a in actual.flat)
 
         # Short-circuit exact equality.
         if actual == self.expected:
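The change above compares a numpy-array ``actual`` element-wise against the scalar tolerance instead of constructing ``ApproxNumpy`` with swapped ``abs``/``rel`` arguments. A small sanity check in the spirit of the new tests below, assuming numpy is installed; the numbers are arbitrary::

    import numpy as np
    import pytest

    # 99.0 is within a 2% relative tolerance of 100.0, but not within 0.5%.
    assert np.array(99.0) == pytest.approx(100.0, rel=2e-2, abs=0)
    assert np.array(99.0) != pytest.approx(100.0, rel=5e-3, abs=0)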
@@ -342,6 +342,68 @@ class TestApprox(object):
         assert actual == approx(list(expected), rel=5e-7, abs=0)
         assert actual != approx(list(expected), rel=5e-8, abs=0)
 
+    def test_numpy_tolerance_args(self):
+        """
+        Check that numpy rel/abs args are handled correctly
+        for comparison against an np.array
+        Check both sides of the operator, hopefully it doesn't impact things.
+        Test all permutations of where the approx and np.array() can show up
+        """
+        np = pytest.importorskip("numpy")
+        expected = 100.
+        actual = 99.
+        abs_diff = expected - actual
+        rel_diff = (expected - actual) / expected
+
+        tests = [
+            (eq, abs_diff, 0),
+            (eq, 0, rel_diff),
+            (ne, 0, rel_diff / 2.),  # rel diff fail
+            (ne, abs_diff / 2., 0),  # abs diff fail
+        ]
+
+        for op, _abs, _rel in tests:
+            assert op(np.array(actual), approx(expected, abs=_abs, rel=_rel))  # a, b
+            assert op(approx(expected, abs=_abs, rel=_rel), np.array(actual))  # b, a
+
+            assert op(actual, approx(np.array(expected), abs=_abs, rel=_rel))  # a, b
+            assert op(approx(np.array(expected), abs=_abs, rel=_rel), actual)  # b, a
+
+            assert op(np.array(actual), approx(np.array(expected), abs=_abs, rel=_rel))
+            assert op(approx(np.array(expected), abs=_abs, rel=_rel), np.array(actual))
+
+    def test_numpy_expecting_nan(self):
+        np = pytest.importorskip("numpy")
+        examples = [
+            (eq, nan, nan),
+            (eq, -nan, -nan),
+            (eq, nan, -nan),
+            (ne, 0.0, nan),
+            (ne, inf, nan),
+        ]
+        for op, a, x in examples:
+            # Nothing is equal to NaN by default.
+            assert np.array(a) != approx(x)
+            assert a != approx(np.array(x))
+
+            # If ``nan_ok=True``, then NaN is equal to NaN.
+            assert op(np.array(a), approx(x, nan_ok=True))
+            assert op(a, approx(np.array(x), nan_ok=True))
+
+    def test_numpy_expecting_inf(self):
+        np = pytest.importorskip("numpy")
+        examples = [
+            (eq, inf, inf),
+            (eq, -inf, -inf),
+            (ne, inf, -inf),
+            (ne, 0.0, inf),
+            (ne, nan, inf),
+        ]
+        for op, a, x in examples:
+            assert op(np.array(a), approx(x))
+            assert op(a, approx(np.array(x)))
+            assert op(np.array(a), approx(np.array(x)))
+
     def test_numpy_array_wrong_shape(self):
         np = pytest.importorskip("numpy")
 