Show deprecation warnings by default if no other filters are configured
Fix #2908
This commit is contained in:
parent
7e13593452
commit
9965ed84da
|
@ -0,0 +1,3 @@
|
|||
``DeprecationWarning`` and ``PendingDeprecationWarning`` are now shown by default if no other warning filter is
|
||||
configured. This makes pytest compliant with
|
||||
`PEP-0565 <https://www.python.org/dev/peps/pep-0565/#recommended-filter-settings-for-test-runners>`_.
|
|
@ -36,8 +36,6 @@ Running pytest now produces this output::
|
|||
-- Docs: https://docs.pytest.org/en/latest/warnings.html
|
||||
=================== 1 passed, 1 warnings in 0.12 seconds ===================
|
||||
|
||||
Pytest by default catches all warnings except for ``DeprecationWarning`` and ``PendingDeprecationWarning``.
|
||||
|
||||
The ``-W`` flag can be passed to control which warnings will be displayed or even turn
|
||||
them into errors::
|
||||
|
||||
|
@ -78,6 +76,32 @@ Both ``-W`` command-line option and ``filterwarnings`` ini option are based on P
|
|||
`-W option`_ and `warnings.simplefilter`_, so please refer to those sections in the Python
|
||||
documentation for other examples and advanced usage.
|
||||
|
||||
Disabling warning summary
|
||||
-------------------------
|
||||
|
||||
Although not recommended, you can use the ``--disable-warnings`` command-line option to suppress the
|
||||
warning summary entirely from the test run output.
|
||||
|
||||
|
||||
DeprecationWarning and PendingDeprecationWarning
|
||||
------------------------------------------------
|
||||
|
||||
.. versionadded:: 3.8
|
||||
|
||||
By default pytest will display ``DeprecationWarning`` and ``PendingDeprecationWarning`` if no other warning filters
|
||||
are configured. This complies with `PEP-0565 <https://www.python.org/dev/peps/pep-0565/#recommended-filter-settings-for-test-runners>`_ which suggests that those warnings should
|
||||
be shown by default by test runners.
|
||||
|
||||
To disable this behavior, you may define any warning filter either on the command line or in the ini file, but
|
||||
if you don't have any other warnings to filter you can use:
|
||||
|
||||
.. code-block:: ini
|
||||
|
||||
[pytest]
|
||||
filterwarnings =
|
||||
ignore::DeprecationWarning
|
||||
ignore::PendingDeprecationWarning
|
||||
|
||||
|
||||
.. _`filterwarnings`:
|
||||
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
from __future__ import absolute_import, division, print_function
|
||||
|
||||
import sys
|
||||
import warnings
|
||||
from contextlib import contextmanager
|
||||
|
||||
|
@ -69,6 +70,8 @@ def catch_warnings_for_item(config, ihook, item):
|
|||
args = config.getoption("pythonwarnings") or []
|
||||
inifilters = config.getini("filterwarnings")
|
||||
with warnings.catch_warnings(record=True) as log:
|
||||
filters_configured = args or inifilters or sys.warnoptions
|
||||
|
||||
for arg in args:
|
||||
warnings._setoption(arg)
|
||||
|
||||
|
@ -79,6 +82,11 @@ def catch_warnings_for_item(config, ihook, item):
|
|||
for mark in item.iter_markers(name="filterwarnings"):
|
||||
for arg in mark.args:
|
||||
warnings._setoption(arg)
|
||||
filters_configured = True
|
||||
|
||||
if not filters_configured:
|
||||
warnings.filterwarnings("always", category=DeprecationWarning)
|
||||
warnings.filterwarnings("always", category=PendingDeprecationWarning)
|
||||
|
||||
yield
|
||||
|
||||
|
|
|
@ -8,10 +8,6 @@ import pytest
|
|||
from _pytest.main import EXIT_NOTESTSCOLLECTED
|
||||
from _pytest.nodes import Collector
|
||||
|
||||
ignore_parametrized_marks = pytest.mark.filterwarnings(
|
||||
"ignore:Applying marks directly to parameters"
|
||||
)
|
||||
|
||||
|
||||
class TestModule(object):
|
||||
def test_failing_import(self, testdir):
|
||||
|
@ -456,6 +452,13 @@ class TestGenerator(object):
|
|||
|
||||
|
||||
class TestFunction(object):
|
||||
@pytest.fixture
|
||||
def ignore_parametrized_marks_args(self):
|
||||
"""Provides arguments to pytester.runpytest() to ignore the warning about marks being applied directly
|
||||
to parameters.
|
||||
"""
|
||||
return ("-W", "ignore:Applying marks directly to parameters")
|
||||
|
||||
def test_getmodulecollector(self, testdir):
|
||||
item = testdir.getitem("def test_func(): pass")
|
||||
modcol = item.getparent(pytest.Module)
|
||||
|
@ -669,7 +672,7 @@ class TestFunction(object):
|
|||
rec = testdir.inline_run()
|
||||
rec.assertoutcome(passed=1)
|
||||
|
||||
@ignore_parametrized_marks
|
||||
@pytest.mark.filterwarnings("ignore:Applying marks directly to parameters")
|
||||
def test_parametrize_with_mark(self, testdir):
|
||||
items = testdir.getitems(
|
||||
"""
|
||||
|
@ -755,8 +758,7 @@ class TestFunction(object):
|
|||
assert colitems[2].name == "test2[a-c]"
|
||||
assert colitems[3].name == "test2[b-c]"
|
||||
|
||||
@ignore_parametrized_marks
|
||||
def test_parametrize_skipif(self, testdir):
|
||||
def test_parametrize_skipif(self, testdir, ignore_parametrized_marks_args):
|
||||
testdir.makepyfile(
|
||||
"""
|
||||
import pytest
|
||||
|
@ -768,11 +770,10 @@ class TestFunction(object):
|
|||
assert x < 2
|
||||
"""
|
||||
)
|
||||
result = testdir.runpytest()
|
||||
result = testdir.runpytest(*ignore_parametrized_marks_args)
|
||||
result.stdout.fnmatch_lines("* 2 passed, 1 skipped in *")
|
||||
|
||||
@ignore_parametrized_marks
|
||||
def test_parametrize_skip(self, testdir):
|
||||
def test_parametrize_skip(self, testdir, ignore_parametrized_marks_args):
|
||||
testdir.makepyfile(
|
||||
"""
|
||||
import pytest
|
||||
|
@ -784,11 +785,10 @@ class TestFunction(object):
|
|||
assert x < 2
|
||||
"""
|
||||
)
|
||||
result = testdir.runpytest()
|
||||
result = testdir.runpytest(*ignore_parametrized_marks_args)
|
||||
result.stdout.fnmatch_lines("* 2 passed, 1 skipped in *")
|
||||
|
||||
@ignore_parametrized_marks
|
||||
def test_parametrize_skipif_no_skip(self, testdir):
|
||||
def test_parametrize_skipif_no_skip(self, testdir, ignore_parametrized_marks_args):
|
||||
testdir.makepyfile(
|
||||
"""
|
||||
import pytest
|
||||
|
@ -800,11 +800,10 @@ class TestFunction(object):
|
|||
assert x < 2
|
||||
"""
|
||||
)
|
||||
result = testdir.runpytest()
|
||||
result = testdir.runpytest(*ignore_parametrized_marks_args)
|
||||
result.stdout.fnmatch_lines("* 1 failed, 2 passed in *")
|
||||
|
||||
@ignore_parametrized_marks
|
||||
def test_parametrize_xfail(self, testdir):
|
||||
def test_parametrize_xfail(self, testdir, ignore_parametrized_marks_args):
|
||||
testdir.makepyfile(
|
||||
"""
|
||||
import pytest
|
||||
|
@ -816,11 +815,10 @@ class TestFunction(object):
|
|||
assert x < 2
|
||||
"""
|
||||
)
|
||||
result = testdir.runpytest()
|
||||
result = testdir.runpytest(*ignore_parametrized_marks_args)
|
||||
result.stdout.fnmatch_lines("* 2 passed, 1 xfailed in *")
|
||||
|
||||
@ignore_parametrized_marks
|
||||
def test_parametrize_passed(self, testdir):
|
||||
def test_parametrize_passed(self, testdir, ignore_parametrized_marks_args):
|
||||
testdir.makepyfile(
|
||||
"""
|
||||
import pytest
|
||||
|
@ -832,11 +830,10 @@ class TestFunction(object):
|
|||
pass
|
||||
"""
|
||||
)
|
||||
result = testdir.runpytest()
|
||||
result = testdir.runpytest(*ignore_parametrized_marks_args)
|
||||
result.stdout.fnmatch_lines("* 2 passed, 1 xpassed in *")
|
||||
|
||||
@ignore_parametrized_marks
|
||||
def test_parametrize_xfail_passed(self, testdir):
|
||||
def test_parametrize_xfail_passed(self, testdir, ignore_parametrized_marks_args):
|
||||
testdir.makepyfile(
|
||||
"""
|
||||
import pytest
|
||||
|
@ -848,7 +845,7 @@ class TestFunction(object):
|
|||
pass
|
||||
"""
|
||||
)
|
||||
result = testdir.runpytest()
|
||||
result = testdir.runpytest(*ignore_parametrized_marks_args)
|
||||
result.stdout.fnmatch_lines("* 3 passed in *")
|
||||
|
||||
def test_function_original_name(self, testdir):
|
||||
|
|
|
@ -326,6 +326,7 @@ def test_warning_captured_hook(testdir, pyfile_with_warnings):
|
|||
@pytest.mark.filterwarnings("always")
|
||||
def test_collection_warnings(testdir):
|
||||
"""
|
||||
Check that we also capture warnings issued during test collection (#3251).
|
||||
"""
|
||||
testdir.makepyfile(
|
||||
"""
|
||||
|
@ -346,3 +347,75 @@ def test_collection_warnings(testdir):
|
|||
"* 1 passed, 1 warnings*",
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
class TestDeprecationWarningsByDefault:
|
||||
"""
|
||||
Note: all pytest runs are executed in a subprocess so we don't inherit warning filters
|
||||
from pytest's own test suite
|
||||
"""
|
||||
|
||||
def create_file(self, testdir, mark=""):
|
||||
testdir.makepyfile(
|
||||
"""
|
||||
import pytest, warnings
|
||||
|
||||
warnings.warn(DeprecationWarning("collection"))
|
||||
|
||||
{mark}
|
||||
def test_foo():
|
||||
warnings.warn(PendingDeprecationWarning("test run"))
|
||||
""".format(
|
||||
mark=mark
|
||||
)
|
||||
)
|
||||
|
||||
def test_shown_by_default(self, testdir):
|
||||
self.create_file(testdir)
|
||||
result = testdir.runpytest_subprocess()
|
||||
result.stdout.fnmatch_lines(
|
||||
[
|
||||
"*== %s ==*" % WARNINGS_SUMMARY_HEADER,
|
||||
"*test_shown_by_default.py:3: DeprecationWarning: collection",
|
||||
"*test_shown_by_default.py:7: PendingDeprecationWarning: test run",
|
||||
"* 1 passed, 2 warnings*",
|
||||
]
|
||||
)
|
||||
|
||||
def test_hidden_by_ini(self, testdir):
|
||||
self.create_file(testdir)
|
||||
testdir.makeini(
|
||||
"""
|
||||
[pytest]
|
||||
filterwarnings = once::UserWarning
|
||||
"""
|
||||
)
|
||||
result = testdir.runpytest_subprocess()
|
||||
assert WARNINGS_SUMMARY_HEADER not in result.stdout.str()
|
||||
|
||||
def test_hidden_by_mark(self, testdir):
|
||||
"""Should hide the deprecation warning from the function, but the warning during collection should
|
||||
be displayed normally.
|
||||
"""
|
||||
self.create_file(
|
||||
testdir, mark='@pytest.mark.filterwarnings("once::UserWarning")'
|
||||
)
|
||||
result = testdir.runpytest_subprocess()
|
||||
result.stdout.fnmatch_lines(
|
||||
[
|
||||
"*== %s ==*" % WARNINGS_SUMMARY_HEADER,
|
||||
"*test_hidden_by_mark.py:3: DeprecationWarning: collection",
|
||||
"* 1 passed, 1 warnings*",
|
||||
]
|
||||
)
|
||||
|
||||
def test_hidden_by_cmdline(self, testdir):
|
||||
self.create_file(testdir)
|
||||
result = testdir.runpytest_subprocess("-W", "once::UserWarning")
|
||||
assert WARNINGS_SUMMARY_HEADER not in result.stdout.str()
|
||||
|
||||
def test_hidden_by_system(self, testdir, monkeypatch):
|
||||
self.create_file(testdir)
|
||||
monkeypatch.setenv(str("PYTHONWARNINGS"), str("once::UserWarning"))
|
||||
result = testdir.runpytest_subprocess()
|
||||
assert WARNINGS_SUMMARY_HEADER not in result.stdout.str()
|
||||
|
|
Loading…
Reference in New Issue