Merge pull request #4349 from nicoddemus/pytest4-warnings-as-errors
Change RemovedInPytest4Warnings to errors by default
commit 8198e7cd33

@@ -0,0 +1,23 @@
+**RemovedInPytest4Warnings are now errors by default.**
+
+Following our plan to remove deprecated features with as little disruption as
+possible, all warnings of type ``RemovedInPytest4Warning`` now generate errors
+instead of warning messages.
+
+**The affected features will be effectively removed in pytest 4.1**, so please consult the
+`Deprecations and Removals <https://docs.pytest.org/en/latest/deprecations.html>`__
+section in the docs for directions on how to update existing code.
+
+In the pytest ``4.0.X`` series, it is possible to change the errors back into warnings as a
+stop-gap measure by adding this to your ``pytest.ini`` file:
+
+.. code-block:: ini
+
+    [pytest]
+    filterwarnings =
+        ignore::pytest.RemovedInPytest4Warning
+
+But this will stop working when pytest ``4.1`` is released.
+
+**If you have concerns** about the removal of a specific feature, please add a
+comment to `#4348 <https://github.com/pytest-dev/pytest/issues/4348>`__.
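
The same ``ignore`` filter can also be passed on the command line rather than through ``pytest.ini``. This usage sketch mirrors the ``cmdline`` case of the new test at the bottom of this diff:

.. code-block:: bash

    pytest -W ignore::pytest.RemovedInPytest4Warning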

@@ -1,6 +1,7 @@
 import six

 import _pytest._code
 import pytest
+from pytest import raises



@@ -16,15 +17,11 @@ def otherfunc_multi(a, b):
     assert a == b


+@pytest.mark.parametrize("param1, param2", [(3, 6)])
 def test_generative(param1, param2):
     assert param1 * 2 < param2


-def pytest_generate_tests(metafunc):
-    if "param1" in metafunc.fixturenames:
-        metafunc.addcall(funcargs=dict(param1=3, param2=6))
-
-
 class TestFailing(object):
     def test_simple(self):
         def f():

@@ -9,6 +9,8 @@ from contextlib import contextmanager
 import pytest
 from _pytest import compat

+SHOW_PYTEST_WARNINGS_ARG = "-Walways::pytest.RemovedInPytest4Warning"
+

 def _setoption(wmod, arg):
     """
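
The new ``SHOW_PYTEST_WARNINGS_ARG`` constant turns the errors back into always-displayed warnings for the inner pytest runs spawned throughout this test suite. A minimal self-contained sketch of that pattern, assuming pytest ``4.0.x`` with the ``pytester`` plugin (the test file and name are hypothetical):

.. code-block:: python

    from _pytest.warnings import SHOW_PYTEST_WARNINGS_ARG

    pytest_plugins = "pytester"  # provides the `testdir` fixture


    def test_warning_shown_not_raised(testdir):
        # The inner test emits a RemovedInPytest4Warning; the "-Walways::..."
        # argument shows it as a warning instead of promoting it to an error.
        testdir.makepyfile(
            """
            import warnings, pytest

            def test():
                warnings.warn(pytest.RemovedInPytest4Warning("some warning"))
            """
        )
        result = testdir.runpytest(SHOW_PYTEST_WARNINGS_ARG)
        result.stdout.fnmatch_lines(["*1 passed*"])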

@@ -77,6 +79,8 @@ def catch_warnings_for_item(config, ihook, when, item):
         warnings.filterwarnings("always", category=DeprecationWarning)
         warnings.filterwarnings("always", category=PendingDeprecationWarning)

+        warnings.filterwarnings("error", category=pytest.RemovedInPytest4Warning)
+
         # filters should have this precedence: mark, cmdline options, ini
         # filters should be applied in the inverse order of precedence
         for arg in inifilters:
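
The two comments above capture the design: mark-level filters beat command-line filters, which beat ini-level filters, and this falls out of applying them in the opposite order. A standalone sketch of the mechanism (illustrative only, not pytest's code):

.. code-block:: python

    import warnings

    # warnings.filterwarnings() prepends to the filter list, so the filter
    # applied last is consulted first.  Applying ini -> cmdline -> mark
    # therefore lets the mark-level filter win.
    warnings.resetwarnings()
    warnings.filterwarnings("error", category=UserWarning)   # "ini" level, applied first
    warnings.filterwarnings("ignore", category=UserWarning)  # "mark" level, applied last

    warnings.warn("demo", UserWarning)  # matches "ignore" first, so nothing is raised
    print("no error was raised")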

@@ -14,6 +14,7 @@ import six
 import pytest
 from _pytest.main import EXIT_NOTESTSCOLLECTED
 from _pytest.main import EXIT_USAGEERROR
+from _pytest.warnings import SHOW_PYTEST_WARNINGS_ARG


 def prepend_pythonpath(*dirs):

@@ -307,7 +308,7 @@ class TestGeneralUsage(object):
             """
         )
         p = testdir.makepyfile("""def test_func(x): pass""")
-        res = testdir.runpytest(p)
+        res = testdir.runpytest(p, SHOW_PYTEST_WARNINGS_ARG)
         assert res.ret == 0
         res.stdout.fnmatch_lines(["*1 skipped*"])


@@ -321,7 +322,9 @@
                 pass
             """
         )
-        res = testdir.runpytest(p.basename + "::" + "test_func[1]")
+        res = testdir.runpytest(
+            p.basename + "::" + "test_func[1]", SHOW_PYTEST_WARNINGS_ARG
+        )
         assert res.ret == 0
         res.stdout.fnmatch_lines(["*1 passed*"])


@@ -5,11 +5,11 @@ from __future__ import print_function
 import os

 import pytest
+from _pytest.warnings import SHOW_PYTEST_WARNINGS_ARG

 pytestmark = pytest.mark.pytester_example_path("deprecated")


-@pytest.mark.filterwarnings("default")
 def test_yield_tests_deprecation(testdir):
     testdir.makepyfile(
         """

@@ -23,7 +23,7 @@ def test_yield_tests_deprecation(testdir):
                 yield func1, 1, 1
         """
     )
-    result = testdir.runpytest()
+    result = testdir.runpytest(SHOW_PYTEST_WARNINGS_ARG)
     result.stdout.fnmatch_lines(
         [
             "*test_yield_tests_deprecation.py:3:*yield tests are deprecated*",

@@ -41,7 +41,7 @@ def test_compat_properties_deprecation(testdir):
             print(request.node.Module)
         """
     )
-    result = testdir.runpytest()
+    result = testdir.runpytest(SHOW_PYTEST_WARNINGS_ARG)
     result.stdout.fnmatch_lines(
         [
             "*test_compat_properties_deprecation.py:2:*usage of Function.Module is deprecated, "

@@ -63,7 +63,7 @@ def test_cached_setup_deprecation(testdir):
             assert fix == 1
         """
     )
-    result = testdir.runpytest()
+    result = testdir.runpytest(SHOW_PYTEST_WARNINGS_ARG)
    result.stdout.fnmatch_lines(
        [
            "*test_cached_setup_deprecation.py:4:*cached_setup is deprecated*",

@@ -93,7 +93,7 @@ def test_custom_class_deprecation(testdir):
             pass
         """
     )
-    result = testdir.runpytest()
+    result = testdir.runpytest(SHOW_PYTEST_WARNINGS_ARG)
     result.stdout.fnmatch_lines(
         [
             '*test_custom_class_deprecation.py:1:*"Class" objects in collectors of type "MyModule*',

@@ -102,7 +102,6 @@ def test_custom_class_deprecation(testdir):
     )


-@pytest.mark.filterwarnings("default")
 def test_funcarg_prefix_deprecation(testdir):
     testdir.makepyfile(
         """

@@ -113,7 +112,7 @@ def test_funcarg_prefix_deprecation(testdir):
             assert value == 10
         """
     )
-    result = testdir.runpytest("-ra")
+    result = testdir.runpytest("-ra", SHOW_PYTEST_WARNINGS_ARG)
     result.stdout.fnmatch_lines(
         [
             (

@@ -198,7 +197,6 @@ def test_resultlog_is_deprecated(testdir):
     )


-@pytest.mark.filterwarnings("always:Metafunc.addcall is deprecated")
 def test_metafunc_addcall_deprecated(testdir):
     testdir.makepyfile(
         """

@@ -209,7 +207,7 @@ def test_metafunc_addcall_deprecated(testdir):
             pass
         """
     )
-    res = testdir.runpytest("-s")
+    res = testdir.runpytest("-s", SHOW_PYTEST_WARNINGS_ARG)
     assert res.ret == 0
     res.stdout.fnmatch_lines(
         ["*Metafunc.addcall is deprecated*", "*2 passed, 2 warnings*"]

@@ -263,7 +261,7 @@ def test_pytest_plugins_in_non_top_level_conftest_deprecated(testdir):
             pass
         """
     )
-    res = testdir.runpytest()
+    res = testdir.runpytest(SHOW_PYTEST_WARNINGS_ARG)
     assert res.ret == 0
     msg = str(PYTEST_PLUGINS_FROM_NON_TOP_LEVEL_CONFTEST).splitlines()[0]
     res.stdout.fnmatch_lines(

@@ -292,6 +290,7 @@ def test_pytest_plugins_in_non_top_level_conftest_deprecated_pyargs(
     testdir.syspathinsert(testdir.tmpdir.join("src"))

     args = ("--pyargs", "pkg") if use_pyargs else ()
+    args += (SHOW_PYTEST_WARNINGS_ARG,)
     res = testdir.runpytest(*args)
     assert res.ret == 0
     msg = str(PYTEST_PLUGINS_FROM_NON_TOP_LEVEL_CONFTEST).splitlines()[0]

@@ -7,6 +7,7 @@ import _pytest._code
 import pytest
 from _pytest.main import EXIT_NOTESTSCOLLECTED
 from _pytest.nodes import Collector
+from _pytest.warnings import SHOW_PYTEST_WARNINGS_ARG


 class TestModule(object):

@@ -370,7 +371,7 @@ class TestGenerator(object):
                 yield assert_order_of_execution
             """
         )
-        reprec = testdir.inline_run(o)
+        reprec = testdir.inline_run(o, SHOW_PYTEST_WARNINGS_ARG)
         passed, skipped, failed = reprec.countoutcomes()
         assert passed == 7
         assert not skipped and not failed

@@ -404,7 +405,7 @@ class TestGenerator(object):
                 yield assert_order_of_execution
             """
         )
-        reprec = testdir.inline_run(o)
+        reprec = testdir.inline_run(o, SHOW_PYTEST_WARNINGS_ARG)
         passed, skipped, failed = reprec.countoutcomes()
         assert passed == 4
         assert not skipped and not failed

@@ -448,7 +449,7 @@ class TestGenerator(object):
                 assert setuplist[1] != setuplist[2], setuplist
             """
         )
-        reprec = testdir.inline_run(o, "-v")
+        reprec = testdir.inline_run(o, "-v", SHOW_PYTEST_WARNINGS_ARG)
         passed, skipped, failed = reprec.countoutcomes()
         assert passed == 4
         assert not skipped and not failed

@@ -1380,7 +1381,7 @@ def test_collector_attributes(testdir):
             pass
         """
     )
-    result = testdir.runpytest()
+    result = testdir.runpytest(SHOW_PYTEST_WARNINGS_ARG)
     result.stdout.fnmatch_lines(["*1 passed*"])



@@ -1407,7 +1408,7 @@ def test_customize_through_attributes(testdir):
             pass
         """
     )
-    result = testdir.runpytest("--collect-only")
+    result = testdir.runpytest("--collect-only", SHOW_PYTEST_WARNINGS_ARG)
     result.stdout.fnmatch_lines(["*MyClass*", "*MyFunction*test_hello*"])



@@ -8,6 +8,7 @@ from _pytest.fixtures import FixtureLookupError
 from _pytest.fixtures import FixtureRequest
 from _pytest.pathlib import Path
 from _pytest.pytester import get_public_names
+from _pytest.warnings import SHOW_PYTEST_WARNINGS_ARG


 def test_getfuncargnames():

@@ -975,7 +976,8 @@ class TestRequestCachedSetup(object):
             class TestClass(object):
                 def test_func1a(self, something):
                     assert something == "hello"
-            """
+            """,
+            SHOW_PYTEST_WARNINGS_ARG,
         )
         reprec.assertoutcome(passed=2)


@@ -997,7 +999,8 @@ class TestRequestCachedSetup(object):
                     assert something == "hello"
                 def test_func2b(self, something):
                     assert something == "hello"
-            """
+            """,
+            SHOW_PYTEST_WARNINGS_ARG,
         )
         reprec.assertoutcome(passed=4)


@@ -1057,7 +1060,7 @@ class TestRequestCachedSetup(object):
                 assert arg1 != arg2
             """
         )
-        result = testdir.runpytest("-v")
+        result = testdir.runpytest("-v", SHOW_PYTEST_WARNINGS_ARG)
         result.stdout.fnmatch_lines(["*1 passed*"])

     def test_request_cached_setup_getfixturevalue(self, testdir):

@@ -1076,7 +1079,7 @@ class TestRequestCachedSetup(object):
                 assert arg1 == 11
             """
         )
-        result = testdir.runpytest("-v")
+        result = testdir.runpytest("-v", SHOW_PYTEST_WARNINGS_ARG)
         result.stdout.fnmatch_lines(["*1 passed*"])

     def test_request_cached_setup_functional(self, testdir):

@@ -1107,7 +1110,7 @@ class TestRequestCachedSetup(object):
                 assert test_0.values == [2]
             """
         )
-        result = testdir.runpytest("-v")
+        result = testdir.runpytest("-v", SHOW_PYTEST_WARNINGS_ARG)
         result.stdout.fnmatch_lines(["*3 passed*"])

     def test_issue117_sessionscopeteardown(self, testdir):

@@ -1126,7 +1129,7 @@ class TestRequestCachedSetup(object):
                 pass
             """
         )
-        result = testdir.runpytest()
+        result = testdir.runpytest(SHOW_PYTEST_WARNINGS_ARG)
         assert result.ret != 0
         result.stdout.fnmatch_lines(["*3/x*", "*ZeroDivisionError*"])


@@ -1868,7 +1871,7 @@ class TestAutouseManagement(object):
                 yield f, -3
             """
         )
-        reprec = testdir.inline_run()
+        reprec = testdir.inline_run(SHOW_PYTEST_WARNINGS_ARG)
         reprec.assertoutcome(passed=2)

     def test_funcarg_and_setup(self, testdir):

@@ -2348,7 +2351,7 @@ class TestFixtureMarker(object):
             """
             % method
         )
-        result = testdir.runpytest()
+        result = testdir.runpytest(SHOW_PYTEST_WARNINGS_ARG)
         assert result.ret != 0
         result.stdout.fnmatch_lines(
             ["*ScopeMismatch*You tried*function*session*request*"]

@@ -10,6 +10,7 @@ from hypothesis import strategies
 import pytest
 from _pytest import fixtures
 from _pytest import python
+from _pytest.warnings import SHOW_PYTEST_WARNINGS_ARG

 PY3 = sys.version_info >= (3, 0)


@@ -444,7 +445,7 @@ class TestMetafunc(object):
                 pass
             """
         )
-        result = testdir.runpytest("--collect-only")
+        result = testdir.runpytest("--collect-only", SHOW_PYTEST_WARNINGS_ARG)
         result.stdout.fnmatch_lines(
             [
                 "<Module 'test_parametrize_ids_exception.py'>",

@@ -866,7 +867,7 @@ class TestMetafuncFunctional(object):
                 assert metafunc.cls == TestClass
             """
         )
-        result = testdir.runpytest(p, "-v")
+        result = testdir.runpytest(p, "-v", SHOW_PYTEST_WARNINGS_ARG)
         result.assert_outcomes(passed=2)

     def test_addcall_with_two_funcargs_generators(self, testdir):

@@ -887,7 +888,7 @@ class TestMetafuncFunctional(object):
                 assert arg1 == arg2
             """
         )
-        result = testdir.runpytest("-v", p)
+        result = testdir.runpytest("-v", p, SHOW_PYTEST_WARNINGS_ARG)
         result.stdout.fnmatch_lines(
             ["*test_myfunc*0*PASS*", "*test_myfunc*1*FAIL*", "*1 failed, 1 passed*"]
         )

@@ -910,7 +911,7 @@ class TestMetafuncFunctional(object):
                 assert arg1 in (10, 20)
             """
         )
-        result = testdir.runpytest("-v", p)
+        result = testdir.runpytest("-v", p, SHOW_PYTEST_WARNINGS_ARG)
         result.stdout.fnmatch_lines(
             [
                 "*test_func1*0*PASS*",

@@ -960,7 +961,7 @@ class TestMetafuncFunctional(object):
                 assert arg1 == arg2
             """
         )
-        result = testdir.runpytest("-v", p)
+        result = testdir.runpytest("-v", p, SHOW_PYTEST_WARNINGS_ARG)
         result.stdout.fnmatch_lines(
             [
                 "*test_myfunc*hello*PASS*",

@@ -980,7 +981,7 @@ class TestMetafuncFunctional(object):
                 assert hello == "world"
             """
         )
-        result = testdir.runpytest("-v", p)
+        result = testdir.runpytest("-v", p, SHOW_PYTEST_WARNINGS_ARG)
         result.stdout.fnmatch_lines(["*test_myfunc*hello*PASS*", "*1 passed*"])

     def test_two_functions_not_same_instance(self, testdir):

@@ -996,7 +997,7 @@ class TestMetafuncFunctional(object):
                 self.x = 1
             """
         )
-        result = testdir.runpytest("-v", p)
+        result = testdir.runpytest("-v", p, SHOW_PYTEST_WARNINGS_ARG)
         result.stdout.fnmatch_lines(
             ["*test_func*0*PASS*", "*test_func*1*PASS*", "*2 pass*"]
         )

@@ -1014,7 +1015,7 @@ class TestMetafuncFunctional(object):
                 self.val = 1
             """
         )
-        result = testdir.runpytest(p)
+        result = testdir.runpytest(p, SHOW_PYTEST_WARNINGS_ARG)
         result.assert_outcomes(passed=1)

     def test_parametrize_functional2(self, testdir):

@@ -1536,7 +1537,7 @@ class TestMarkersWithParametrization(object):
                 assert n + 1 == expected
         """
         testdir.makepyfile(s)
-        rec = testdir.inline_run("-m", "foo")
+        rec = testdir.inline_run("-m", "foo", SHOW_PYTEST_WARNINGS_ARG)
         passed, skipped, fail = rec.listoutcomes()
         assert len(passed) == 1
         assert len(skipped) == 0

@@ -1576,7 +1577,7 @@ class TestMarkersWithParametrization(object):
                 assert n + 1 == expected
         """
         testdir.makepyfile(s)
-        reprec = testdir.inline_run()
+        reprec = testdir.inline_run(SHOW_PYTEST_WARNINGS_ARG)
         # xfail is skip??
         reprec.assertoutcome(passed=2, skipped=1)


@@ -1593,7 +1594,7 @@ class TestMarkersWithParametrization(object):
                 assert n % 2 == 0
         """
         testdir.makepyfile(s)
-        reprec = testdir.inline_run()
+        reprec = testdir.inline_run(SHOW_PYTEST_WARNINGS_ARG)
         reprec.assertoutcome(passed=2, skipped=1)

     def test_xfail_with_arg(self, testdir):

@@ -1609,7 +1610,7 @@ class TestMarkersWithParametrization(object):
                 assert n + 1 == expected
         """
         testdir.makepyfile(s)
-        reprec = testdir.inline_run()
+        reprec = testdir.inline_run(SHOW_PYTEST_WARNINGS_ARG)
         reprec.assertoutcome(passed=2, skipped=1)

     def test_xfail_with_kwarg(self, testdir):

@@ -1625,7 +1626,7 @@ class TestMarkersWithParametrization(object):
                 assert n + 1 == expected
         """
         testdir.makepyfile(s)
-        reprec = testdir.inline_run()
+        reprec = testdir.inline_run(SHOW_PYTEST_WARNINGS_ARG)
         reprec.assertoutcome(passed=2, skipped=1)

     def test_xfail_with_arg_and_kwarg(self, testdir):

@@ -1641,7 +1642,7 @@ class TestMarkersWithParametrization(object):
                 assert n + 1 == expected
         """
         testdir.makepyfile(s)
-        reprec = testdir.inline_run()
+        reprec = testdir.inline_run(SHOW_PYTEST_WARNINGS_ARG)
         reprec.assertoutcome(passed=2, skipped=1)

     @pytest.mark.parametrize("strict", [True, False])

@@ -1660,7 +1661,7 @@ class TestMarkersWithParametrization(object):
                 strict=strict
             )
         testdir.makepyfile(s)
-        reprec = testdir.inline_run()
+        reprec = testdir.inline_run(SHOW_PYTEST_WARNINGS_ARG)
         passed, failed = (2, 1) if strict else (3, 0)
         reprec.assertoutcome(passed=passed, failed=failed)


@@ -1684,7 +1685,7 @@ class TestMarkersWithParametrization(object):
                 assert n + 1 == expected
         """
         testdir.makepyfile(s)
-        reprec = testdir.inline_run()
+        reprec = testdir.inline_run(SHOW_PYTEST_WARNINGS_ARG)
         reprec.assertoutcome(passed=2, skipped=2)

     @pytest.mark.issue290

@@ -12,6 +12,7 @@ from _pytest.config.findpaths import determine_setup
 from _pytest.config.findpaths import get_common_ancestor
 from _pytest.config.findpaths import getcfg
 from _pytest.main import EXIT_NOTESTSCOLLECTED
+from _pytest.warnings import SHOW_PYTEST_WARNINGS_ARG


 class TestParseIni(object):

@@ -808,7 +809,7 @@ class TestLegacyWarning(object):
                 assert conftest.values == [1]
             """
         )
-        result = testdir.runpytest()
+        result = testdir.runpytest(SHOW_PYTEST_WARNINGS_ARG)
         result.stdout.fnmatch_lines(
             ["*hello", "*config.warn has been deprecated*", "*1 passed*"]
         )

@@ -832,10 +833,12 @@ class TestLegacyWarning(object):
                 code_kw=code_kw, message_kw=message_kw
             )
         )
-        result = testdir.runpytest("--disable-pytest-warnings")
+        result = testdir.runpytest(
+            "--disable-pytest-warnings", SHOW_PYTEST_WARNINGS_ARG
+        )
         assert "hello" not in result.stdout.str()

-        result = testdir.runpytest()
+        result = testdir.runpytest(SHOW_PYTEST_WARNINGS_ARG)
         result.stdout.fnmatch_lines(
             """
             ===*warnings summary*===

@@ -5,6 +5,8 @@ from __future__ import print_function
 import os
 import sys

+from _pytest.warnings import SHOW_PYTEST_WARNINGS_ARG
+
 try:
     import mock
 except ImportError:

@@ -862,7 +864,7 @@ class TestFunctional(object):
                 assert marker.kwargs == {}
             """
         )
-        reprec = testdir.inline_run("-m", "mark1")
+        reprec = testdir.inline_run("-m", "mark1", SHOW_PYTEST_WARNINGS_ARG)
         reprec.assertoutcome(passed=1)

     def assert_markers(self, items, **expected):

@@ -904,7 +906,7 @@ class TestFunctional(object):
                 assert True
             """
         )
-        reprec = testdir.inline_run()
+        reprec = testdir.inline_run(SHOW_PYTEST_WARNINGS_ARG)
         reprec.assertoutcome(skipped=1)



@@ -1248,5 +1250,5 @@ def test_markers_from_parametrize(testdir):
         """
     )

-    result = testdir.runpytest()
+    result = testdir.runpytest(SHOW_PYTEST_WARNINGS_ARG)
     result.assert_outcomes(passed=4)

@@ -3,6 +3,7 @@ from __future__ import division
 from __future__ import print_function

 import pytest
+from _pytest.warnings import SHOW_PYTEST_WARNINGS_ARG


 def setup_module(mod):

@@ -224,7 +225,7 @@ def test_nose_test_generator_fixtures(testdir):
             eq_(self.called, expect)
         """
     )
-    result = testdir.runpytest(p, "-p", "nose")
+    result = testdir.runpytest(p, "-p", "nose", SHOW_PYTEST_WARNINGS_ARG)
     result.stdout.fnmatch_lines(["*10 passed*"])



@@ -8,6 +8,7 @@ import sys

 import _pytest._code
 import pytest
+from _pytest.warnings import SHOW_PYTEST_WARNINGS_ARG

 try:
     breakpoint

@@ -818,7 +819,9 @@ class TestTraceOption:
                 yield is_equal, 1, 1
             """
         )
-        child = testdir.spawn_pytest("--trace " + str(p1))
+        child = testdir.spawn_pytest(
+            "{} --trace {}".format(SHOW_PYTEST_WARNINGS_ARG, str(p1))
+        )
         child.expect("is_equal")
         child.expect("Pdb")
         child.sendeof()

@@ -7,6 +7,7 @@ from __future__ import division
 from __future__ import print_function

 import pytest
+from _pytest.warnings import SHOW_PYTEST_WARNINGS_ARG


 def test_module_and_function_setup(testdir):

@@ -189,7 +190,8 @@ def test_method_generator_setup(testdir):
                 assert self.classsetup
                 assert self.methsetup == self.test_generate
                 assert value == 5
-        """
+        """,
+        SHOW_PYTEST_WARNINGS_ARG,
     )
     reprec.assertoutcome(passed=1, failed=1)


@@ -219,7 +221,8 @@ def test_func_generator_setup(testdir):
             assert x == [1]
             yield check
             assert x == [1]
-        """
+        """,
+        SHOW_PYTEST_WARNINGS_ARG,
     )
     rep = reprec.matchreport("test_one", names="pytest_runtest_logreport")
     assert rep.passed

@@ -41,6 +41,14 @@ def test_success():
         """
     )

+    # customize the cache directory so we don't use tox's cache directory, which makes tests in this module flaky
+    testdir.makeini(
+        """
+        [pytest]
+        cache_dir = .cache
+        """
+    )
+
     return testdir



@@ -20,7 +20,7 @@ from _pytest.terminal import build_summary_stats_line
 from _pytest.terminal import getreportopt
 from _pytest.terminal import repr_pythonversion
 from _pytest.terminal import TerminalReporter
-
+from _pytest.warnings import SHOW_PYTEST_WARNINGS_ARG

 DistInfo = collections.namedtuple("DistInfo", ["project_name", "version"])


@@ -602,7 +602,7 @@ class TestTerminalFunctional(object):
                 yield check, 0
             """
         )
-        result = testdir.runpytest(p1, "-v")
+        result = testdir.runpytest(p1, "-v", SHOW_PYTEST_WARNINGS_ARG)
         result.stdout.fnmatch_lines(
             [
                 "*test_verbose_reporting.py::test_fail *FAIL*",

@@ -616,7 +616,7 @@
         if not pytestconfig.pluginmanager.get_plugin("xdist"):
             pytest.skip("xdist plugin not installed")

-        result = testdir.runpytest(p1, "-v", "-n 1")
+        result = testdir.runpytest(p1, "-v", "-n 1", SHOW_PYTEST_WARNINGS_ARG)
         result.stdout.fnmatch_lines(["*FAIL*test_verbose_reporting.py::test_fail*"])
         assert result.ret == 1


@@ -9,6 +9,7 @@ import six
 import pytest
 from _pytest import pathlib
 from _pytest.pathlib import Path
+from _pytest.warnings import SHOW_PYTEST_WARNINGS_ARG


 def test_tmpdir_fixture(testdir):

@@ -67,7 +68,7 @@ def test_basetemp(testdir):
         pytest.ensuretemp("hello")
     """
     )
-    result = testdir.runpytest(p, "--basetemp=%s" % mytemp)
+    result = testdir.runpytest(p, "--basetemp=%s" % mytemp, SHOW_PYTEST_WARNINGS_ARG)
     assert result.ret == 0
     assert mytemp.join("hello").check()


@@ -592,3 +592,34 @@ def test_infinite_loop_warning_against_unicode_usage_py2(testdir):
     )
     result = testdir.runpytest_subprocess()
     result.stdout.fnmatch_lines(["*1 passed, * warnings in*"])
+
+
+@pytest.mark.parametrize("change_default", [None, "ini", "cmdline"])
+def test_removed_in_pytest4_warning_as_error(testdir, change_default):
+    testdir.makepyfile(
+        """
+        import warnings, pytest
+        def test():
+            warnings.warn(pytest.RemovedInPytest4Warning("some warning"))
+        """
+    )
+    if change_default == "ini":
+        testdir.makeini(
+            """
+            [pytest]
+            filterwarnings =
+                ignore::pytest.RemovedInPytest4Warning
+            """
+        )
+
+    args = (
+        ("-Wignore::pytest.RemovedInPytest4Warning",)
+        if change_default == "cmdline"
+        else ()
+    )
+    result = testdir.runpytest(*args)
+    if change_default is None:
+        result.stdout.fnmatch_lines(["* 1 failed in *"])
+    else:
+        assert change_default in ("ini", "cmdline")
+        result.stdout.fnmatch_lines(["* 1 passed in *"])
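
Since the ``error`` filter is now installed by default, the promoted warning can also be asserted directly with ``pytest.raises``. A hypothetical standalone test, assuming pytest ``4.0.x`` defaults with no filter overrides:

.. code-block:: python

    import warnings

    import pytest


    def test_warning_is_promoted_to_error():
        # Under the new default filter, emitting the warning raises it.
        with pytest.raises(pytest.RemovedInPytest4Warning):
            warnings.warn(pytest.RemovedInPytest4Warning("some warning"))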

tox.ini

@@ -207,6 +207,7 @@ filterwarnings =
     error
     ignore:yield tests are deprecated, and scheduled to be removed in pytest 4.0:pytest.RemovedInPytest4Warning
     ignore:Metafunc.addcall is deprecated and scheduled to be removed in pytest 4.0:pytest.RemovedInPytest4Warning
+    ignore::pytest.RemovedInPytest4Warning
     ignore:Module already imported so cannot be rewritten:pytest.PytestWarning
     # produced by path.local
     ignore:bad escape.*:DeprecationWarning:re