Remove 'RemovedInPytest4Warning'

parent 3a17c1b30b
commit c470ade0a5

@@ -17,6 +17,8 @@ removed:

 * ``pytest_logwarning`` hook.

+* ``RemovedInPytest4Warning`` warning type.
+
 For more information consult
 `Deprecations and Removals <https://docs.pytest.org/en/latest/deprecations.html>`__ in the docs.

@@ -430,5 +430,3 @@ The following warning types are used by pytest and are part of the public API:
 .. autoclass:: pytest.PytestUnhandledCoroutineWarning

 .. autoclass:: pytest.PytestUnknownMarkWarning
-
-.. autoclass:: pytest.RemovedInPytest4Warning

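Since these classes are documented as part of the public API, tests can assert on them directly with pytest.warns. A minimal sketch (the test function is illustrative, not part of this commit):

    import warnings

    import pytest

    def test_emits_pytest_warning():
        # pytest.warns fails the test unless a matching warning is raised
        with pytest.warns(pytest.PytestWarning):
            warnings.warn(pytest.PytestWarning("something pytest-specific"))
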
@@ -103,16 +103,6 @@ class PytestUnknownMarkWarning(PytestWarning):
     __module__ = "pytest"


-class RemovedInPytest4Warning(PytestDeprecationWarning):
-    """
-    Bases: :class:`pytest.PytestDeprecationWarning`.
-
-    Warning class for features scheduled to be removed in pytest 4.0.
-    """
-
-    __module__ = "pytest"
-
-
 @attr.s
 class UnformattedWarning:
     """Used to hold warnings that need to format their message at runtime, as opposed to a direct message.

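The UnformattedWarning helper that remains above defers message formatting to call time. A hedged sketch of how such a helper can work, based only on the docstring shown here (field and method names are assumptions; the real implementation may differ):

    import warnings

    import attr

    @attr.s
    class UnformattedWarning:
        category = attr.ib()  # warning class to instantiate later
        template = attr.ib()  # message with {placeholders} filled at call time

        def format(self, **kwargs):
            # produce a concrete warning instance from the template
            return self.category(self.template.format(**kwargs))

    SOME_DEPRECATION = UnformattedWarning(
        DeprecationWarning, "option {name!r} is deprecated, use {new!r} instead"
    )
    warnings.warn(SOME_DEPRECATION.format(name="--old", new="--new"))
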
@@ -4,8 +4,6 @@ from contextlib import contextmanager

 import pytest

-SHOW_PYTEST_WARNINGS_ARG = "-Walways::pytest.RemovedInPytest4Warning"
-

 def _setoption(wmod, arg):
     """

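The removed constant is an ordinary "action::category" warning-filter spec passed through the -W option. For reference, the same kind of spec expressed as a direct call, using a builtin category since RemovedInPytest4Warning itself is going away:

    import warnings

    # equivalent to passing "-W always::DeprecationWarning" on the command line
    warnings.filterwarnings("always", category=DeprecationWarning)
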
@@ -74,9 +72,6 @@ def catch_warnings_for_item(config, ihook, when, item):
         warnings.filterwarnings("always", category=DeprecationWarning)
         warnings.filterwarnings("always", category=PendingDeprecationWarning)

-        warnings.filterwarnings("error", category=pytest.RemovedInPytest4Warning)
-        warnings.filterwarnings("error", category=pytest.PytestDeprecationWarning)
-
         # filters should have this precedence: mark, cmdline options, ini
         # filters should be applied in the inverse order of precedence
         for arg in inifilters:

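The two comments rely on a property of the warnings module worth spelling out: warnings.filterwarnings() prepends to the filter list, so the filter installed last is consulted first, and applying filters in inverse order of precedence lets the highest-precedence source win. A self-contained demonstration:

    import warnings

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("error")  # start from a known state
        warnings.filterwarnings("ignore", category=UserWarning)  # lower precedence, added first
        warnings.filterwarnings("always", category=UserWarning)  # higher precedence, added last
        warnings.warn("hello", UserWarning)

    assert len(caught) == 1  # the filter added last won
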
@@ -44,7 +44,7 @@ from _pytest.warning_types import PytestExperimentalApiWarning
 from _pytest.warning_types import PytestUnhandledCoroutineWarning
 from _pytest.warning_types import PytestUnknownMarkWarning
 from _pytest.warning_types import PytestWarning
-from _pytest.warning_types import RemovedInPytest4Warning


 set_trace = __pytestPDB.set_trace

@@ -84,7 +84,6 @@ __all__ = [
     "PytestWarning",
     "raises",
     "register_assert_rewrite",
-    "RemovedInPytest4Warning",
     "Session",
     "set_trace",
     "skip",

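After this change pytest.RemovedInPytest4Warning no longer exists, so plugins that support both pytest 4 and 5 cannot reference it unconditionally. A hedged compatibility sketch (the variable name is an assumption):

    import pytest

    # attribute lookup instead of a direct reference, so this runs on any version
    REMOVED_IN_PYTEST4 = getattr(pytest, "RemovedInPytest4Warning", None)

    if REMOVED_IN_PYTEST4 is None:
        # pytest >= 5.0: the warning class was removed
        pass
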
@@ -9,7 +9,6 @@ import py

 import pytest
 from _pytest.main import ExitCode
-from _pytest.warnings import SHOW_PYTEST_WARNINGS_ARG


 def prepend_pythonpath(*dirs):

@@ -343,7 +342,7 @@ class TestGeneralUsage:
         """
         )
         p = testdir.makepyfile("""def test_func(x): pass""")
-        res = testdir.runpytest(p, SHOW_PYTEST_WARNINGS_ARG)
+        res = testdir.runpytest(p)
         assert res.ret == 0
         res.stdout.fnmatch_lines(["*1 skipped*"])

@@ -356,9 +355,7 @@ class TestGeneralUsage:
             pass
         """
         )
-        res = testdir.runpytest(
-            p.basename + "::" + "test_func[1]", SHOW_PYTEST_WARNINGS_ARG
-        )
+        res = testdir.runpytest(p.basename + "::" + "test_func[1]")
         assert res.ret == 0
         res.stdout.fnmatch_lines(["*1 passed*"])

@@ -7,7 +7,6 @@ from _pytest.fixtures import FixtureLookupError
 from _pytest.fixtures import FixtureRequest
 from _pytest.pathlib import Path
 from _pytest.pytester import get_public_names
-from _pytest.warnings import SHOW_PYTEST_WARNINGS_ARG


 def test_getfuncargnames():

@@ -2186,7 +2185,7 @@ class TestFixtureMarker:
             pass
         """
         )
-        result = testdir.runpytest(SHOW_PYTEST_WARNINGS_ARG)
+        result = testdir.runpytest()
         assert result.ret != 0
         result.stdout.fnmatch_lines(
             ["*ScopeMismatch*You tried*function*session*request*"]

@@ -9,7 +9,6 @@ from hypothesis import strategies

 import pytest
 from _pytest import fixtures
 from _pytest import python
-from _pytest.warnings import SHOW_PYTEST_WARNINGS_ARG


 class TestMetafunc:

@@ -915,7 +914,7 @@ class TestMetafuncFunctional:
             assert metafunc.cls == TestClass
         """
         )
-        result = testdir.runpytest(p, "-v", SHOW_PYTEST_WARNINGS_ARG)
+        result = testdir.runpytest(p, "-v")
         result.assert_outcomes(passed=2)

     def test_two_functions(self, testdir):

@@ -931,7 +930,7 @@ class TestMetafuncFunctional:
             assert arg1 in (10, 20)
         """
         )
-        result = testdir.runpytest("-v", p, SHOW_PYTEST_WARNINGS_ARG)
+        result = testdir.runpytest("-v", p)
         result.stdout.fnmatch_lines(
             [
                 "*test_func1*0*PASS*",

@@ -967,7 +966,7 @@ class TestMetafuncFunctional:
             assert hello == "world"
         """
         )
-        result = testdir.runpytest("-v", p, SHOW_PYTEST_WARNINGS_ARG)
+        result = testdir.runpytest("-v", p)
         result.stdout.fnmatch_lines(["*test_myfunc*hello*PASS*", "*1 passed*"])

     def test_two_functions_not_same_instance(self, testdir):

@@ -982,7 +981,7 @@ class TestMetafuncFunctional:
             self.x = 1
         """
         )
-        result = testdir.runpytest("-v", p, SHOW_PYTEST_WARNINGS_ARG)
+        result = testdir.runpytest("-v", p)
         result.stdout.fnmatch_lines(
             ["*test_func*0*PASS*", "*test_func*1*PASS*", "*2 pass*"]
         )

@@ -1000,7 +999,7 @@ class TestMetafuncFunctional:
             self.val = 1
         """
         )
-        result = testdir.runpytest(p, SHOW_PYTEST_WARNINGS_ARG)
+        result = testdir.runpytest(p)
         result.assert_outcomes(passed=1)

     def test_parametrize_functional2(self, testdir):

@@ -1522,7 +1521,7 @@ class TestMarkersWithParametrization:
             assert n + 1 == expected
         """
         testdir.makepyfile(s)
-        rec = testdir.inline_run("-m", "foo", SHOW_PYTEST_WARNINGS_ARG)
+        rec = testdir.inline_run("-m", "foo")
         passed, skipped, fail = rec.listoutcomes()
         assert len(passed) == 1
         assert len(skipped) == 0

@@ -1562,7 +1561,7 @@ class TestMarkersWithParametrization:
             assert n + 1 == expected
         """
         testdir.makepyfile(s)
-        reprec = testdir.inline_run(SHOW_PYTEST_WARNINGS_ARG)
+        reprec = testdir.inline_run()
         # xfail is skip??
         reprec.assertoutcome(passed=2, skipped=1)

@@ -1579,7 +1578,7 @@ class TestMarkersWithParametrization:
             assert n % 2 == 0
         """
         testdir.makepyfile(s)
-        reprec = testdir.inline_run(SHOW_PYTEST_WARNINGS_ARG)
+        reprec = testdir.inline_run()
         reprec.assertoutcome(passed=2, skipped=1)

     def test_xfail_with_arg(self, testdir):

@@ -1595,7 +1594,7 @@ class TestMarkersWithParametrization:
             assert n + 1 == expected
         """
         testdir.makepyfile(s)
-        reprec = testdir.inline_run(SHOW_PYTEST_WARNINGS_ARG)
+        reprec = testdir.inline_run()
         reprec.assertoutcome(passed=2, skipped=1)

     def test_xfail_with_kwarg(self, testdir):

@@ -1611,7 +1610,7 @@ class TestMarkersWithParametrization:
             assert n + 1 == expected
         """
         testdir.makepyfile(s)
-        reprec = testdir.inline_run(SHOW_PYTEST_WARNINGS_ARG)
+        reprec = testdir.inline_run()
         reprec.assertoutcome(passed=2, skipped=1)

     def test_xfail_with_arg_and_kwarg(self, testdir):

@@ -1627,7 +1626,7 @@ class TestMarkersWithParametrization:
             assert n + 1 == expected
         """
         testdir.makepyfile(s)
-        reprec = testdir.inline_run(SHOW_PYTEST_WARNINGS_ARG)
+        reprec = testdir.inline_run()
         reprec.assertoutcome(passed=2, skipped=1)

     @pytest.mark.parametrize("strict", [True, False])

@@ -1648,7 +1647,7 @@ class TestMarkersWithParametrization:
            strict=strict
        )
        testdir.makepyfile(s)
-        reprec = testdir.inline_run(SHOW_PYTEST_WARNINGS_ARG)
+        reprec = testdir.inline_run()
         passed, failed = (2, 1) if strict else (3, 0)
         reprec.assertoutcome(passed=passed, failed=failed)

@@ -1672,7 +1671,7 @@ class TestMarkersWithParametrization:
             assert n + 1 == expected
         """
         testdir.makepyfile(s)
-        reprec = testdir.inline_run(SHOW_PYTEST_WARNINGS_ARG)
+        reprec = testdir.inline_run()
         reprec.assertoutcome(passed=2, skipped=2)

     def test_parametrize_ID_generation_string_int_works(self, testdir):

@@ -8,11 +8,6 @@ from _pytest.mark import EMPTY_PARAMETERSET_OPTION
 from _pytest.mark import MarkGenerator as Mark
 from _pytest.nodes import Collector
 from _pytest.nodes import Node
-from _pytest.warnings import SHOW_PYTEST_WARNINGS_ARG
-
-ignore_markinfo = pytest.mark.filterwarnings(
-    "ignore:MarkInfo objects:pytest.RemovedInPytest4Warning"
-)


 class TestMark:

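The deleted ignore_markinfo alias wrapped pytest.mark.filterwarnings, whose string argument uses the same "action:message:category" spec as -W. The mark itself is unaffected by this commit; a minimal sketch with a builtin category (the test body is illustrative):

    import warnings

    import pytest

    @pytest.mark.filterwarnings("ignore:old API:DeprecationWarning")
    def test_uses_old_api():
        # ignored by the mark above, so this passes even under "error" filters
        warnings.warn("old API is deprecated", DeprecationWarning)
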
@@ -625,7 +620,6 @@ class TestFunctional:
         reprec = testdir.inline_run()
         reprec.assertoutcome(passed=1)

-    @ignore_markinfo
     def test_keyword_added_for_session(self, testdir):
         testdir.makeconftest(
             """

@@ -651,7 +645,7 @@ class TestFunctional:
             assert marker.kwargs == {}
         """
         )
-        reprec = testdir.inline_run("-m", "mark1", SHOW_PYTEST_WARNINGS_ARG)
+        reprec = testdir.inline_run("-m", "mark1")
         reprec.assertoutcome(passed=1)

     def assert_markers(self, items, **expected):

@@ -689,7 +683,7 @@ class TestFunctional:
             assert True
         """
         )
-        reprec = testdir.inline_run(SHOW_PYTEST_WARNINGS_ARG)
+        reprec = testdir.inline_run()
         reprec.assertoutcome(skipped=1)

@@ -989,7 +983,7 @@ def test_markers_from_parametrize(testdir):
         """
     )

-    result = testdir.runpytest(SHOW_PYTEST_WARNINGS_ARG)
+    result = testdir.runpytest()
     result.assert_outcomes(passed=4)

@@ -5,7 +5,6 @@ import attr

 import pytest
 from _pytest import pathlib
 from _pytest.pathlib import Path
-from _pytest.warnings import SHOW_PYTEST_WARNINGS_ARG


 def test_tmpdir_fixture(testdir):

@@ -82,9 +81,7 @@ def test_basetemp(testdir):
        tmpdir_factory.mktemp('hello', numbered=False)
        """
    )
-    result = testdir.runpytest(
-        p, "--basetemp=%s" % mytemp, SHOW_PYTEST_WARNINGS_ARG, "-s"
-    )
+    result = testdir.runpytest(p, "--basetemp=%s" % mytemp)
     assert result.ret == 0
     print(mytemp)
     assert mytemp.join("hello").check()

@@ -498,38 +498,15 @@ class TestDeprecationWarningsByDefault:


-@pytest.mark.parametrize("change_default", [None, "ini", "cmdline"])
-def test_removed_in_pytest4_warning_as_error(testdir, change_default):
-    testdir.makepyfile(
-        """
-        import warnings, pytest
-        def test():
-            warnings.warn(pytest.RemovedInPytest4Warning("some warning"))
-        """
-    )
-    if change_default == "ini":
-        testdir.makeini(
-            """
-            [pytest]
-            filterwarnings =
-                ignore::pytest.RemovedInPytest4Warning
-            """
-        )
-
-    args = (
-        ("-Wignore::pytest.RemovedInPytest4Warning",)
-        if change_default == "cmdline"
-        else ()
-    )
-    result = testdir.runpytest(*args)
-    if change_default is None:
-        result.stdout.fnmatch_lines(["* 1 failed in *"])
-    else:
-        assert change_default in ("ini", "cmdline")
-        result.stdout.fnmatch_lines(["* 1 passed in *"])
-
-
 @pytest.mark.parametrize("change_default", [None, "ini", "cmdline"])
+@pytest.mark.skip(
+    reason="This test should be enabled again before pytest 6.0 is released"
+)
 def test_deprecation_warning_as_error(testdir, change_default):
     """This ensures that PytestDeprecationWarnings raised by pytest are turned into errors.

     This test should be enabled as part of each major release, and skipped again afterwards
     to ensure our deprecations are turning into warnings as expected.
     """
     testdir.makepyfile(
         """
         import warnings, pytest

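The deleted test exercised overriding a default warning filter from the ini file or the command line; the same pattern works for any category. A hedged sketch using the testdir fixture seen above (the test body is illustrative, not part of this commit):

    def test_filter_can_be_overridden(testdir):
        testdir.makepyfile(
            """
            import warnings
            def test():
                warnings.warn(DeprecationWarning("some warning"))
            """
        )
        # ini-level override: ignore the category project-wide
        testdir.makeini(
            """
            [pytest]
            filterwarnings =
                ignore::DeprecationWarning
            """
        )
        result = testdir.runpytest()
        result.stdout.fnmatch_lines(["* 1 passed in *"])
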
tox.ini

@@ -128,9 +128,6 @@ norecursedirs = testing/example_scripts
 xfail_strict=true
 filterwarnings =
     error
-    ignore:yield tests are deprecated, and scheduled to be removed in pytest 4.0:pytest.RemovedInPytest4Warning
-    ignore:Metafunc.addcall is deprecated and scheduled to be removed in pytest 4.0:pytest.RemovedInPytest4Warning
-    ignore::pytest.RemovedInPytest4Warning
     ignore:Module already imported so cannot be rewritten:pytest.PytestWarning
     # produced by path.local
     ignore:bad escape.*:DeprecationWarning:re

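Each filterwarnings entry here follows the "action:message:category:module" spec from the warnings module, where message is a regex matched against the start of the warning text. For example, the retained "bad escape" line corresponds to this direct call:

    import warnings

    # same filter as "ignore:bad escape.*:DeprecationWarning:re" in the ini
    warnings.filterwarnings(
        "ignore", message="bad escape.*", category=DeprecationWarning, module="re"
    )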