tests: Migrate to pytester - incremental update (#8145)
parent cb8142b8ec
commit 8eef8c6004
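The change applied throughout this diff is mechanical: each test drops the py.path-based testdir fixture in favour of the pathlib-based pytester fixture, gains type annotations, and replaces tmpdir/py.path.local calls with pathlib.Path equivalents. Below is a minimal before/after sketch of that pattern, assuming the documented pytester API; the test name and file contents are illustrative only and do not come from the changed files.

# Old style: py.path-based testdir fixture, untyped.
# def test_example(testdir):
#     testdir.makepyfile("def test_ok(): pass")
#     log_file = testdir.tmpdir.join("pytest.log").strpath
#     result = testdir.runpytest(f"--log-file={log_file}")
#     assert result.ret == 0

# New style: pathlib-based pytester fixture, annotated.
# Outside pytest's own test suite the fixture must be enabled explicitly.
pytest_plugins = "pytester"

from _pytest.pytester import Pytester


def test_example(pytester: Pytester) -> None:
    pytester.makepyfile("def test_ok(): pass")             # write a throwaway test module
    log_file = str(pytester.path.joinpath("pytest.log"))   # pathlib.Path instead of py.path.local
    result = pytester.runpytest(f"--log-file={log_file}")  # run pytest against it in isolation
    assert result.ret == 0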
@@ -5,6 +5,7 @@ import os
 import queue
 import sys
 import textwrap
+from pathlib import Path
 from typing import Any
 from typing import Dict
 from typing import Tuple
@@ -19,7 +20,10 @@ from _pytest._code.code import ExceptionChainRepr
 from _pytest._code.code import ExceptionInfo
 from _pytest._code.code import FormattedExcinfo
 from _pytest._io import TerminalWriter
+from _pytest.pathlib import import_path
 from _pytest.pytester import LineMatcher
+from _pytest.pytester import Pytester
+

 if TYPE_CHECKING:
 from _pytest._code.code import _TracebackStyle
@@ -155,10 +159,10 @@ class TestTraceback_f_g_h:
 newtraceback = traceback.cut(path=path, lineno=firstlineno + 2)
 assert len(newtraceback) == 1

-def test_traceback_cut_excludepath(self, testdir):
+def test_traceback_cut_excludepath(self, pytester: Pytester) -> None:
-p = testdir.makepyfile("def f(): raise ValueError")
+p = pytester.makepyfile("def f(): raise ValueError")
 with pytest.raises(ValueError) as excinfo:
-p.pyimport().f()
+import_path(p).f()  # type: ignore[attr-defined]
 basedir = py.path.local(pytest.__file__).dirpath()
 newtraceback = excinfo.traceback.cut(excludepath=basedir)
 for x in newtraceback:
@@ -406,8 +410,8 @@ def test_match_succeeds():
 excinfo.match(r".*zero.*")


-def test_match_raises_error(testdir):
+def test_match_raises_error(pytester: Pytester) -> None:
-testdir.makepyfile(
+pytester.makepyfile(
 """
 import pytest
 def test_division_zero():
@@ -416,14 +420,14 @@ def test_match_raises_error(testdir):
 excinfo.match(r'[123]+')
 """
 )
-result = testdir.runpytest()
+result = pytester.runpytest()
 assert result.ret != 0

 exc_msg = "Regex pattern '[[]123[]]+' does not match 'division by zero'."
 result.stdout.fnmatch_lines([f"E * AssertionError: {exc_msg}"])
 result.stdout.no_fnmatch_line("*__tracebackhide__ = True*")

-result = testdir.runpytest("--fulltrace")
+result = pytester.runpytest("--fulltrace")
 assert result.ret != 0
 result.stdout.fnmatch_lines(
 ["*__tracebackhide__ = True*", f"E * AssertionError: {exc_msg}"]
@@ -432,15 +436,14 @@ def test_match_raises_error(testdir):

 class TestFormattedExcinfo:
 @pytest.fixture
-def importasmod(self, request, _sys_snapshot):
+def importasmod(self, tmp_path: Path, _sys_snapshot):
 def importasmod(source):
 source = textwrap.dedent(source)
-tmpdir = request.getfixturevalue("tmpdir")
+modpath = tmp_path.joinpath("mod.py")
-modpath = tmpdir.join("mod.py")
+tmp_path.joinpath("__init__.py").touch()
-tmpdir.ensure("__init__.py")
+modpath.write_text(source)
-modpath.write(source)
 importlib.invalidate_caches()
-return modpath.pyimport()
+return import_path(modpath)

 return importasmod

@@ -682,7 +685,7 @@ raise ValueError()
 p = FormattedExcinfo(style="short")
 reprtb = p.repr_traceback_entry(excinfo.traceback[-2])
 lines = reprtb.lines
-basename = py.path.local(mod.__file__).basename
+basename = Path(mod.__file__).name
 assert lines[0] == " func1()"
 assert reprtb.reprfileloc is not None
 assert basename in str(reprtb.reprfileloc.path)
@@ -948,7 +951,9 @@ raise ValueError()
 assert line.endswith("mod.py")
 assert tw_mock.lines[12] == ":3: ValueError"

-def test_toterminal_long_missing_source(self, importasmod, tmpdir, tw_mock):
+def test_toterminal_long_missing_source(
+self, importasmod, tmp_path: Path, tw_mock
+) -> None:
 mod = importasmod(
 """
 def g(x):
@@ -958,7 +963,7 @@ raise ValueError()
 """
 )
 excinfo = pytest.raises(ValueError, mod.f)
-tmpdir.join("mod.py").remove()
+tmp_path.joinpath("mod.py").unlink()
 excinfo.traceback = excinfo.traceback.filter()
 repr = excinfo.getrepr()
 repr.toterminal(tw_mock)
@@ -978,7 +983,9 @@ raise ValueError()
 assert line.endswith("mod.py")
 assert tw_mock.lines[10] == ":3: ValueError"

-def test_toterminal_long_incomplete_source(self, importasmod, tmpdir, tw_mock):
+def test_toterminal_long_incomplete_source(
+self, importasmod, tmp_path: Path, tw_mock
+) -> None:
 mod = importasmod(
 """
 def g(x):
@@ -988,7 +995,7 @@ raise ValueError()
 """
 )
 excinfo = pytest.raises(ValueError, mod.f)
-tmpdir.join("mod.py").write("asdf")
+tmp_path.joinpath("mod.py").write_text("asdf")
 excinfo.traceback = excinfo.traceback.filter()
 repr = excinfo.getrepr()
 repr.toterminal(tw_mock)
@@ -1374,16 +1381,18 @@ def test_repr_traceback_with_unicode(style, encoding):
 assert repr_traceback is not None


-def test_cwd_deleted(testdir):
+def test_cwd_deleted(pytester: Pytester) -> None:
-testdir.makepyfile(
+pytester.makepyfile(
 """
-def test(tmpdir):
+import os
-tmpdir.chdir()
+
-tmpdir.remove()
+def test(tmp_path):
+os.chdir(tmp_path)
+tmp_path.unlink()
 assert False
 """
 )
-result = testdir.runpytest()
+result = pytester.runpytest()
 result.stdout.fnmatch_lines(["* 1 failed in *"])
 result.stdout.no_fnmatch_line("*INTERNALERROR*")
 result.stderr.no_fnmatch_line("*INTERNALERROR*")

@@ -1,3 +1,6 @@
-def test_510(testdir):
+from _pytest.pytester import Pytester
-testdir.copy_example("issue_519.py")
+
-testdir.runpytest("issue_519.py")
+
+def test_510(pytester: Pytester) -> None:
+pytester.copy_example("issue_519.py")
+pytester.runpytest("issue_519.py")

@@ -3,6 +3,7 @@ import os
 import re
 import shutil
 import sys
+from pathlib import Path
 from typing import Generator
 from unittest import mock

@@ -64,10 +65,10 @@ win32 = int(sys.platform == "win32")
 class TestTerminalWriter:
 @pytest.fixture(params=["path", "stringio"])
 def tw(
-self, request, tmpdir
+self, request, tmp_path: Path
 ) -> Generator[terminalwriter.TerminalWriter, None, None]:
 if request.param == "path":
-p = tmpdir.join("tmpfile")
+p = tmp_path.joinpath("tmpfile")
 f = open(str(p), "w+", encoding="utf8")
 tw = terminalwriter.TerminalWriter(f)

@@ -2,14 +2,14 @@ import logging

 import pytest
 from _pytest.logging import caplog_records_key
-from _pytest.pytester import Testdir
+from _pytest.pytester import Pytester

 logger = logging.getLogger(__name__)
 sublogger = logging.getLogger(__name__ + ".baz")


-def test_fixture_help(testdir):
+def test_fixture_help(pytester: Pytester) -> None:
-result = testdir.runpytest("--fixtures")
+result = pytester.runpytest("--fixtures")
 result.stdout.fnmatch_lines(["*caplog*"])


@@ -28,12 +28,12 @@ def test_change_level(caplog):
 assert "CRITICAL" in caplog.text


-def test_change_level_undo(testdir: Testdir) -> None:
+def test_change_level_undo(pytester: Pytester) -> None:
 """Ensure that 'set_level' is undone after the end of the test.

 Tests the logging output themselves (affacted both by logger and handler levels).
 """
-testdir.makepyfile(
+pytester.makepyfile(
 """
 import logging

@@ -49,17 +49,17 @@ def test_change_level_undo(testdir: Testdir) -> None:
 assert 0
 """
 )
-result = testdir.runpytest()
+result = pytester.runpytest()
 result.stdout.fnmatch_lines(["*log from test1*", "*2 failed in *"])
 result.stdout.no_fnmatch_line("*log from test2*")


-def test_change_level_undos_handler_level(testdir: Testdir) -> None:
+def test_change_level_undos_handler_level(pytester: Pytester) -> None:
 """Ensure that 'set_level' is undone after the end of the test (handler).

 Issue #7569. Tests the handler level specifically.
 """
-testdir.makepyfile(
+pytester.makepyfile(
 """
 import logging

@@ -78,7 +78,7 @@ def test_change_level_undos_handler_level(testdir: Testdir) -> None:
 assert caplog.handler.level == 43
 """
 )
-result = testdir.runpytest()
+result = pytester.runpytest()
 result.assert_outcomes(passed=3)


@@ -172,8 +172,8 @@ def test_caplog_captures_for_all_stages(caplog, logging_during_setup_and_teardow
 assert set(caplog._item._store[caplog_records_key]) == {"setup", "call"}


-def test_ini_controls_global_log_level(testdir):
+def test_ini_controls_global_log_level(pytester: Pytester) -> None:
-testdir.makepyfile(
+pytester.makepyfile(
 """
 import pytest
 import logging
@@ -187,20 +187,20 @@ def test_ini_controls_global_log_level(testdir):
 assert 'ERROR' in caplog.text
 """
 )
-testdir.makeini(
+pytester.makeini(
 """
 [pytest]
 log_level=ERROR
 """
 )

-result = testdir.runpytest()
+result = pytester.runpytest()
 # make sure that that we get a '0' exit code for the testsuite
 assert result.ret == 0


-def test_caplog_can_override_global_log_level(testdir):
+def test_caplog_can_override_global_log_level(pytester: Pytester) -> None:
-testdir.makepyfile(
+pytester.makepyfile(
 """
 import pytest
 import logging
@@ -227,19 +227,19 @@ def test_caplog_can_override_global_log_level(testdir):
 assert "message won't be shown" not in caplog.text
 """
 )
-testdir.makeini(
+pytester.makeini(
 """
 [pytest]
 log_level=WARNING
 """
 )

-result = testdir.runpytest()
+result = pytester.runpytest()
 assert result.ret == 0


-def test_caplog_captures_despite_exception(testdir):
+def test_caplog_captures_despite_exception(pytester: Pytester) -> None:
-testdir.makepyfile(
+pytester.makepyfile(
 """
 import pytest
 import logging
@@ -255,26 +255,28 @@ def test_caplog_captures_despite_exception(testdir):
 raise Exception()
 """
 )
-testdir.makeini(
+pytester.makeini(
 """
 [pytest]
 log_level=WARNING
 """
 )

-result = testdir.runpytest()
+result = pytester.runpytest()
 result.stdout.fnmatch_lines(["*ERROR message will be shown*"])
 result.stdout.no_fnmatch_line("*DEBUG message won't be shown*")
 assert result.ret == 1


-def test_log_report_captures_according_to_config_option_upon_failure(testdir):
+def test_log_report_captures_according_to_config_option_upon_failure(
+pytester: Pytester,
+) -> None:
 """Test that upon failure:
 (1) `caplog` succeeded to capture the DEBUG message and assert on it => No `Exception` is raised.
 (2) The `DEBUG` message does NOT appear in the `Captured log call` report.
 (3) The stdout, `INFO`, and `WARNING` messages DO appear in the test reports due to `--log-level=INFO`.
 """
-testdir.makepyfile(
+pytester.makepyfile(
 """
 import pytest
 import logging
@@ -299,7 +301,7 @@ def test_log_report_captures_according_to_config_option_upon_failure(testdir):
 """
 )

-result = testdir.runpytest("--log-level=INFO")
+result = pytester.runpytest("--log-level=INFO")
 result.stdout.no_fnmatch_line("*Exception: caplog failed to capture DEBUG*")
 result.stdout.no_fnmatch_line("*DEBUG log message*")
 result.stdout.fnmatch_lines(

@@ -6,12 +6,13 @@ from typing import cast
 import pytest
 from _pytest.capture import CaptureManager
 from _pytest.config import ExitCode
-from _pytest.pytester import Testdir
+from _pytest.fixtures import FixtureRequest
+from _pytest.pytester import Pytester
 from _pytest.terminal import TerminalReporter


-def test_nothing_logged(testdir):
+def test_nothing_logged(pytester: Pytester) -> None:
-testdir.makepyfile(
+pytester.makepyfile(
 """
 import sys

@@ -21,7 +22,7 @@ def test_nothing_logged(testdir):
 assert False
 """
 )
-result = testdir.runpytest()
+result = pytester.runpytest()
 assert result.ret == 1
 result.stdout.fnmatch_lines(["*- Captured stdout call -*", "text going to stdout"])
 result.stdout.fnmatch_lines(["*- Captured stderr call -*", "text going to stderr"])
@@ -29,8 +30,8 @@ def test_nothing_logged(testdir):
 result.stdout.fnmatch_lines(["*- Captured *log call -*"])


-def test_messages_logged(testdir):
+def test_messages_logged(pytester: Pytester) -> None:
-testdir.makepyfile(
+pytester.makepyfile(
 """
 import sys
 import logging
@@ -44,15 +45,15 @@ def test_messages_logged(testdir):
 assert False
 """
 )
-result = testdir.runpytest("--log-level=INFO")
+result = pytester.runpytest("--log-level=INFO")
 assert result.ret == 1
 result.stdout.fnmatch_lines(["*- Captured *log call -*", "*text going to logger*"])
 result.stdout.fnmatch_lines(["*- Captured stdout call -*", "text going to stdout"])
 result.stdout.fnmatch_lines(["*- Captured stderr call -*", "text going to stderr"])


-def test_root_logger_affected(testdir):
+def test_root_logger_affected(pytester: Pytester) -> None:
-testdir.makepyfile(
+pytester.makepyfile(
 """
 import logging
 logger = logging.getLogger()
@@ -65,8 +66,8 @@ def test_root_logger_affected(testdir):
 assert 0
 """
 )
-log_file = testdir.tmpdir.join("pytest.log").strpath
+log_file = str(pytester.path.joinpath("pytest.log"))
-result = testdir.runpytest("--log-level=ERROR", "--log-file=pytest.log")
+result = pytester.runpytest("--log-level=ERROR", "--log-file=pytest.log")
 assert result.ret == 1

 # The capture log calls in the stdout section only contain the
@@ -87,8 +88,8 @@ def test_root_logger_affected(testdir):
 assert "error text going to logger" in contents


-def test_log_cli_level_log_level_interaction(testdir):
+def test_log_cli_level_log_level_interaction(pytester: Pytester) -> None:
-testdir.makepyfile(
+pytester.makepyfile(
 """
 import logging
 logger = logging.getLogger()
@@ -102,7 +103,7 @@ def test_log_cli_level_log_level_interaction(testdir):
 """
 )

-result = testdir.runpytest("--log-cli-level=INFO", "--log-level=ERROR")
+result = pytester.runpytest("--log-cli-level=INFO", "--log-level=ERROR")
 assert result.ret == 1

 result.stdout.fnmatch_lines(
@@ -117,8 +118,8 @@ def test_log_cli_level_log_level_interaction(testdir):
 result.stdout.no_re_match_line("DEBUG")


-def test_setup_logging(testdir):
+def test_setup_logging(pytester: Pytester) -> None:
-testdir.makepyfile(
+pytester.makepyfile(
 """
 import logging

@@ -132,7 +133,7 @@ def test_setup_logging(testdir):
 assert False
 """
 )
-result = testdir.runpytest("--log-level=INFO")
+result = pytester.runpytest("--log-level=INFO")
 assert result.ret == 1
 result.stdout.fnmatch_lines(
 [
@@ -144,8 +145,8 @@ def test_setup_logging(testdir):
 )


-def test_teardown_logging(testdir):
+def test_teardown_logging(pytester: Pytester) -> None:
-testdir.makepyfile(
+pytester.makepyfile(
 """
 import logging

@@ -159,7 +160,7 @@ def test_teardown_logging(testdir):
 assert False
 """
 )
-result = testdir.runpytest("--log-level=INFO")
+result = pytester.runpytest("--log-level=INFO")
 assert result.ret == 1
 result.stdout.fnmatch_lines(
 [
@@ -172,9 +173,9 @@ def test_teardown_logging(testdir):


 @pytest.mark.parametrize("enabled", [True, False])
-def test_log_cli_enabled_disabled(testdir, enabled):
+def test_log_cli_enabled_disabled(pytester: Pytester, enabled: bool) -> None:
 msg = "critical message logged by test"
-testdir.makepyfile(
+pytester.makepyfile(
 """
 import logging
 def test_log_cli():
@@ -184,13 +185,13 @@ def test_log_cli_enabled_disabled(testdir, enabled):
 )
 )
 if enabled:
-testdir.makeini(
+pytester.makeini(
 """
 [pytest]
 log_cli=true
 """
 )
-result = testdir.runpytest()
+result = pytester.runpytest()
 if enabled:
 result.stdout.fnmatch_lines(
 [
@@ -204,9 +205,9 @@ def test_log_cli_enabled_disabled(testdir, enabled):
 assert msg not in result.stdout.str()


-def test_log_cli_default_level(testdir):
+def test_log_cli_default_level(pytester: Pytester) -> None:
 # Default log file level
-testdir.makepyfile(
+pytester.makepyfile(
 """
 import pytest
 import logging
@@ -238,10 +239,12 @@ def test_log_cli_default_level(testdir):
 assert result.ret == 0


-def test_log_cli_default_level_multiple_tests(testdir, request):
+def test_log_cli_default_level_multiple_tests(
+pytester: Pytester, request: FixtureRequest
+) -> None:
 """Ensure we reset the first newline added by the live logger between tests"""
 filename = request.node.name + ".py"
-testdir.makepyfile(
+pytester.makepyfile(
 """
 import logging

@@ -252,14 +255,14 @@ def test_log_cli_default_level_multiple_tests(testdir, request):
 logging.warning("log message from test_log_2")
 """
 )
-testdir.makeini(
+pytester.makeini(
 """
 [pytest]
 log_cli=true
 """
 )

-result = testdir.runpytest()
+result = pytester.runpytest()
 result.stdout.fnmatch_lines(
 [
 f"{filename}::test_log_1 ",
@@ -273,11 +276,13 @@ def test_log_cli_default_level_multiple_tests(testdir, request):
 )


-def test_log_cli_default_level_sections(testdir, request):
+def test_log_cli_default_level_sections(
+pytester: Pytester, request: FixtureRequest
+) -> None:
 """Check that with live logging enable we are printing the correct headers during
 start/setup/call/teardown/finish."""
 filename = request.node.name + ".py"
-testdir.makeconftest(
+pytester.makeconftest(
 """
 import pytest
 import logging
@@ -290,7 +295,7 @@ def test_log_cli_default_level_sections(testdir, request):
 """
 )

-testdir.makepyfile(
+pytester.makepyfile(
 """
 import pytest
 import logging
@@ -308,14 +313,14 @@ def test_log_cli_default_level_sections(testdir, request):
 logging.warning("log message from test_log_2")
 """
 )
-testdir.makeini(
+pytester.makeini(
 """
 [pytest]
 log_cli=true
 """
 )

-result = testdir.runpytest()
+result = pytester.runpytest()
 result.stdout.fnmatch_lines(
 [
 f"{filename}::test_log_1 ",
@@ -347,11 +352,13 @@ def test_log_cli_default_level_sections(testdir, request):
 )


-def test_live_logs_unknown_sections(testdir, request):
+def test_live_logs_unknown_sections(
+pytester: Pytester, request: FixtureRequest
+) -> None:
 """Check that with live logging enable we are printing the correct headers during
 start/setup/call/teardown/finish."""
 filename = request.node.name + ".py"
-testdir.makeconftest(
+pytester.makeconftest(
 """
 import pytest
 import logging
@@ -367,7 +374,7 @@ def test_live_logs_unknown_sections(testdir, request):
 """
 )

-testdir.makepyfile(
+pytester.makepyfile(
 """
 import pytest
 import logging
@@ -383,14 +390,14 @@ def test_live_logs_unknown_sections(testdir, request):

 """
 )
-testdir.makeini(
+pytester.makeini(
 """
 [pytest]
 log_cli=true
 """
 )

-result = testdir.runpytest()
+result = pytester.runpytest()
 result.stdout.fnmatch_lines(
 [
 "*WARNING*Unknown Section*",
@@ -409,11 +416,13 @@ def test_live_logs_unknown_sections(testdir, request):
 )


-def test_sections_single_new_line_after_test_outcome(testdir, request):
+def test_sections_single_new_line_after_test_outcome(
+pytester: Pytester, request: FixtureRequest
+) -> None:
 """Check that only a single new line is written between log messages during
 teardown/finish."""
 filename = request.node.name + ".py"
-testdir.makeconftest(
+pytester.makeconftest(
 """
 import pytest
 import logging
@@ -427,7 +436,7 @@ def test_sections_single_new_line_after_test_outcome(testdir, request):
 """
 )

-testdir.makepyfile(
+pytester.makepyfile(
 """
 import pytest
 import logging
@@ -443,14 +452,14 @@ def test_sections_single_new_line_after_test_outcome(testdir, request):
 logging.warning("log message from test_log_1")
 """
 )
-testdir.makeini(
+pytester.makeini(
 """
 [pytest]
 log_cli=true
 """
 )

-result = testdir.runpytest()
+result = pytester.runpytest()
 result.stdout.fnmatch_lines(
 [
 f"{filename}::test_log_1 ",
@@ -487,9 +496,9 @@ def test_sections_single_new_line_after_test_outcome(testdir, request):
 )


-def test_log_cli_level(testdir):
+def test_log_cli_level(pytester: Pytester) -> None:
 # Default log file level
-testdir.makepyfile(
+pytester.makepyfile(
 """
 import pytest
 import logging
@@ -501,14 +510,14 @@ def test_log_cli_level(testdir):
 print('PASSED')
 """
 )
-testdir.makeini(
+pytester.makeini(
 """
 [pytest]
 log_cli=true
 """
 )

-result = testdir.runpytest("-s", "--log-cli-level=INFO")
+result = pytester.runpytest("-s", "--log-cli-level=INFO")

 # fnmatch_lines does an assertion internally
 result.stdout.fnmatch_lines(
@@ -522,7 +531,7 @@ def test_log_cli_level(testdir):
 # make sure that that we get a '0' exit code for the testsuite
 assert result.ret == 0

-result = testdir.runpytest("-s", "--log-level=INFO")
+result = pytester.runpytest("-s", "--log-level=INFO")

 # fnmatch_lines does an assertion internally
 result.stdout.fnmatch_lines(
@@ -537,15 +546,15 @@ def test_log_cli_level(testdir):
 assert result.ret == 0


-def test_log_cli_ini_level(testdir):
+def test_log_cli_ini_level(pytester: Pytester) -> None:
-testdir.makeini(
+pytester.makeini(
 """
 [pytest]
 log_cli=true
 log_cli_level = INFO
 """
 )
-testdir.makepyfile(
+pytester.makepyfile(
 """
 import pytest
 import logging
@@ -558,7 +567,7 @@ def test_log_cli_ini_level(testdir):
 """
 )

-result = testdir.runpytest("-s")
+result = pytester.runpytest("-s")

 # fnmatch_lines does an assertion internally
 result.stdout.fnmatch_lines(
@@ -577,11 +586,11 @@ def test_log_cli_ini_level(testdir):
 "cli_args",
 ["", "--log-level=WARNING", "--log-file-level=WARNING", "--log-cli-level=WARNING"],
 )
-def test_log_cli_auto_enable(testdir, cli_args):
+def test_log_cli_auto_enable(pytester: Pytester, cli_args: str) -> None:
 """Check that live logs are enabled if --log-level or --log-cli-level is passed on the CLI.
 It should not be auto enabled if the same configs are set on the INI file.
 """
-testdir.makepyfile(
+pytester.makepyfile(
 """
 import logging

@@ -591,7 +600,7 @@ def test_log_cli_auto_enable(testdir, cli_args):

 """
 )
-testdir.makeini(
+pytester.makeini(
 """
 [pytest]
 log_level=INFO
@@ -599,7 +608,7 @@ def test_log_cli_auto_enable(testdir, cli_args):
 """
 )

-result = testdir.runpytest(cli_args)
+result = pytester.runpytest(cli_args)
 stdout = result.stdout.str()
 if cli_args == "--log-cli-level=WARNING":
 result.stdout.fnmatch_lines(
@@ -620,9 +629,9 @@ def test_log_cli_auto_enable(testdir, cli_args):
 assert "WARNING" not in stdout


-def test_log_file_cli(testdir):
+def test_log_file_cli(pytester: Pytester) -> None:
 # Default log file level
-testdir.makepyfile(
+pytester.makepyfile(
 """
 import pytest
 import logging
@@ -635,9 +644,9 @@ def test_log_file_cli(testdir):
 """
 )

-log_file = testdir.tmpdir.join("pytest.log").strpath
+log_file = str(pytester.path.joinpath("pytest.log"))

-result = testdir.runpytest(
+result = pytester.runpytest(
 "-s", f"--log-file={log_file}", "--log-file-level=WARNING"
 )

@@ -653,9 +662,9 @@ def test_log_file_cli(testdir):
 assert "This log message won't be shown" not in contents


-def test_log_file_cli_level(testdir):
+def test_log_file_cli_level(pytester: Pytester) -> None:
 # Default log file level
-testdir.makepyfile(
+pytester.makepyfile(
 """
 import pytest
 import logging
@@ -668,9 +677,9 @@ def test_log_file_cli_level(testdir):
 """
 )

-log_file = testdir.tmpdir.join("pytest.log").strpath
+log_file = str(pytester.path.joinpath("pytest.log"))

-result = testdir.runpytest("-s", f"--log-file={log_file}", "--log-file-level=INFO")
+result = pytester.runpytest("-s", f"--log-file={log_file}", "--log-file-level=INFO")

 # fnmatch_lines does an assertion internally
 result.stdout.fnmatch_lines(["test_log_file_cli_level.py PASSED"])
@@ -684,22 +693,22 @@ def test_log_file_cli_level(testdir):
 assert "This log message won't be shown" not in contents


-def test_log_level_not_changed_by_default(testdir):
+def test_log_level_not_changed_by_default(pytester: Pytester) -> None:
-testdir.makepyfile(
+pytester.makepyfile(
 """
 import logging
 def test_log_file():
 assert logging.getLogger().level == logging.WARNING
 """
 )
-result = testdir.runpytest("-s")
+result = pytester.runpytest("-s")
 result.stdout.fnmatch_lines(["* 1 passed in *"])


-def test_log_file_ini(testdir):
+def test_log_file_ini(pytester: Pytester) -> None:
-log_file = testdir.tmpdir.join("pytest.log").strpath
+log_file = str(pytester.path.joinpath("pytest.log"))

-testdir.makeini(
+pytester.makeini(
 """
 [pytest]
 log_file={}
@@ -708,7 +717,7 @@ def test_log_file_ini(testdir):
 log_file
 )
 )
-testdir.makepyfile(
+pytester.makepyfile(
 """
 import pytest
 import logging
@@ -721,7 +730,7 @@ def test_log_file_ini(testdir):
 """
 )

-result = testdir.runpytest("-s")
+result = pytester.runpytest("-s")

 # fnmatch_lines does an assertion internally
 result.stdout.fnmatch_lines(["test_log_file_ini.py PASSED"])
@@ -735,10 +744,10 @@ def test_log_file_ini(testdir):
 assert "This log message won't be shown" not in contents


-def test_log_file_ini_level(testdir):
+def test_log_file_ini_level(pytester: Pytester) -> None:
-log_file = testdir.tmpdir.join("pytest.log").strpath
+log_file = str(pytester.path.joinpath("pytest.log"))

-testdir.makeini(
+pytester.makeini(
 """
 [pytest]
 log_file={}
@@ -747,7 +756,7 @@ def test_log_file_ini_level(testdir):
 log_file
 )
 )
-testdir.makepyfile(
+pytester.makepyfile(
 """
 import pytest
 import logging
@@ -760,7 +769,7 @@ def test_log_file_ini_level(testdir):
 """
 )

-result = testdir.runpytest("-s")
+result = pytester.runpytest("-s")

 # fnmatch_lines does an assertion internally
 result.stdout.fnmatch_lines(["test_log_file_ini_level.py PASSED"])
@@ -774,10 +783,10 @@ def test_log_file_ini_level(testdir):
 assert "This log message won't be shown" not in contents


-def test_log_file_unicode(testdir):
+def test_log_file_unicode(pytester: Pytester) -> None:
-log_file = testdir.tmpdir.join("pytest.log").strpath
+log_file = str(pytester.path.joinpath("pytest.log"))

-testdir.makeini(
+pytester.makeini(
 """
 [pytest]
 log_file={}
@@ -786,7 +795,7 @@ def test_log_file_unicode(testdir):
 log_file
 )
 )
-testdir.makepyfile(
+pytester.makepyfile(
 """\
 import logging

@@ -797,7 +806,7 @@ def test_log_file_unicode(testdir):
 """
 )

-result = testdir.runpytest()
+result = pytester.runpytest()

 # make sure that that we get a '0' exit code for the testsuite
 assert result.ret == 0
@@ -810,11 +819,13 @@ def test_log_file_unicode(testdir):


 @pytest.mark.parametrize("has_capture_manager", [True, False])
-def test_live_logging_suspends_capture(has_capture_manager: bool, request) -> None:
+def test_live_logging_suspends_capture(
+has_capture_manager: bool, request: FixtureRequest
+) -> None:
 """Test that capture manager is suspended when we emitting messages for live logging.

 This tests the implementation calls instead of behavior because it is difficult/impossible to do it using
-``testdir`` facilities because they do their own capturing.
+``pytester`` facilities because they do their own capturing.

 We parametrize the test to also make sure _LiveLoggingStreamHandler works correctly if no capture manager plugin
 is installed.
@@ -856,8 +867,8 @@ def test_live_logging_suspends_capture(has_capture_manager: bool, request) -> No
 assert cast(io.StringIO, out_file).getvalue() == "\nsome message\n"


-def test_collection_live_logging(testdir):
+def test_collection_live_logging(pytester: Pytester) -> None:
-testdir.makepyfile(
+pytester.makepyfile(
 """
 import logging

@@ -865,22 +876,22 @@ def test_collection_live_logging(testdir):
 """
 )

-result = testdir.runpytest("--log-cli-level=INFO")
+result = pytester.runpytest("--log-cli-level=INFO")
 result.stdout.fnmatch_lines(
 ["*--- live log collection ---*", "*Normal message*", "collected 0 items"]
 )


 @pytest.mark.parametrize("verbose", ["", "-q", "-qq"])
-def test_collection_collect_only_live_logging(testdir, verbose):
+def test_collection_collect_only_live_logging(pytester: Pytester, verbose: str) -> None:
-testdir.makepyfile(
+pytester.makepyfile(
 """
 def test_simple():
 pass
 """
 )

-result = testdir.runpytest("--collect-only", "--log-cli-level=INFO", verbose)
+result = pytester.runpytest("--collect-only", "--log-cli-level=INFO", verbose)

 expected_lines = []

@@ -907,10 +918,10 @@ def test_collection_collect_only_live_logging(testdir, verbose):
 result.stdout.fnmatch_lines(expected_lines)


-def test_collection_logging_to_file(testdir):
+def test_collection_logging_to_file(pytester: Pytester) -> None:
-log_file = testdir.tmpdir.join("pytest.log").strpath
+log_file = str(pytester.path.joinpath("pytest.log"))

-testdir.makeini(
+pytester.makeini(
 """
 [pytest]
 log_file={}
@@ -920,7 +931,7 @@ def test_collection_logging_to_file(testdir):
 )
 )

-testdir.makepyfile(
+pytester.makepyfile(
 """
 import logging

@@ -932,7 +943,7 @@ def test_collection_logging_to_file(testdir):
 """
 )

-result = testdir.runpytest()
+result = pytester.runpytest()

 result.stdout.no_fnmatch_line("*--- live log collection ---*")

@@ -945,10 +956,10 @@ def test_collection_logging_to_file(testdir):
 assert "info message in test_simple" in contents


-def test_log_in_hooks(testdir):
+def test_log_in_hooks(pytester: Pytester) -> None:
-log_file = testdir.tmpdir.join("pytest.log").strpath
+log_file = str(pytester.path.joinpath("pytest.log"))

-testdir.makeini(
+pytester.makeini(
 """
 [pytest]
 log_file={}
@@ -958,7 +969,7 @@ def test_log_in_hooks(testdir):
 log_file
 )
 )
-testdir.makeconftest(
+pytester.makeconftest(
 """
 import logging

@@ -972,7 +983,7 @@ def test_log_in_hooks(testdir):
 logging.info('sessionfinish')
 """
 )
-result = testdir.runpytest()
+result = pytester.runpytest()
 result.stdout.fnmatch_lines(["*sessionstart*", "*runtestloop*", "*sessionfinish*"])
 with open(log_file) as rfh:
 contents = rfh.read()
@@ -981,10 +992,10 @@ def test_log_in_hooks(testdir):
 assert "sessionfinish" in contents


-def test_log_in_runtest_logreport(testdir):
+def test_log_in_runtest_logreport(pytester: Pytester) -> None:
-log_file = testdir.tmpdir.join("pytest.log").strpath
+log_file = str(pytester.path.joinpath("pytest.log"))

-testdir.makeini(
+pytester.makeini(
 """
 [pytest]
 log_file={}
@@ -994,7 +1005,7 @@ def test_log_in_runtest_logreport(testdir):
 log_file
 )
 )
-testdir.makeconftest(
+pytester.makeconftest(
 """
 import logging
 logger = logging.getLogger(__name__)
@@ -1003,29 +1014,29 @@ def test_log_in_runtest_logreport(testdir):
 logger.info("logreport")
 """
 )
-testdir.makepyfile(
+pytester.makepyfile(
 """
 def test_first():
 assert True
 """
 )
-testdir.runpytest()
+pytester.runpytest()
 with open(log_file) as rfh:
 contents = rfh.read()
 assert contents.count("logreport") == 3


-def test_log_set_path(testdir):
+def test_log_set_path(pytester: Pytester) -> None:
-report_dir_base = testdir.tmpdir.strpath
+report_dir_base = str(pytester.path)

-testdir.makeini(
+pytester.makeini(
 """
 [pytest]
 log_file_level = DEBUG
 log_cli=true
 """
 )
-testdir.makeconftest(
+pytester.makeconftest(
 """
 import os
 import pytest
@@ -1040,7 +1051,7 @@ def test_log_set_path(testdir):
 repr(report_dir_base)
 )
 )
-testdir.makepyfile(
+pytester.makepyfile(
 """
 import logging
 logger = logging.getLogger("testcase-logger")
@@ -1053,7 +1064,7 @@ def test_log_set_path(testdir):
 assert True
 """
 )
-testdir.runpytest()
+pytester.runpytest()
 with open(os.path.join(report_dir_base, "test_first")) as rfh:
 content = rfh.read()
 assert "message from test 1" in content
@@ -1063,10 +1074,10 @@ def test_log_set_path(testdir):
 assert "message from test 2" in content


-def test_colored_captured_log(testdir):
+def test_colored_captured_log(pytester: Pytester) -> None:
 """Test that the level names of captured log messages of a failing test
 are colored."""
-testdir.makepyfile(
+pytester.makepyfile(
 """
 import logging

@@ -1077,7 +1088,7 @@ def test_colored_captured_log(testdir):
 assert False
 """
 )
-result = testdir.runpytest("--log-level=INFO", "--color=yes")
+result = pytester.runpytest("--log-level=INFO", "--color=yes")
 assert result.ret == 1
 result.stdout.fnmatch_lines(
 [
@@ -1087,9 +1098,9 @@ def test_colored_captured_log(testdir):
 )


-def test_colored_ansi_esc_caplogtext(testdir):
+def test_colored_ansi_esc_caplogtext(pytester: Pytester) -> None:
 """Make sure that caplog.text does not contain ANSI escape sequences."""
-testdir.makepyfile(
+pytester.makepyfile(
 """
 import logging

@@ -1100,11 +1111,11 @@ def test_colored_ansi_esc_caplogtext(testdir):
 assert '\x1b' not in caplog.text
 """
 )
-result = testdir.runpytest("--log-level=INFO", "--color=yes")
+result = pytester.runpytest("--log-level=INFO", "--color=yes")
 assert result.ret == 0


-def test_logging_emit_error(testdir: Testdir) -> None:
+def test_logging_emit_error(pytester: Pytester) -> None:
 """An exception raised during emit() should fail the test.

 The default behavior of logging is to print "Logging error"
@@ -1112,7 +1123,7 @@ def test_logging_emit_error(testdir: Testdir) -> None:

 pytest overrides this behavior to propagate the exception.
 """
-testdir.makepyfile(
+pytester.makepyfile(
 """
 import logging

@@ -1120,7 +1131,7 @@ def test_logging_emit_error(testdir: Testdir) -> None:
 logging.warning('oops', 'first', 2)
 """
 )
-result = testdir.runpytest()
+result = pytester.runpytest()
 result.assert_outcomes(failed=1)
 result.stdout.fnmatch_lines(
 [
@@ -1130,10 +1141,10 @@ def test_logging_emit_error(testdir: Testdir) -> None:
 )


-def test_logging_emit_error_supressed(testdir: Testdir) -> None:
+def test_logging_emit_error_supressed(pytester: Pytester) -> None:
 """If logging is configured to silently ignore errors, pytest
 doesn't propagate errors either."""
-testdir.makepyfile(
+pytester.makepyfile(
 """
 import logging

@@ -1142,13 +1153,15 @@ def test_logging_emit_error_supressed(testdir: Testdir) -> None:
 logging.warning('oops', 'first', 2)
 """
 )
-result = testdir.runpytest()
+result = pytester.runpytest()
 result.assert_outcomes(passed=1)


-def test_log_file_cli_subdirectories_are_successfully_created(testdir):
+def test_log_file_cli_subdirectories_are_successfully_created(
-path = testdir.makepyfile(""" def test_logger(): pass """)
+pytester: Pytester,
+) -> None:
+path = pytester.makepyfile(""" def test_logger(): pass """)
 expected = os.path.join(os.path.dirname(str(path)), "foo", "bar")
-result = testdir.runpytest("--log-file=foo/bar/logf.log")
+result = pytester.runpytest("--log-file=foo/bar/logf.log")
 assert "logf.log" in os.listdir(expected)
 assert result.ret == ExitCode.OK

File diff suppressed because it is too large.

@@ -8,8 +8,6 @@ from typing import Generator
from typing import List
from typing import Optional

-import py
-
import pytest
from _pytest.config import ExitCode
from _pytest.config import PytestPluginManager
@@ -93,9 +91,9 @@ class TestConftestValueAccessGlobal:
conftest = ConftestWithSetinitial(startdir)
mod, value = conftest._rget_with_confmod("a", startdir, importmode="prepend")
assert value == 1.5
-path = py.path.local(mod.__file__)
-assert path.dirpath() == basedir / "adir" / "b"
-assert path.purebasename.startswith("conftest")
+path = Path(mod.__file__)
+assert path.parent == basedir / "adir" / "b"
+assert path.stem == "conftest"


def test_conftest_in_nonpkg_with_init(tmp_path: Path, _sys_snapshot) -> None:
@@ -361,11 +359,10 @@ def test_conftest_import_order(pytester: Pytester, monkeypatch: MonkeyPatch) ->


def test_fixture_dependency(pytester: Pytester) -> None:
-ct1 = pytester.makeconftest("")
-ct1 = pytester.makepyfile("__init__.py")
-ct1.write_text("")
+pytester.makeconftest("")
+pytester.path.joinpath("__init__.py").touch()
sub = pytester.mkdir("sub")
-sub.joinpath("__init__.py").write_text("")
+sub.joinpath("__init__.py").touch()
sub.joinpath("conftest.py").write_text(
textwrap.dedent(
"""\
@@ -387,7 +384,7 @@ def test_fixture_dependency(pytester: Pytester) -> None:
)
subsub = sub.joinpath("subsub")
subsub.mkdir()
-subsub.joinpath("__init__.py").write_text("")
+subsub.joinpath("__init__.py").touch()
subsub.joinpath("test_bar.py").write_text(
textwrap.dedent(
"""\
@@ -525,8 +522,8 @@ class TestConftestVisibility:
"""#616"""
dirs = self._setup_tree(pytester)
print("pytest run in cwd: %s" % (dirs[chdir].relative_to(pytester.path)))
-print("pytestarg : %s" % (testarg))
-print("expected pass : %s" % (expect_ntests_passed))
+print("pytestarg : %s" % testarg)
+print("expected pass : %s" % expect_ntests_passed)
os.chdir(dirs[chdir])
reprec = pytester.inline_run(testarg, "-q", "--traceconfig")
reprec.assertoutcome(passed=expect_ntests_passed)
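The py.path-to-pathlib substitutions in the hunk above follow a small fixed vocabulary: dirpath() becomes .parent, purebasename becomes .stem, join() becomes joinpath(). A short illustration using an invented path, assuming nothing beyond plain pathlib:

from pathlib import Path

p = Path("/adir/b/conftest.py")      # illustrative path only
assert p.parent == Path("/adir/b")   # py.path equivalent: p.dirpath()
assert p.stem == "conftest"          # py.path equivalent: p.purebasename
assert p.name == "conftest.py"       # py.path equivalent: p.basename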
@@ -2,15 +2,14 @@ import os
import re
import sys
import textwrap
+from pathlib import Path
from typing import Dict
from typing import Generator
from typing import Type

-import py
-
import pytest
from _pytest.monkeypatch import MonkeyPatch
-from _pytest.pytester import Testdir
+from _pytest.pytester import Pytester


@pytest.fixture
@@ -233,8 +232,8 @@ def test_setenv_prepend() -> None:
assert "XYZ123" not in os.environ


-def test_monkeypatch_plugin(testdir: Testdir) -> None:
-reprec = testdir.inline_runsource(
+def test_monkeypatch_plugin(pytester: Pytester) -> None:
+reprec = pytester.inline_runsource(
"""
def test_method(monkeypatch):
assert monkeypatch.__class__.__name__ == "MonkeyPatch"
@@ -268,33 +267,33 @@ def test_syspath_prepend_double_undo(mp: MonkeyPatch) -> None:
sys.path[:] = old_syspath


-def test_chdir_with_path_local(mp: MonkeyPatch, tmpdir: py.path.local) -> None:
-mp.chdir(tmpdir)
-assert os.getcwd() == tmpdir.strpath
+def test_chdir_with_path_local(mp: MonkeyPatch, tmp_path: Path) -> None:
+mp.chdir(tmp_path)
+assert os.getcwd() == str(tmp_path)


-def test_chdir_with_str(mp: MonkeyPatch, tmpdir: py.path.local) -> None:
-mp.chdir(tmpdir.strpath)
-assert os.getcwd() == tmpdir.strpath
+def test_chdir_with_str(mp: MonkeyPatch, tmp_path: Path) -> None:
+mp.chdir(str(tmp_path))
+assert os.getcwd() == str(tmp_path)


-def test_chdir_undo(mp: MonkeyPatch, tmpdir: py.path.local) -> None:
+def test_chdir_undo(mp: MonkeyPatch, tmp_path: Path) -> None:
cwd = os.getcwd()
-mp.chdir(tmpdir)
+mp.chdir(tmp_path)
mp.undo()
assert os.getcwd() == cwd


-def test_chdir_double_undo(mp: MonkeyPatch, tmpdir: py.path.local) -> None:
-mp.chdir(tmpdir.strpath)
+def test_chdir_double_undo(mp: MonkeyPatch, tmp_path: Path) -> None:
+mp.chdir(str(tmp_path))
mp.undo()
-tmpdir.chdir()
+os.chdir(tmp_path)
mp.undo()
-assert os.getcwd() == tmpdir.strpath
+assert os.getcwd() == str(tmp_path)


-def test_issue185_time_breaks(testdir: Testdir) -> None:
-testdir.makepyfile(
+def test_issue185_time_breaks(pytester: Pytester) -> None:
+pytester.makepyfile(
"""
import time
def test_m(monkeypatch):
@@ -303,7 +302,7 @@ def test_issue185_time_breaks(testdir: Testdir) -> None:
monkeypatch.setattr(time, "time", f)
"""
)
-result = testdir.runpytest()
+result = pytester.runpytest()
result.stdout.fnmatch_lines(
"""
*1 passed*
@@ -311,9 +310,9 @@ def test_issue185_time_breaks(testdir: Testdir) -> None:
)


-def test_importerror(testdir: Testdir) -> None:
-p = testdir.mkpydir("package")
-p.join("a.py").write(
+def test_importerror(pytester: Pytester) -> None:
+p = pytester.mkpydir("package")
+p.joinpath("a.py").write_text(
textwrap.dedent(
"""\
import doesnotexist
@@ -322,7 +321,7 @@ def test_importerror(testdir: Testdir) -> None:
"""
)
)
-testdir.tmpdir.join("test_importerror.py").write(
+pytester.path.joinpath("test_importerror.py").write_text(
textwrap.dedent(
"""\
def test_importerror(monkeypatch):
@@ -330,7 +329,7 @@ def test_importerror(testdir: Testdir) -> None:
"""
)
)
-result = testdir.runpytest()
+result = pytester.runpytest()
result.stdout.fnmatch_lines(
"""
*import error in package.a: No module named 'doesnotexist'*
@@ -420,16 +419,18 @@ def test_context_classmethod() -> None:


def test_syspath_prepend_with_namespace_packages(
-testdir: Testdir, monkeypatch: MonkeyPatch
+pytester: Pytester, monkeypatch: MonkeyPatch
) -> None:
for dirname in "hello", "world":
-d = testdir.mkdir(dirname)
-ns = d.mkdir("ns_pkg")
-ns.join("__init__.py").write(
+d = pytester.mkdir(dirname)
+ns = d.joinpath("ns_pkg")
+ns.mkdir()
+ns.joinpath("__init__.py").write_text(
"__import__('pkg_resources').declare_namespace(__name__)"
)
-lib = ns.mkdir(dirname)
-lib.join("__init__.py").write("def check(): return %r" % dirname)
+lib = ns.joinpath(dirname)
+lib.mkdir()
+lib.joinpath("__init__.py").write_text("def check(): return %r" % dirname)

monkeypatch.syspath_prepend("hello")
import ns_pkg.hello
@@ -446,8 +447,7 @@ def test_syspath_prepend_with_namespace_packages(
assert ns_pkg.world.check() == "world"

# Should invalidate caches via importlib.invalidate_caches.
-tmpdir = testdir.tmpdir
-modules_tmpdir = tmpdir.mkdir("modules_tmpdir")
+modules_tmpdir = pytester.mkdir("modules_tmpdir")
monkeypatch.syspath_prepend(str(modules_tmpdir))
-modules_tmpdir.join("main_app.py").write("app = True")
+modules_tmpdir.joinpath("main_app.py").write_text("app = True")
from main_app import app # noqa: F401
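The tmpdir-to-tmp_path changes above reduce to three equivalences: tmpdir.strpath is str(tmp_path), tmpdir.chdir() is os.chdir(tmp_path), and x.join(name).write(text) is x.joinpath(name).write_text(text). A minimal sketch under those assumptions (the helper name is invented):

import os
from pathlib import Path


def populate_and_enter(tmp_path: Path) -> None:
    # pathlib spellings of the py.path idioms replaced above
    pkg = tmp_path.joinpath("pkg")
    pkg.mkdir()                                  # was: tmpdir.mkdir("pkg")
    pkg.joinpath("__init__.py").write_text("")   # was: .join(...).write("")
    os.chdir(tmp_path)                           # was: tmpdir.chdir()
    assert os.getcwd() == str(tmp_path)          # was: tmpdir.strpath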
@@ -1,13 +1,17 @@
import os
+import shutil
import sys
import types
from typing import List

import pytest
+from _pytest.config import Config
from _pytest.config import ExitCode
from _pytest.config import PytestPluginManager
from _pytest.config.exceptions import UsageError
from _pytest.main import Session
+from _pytest.monkeypatch import MonkeyPatch
+from _pytest.pathlib import import_path
from _pytest.pytester import Pytester


@@ -18,7 +22,7 @@ def pytestpm() -> PytestPluginManager:

class TestPytestPluginInteractions:
def test_addhooks_conftestplugin(
-self, pytester: Pytester, _config_for_test
+self, pytester: Pytester, _config_for_test: Config
) -> None:
pytester.makepyfile(
newhooks="""
@@ -45,15 +49,15 @@ class TestPytestPluginInteractions:
res = config.hook.pytest_myhook(xyz=10)
assert res == [11]

-def test_addhooks_nohooks(self, testdir):
-testdir.makeconftest(
+def test_addhooks_nohooks(self, pytester: Pytester) -> None:
+pytester.makeconftest(
"""
import sys
def pytest_addhooks(pluginmanager):
pluginmanager.add_hookspecs(sys)
"""
)
-res = testdir.runpytest()
+res = pytester.runpytest()
assert res.ret != 0
res.stderr.fnmatch_lines(["*did not find*sys*"])

@@ -70,8 +74,8 @@ class TestPytestPluginInteractions:
config.pluginmanager._importconftest(p, importmode="prepend")
assert config.option.test123

-def test_configure(self, testdir):
-config = testdir.parseconfig()
+def test_configure(self, pytester: Pytester) -> None:
+config = pytester.parseconfig()
values = []

class A:
@@ -90,7 +94,7 @@ class TestPytestPluginInteractions:
config.pluginmanager.register(A())
assert len(values) == 2

-def test_hook_tracing(self, _config_for_test) -> None:
+def test_hook_tracing(self, _config_for_test: Config) -> None:
pytestpm = _config_for_test.pluginmanager # fully initialized with plugins
saveindent = []

@@ -139,9 +143,9 @@ class TestPytestPluginInteractions:
ihook_b = session.gethookproxy(pytester.path / "tests")
assert ihook_a is not ihook_b

-def test_hook_with_addoption(self, testdir):
+def test_hook_with_addoption(self, pytester: Pytester) -> None:
"""Test that hooks can be used in a call to pytest_addoption"""
-testdir.makepyfile(
+pytester.makepyfile(
newhooks="""
import pytest
@pytest.hookspec(firstresult=True)
@@ -149,7 +153,7 @@ class TestPytestPluginInteractions:
pass
"""
)
-testdir.makepyfile(
+pytester.makepyfile(
myplugin="""
import newhooks
def pytest_addhooks(pluginmanager):
@@ -159,30 +163,32 @@ class TestPytestPluginInteractions:
parser.addoption("--config", help="Config, defaults to %(default)s", default=default_value)
"""
)
-testdir.makeconftest(
+pytester.makeconftest(
"""
pytest_plugins=("myplugin",)
def pytest_default_value():
return "default_value"
"""
)
-res = testdir.runpytest("--help")
+res = pytester.runpytest("--help")
res.stdout.fnmatch_lines(["*--config=CONFIG*default_value*"])


-def test_default_markers(testdir):
-result = testdir.runpytest("--markers")
+def test_default_markers(pytester: Pytester) -> None:
+result = pytester.runpytest("--markers")
result.stdout.fnmatch_lines(["*tryfirst*first*", "*trylast*last*"])


-def test_importplugin_error_message(testdir, pytestpm):
+def test_importplugin_error_message(
+pytester: Pytester, pytestpm: PytestPluginManager
+) -> None:
"""Don't hide import errors when importing plugins and provide
an easy to debug message.

See #375 and #1998.
"""
-testdir.syspathinsert(testdir.tmpdir)
-testdir.makepyfile(
+pytester.syspathinsert(pytester.path)
+pytester.makepyfile(
qwe="""\
def test_traceback():
raise ImportError('Not possible to import: ☺')
@@ -199,7 +205,7 @@ def test_importplugin_error_message(testdir, pytestpm):


class TestPytestPluginManager:
-def test_register_imported_modules(self):
+def test_register_imported_modules(self) -> None:
pm = PytestPluginManager()
mod = types.ModuleType("x.y.pytest_hello")
pm.register(mod)
@@ -219,23 +225,27 @@ class TestPytestPluginManager:
assert pm.get_plugin("pytest_xyz") == mod
assert pm.is_registered(mod)

-def test_consider_module(self, testdir, pytestpm: PytestPluginManager) -> None:
-testdir.syspathinsert()
-testdir.makepyfile(pytest_p1="#")
-testdir.makepyfile(pytest_p2="#")
+def test_consider_module(
+self, pytester: Pytester, pytestpm: PytestPluginManager
+) -> None:
+pytester.syspathinsert()
+pytester.makepyfile(pytest_p1="#")
+pytester.makepyfile(pytest_p2="#")
mod = types.ModuleType("temp")
mod.__dict__["pytest_plugins"] = ["pytest_p1", "pytest_p2"]
pytestpm.consider_module(mod)
assert pytestpm.get_plugin("pytest_p1").__name__ == "pytest_p1"
assert pytestpm.get_plugin("pytest_p2").__name__ == "pytest_p2"

-def test_consider_module_import_module(self, testdir, _config_for_test) -> None:
+def test_consider_module_import_module(
+self, pytester: Pytester, _config_for_test: Config
+) -> None:
pytestpm = _config_for_test.pluginmanager
mod = types.ModuleType("x")
mod.__dict__["pytest_plugins"] = "pytest_a"
-aplugin = testdir.makepyfile(pytest_a="#")
-reprec = testdir.make_hook_recorder(pytestpm)
-testdir.syspathinsert(aplugin.dirpath())
+aplugin = pytester.makepyfile(pytest_a="#")
+reprec = pytester.make_hook_recorder(pytestpm)
+pytester.syspathinsert(aplugin.parent)
pytestpm.consider_module(mod)
call = reprec.getcall(pytestpm.hook.pytest_plugin_registered.name)
assert call.plugin.__name__ == "pytest_a"
@@ -245,30 +255,37 @@ class TestPytestPluginManager:
values = reprec.getcalls("pytest_plugin_registered")
assert len(values) == 1

-def test_consider_env_fails_to_import(self, monkeypatch, pytestpm):
+def test_consider_env_fails_to_import(
+self, monkeypatch: MonkeyPatch, pytestpm: PytestPluginManager
+) -> None:
monkeypatch.setenv("PYTEST_PLUGINS", "nonexisting", prepend=",")
with pytest.raises(ImportError):
pytestpm.consider_env()

@pytest.mark.filterwarnings("always")
-def test_plugin_skip(self, testdir, monkeypatch):
-p = testdir.makepyfile(
+def test_plugin_skip(self, pytester: Pytester, monkeypatch: MonkeyPatch) -> None:
+p = pytester.makepyfile(
skipping1="""
import pytest
pytest.skip("hello", allow_module_level=True)
"""
)
-p.copy(p.dirpath("skipping2.py"))
+shutil.copy(p, p.with_name("skipping2.py"))
monkeypatch.setenv("PYTEST_PLUGINS", "skipping2")
-result = testdir.runpytest("-p", "skipping1", syspathinsert=True)
+result = pytester.runpytest("-p", "skipping1", syspathinsert=True)
assert result.ret == ExitCode.NO_TESTS_COLLECTED
result.stdout.fnmatch_lines(
["*skipped plugin*skipping1*hello*", "*skipped plugin*skipping2*hello*"]
)

-def test_consider_env_plugin_instantiation(self, testdir, monkeypatch, pytestpm):
-testdir.syspathinsert()
-testdir.makepyfile(xy123="#")
+def test_consider_env_plugin_instantiation(
+self,
+pytester: Pytester,
+monkeypatch: MonkeyPatch,
+pytestpm: PytestPluginManager,
+) -> None:
+pytester.syspathinsert()
+pytester.makepyfile(xy123="#")
monkeypatch.setitem(os.environ, "PYTEST_PLUGINS", "xy123")
l1 = len(pytestpm.get_plugins())
pytestpm.consider_env()
@@ -279,9 +296,11 @@ class TestPytestPluginManager:
l3 = len(pytestpm.get_plugins())
assert l2 == l3

-def test_pluginmanager_ENV_startup(self, testdir, monkeypatch):
-testdir.makepyfile(pytest_x500="#")
-p = testdir.makepyfile(
+def test_pluginmanager_ENV_startup(
+self, pytester: Pytester, monkeypatch: MonkeyPatch
+) -> None:
+pytester.makepyfile(pytest_x500="#")
+p = pytester.makepyfile(
"""
import pytest
def test_hello(pytestconfig):
@@ -290,17 +309,19 @@ class TestPytestPluginManager:
"""
)
monkeypatch.setenv("PYTEST_PLUGINS", "pytest_x500", prepend=",")
-result = testdir.runpytest(p, syspathinsert=True)
+result = pytester.runpytest(p, syspathinsert=True)
assert result.ret == 0
result.stdout.fnmatch_lines(["*1 passed*"])

-def test_import_plugin_importname(self, testdir, pytestpm):
+def test_import_plugin_importname(
+self, pytester: Pytester, pytestpm: PytestPluginManager
+) -> None:
pytest.raises(ImportError, pytestpm.import_plugin, "qweqwex.y")
pytest.raises(ImportError, pytestpm.import_plugin, "pytest_qweqwx.y")

-testdir.syspathinsert()
+pytester.syspathinsert()
pluginname = "pytest_hello"
-testdir.makepyfile(**{pluginname: ""})
+pytester.makepyfile(**{pluginname: ""})
pytestpm.import_plugin("pytest_hello")
len1 = len(pytestpm.get_plugins())
pytestpm.import_plugin("pytest_hello")
@@ -311,25 +332,29 @@ class TestPytestPluginManager:
plugin2 = pytestpm.get_plugin("pytest_hello")
assert plugin2 is plugin1

-def test_import_plugin_dotted_name(self, testdir, pytestpm):
+def test_import_plugin_dotted_name(
+self, pytester: Pytester, pytestpm: PytestPluginManager
+) -> None:
pytest.raises(ImportError, pytestpm.import_plugin, "qweqwex.y")
pytest.raises(ImportError, pytestpm.import_plugin, "pytest_qweqwex.y")

-testdir.syspathinsert()
-testdir.mkpydir("pkg").join("plug.py").write("x=3")
+pytester.syspathinsert()
+pytester.mkpydir("pkg").joinpath("plug.py").write_text("x=3")
pluginname = "pkg.plug"
pytestpm.import_plugin(pluginname)
mod = pytestpm.get_plugin("pkg.plug")
assert mod.x == 3

-def test_consider_conftest_deps(self, testdir, pytestpm):
-mod = testdir.makepyfile("pytest_plugins='xyz'").pyimport()
+def test_consider_conftest_deps(
+self, pytester: Pytester, pytestpm: PytestPluginManager,
+) -> None:
+mod = import_path(pytester.makepyfile("pytest_plugins='xyz'"))
with pytest.raises(ImportError):
pytestpm.consider_conftest(mod)


class TestPytestPluginManagerBootstrapming:
-def test_preparse_args(self, pytestpm):
+def test_preparse_args(self, pytestpm: PytestPluginManager) -> None:
pytest.raises(
ImportError, lambda: pytestpm.consider_preparse(["xyz", "-p", "hello123"])
)
@@ -346,7 +371,7 @@ class TestPytestPluginManagerBootstrapming:
with pytest.raises(UsageError, match="^plugin main cannot be disabled$"):
pytestpm.consider_preparse(["-p", "no:main"])

-def test_plugin_prevent_register(self, pytestpm):
+def test_plugin_prevent_register(self, pytestpm: PytestPluginManager) -> None:
pytestpm.consider_preparse(["xyz", "-p", "no:abc"])
l1 = pytestpm.get_plugins()
pytestpm.register(42, name="abc")
@@ -354,7 +379,9 @@ class TestPytestPluginManagerBootstrapming:
assert len(l2) == len(l1)
assert 42 not in l2

-def test_plugin_prevent_register_unregistered_alredy_registered(self, pytestpm):
+def test_plugin_prevent_register_unregistered_alredy_registered(
+self, pytestpm: PytestPluginManager
+) -> None:
pytestpm.register(42, name="abc")
l1 = pytestpm.get_plugins()
assert 42 in l1
@@ -363,8 +390,8 @@ class TestPytestPluginManagerBootstrapming:
assert 42 not in l2

def test_plugin_prevent_register_stepwise_on_cacheprovider_unregister(
-self, pytestpm
-):
+self, pytestpm: PytestPluginManager
+) -> None:
"""From PR #4304: The only way to unregister a module is documented at
the end of https://docs.pytest.org/en/stable/plugins.html.

@@ -380,7 +407,7 @@ class TestPytestPluginManagerBootstrapming:
assert 42 not in l2
assert 43 not in l2

-def test_blocked_plugin_can_be_used(self, pytestpm):
+def test_blocked_plugin_can_be_used(self, pytestpm: PytestPluginManager) -> None:
pytestpm.consider_preparse(["xyz", "-p", "no:abc", "-p", "abc"])

assert pytestpm.has_plugin("abc")
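Two substitutions in this file go beyond renaming the fixture: copying a generated module now uses shutil.copy on a pathlib.Path instead of py.path.local.copy, and importing it uses import_path from _pytest.pathlib instead of .pyimport(). A small sketch under those assumptions (the test body is invented; only the two calls mirror the diff):

import shutil

from _pytest.pathlib import import_path
from _pytest.pytester import Pytester


def test_copy_and_import(pytester: Pytester) -> None:
    p = pytester.makepyfile(skipping1="import pytest")
    # was: p.copy(p.dirpath("skipping2.py"))
    shutil.copy(p, p.with_name("skipping2.py"))
    # was: p.pyimport()
    mod = import_path(p)
    assert hasattr(mod, "pytest")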
@@ -1,29 +1,30 @@
-from pathlib import Path
from typing import Sequence
from typing import Union

+import py.path
+
import pytest
from _pytest._code.code import ExceptionChainRepr
from _pytest._code.code import ExceptionRepr
from _pytest.config import Config
-from _pytest.pytester import Testdir
+from _pytest.pytester import Pytester
from _pytest.reports import CollectReport
from _pytest.reports import TestReport


class TestReportSerialization:
-def test_xdist_longrepr_to_str_issue_241(self, testdir: Testdir) -> None:
+def test_xdist_longrepr_to_str_issue_241(self, pytester: Pytester) -> None:
"""Regarding issue pytest-xdist#241.

This test came originally from test_remote.py in xdist (ca03269).
"""
-testdir.makepyfile(
+pytester.makepyfile(
"""
def test_a(): assert False
def test_b(): pass
"""
)
-reprec = testdir.inline_run()
+reprec = pytester.inline_run()
reports = reprec.getreports("pytest_runtest_logreport")
assert len(reports) == 6
test_a_call = reports[1]
@@ -35,12 +36,12 @@ class TestReportSerialization:
assert test_b_call.outcome == "passed"
assert test_b_call._to_json()["longrepr"] is None

-def test_xdist_report_longrepr_reprcrash_130(self, testdir: Testdir) -> None:
+def test_xdist_report_longrepr_reprcrash_130(self, pytester: Pytester) -> None:
"""Regarding issue pytest-xdist#130

This test came originally from test_remote.py in xdist (ca03269).
"""
-reprec = testdir.inline_runsource(
+reprec = pytester.inline_runsource(
"""
def test_fail():
assert False, 'Expected Message'
@@ -74,14 +75,14 @@ class TestReportSerialization:
# Missing section attribute PR171
assert added_section in a.longrepr.sections

-def test_reprentries_serialization_170(self, testdir: Testdir) -> None:
+def test_reprentries_serialization_170(self, pytester: Pytester) -> None:
"""Regarding issue pytest-xdist#170

This test came originally from test_remote.py in xdist (ca03269).
"""
from _pytest._code.code import ReprEntry

-reprec = testdir.inline_runsource(
+reprec = pytester.inline_runsource(
"""
def test_repr_entry():
x = 0
@@ -120,14 +121,14 @@ class TestReportSerialization:
assert rep_entry.reprlocals.lines == a_entry.reprlocals.lines
assert rep_entry.style == a_entry.style

-def test_reprentries_serialization_196(self, testdir: Testdir) -> None:
+def test_reprentries_serialization_196(self, pytester: Pytester) -> None:
"""Regarding issue pytest-xdist#196

This test came originally from test_remote.py in xdist (ca03269).
"""
from _pytest._code.code import ReprEntryNative

-reprec = testdir.inline_runsource(
+reprec = pytester.inline_runsource(
"""
def test_repr_entry_native():
x = 0
@@ -149,9 +150,9 @@ class TestReportSerialization:
assert isinstance(rep_entries[i], ReprEntryNative)
assert rep_entries[i].lines == a_entries[i].lines

-def test_itemreport_outcomes(self, testdir: Testdir) -> None:
+def test_itemreport_outcomes(self, pytester: Pytester) -> None:
# This test came originally from test_remote.py in xdist (ca03269).
-reprec = testdir.inline_runsource(
+reprec = pytester.inline_runsource(
"""
import pytest
def test_pass(): pass
@@ -183,9 +184,9 @@ class TestReportSerialization:
if rep.failed:
assert newrep.longreprtext == rep.longreprtext

-def test_collectreport_passed(self, testdir: Testdir) -> None:
+def test_collectreport_passed(self, pytester: Pytester) -> None:
"""This test came originally from test_remote.py in xdist (ca03269)."""
-reprec = testdir.inline_runsource("def test_func(): pass")
+reprec = pytester.inline_runsource("def test_func(): pass")
reports = reprec.getreports("pytest_collectreport")
for rep in reports:
d = rep._to_json()
@@ -194,9 +195,9 @@ class TestReportSerialization:
assert newrep.failed == rep.failed
assert newrep.skipped == rep.skipped

-def test_collectreport_fail(self, testdir: Testdir) -> None:
+def test_collectreport_fail(self, pytester: Pytester) -> None:
"""This test came originally from test_remote.py in xdist (ca03269)."""
-reprec = testdir.inline_runsource("qwe abc")
+reprec = pytester.inline_runsource("qwe abc")
reports = reprec.getreports("pytest_collectreport")
assert reports
for rep in reports:
@@ -208,9 +209,9 @@ class TestReportSerialization:
if rep.failed:
assert newrep.longrepr == str(rep.longrepr)

-def test_extended_report_deserialization(self, testdir: Testdir) -> None:
+def test_extended_report_deserialization(self, pytester: Pytester) -> None:
"""This test came originally from test_remote.py in xdist (ca03269)."""
-reprec = testdir.inline_runsource("qwe abc")
+reprec = pytester.inline_runsource("qwe abc")
reports = reprec.getreports("pytest_collectreport")
assert reports
for rep in reports:
@@ -224,33 +225,33 @@ class TestReportSerialization:
if rep.failed:
assert newrep.longrepr == str(rep.longrepr)

-def test_paths_support(self, testdir: Testdir) -> None:
+def test_paths_support(self, pytester: Pytester) -> None:
"""Report attributes which are py.path or pathlib objects should become strings."""
-testdir.makepyfile(
+pytester.makepyfile(
"""
def test_a():
assert False
"""
)
-reprec = testdir.inline_run()
+reprec = pytester.inline_run()
reports = reprec.getreports("pytest_runtest_logreport")
assert len(reports) == 3
test_a_call = reports[1]
-test_a_call.path1 = testdir.tmpdir # type: ignore[attr-defined]
-test_a_call.path2 = Path(testdir.tmpdir) # type: ignore[attr-defined]
+test_a_call.path1 = py.path.local(pytester.path) # type: ignore[attr-defined]
+test_a_call.path2 = pytester.path # type: ignore[attr-defined]
data = test_a_call._to_json()
-assert data["path1"] == str(testdir.tmpdir)
-assert data["path2"] == str(testdir.tmpdir)
+assert data["path1"] == str(pytester.path)
+assert data["path2"] == str(pytester.path)

-def test_deserialization_failure(self, testdir: Testdir) -> None:
+def test_deserialization_failure(self, pytester: Pytester) -> None:
"""Check handling of failure during deserialization of report types."""
-testdir.makepyfile(
+pytester.makepyfile(
"""
def test_a():
assert False
"""
)
-reprec = testdir.inline_run()
+reprec = pytester.inline_run()
reports = reprec.getreports("pytest_runtest_logreport")
assert len(reports) == 3
test_a_call = reports[1]
@@ -265,9 +266,11 @@ class TestReportSerialization:
TestReport._from_json(data)

@pytest.mark.parametrize("report_class", [TestReport, CollectReport])
-def test_chained_exceptions(self, testdir: Testdir, tw_mock, report_class) -> None:
+def test_chained_exceptions(
+self, pytester: Pytester, tw_mock, report_class
+) -> None:
"""Check serialization/deserialization of report objects containing chained exceptions (#5786)"""
-testdir.makepyfile(
+pytester.makepyfile(
"""
def foo():
raise ValueError('value error')
@@ -283,7 +286,7 @@ class TestReportSerialization:
)
)

-reprec = testdir.inline_run()
+reprec = pytester.inline_run()
if report_class is TestReport:
reports: Union[
Sequence[TestReport], Sequence[CollectReport]
@@ -338,14 +341,14 @@ class TestReportSerialization:
# elsewhere and we do check the contents of the longrepr object after loading it.
loaded_report.longrepr.toterminal(tw_mock)

-def test_chained_exceptions_no_reprcrash(self, testdir: Testdir, tw_mock) -> None:
+def test_chained_exceptions_no_reprcrash(self, pytester: Pytester, tw_mock) -> None:
"""Regression test for tracebacks without a reprcrash (#5971)

This happens notably on exceptions raised by multiprocess.pool: the exception transfer
from subprocess to main process creates an artificial exception, which ExceptionInfo
can't obtain the ReprFileLocation from.
"""
-testdir.makepyfile(
+pytester.makepyfile(
"""
from concurrent.futures import ProcessPoolExecutor

@@ -358,8 +361,8 @@ class TestReportSerialization:
"""
)

-testdir.syspathinsert()
-reprec = testdir.inline_run()
+pytester.syspathinsert()
+reprec = pytester.inline_run()

reports = reprec.getreports("pytest_runtest_logreport")

@@ -396,12 +399,13 @@ class TestReportSerialization:
loaded_report.longrepr.toterminal(tw_mock)

def test_report_prevent_ConftestImportFailure_hiding_exception(
-self, testdir: Testdir
+self, pytester: Pytester
) -> None:
-sub_dir = testdir.tmpdir.join("ns").ensure_dir()
-sub_dir.join("conftest").new(ext=".py").write("import unknown")
+sub_dir = pytester.path.joinpath("ns")
+sub_dir.mkdir()
+sub_dir.joinpath("conftest.py").write_text("import unknown")

-result = testdir.runpytest_subprocess(".")
+result = pytester.runpytest_subprocess(".")
result.stdout.fnmatch_lines(["E *Error: No module named 'unknown'"])
result.stdout.no_fnmatch_line("ERROR - *ConftestImportFailure*")

@@ -409,14 +413,14 @@ class TestReportSerialization:
class TestHooks:
"""Test that the hooks are working correctly for plugins"""

-def test_test_report(self, testdir: Testdir, pytestconfig: Config) -> None:
-testdir.makepyfile(
+def test_test_report(self, pytester: Pytester, pytestconfig: Config) -> None:
+pytester.makepyfile(
"""
def test_a(): assert False
def test_b(): pass
"""
)
-reprec = testdir.inline_run()
+reprec = pytester.inline_run()
reports = reprec.getreports("pytest_runtest_logreport")
assert len(reports) == 6
for rep in reports:
@@ -431,14 +435,14 @@ class TestHooks:
assert new_rep.when == rep.when
assert new_rep.outcome == rep.outcome

-def test_collect_report(self, testdir: Testdir, pytestconfig: Config) -> None:
-testdir.makepyfile(
+def test_collect_report(self, pytester: Pytester, pytestconfig: Config) -> None:
+pytester.makepyfile(
"""
def test_a(): assert False
def test_b(): pass
"""
)
-reprec = testdir.inline_run()
+reprec = pytester.inline_run()
reports = reprec.getreports("pytest_collectreport")
assert len(reports) == 2
for rep in reports:
@@ -457,14 +461,14 @@ class TestHooks:
"hook_name", ["pytest_runtest_logreport", "pytest_collectreport"]
)
def test_invalid_report_types(
-self, testdir: Testdir, pytestconfig: Config, hook_name: str
+self, pytester: Pytester, pytestconfig: Config, hook_name: str
) -> None:
-testdir.makepyfile(
+pytester.makepyfile(
"""
def test_a(): pass
"""
)
-reprec = testdir.inline_run()
+reprec = pytester.inline_run()
reports = reprec.getreports(hook_name)
assert reports
rep = reports[0]
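test_paths_support above now feeds the report one py.path.local and one pathlib.Path and expects both to serialize to the same plain string. The conversion it relies on is lossless through str(); a tiny illustration (the concrete path is invented):

import py.path
from pathlib import Path

p = Path("/some/dir")        # illustrative path; it does not need to exist
legacy = py.path.local(p)    # py.path.local accepts a pathlib.Path
assert str(legacy) == str(p)
assert Path(str(legacy)) == p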
@ -2,26 +2,28 @@ import inspect
|
||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
import types
|
import types
|
||||||
|
from pathlib import Path
|
||||||
from typing import Dict
|
from typing import Dict
|
||||||
from typing import List
|
from typing import List
|
||||||
from typing import Tuple
|
from typing import Tuple
|
||||||
from typing import Type
|
from typing import Type
|
||||||
|
|
||||||
import py
|
|
||||||
|
|
||||||
import _pytest._code
|
|
||||||
import pytest
|
import pytest
|
||||||
from _pytest import outcomes
|
from _pytest import outcomes
|
||||||
from _pytest import reports
|
from _pytest import reports
|
||||||
from _pytest import runner
|
from _pytest import runner
|
||||||
|
from _pytest._code import ExceptionInfo
|
||||||
|
from _pytest._code.code import ExceptionChainRepr
|
||||||
from _pytest.config import ExitCode
|
from _pytest.config import ExitCode
|
||||||
|
from _pytest.monkeypatch import MonkeyPatch
|
||||||
from _pytest.outcomes import OutcomeException
|
from _pytest.outcomes import OutcomeException
|
||||||
|
from _pytest.pytester import Pytester
|
||||||
|
|
||||||
|
|
||||||
class TestSetupState:
|
class TestSetupState:
|
||||||
def test_setup(self, testdir) -> None:
|
def test_setup(self, pytester: Pytester) -> None:
|
||||||
ss = runner.SetupState()
|
ss = runner.SetupState()
|
||||||
item = testdir.getitem("def test_func(): pass")
|
item = pytester.getitem("def test_func(): pass")
|
||||||
values = [1]
|
values = [1]
|
||||||
ss.prepare(item)
|
ss.prepare(item)
|
||||||
ss.addfinalizer(values.pop, colitem=item)
|
ss.addfinalizer(values.pop, colitem=item)
|
||||||
|
@ -29,15 +31,15 @@ class TestSetupState:
|
||||||
ss._pop_and_teardown()
|
ss._pop_and_teardown()
|
||||||
assert not values
|
assert not values
|
||||||
|
|
||||||
def test_teardown_exact_stack_empty(self, testdir) -> None:
|
def test_teardown_exact_stack_empty(self, pytester: Pytester) -> None:
|
||||||
item = testdir.getitem("def test_func(): pass")
|
item = pytester.getitem("def test_func(): pass")
|
||||||
ss = runner.SetupState()
|
ss = runner.SetupState()
|
||||||
ss.teardown_exact(item, None)
|
ss.teardown_exact(item, None)
|
||||||
ss.teardown_exact(item, None)
|
ss.teardown_exact(item, None)
|
||||||
ss.teardown_exact(item, None)
|
ss.teardown_exact(item, None)
|
||||||
|
|
||||||
def test_setup_fails_and_failure_is_cached(self, testdir) -> None:
|
def test_setup_fails_and_failure_is_cached(self, pytester: Pytester) -> None:
|
||||||
item = testdir.getitem(
|
item = pytester.getitem(
|
||||||
"""
|
"""
|
||||||
def setup_module(mod):
|
def setup_module(mod):
|
||||||
raise ValueError(42)
|
raise ValueError(42)
|
||||||
|
@ -48,7 +50,7 @@ class TestSetupState:
|
||||||
pytest.raises(ValueError, lambda: ss.prepare(item))
|
pytest.raises(ValueError, lambda: ss.prepare(item))
|
||||||
pytest.raises(ValueError, lambda: ss.prepare(item))
|
pytest.raises(ValueError, lambda: ss.prepare(item))
|
||||||
|
|
||||||
def test_teardown_multiple_one_fails(self, testdir) -> None:
|
def test_teardown_multiple_one_fails(self, pytester: Pytester) -> None:
|
||||||
r = []
|
r = []
|
||||||
|
|
||||||
def fin1():
|
def fin1():
|
||||||
|
@ -60,7 +62,7 @@ class TestSetupState:
|
||||||
def fin3():
|
def fin3():
|
||||||
r.append("fin3")
|
r.append("fin3")
|
||||||
|
|
||||||
item = testdir.getitem("def test_func(): pass")
|
item = pytester.getitem("def test_func(): pass")
|
||||||
ss = runner.SetupState()
|
ss = runner.SetupState()
|
||||||
ss.addfinalizer(fin1, item)
|
ss.addfinalizer(fin1, item)
|
||||||
ss.addfinalizer(fin2, item)
|
ss.addfinalizer(fin2, item)
|
||||||
|
@ -70,7 +72,7 @@ class TestSetupState:
|
||||||
assert err.value.args == ("oops",)
|
assert err.value.args == ("oops",)
|
||||||
assert r == ["fin3", "fin1"]
|
assert r == ["fin3", "fin1"]
|
||||||
|
|
||||||
def test_teardown_multiple_fail(self, testdir) -> None:
|
def test_teardown_multiple_fail(self, pytester: Pytester) -> None:
|
||||||
# Ensure the first exception is the one which is re-raised.
|
# Ensure the first exception is the one which is re-raised.
|
||||||
# Ideally both would be reported however.
|
# Ideally both would be reported however.
|
||||||
def fin1():
|
def fin1():
|
||||||
|
@ -79,7 +81,7 @@ class TestSetupState:
|
||||||
def fin2():
|
def fin2():
|
||||||
raise Exception("oops2")
|
raise Exception("oops2")
|
||||||
|
|
||||||
item = testdir.getitem("def test_func(): pass")
|
item = pytester.getitem("def test_func(): pass")
|
||||||
ss = runner.SetupState()
|
ss = runner.SetupState()
|
||||||
ss.addfinalizer(fin1, item)
|
ss.addfinalizer(fin1, item)
|
||||||
ss.addfinalizer(fin2, item)
|
ss.addfinalizer(fin2, item)
|
||||||
|
@ -87,7 +89,7 @@ class TestSetupState:
|
||||||
ss._callfinalizers(item)
|
ss._callfinalizers(item)
|
||||||
assert err.value.args == ("oops2",)
|
assert err.value.args == ("oops2",)
|
||||||
|
|
||||||
def test_teardown_multiple_scopes_one_fails(self, testdir) -> None:
|
def test_teardown_multiple_scopes_one_fails(self, pytester: Pytester) -> None:
|
||||||
module_teardown = []
|
module_teardown = []
|
||||||
|
|
||||||
def fin_func():
|
def fin_func():
|
||||||
|
@ -96,7 +98,7 @@ class TestSetupState:
|
||||||
def fin_module():
|
def fin_module():
|
||||||
module_teardown.append("fin_module")
|
module_teardown.append("fin_module")
|
||||||
|
|
||||||
item = testdir.getitem("def test_func(): pass")
|
item = pytester.getitem("def test_func(): pass")
|
||||||
ss = runner.SetupState()
|
ss = runner.SetupState()
|
||||||
ss.addfinalizer(fin_module, item.listchain()[-2])
|
ss.addfinalizer(fin_module, item.listchain()[-2])
|
||||||
ss.addfinalizer(fin_func, item)
|
ss.addfinalizer(fin_func, item)
|
||||||
|
@ -107,8 +109,8 @@ class TestSetupState:
|
||||||
|
|
||||||
|
|
||||||
class BaseFunctionalTests:
|
class BaseFunctionalTests:
|
||||||
def test_passfunction(self, testdir) -> None:
|
def test_passfunction(self, pytester: Pytester) -> None:
|
||||||
reports = testdir.runitem(
|
reports = pytester.runitem(
|
||||||
"""
|
"""
|
||||||
def test_func():
|
def test_func():
|
||||||
pass
|
pass
|
||||||
|
@ -120,8 +122,8 @@ class BaseFunctionalTests:
|
||||||
assert rep.outcome == "passed"
|
assert rep.outcome == "passed"
|
||||||
assert not rep.longrepr
|
assert not rep.longrepr
|
||||||
|
|
||||||
def test_failfunction(self, testdir) -> None:
|
def test_failfunction(self, pytester: Pytester) -> None:
|
||||||
reports = testdir.runitem(
|
reports = pytester.runitem(
|
||||||
"""
|
"""
|
||||||
def test_func():
|
def test_func():
|
||||||
assert 0
|
assert 0
|
||||||
|
@ -135,8 +137,8 @@ class BaseFunctionalTests:
|
||||||
assert rep.outcome == "failed"
|
assert rep.outcome == "failed"
|
||||||
# assert isinstance(rep.longrepr, ReprExceptionInfo)
|
# assert isinstance(rep.longrepr, ReprExceptionInfo)
|
||||||
|
|
||||||
def test_skipfunction(self, testdir) -> None:
|
def test_skipfunction(self, pytester: Pytester) -> None:
|
||||||
reports = testdir.runitem(
|
reports = pytester.runitem(
|
||||||
"""
|
"""
|
||||||
import pytest
|
import pytest
|
||||||
def test_func():
|
def test_func():
|
||||||
|
@ -155,8 +157,8 @@ class BaseFunctionalTests:
|
||||||
# assert rep.skipped.location.path
|
# assert rep.skipped.location.path
|
||||||
# assert not rep.skipped.failurerepr
|
# assert not rep.skipped.failurerepr
|

-    def test_skip_in_setup_function(self, testdir) -> None:
-        reports = testdir.runitem(
+    def test_skip_in_setup_function(self, pytester: Pytester) -> None:
+        reports = pytester.runitem(
            """
            import pytest
            def setup_function(func):
@@ -176,8 +178,8 @@ class BaseFunctionalTests:
        assert len(reports) == 2
        assert reports[1].passed  # teardown

-    def test_failure_in_setup_function(self, testdir) -> None:
-        reports = testdir.runitem(
+    def test_failure_in_setup_function(self, pytester: Pytester) -> None:
+        reports = pytester.runitem(
            """
            import pytest
            def setup_function(func):
@@ -193,8 +195,8 @@ class BaseFunctionalTests:
        assert rep.when == "setup"
        assert len(reports) == 2

-    def test_failure_in_teardown_function(self, testdir) -> None:
-        reports = testdir.runitem(
+    def test_failure_in_teardown_function(self, pytester: Pytester) -> None:
+        reports = pytester.runitem(
            """
            import pytest
            def teardown_function(func):
@@ -213,8 +215,8 @@ class BaseFunctionalTests:
        # assert rep.longrepr.reprcrash.lineno == 3
        # assert rep.longrepr.reprtraceback.reprentries

-    def test_custom_failure_repr(self, testdir) -> None:
-        testdir.makepyfile(
+    def test_custom_failure_repr(self, pytester: Pytester) -> None:
+        pytester.makepyfile(
            conftest="""
            import pytest
            class Function(pytest.Function):
@@ -222,7 +224,7 @@ class BaseFunctionalTests:
                return "hello"
            """
        )
-        reports = testdir.runitem(
+        reports = pytester.runitem(
            """
            import pytest
            def test_func():
@@ -238,8 +240,8 @@ class BaseFunctionalTests:
        # assert rep.failed.where.path.basename == "test_func.py"
        # assert rep.failed.failurerepr == "hello"

-    def test_teardown_final_returncode(self, testdir) -> None:
-        rec = testdir.inline_runsource(
+    def test_teardown_final_returncode(self, pytester: Pytester) -> None:
+        rec = pytester.inline_runsource(
            """
            def test_func():
                pass
@@ -249,8 +251,8 @@ class BaseFunctionalTests:
        )
        assert rec.ret == 1

-    def test_logstart_logfinish_hooks(self, testdir) -> None:
-        rec = testdir.inline_runsource(
+    def test_logstart_logfinish_hooks(self, pytester: Pytester) -> None:
+        rec = pytester.inline_runsource(
            """
            import pytest
            def test_func():
@@ -266,8 +268,8 @@ class BaseFunctionalTests:
            assert rep.nodeid == "test_logstart_logfinish_hooks.py::test_func"
            assert rep.location == ("test_logstart_logfinish_hooks.py", 1, "test_func")

-    def test_exact_teardown_issue90(self, testdir) -> None:
-        rec = testdir.inline_runsource(
+    def test_exact_teardown_issue90(self, pytester: Pytester) -> None:
+        rec = pytester.inline_runsource(
            """
            import pytest

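The pattern repeated throughout these hunks is mechanical: the `testdir` fixture becomes the type-annotated `pytester` fixture and every `testdir.*` call becomes the corresponding `pytester.*` call. A minimal sketch of that pattern, assuming pytest >= 6.2 (where `_pytest.pytester.Pytester` exists); the test name and file contents below are invented for illustration:

    from _pytest.pytester import Pytester

    # Outside pytest's own suite the pytester fixture must be enabled explicitly,
    # e.g. in conftest.py:
    pytest_plugins = ["pytester"]


    def test_example_migration(pytester: Pytester) -> None:
        # makepyfile() writes a module into a fresh temporary directory,
        # just as testdir.makepyfile() did.
        pytester.makepyfile(
            """
            def test_ok():
                assert True
            """
        )
        # runpytest() executes it and returns a RunResult.
        result = pytester.runpytest()
        result.assert_outcomes(passed=1)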
@@ -306,9 +308,9 @@
        assert reps[5].nodeid.endswith("test_func")
        assert reps[5].failed

-    def test_exact_teardown_issue1206(self, testdir) -> None:
+    def test_exact_teardown_issue1206(self, pytester: Pytester) -> None:
        """Issue shadowing error with wrong number of arguments on teardown_method."""
-        rec = testdir.inline_runsource(
+        rec = pytester.inline_runsource(
            """
            import pytest

@@ -335,14 +337,19 @@
        assert reps[2].nodeid.endswith("test_method")
        assert reps[2].failed
        assert reps[2].when == "teardown"
-        assert reps[2].longrepr.reprcrash.message in (
+        longrepr = reps[2].longrepr
+        assert isinstance(longrepr, ExceptionChainRepr)
+        assert longrepr.reprcrash
+        assert longrepr.reprcrash.message in (
            "TypeError: teardown_method() missing 2 required positional arguments: 'y' and 'z'",
            # Python >= 3.10
            "TypeError: TestClass.teardown_method() missing 2 required positional arguments: 'y' and 'z'",
        )

-    def test_failure_in_setup_function_ignores_custom_repr(self, testdir) -> None:
-        testdir.makepyfile(
+    def test_failure_in_setup_function_ignores_custom_repr(
+        self, pytester: Pytester
+    ) -> None:
+        pytester.makepyfile(
            conftest="""
            import pytest
            class Function(pytest.Function):
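The change above is not only cosmetic: `TestReport.longrepr` is typed as a wide union, so binding it to `longrepr` and asserting `isinstance(..., ExceptionChainRepr)` plus a non-None `reprcrash` narrows the type before `.reprcrash.message` is read. A small sketch of the same idiom; the helper name is invented:

    from _pytest._code.code import ExceptionChainRepr
    from _pytest.reports import TestReport


    def crash_message(rep: TestReport) -> str:
        # longrepr may be None, a str, a tuple, or an exception-repr object,
        # so narrow it before touching reprcrash (which is itself Optional).
        longrepr = rep.longrepr
        assert isinstance(longrepr, ExceptionChainRepr)
        assert longrepr.reprcrash is not None
        return longrepr.reprcrash.message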
@@ -350,7 +357,7 @@
                assert 0
            """
        )
-        reports = testdir.runitem(
+        reports = pytester.runitem(
            """
            def setup_function(func):
                raise ValueError(42)
@@ -369,9 +376,9 @@
        # assert rep.outcome.where.path.basename == "test_func.py"
        # assert instanace(rep.failed.failurerepr, PythonFailureRepr)

-    def test_systemexit_does_not_bail_out(self, testdir) -> None:
+    def test_systemexit_does_not_bail_out(self, pytester: Pytester) -> None:
        try:
-            reports = testdir.runitem(
+            reports = pytester.runitem(
                """
                def test_func():
                    raise SystemExit(42)
@@ -383,9 +390,9 @@
        assert rep.failed
        assert rep.when == "call"

-    def test_exit_propagates(self, testdir) -> None:
+    def test_exit_propagates(self, pytester: Pytester) -> None:
        try:
-            testdir.runitem(
+            pytester.runitem(
                """
                import pytest
                def test_func():
@@ -405,9 +412,9 @@ class TestExecutionNonForked(BaseFunctionalTests):

        return f

-    def test_keyboardinterrupt_propagates(self, testdir) -> None:
+    def test_keyboardinterrupt_propagates(self, pytester: Pytester) -> None:
        try:
-            testdir.runitem(
+            pytester.runitem(
                """
                def test_func():
                    raise KeyboardInterrupt("fake")
@@ -420,8 +427,8 @@ class TestExecutionNonForked(BaseFunctionalTests):


class TestSessionReports:
-    def test_collect_result(self, testdir) -> None:
-        col = testdir.getmodulecol(
+    def test_collect_result(self, pytester: Pytester) -> None:
+        col = pytester.getmodulecol(
            """
            def test_func1():
                pass
@@ -489,8 +496,8 @@ def test_callinfo() -> None:


@pytest.mark.xfail
-def test_runtest_in_module_ordering(testdir) -> None:
-    p1 = testdir.makepyfile(
+def test_runtest_in_module_ordering(pytester: Pytester) -> None:
+    p1 = pytester.makepyfile(
        """
        import pytest
        def pytest_runtest_setup(item): # runs after class-level!
@@ -517,7 +524,7 @@ def test_runtest_in_module_ordering(testdir) -> None:
            del item.function.mylist
        """
    )
-    result = testdir.runpytest(p1)
+    result = pytester.runpytest(p1)
    result.stdout.fnmatch_lines(["*2 passed*"])


@@ -547,8 +554,8 @@ def test_pytest_fail() -> None:
    assert s.startswith("Failed")


-def test_pytest_exit_msg(testdir) -> None:
-    testdir.makeconftest(
+def test_pytest_exit_msg(pytester: Pytester) -> None:
+    pytester.makeconftest(
        """
        import pytest

@@ -556,7 +563,7 @@ def test_pytest_exit_msg(testdir) -> None:
            pytest.exit('oh noes')
        """
    )
-    result = testdir.runpytest()
+    result = pytester.runpytest()
    result.stderr.fnmatch_lines(["Exit: oh noes"])


@@ -570,22 +577,22 @@ def _strip_resource_warnings(lines):
    ]


-def test_pytest_exit_returncode(testdir) -> None:
-    testdir.makepyfile(
+def test_pytest_exit_returncode(pytester: Pytester) -> None:
+    pytester.makepyfile(
        """\
        import pytest
        def test_foo():
            pytest.exit("some exit msg", 99)
        """
    )
-    result = testdir.runpytest()
+    result = pytester.runpytest()
    result.stdout.fnmatch_lines(["*! *Exit: some exit msg !*"])

    assert _strip_resource_warnings(result.stderr.lines) == []
    assert result.ret == 99

    # It prints to stderr also in case of exit during pytest_sessionstart.
-    testdir.makeconftest(
+    pytester.makeconftest(
        """\
        import pytest

@@ -593,7 +600,7 @@ def test_pytest_exit_returncode(testdir) -> None:
            pytest.exit("during_sessionstart", 98)
        """
    )
-    result = testdir.runpytest()
+    result = pytester.runpytest()
    result.stdout.fnmatch_lines(["*! *Exit: during_sessionstart !*"])
    assert _strip_resource_warnings(result.stderr.lines) == [
        "Exit: during_sessionstart"
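These hunks rely on the same few `RunResult` helpers: `.ret` for the session exit status and the `.stdout`/`.stderr` wrappers with their glob-style matchers. A hedged sketch of that usage (test body invented; the pytester plugin is assumed enabled as in the earlier sketch):

    from _pytest.pytester import Pytester


    def test_exit_code_sketch(pytester: Pytester) -> None:
        pytester.makepyfile(
            """
            import pytest

            def test_stop():
                pytest.exit("stopping", 99)
            """
        )
        result = pytester.runpytest()
        # .ret carries the return code; the stdout/stderr wrappers offer
        # fnmatch_lines() and no_fnmatch_line() for pattern assertions.
        assert result.ret == 99
        result.stdout.fnmatch_lines(["*Exit: stopping*"])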
@@ -601,9 +608,9 @@ def test_pytest_exit_returncode(testdir) -> None:
    assert result.ret == 98


-def test_pytest_fail_notrace_runtest(testdir) -> None:
+def test_pytest_fail_notrace_runtest(pytester: Pytester) -> None:
    """Test pytest.fail(..., pytrace=False) does not show tracebacks during test run."""
-    testdir.makepyfile(
+    pytester.makepyfile(
        """
        import pytest
        def test_hello():
@@ -612,14 +619,14 @@ def test_pytest_fail_notrace_runtest(testdir) -> None:
            pytest.fail("world", pytrace=False)
        """
    )
-    result = testdir.runpytest()
+    result = pytester.runpytest()
    result.stdout.fnmatch_lines(["world", "hello"])
    result.stdout.no_fnmatch_line("*def teardown_function*")


-def test_pytest_fail_notrace_collection(testdir) -> None:
+def test_pytest_fail_notrace_collection(pytester: Pytester) -> None:
    """Test pytest.fail(..., pytrace=False) does not show tracebacks during collection."""
-    testdir.makepyfile(
+    pytester.makepyfile(
        """
        import pytest
        def some_internal_function():
@@ -627,17 +634,17 @@ def test_pytest_fail_notrace_collection(testdir) -> None:
            some_internal_function()
        """
    )
-    result = testdir.runpytest()
+    result = pytester.runpytest()
    result.stdout.fnmatch_lines(["hello"])
    result.stdout.no_fnmatch_line("*def some_internal_function()*")


-def test_pytest_fail_notrace_non_ascii(testdir) -> None:
+def test_pytest_fail_notrace_non_ascii(pytester: Pytester) -> None:
    """Fix pytest.fail with pytrace=False with non-ascii characters (#1178).

    This tests with native and unicode strings containing non-ascii chars.
    """
-    testdir.makepyfile(
+    pytester.makepyfile(
        """\
        import pytest

@@ -645,28 +652,28 @@ def test_pytest_fail_notrace_non_ascii(testdir) -> None:
            pytest.fail('oh oh: ☺', pytrace=False)
        """
    )
-    result = testdir.runpytest()
+    result = pytester.runpytest()
    result.stdout.fnmatch_lines(["*test_hello*", "oh oh: ☺"])
    result.stdout.no_fnmatch_line("*def test_hello*")


-def test_pytest_no_tests_collected_exit_status(testdir) -> None:
-    result = testdir.runpytest()
+def test_pytest_no_tests_collected_exit_status(pytester: Pytester) -> None:
+    result = pytester.runpytest()
    result.stdout.fnmatch_lines(["*collected 0 items*"])
    assert result.ret == ExitCode.NO_TESTS_COLLECTED

-    testdir.makepyfile(
+    pytester.makepyfile(
        test_foo="""
        def test_foo():
            assert 1
        """
    )
-    result = testdir.runpytest()
+    result = pytester.runpytest()
    result.stdout.fnmatch_lines(["*collected 1 item*"])
    result.stdout.fnmatch_lines(["*1 passed*"])
    assert result.ret == ExitCode.OK

-    result = testdir.runpytest("-k nonmatch")
+    result = pytester.runpytest("-k nonmatch")
    result.stdout.fnmatch_lines(["*collected 1 item*"])
    result.stdout.fnmatch_lines(["*1 deselected*"])
    assert result.ret == ExitCode.NO_TESTS_COLLECTED
@@ -677,7 +684,7 @@ def test_exception_printing_skip() -> None:
    try:
        pytest.skip("hello")
    except pytest.skip.Exception:
-        excinfo = _pytest._code.ExceptionInfo.from_current()
+        excinfo = ExceptionInfo.from_current()
        s = excinfo.exconly(tryshort=True)
        assert s.startswith("Skipped")

@@ -698,10 +705,10 @@ def test_importorskip(monkeypatch) -> None:
        excrepr = excinfo.getrepr()
        assert excrepr is not None
        assert excrepr.reprcrash is not None
-        path = py.path.local(excrepr.reprcrash.path)
+        path = Path(excrepr.reprcrash.path)
        # check that importorskip reports the actual call
        # in this test the test_runner.py file
-        assert path.purebasename == "test_runner"
+        assert path.stem == "test_runner"
        pytest.raises(SyntaxError, pytest.importorskip, "x y z")
        pytest.raises(SyntaxError, pytest.importorskip, "x=y")
        mod = types.ModuleType("hello123")
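The `py.path.local` to `pathlib.Path` swap is the other recurring theme of the migration; `purebasename` on the old API corresponds to `stem` on the new one. A tiny sketch of the equivalence (the path value is invented), assuming the `py` package is still installed alongside pytest:

    from pathlib import Path

    import py

    legacy = py.path.local("/tmp/test_runner.py")
    modern = Path("/tmp/test_runner.py")

    # Both drop the directory and the suffix, keeping only the bare module name.
    assert legacy.purebasename == "test_runner"
    assert modern.stem == "test_runner"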
@@ -712,9 +719,7 @@ def test_importorskip(monkeypatch) -> None:
        mod2 = pytest.importorskip("hello123", minversion="1.3")
        assert mod2 == mod
    except pytest.skip.Exception:  # pragma: no cover
-        assert False, "spurious skip: {}".format(
-            _pytest._code.ExceptionInfo.from_current()
-        )
+        assert False, f"spurious skip: {ExceptionInfo.from_current()}"


def test_importorskip_imports_last_module_part() -> None:
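The three-line `str.format()` call collapses into a single f-string because both build the same message from the live `ExceptionInfo`. A quick check of that equivalence (the raised exception is invented):

    from _pytest._code.code import ExceptionInfo

    try:
        raise ValueError("boom")
    except ValueError:
        info = ExceptionInfo.from_current()
        # Same text either way; the f-string simply fits on one line.
        assert "spurious skip: {}".format(info) == f"spurious skip: {info}"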
@@ -732,14 +737,12 @@ def test_importorskip_dev_module(monkeypatch) -> None:
        with pytest.raises(pytest.skip.Exception):
            pytest.importorskip("mockmodule1", minversion="0.14.0")
    except pytest.skip.Exception:  # pragma: no cover
-        assert False, "spurious skip: {}".format(
-            _pytest._code.ExceptionInfo.from_current()
-        )
+        assert False, f"spurious skip: {ExceptionInfo.from_current()}"


-def test_importorskip_module_level(testdir) -> None:
+def test_importorskip_module_level(pytester: Pytester) -> None:
    """`importorskip` must be able to skip entire modules when used at module level."""
-    testdir.makepyfile(
+    pytester.makepyfile(
        """
        import pytest
        foobarbaz = pytest.importorskip("foobarbaz")
@@ -748,13 +751,13 @@ def test_importorskip_module_level(testdir) -> None:
            pass
        """
    )
-    result = testdir.runpytest()
+    result = pytester.runpytest()
    result.stdout.fnmatch_lines(["*collected 0 items / 1 skipped*"])


-def test_importorskip_custom_reason(testdir) -> None:
+def test_importorskip_custom_reason(pytester: Pytester) -> None:
    """Make sure custom reasons are used."""
-    testdir.makepyfile(
+    pytester.makepyfile(
        """
        import pytest
        foobarbaz = pytest.importorskip("foobarbaz2", reason="just because")
@@ -763,13 +766,13 @@ def test_importorskip_custom_reason(testdir) -> None:
            pass
        """
    )
-    result = testdir.runpytest("-ra")
+    result = pytester.runpytest("-ra")
    result.stdout.fnmatch_lines(["*just because*"])
    result.stdout.fnmatch_lines(["*collected 0 items / 1 skipped*"])


-def test_pytest_cmdline_main(testdir) -> None:
-    p = testdir.makepyfile(
+def test_pytest_cmdline_main(pytester: Pytester) -> None:
+    p = pytester.makepyfile(
        """
        import pytest
        def test_hello():
@@ -786,8 +789,8 @@ def test_pytest_cmdline_main(testdir) -> None:
    assert ret == 0


-def test_unicode_in_longrepr(testdir) -> None:
-    testdir.makeconftest(
+def test_unicode_in_longrepr(pytester: Pytester) -> None:
+    pytester.makeconftest(
        """\
        import pytest
        @pytest.hookimpl(hookwrapper=True)
@@ -798,19 +801,19 @@ def test_unicode_in_longrepr(testdir) -> None:
            rep.longrepr = 'ä'
        """
    )
-    testdir.makepyfile(
+    pytester.makepyfile(
        """
        def test_out():
            assert 0
        """
    )
-    result = testdir.runpytest()
+    result = pytester.runpytest()
    assert result.ret == 1
    assert "UnicodeEncodeError" not in result.stderr.str()


-def test_failure_in_setup(testdir) -> None:
-    testdir.makepyfile(
+def test_failure_in_setup(pytester: Pytester) -> None:
+    pytester.makepyfile(
        """
        def setup_module():
            0/0
@@ -818,24 +821,26 @@ def test_failure_in_setup(testdir) -> None:
            pass
        """
    )
-    result = testdir.runpytest("--tb=line")
+    result = pytester.runpytest("--tb=line")
    result.stdout.no_fnmatch_line("*def setup_module*")


-def test_makereport_getsource(testdir) -> None:
-    testdir.makepyfile(
+def test_makereport_getsource(pytester: Pytester) -> None:
+    pytester.makepyfile(
        """
        def test_foo():
            if False: pass
            else: assert False
        """
    )
-    result = testdir.runpytest()
+    result = pytester.runpytest()
    result.stdout.no_fnmatch_line("*INTERNALERROR*")
    result.stdout.fnmatch_lines(["*else: assert False*"])


-def test_makereport_getsource_dynamic_code(testdir, monkeypatch) -> None:
+def test_makereport_getsource_dynamic_code(
+    pytester: Pytester, monkeypatch: MonkeyPatch
+) -> None:
    """Test that exception in dynamically generated code doesn't break getting the source line."""
    import inspect

@@ -849,7 +854,7 @@ def test_makereport_getsource_dynamic_code(testdir, monkeypatch) -> None:

    monkeypatch.setattr(inspect, "findsource", findsource)

-    testdir.makepyfile(
+    pytester.makepyfile(
        """
        import pytest

@@ -861,7 +866,7 @@ def test_makereport_getsource_dynamic_code(testdir, monkeypatch) -> None:
            assert False
        """
    )
-    result = testdir.runpytest("-vv")
+    result = pytester.runpytest("-vv")
    result.stdout.no_fnmatch_line("*INTERNALERROR*")
    result.stdout.fnmatch_lines(["*test_fix*", "*fixture*'missing'*not found*"])

@@ -896,12 +901,12 @@ def test_store_except_info_on_error() -> None:
    assert not hasattr(sys, "last_traceback")


-def test_current_test_env_var(testdir, monkeypatch) -> None:
+def test_current_test_env_var(pytester: Pytester, monkeypatch: MonkeyPatch) -> None:
    pytest_current_test_vars: List[Tuple[str, str]] = []
    monkeypatch.setattr(
        sys, "pytest_current_test_vars", pytest_current_test_vars, raising=False
    )
-    testdir.makepyfile(
+    pytester.makepyfile(
        """
        import pytest
        import sys
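Annotating both fixtures (`pytester: Pytester`, `monkeypatch: MonkeyPatch`) is what lets mypy check the calls made on them; it changes nothing at runtime. A hedged sketch of a test written in that style (the test name, file contents, and environment variable are invented; the pytester plugin is assumed enabled):

    from _pytest.monkeypatch import MonkeyPatch
    from _pytest.pytester import Pytester


    def test_typed_fixtures_sketch(pytester: Pytester, monkeypatch: MonkeyPatch) -> None:
        monkeypatch.setenv("EXAMPLE_FLAG", "1")
        pytester.makepyfile(
            """
            import os

            def test_flag():
                assert os.environ["EXAMPLE_FLAG"] == "1"
            """
        )
        # runpytest() runs in-process by default, so the patched environment is visible.
        result = pytester.runpytest()
        result.assert_outcomes(passed=1)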
@@ -917,7 +922,7 @@ def test_current_test_env_var(testdir, monkeypatch) -> None:
            sys.pytest_current_test_vars.append(('call', os.environ['PYTEST_CURRENT_TEST']))
        """
    )
-    result = testdir.runpytest_inprocess()
+    result = pytester.runpytest_inprocess()
    assert result.ret == 0
    test_id = "test_current_test_env_var.py::test"
    assert pytest_current_test_vars == [
@@ -934,8 +939,8 @@ class TestReportContents:
    def getrunner(self):
        return lambda item: runner.runtestprotocol(item, log=False)

-    def test_longreprtext_pass(self, testdir) -> None:
-        reports = testdir.runitem(
+    def test_longreprtext_pass(self, pytester: Pytester) -> None:
+        reports = pytester.runitem(
            """
            def test_func():
                pass
@@ -944,9 +949,9 @@ class TestReportContents:
        rep = reports[1]
        assert rep.longreprtext == ""

-    def test_longreprtext_skip(self, testdir) -> None:
+    def test_longreprtext_skip(self, pytester: Pytester) -> None:
        """TestReport.longreprtext can handle non-str ``longrepr`` attributes (#7559)"""
-        reports = testdir.runitem(
+        reports = pytester.runitem(
            """
            import pytest
            def test_func():
@@ -957,22 +962,22 @@ class TestReportContents:
        assert isinstance(call_rep.longrepr, tuple)
        assert "Skipped" in call_rep.longreprtext

-    def test_longreprtext_collect_skip(self, testdir) -> None:
+    def test_longreprtext_collect_skip(self, pytester: Pytester) -> None:
        """CollectReport.longreprtext can handle non-str ``longrepr`` attributes (#7559)"""
-        testdir.makepyfile(
+        pytester.makepyfile(
            """
            import pytest
            pytest.skip(allow_module_level=True)
            """
        )
-        rec = testdir.inline_run()
+        rec = pytester.inline_run()
        calls = rec.getcalls("pytest_collectreport")
        _, call = calls
        assert isinstance(call.report.longrepr, tuple)
        assert "Skipped" in call.report.longreprtext

-    def test_longreprtext_failure(self, testdir) -> None:
-        reports = testdir.runitem(
+    def test_longreprtext_failure(self, pytester: Pytester) -> None:
+        reports = pytester.runitem(
            """
            def test_func():
                x = 1
@@ -982,8 +987,8 @@ class TestReportContents:
        rep = reports[1]
        assert "assert 1 == 4" in rep.longreprtext

-    def test_captured_text(self, testdir) -> None:
-        reports = testdir.runitem(
+    def test_captured_text(self, pytester: Pytester) -> None:
+        reports = pytester.runitem(
            """
            import pytest
            import sys
@@ -1012,8 +1017,8 @@ class TestReportContents:
        assert call.capstderr == "setup: stderr\ncall: stderr\n"
        assert teardown.capstderr == "setup: stderr\ncall: stderr\nteardown: stderr\n"

-    def test_no_captured_text(self, testdir) -> None:
-        reports = testdir.runitem(
+    def test_no_captured_text(self, pytester: Pytester) -> None:
+        reports = pytester.runitem(
            """
            def test_func():
                pass
@@ -1023,8 +1028,8 @@ class TestReportContents:
        assert rep.capstdout == ""
        assert rep.capstderr == ""

-    def test_longrepr_type(self, testdir) -> None:
-        reports = testdir.runitem(
+    def test_longrepr_type(self, pytester: Pytester) -> None:
+        reports = pytester.runitem(
            """
            import pytest
            def test_func():
@@ -1032,7 +1037,7 @@ class TestReportContents:
            """
        )
        rep = reports[1]
-        assert isinstance(rep.longrepr, _pytest._code.code.ExceptionRepr)
+        assert isinstance(rep.longrepr, ExceptionChainRepr)


def test_outcome_exception_bad_msg() -> None: