2009-07-17 22:19:19 +08:00
|
|
|
"""
|
|
|
|
terminal reporting of the full testing process.
|
|
|
|
"""
|
2018-10-25 15:01:29 +08:00
|
|
|
from __future__ import absolute_import
|
|
|
|
from __future__ import division
|
|
|
|
from __future__ import print_function
|
|
|
|
|
2015-08-17 15:10:01 +08:00
|
|
|
import collections
|
2018-08-27 04:12:55 +08:00
|
|
|
import os
|
2009-07-17 22:19:19 +08:00
|
|
|
import sys
|
2018-08-24 00:06:17 +08:00
|
|
|
import textwrap
|
2009-07-17 22:19:19 +08:00
|
|
|
|
2017-08-25 23:46:55 +08:00
|
|
|
import pluggy
|
2015-11-27 22:43:01 +08:00
|
|
|
import py
|
2018-10-25 15:01:29 +08:00
|
|
|
|
2015-11-27 22:43:01 +08:00
|
|
|
import pytest
|
2015-07-05 01:42:22 +08:00
|
|
|
from _pytest.main import EXIT_NOTESTSCOLLECTED
|
2019-03-07 05:54:45 +08:00
|
|
|
from _pytest.reports import BaseReport
|
2019-04-08 01:36:29 +08:00
|
|
|
from _pytest.terminal import _folded_skips
|
2019-04-17 21:30:34 +08:00
|
|
|
from _pytest.terminal import _get_line_with_reprcrash_message
|
2018-10-25 15:01:29 +08:00
|
|
|
from _pytest.terminal import _plugin_nameversions
|
|
|
|
from _pytest.terminal import build_summary_stats_line
|
|
|
|
from _pytest.terminal import getreportopt
|
|
|
|
from _pytest.terminal import TerminalReporter
|
2009-07-17 22:19:19 +08:00
|
|
|
|
2018-05-23 22:48:46 +08:00
|
|
|
# Minimal stand-in for pkg_resources' distribution objects, used by the
# test_plugin_nameversion parametrization below.
DistInfo = collections.namedtuple("DistInfo", ["project_name", "version"])
|
2015-08-17 15:10:01 +08:00
|
|
|
|
|
|
|
|
2017-02-17 02:41:51 +08:00
|
|
|
class Option(object):
    """Bundle of terminal-reporting options used to parametrize tests.

    ``args`` renders the bundle as the equivalent pytest command-line
    arguments.
    """

    def __init__(self, verbosity=0, fulltrace=False):
        self.verbosity = verbosity
        self.fulltrace = fulltrace

    @property
    def args(self):
        """Return the pytest CLI arguments for this option combination."""
        flags = ["--verbosity=%d" % self.verbosity]
        if self.fulltrace:
            flags.append("--fulltrace")
        return flags
|
2009-07-18 00:07:37 +08:00
|
|
|
|
2017-07-17 07:25:09 +08:00
|
|
|
|
2018-05-23 22:48:46 +08:00
|
|
|
@pytest.fixture(
    params=[
        Option(verbosity=0),
        Option(verbosity=1),
        Option(verbosity=-1),
        Option(fulltrace=True),
    ],
    ids=["default", "verbose", "quiet", "fulltrace"],
)
def option(request):
    """Return each canonical combination of reporting options in turn."""
    return request.param
|
2010-07-07 18:41:15 +08:00
|
|
|
|
2009-07-18 00:07:37 +08:00
|
|
|
|
2018-05-23 22:48:46 +08:00
|
|
|
@pytest.mark.parametrize(
    "input,expected",
    [
        ([DistInfo(project_name="test", version=1)], ["test-1"]),
        ([DistInfo(project_name="pytest-test", version=1)], ["test-1"]),
        (
            [
                DistInfo(project_name="test", version=1),
                DistInfo(project_name="test", version=1),
            ],
            ["test-1"],
        ),
    ],
    ids=["normal", "prefix-strip", "deduplicate"],
)
def test_plugin_nameversion(input, expected):
    """_plugin_nameversions strips the pytest- prefix and deduplicates."""
    plugins = [(None, dist) for dist in input]
    got = _plugin_nameversions(plugins)
    assert got == expected
|
|
|
|
|
|
|
|
|
2017-02-17 02:41:51 +08:00
|
|
|
class TestTerminal(object):
|
2009-07-18 00:07:37 +08:00
|
|
|
def test_pass_skip_fail(self, testdir, option):
|
2018-05-23 22:48:46 +08:00
|
|
|
testdir.makepyfile(
|
|
|
|
"""
|
2010-11-18 05:12:16 +08:00
|
|
|
import pytest
|
2009-07-17 22:19:19 +08:00
|
|
|
def test_ok():
|
|
|
|
pass
|
|
|
|
def test_skip():
|
2010-11-18 05:12:16 +08:00
|
|
|
pytest.skip("xx")
|
2009-07-17 22:19:19 +08:00
|
|
|
def test_func():
|
|
|
|
assert 0
|
2018-05-23 22:48:46 +08:00
|
|
|
"""
|
|
|
|
)
|
2010-07-07 18:41:15 +08:00
|
|
|
result = testdir.runpytest(*option.args)
|
2019-03-22 14:45:43 +08:00
|
|
|
if option.verbosity > 0:
|
2018-05-23 22:48:46 +08:00
|
|
|
result.stdout.fnmatch_lines(
|
|
|
|
[
|
|
|
|
"*test_pass_skip_fail.py::test_ok PASS*",
|
|
|
|
"*test_pass_skip_fail.py::test_skip SKIP*",
|
|
|
|
"*test_pass_skip_fail.py::test_func FAIL*",
|
|
|
|
]
|
|
|
|
)
|
2019-03-22 14:45:43 +08:00
|
|
|
elif option.verbosity == 0:
|
2018-05-23 22:48:46 +08:00
|
|
|
result.stdout.fnmatch_lines(["*test_pass_skip_fail.py .sF*"])
|
2019-03-22 14:45:43 +08:00
|
|
|
else:
|
|
|
|
result.stdout.fnmatch_lines([".sF*"])
|
2018-05-23 22:48:46 +08:00
|
|
|
result.stdout.fnmatch_lines(
|
|
|
|
[" def test_func():", "> assert 0", "E assert 0"]
|
|
|
|
)
|
2009-07-17 22:19:19 +08:00
|
|
|
|
|
|
|
def test_internalerror(self, testdir, linecomp):
|
|
|
|
modcol = testdir.getmodulecol("def test_one(): pass")
|
|
|
|
rep = TerminalReporter(modcol.config, file=linecomp.stringio)
|
2018-11-23 02:05:10 +08:00
|
|
|
with pytest.raises(ValueError) as excinfo:
|
|
|
|
raise ValueError("hello")
|
2009-07-17 22:19:19 +08:00
|
|
|
rep.pytest_internalerror(excinfo.getrepr())
|
2018-05-23 22:48:46 +08:00
|
|
|
linecomp.assert_contains_lines(["INTERNALERROR> *ValueError*hello*"])
|
2009-07-17 22:19:19 +08:00
|
|
|
|
|
|
|
def test_writeline(self, testdir, linecomp):
|
|
|
|
modcol = testdir.getmodulecol("def test_one(): pass")
|
|
|
|
rep = TerminalReporter(modcol.config, file=linecomp.stringio)
|
2015-02-27 04:56:44 +08:00
|
|
|
rep.write_fspath_result(modcol.nodeid, ".")
|
2009-07-17 22:19:19 +08:00
|
|
|
rep.write_line("hello world")
|
2018-05-23 22:48:46 +08:00
|
|
|
lines = linecomp.stringio.getvalue().split("\n")
|
2009-07-17 22:19:19 +08:00
|
|
|
assert not lines[0]
|
2015-02-27 04:56:44 +08:00
|
|
|
assert lines[1].endswith(modcol.name + " .")
|
2009-07-17 22:19:19 +08:00
|
|
|
assert lines[2] == "hello world"
|
|
|
|
|
2010-09-26 22:23:45 +08:00
|
|
|
def test_show_runtest_logstart(self, testdir, linecomp):
|
2009-07-17 22:19:19 +08:00
|
|
|
item = testdir.getitem("def test_func(): pass")
|
|
|
|
tr = TerminalReporter(item.config, file=linecomp.stringio)
|
|
|
|
item.config.pluginmanager.register(tr)
|
2010-11-05 06:21:23 +08:00
|
|
|
location = item.reportinfo()
|
2018-05-23 22:48:46 +08:00
|
|
|
tr.config.hook.pytest_runtest_logstart(
|
|
|
|
nodeid=item.nodeid, location=location, fspath=str(item.fspath)
|
|
|
|
)
|
|
|
|
linecomp.assert_contains_lines(["*test_show_runtest_logstart.py*"])
|
2009-07-17 22:19:19 +08:00
|
|
|
|
2010-09-26 22:23:45 +08:00
|
|
|
def test_runtest_location_shown_before_test_starts(self, testdir):
|
2018-05-23 22:48:46 +08:00
|
|
|
testdir.makepyfile(
|
|
|
|
"""
|
2010-09-26 22:23:45 +08:00
|
|
|
def test_1():
|
|
|
|
import time
|
|
|
|
time.sleep(20)
|
2018-05-23 22:48:46 +08:00
|
|
|
"""
|
|
|
|
)
|
2010-09-26 22:23:45 +08:00
|
|
|
child = testdir.spawn_pytest("")
|
|
|
|
child.expect(".*test_runtest_location.*py")
|
|
|
|
child.sendeof()
|
|
|
|
child.kill(15)
|
2009-07-17 22:19:19 +08:00
|
|
|
|
2019-03-21 01:24:25 +08:00
|
|
|
def test_report_collect_after_half_a_second(self, testdir):
|
|
|
|
"""Test for "collecting" being updated after 0.5s"""
|
|
|
|
|
|
|
|
testdir.makepyfile(
|
|
|
|
**{
|
|
|
|
"test1.py": """
|
|
|
|
import _pytest.terminal
|
|
|
|
|
|
|
|
_pytest.terminal.REPORT_COLLECTING_RESOLUTION = 0
|
|
|
|
|
|
|
|
def test_1():
|
|
|
|
pass
|
|
|
|
""",
|
|
|
|
"test2.py": "def test_2(): pass",
|
|
|
|
}
|
|
|
|
)
|
|
|
|
|
|
|
|
child = testdir.spawn_pytest("-v test1.py test2.py")
|
|
|
|
child.expect(r"collecting \.\.\.")
|
|
|
|
child.expect(r"collecting 1 item")
|
|
|
|
child.expect(r"collecting 2 items")
|
|
|
|
child.expect(r"collected 2 items")
|
|
|
|
rest = child.read().decode("utf8")
|
|
|
|
assert "2 passed in" in rest
|
|
|
|
|
2009-08-17 22:45:52 +08:00
|
|
|
def test_itemreport_subclasses_show_subclassed_file(self, testdir):
|
2018-05-23 22:48:46 +08:00
|
|
|
testdir.makepyfile(
|
|
|
|
test_p1="""
|
2017-02-17 02:41:51 +08:00
|
|
|
class BaseTests(object):
|
2009-08-17 22:45:52 +08:00
|
|
|
def test_p1(self):
|
|
|
|
pass
|
|
|
|
class TestClass(BaseTests):
|
2010-07-27 03:15:15 +08:00
|
|
|
pass
|
2018-05-23 22:48:46 +08:00
|
|
|
"""
|
|
|
|
)
|
|
|
|
p2 = testdir.makepyfile(
|
|
|
|
test_p2="""
|
2009-08-17 22:45:52 +08:00
|
|
|
from test_p1 import BaseTests
|
|
|
|
class TestMore(BaseTests):
|
|
|
|
pass
|
2018-05-23 22:48:46 +08:00
|
|
|
"""
|
|
|
|
)
|
2009-08-17 22:45:52 +08:00
|
|
|
result = testdir.runpytest(p2)
|
2018-05-23 22:48:46 +08:00
|
|
|
result.stdout.fnmatch_lines(["*test_p2.py .*", "*1 passed*"])
|
2018-10-16 01:29:39 +08:00
|
|
|
result = testdir.runpytest("-vv", p2)
|
2018-05-23 22:48:46 +08:00
|
|
|
result.stdout.fnmatch_lines(
|
|
|
|
["*test_p2.py::TestMore::test_p1* <- *test_p1.py*PASSED*"]
|
|
|
|
)
|
2009-08-17 22:45:52 +08:00
|
|
|
|
2011-03-08 20:37:00 +08:00
|
|
|
def test_itemreport_directclasses_not_shown_as_subclasses(self, testdir):
|
2015-05-07 17:02:55 +08:00
|
|
|
a = testdir.mkpydir("a123")
|
2018-05-23 22:48:46 +08:00
|
|
|
a.join("test_hello123.py").write(
|
2018-08-24 00:06:17 +08:00
|
|
|
textwrap.dedent(
|
|
|
|
"""\
|
|
|
|
class TestClass(object):
|
|
|
|
def test_method(self):
|
|
|
|
pass
|
2018-05-23 22:48:46 +08:00
|
|
|
"""
|
|
|
|
)
|
|
|
|
)
|
2018-10-16 01:29:39 +08:00
|
|
|
result = testdir.runpytest("-vv")
|
2011-03-08 20:37:00 +08:00
|
|
|
assert result.ret == 0
|
2018-05-23 22:48:46 +08:00
|
|
|
result.stdout.fnmatch_lines(["*a123/test_hello123.py*PASS*"])
|
2011-03-08 20:37:00 +08:00
|
|
|
assert " <- " not in result.stdout.str()
|
|
|
|
|
2009-07-18 00:07:37 +08:00
|
|
|
def test_keyboard_interrupt(self, testdir, option):
|
2018-05-23 22:48:46 +08:00
|
|
|
testdir.makepyfile(
|
|
|
|
"""
|
2009-07-17 22:19:19 +08:00
|
|
|
def test_foobar():
|
|
|
|
assert 0
|
|
|
|
def test_spamegg():
|
2010-11-18 05:12:16 +08:00
|
|
|
import py; pytest.skip('skip me please!')
|
2009-07-17 22:19:19 +08:00
|
|
|
def test_interrupt_me():
|
|
|
|
raise KeyboardInterrupt # simulating the user
|
2018-05-23 22:48:46 +08:00
|
|
|
"""
|
|
|
|
)
|
2009-07-18 00:07:37 +08:00
|
|
|
|
2015-05-07 17:02:55 +08:00
|
|
|
result = testdir.runpytest(*option.args, no_reraise_ctrlc=True)
|
2018-05-23 22:48:46 +08:00
|
|
|
result.stdout.fnmatch_lines(
|
|
|
|
[
|
|
|
|
" def test_foobar():",
|
|
|
|
"> assert 0",
|
|
|
|
"E assert 0",
|
|
|
|
"*_keyboard_interrupt.py:6: KeyboardInterrupt*",
|
|
|
|
]
|
|
|
|
)
|
2010-05-25 22:52:09 +08:00
|
|
|
if option.fulltrace:
|
2018-05-23 22:48:46 +08:00
|
|
|
result.stdout.fnmatch_lines(
|
|
|
|
["*raise KeyboardInterrupt # simulating the user*"]
|
|
|
|
)
|
2016-02-11 02:54:10 +08:00
|
|
|
else:
|
2018-05-23 22:48:46 +08:00
|
|
|
result.stdout.fnmatch_lines(
|
2018-06-07 07:18:51 +08:00
|
|
|
["(to show a full traceback on KeyboardInterrupt use --fulltrace)"]
|
2018-05-23 22:48:46 +08:00
|
|
|
)
|
|
|
|
result.stdout.fnmatch_lines(["*KeyboardInterrupt*"])
|
2010-05-25 22:52:09 +08:00
|
|
|
|
2011-07-08 03:24:09 +08:00
|
|
|
def test_keyboard_in_sessionstart(self, testdir):
|
2018-05-23 22:48:46 +08:00
|
|
|
testdir.makeconftest(
|
|
|
|
"""
|
2011-07-08 03:24:09 +08:00
|
|
|
def pytest_sessionstart():
|
|
|
|
raise KeyboardInterrupt
|
2018-05-23 22:48:46 +08:00
|
|
|
"""
|
|
|
|
)
|
|
|
|
testdir.makepyfile(
|
|
|
|
"""
|
2011-07-08 03:24:09 +08:00
|
|
|
def test_foobar():
|
|
|
|
pass
|
2018-05-23 22:48:46 +08:00
|
|
|
"""
|
|
|
|
)
|
2011-07-08 03:24:09 +08:00
|
|
|
|
2015-05-07 17:02:55 +08:00
|
|
|
result = testdir.runpytest(no_reraise_ctrlc=True)
|
2011-07-08 03:24:09 +08:00
|
|
|
assert result.ret == 2
|
2018-05-23 22:48:46 +08:00
|
|
|
result.stdout.fnmatch_lines(["*KeyboardInterrupt*"])
|
2010-01-13 04:43:25 +08:00
|
|
|
|
2017-06-04 05:42:26 +08:00
|
|
|
def test_collect_single_item(self, testdir):
|
|
|
|
"""Use singular 'item' when reporting a single test item"""
|
2018-05-23 22:48:46 +08:00
|
|
|
testdir.makepyfile(
|
|
|
|
"""
|
2017-06-04 05:42:26 +08:00
|
|
|
def test_foobar():
|
|
|
|
pass
|
2018-05-23 22:48:46 +08:00
|
|
|
"""
|
|
|
|
)
|
2017-06-04 05:42:26 +08:00
|
|
|
result = testdir.runpytest()
|
2018-05-23 22:48:46 +08:00
|
|
|
result.stdout.fnmatch_lines(["collected 1 item"])
|
2017-06-04 05:42:26 +08:00
|
|
|
|
2017-08-04 07:54:33 +08:00
|
|
|
def test_rewrite(self, testdir, monkeypatch):
|
|
|
|
config = testdir.parseconfig()
|
|
|
|
f = py.io.TextIO()
|
2018-05-23 22:48:46 +08:00
|
|
|
monkeypatch.setattr(f, "isatty", lambda *args: True)
|
2017-08-04 07:54:33 +08:00
|
|
|
tr = TerminalReporter(config, f)
|
2017-11-24 05:26:57 +08:00
|
|
|
tr._tw.fullwidth = 10
|
2018-05-23 22:48:46 +08:00
|
|
|
tr.write("hello")
|
|
|
|
tr.rewrite("hey", erase=True)
|
|
|
|
assert f.getvalue() == "hello" + "\r" + "hey" + (6 * " ")
|
2017-08-04 07:54:33 +08:00
|
|
|
|
2009-07-17 22:19:19 +08:00
|
|
|
|
2017-02-17 02:41:51 +08:00
|
|
|
class TestCollectonly(object):
|
2011-03-07 01:32:00 +08:00
|
|
|
def test_collectonly_basic(self, testdir):
|
2018-05-23 22:48:46 +08:00
|
|
|
testdir.makepyfile(
|
|
|
|
"""
|
2009-07-17 22:19:19 +08:00
|
|
|
def test_func():
|
|
|
|
pass
|
2018-05-23 22:48:46 +08:00
|
|
|
"""
|
|
|
|
)
|
|
|
|
result = testdir.runpytest("--collect-only")
|
|
|
|
result.stdout.fnmatch_lines(
|
2018-11-26 01:33:18 +08:00
|
|
|
["<Module test_collectonly_basic.py>", " <Function test_func>"]
|
2018-05-23 22:48:46 +08:00
|
|
|
)
|
2009-07-17 22:19:19 +08:00
|
|
|
|
2011-03-07 01:32:00 +08:00
|
|
|
def test_collectonly_skipped_module(self, testdir):
|
2018-05-23 22:48:46 +08:00
|
|
|
testdir.makepyfile(
|
|
|
|
"""
|
2010-11-18 05:12:16 +08:00
|
|
|
import pytest
|
2011-03-07 01:32:00 +08:00
|
|
|
pytest.skip("hello")
|
2018-05-23 22:48:46 +08:00
|
|
|
"""
|
|
|
|
)
|
2013-08-01 23:32:19 +08:00
|
|
|
result = testdir.runpytest("--collect-only", "-rs")
|
2018-05-23 22:48:46 +08:00
|
|
|
result.stdout.fnmatch_lines(["*ERROR collecting*"])
|
2009-07-17 22:19:19 +08:00
|
|
|
|
2018-12-30 14:46:46 +08:00
|
|
|
def test_collectonly_display_test_description(self, testdir):
|
|
|
|
testdir.makepyfile(
|
|
|
|
"""
|
|
|
|
def test_with_description():
|
|
|
|
\""" This test has a description.
|
|
|
|
\"""
|
|
|
|
assert True
|
|
|
|
"""
|
|
|
|
)
|
|
|
|
result = testdir.runpytest("--collect-only", "--verbose")
|
|
|
|
result.stdout.fnmatch_lines([" This test has a description."])
|
|
|
|
|
2011-03-07 01:32:00 +08:00
|
|
|
def test_collectonly_failed_module(self, testdir):
|
|
|
|
testdir.makepyfile("""raise ValueError(0)""")
|
2013-08-01 23:32:19 +08:00
|
|
|
result = testdir.runpytest("--collect-only")
|
2018-05-23 22:48:46 +08:00
|
|
|
result.stdout.fnmatch_lines(["*raise ValueError*", "*1 error*"])
|
2009-07-17 22:19:19 +08:00
|
|
|
|
|
|
|
def test_collectonly_fatal(self, testdir):
|
2018-05-23 22:48:46 +08:00
|
|
|
testdir.makeconftest(
|
|
|
|
"""
|
2009-07-17 22:19:19 +08:00
|
|
|
def pytest_collectstart(collector):
|
2010-07-27 03:15:15 +08:00
|
|
|
assert 0, "urgs"
|
2018-05-23 22:48:46 +08:00
|
|
|
"""
|
|
|
|
)
|
2013-08-01 23:32:19 +08:00
|
|
|
result = testdir.runpytest("--collect-only")
|
2018-05-23 22:48:46 +08:00
|
|
|
result.stdout.fnmatch_lines(["*INTERNAL*args*"])
|
2009-07-17 22:19:19 +08:00
|
|
|
assert result.ret == 3
|
|
|
|
|
2009-08-03 21:27:26 +08:00
|
|
|
def test_collectonly_simple(self, testdir):
|
2018-05-23 22:48:46 +08:00
|
|
|
p = testdir.makepyfile(
|
|
|
|
"""
|
2009-08-03 21:27:26 +08:00
|
|
|
def test_func1():
|
|
|
|
pass
|
2017-02-17 02:41:51 +08:00
|
|
|
class TestClass(object):
|
2009-08-03 21:27:26 +08:00
|
|
|
def test_method(self):
|
|
|
|
pass
|
2018-05-23 22:48:46 +08:00
|
|
|
"""
|
|
|
|
)
|
2013-08-01 23:32:19 +08:00
|
|
|
result = testdir.runpytest("--collect-only", p)
|
2017-07-17 07:25:09 +08:00
|
|
|
# assert stderr.startswith("inserting into sys.path")
|
2009-08-03 21:27:26 +08:00
|
|
|
assert result.ret == 0
|
2018-05-23 22:48:46 +08:00
|
|
|
result.stdout.fnmatch_lines(
|
|
|
|
[
|
2018-11-26 01:33:18 +08:00
|
|
|
"*<Module *.py>",
|
|
|
|
"* <Function test_func1>",
|
|
|
|
"* <Class TestClass>",
|
|
|
|
"* <Function test_method>",
|
2018-05-23 22:48:46 +08:00
|
|
|
]
|
|
|
|
)
|
2009-08-03 21:27:26 +08:00
|
|
|
|
|
|
|
def test_collectonly_error(self, testdir):
|
|
|
|
p = testdir.makepyfile("import Errlkjqweqwe")
|
2013-08-01 23:32:19 +08:00
|
|
|
result = testdir.runpytest("--collect-only", p)
|
2016-06-20 21:05:50 +08:00
|
|
|
assert result.ret == 2
|
2018-05-23 22:48:46 +08:00
|
|
|
result.stdout.fnmatch_lines(
|
2018-08-24 00:06:17 +08:00
|
|
|
textwrap.dedent(
|
|
|
|
"""\
|
|
|
|
*ERROR*
|
|
|
|
*ImportError*
|
|
|
|
*No module named *Errlk*
|
|
|
|
*1 error*
|
2018-05-23 22:48:46 +08:00
|
|
|
"""
|
|
|
|
).strip()
|
|
|
|
)
|
2009-08-03 21:27:26 +08:00
|
|
|
|
2012-02-03 23:33:32 +08:00
|
|
|
def test_collectonly_missing_path(self, testdir):
|
|
|
|
"""this checks issue 115,
|
|
|
|
failure in parseargs will cause session
|
|
|
|
not to have the items attribute
|
|
|
|
"""
|
2013-08-01 23:32:19 +08:00
|
|
|
result = testdir.runpytest("--collect-only", "uhm_missing_path")
|
2012-05-18 05:11:23 +08:00
|
|
|
assert result.ret == 4
|
2018-05-23 22:48:46 +08:00
|
|
|
result.stderr.fnmatch_lines(["*ERROR: file not found*"])
|
2012-02-03 23:33:32 +08:00
|
|
|
|
2012-02-03 23:56:06 +08:00
|
|
|
def test_collectonly_quiet(self, testdir):
|
|
|
|
testdir.makepyfile("def test_foo(): pass")
|
2013-08-01 23:32:19 +08:00
|
|
|
result = testdir.runpytest("--collect-only", "-q")
|
2018-05-23 22:48:46 +08:00
|
|
|
result.stdout.fnmatch_lines(["*test_foo*"])
|
2012-02-03 23:56:06 +08:00
|
|
|
|
|
|
|
def test_collectonly_more_quiet(self, testdir):
|
|
|
|
testdir.makepyfile(test_fun="def test_foo(): pass")
|
2013-08-01 23:32:19 +08:00
|
|
|
result = testdir.runpytest("--collect-only", "-qq")
|
2018-05-23 22:48:46 +08:00
|
|
|
result.stdout.fnmatch_lines(["*test_fun.py: 1*"])
|
2012-02-03 23:56:06 +08:00
|
|
|
|
2009-08-03 21:27:26 +08:00
|
|
|
|
2017-02-17 02:41:51 +08:00
|
|
|
class TestFixtureReporting(object):
|
2009-07-31 20:22:02 +08:00
|
|
|
def test_setup_fixture_error(self, testdir):
|
2018-05-23 22:48:46 +08:00
|
|
|
testdir.makepyfile(
|
|
|
|
"""
|
2009-07-31 20:22:02 +08:00
|
|
|
def setup_function(function):
|
2018-11-22 16:15:14 +08:00
|
|
|
print("setup func")
|
2009-07-31 20:22:02 +08:00
|
|
|
assert 0
|
|
|
|
def test_nada():
|
|
|
|
pass
|
2018-05-23 22:48:46 +08:00
|
|
|
"""
|
|
|
|
)
|
2009-07-31 20:22:02 +08:00
|
|
|
result = testdir.runpytest()
|
2018-05-23 22:48:46 +08:00
|
|
|
result.stdout.fnmatch_lines(
|
|
|
|
[
|
|
|
|
"*ERROR at setup of test_nada*",
|
|
|
|
"*setup_function(function):*",
|
|
|
|
"*setup func*",
|
|
|
|
"*assert 0*",
|
|
|
|
"*1 error*",
|
|
|
|
]
|
|
|
|
)
|
2009-07-31 20:22:02 +08:00
|
|
|
assert result.ret != 0
|
2010-07-27 03:15:15 +08:00
|
|
|
|
2009-07-31 20:22:02 +08:00
|
|
|
def test_teardown_fixture_error(self, testdir):
|
2018-05-23 22:48:46 +08:00
|
|
|
testdir.makepyfile(
|
|
|
|
"""
|
2009-07-31 20:22:02 +08:00
|
|
|
def test_nada():
|
|
|
|
pass
|
|
|
|
def teardown_function(function):
|
2018-11-22 16:15:14 +08:00
|
|
|
print("teardown func")
|
2009-07-31 20:22:02 +08:00
|
|
|
assert 0
|
2018-05-23 22:48:46 +08:00
|
|
|
"""
|
|
|
|
)
|
2009-07-31 20:22:02 +08:00
|
|
|
result = testdir.runpytest()
|
2018-05-23 22:48:46 +08:00
|
|
|
result.stdout.fnmatch_lines(
|
|
|
|
[
|
|
|
|
"*ERROR at teardown*",
|
|
|
|
"*teardown_function(function):*",
|
|
|
|
"*assert 0*",
|
|
|
|
"*Captured stdout*",
|
|
|
|
"*teardown func*",
|
|
|
|
"*1 passed*1 error*",
|
|
|
|
]
|
|
|
|
)
|
2009-07-31 20:22:02 +08:00
|
|
|
|
|
|
|
def test_teardown_fixture_error_and_test_failure(self, testdir):
|
2018-05-23 22:48:46 +08:00
|
|
|
testdir.makepyfile(
|
|
|
|
"""
|
2009-07-31 20:22:02 +08:00
|
|
|
def test_fail():
|
|
|
|
assert 0, "failingfunc"
|
|
|
|
|
|
|
|
def teardown_function(function):
|
2018-11-22 16:15:14 +08:00
|
|
|
print("teardown func")
|
2009-07-31 20:22:02 +08:00
|
|
|
assert False
|
2018-05-23 22:48:46 +08:00
|
|
|
"""
|
|
|
|
)
|
2009-07-31 20:22:02 +08:00
|
|
|
result = testdir.runpytest()
|
2018-05-23 22:48:46 +08:00
|
|
|
result.stdout.fnmatch_lines(
|
|
|
|
[
|
|
|
|
"*ERROR at teardown of test_fail*",
|
|
|
|
"*teardown_function(function):*",
|
|
|
|
"*assert False*",
|
|
|
|
"*Captured stdout*",
|
|
|
|
"*teardown func*",
|
|
|
|
"*test_fail*",
|
|
|
|
"*def test_fail():",
|
|
|
|
"*failingfunc*",
|
|
|
|
"*1 failed*1 error*",
|
|
|
|
]
|
|
|
|
)
|
2009-08-03 21:27:26 +08:00
|
|
|
|
2016-10-30 16:46:08 +08:00
|
|
|
def test_setup_teardown_output_and_test_failure(self, testdir):
|
|
|
|
""" Test for issue #442 """
|
2018-05-23 22:48:46 +08:00
|
|
|
testdir.makepyfile(
|
|
|
|
"""
|
2016-10-30 16:46:08 +08:00
|
|
|
def setup_function(function):
|
2018-11-22 16:15:14 +08:00
|
|
|
print("setup func")
|
2016-10-30 16:46:08 +08:00
|
|
|
|
|
|
|
def test_fail():
|
|
|
|
assert 0, "failingfunc"
|
|
|
|
|
|
|
|
def teardown_function(function):
|
2018-11-22 16:15:14 +08:00
|
|
|
print("teardown func")
|
2018-05-23 22:48:46 +08:00
|
|
|
"""
|
|
|
|
)
|
2016-10-30 16:46:08 +08:00
|
|
|
result = testdir.runpytest()
|
2018-05-23 22:48:46 +08:00
|
|
|
result.stdout.fnmatch_lines(
|
|
|
|
[
|
|
|
|
"*test_fail*",
|
|
|
|
"*def test_fail():",
|
|
|
|
"*failingfunc*",
|
|
|
|
"*Captured stdout setup*",
|
|
|
|
"*setup func*",
|
|
|
|
"*Captured stdout teardown*",
|
|
|
|
"*teardown func*",
|
|
|
|
"*1 failed*",
|
|
|
|
]
|
|
|
|
)
|
2016-10-30 16:46:08 +08:00
|
|
|
|
2017-07-17 07:25:09 +08:00
|
|
|
|
2017-02-17 02:41:51 +08:00
|
|
|
class TestTerminalFunctional(object):
|
2009-08-03 21:27:26 +08:00
|
|
|
def test_deselected(self, testdir):
|
2018-05-23 22:48:46 +08:00
|
|
|
testpath = testdir.makepyfile(
|
|
|
|
"""
|
2009-08-03 21:27:26 +08:00
|
|
|
def test_one():
|
|
|
|
pass
|
|
|
|
def test_two():
|
|
|
|
pass
|
|
|
|
def test_three():
|
|
|
|
pass
|
|
|
|
"""
|
2018-05-23 22:48:46 +08:00
|
|
|
)
|
2009-08-03 21:27:26 +08:00
|
|
|
result = testdir.runpytest("-k", "test_two:", testpath)
|
2018-05-23 22:48:46 +08:00
|
|
|
result.stdout.fnmatch_lines(
|
2019-01-24 07:08:43 +08:00
|
|
|
["collected 3 items / 1 deselected / 2 selected", "*test_deselected.py ..*"]
|
2018-05-23 22:48:46 +08:00
|
|
|
)
|
2009-08-03 21:27:26 +08:00
|
|
|
assert result.ret == 0
|
|
|
|
|
2018-05-23 22:48:46 +08:00
|
|
|
def test_show_deselected_items_using_markexpr_before_test_execution(self, testdir):
|
|
|
|
testdir.makepyfile(
|
2018-08-27 04:12:55 +08:00
|
|
|
test_show_deselected="""
|
2018-02-13 21:46:11 +08:00
|
|
|
import pytest
|
|
|
|
|
|
|
|
@pytest.mark.foo
|
|
|
|
def test_foobar():
|
|
|
|
pass
|
|
|
|
|
|
|
|
@pytest.mark.bar
|
|
|
|
def test_bar():
|
|
|
|
pass
|
|
|
|
|
|
|
|
def test_pass():
|
|
|
|
pass
|
2018-05-23 22:48:46 +08:00
|
|
|
"""
|
|
|
|
)
|
|
|
|
result = testdir.runpytest("-m", "not foo")
|
|
|
|
result.stdout.fnmatch_lines(
|
|
|
|
[
|
2019-01-24 07:08:43 +08:00
|
|
|
"collected 3 items / 1 deselected / 2 selected",
|
2018-08-27 04:12:55 +08:00
|
|
|
"*test_show_deselected.py ..*",
|
2018-05-23 22:48:46 +08:00
|
|
|
"*= 2 passed, 1 deselected in * =*",
|
|
|
|
]
|
|
|
|
)
|
2018-02-13 21:46:11 +08:00
|
|
|
assert "= 1 deselected =" not in result.stdout.str()
|
|
|
|
assert result.ret == 0
|
|
|
|
|
2009-08-03 21:27:26 +08:00
|
|
|
def test_no_skip_summary_if_failure(self, testdir):
|
2018-05-23 22:48:46 +08:00
|
|
|
testdir.makepyfile(
|
|
|
|
"""
|
2010-11-18 05:12:16 +08:00
|
|
|
import pytest
|
2009-08-03 21:27:26 +08:00
|
|
|
def test_ok():
|
|
|
|
pass
|
|
|
|
def test_fail():
|
|
|
|
assert 0
|
|
|
|
def test_skip():
|
2010-11-18 05:12:16 +08:00
|
|
|
pytest.skip("dontshow")
|
2018-05-23 22:48:46 +08:00
|
|
|
"""
|
|
|
|
)
|
2010-07-27 03:15:15 +08:00
|
|
|
result = testdir.runpytest()
|
2009-08-03 21:27:26 +08:00
|
|
|
assert result.stdout.str().find("skip test summary") == -1
|
|
|
|
assert result.ret == 1
|
|
|
|
|
|
|
|
def test_passes(self, testdir):
|
2018-05-23 22:48:46 +08:00
|
|
|
p1 = testdir.makepyfile(
|
|
|
|
"""
|
2009-08-03 21:27:26 +08:00
|
|
|
def test_passes():
|
|
|
|
pass
|
2017-02-17 02:41:51 +08:00
|
|
|
class TestClass(object):
|
2009-08-03 21:27:26 +08:00
|
|
|
def test_method(self):
|
|
|
|
pass
|
2018-05-23 22:48:46 +08:00
|
|
|
"""
|
|
|
|
)
|
2009-08-03 21:27:26 +08:00
|
|
|
old = p1.dirpath().chdir()
|
|
|
|
try:
|
|
|
|
result = testdir.runpytest()
|
|
|
|
finally:
|
|
|
|
old.chdir()
|
2018-05-23 22:48:46 +08:00
|
|
|
result.stdout.fnmatch_lines(["test_passes.py ..*", "* 2 pass*"])
|
2009-08-03 21:27:26 +08:00
|
|
|
assert result.ret == 0
|
|
|
|
|
2018-12-03 12:09:04 +08:00
|
|
|
def test_header_trailer_info(self, testdir, request):
|
2018-05-23 22:48:46 +08:00
|
|
|
testdir.makepyfile(
|
|
|
|
"""
|
2009-08-03 21:27:26 +08:00
|
|
|
def test_passes():
|
|
|
|
pass
|
2018-05-23 22:48:46 +08:00
|
|
|
"""
|
|
|
|
)
|
2009-08-03 21:27:26 +08:00
|
|
|
result = testdir.runpytest()
|
2017-12-27 11:47:26 +08:00
|
|
|
verinfo = ".".join(map(str, sys.version_info[:3]))
|
2018-05-23 22:48:46 +08:00
|
|
|
result.stdout.fnmatch_lines(
|
|
|
|
[
|
|
|
|
"*===== test session starts ====*",
|
|
|
|
"platform %s -- Python %s*pytest-%s*py-%s*pluggy-%s"
|
|
|
|
% (
|
|
|
|
sys.platform,
|
|
|
|
verinfo,
|
|
|
|
pytest.__version__,
|
|
|
|
py.__version__,
|
|
|
|
pluggy.__version__,
|
|
|
|
),
|
|
|
|
"*test_header_trailer_info.py .*",
|
|
|
|
"=* 1 passed*in *.[0-9][0-9] seconds *=",
|
|
|
|
]
|
|
|
|
)
|
2018-12-03 12:09:04 +08:00
|
|
|
if request.config.pluginmanager.list_plugin_distinfo():
|
2018-05-23 22:48:46 +08:00
|
|
|
result.stdout.fnmatch_lines(["plugins: *"])
|
2009-08-03 21:27:26 +08:00
|
|
|
|
2019-03-02 22:17:43 +08:00
|
|
|
def test_header(self, testdir, request):
|
|
|
|
testdir.tmpdir.join("tests").ensure_dir()
|
|
|
|
testdir.tmpdir.join("gui").ensure_dir()
|
2019-03-02 22:31:09 +08:00
|
|
|
|
|
|
|
# no ini file
|
|
|
|
result = testdir.runpytest()
|
|
|
|
result.stdout.fnmatch_lines(["rootdir: *test_header0"])
|
|
|
|
|
|
|
|
# with inifile
|
|
|
|
testdir.makeini("""[pytest]""")
|
2019-03-02 22:17:43 +08:00
|
|
|
result = testdir.runpytest()
|
2019-03-02 22:31:09 +08:00
|
|
|
result.stdout.fnmatch_lines(["rootdir: *test_header0, inifile: tox.ini"])
|
2019-03-02 22:17:43 +08:00
|
|
|
|
2019-03-02 22:31:09 +08:00
|
|
|
# with testpaths option, and not passing anything in the command-line
|
2019-03-02 22:17:43 +08:00
|
|
|
testdir.makeini(
|
|
|
|
"""
|
|
|
|
[pytest]
|
|
|
|
testpaths = tests gui
|
|
|
|
"""
|
|
|
|
)
|
|
|
|
result = testdir.runpytest()
|
|
|
|
result.stdout.fnmatch_lines(
|
|
|
|
["rootdir: *test_header0, inifile: tox.ini, testpaths: tests, gui"]
|
|
|
|
)
|
|
|
|
|
2019-03-02 22:31:09 +08:00
|
|
|
# with testpaths option, passing directory in command-line: do not show testpaths then
|
2019-03-02 22:17:43 +08:00
|
|
|
result = testdir.runpytest("tests")
|
|
|
|
result.stdout.fnmatch_lines(["rootdir: *test_header0, inifile: tox.ini"])
|
|
|
|
|
2010-07-27 03:15:15 +08:00
|
|
|
def test_showlocals(self, testdir):
|
2018-05-23 22:48:46 +08:00
|
|
|
p1 = testdir.makepyfile(
|
|
|
|
"""
|
2009-08-03 21:27:26 +08:00
|
|
|
def test_showlocals():
|
|
|
|
x = 3
|
2010-07-27 03:15:15 +08:00
|
|
|
y = "x" * 5000
|
2009-08-03 21:27:26 +08:00
|
|
|
assert 0
|
2018-05-23 22:48:46 +08:00
|
|
|
"""
|
|
|
|
)
|
|
|
|
result = testdir.runpytest(p1, "-l")
|
|
|
|
result.stdout.fnmatch_lines(
|
|
|
|
[
|
|
|
|
# "_ _ * Locals *",
|
|
|
|
"x* = 3",
|
|
|
|
"y* = 'xxxxxx*",
|
|
|
|
]
|
|
|
|
)
|
2009-08-03 21:27:26 +08:00
|
|
|
|
2018-08-28 16:26:18 +08:00
|
|
|
@pytest.fixture
|
|
|
|
def verbose_testfile(self, testdir):
|
|
|
|
return testdir.makepyfile(
|
2018-05-23 22:48:46 +08:00
|
|
|
"""
|
2010-11-18 05:12:16 +08:00
|
|
|
import pytest
|
2009-08-03 21:27:26 +08:00
|
|
|
def test_fail():
|
|
|
|
raise ValueError()
|
|
|
|
def test_pass():
|
|
|
|
pass
|
2017-02-17 02:41:51 +08:00
|
|
|
class TestClass(object):
|
2009-08-03 21:27:26 +08:00
|
|
|
def test_skip(self):
|
2010-11-18 05:12:16 +08:00
|
|
|
pytest.skip("hello")
|
2009-08-03 21:27:26 +08:00
|
|
|
def test_gen():
|
|
|
|
def check(x):
|
|
|
|
assert x == 1
|
|
|
|
yield check, 0
|
2018-05-23 22:48:46 +08:00
|
|
|
"""
|
|
|
|
)
|
2018-08-28 16:26:18 +08:00
|
|
|
|
|
|
|
def test_verbose_reporting(self, verbose_testfile, testdir, pytestconfig):
|
|
|
|
result = testdir.runpytest(
|
|
|
|
verbose_testfile, "-v", "-Walways::pytest.PytestWarning"
|
|
|
|
)
|
2018-05-23 22:48:46 +08:00
|
|
|
result.stdout.fnmatch_lines(
|
|
|
|
[
|
|
|
|
"*test_verbose_reporting.py::test_fail *FAIL*",
|
|
|
|
"*test_verbose_reporting.py::test_pass *PASS*",
|
|
|
|
"*test_verbose_reporting.py::TestClass::test_skip *SKIP*",
|
2019-01-23 13:26:30 +08:00
|
|
|
"*test_verbose_reporting.py::test_gen *XFAIL*",
|
2018-05-23 22:48:46 +08:00
|
|
|
]
|
|
|
|
)
|
2009-08-03 21:27:26 +08:00
|
|
|
assert result.ret == 1
|
2011-03-08 20:37:00 +08:00
|
|
|
|
2018-08-28 16:26:18 +08:00
|
|
|
def test_verbose_reporting_xdist(self, verbose_testfile, testdir, pytestconfig):
|
2015-04-26 02:17:32 +08:00
|
|
|
if not pytestconfig.pluginmanager.get_plugin("xdist"):
|
2015-04-22 19:31:46 +08:00
|
|
|
pytest.skip("xdist plugin not installed")
|
|
|
|
|
2018-08-28 16:26:18 +08:00
|
|
|
result = testdir.runpytest(
|
|
|
|
verbose_testfile, "-v", "-n 1", "-Walways::pytest.PytestWarning"
|
|
|
|
)
|
|
|
|
result.stdout.fnmatch_lines(
|
|
|
|
["*FAIL*test_verbose_reporting_xdist.py::test_fail*"]
|
|
|
|
)
|
2010-01-13 23:00:33 +08:00
|
|
|
assert result.ret == 1
|
2009-08-03 21:27:26 +08:00
|
|
|
|
2010-11-01 02:51:16 +08:00
|
|
|
def test_quiet_reporting(self, testdir):
|
|
|
|
p1 = testdir.makepyfile("def test_pass(): pass")
|
2018-05-23 22:48:46 +08:00
|
|
|
result = testdir.runpytest(p1, "-q")
|
2010-11-01 02:51:16 +08:00
|
|
|
s = result.stdout.str()
|
2018-05-23 22:48:46 +08:00
|
|
|
assert "test session starts" not in s
|
2010-11-01 02:51:16 +08:00
|
|
|
assert p1.basename not in s
|
|
|
|
assert "===" not in s
|
2013-07-06 21:43:59 +08:00
|
|
|
assert "passed" in s
|
|
|
|
|
|
|
|
def test_more_quiet_reporting(self, testdir):
|
|
|
|
p1 = testdir.makepyfile("def test_pass(): pass")
|
2018-05-23 22:48:46 +08:00
|
|
|
result = testdir.runpytest(p1, "-qq")
|
2013-07-06 21:43:59 +08:00
|
|
|
s = result.stdout.str()
|
2018-05-23 22:48:46 +08:00
|
|
|
assert "test session starts" not in s
|
2013-07-06 21:43:59 +08:00
|
|
|
assert p1.basename not in s
|
|
|
|
assert "===" not in s
|
|
|
|
assert "passed" not in s
|
2017-07-27 21:34:49 +08:00
|
|
|
|
2019-02-06 07:29:30 +08:00
|
|
|
@pytest.mark.parametrize(
|
|
|
|
"params", [(), ("--collect-only",)], ids=["no-params", "collect-only"]
|
|
|
|
)
|
|
|
|
def test_report_collectionfinish_hook(self, testdir, params):
|
2018-05-23 22:48:46 +08:00
|
|
|
testdir.makeconftest(
|
|
|
|
"""
|
2017-07-27 21:34:49 +08:00
|
|
|
def pytest_report_collectionfinish(config, startdir, items):
|
|
|
|
return ['hello from hook: {0} items'.format(len(items))]
|
2018-05-23 22:48:46 +08:00
|
|
|
"""
|
|
|
|
)
|
|
|
|
testdir.makepyfile(
|
|
|
|
"""
|
2017-07-27 21:34:49 +08:00
|
|
|
import pytest
|
|
|
|
@pytest.mark.parametrize('i', range(3))
|
|
|
|
def test(i):
|
|
|
|
pass
|
2018-05-23 22:48:46 +08:00
|
|
|
"""
|
|
|
|
)
|
2019-02-06 07:29:30 +08:00
|
|
|
result = testdir.runpytest(*params)
|
2018-05-23 22:48:46 +08:00
|
|
|
result.stdout.fnmatch_lines(["collected 3 items", "hello from hook: 3 items"])
|
2013-07-06 21:43:59 +08:00
|
|
|
|
2010-11-01 02:51:16 +08:00
|
|
|
|
2019-03-30 00:59:02 +08:00
|
|
|
def test_fail_extra_reporting(testdir, monkeypatch):
    """-rf adds a short failure summary line, trimmed to COLUMNS width."""
    monkeypatch.setenv("COLUMNS", "80")
    testdir.makepyfile("def test_this(): assert 0, 'this_failed' * 100")
    res = testdir.runpytest()
    assert "short test summary" not in res.stdout.str()
    res = testdir.runpytest("-rf")
    res.stdout.fnmatch_lines(
        [
            "*test summary*",
            "FAILED test_fail_extra_reporting.py::test_this - AssertionError: this_failedt...",
        ]
    )
|
2010-05-06 01:50:59 +08:00
|
|
|
|
2017-07-17 07:25:09 +08:00
|
|
|
|
2010-05-06 01:50:59 +08:00
|
|
|
def test_fail_reporting_on_pass(testdir):
    """-rf produces no short-summary section when nothing failed."""
    testdir.makepyfile("def test_this(): assert 1")
    out = testdir.runpytest("-rf").stdout.str()
    assert "short test summary" not in out
|
2009-10-17 23:42:40 +08:00
|
|
|
|
2017-07-17 07:25:09 +08:00
|
|
|
|
2015-12-09 09:54:23 +08:00
|
|
|
def test_pass_extra_reporting(testdir):
    """Passed tests are listed in the short summary only when -rp is given."""
    testdir.makepyfile("def test_this(): assert 1")
    default_run = testdir.runpytest()
    assert "short test summary" not in default_run.stdout.str()
    rp_run = testdir.runpytest("-rp")
    rp_run.stdout.fnmatch_lines(["*test summary*", "PASS*test_pass_extra_reporting*"])
|
2015-12-09 09:54:23 +08:00
|
|
|
|
2017-07-17 07:25:09 +08:00
|
|
|
|
2015-12-09 09:54:23 +08:00
|
|
|
def test_pass_reporting_on_fail(testdir):
    """-rp produces no short-summary section when nothing passed."""
    testdir.makepyfile("def test_this(): assert 0")
    out = testdir.runpytest("-rp").stdout.str()
    assert "short test summary" not in out
|
2015-12-09 09:54:23 +08:00
|
|
|
|
2017-07-17 07:25:09 +08:00
|
|
|
|
2015-12-09 09:54:23 +08:00
|
|
|
def test_pass_output_reporting(testdir):
    """-rP shows captured output of passed tests; -rp lists them in the summary."""
    testdir.makepyfile(
        """
        def test_pass_has_output():
            print("Four score and seven years ago...")
        def test_pass_no_output():
            pass
    """
    )
    default_out = testdir.runpytest().stdout.str()
    # By default nothing about passing tests (names or captured stdout) is shown.
    for fragment in (
        "test_pass_has_output",
        "Four score and seven years ago...",
        "test_pass_no_output",
    ):
        assert fragment not in default_out
    verbose_run = testdir.runpytest("-rPp")
    verbose_run.stdout.fnmatch_lines(
        [
            "*= PASSES =*",
            "*_ test_pass_has_output _*",
            "*- Captured stdout call -*",
            "Four score and seven years ago...",
            "*= short test summary info =*",
            "PASSED test_pass_output_reporting.py::test_pass_has_output",
            "PASSED test_pass_output_reporting.py::test_pass_no_output",
            "*= 2 passed in *",
        ]
    )
|
2015-12-09 09:54:23 +08:00
|
|
|
|
2017-07-17 07:25:09 +08:00
|
|
|
|
2013-12-08 04:04:23 +08:00
|
|
|
def test_color_yes(testdir):
    """--color=yes forces ANSI escape sequences into the output."""
    testdir.makepyfile("def test_this(): assert 1")
    out = testdir.runpytest("--color=yes").stdout.str()
    assert "test session starts" in out
    assert "\x1b[1m" in out
|
2013-12-07 03:49:48 +08:00
|
|
|
|
2016-02-21 00:21:42 +08:00
|
|
|
|
2013-12-07 03:49:48 +08:00
|
|
|
def test_color_no(testdir):
    """--color=no suppresses all ANSI escape sequences."""
    testdir.makepyfile("def test_this(): assert 1")
    out = testdir.runpytest("--color=no").stdout.str()
    assert "test session starts" in out
    assert "\x1b[1m" not in out
|
2013-12-07 03:49:48 +08:00
|
|
|
|
2016-02-21 00:21:42 +08:00
|
|
|
|
2018-05-23 22:48:46 +08:00
|
|
|
@pytest.mark.parametrize("verbose", [True, False])
def test_color_yes_collection_on_non_atty(testdir, verbose):
    """skip collect progress report when working on non-terminals.
    #1397
    """
    testdir.makepyfile(
        """
        import pytest
        @pytest.mark.parametrize('i', range(10))
        def test_this(i):
            assert 1
    """
    )
    args = ["--color=yes"]
    if verbose:
        args.append("-vv")
    out = testdir.runpytest(*args).stdout.str()
    assert "test session starts" in out
    assert "\x1b[1m" in out
    # The live "collecting N items" progress line must not appear off-terminal.
    assert "collecting 10 items" not in out
    if verbose:
        assert "collecting ..." in out
    assert "collected 10 items" in out
|
2016-02-21 00:21:42 +08:00
|
|
|
|
|
|
|
|
2009-10-17 23:42:40 +08:00
|
|
|
def test_getreportopt():
    """getreportopt() passes reportchars through, expands the "a"/"A"
    aliases, and strips/adds "w" depending on --disable-warnings.

    Fixes: the ``reportchars = "a"`` assertion was duplicated verbatim,
    and the comment before ``disable_warnings = False`` claimed the
    opposite of what the code does.
    """

    class Config(object):
        class Option(object):
            reportchars = ""
            disable_warnings = True

        option = Option()

    config = Config()

    # With --disable-warnings in effect, "w" never survives.
    config.option.reportchars = "sf"
    assert getreportopt(config) == "sf"

    config.option.reportchars = "sfxw"
    assert getreportopt(config) == "sfx"

    # Now without --disable-warnings: "w" is kept, and added by the aliases.
    config.option.disable_warnings = False
    config.option.reportchars = "sfx"
    assert getreportopt(config) == "sfxw"

    config.option.reportchars = "sfxw"
    assert getreportopt(config) == "sfxw"

    # "a" expands to all-but-passed ...
    config.option.reportchars = "a"
    assert getreportopt(config) == "sxXwEf"  # NOTE: "w" included!

    # ... and "A" to everything including passed.
    config.option.reportchars = "A"
    assert getreportopt(config) == "sxXwEfpP"
|
|
|
|
|
2016-06-26 00:16:13 +08:00
|
|
|
|
2010-11-01 15:55:14 +08:00
|
|
|
def test_terminalreporter_reportopt_addopts(testdir):
    """reportchars set via addopts in the ini file reach the terminal reporter."""
    testdir.makeini("[pytest]\naddopts=-rs")
    testdir.makepyfile(
        """
        import pytest

        @pytest.fixture
        def tr(request):
            tr = request.config.pluginmanager.getplugin("terminalreporter")
            return tr
        def test_opt(tr):
            assert tr.hasopt('skipped')
            assert not tr.hasopt('qwe')
    """
    )
    run = testdir.runpytest()
    run.stdout.fnmatch_lines(["*1 passed*"])
|
2010-01-18 06:23:02 +08:00
|
|
|
|
2017-07-17 07:25:09 +08:00
|
|
|
|
2010-05-22 22:18:24 +08:00
|
|
|
def test_tbstyle_short(testdir):
    """--tb=short omits local-variable values that the long style includes."""
    p = testdir.makepyfile(
        """
        import pytest

        @pytest.fixture
        def arg(request):
            return 42
        def test_opt(arg):
            x = 0
            assert x
    """
    )
    short_run = testdir.runpytest("--tb=short")
    short_out = short_run.stdout.str()
    assert "arg = 42" not in short_out
    assert "x = 0" not in short_out
    short_run.stdout.fnmatch_lines(
        ["*%s:8*" % p.basename, "    assert x", "E   assert*"]
    )
    long_out = testdir.runpytest().stdout.str()
    assert "x = 0" in long_out
    assert "assert x" in long_out
|
2010-05-22 22:18:24 +08:00
|
|
|
|
2017-07-17 07:25:09 +08:00
|
|
|
|
2010-11-06 06:37:31 +08:00
|
|
|
def test_traceconfig(testdir, monkeypatch):
    """--traceconfig prints the active-plugins report even with no tests."""
    # NOTE(review): the monkeypatch fixture is unused here; kept for signature
    # compatibility.
    run = testdir.runpytest("--traceconfig")
    run.stdout.fnmatch_lines(["*active plugins*"])
    assert run.ret == EXIT_NOTESTSCOLLECTED
|
2010-05-22 22:18:24 +08:00
|
|
|
|
2010-07-07 18:41:15 +08:00
|
|
|
|
2017-02-17 02:41:51 +08:00
|
|
|
class TestGenericReporting(object):
    """ this test class can be subclassed with a different option
    provider to run e.g. distributed tests.
    """

    def test_collect_fail(self, testdir, option):
        """A module that fails to import is reported as a collection error."""
        testdir.makepyfile("import xyz\n")
        result = testdir.runpytest(*option.args)
        result.stdout.fnmatch_lines(
            ["ImportError while importing*", "*No module named *xyz*", "*1 error*"]
        )

    def test_maxfailures(self, testdir, option):
        """--maxfail=2 stops the run after two failures; test_3 never runs."""
        testdir.makepyfile(
            """
            def test_1():
                assert 0
            def test_2():
                assert 0
            def test_3():
                assert 0
        """
        )
        result = testdir.runpytest("--maxfail=2", *option.args)
        result.stdout.fnmatch_lines(
            ["*def test_1():*", "*def test_2():*", "*2 failed*"]
        )

    def test_tb_option(self, testdir, option):
        """--tb=long shows source+locals, --tb=short only the crash, --tb=no nothing."""
        testdir.makepyfile(
            """
            import pytest
            def g():
                raise IndexError
            def test_func():
                print(6*7)
                g()  # --calling--
        """
        )
        for tbopt in ["long", "short", "no"]:
            print("testing --tb=%s..." % tbopt)
            result = testdir.runpytest("--tb=%s" % tbopt)
            s = result.stdout.str()
            # Only the long style includes the calling frame's source line.
            if tbopt == "long":
                assert "print(6*7)" in s
            else:
                assert "print(6*7)" not in s
            if tbopt != "no":
                assert "--calling--" in s
                assert "IndexError" in s
            else:
                # --tb=no suppresses the whole FAILURES section.
                assert "FAILURES" not in s
                assert "--calling--" not in s
                assert "IndexError" not in s

    def test_tb_crashline(self, testdir, option):
        """--tb=line emits a single file:line summary per failure, no source."""
        p = testdir.makepyfile(
            """
            import pytest
            def g():
                raise IndexError
            def test_func1():
                print(6*7)
                g()  # --calling--
            def test_func2():
                assert 0, "hello"
        """
        )
        result = testdir.runpytest("--tb=line")
        bn = p.basename
        result.stdout.fnmatch_lines(
            ["*%s:3: IndexError*" % bn, "*%s:8: AssertionError: hello*" % bn]
        )
        s = result.stdout.str()
        assert "def test_func2" not in s

    def test_pytest_report_header(self, testdir, option):
        """Header lines come from the rootdir conftest and from sub-conftests."""
        testdir.makeconftest(
            """
            def pytest_sessionstart(session):
                session.config._somevalue = 42
            def pytest_report_header(config):
                return "hello: %s" % config._somevalue
        """
        )
        # A conftest in a subdirectory may also contribute header lines
        # (and receives startdir as second argument).
        testdir.mkdir("a").join("conftest.py").write(
            """
def pytest_report_header(config, startdir):
    return ["line1", str(startdir)]
"""
        )
        result = testdir.runpytest("a")
        result.stdout.fnmatch_lines(["*hello: 42*", "line1", str(testdir.tmpdir)])

    def test_show_capture(self, testdir):
        """--show-capture selects which captured streams appear in failure reports."""
        testdir.makepyfile(
            """
            import sys
            import logging
            def test_one():
                sys.stdout.write('!This is stdout!')
                sys.stderr.write('!This is stderr!')
                logging.warning('!This is a warning log msg!')
                assert False, 'Something failed'
        """
        )

        # Default behaves like --show-capture=all.
        result = testdir.runpytest("--tb=short")
        result.stdout.fnmatch_lines(
            [
                "!This is stdout!",
                "!This is stderr!",
                "*WARNING*!This is a warning log msg!",
            ]
        )

        result = testdir.runpytest("--show-capture=all", "--tb=short")
        result.stdout.fnmatch_lines(
            [
                "!This is stdout!",
                "!This is stderr!",
                "*WARNING*!This is a warning log msg!",
            ]
        )

        stdout = testdir.runpytest("--show-capture=stdout", "--tb=short").stdout.str()
        assert "!This is stderr!" not in stdout
        assert "!This is stdout!" in stdout
        assert "!This is a warning log msg!" not in stdout

        stdout = testdir.runpytest("--show-capture=stderr", "--tb=short").stdout.str()
        assert "!This is stdout!" not in stdout
        assert "!This is stderr!" in stdout
        assert "!This is a warning log msg!" not in stdout

        stdout = testdir.runpytest("--show-capture=log", "--tb=short").stdout.str()
        assert "!This is stdout!" not in stdout
        assert "!This is stderr!" not in stdout
        assert "!This is a warning log msg!" in stdout

        stdout = testdir.runpytest("--show-capture=no", "--tb=short").stdout.str()
        assert "!This is stdout!" not in stdout
        assert "!This is stderr!" not in stdout
        assert "!This is a warning log msg!" not in stdout

    def test_show_capture_with_teardown_logs(self, testdir):
        """Ensure that the capturing of teardown logs honor --show-capture setting"""
        testdir.makepyfile(
            """
            import logging
            import sys
            import pytest

            @pytest.fixture(scope="function", autouse="True")
            def hook_each_test(request):
                yield
                sys.stdout.write("!stdout!")
                sys.stderr.write("!stderr!")
                logging.warning("!log!")

            def test_func():
                assert False
        """
        )

        result = testdir.runpytest("--show-capture=stdout", "--tb=short").stdout.str()
        assert "!stdout!" in result
        assert "!stderr!" not in result
        assert "!log!" not in result

        result = testdir.runpytest("--show-capture=stderr", "--tb=short").stdout.str()
        assert "!stdout!" not in result
        assert "!stderr!" in result
        assert "!log!" not in result

        result = testdir.runpytest("--show-capture=log", "--tb=short").stdout.str()
        assert "!stdout!" not in result
        assert "!stderr!" not in result
        assert "!log!" in result

        result = testdir.runpytest("--show-capture=no", "--tb=short").stdout.str()
        assert "!stdout!" not in result
        assert "!stderr!" not in result
        assert "!log!" not in result
|
|
|
|
|
2017-07-17 07:25:09 +08:00
|
|
|
|
2010-11-18 05:12:16 +08:00
|
|
|
@pytest.mark.xfail("not hasattr(os, 'dup')")
def test_fdopen_kept_alive_issue124(testdir):
    """A file object wrapping fd 1 kept alive across tests can still be closed."""
    source = """
        import os, sys
        k = []
        def test_open_file_and_keep_alive(capfd):
            stdout = os.fdopen(1, 'w', 1)
            k.append(stdout)

        def test_close_kept_alive_file():
            stdout = k.pop()
            stdout.close()
    """
    testdir.makepyfile(source)
    run = testdir.runpytest()
    run.stdout.fnmatch_lines(["*2 passed*"])
|
2013-05-08 23:01:20 +08:00
|
|
|
|
2017-07-17 07:25:09 +08:00
|
|
|
|
2013-05-08 23:01:20 +08:00
|
|
|
def test_tbstyle_native_setup_error(testdir):
    """--tb=native formats fixture setup errors as a plain Python traceback."""
    source = """
        import pytest
        @pytest.fixture
        def setup_error_fixture():
            raise Exception("error in exception")

        def test_error_fixture(setup_error_fixture):
            pass
    """
    testdir.makepyfile(source)
    run = testdir.runpytest("--tb=native")
    run.stdout.fnmatch_lines(
        ['*File *test_tbstyle_native_setup_error.py", line *, in setup_error_fixture*']
    )
|
2013-09-27 21:48:03 +08:00
|
|
|
|
2017-07-17 07:25:09 +08:00
|
|
|
|
2013-09-27 21:48:03 +08:00
|
|
|
def test_terminal_summary(testdir):
    """pytest_terminal_summary can write sections/lines and sees the exit status."""
    conftest_source = """
        def pytest_terminal_summary(terminalreporter, exitstatus):
            w = terminalreporter
            w.section("hello")
            w.line("world")
            w.line("exitstatus: {0}".format(exitstatus))
    """
    testdir.makeconftest(conftest_source)
    run = testdir.runpytest()
    # exitstatus 5 == EXIT_NOTESTSCOLLECTED (no tests were defined).
    run.stdout.fnmatch_lines(
        """
        *==== hello ====*
        world
        exitstatus: 5
    """
    )
|
2015-07-03 01:03:05 +08:00
|
|
|
|
2016-01-04 10:07:45 +08:00
|
|
|
|
2018-09-02 04:10:26 +08:00
|
|
|
@pytest.mark.filterwarnings("default")
def test_terminal_summary_warnings_are_displayed(testdir):
    """Test that warnings emitted during pytest_terminal_summary are displayed.
    (#1305).
    """
    testdir.makeconftest(
        """
        import warnings
        def pytest_terminal_summary(terminalreporter):
            warnings.warn(UserWarning('internal warning'))
    """
    )
    testdir.makepyfile(
        """
        def test_failure():
            import warnings
            warnings.warn("warning_from_" + "test")
            assert 0
    """
    )
    run = testdir.runpytest("-ra")
    run.stdout.fnmatch_lines(
        [
            "*= warnings summary =*",
            "*warning_from_test*",
            "*= short test summary info =*",
            "*= warnings summary (final) =*",
            "*conftest.py:3:*internal warning",
            "*== 1 failed, 2 warnings in *",
        ]
    )
    out = run.stdout.str()
    assert "None" not in out
    # Each warning is reported exactly once, split across the two sections.
    assert out.count("warning_from_test") == 1
    assert out.count("=== warnings summary ") == 2
|
|
|
|
|
|
|
|
|
|
|
|
@pytest.mark.filterwarnings("default")
def test_terminal_summary_warnings_header_once(testdir):
    """Without late warnings there is exactly one warnings-summary header."""
    testdir.makepyfile(
        """
        def test_failure():
            import warnings
            warnings.warn("warning_from_" + "test")
            assert 0
    """
    )
    run = testdir.runpytest("-ra")
    run.stdout.fnmatch_lines(
        [
            "*= warnings summary =*",
            "*warning_from_test*",
            "*= short test summary info =*",
            "*== 1 failed, 1 warnings in *",
        ]
    )
    out = run.stdout.str()
    assert "None" not in out
    assert out.count("warning_from_test") == 1
    assert out.count("=== warnings summary ") == 1
|
2018-05-23 22:48:46 +08:00
|
|
|
|
|
|
|
|
|
|
|
@pytest.mark.parametrize(
    "exp_color, exp_line, stats_arg",
    [
        # The method under test only cares about the length of each
        # dict value, not the actual contents, so tuples of anything
        # suffice
        # Important statuses -- the highest priority of these always wins
        ("red", "1 failed", {"failed": (1,)}),
        ("red", "1 failed, 1 passed", {"failed": (1,), "passed": (1,)}),
        ("red", "1 error", {"error": (1,)}),
        ("red", "1 passed, 1 error", {"error": (1,), "passed": (1,)}),
        # (a status that's not known to the code)
        ("yellow", "1 weird", {"weird": (1,)}),
        ("yellow", "1 passed, 1 weird", {"weird": (1,), "passed": (1,)}),
        ("yellow", "1 warnings", {"warnings": (1,)}),
        ("yellow", "1 passed, 1 warnings", {"warnings": (1,), "passed": (1,)}),
        ("green", "5 passed", {"passed": (1, 2, 3, 4, 5)}),
        # "Boring" statuses.  These have no effect on the color of the summary
        # line.  Thus, if *every* test has a boring status, the summary line stays
        # at its default color, i.e. yellow, to warn the user that the test run
        # produced no useful information
        ("yellow", "1 skipped", {"skipped": (1,)}),
        ("green", "1 passed, 1 skipped", {"skipped": (1,), "passed": (1,)}),
        ("yellow", "1 deselected", {"deselected": (1,)}),
        ("green", "1 passed, 1 deselected", {"deselected": (1,), "passed": (1,)}),
        ("yellow", "1 xfailed", {"xfailed": (1,)}),
        ("green", "1 passed, 1 xfailed", {"xfailed": (1,), "passed": (1,)}),
        ("yellow", "1 xpassed", {"xpassed": (1,)}),
        ("green", "1 passed, 1 xpassed", {"xpassed": (1,), "passed": (1,)}),
        # Likewise if no tests were found at all
        ("yellow", "no tests ran", {}),
        # Test the empty-key special case
        ("yellow", "no tests ran", {"": (1,)}),
        ("green", "1 passed", {"": (1,), "passed": (1,)}),
        # A couple more complex combinations
        (
            "red",
            "1 failed, 2 passed, 3 xfailed",
            {"passed": (1, 2), "failed": (1,), "xfailed": (1, 2, 3)},
        ),
        (
            "green",
            "1 passed, 2 skipped, 3 deselected, 2 xfailed",
            {
                "passed": (1,),
                "skipped": (1, 2),
                "deselected": (1, 2, 3),
                "xfailed": (1, 2),
            },
        ),
    ],
)
def test_summary_stats(exp_line, exp_color, stats_arg):
    """build_summary_stats_line() yields the expected text and color for the
    given stats dict (the prints below aid debugging on failure)."""
    print("Based on stats: %s" % stats_arg)
    print('Expect summary: "{}"; with color "{}"'.format(exp_line, exp_color))
    (line, color) = build_summary_stats_line(stats_arg)
    print('Actually got: "{}"; with color "{}"'.format(line, color))
    assert line == exp_line
    assert color == exp_color
|
2017-03-06 03:44:13 +08:00
|
|
|
|
|
|
|
|
2019-03-07 05:54:45 +08:00
|
|
|
def test_skip_counting_towards_summary():
    """Reports with count_towards_summary=False are excluded from the tally."""

    class DummyReport(BaseReport):
        count_towards_summary = True

    first = DummyReport()
    second = DummyReport()
    assert build_summary_stats_line({"failed": (first, second)}) == (
        "2 failed",
        "red",
    )

    # Opting one report out drops it from the count but not the color.
    first.count_towards_summary = False
    assert build_summary_stats_line({"failed": (first, second)}) == (
        "1 failed",
        "red",
    )
|
|
|
|
|
|
|
|
|
2018-08-27 04:12:55 +08:00
|
|
|
class TestClassicOutputStyle(object):
    """Ensure classic output style works as expected (#3883)"""

    @pytest.fixture
    def test_files(self, testdir):
        # Two top-level modules plus one in a subdirectory, mixing passes
        # and failures so the progress characters are distinguishable.
        testdir.makepyfile(
            **{
                "test_one.py": "def test_one(): pass",
                "test_two.py": "def test_two(): assert 0",
                "sub/test_three.py": """
                    def test_three_1(): pass
                    def test_three_2(): assert 0
                    def test_three_3(): pass
                """,
            }
        )

    def test_normal_verbosity(self, testdir, test_files):
        """Classic style: one line per file, no [NN%] progress indicator."""
        result = testdir.runpytest("-o", "console_output_style=classic")
        result.stdout.fnmatch_lines(
            [
                "test_one.py .",
                "test_two.py F",
                "sub{}test_three.py .F.".format(os.sep),
                "*2 failed, 3 passed in*",
            ]
        )

    def test_verbose(self, testdir, test_files):
        """Classic + -v: one line per test with PASSED/FAILED, no percentage."""
        result = testdir.runpytest("-o", "console_output_style=classic", "-v")
        result.stdout.fnmatch_lines(
            [
                "test_one.py::test_one PASSED",
                "test_two.py::test_two FAILED",
                "sub{}test_three.py::test_three_1 PASSED".format(os.sep),
                "sub{}test_three.py::test_three_2 FAILED".format(os.sep),
                "sub{}test_three.py::test_three_3 PASSED".format(os.sep),
                "*2 failed, 3 passed in*",
            ]
        )

    def test_quiet(self, testdir, test_files):
        """Classic + -q: bare progress characters only."""
        result = testdir.runpytest("-o", "console_output_style=classic", "-q")
        result.stdout.fnmatch_lines([".F.F.", "*2 failed, 3 passed in*"])
|
|
|
|
|
|
|
|
|
|
|
|
class TestProgressOutputStyle(object):
    """Tests for the [NN%] / [N/M] progress indicators of the default and
    "count" console output styles."""

    @pytest.fixture
    def many_tests_files(self, testdir):
        # 10 + 5 + 5 = 20 tests across three files.
        testdir.makepyfile(
            test_bar="""
                import pytest
                @pytest.mark.parametrize('i', range(10))
                def test_bar(i): pass
            """,
            test_foo="""
                import pytest
                @pytest.mark.parametrize('i', range(5))
                def test_foo(i): pass
            """,
            test_foobar="""
                import pytest
                @pytest.mark.parametrize('i', range(5))
                def test_foobar(i): pass
            """,
        )

    def test_zero_tests_collected(self, testdir):
        """Some plugins (testmon for example) might issue pytest_runtest_logreport without any tests being
        actually collected (#2971)."""
        testdir.makeconftest(
            """
            def pytest_collection_modifyitems(items, config):
                from _pytest.runner import CollectReport
                for node_id in ('nodeid1', 'nodeid2'):
                    rep = CollectReport(node_id, 'passed', None, None)
                    rep.when = 'passed'
                    rep.duration = 0.1
                    config.hook.pytest_runtest_logreport(report=rep)
        """
        )
        output = testdir.runpytest()
        # Percentage computation must not divide by the (zero) collected count.
        assert "ZeroDivisionError" not in output.stdout.str()
        output.stdout.fnmatch_lines(["=* 2 passed in *="])

    def test_normal(self, many_tests_files, testdir):
        """Default style shows a cumulative percentage per file line."""
        output = testdir.runpytest()
        output.stdout.re_match_lines(
            [
                r"test_bar.py \.{10} \s+ \[ 50%\]",
                r"test_foo.py \.{5} \s+ \[ 75%\]",
                r"test_foobar.py \.{5} \s+ \[100%\]",
            ]
        )

    def test_count(self, many_tests_files, testdir):
        """console_output_style=count shows [done/total] instead of percent."""
        testdir.makeini(
            """
            [pytest]
            console_output_style = count
        """
        )
        output = testdir.runpytest()
        output.stdout.re_match_lines(
            [
                r"test_bar.py \.{10} \s+ \[10/20\]",
                r"test_foo.py \.{5} \s+ \[15/20\]",
                r"test_foobar.py \.{5} \s+ \[20/20\]",
            ]
        )

    def test_verbose(self, many_tests_files, testdir):
        """With -v the percentage appears on every per-test line."""
        output = testdir.runpytest("-v")
        output.stdout.re_match_lines(
            [
                r"test_bar.py::test_bar\[0\] PASSED \s+ \[  5%\]",
                r"test_foo.py::test_foo\[4\] PASSED \s+ \[ 75%\]",
                r"test_foobar.py::test_foobar\[4\] PASSED \s+ \[100%\]",
            ]
        )

    def test_verbose_count(self, many_tests_files, testdir):
        """-v combined with count style shows [done/total] per test line."""
        testdir.makeini(
            """
            [pytest]
            console_output_style = count
        """
        )
        output = testdir.runpytest("-v")
        output.stdout.re_match_lines(
            [
                r"test_bar.py::test_bar\[0\] PASSED \s+ \[ 1/20\]",
                r"test_foo.py::test_foo\[4\] PASSED \s+ \[15/20\]",
                r"test_foobar.py::test_foobar\[4\] PASSED \s+ \[20/20\]",
            ]
        )

    def test_xdist_normal(self, many_tests_files, testdir, monkeypatch):
        """Progress output also works when tests run distributed via xdist."""
        pytest.importorskip("xdist")
        # Make sure the xdist plugin is auto-loaded even if the env opts out.
        monkeypatch.delenv("PYTEST_DISABLE_PLUGIN_AUTOLOAD", raising=False)
        output = testdir.runpytest("-n2")
        output.stdout.re_match_lines([r"\.{20} \s+ \[100%\]"])

    def test_xdist_normal_count(self, many_tests_files, testdir, monkeypatch):
        """count style works under xdist as well."""
        pytest.importorskip("xdist")
        monkeypatch.delenv("PYTEST_DISABLE_PLUGIN_AUTOLOAD", raising=False)
        testdir.makeini(
            """
            [pytest]
            console_output_style = count
        """
        )
        output = testdir.runpytest("-n2")
        output.stdout.re_match_lines([r"\.{20} \s+ \[20/20\]"])

    def test_xdist_verbose(self, many_tests_files, testdir, monkeypatch):
        """xdist + -v prefixes each test line with the worker id and progress."""
        pytest.importorskip("xdist")
        monkeypatch.delenv("PYTEST_DISABLE_PLUGIN_AUTOLOAD", raising=False)
        output = testdir.runpytest("-n2", "-v")
        # Worker scheduling is nondeterministic, hence the random matcher.
        output.stdout.re_match_lines_random(
            [
                r"\[gw\d\] \[\s*\d+%\] PASSED test_bar.py::test_bar\[1\]",
                r"\[gw\d\] \[\s*\d+%\] PASSED test_foo.py::test_foo\[1\]",
                r"\[gw\d\] \[\s*\d+%\] PASSED test_foobar.py::test_foobar\[1\]",
            ]
        )

    def test_capture_no(self, many_tests_files, testdir):
        """With capturing disabled the progress percentage is not printed."""
        output = testdir.runpytest("-s")
        output.stdout.re_match_lines(
            [r"test_bar.py \.{10}", r"test_foo.py \.{5}", r"test_foobar.py \.{5}"]
        )

        output = testdir.runpytest("--capture=no")
        assert "%]" not in output.stdout.str()
|
2018-01-12 06:22:18 +08:00
|
|
|
|
|
|
|
|
2018-01-25 04:23:42 +08:00
|
|
|
class TestProgressWithTeardown(object):
    """Ensure we show the correct percentages for tests that fail during teardown (#3088)"""

    @pytest.fixture
    def contest_with_teardown_fixture(self, testdir):
        # Provides a ``fail_teardown`` fixture whose teardown phase always
        # raises, so every test using it passes in call but errors ("E")
        # during teardown.
        testdir.makeconftest(
            """
            import pytest

            @pytest.fixture
            def fail_teardown():
                yield
                assert False
            """
        )

    @pytest.fixture
    def many_files(self, testdir, contest_with_teardown_fixture):
        # 5 + 15 = 20 parametrized tests total, each erroring on teardown.
        testdir.makepyfile(
            test_bar="""
            import pytest
            @pytest.mark.parametrize('i', range(5))
            def test_bar(fail_teardown, i):
                pass
            """,
            test_foo="""
            import pytest
            @pytest.mark.parametrize('i', range(15))
            def test_foo(fail_teardown, i):
                pass
            """,
        )

    def test_teardown_simple(self, testdir, contest_with_teardown_fixture):
        # A pass followed by a teardown error renders ".E" and still ends
        # the session at 100%.
        testdir.makepyfile(
            """
            def test_foo(fail_teardown):
                pass
            """
        )
        output = testdir.runpytest()
        output.stdout.re_match_lines([r"test_teardown_simple.py \.E\s+\[100%\]"])

    def test_teardown_with_test_also_failing(
        self, testdir, contest_with_teardown_fixture
    ):
        # Failure in the call phase plus an error in teardown renders "FE".
        testdir.makepyfile(
            """
            def test_foo(fail_teardown):
                assert False
            """
        )
        output = testdir.runpytest()
        output.stdout.re_match_lines(
            [r"test_teardown_with_test_also_failing.py FE\s+\[100%\]"]
        )

    def test_teardown_many(self, testdir, many_files):
        # The percentage is based on tests, not reports: 5 of 20 tests done
        # after test_bar.py == 25%, even though 10 reports were emitted.
        output = testdir.runpytest()
        output.stdout.re_match_lines(
            [r"test_bar.py (\.E){5}\s+\[ 25%\]", r"test_foo.py (\.E){15}\s+\[100%\]"]
        )

    def test_teardown_many_verbose(self, testdir, many_files):
        # In verbose mode the PASSED and ERROR lines for the same test must
        # carry the same percentage.
        output = testdir.runpytest("-v")
        output.stdout.re_match_lines(
            [
                r"test_bar.py::test_bar\[0\] PASSED\s+\[ 5%\]",
                r"test_bar.py::test_bar\[0\] ERROR\s+\[ 5%\]",
                r"test_bar.py::test_bar\[4\] PASSED\s+\[ 25%\]",
                r"test_bar.py::test_bar\[4\] ERROR\s+\[ 25%\]",
            ]
        )

    def test_xdist_normal(self, many_files, testdir, monkeypatch):
        # Under xdist the "." and "E" characters interleave unpredictably,
        # so only check that all 40 reports (20 passes + 20 teardown errors)
        # are shown and the run reaches 100%.
        pytest.importorskip("xdist")
        monkeypatch.delenv("PYTEST_DISABLE_PLUGIN_AUTOLOAD", raising=False)
        output = testdir.runpytest("-n2")
        output.stdout.re_match_lines([r"[\.E]{40} \s+ \[100%\]"])
|
|
|
|
|
|
def test_skip_reasons_folding():
    """Skip reports sharing the same (path, lineno, reason) triple must be
    folded by ``_folded_skips`` into a single entry with a count."""
    path = "xyz"
    lineno = 3
    message = "justso"
    longrepr = (path, lineno, message)

    class X(object):
        # Minimal stand-in for a skip report object.
        pass

    ev1 = X()
    ev1.when = "execute"
    ev1.skipped = True
    ev1.longrepr = longrepr

    ev2 = X()
    ev2.when = "execute"
    ev2.longrepr = longrepr
    ev2.skipped = True

    # ev3 might be a collection report
    ev3 = X()
    ev3.when = "collect"
    ev3.longrepr = longrepr
    ev3.skipped = True

    values = _folded_skips([ev1, ev2, ev3])
    assert len(values) == 1
    # Unpack into a fresh name: the previous version rebound the local
    # ``lineno`` here, turning the line-number assertion into the tautology
    # ``assert lineno == lineno`` which could never fail.
    num, fspath, reported_lineno, reason = values[0]
    assert num == 3
    assert fspath == path
    assert reported_lineno == lineno
    assert reason == message
|
|
|
|
|
|
def test_line_with_reprcrash(monkeypatch):
    """Validate truncation in ``_get_line_with_reprcrash_message``: the
    short-summary line must fit ``width`` both by character count and by
    terminal display width (wide unicode chars count as 2 columns)."""
    import _pytest.terminal
    from wcwidth import wcswidth

    mocked_verbose_word = "FAILED"

    mocked_pos = "some::nodeid"

    def mock_get_pos(*args):
        # Late-bound closure: reads ``mocked_pos`` at call time, so the
        # reassignment near the end of this test changes what the terminal
        # code sees.
        return mocked_pos

    monkeypatch.setattr(_pytest.terminal, "_get_pos", mock_get_pos)

    class config:
        # Bare stand-in for a pytest Config object; no attributes needed.
        pass

    class rep:
        # Minimal report double providing only what the function touches:
        # the verbose word and ``longrepr.reprcrash.message``.
        def _get_verbose_word(self, *args):
            return mocked_verbose_word

        class longrepr:
            class reprcrash:
                pass

    def check(msg, width, expected):
        __tracebackhide__ = True
        if msg:
            rep.longrepr.reprcrash.message = msg
        actual = _get_line_with_reprcrash_message(config, rep, width)

        assert actual == expected
        # Unless the message part was dropped entirely, the rendered line
        # must fit within ``width`` in both len() and display columns.
        if actual != "%s %s" % (mocked_verbose_word, mocked_pos):
            assert len(actual) <= width
            assert wcswidth(actual) <= width

    # AttributeError with message
    check(None, 80, "FAILED some::nodeid")

    check("msg", 80, "FAILED some::nodeid - msg")
    check("msg", 3, "FAILED some::nodeid")

    # Boundary: "- msg" needs 6 extra columns beyond the 19-char prefix.
    check("msg", 24, "FAILED some::nodeid")
    check("msg", 25, "FAILED some::nodeid - msg")

    check("some longer msg", 24, "FAILED some::nodeid")
    check("some longer msg", 25, "FAILED some::nodeid - ...")
    check("some longer msg", 26, "FAILED some::nodeid - s...")

    # Only the first line of a multi-line message is used.
    check("some\nmessage", 25, "FAILED some::nodeid - ...")
    check("some\nmessage", 26, "FAILED some::nodeid - some")
    check("some\nmessage", 80, "FAILED some::nodeid - some")

    # Test unicode safety.
    check(u"😄😄😄😄😄\n2nd line", 25, u"FAILED some::nodeid - ...")
    check(u"😄😄😄😄😄\n2nd line", 26, u"FAILED some::nodeid - ...")
    check(u"😄😄😄😄😄\n2nd line", 27, u"FAILED some::nodeid - 😄...")
    check(u"😄😄😄😄😄\n2nd line", 28, u"FAILED some::nodeid - 😄...")
    check(u"😄😄😄😄😄\n2nd line", 29, u"FAILED some::nodeid - 😄😄...")

    # NOTE: constructed, not sure if this is supported.
    # It would fail if not using u"" in Python 2 for mocked_pos.
    mocked_pos = u"nodeid::😄::withunicode"
    check(u"😄😄😄😄😄\n2nd line", 29, u"FAILED nodeid::😄::withunicode")
    check(u"😄😄😄😄😄\n2nd line", 40, u"FAILED nodeid::😄::withunicode - 😄😄...")
    check(u"😄😄😄😄😄\n2nd line", 41, u"FAILED nodeid::😄::withunicode - 😄😄...")
    check(u"😄😄😄😄😄\n2nd line", 42, u"FAILED nodeid::😄::withunicode - 😄😄😄...")
    check(u"😄😄😄😄😄\n2nd line", 80, u"FAILED nodeid::😄::withunicode - 😄😄😄😄😄")