tests: fnmatch_lines: use list

For strings, fnmatch_lines converts them into a Source object, split on
newlines. This is not necessary here, and it is more consistent to use
lists in the first place.
Daniel Hahler 2019-03-23 11:36:18 +01:00
parent 15d608867d
commit 08f3b02dfc
16 changed files with 58 additions and 56 deletions
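
A minimal sketch of the behavior the commit message describes, paraphrased from
LineMatcher._getlines in _pytest.pytester around this pytest version (the helper
name getlines below is only for illustration; the exact internals may differ):

    from _pytest._code import Source

    def getlines(lines2):
        # Rough paraphrase of LineMatcher._getlines: a plain str argument is
        # first wrapped in a Source object and split on newlines; a list is
        # used as-is.
        if isinstance(lines2, str):
            lines2 = Source(lines2)
        if isinstance(lines2, Source):
            lines2 = lines2.strip().lines
        return lines2

    print(getlines("* 1 passed in *"))    # ['* 1 passed in *'], via the Source round-trip
    print(getlines(["* 1 passed in *"]))  # ['* 1 passed in *'], used directly

Passing a list therefore skips the Source conversion and reads the same as the
multi-pattern calls already used elsewhere, e.g.
fnmatch_lines(["*collected 1 item*", "*1 passed*"]).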

@@ -1033,7 +1033,7 @@ def test_pytest_plugins_as_module(testdir):
}
)
result = testdir.runpytest()
-result.stdout.fnmatch_lines("* 1 passed in *")
+result.stdout.fnmatch_lines(["* 1 passed in *"])
def test_deferred_hook_checking(testdir):
@@ -1173,7 +1173,7 @@ def test_fixture_mock_integration(testdir):
"""Test that decorators applied to fixture are left working (#3774)"""
p = testdir.copy_example("acceptance/fixture_mock_integration.py")
result = testdir.runpytest(p)
-result.stdout.fnmatch_lines("*1 passed*")
+result.stdout.fnmatch_lines(["*1 passed*"])
def test_usage_error_code(testdir):

@@ -147,7 +147,7 @@ def test_pytest_plugins_in_non_top_level_conftest_unsupported_pyargs(
if use_pyargs:
assert msg not in res.stdout.str()
else:
-res.stdout.fnmatch_lines("*{msg}*".format(msg=msg))
+res.stdout.fnmatch_lines(["*{msg}*".format(msg=msg)])
def test_pytest_plugins_in_non_top_level_conftest_unsupported_no_top_level_conftest(

@@ -747,7 +747,7 @@ def test_log_level_not_changed_by_default(testdir):
"""
)
result = testdir.runpytest("-s")
-result.stdout.fnmatch_lines("* 1 passed in *")
+result.stdout.fnmatch_lines(["* 1 passed in *"])
def test_log_file_ini(testdir):

@@ -560,7 +560,7 @@ class TestFunction(object):
"""
)
result = testdir.runpytest()
-result.stdout.fnmatch_lines("* 2 passed, 1 skipped in *")
+result.stdout.fnmatch_lines(["* 2 passed, 1 skipped in *"])
def test_parametrize_skip(self, testdir):
testdir.makepyfile(
@@ -575,7 +575,7 @@ class TestFunction(object):
"""
)
result = testdir.runpytest()
-result.stdout.fnmatch_lines("* 2 passed, 1 skipped in *")
+result.stdout.fnmatch_lines(["* 2 passed, 1 skipped in *"])
def test_parametrize_skipif_no_skip(self, testdir):
testdir.makepyfile(
@@ -590,7 +590,7 @@ class TestFunction(object):
"""
)
result = testdir.runpytest()
-result.stdout.fnmatch_lines("* 1 failed, 2 passed in *")
+result.stdout.fnmatch_lines(["* 1 failed, 2 passed in *"])
def test_parametrize_xfail(self, testdir):
testdir.makepyfile(
@@ -605,7 +605,7 @@ class TestFunction(object):
"""
)
result = testdir.runpytest()
-result.stdout.fnmatch_lines("* 2 passed, 1 xfailed in *")
+result.stdout.fnmatch_lines(["* 2 passed, 1 xfailed in *"])
def test_parametrize_passed(self, testdir):
testdir.makepyfile(
@@ -620,7 +620,7 @@ class TestFunction(object):
"""
)
result = testdir.runpytest()
-result.stdout.fnmatch_lines("* 2 passed, 1 xpassed in *")
+result.stdout.fnmatch_lines(["* 2 passed, 1 xpassed in *"])
def test_parametrize_xfail_passed(self, testdir):
testdir.makepyfile(
@@ -635,7 +635,7 @@ class TestFunction(object):
"""
)
result = testdir.runpytest()
-result.stdout.fnmatch_lines("* 3 passed in *")
+result.stdout.fnmatch_lines(["* 3 passed in *"])
def test_function_original_name(self, testdir):
items = testdir.getitems(
@@ -833,7 +833,7 @@ class TestConftestCustomization(object):
)
# Use runpytest_subprocess, since we're futzing with sys.meta_path.
result = testdir.runpytest_subprocess()
-result.stdout.fnmatch_lines("*1 passed*")
+result.stdout.fnmatch_lines(["*1 passed*"])
def test_setup_only_available_in_subdir(testdir):
@@ -1298,14 +1298,14 @@ def test_keep_duplicates(testdir):
def test_package_collection_infinite_recursion(testdir):
testdir.copy_example("collect/package_infinite_recursion")
result = testdir.runpytest()
-result.stdout.fnmatch_lines("*1 passed*")
+result.stdout.fnmatch_lines(["*1 passed*"])
def test_package_collection_init_given_as_argument(testdir):
"""Regression test for #3749"""
p = testdir.copy_example("collect/package_init_given_as_arg")
result = testdir.runpytest(p / "pkg" / "__init__.py")
-result.stdout.fnmatch_lines("*1 passed*")
+result.stdout.fnmatch_lines(["*1 passed*"])
def test_package_with_modules(testdir):

@@ -536,7 +536,7 @@ class TestRequestBasic(object):
"""
)
result = testdir.runpytest_subprocess()
-result.stdout.fnmatch_lines("* 1 passed in *")
+result.stdout.fnmatch_lines(["* 1 passed in *"])
def test_getfixturevalue_recursive(self, testdir):
testdir.makeconftest(
@@ -598,7 +598,7 @@ class TestRequestBasic(object):
"""
)
result = testdir.runpytest()
-result.stdout.fnmatch_lines("* 2 passed in *")
+result.stdout.fnmatch_lines(["* 2 passed in *"])
@pytest.mark.parametrize("getfixmethod", ("getfixturevalue", "getfuncargvalue"))
def test_getfixturevalue(self, testdir, getfixmethod):
@@ -787,7 +787,7 @@ class TestRequestBasic(object):
"""Regression test for #3057"""
testdir.copy_example("fixtures/test_getfixturevalue_dynamic.py")
result = testdir.runpytest()
-result.stdout.fnmatch_lines("*1 passed*")
+result.stdout.fnmatch_lines(["*1 passed*"])
def test_funcargnames_compatattr(self, testdir):
testdir.makepyfile(
@@ -1527,7 +1527,7 @@ class TestFixtureManagerParseFactories(object):
def test_collect_custom_items(self, testdir):
testdir.copy_example("fixtures/custom_item")
result = testdir.runpytest("foo")
-result.stdout.fnmatch_lines("*passed*")
+result.stdout.fnmatch_lines(["*passed*"])
class TestAutouseDiscovery(object):
@@ -2609,7 +2609,7 @@ class TestFixtureMarker(object):
)
reprec = testdir.runpytest("-s")
for test in ["test_browser"]:
-reprec.stdout.fnmatch_lines("*Finalized*")
+reprec.stdout.fnmatch_lines(["*Finalized*"])
def test_class_scope_with_normal_tests(self, testdir):
testpath = testdir.makepyfile(
@@ -3450,7 +3450,7 @@ class TestContextManagerFixtureFuncs(object):
"""
)
result = testdir.runpytest("-s")
-result.stdout.fnmatch_lines("*mew*")
+result.stdout.fnmatch_lines(["*mew*"])
class TestParameterizedSubRequest(object):

@@ -796,7 +796,7 @@ def test_rewritten():
)
# needs to be a subprocess because pytester explicitly disables this warning
result = testdir.runpytest_subprocess()
-result.stdout.fnmatch_lines("*Module already imported*: _pytest")
+result.stdout.fnmatch_lines(["*Module already imported*: _pytest"])
def test_rewrite_module_imported_from_conftest(self, testdir):
testdir.makeconftest(
@@ -1123,7 +1123,7 @@ class TestAssertionRewriteHookDetails(object):
)
path.join("data.txt").write("Hey")
result = testdir.runpytest()
-result.stdout.fnmatch_lines("*1 passed*")
+result.stdout.fnmatch_lines(["*1 passed*"])
def test_issue731(testdir):
@@ -1154,7 +1154,7 @@ class TestIssue925(object):
"""
)
result = testdir.runpytest()
-result.stdout.fnmatch_lines("*E*assert (False == False) == False")
+result.stdout.fnmatch_lines(["*E*assert (False == False) == False"])
def test_long_case(self, testdir):
testdir.makepyfile(
@@ -1164,7 +1164,7 @@ class TestIssue925(object):
"""
)
result = testdir.runpytest()
-result.stdout.fnmatch_lines("*E*assert (False == True) == True")
+result.stdout.fnmatch_lines(["*E*assert (False == True) == True"])
def test_many_brackets(self, testdir):
testdir.makepyfile(
@@ -1174,7 +1174,7 @@ class TestIssue925(object):
"""
)
result = testdir.runpytest()
-result.stdout.fnmatch_lines("*E*assert True == ((False == True) == True)")
+result.stdout.fnmatch_lines(["*E*assert True == ((False == True) == True)"])
class TestIssue2121:
@@ -1194,7 +1194,7 @@ class TestIssue2121:
"""
)
result = testdir.runpytest()
-result.stdout.fnmatch_lines("*E*assert (1 + 1) == 3")
+result.stdout.fnmatch_lines(["*E*assert (1 + 1) == 3"])
@pytest.mark.parametrize("offset", [-1, +1])
@@ -1356,4 +1356,4 @@ class TestEarlyRewriteBailout(object):
}
)
result = testdir.runpytest()
-result.stdout.fnmatch_lines("* 1 passed in *")
+result.stdout.fnmatch_lines(["* 1 passed in *"])

@@ -393,7 +393,7 @@ class TestLastFailed(object):
"""
)
result = testdir.runpytest()
-result.stdout.fnmatch_lines("*1 failed in*")
+result.stdout.fnmatch_lines(["*1 failed in*"])
def test_terminal_report_lastfailed(self, testdir):
test_a = testdir.makepyfile(
@@ -574,7 +574,7 @@ class TestLastFailed(object):
"""
)
result = testdir.runpytest()
-result.stdout.fnmatch_lines("*1 xfailed*")
+result.stdout.fnmatch_lines(["*1 xfailed*"])
assert self.get_cached_last_failed(testdir) == []
def test_xfail_strict_considered_failure(self, testdir):
@@ -587,7 +587,7 @@ class TestLastFailed(object):
"""
)
result = testdir.runpytest()
-result.stdout.fnmatch_lines("*1 failed*")
+result.stdout.fnmatch_lines(["*1 failed*"])
assert self.get_cached_last_failed(testdir) == [
"test_xfail_strict_considered_failure.py::test"
]
@@ -680,12 +680,12 @@ class TestLastFailed(object):
"""
)
result = testdir.runpytest(test_bar)
-result.stdout.fnmatch_lines("*2 passed*")
+result.stdout.fnmatch_lines(["*2 passed*"])
# ensure cache does not forget that test_foo_4 failed once before
assert self.get_cached_last_failed(testdir) == ["test_foo.py::test_foo_4"]
result = testdir.runpytest("--last-failed")
-result.stdout.fnmatch_lines("*1 failed, 3 deselected*")
+result.stdout.fnmatch_lines(["*1 failed, 3 deselected*"])
assert self.get_cached_last_failed(testdir) == ["test_foo.py::test_foo_4"]
# 3. fix test_foo_4, run only test_foo.py
@@ -698,11 +698,11 @@ class TestLastFailed(object):
"""
)
result = testdir.runpytest(test_foo, "--last-failed")
-result.stdout.fnmatch_lines("*1 passed, 1 deselected*")
+result.stdout.fnmatch_lines(["*1 passed, 1 deselected*"])
assert self.get_cached_last_failed(testdir) == []
result = testdir.runpytest("--last-failed")
-result.stdout.fnmatch_lines("*4 passed*")
+result.stdout.fnmatch_lines(["*4 passed*"])
assert self.get_cached_last_failed(testdir) == []
def test_lastfailed_no_failures_behavior_all_passed(self, testdir):

@@ -673,7 +673,7 @@ class TestCaptureFixture(object):
)
)
result = testdir.runpytest_subprocess()
-result.stdout.fnmatch_lines("*1 passed*")
+result.stdout.fnmatch_lines(["*1 passed*"])
assert "stdout contents begin" not in result.stdout.str()
assert "stderr contents begin" not in result.stdout.str()

@@ -350,10 +350,10 @@ class TestCustomConftests(object):
p = testdir.makepyfile("def test_hello(): pass")
result = testdir.runpytest(p)
assert result.ret == 0
-result.stdout.fnmatch_lines("*1 passed*")
+result.stdout.fnmatch_lines(["*1 passed*"])
result = testdir.runpytest()
assert result.ret == EXIT_NOTESTSCOLLECTED
-result.stdout.fnmatch_lines("*collected 0 items*")
+result.stdout.fnmatch_lines(["*collected 0 items*"])
def test_collectignore_exclude_on_option(self, testdir):
testdir.makeconftest(
@@ -390,10 +390,10 @@ class TestCustomConftests(object):
testdir.makepyfile(test_welt="def test_hallo(): pass")
result = testdir.runpytest()
assert result.ret == EXIT_NOTESTSCOLLECTED
-result.stdout.fnmatch_lines("*collected 0 items*")
+result.stdout.fnmatch_lines(["*collected 0 items*"])
result = testdir.runpytest("--XX")
assert result.ret == 0
-result.stdout.fnmatch_lines("*2 passed*")
+result.stdout.fnmatch_lines(["*2 passed*"])
def test_pytest_fs_collect_hooks_are_seen(self, testdir):
testdir.makeconftest(

@@ -805,7 +805,7 @@ def test_collect_pytest_prefix_bug_integration(testdir):
"""Integration test for issue #3775"""
p = testdir.copy_example("config/collect_pytest_prefix")
result = testdir.runpytest(p)
-result.stdout.fnmatch_lines("* 1 passed *")
+result.stdout.fnmatch_lines(["* 1 passed *"])
def test_collect_pytest_prefix_bug(pytestconfig):

@@ -968,7 +968,7 @@ class TestDoctestAutoUseFixtures(object):
"""
)
result = testdir.runpytest("--doctest-modules")
-result.stdout.fnmatch_lines("*2 passed*")
+result.stdout.fnmatch_lines(["*2 passed*"])
@pytest.mark.parametrize("scope", SCOPES)
@pytest.mark.parametrize("enable_doctest", [True, False])

@@ -380,4 +380,4 @@ def test_skip_test_with_unicode(testdir):
"""
)
result = testdir.runpytest()
-result.stdout.fnmatch_lines("* 1 skipped *")
+result.stdout.fnmatch_lines(["* 1 skipped *"])

@@ -640,7 +640,7 @@ def test_pytest_fail_notrace_non_ascii(testdir, str_prefix):
def test_pytest_no_tests_collected_exit_status(testdir):
result = testdir.runpytest()
-result.stdout.fnmatch_lines("*collected 0 items*")
+result.stdout.fnmatch_lines(["*collected 0 items*"])
assert result.ret == main.EXIT_NOTESTSCOLLECTED
testdir.makepyfile(
@@ -650,13 +650,13 @@ def test_pytest_no_tests_collected_exit_status(testdir):
"""
)
result = testdir.runpytest()
-result.stdout.fnmatch_lines("*collected 1 item*")
-result.stdout.fnmatch_lines("*1 passed*")
+result.stdout.fnmatch_lines(["*collected 1 item*"])
+result.stdout.fnmatch_lines(["*1 passed*"])
assert result.ret == main.EXIT_OK
result = testdir.runpytest("-k nonmatch")
-result.stdout.fnmatch_lines("*collected 1 item*")
-result.stdout.fnmatch_lines("*1 deselected*")
+result.stdout.fnmatch_lines(["*collected 1 item*"])
+result.stdout.fnmatch_lines(["*1 deselected*"])
assert result.ret == main.EXIT_NOTESTSCOLLECTED

@@ -331,7 +331,7 @@ class TestXFail(object):
result = testdir.runpytest(p, "-rx")
result.stdout.fnmatch_lines(["*XFAIL*test_this*", "*reason:*hello*"])
result = testdir.runpytest(p, "--runxfail")
-result.stdout.fnmatch_lines("*1 pass*")
+result.stdout.fnmatch_lines(["*1 pass*"])
def test_xfail_imperative_in_setup_function(self, testdir):
p = testdir.makepyfile(
@@ -477,7 +477,7 @@ class TestXFail(object):
% strict
)
result = testdir.runpytest(p, "-rxX")
-result.stdout.fnmatch_lines("*1 passed*")
+result.stdout.fnmatch_lines(["*1 passed*"])
assert result.ret == 0
@pytest.mark.parametrize("strict", [True, False])
@@ -493,7 +493,7 @@ class TestXFail(object):
% strict
)
result = testdir.runpytest(p, "-rxX")
-result.stdout.fnmatch_lines("*1 passed*")
+result.stdout.fnmatch_lines(["*1 passed*"])
assert result.ret == 0
@pytest.mark.parametrize("strict_val", ["true", "false"])
@@ -515,7 +515,7 @@ class TestXFail(object):
)
result = testdir.runpytest(p, "-rxX")
strict = strict_val == "true"
-result.stdout.fnmatch_lines("*1 failed*" if strict else "*1 xpassed*")
+result.stdout.fnmatch_lines(["*1 failed*" if strict else "*1 xpassed*"])
assert result.ret == (1 if strict else 0)
@@ -1130,7 +1130,9 @@ def test_module_level_skip_error(testdir):
"""
)
result = testdir.runpytest()
-result.stdout.fnmatch_lines("*Using pytest.skip outside of a test is not allowed*")
+result.stdout.fnmatch_lines(
+["*Using pytest.skip outside of a test is not allowed*"]
+)
def test_module_level_skip_with_allow_module_level(testdir):
@@ -1147,7 +1149,7 @@ def test_module_level_skip_with_allow_module_level(testdir):
"""
)
result = testdir.runpytest("-rxs")
-result.stdout.fnmatch_lines("*SKIP*skip_module_level")
+result.stdout.fnmatch_lines(["*SKIP*skip_module_level"])
def test_invalid_skip_keyword_parameter(testdir):
@@ -1164,7 +1166,7 @@ def test_invalid_skip_keyword_parameter(testdir):
"""
)
result = testdir.runpytest()
-result.stdout.fnmatch_lines("*TypeError:*['unknown']*")
+result.stdout.fnmatch_lines(["*TypeError:*['unknown']*"])
def test_mark_xfail_item(testdir):

@@ -16,7 +16,7 @@ from _pytest.warnings import SHOW_PYTEST_WARNINGS_ARG
def test_tmpdir_fixture(testdir):
p = testdir.copy_example("tmpdir/tmpdir_fixture.py")
results = testdir.runpytest(p)
-results.stdout.fnmatch_lines("*1 passed*")
+results.stdout.fnmatch_lines(["*1 passed*"])
def test_ensuretemp(recwarn):

@@ -794,7 +794,7 @@ def test_unittest_setup_interaction(testdir, fix_type, stmt):
)
)
result = testdir.runpytest()
-result.stdout.fnmatch_lines("*3 passed*")
+result.stdout.fnmatch_lines(["*3 passed*"])
def test_non_unittest_no_setupclass_support(testdir):
@@ -1040,4 +1040,4 @@ def test_setup_inheritance_skipping(testdir, test_name, expected_outcome):
"""Issue #4700"""
testdir.copy_example("unittest/{}".format(test_name))
result = testdir.runpytest()
-result.stdout.fnmatch_lines("* {} in *".format(expected_outcome))
+result.stdout.fnmatch_lines(["* {} in *".format(expected_outcome)])