remove the unused return value of fnmatch_lines
--HG-- branch : trunk
parent f2be437daa
commit d93016d85f
@@ -486,4 +486,3 @@ class LineMatcher:
                 extralines.append(nextline)
             else:
                 assert line == nextline
-        return True
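Note: after this change fnmatch_lines() asserts internally and no longer returns a value, so callers can no longer meaningfully assert on its result. A minimal, simplified sketch of the resulting semantics (illustrative only, not the actual pytester code):

    import fnmatch

    class LineMatcher:
        def __init__(self, lines):
            self.lines = lines

        def fnmatch_lines(self, patterns):
            # Consume captured lines until each pattern matches, in order.
            # Failure raises AssertionError; success implicitly returns None.
            remaining = self.lines[:]
            for pattern in patterns:
                while remaining:
                    line = remaining.pop(0)
                    if line == pattern or fnmatch.fnmatch(line, pattern):
                        break
                else:
                    assert False, "pattern %r not found" % (pattern,)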
@@ -8,7 +8,7 @@ class TestGeneralUsage:
         """)
         result = testdir.runpytest(testdir.tmpdir)
         assert result.ret != 0
-        assert result.stderr.fnmatch_lines([
+        result.stderr.fnmatch_lines([
             '*ERROR: hello'
         ])
@@ -24,7 +24,7 @@ class TestGeneralUsage:
         """)
         result = testdir.runpytest("-p", "xyz", "--xyz=123")
         assert result.ret == 0
-        assert result.stdout.fnmatch_lines([
+        result.stdout.fnmatch_lines([
             '*1 passed*',
         ])
@@ -46,7 +46,7 @@ class TestGeneralUsage:
             assert x
         """)
         result = testdir.runpytest(p)
-        extra = result.stdout.fnmatch_lines([
+        result.stdout.fnmatch_lines([
             "> assert x",
             "E assert 0",
         ])
@@ -60,7 +60,7 @@ class TestGeneralUsage:
         """)
         testdir.makepyfile(import_fails="import does_not_work")
         result = testdir.runpytest(p)
-        extra = result.stdout.fnmatch_lines([
+        result.stdout.fnmatch_lines([
             #XXX on jython this fails: "> import import_fails",
             "E ImportError: No module named does_not_work",
         ])
@@ -71,7 +71,7 @@ class TestGeneralUsage:
         p2 = testdir.makefile(".pyc", "123")
         result = testdir.runpytest(p1, p2)
         assert result.ret != 0
-        assert result.stderr.fnmatch_lines([
+        result.stderr.fnmatch_lines([
             "*ERROR: can't collect: %s" %(p2,)
         ])
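The remaining hunks apply the same mechanical call-site cleanup: because fnmatch_lines() now returns None, wrapping the call in assert would fail even when every pattern matches, and binding it to an unused extra variable is pointless. A hedged before/after sketch of the two caller patterns touched by this commit (names are illustrative):

    # before: relying on the old True return value
    assert result.stdout.fnmatch_lines(["*1 passed*"])
    extra = result.stdout.fnmatch_lines(["*1 passed*"])

    # after: the call itself raises AssertionError on a mismatch
    result.stdout.fnmatch_lines(["*1 passed*"])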
@@ -187,7 +187,7 @@ class TestPerTestCapturing:
             pass
         """)
         result = testdir.runpytest(p)
-        assert result.stdout.fnmatch_lines([
+        result.stdout.fnmatch_lines([
             '*teardown_function*',
             '*Captured stdout*',
             "setup func1*",
@@ -205,7 +205,7 @@ class TestPerTestCapturing:
             pass
         """)
         result = testdir.runpytest(p)
-        assert result.stdout.fnmatch_lines([
+        result.stdout.fnmatch_lines([
             "*def teardown_module(mod):*",
             "*Captured stdout*",
             "*teardown module*",
@@ -271,7 +271,7 @@ class TestLoggingInteraction:
             print ("suspend2 and captured %s" % (outerr,))
         """)
         result = testdir.runpython(p)
-        assert result.stdout.fnmatch_lines([
+        result.stdout.fnmatch_lines([
             "suspeneded and captured*hello1*",
             "suspend2 and captured*hello2*WARNING:root:hello3*",
         ])
@@ -358,7 +358,7 @@ class TestCaptureFuncarg:
             pass
         """)
         result = testdir.runpytest(p)
-        assert result.stdout.fnmatch_lines([
+        result.stdout.fnmatch_lines([
             "*test_partial_setup_failure*",
             "*1 error*",
         ])
@@ -23,7 +23,7 @@ def test_exclude(testdir):
     testdir.makepyfile(test_ok="def test_pass(): pass")
     result = testdir.runpytest("--ignore=hello", "--ignore=hello2")
     assert result.ret == 0
-    assert result.stdout.fnmatch_lines(["*1 passed*"])
+    result.stdout.fnmatch_lines(["*1 passed*"])

 def test_pytest_report_iteminfo():
     class FakeItem(object):
@@ -43,4 +43,4 @@ def test_conftest_confcutdir(testdir):
         parser.addoption("--xyz", action="store_true")
     """))
     result = testdir.runpytest("-h", "--confcutdir=%s" % x, x)
-    assert result.stdout.fnmatch_lines(["*--xyz*"])
+    result.stdout.fnmatch_lines(["*--xyz*"])
@@ -6,14 +6,14 @@ def test_version(testdir):
     result = testdir.runpytest("--version")
     assert result.ret == 0
     #p = py.path.local(py.__file__).dirpath()
-    assert result.stderr.fnmatch_lines([
+    result.stderr.fnmatch_lines([
         '*py.test*%s*imported from*' % (py.version, )
     ])

 def test_helpconfig(testdir):
     result = testdir.runpytest("--help-config")
     assert result.ret == 0
-    assert result.stdout.fnmatch_lines([
+    result.stdout.fnmatch_lines([
         "*cmdline*conftest*ENV*",
     ])
@@ -36,7 +36,7 @@ def test_hookvalidation_unknown(testdir):
     """)
     result = testdir.runpytest()
    assert result.ret != 0
-    assert result.stderr.fnmatch_lines([
+    result.stderr.fnmatch_lines([
         '*unknown hook*pytest_hello*'
     ])
@@ -56,7 +56,7 @@ class TestFunctional:
             assert hasattr(test_hello, 'hello')
         """)
         result = testdir.runpytest(p)
-        assert result.stdout.fnmatch_lines(["*passed*"])
+        result.stdout.fnmatch_lines(["*passed*"])

     def test_mark_per_module(self, testdir):
         item = testdir.getitem("""
@@ -63,7 +63,7 @@ def test_testdir_runs_with_plugin(testdir):
         assert 1
     """)
     result = testdir.runpytest()
-    assert result.stdout.fnmatch_lines([
+    result.stdout.fnmatch_lines([
         "*1 passed*"
     ])
@@ -310,7 +310,7 @@ def test_runtest_in_module_ordering(testdir):
         del item.function.mylist
     """)
     result = testdir.runpytest(p1)
-    assert result.stdout.fnmatch_lines([
+    result.stdout.fnmatch_lines([
         "*2 passed*"
     ])
@@ -8,7 +8,7 @@ def test_xfail_not_report_default(testdir):
         assert 0
     """)
     result = testdir.runpytest(p, '-v')
-    extra = result.stdout.fnmatch_lines([
+    result.stdout.fnmatch_lines([
         "*1 expected failures*--report=xfailed*",
     ])
@@ -19,7 +19,7 @@ def test_skip_not_report_default(testdir):
         py.test.skip("hello")
     """)
     result = testdir.runpytest(p, '-v')
-    extra = result.stdout.fnmatch_lines([
+    result.stdout.fnmatch_lines([
         "*1 skipped*--report=skipped*",
     ])
@@ -35,7 +35,7 @@ def test_xfail_decorator(testdir):
         assert 1
     """)
     result = testdir.runpytest(p, '--report=xfailed')
-    extra = result.stdout.fnmatch_lines([
+    result.stdout.fnmatch_lines([
         "*expected failures*",
         "*test_one.test_this*test_one.py:4*",
         "*UNEXPECTEDLY PASSING*",
@@ -52,7 +52,7 @@ def test_xfail_at_module(testdir):
         assert 0
     """)
     result = testdir.runpytest(p, '--report=xfailed')
-    extra = result.stdout.fnmatch_lines([
+    result.stdout.fnmatch_lines([
         "*expected failures*",
         "*test_intentional_xfail*:4*",
         "*1 xfailed*"
@@ -67,7 +67,7 @@ def test_xfail_evalfalse_but_fails(testdir):
         assert 0
     """)
     result = testdir.runpytest(p, '--report=xfailed')
-    extra = result.stdout.fnmatch_lines([
+    result.stdout.fnmatch_lines([
         "*test_xfail_evalfalse_but_fails*:4*",
         "*1 failed*"
     ])
@@ -81,7 +81,7 @@ def test_skipif_decorator(testdir):
         assert 0
     """)
     result = testdir.runpytest(p, '--report=skipped')
-    extra = result.stdout.fnmatch_lines([
+    result.stdout.fnmatch_lines([
         "*Skipped*platform*",
         "*1 skipped*"
     ])
@@ -99,7 +99,7 @@ def test_skipif_class(testdir):
             assert 0
     """)
     result = testdir.runpytest(p)
-    extra = result.stdout.fnmatch_lines([
+    result.stdout.fnmatch_lines([
         "*2 skipped*"
     ])
@@ -165,7 +165,7 @@ def test_skipped_reasons_functional(testdir):
         """
     )
     result = testdir.runpytest('--report=skipped')
-    extra = result.stdout.fnmatch_lines([
+    result.stdout.fnmatch_lines([
         "*test_one.py ss",
         "*test_two.py S",
         "___* skipped test summary *_",
@@ -226,7 +226,7 @@ class TestTerminal:
             pass
         """)
         result = testdir.runpytest(p2)
-        assert result.stdout.fnmatch_lines([
+        result.stdout.fnmatch_lines([
             "*test_p2.py .",
             "*1 passed*",
         ])
@@ -461,7 +461,7 @@ class TestTerminalFunctional:
         """
         )
         result = testdir.runpytest("-k", "test_two:", testpath)
-        extra = result.stdout.fnmatch_lines([
+        result.stdout.fnmatch_lines([
             "*test_deselected.py ..",
             "=* 1 test*deselected by 'test_two:'*=",
         ])
@@ -494,7 +494,7 @@ class TestTerminalFunctional:
             result = testdir.runpytest()
         finally:
             old.chdir()
-        extra = result.stdout.fnmatch_lines([
+        result.stdout.fnmatch_lines([
             "test_passes.py ..",
             "* 2 pass*",
         ])
@@ -507,7 +507,7 @@ class TestTerminalFunctional:
         """)
         result = testdir.runpytest()
         verinfo = ".".join(map(str, py.std.sys.version_info[:3]))
-        extra = result.stdout.fnmatch_lines([
+        result.stdout.fnmatch_lines([
             "*===== test session starts ====*",
             "python: platform %s -- Python %s*" %(
                 py.std.sys.platform, verinfo), # , py.std.sys.executable),
@@ -577,13 +577,13 @@ def test_terminalreporter_reportopt_conftestsetting(testdir):
         assert not tr.hasopt('qwe')
     """)
     result = testdir.runpytest()
-    assert result.stdout.fnmatch_lines([
+    result.stdout.fnmatch_lines([
         "*1 passed*"
     ])

 def test_trace_reporting(testdir):
     result = testdir.runpytest("--traceconfig")
-    assert result.stdout.fnmatch_lines([
+    result.stdout.fnmatch_lines([
         "*active plugins*"
     ])
     assert result.ret == 0
@@ -592,7 +592,7 @@ def test_trace_reporting(testdir):
 def test_show_funcarg(testdir, option):
     args = option._getcmdargs() + ["--funcargs"]
     result = testdir.runpytest(*args)
-    assert result.stdout.fnmatch_lines([
+    result.stdout.fnmatch_lines([
         "*tmpdir*",
         "*temporary directory*",
     ]
@@ -83,7 +83,7 @@ class TestFillFuncArgs:
             assert something is self
         """)
         result = testdir.runpytest(p)
-        assert result.stdout.fnmatch_lines([
+        result.stdout.fnmatch_lines([
             "*1 passed*"
         ])
@@ -201,7 +201,7 @@ class TestRequest:
             assert len(l) == 1
         """)
         result = testdir.runpytest(p)
-        assert result.stdout.fnmatch_lines([
+        result.stdout.fnmatch_lines([
             "*1 passed*1 error*"
         ])
@@ -430,7 +430,7 @@ class TestGenfuncFunctional:
             assert arg1 == arg2
         """)
         result = testdir.runpytest("-v", p)
-        assert result.stdout.fnmatch_lines([
+        result.stdout.fnmatch_lines([
             "*test_myfunc*0*PASS*",
             "*test_myfunc*1*FAIL*",
             "*1 failed, 1 passed*"
@@ -451,7 +451,7 @@ class TestGenfuncFunctional:
             assert arg1 in (10, 20)
         """)
         result = testdir.runpytest("-v", p)
-        assert result.stdout.fnmatch_lines([
+        result.stdout.fnmatch_lines([
             "*test_func1*0*PASS*",
             "*test_func1*1*FAIL*",
             "*test_func2*PASS*",
@@ -478,7 +478,7 @@ class TestGenfuncFunctional:
             assert arg1 == arg2
         """)
         result = testdir.runpytest("-v", p)
-        assert result.stdout.fnmatch_lines([
+        result.stdout.fnmatch_lines([
             "*test_myfunc*hello*PASS*",
             "*test_myfunc*world*FAIL*",
             "*1 failed, 1 passed*"
@@ -494,7 +494,7 @@ class TestGenfuncFunctional:
             assert hello == "world"
         """)
         result = testdir.runpytest("-v", p)
-        assert result.stdout.fnmatch_lines([
+        result.stdout.fnmatch_lines([
             "*test_myfunc*hello*PASS*",
             "*1 passed*"
         ])
@@ -511,7 +511,7 @@ class TestGenfuncFunctional:
             self.x = 1
         """)
         result = testdir.runpytest("-v", p)
-        assert result.stdout.fnmatch_lines([
+        result.stdout.fnmatch_lines([
             "*test_func*0*PASS*",
             "*test_func*1*PASS*",
             "*2 pass*",
@@ -115,5 +115,5 @@ def test_addoption_parser_epilog(testdir):
     """)
     result = testdir.runpytest('--help')
     #assert result.ret != 0
-    assert result.stdout.fnmatch_lines(["*hint: hello world*"])
+    result.stdout.fnmatch_lines(["*hint: hello world*"])
@@ -81,7 +81,7 @@ class TestBootstrapping:
         monkeypatch.setenv('PYTEST_PLUGINS', 'pytest_x500', prepend=",")
         result = testdir.runpytest(p)
         assert result.ret == 0
-        extra = result.stdout.fnmatch_lines(["*1 passed in*"])
+        result.stdout.fnmatch_lines(["*1 passed in*"])

     def test_import_plugin_importname(self, testdir):
         pluginmanager = PluginManager()