move test files out of py lib proper

* separate all tests from plugins
* simplify implicit inclusion of plugins under test
* have test_initpkg perform direct checks instead of yielding tests
* fix example tests for 3k

--HG--
branch : trunk
parent 5cf27098cf
commit c8119d89b6
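The "fix example tests for 3k" item mostly replaces Python-2-only attribute spellings with names that exist on both Python 2 and 3, as in the setup_method change further down in this diff (illustrative excerpt, not the full change):

    # Python 2 only:
    name = method.func_name
    # works on Python 2 and Python 3:
    name = method.__name__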
@@ -1,6 +1,6 @@
 pytest_plugins = '_pytest doctest pytester'.split()

-rsyncdirs = ['.', '../doc']
+rsyncdirs = ['conftest.py', 'py', 'doc', 'testing']

 import py
 def pytest_addoption(parser):
@@ -2,7 +2,7 @@ Changes between 1.0.x and 'trunk'
 =====================================

 * consolidate builtins implementation to be compatible with >=2.3,
-  add print_, exec_ and _reraise helpers for 2-3 compatible code
+  add helpers to ease keeping 2 and 3k compatible code

 * deprecate py.compat.doctest|subprocess|textwrap|optparse
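The print_/exec_/_reraise helpers referenced in the changelog entry above are small wrappers that paper over syntax differences between Python 2 and 3. A minimal sketch of the usual shape of such helpers (an assumption for illustration, not the library's exact code):

    import sys

    def print_(*args, **kwargs):
        # mimic the Python 3 print() keyword signature on Python 2
        file = kwargs.pop('file', sys.stdout)
        sep = kwargs.pop('sep', ' ')
        end = kwargs.pop('end', '\n')
        file.write(sep.join(str(x) for x in args) + end)

    if sys.version_info >= (3, 0):
        exec("def _reraise(cls, val, tb): raise val.with_traceback(tb)")
    else:
        exec("def _reraise(cls, val, tb): raise cls, val, tb")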
@@ -46,7 +46,7 @@ A quick example::
     >>> isinstance(c.source(), py.code.Source)
     True
     >>> str(c.source()).split('\n')[0]
-    "def read(self, mode='rb'):"
+    "def read(self, mode='r'):"

 source: :source:`py/code/code.py`
@@ -9,7 +9,7 @@ class TestStateFullThing:
         cls.classcount -= 1

     def setup_method(self, method):
-        self.id = eval(method.func_name[5:])
+        self.id = eval(method.__name__[5:])

     def test_42(self):
         assert self.classcount == 1
@@ -1,5 +1,5 @@

-from myapp import MyApp
+from mysetup.myapp import MyApp

 def pytest_funcarg__mysetup(request):
     return MySetup()
@@ -1,5 +1,5 @@
 import py
-from myapp import MyApp
+from mysetup2.myapp import MyApp

 def pytest_funcarg__mysetup(request):
     return MySetup(request)
@@ -1,3 +0,0 @@
-import py
-import sys
-
@@ -1,11 +0,0 @@
-import py
-
-pydir = py.path.local(py.__file__).dirpath()
-mydatadir = py.path.local(__file__).dirpath('data')
-
-def getdata():
-    rel = mydatadir.relto(pydir)
-    tmpdir = py.test.ensuretemp(rel.replace(pydir.sep, '_'))
-    mydatadir.copy(tmpdir)
-    return tmpdir
@@ -99,51 +99,3 @@ class HookRecorder:
         assert len(l) == 1, (name, l)
         return l[0]

-
-def test_hookrecorder_basic():
-    comregistry = py._com.Registry()
-    rec = HookRecorder(comregistry)
-    class ApiClass:
-        def xyz(self, arg):
-            pass
-    rec.start_recording(ApiClass)
-    rec.hook.xyz(arg=123)
-    call = rec.popcall("xyz")
-    assert call.arg == 123
-    assert call._name == "xyz"
-    py.test.raises(ValueError, "rec.popcall('abc')")
-
-def test_hookrecorder_basic_no_args_hook():
-    import sys
-    comregistry = py._com.Registry()
-    rec = HookRecorder(comregistry)
-    apimod = type(sys)('api')
-    def xyz():
-        pass
-    apimod.xyz = xyz
-    rec.start_recording(apimod)
-    rec.hook.xyz()
-    call = rec.popcall("xyz")
-    assert call._name == "xyz"
-
-reg = py._com.comregistry
-def test_functional_default(testdir, _pytest):
-    assert _pytest.comregistry == py._com.comregistry
-    assert _pytest.comregistry != reg
-
-def test_functional(testdir, linecomp):
-    reprec = testdir.inline_runsource("""
-        import py
-        pytest_plugins="_pytest"
-        def test_func(_pytest):
-            class ApiClass:
-                def xyz(self, arg): pass
-            rec = _pytest.gethookrecorder(ApiClass)
-            class Plugin:
-                def xyz(self, arg):
-                    return arg + 1
-            rec._comregistry.register(Plugin())
-            res = rec.hook.xyz(arg=41)
-            assert res == [42]
-    """)
-    reprec.assertoutcome(passed=1)
@@ -26,44 +26,3 @@ def warn_about_missing_assertion():
     else:
         py.std.warnings.warn("Assertions are turned off!"
                              " (are you using python -O?)")
-
-def test_functional(testdir):
-    testdir.makepyfile("""
-        def test_hello():
-            x = 3
-            assert x == 4
-    """)
-    result = testdir.runpytest()
-    assert "3 == 4" in result.stdout.str()
-    result = testdir.runpytest("--no-assert")
-    assert "3 == 4" not in result.stdout.str()
-
-def test_traceback_failure(testdir):
-    p1 = testdir.makepyfile("""
-        def g():
-            return 2
-        def f(x):
-            assert x == g()
-        def test_onefails():
-            f(3)
-    """)
-    result = testdir.runpytest(p1)
-    result.stdout.fnmatch_lines([
-        "*test_traceback_failure.py F",
-        "====* FAILURES *====",
-        "____*____",
-        "",
-        " def test_onefails():",
-        "> f(3)",
-        "",
-        "*test_*.py:6: ",
-        "_ _ _ *",
-        #"",
-        " def f(x):",
-        "> assert x == g()",
-        "E assert 3 == 2",
-        "E + where 2 = g()",
-        "",
-        "*test_traceback_failure.py:4: AssertionError"
-    ])
@@ -115,83 +115,3 @@ def setsession(config):
     elif val("dist") != "no":
         from py.__.test.dist.dsession import DSession
         config.setsessionclass(DSession)
-
-def test_implied_different_sessions(tmpdir):
-    def x(*args):
-        config = py.test.config._reparse([tmpdir] + list(args))
-        try:
-            config.pluginmanager.do_configure(config)
-        except ValueError:
-            return Exception
-        return getattr(config._sessionclass, '__name__', None)
-    assert x() == None
-    assert x('-d') == 'DSession'
-    assert x('--dist=each') == 'DSession'
-    assert x('-n3') == 'DSession'
-    assert x('-f') == 'LooponfailingSession'
-
-def test_plugin_specify(testdir):
-    testdir.chdir()
-    config = py.test.raises(ImportError, """
-        testdir.parseconfig("-p", "nqweotexistent")
-    """)
-    #py.test.raises(ImportError,
-    # "config.pluginmanager.do_configure(config)"
-    #)
-
-def test_plugin_already_exists(testdir):
-    config = testdir.parseconfig("-p", "default")
-    assert config.option.plugins == ['default']
-    config.pluginmanager.do_configure(config)
-
-
-class TestDistOptions:
-    def test_getxspecs(self, testdir):
-        config = testdir.parseconfigure("--tx=popen", "--tx", "ssh=xyz")
-        xspecs = config.getxspecs()
-        assert len(xspecs) == 2
-        print(xspecs)
-        assert xspecs[0].popen
-        assert xspecs[1].ssh == "xyz"
-
-    def test_xspecs_multiplied(self, testdir):
-        xspecs = testdir.parseconfigure("--tx=3*popen",).getxspecs()
-        assert len(xspecs) == 3
-        assert xspecs[1].popen
-
-    def test_getrsyncdirs(self, testdir):
-        config = testdir.parseconfigure('--rsyncdir=' + str(testdir.tmpdir))
-        roots = config.getrsyncdirs()
-        assert len(roots) == 1 + 1
-        assert testdir.tmpdir in roots
-
-    def test_getrsyncdirs_with_conftest(self, testdir):
-        p = py.path.local()
-        for bn in 'x y z'.split():
-            p.mkdir(bn)
-        testdir.makeconftest("""
-            rsyncdirs= 'x',
-        """)
-        config = testdir.parseconfigure(testdir.tmpdir, '--rsyncdir=y', '--rsyncdir=z')
-        roots = config.getrsyncdirs()
-        assert len(roots) == 3 + 1
-        assert py.path.local('y') in roots
-        assert py.path.local('z') in roots
-        assert testdir.tmpdir.join('x') in roots
-
-def test_dist_options(testdir):
-    config = testdir.parseconfigure("-n 2")
-    assert config.option.dist == "load"
-    assert config.option.tx == ['popen'] * 2
-
-    config = testdir.parseconfigure("-d")
-    assert config.option.dist == "load"
-
-def test_pytest_report_iteminfo():
-    class FakeItem(object):
-
-        def reportinfo(self):
-            return "-reportinfo-"
-
-    res = pytest_report_iteminfo(FakeItem())
-    assert res == "-reportinfo-"
@@ -84,110 +84,3 @@ class DoctestModule(DoctestItem):
         module = self.fspath.pyimport()
         failed, tot = doctest.testmod(
             module, raise_on_error=True, verbose=0)
-
-
-#
-# Plugin tests
-#
-
-class TestDoctests:
-
-    def test_collect_testtextfile(self, testdir):
-        testdir.maketxtfile(whatever="")
-        checkfile = testdir.maketxtfile(test_something="""
-            alskdjalsdk
-            >>> i = 5
-            >>> i-1
-            4
-        """)
-        for x in (testdir.tmpdir, checkfile):
-            #print "checking that %s returns custom items" % (x,)
-            items, reprec = testdir.inline_genitems(x)
-            assert len(items) == 1
-            assert isinstance(items[0], DoctestTextfile)
-
-    def test_collect_module(self, testdir):
-        path = testdir.makepyfile(whatever="#")
-        for p in (path, testdir.tmpdir):
-            items, reprec = testdir.inline_genitems(p, '--doctest-modules')
-            assert len(items) == 1
-            assert isinstance(items[0], DoctestModule)
-
-    def test_simple_doctestfile(self, testdir):
-        p = testdir.maketxtfile(test_doc="""
-            >>> x = 1
-            >>> x == 1
-            False
-        """)
-        reprec = testdir.inline_run(p)
-        reprec.assertoutcome(failed=1)
-
-    def test_doctest_unexpected_exception(self, testdir):
-        from py.__.test.outcome import Failed
-
-        p = testdir.maketxtfile("""
-            >>> i = 0
-            >>> i = 1
-            >>> x
-            2
-        """)
-        reprec = testdir.inline_run(p)
-        call = reprec.getcall("pytest_runtest_logreport")
-        assert call.report.failed
-        assert call.report.longrepr
-        # XXX
-        #testitem, = items
-        #excinfo = py.test.raises(Failed, "testitem.runtest()")
-        #repr = testitem.repr_failure(excinfo, ("", ""))
-        #assert repr.reprlocation
-
-    def test_doctestmodule(self, testdir):
-        p = testdir.makepyfile("""
-            '''
-            >>> x = 1
-            >>> x == 1
-            False
-
-            '''
-        """)
-        reprec = testdir.inline_run(p, "--doctest-modules")
-        reprec.assertoutcome(failed=1)
-
-    def test_doctestmodule_external(self, testdir):
-        p = testdir.makepyfile("""
-            #
-            def somefunc():
-                '''
-                >>> i = 0
-                >>> i + 1
-                2
-                '''
-        """)
-        result = testdir.runpytest(p, "--doctest-modules")
-        result.stdout.fnmatch_lines([
-            '004 *>>> i = 0',
-            '005 *>>> i + 1',
-            '*Expected:',
-            "* 2",
-            "*Got:",
-            "* 1",
-            "*:5: DocTestFailure"
-        ])
-
-
-    def test_txtfile_failing(self, testdir):
-        p = testdir.maketxtfile("""
-            >>> i = 0
-            >>> i + 1
-            2
-        """)
-        result = testdir.runpytest(p)
-        result.stdout.fnmatch_lines([
-            '001 >>> i = 0',
-            '002 >>> i + 1',
-            'Expected:',
-            " 2",
-            "Got:",
-            " 1",
-            "*test_txtfile_failing.txt:2: DocTestFailure"
-        ])
@@ -39,16 +39,3 @@ class Execnetcleanup:
         while len(self._gateways) > len(gateways):
             self._gateways[-1].exit()
         return res
-
-def test_execnetplugin(testdir):
-    reprec = testdir.inline_runsource("""
-        import py
-        import sys
-        def test_hello():
-            sys._gw = py.execnet.PopenGateway()
-        def test_world():
-            assert hasattr(sys, '_gw')
-            assert sys._gw not in sys._gw._cleanup._activegateways
-
-    """, "-s", "--debug")
-    reprec.assertoutcome(passed=2)
@@ -48,20 +48,3 @@ def get_coverage(datafile, config):
         if not py.path.local(path).relto(basepath):
             del coverage[path]
     return coverage
-
-
-def test_functional(testdir):
-    py.test.importorskip("figleaf")
-    testdir.plugins.append("figleaf")
-    testdir.makepyfile("""
-        def f():
-            x = 42
-        def test_whatever():
-            pass
-    """)
-    result = testdir.runpytest('-F')
-    assert result.ret == 0
-    assert result.stdout.fnmatch_lines([
-        '*figleaf html*'
-    ])
-    #print result.stdout.str()
@@ -29,18 +29,3 @@ def pytest_unconfigure(config):
         del config.hook.__dict__['_performcall']
     except KeyError:
         pass
-
-# ===============================================================================
-# plugin tests
-# ===============================================================================
-
-def test_functional(testdir):
-    testdir.makepyfile("""
-        def test_pass():
-            pass
-    """)
-    testdir.runpytest("--hooklog=hook.log")
-    s = testdir.tmpdir.join("hook.log").read()
-    assert s.find("pytest_sessionstart") != -1
-    assert s.find("ItemTestReport") != -1
-    assert s.find("sessionfinish") != -1
@@ -66,29 +66,3 @@ class MarkerDecorator:
 class MarkHolder:
     def __init__(self, kwargs):
         self.__dict__.update(kwargs)
-
-def test_pytest_mark_api():
-    mark = Mark()
-    py.test.raises(TypeError, "mark(x=3)")
-
-    def f(): pass
-    mark.hello(f)
-    assert f.hello
-
-    mark.world(x=3, y=4)(f)
-    assert f.world
-    assert f.world.x == 3
-    assert f.world.y == 4
-
-    py.test.raises(TypeError, "mark.some(x=3)(f=5)")
-
-def test_mark_plugin(testdir):
-    p = testdir.makepyfile("""
-        import py
-        pytest_plugins = "keyword"
-        @py.test.mark.hello
-        def test_hello():
-            assert hasattr(test_hello, 'hello')
-    """)
-    result = testdir.runpytest(p)
-    assert result.stdout.fnmatch_lines(["*passed*"])
@@ -132,137 +132,3 @@ class MonkeyPatch:
         self._setitem[:] = []
         if hasattr(self, '_savesyspath'):
             sys.path[:] = self._savesyspath
-
-def test_setattr():
-    class A:
-        x = 1
-    monkeypatch = MonkeyPatch()
-    monkeypatch.setattr(A, 'x', 2)
-    assert A.x == 2
-    monkeypatch.setattr(A, 'x', 3)
-    assert A.x == 3
-    monkeypatch.undo()
-    assert A.x == 1
-
-    A.x = 5
-    monkeypatch.undo() # double-undo makes no modification
-    assert A.x == 5
-
-    monkeypatch.setattr(A, 'y', 3)
-    assert A.y == 3
-    monkeypatch.undo()
-    assert not hasattr(A, 'y')
-
-def test_delattr():
-    class A:
-        x = 1
-    monkeypatch = MonkeyPatch()
-    monkeypatch.delattr(A, 'x')
-    assert not hasattr(A, 'x')
-    monkeypatch.undo()
-    assert A.x == 1
-
-    monkeypatch = MonkeyPatch()
-    monkeypatch.delattr(A, 'x')
-    py.test.raises(AttributeError, "monkeypatch.delattr(A, 'y')")
-    monkeypatch.delattr(A, 'y', raising=False)
-    monkeypatch.setattr(A, 'x', 5)
-    assert A.x == 5
-    monkeypatch.undo()
-    assert A.x == 1
-
-def test_setitem():
-    d = {'x': 1}
-    monkeypatch = MonkeyPatch()
-    monkeypatch.setitem(d, 'x', 2)
-    monkeypatch.setitem(d, 'y', 1700)
-    assert d['x'] == 2
-    assert d['y'] == 1700
-    monkeypatch.setitem(d, 'x', 3)
-    assert d['x'] == 3
-    monkeypatch.undo()
-    assert d['x'] == 1
-    assert 'y' not in d
-    d['x'] = 5
-    monkeypatch.undo()
-    assert d['x'] == 5
-
-def test_delitem():
-    d = {'x': 1}
-    monkeypatch = MonkeyPatch()
-    monkeypatch.delitem(d, 'x')
-    assert 'x' not in d
-    monkeypatch.delitem(d, 'y', raising=False)
-    py.test.raises(KeyError, "monkeypatch.delitem(d, 'y')")
-    assert not d
-    monkeypatch.setitem(d, 'y', 1700)
-    assert d['y'] == 1700
-    d['hello'] = 'world'
-    monkeypatch.setitem(d, 'x', 1500)
-    assert d['x'] == 1500
-    monkeypatch.undo()
-    assert d == {'hello': 'world', 'x': 1}
-
-def test_setenv():
-    monkeypatch = MonkeyPatch()
-    monkeypatch.setenv('XYZ123', 2)
-    import os
-    assert os.environ['XYZ123'] == "2"
-    monkeypatch.undo()
-    assert 'XYZ123' not in os.environ
-
-def test_delenv():
-    name = 'xyz1234'
-    assert name not in os.environ
-    monkeypatch = MonkeyPatch()
-    py.test.raises(KeyError, "monkeypatch.delenv(%r, raising=True)" % name)
-    monkeypatch.delenv(name, raising=False)
-    monkeypatch.undo()
-    os.environ[name] = "1"
-    try:
-        monkeypatch = MonkeyPatch()
-        monkeypatch.delenv(name)
-        assert name not in os.environ
-        monkeypatch.setenv(name, "3")
-        assert os.environ[name] == "3"
-        monkeypatch.undo()
-        assert os.environ[name] == "1"
-    finally:
-        if name in os.environ:
-            del os.environ[name]
-
-def test_setenv_prepend():
-    import os
-    monkeypatch = MonkeyPatch()
-    monkeypatch.setenv('XYZ123', 2, prepend="-")
-    assert os.environ['XYZ123'] == "2"
-    monkeypatch.setenv('XYZ123', 3, prepend="-")
-    assert os.environ['XYZ123'] == "3-2"
-    monkeypatch.undo()
-    assert 'XYZ123' not in os.environ
-
-def test_monkeypatch_plugin(testdir):
-    reprec = testdir.inline_runsource("""
-        pytest_plugins = 'pytest_monkeypatch',
-        def test_method(monkeypatch):
-            assert monkeypatch.__class__.__name__ == "MonkeyPatch"
-    """)
-    res = reprec.countoutcomes()
-    assert tuple(res) == (1, 0, 0), res
-
-def test_syspath_prepend():
-    old = list(sys.path)
-    try:
-        monkeypatch = MonkeyPatch()
-        monkeypatch.syspath_prepend('world')
-        monkeypatch.syspath_prepend('hello')
-        assert sys.path[0] == "hello"
-        assert sys.path[1] == "world"
-        monkeypatch.undo()
-        assert sys.path == old
-        monkeypatch.undo()
-        assert sys.path == old
-    finally:
-        sys.path[:] = old
-
-
@@ -81,50 +81,3 @@ def pytest_terminal_summary(terminalreporter):
         proxyid = serverproxy.newPaste("python", s)
         pastebinurl = "%s%s" % (url.show, proxyid)
         tr.write_line("%s --> %s" %(msg, pastebinurl))
-
-
-class TestPasting:
-    def pytest_funcarg__pastebinlist(self, request):
-        mp = request.getfuncargvalue("monkeypatch")
-        pastebinlist = []
-        class MockProxy:
-            def newPaste(self, language, code):
-                pastebinlist.append((language, code))
-        mp.setitem(globals(), 'getproxy', MockProxy)
-        return pastebinlist
-
-    def test_failed(self, testdir, pastebinlist):
-        testpath = testdir.makepyfile("""
-            import py
-            def test_pass():
-                pass
-            def test_fail():
-                assert 0
-            def test_skip():
-                py.test.skip("")
-        """)
-        reprec = testdir.inline_run(testpath, "--paste=failed")
-        assert len(pastebinlist) == 1
-        assert pastebinlist[0][0] == "python"
-        s = pastebinlist[0][1]
-        assert s.find("def test_fail") != -1
-        assert reprec.countoutcomes() == [1,1,1]
-
-    def test_all(self, testdir, pastebinlist):
-        testpath = testdir.makepyfile("""
-            import py
-            def test_pass():
-                pass
-            def test_fail():
-                assert 0
-            def test_skip():
-                py.test.skip("")
-        """)
-        reprec = testdir.inline_run(testpath, "--pastebin=all")
-        assert reprec.countoutcomes() == [1,1,1]
-        assert len(pastebinlist) == 1
-        assert pastebinlist[0][0] == "python"
-        s = pastebinlist[0][1]
-        for x in 'test_fail test_skip skipped'.split():
-            assert s.find(x), (s, x)
-
@@ -17,7 +17,7 @@ def pytest_configure(config):
     if config.getvalue("looponfail"):
         raise config.Error("--pdb incompatible with --looponfail.")
     if config.option.dist != "no":
-        raise config.Error("--pdb incomptaible with distributing tests.")
+        raise config.Error("--pdb incompatible with distributing tests.")
     config.pluginmanager.register(PdbInvoke())

 class PdbInvoke:

@@ -108,54 +108,3 @@ def post_mortem(t):
 def set_trace():
     # again, a copy of the version in pdb.py
     Pdb().set_trace(sys._getframe().f_back)
-
-
-class TestPDB:
-    def pytest_funcarg__pdblist(self, request):
-        monkeypatch = request.getfuncargvalue("monkeypatch")
-        pdblist = []
-        def mypdb(*args):
-            pdblist.append(args)
-        monkeypatch.setitem(globals(), 'post_mortem', mypdb)
-        return pdblist
-
-    def test_incompatibility_messages(self, testdir):
-        Error = py.test.config.Error
-        py.test.raises(Error, "testdir.parseconfigure('--pdb', '--looponfail')")
-        py.test.raises(Error, "testdir.parseconfigure('--pdb', '-n 3')")
-        py.test.raises(Error, "testdir.parseconfigure('--pdb', '-d')")
-
-    def test_pdb_on_fail(self, testdir, pdblist):
-        rep = testdir.inline_runsource1('--pdb', """
-            def test_func():
-                assert 0
-        """)
-        assert rep.failed
-        assert len(pdblist) == 1
-        tb = py.code.Traceback(pdblist[0][0])
-        assert tb[-1].name == "test_func"
-
-    def test_pdb_on_skip(self, testdir, pdblist):
-        rep = testdir.inline_runsource1('--pdb', """
-            import py
-            def test_func():
-                py.test.skip("hello")
-        """)
-        assert rep.skipped
-        assert len(pdblist) == 0
-
-    def test_pdb_interaction(self, testdir):
-        p1 = testdir.makepyfile("""
-            def test_1():
-                i = 0
-                assert i == 1
-        """)
-        child = testdir.spawn_pytest("--pdb %s" % p1)
-        #child.expect(".*def test_1.*")
-        child.expect(".*i = 0.*")
-        child.expect("(Pdb)")
-        child.sendeof()
-        child.expect("1 failed")
-        if child.isalive():
-            child.wait()
-
@@ -394,55 +394,6 @@ class ReportRecorder(object):
         self.comregistry.unregister(self)
         self.hookrecorder.finish_recording()

-def test_reportrecorder(testdir):
-    registry = py._com.Registry()
-    recorder = testdir.getreportrecorder(registry)
-    assert not recorder.getfailures()
-    item = testdir.getitem("def test_func(): pass")
-    class rep:
-        excinfo = None
-        passed = False
-        failed = True
-        skipped = False
-        when = "call"
-
-    recorder.hook.pytest_runtest_logreport(report=rep)
-    failures = recorder.getfailures()
-    assert failures == [rep]
-    failures = recorder.getfailures()
-    assert failures == [rep]
-
-    class rep:
-        excinfo = None
-        passed = False
-        failed = False
-        skipped = True
-        when = "call"
-    rep.passed = False
-    rep.skipped = True
-    recorder.hook.pytest_runtest_logreport(report=rep)
-
-    modcol = testdir.getmodulecol("")
-    rep = modcol.config.hook.pytest_make_collect_report(collector=modcol)
-    rep.passed = False
-    rep.failed = True
-    rep.skipped = False
-    recorder.hook.pytest_collectreport(report=rep)
-
-    passed, skipped, failed = recorder.listoutcomes()
-    assert not passed and skipped and failed
-
-    numpassed, numskipped, numfailed = recorder.countoutcomes()
-    assert numpassed == 0
-    assert numskipped == 1
-    assert numfailed == 1
-    assert len(recorder.getfailedcollections()) == 1
-
-    recorder.unregister()
-    recorder.clear()
-    recorder.hook.pytest_runtest_logreport(report=rep)
-    py.test.raises(ValueError, "recorder.getfailures()")
-
 class LineComp:
     def __init__(self):
         self.stringio = py.io.TextIO()

@@ -498,21 +449,3 @@ class LineMatcher:
             raise AssertionError("expected line not found: %r" % line)
         extralines.extend(lines1)
         return extralines
-
-def test_parseconfig(testdir):
-    config1 = testdir.parseconfig()
-    config2 = testdir.parseconfig()
-    assert config2 != config1
-    assert config1 != py.test.config
-
-def test_testdir_runs_with_plugin(testdir):
-    testdir.makepyfile("""
-        pytest_plugins = "pytest_pytester"
-        def test_hello(testdir):
-            assert 1
-    """)
-    result = testdir.runpytest()
-    assert result.stdout.fnmatch_lines([
-        "*1 passed*"
-    ])
-
@@ -118,82 +118,3 @@ class WarningsRecorder:

     def finalize(self):
         py.std.warnings.showwarning = self.old_showwarning
-
-def test_WarningRecorder():
-    showwarning = py.std.warnings.showwarning
-    rec = WarningsRecorder()
-    assert py.std.warnings.showwarning != showwarning
-    assert not rec.list
-    py.std.warnings.warn_explicit("hello", UserWarning, "xyz", 13)
-    assert len(rec.list) == 1
-    py.std.warnings.warn(DeprecationWarning("hello"))
-    assert len(rec.list) == 2
-    warn = rec.pop()
-    assert str(warn.message) == "hello"
-    l = rec.list
-    rec.clear()
-    assert len(rec.list) == 0
-    assert l is rec.list
-    py.test.raises(AssertionError, "rec.pop()")
-    rec.finalize()
-    assert showwarning == py.std.warnings.showwarning
-
-def test_recwarn_functional(testdir):
-    reprec = testdir.inline_runsource("""
-        pytest_plugins = 'pytest_recwarn',
-        import warnings
-        oldwarn = warnings.showwarning
-        def test_method(recwarn):
-            assert warnings.showwarning != oldwarn
-            warnings.warn("hello")
-            warn = recwarn.pop()
-            assert isinstance(warn.message, UserWarning)
-        def test_finalized():
-            assert warnings.showwarning == oldwarn
-    """)
-    res = reprec.countoutcomes()
-    assert tuple(res) == (2, 0, 0), res
-
-#
-# ============ test py.test.deprecated_call() ==============
-#
-
-def dep(i):
-    if i == 0:
-        py.std.warnings.warn("is deprecated", DeprecationWarning)
-    return 42
-
-reg = {}
-def dep_explicit(i):
-    if i == 0:
-        py.std.warnings.warn_explicit("dep_explicit", category=DeprecationWarning,
-                                      filename="hello", lineno=3)
-
-def test_deprecated_call_raises():
-    excinfo = py.test.raises(AssertionError,
-                             "py.test.deprecated_call(dep, 3)")
-    assert str(excinfo).find("did not produce") != -1
-
-def test_deprecated_call():
-    py.test.deprecated_call(dep, 0)
-
-def test_deprecated_call_ret():
-    ret = py.test.deprecated_call(dep, 0)
-    assert ret == 42
-
-def test_deprecated_call_preserves():
-    r = py.std.warnings.onceregistry.copy()
-    f = py.std.warnings.filters[:]
-    test_deprecated_call_raises()
-    test_deprecated_call()
-    assert r == py.std.warnings.onceregistry
-    assert f == py.std.warnings.filters
-
-def test_deprecated_explicit_call_raises():
-    py.test.raises(AssertionError,
-                   "py.test.deprecated_call(dep_explicit, 3)")
-
-def test_deprecated_explicit_call():
-    py.test.deprecated_call(dep_explicit, 0)
-    py.test.deprecated_call(dep_explicit, 0)
-
@@ -2,6 +2,7 @@
 perform ReST syntax, local and remote reference tests on .rst/.txt files.
 """
 import py
+import sys

 def pytest_addoption(parser):
     group = parser.addgroup("ReST", "ReST documentation check options")

@@ -348,144 +349,3 @@ def localrefcheck(tryfn, path, lineno):
     else:
         py.test.fail("anchor reference error %s#%s in %s:%d" %(
             tryfn, anchor, path.basename, lineno+1))
-
-
-#
-# PLUGIN tests
-#
-
-def test_deindent():
-    assert deindent('foo') == 'foo'
-    assert deindent('foo\n bar') == 'foo\n bar'
-    assert deindent(' foo\n bar\n') == 'foo\nbar\n'
-    assert deindent(' foo\n\n bar\n') == 'foo\n\nbar\n'
-    assert deindent(' foo\n bar\n') == 'foo\n bar\n'
-    assert deindent(' foo\n bar\n') == ' foo\nbar\n'
-
-class TestApigenLinkRole:
-    disabled = True
-
-    # these tests are moved here from the former py/doc/conftest.py
-    def test_resolve_linkrole(self):
-        from py.__.doc.conftest import get_apigen_relpath
-        apigen_relpath = get_apigen_relpath()
-
-        assert resolve_linkrole('api', 'py.foo.bar', False) == (
-            'py.foo.bar', apigen_relpath + 'api/foo.bar.html')
-        assert resolve_linkrole('api', 'py.foo.bar()', False) == (
-            'py.foo.bar()', apigen_relpath + 'api/foo.bar.html')
-        assert resolve_linkrole('api', 'py', False) == (
-            'py', apigen_relpath + 'api/index.html')
-        py.test.raises(AssertionError, 'resolve_linkrole("api", "foo.bar")')
-        assert resolve_linkrole('source', 'py/foo/bar.py', False) == (
-            'py/foo/bar.py', apigen_relpath + 'source/foo/bar.py.html')
-        assert resolve_linkrole('source', 'py/foo/', False) == (
-            'py/foo/', apigen_relpath + 'source/foo/index.html')
-        assert resolve_linkrole('source', 'py/', False) == (
-            'py/', apigen_relpath + 'source/index.html')
-        py.test.raises(AssertionError, 'resolve_linkrole("source", "/foo/bar/")')
-
-    def test_resolve_linkrole_check_api(self):
-        assert resolve_linkrole('api', 'py.test.ensuretemp')
-        py.test.raises(AssertionError, "resolve_linkrole('api', 'py.foo.baz')")
-
-    def test_resolve_linkrole_check_source(self):
-        assert resolve_linkrole('source', 'py/path/common.py')
-        py.test.raises(AssertionError,
-            "resolve_linkrole('source', 'py/foo/bar.py')")
-
-
-class TestDoctest:
-    def pytest_funcarg__testdir(self, request):
-        testdir = request.getfuncargvalue("testdir")
-        assert request.module.__name__ == __name__
-        testdir.makepyfile(confrest="from py.__.rest.resthtml import Project")
-        for p in testdir.plugins:
-            if p == globals():
-                break
-        else:
-            testdir.plugins.append(globals())
-        return testdir
-
-    def test_doctest_extra_exec(self, testdir):
-        xtxt = testdir.maketxtfile(x="""
-            hello::
-                .. >>> raise ValueError
-                   >>> None
-        """)
-        reprec = testdir.inline_run(xtxt)
-        passed, skipped, failed = reprec.countoutcomes()
-        assert failed == 1
-
-    def test_doctest_basic(self, testdir):
-        xtxt = testdir.maketxtfile(x="""
-            ..
-                >>> from os.path import abspath
-
-            hello world
-
-               >>> assert abspath
-               >>> i=3
-               >>> print (i)
-               3
-
-            yes yes
-
-                >>> i
-                3
-
-            end
-        """)
-        reprec = testdir.inline_run(xtxt)
-        passed, skipped, failed = reprec.countoutcomes()
-        assert failed == 0
-        assert passed + skipped == 2
-
-    def test_doctest_eol(self, testdir):
-        ytxt = testdir.maketxtfile(y=".. >>> 1 + 1\r\n 2\r\n\r\n")
-        reprec = testdir.inline_run(ytxt)
-        passed, skipped, failed = reprec.countoutcomes()
-        assert failed == 0
-        assert passed + skipped == 2
-
-    def test_doctest_indentation(self, testdir):
-        footxt = testdir.maketxtfile(foo=
-            '..\n >>> print ("foo\\n bar")\n foo\n bar\n')
-        reprec = testdir.inline_run(footxt)
-        passed, skipped, failed = reprec.countoutcomes()
-        assert failed == 0
-        assert skipped + passed == 2
-
-    def test_js_ignore(self, testdir):
-        xtxt = testdir.maketxtfile(xtxt="""
-            `blah`_
-
-            .. _`blah`: javascript:some_function()
-        """)
-        reprec = testdir.inline_run(xtxt)
-        passed, skipped, failed = reprec.countoutcomes()
-        assert failed == 0
-        assert skipped + passed == 3
-
-    def test_pytest_doctest_prepare_content(self, testdir):
-        l = []
-        class MyPlugin:
-            def pytest_doctest_prepare_content(self, content):
-                l.append(content)
-                return content.replace("False", "True")
-
-        testdir.plugins.append(MyPlugin())
-
-        xtxt = testdir.maketxtfile(x="""
-            hello:
-
-                >>> 2 == 2
-                False
-
-        """)
-        reprec = testdir.inline_run(xtxt)
-        assert len(l) == 1
-        passed, skipped, failed = reprec.countoutcomes()
-        assert passed >= 1
-        assert not failed
-        assert skipped <= 1
@@ -83,162 +83,3 @@ class ResultLog(object):
     def pytest_internalerror(self, excrepr):
         path = excrepr.reprcrash.path
         self.write_log_entry(path, '!', str(excrepr))
-
-
-# ===============================================================================
-#
-# plugin tests
-#
-# ===============================================================================
-
-import os
-
-def test_generic_path():
-    from py.__.test.collect import Node, Item, FSCollector
-    p1 = Node('a')
-    assert p1.fspath is None
-    p2 = Node('B', parent=p1)
-    p3 = Node('()', parent = p2)
-    item = Item('c', parent = p3)
-
-    res = generic_path(item)
-    assert res == 'a.B().c'
-
-    p0 = FSCollector('proj/test')
-    p1 = FSCollector('proj/test/a', parent=p0)
-    p2 = Node('B', parent=p1)
-    p3 = Node('()', parent = p2)
-    p4 = Node('c', parent=p3)
-    item = Item('[1]', parent = p4)
-
-    res = generic_path(item)
-    assert res == 'test/a:B().c[1]'
-
-def test_write_log_entry():
-    reslog = ResultLog(None)
-    reslog.logfile = py.io.TextIO()
-    reslog.write_log_entry('name', '.', '')
-    entry = reslog.logfile.getvalue()
-    assert entry[-1] == '\n'
-    entry_lines = entry.splitlines()
-    assert len(entry_lines) == 1
-    assert entry_lines[0] == '. name'
-
-    reslog.logfile = py.io.TextIO()
-    reslog.write_log_entry('name', 's', 'Skipped')
-    entry = reslog.logfile.getvalue()
-    assert entry[-1] == '\n'
-    entry_lines = entry.splitlines()
-    assert len(entry_lines) == 2
-    assert entry_lines[0] == 's name'
-    assert entry_lines[1] == ' Skipped'
-
-    reslog.logfile = py.io.TextIO()
-    reslog.write_log_entry('name', 's', 'Skipped\n')
-    entry = reslog.logfile.getvalue()
-    assert entry[-1] == '\n'
-    entry_lines = entry.splitlines()
-    assert len(entry_lines) == 2
-    assert entry_lines[0] == 's name'
-    assert entry_lines[1] == ' Skipped'
-
-    reslog.logfile = py.io.TextIO()
-    longrepr = ' tb1\n tb 2\nE tb3\nSome Error'
-    reslog.write_log_entry('name', 'F', longrepr)
-    entry = reslog.logfile.getvalue()
-    assert entry[-1] == '\n'
-    entry_lines = entry.splitlines()
-    assert len(entry_lines) == 5
-    assert entry_lines[0] == 'F name'
-    assert entry_lines[1:] == [' '+line for line in longrepr.splitlines()]
-
-
-class TestWithFunctionIntegration:
-    # XXX (hpk) i think that the resultlog plugin should
-    # provide a Parser object so that one can remain
-    # ignorant regarding formatting details.
-    def getresultlog(self, testdir, arg):
-        resultlog = testdir.tmpdir.join("resultlog")
-        testdir.plugins.append("resultlog")
-        args = ["--resultlog=%s" % resultlog] + [arg]
-        testdir.runpytest(*args)
-        return [x for x in resultlog.readlines(cr=0) if x]
-
-    def test_collection_report(self, testdir):
-        ok = testdir.makepyfile(test_collection_ok="")
-        skip = testdir.makepyfile(test_collection_skip="import py ; py.test.skip('hello')")
-        fail = testdir.makepyfile(test_collection_fail="XXX")
-        lines = self.getresultlog(testdir, ok)
-        assert not lines
-
-        lines = self.getresultlog(testdir, skip)
-        assert len(lines) == 2
-        assert lines[0].startswith("S ")
-        assert lines[0].endswith("test_collection_skip.py")
-        assert lines[1].startswith(" ")
-        assert lines[1].endswith("test_collection_skip.py:1: Skipped: 'hello'")
-
-        lines = self.getresultlog(testdir, fail)
-        assert lines
-        assert lines[0].startswith("F ")
-        assert lines[0].endswith("test_collection_fail.py"), lines[0]
-        for x in lines[1:]:
-            assert x.startswith(" ")
-        assert "XXX" in "".join(lines[1:])
-
-    def test_log_test_outcomes(self, testdir):
-        mod = testdir.makepyfile(test_mod="""
-            import py
-            def test_pass(): pass
-            def test_skip(): py.test.skip("hello")
-            def test_fail(): raise ValueError("val")
-        """)
-        lines = self.getresultlog(testdir, mod)
-        assert len(lines) >= 3
-        assert lines[0].startswith(". ")
-        assert lines[0].endswith("test_pass")
-        assert lines[1].startswith("s "), lines[1]
-        assert lines[1].endswith("test_skip")
-        assert lines[2].find("hello") != -1
-
-        assert lines[3].startswith("F ")
-        assert lines[3].endswith("test_fail")
-        tb = "".join(lines[4:])
-        assert tb.find("ValueError") != -1
-
-    def test_internal_exception(self):
-        # they are produced for example by a teardown failing
-        # at the end of the run
-        try:
-            raise ValueError
-        except ValueError:
-            excinfo = py.code.ExceptionInfo()
-        reslog = ResultLog(py.io.TextIO())
-        reslog.pytest_internalerror(excinfo.getrepr())
-        entry = reslog.logfile.getvalue()
-        entry_lines = entry.splitlines()
-
-        assert entry_lines[0].startswith('! ')
-        assert os.path.basename(__file__)[:-1] in entry_lines[0] #.py/.pyc
-        assert entry_lines[-1][0] == ' '
-        assert 'ValueError' in entry
-
-def test_generic(testdir, LineMatcher):
-    testdir.plugins.append("resultlog")
-    testdir.makepyfile("""
-        import py
-        def test_pass():
-            pass
-        def test_fail():
-            assert 0
-        def test_skip():
-            py.test.skip("")
-    """)
-    testdir.runpytest("--resultlog=result.log")
-    lines = testdir.tmpdir.join("result.log").readlines(cr=0)
-    LineMatcher(lines).fnmatch_lines([
-        ". *:test_pass",
-        "F *:test_fail",
-        "s *:test_skip",
-    ])
-
@@ -3,7 +3,6 @@ collect and run test items and create reports.
 """

 import py

-from py.__.test.outcome import Skipped

 #
@@ -14,18 +14,3 @@ import py
 def pytest_funcarg__tmpdir(request):
     name = request.function.__name__
     return request.config.mktemp(name, numbered=True)
-
-# ===============================================================================
-#
-# plugin tests
-#
-# ===============================================================================
-#
-
-def test_funcarg(testdir):
-    from py.__.test.funcargs import FuncargRequest
-    item = testdir.getitem("def test_func(tmpdir): pass")
-    p = pytest_funcarg__tmpdir(FuncargRequest(item))
-    assert p.check()
-    bn = p.basename.strip("0123456789-")
-    assert bn.endswith("test_func")
@@ -72,63 +72,3 @@ class UnitTestFunction(py.test.collect.Function):
         instance = py.builtin._getimself(self.obj)
         instance.tearDown()

-
-def test_simple_unittest(testdir):
-    testpath = testdir.makepyfile("""
-        import unittest
-        pytest_plugins = "pytest_unittest"
-        class MyTestCase(unittest.TestCase):
-            def testpassing(self):
-                self.assertEquals('foo', 'foo')
-            def test_failing(self):
-                self.assertEquals('foo', 'bar')
-    """)
-    reprec = testdir.inline_run(testpath)
-    assert reprec.matchreport("testpassing").passed
-    assert reprec.matchreport("test_failing").failed
-
-def test_setup(testdir):
-    testpath = testdir.makepyfile(test_two="""
-        import unittest
-        class MyTestCase(unittest.TestCase):
-            def setUp(self):
-                self.foo = 1
-            def test_setUp(self):
-                self.assertEquals(1, self.foo)
-    """)
-    reprec = testdir.inline_run(testpath)
-    rep = reprec.matchreport("test_setUp")
-    assert rep.passed
-
-def test_new_instances(testdir):
-    testpath = testdir.makepyfile("""
-        import unittest
-        class MyTestCase(unittest.TestCase):
-            def test_func1(self):
-                self.x = 2
-            def test_func2(self):
-                assert not hasattr(self, 'x')
-    """)
-    reprec = testdir.inline_run(testpath)
-    reprec.assertoutcome(passed=2)
-
-def test_teardown(testdir):
-    testpath = testdir.makepyfile(test_three="""
-        import unittest
-        pytest_plugins = "pytest_unittest" # XXX
-        class MyTestCase(unittest.TestCase):
-            l = []
-            def test_one(self):
-                pass
-            def tearDown(self):
-                self.l.append(None)
-        class Second(unittest.TestCase):
-            def test_check(self):
-                self.assertEquals(MyTestCase.l, [None])
-    """)
-    reprec = testdir.inline_run(testpath)
-    passed, skipped, failed = reprec.countoutcomes()
-    assert failed == 0, failed
-    assert passed == 2
-    assert passed + skipped + failed == 2
-
@@ -63,31 +63,3 @@ def pytest_terminal_summary(terminalreporter):
         fspath, lineno, modpath = rep.item.reportinfo()
         pos = "%s %s:%d: unexpectedly passing" %(modpath, fspath, lineno)
         tr._tw.line(pos)
-
-
-# =============================================================================
-#
-# plugin tests
-#
-# =============================================================================
-
-def test_xfail(testdir):
-    p = testdir.makepyfile(test_one="""
-        import py
-        @py.test.mark.xfail
-        def test_this():
-            assert 0
-
-        @py.test.mark.xfail
-        def test_that():
-            assert 1
-    """)
-    result = testdir.runpytest(p)
-    extra = result.stdout.fnmatch_lines([
-        "*expected failures*",
-        "*test_one.test_this*test_one.py:4*",
-        "*UNEXPECTEDLY PASSING*",
-        "*test_that*",
-    ])
-    assert result.ret == 1
-
@@ -1 +0,0 @@
-#

@@ -1 +0,0 @@
-#
@@ -512,7 +512,7 @@ class TestSocketGateway(SocketGatewaySetup, BasicRemoteExecution):

 class TestSshGateway(BasicRemoteExecution):
     def setup_class(cls):
-        from py.__.conftest import getspecssh
+        from conftest import getspecssh
         cls.sshhost = getspecssh().ssh
         cls.gw = py.execnet.SshGateway(cls.sshhost)
@@ -1,7 +1,7 @@
 import sys
 import py
-from py.__.path.testing.common import CommonFSTests
 from py.__.path import svnwc as svncommon
+from testing.path.common import CommonFSTests

 class CommonSvnTests(CommonFSTests):
@@ -1,7 +1,7 @@
 import py
 import sys
 from py.path import local
-from py.__.path.testing import common
+from testing.path import common

 def pytest_funcarg__path1(request):
     def setup():
@@ -1,5 +1,5 @@
 import py
-from py.__.path.testing import svntestbase
+from testing.path import svntestbase
 from py.path import SvnAuth
 import time
 import sys
@@ -1,8 +1,8 @@
 import py
 from py.__.path.svnurl import InfoSvnCommand
-from py.__.path.testing.svntestbase import CommonSvnTests
 import datetime
 import time
+from testing.path.svntestbase import CommonSvnTests

 def pytest_funcarg__path1(request):
     repo, wc = request.getfuncargvalue("repowc1")
@@ -1,8 +1,8 @@
 import py
 import sys
-from py.__.path.testing.svntestbase import CommonSvnTests
 from py.__.path.svnwc import InfoSvnWCCommand, XMLWCStatus, parse_wcinfotime
 from py.__.path import svnwc as svncommon
+from testing.path.svntestbase import CommonSvnTests

 if sys.platform == 'win32':
     def normpath(p):
@@ -124,7 +124,7 @@ class TestPickleChannelFunctional:
         channel = self.gw.remote_exec("""
            from py.__.test.dist.mypickle import PickleChannel
            channel = PickleChannel(channel)
-           from py.__.test.dist.testing.test_mypickle import A
+           from testing.pytest.dist.test_mypickle import A
            a1 = A()
            a1.hello = 10
            channel.send(a1)

@@ -143,7 +143,7 @@ class TestPickleChannelFunctional:
         channel = self.gw.remote_exec("""
            from py.__.test.dist.mypickle import PickleChannel
            channel = PickleChannel(channel)
-           from py.__.test.dist.testing.test_mypickle import A
+           from testing.pytest.dist.test_mypickle import A
            l = [A() for i in range(10)]
            channel.send(l)
            other_l = channel.receive()

@@ -173,7 +173,7 @@ class TestPickleChannelFunctional:
         channel = self.gw.remote_exec("""
            from py.__.test.dist.mypickle import PickleChannel
            channel = PickleChannel(channel)
-           from py.__.test.dist.testing.test_mypickle import A
+           from testing.pytest.dist.test_mypickle import A
            a1 = A()
            a1.hello = 10
            channel.send(a1)

@@ -194,7 +194,7 @@ class TestPickleChannelFunctional:
         channel = self.gw.remote_exec("""
            from py.__.test.dist.mypickle import PickleChannel
            channel = PickleChannel(channel)
-           from py.__.test.dist.testing.test_mypickle import A
+           from testing.pytest.dist.test_mypickle import A
            a1 = A()
            a1.hello = 10
            channel.send(a1)

@@ -218,7 +218,7 @@ class TestPickleChannelFunctional:
         channel = self.gw.remote_exec("""
            from py.__.test.dist.mypickle import PickleChannel
            channel = PickleChannel(channel)
-           from py.__.test.dist.testing.test_mypickle import A
+           from testing.pytest.dist.test_mypickle import A
            a1 = A()
            channel.send(a1)
            channel.send(a1)
@@ -1,13 +1,16 @@
 import py

 pytest_plugins = "pytester"
+plugindir = py.path.local(py.__file__).dirpath('test', 'plugin')
+from py.__.test.defaultconftest import pytest_plugins as default_plugins

 def pytest_collect_file(path, parent):
     if path.basename.startswith("pytest_") and path.ext == ".py":
         mod = parent.Module(path, parent=parent)
         return mod

 # decorate testdir to contain plugin under test
 # for plugin test we try to automatically make sure that
 # the according plugin is loaded
 def pytest_funcarg__testdir(request):
     testdir = request.getfuncargvalue("testdir")
     #for obj in (request.cls, request.module):

@@ -16,9 +19,12 @@ def pytest_funcarg__testdir(request):
     #        break
     #else:
     modname = request.module.__name__.split(".")[-1]
-    if modname.startswith("pytest_"):
-        testdir.plugins.append(vars(request.module))
-        testdir.plugins.append(modname)
+    if modname.startswith("test_pytest_"):
+        modname = modname[5:]
+        if plugindir.join("%s.py" % modname).check():
+            if modname[7:] not in default_plugins:
+                testdir.plugins.append(vars(request.module))
+                testdir.plugins.append(modname)
     #elif modname.startswith("test_pytest"):
     #    pname = modname[5:]
     #    assert pname not in testdir.plugins
@@ -0,0 +1,50 @@
+import py
+from py.__.test.plugin.pytest__pytest import HookRecorder
+
+def test_hookrecorder_basic():
+    comregistry = py._com.Registry()
+    rec = HookRecorder(comregistry)
+    class ApiClass:
+        def xyz(self, arg):
+            pass
+    rec.start_recording(ApiClass)
+    rec.hook.xyz(arg=123)
+    call = rec.popcall("xyz")
+    assert call.arg == 123
+    assert call._name == "xyz"
+    py.test.raises(ValueError, "rec.popcall('abc')")
+
+def test_hookrecorder_basic_no_args_hook():
+    import sys
+    comregistry = py._com.Registry()
+    rec = HookRecorder(comregistry)
+    apimod = type(sys)('api')
+    def xyz():
+        pass
+    apimod.xyz = xyz
+    rec.start_recording(apimod)
+    rec.hook.xyz()
+    call = rec.popcall("xyz")
+    assert call._name == "xyz"
+
+reg = py._com.comregistry
+def test_functional_default(testdir, _pytest):
+    assert _pytest.comregistry == py._com.comregistry
+    assert _pytest.comregistry != reg
+
+def test_functional(testdir, linecomp):
+    reprec = testdir.inline_runsource("""
+        import py
+        pytest_plugins="_pytest"
+        def test_func(_pytest):
+            class ApiClass:
+                def xyz(self, arg): pass
+            rec = _pytest.gethookrecorder(ApiClass)
+            class Plugin:
+                def xyz(self, arg):
+                    return arg + 1
+            rec._comregistry.register(Plugin())
+            res = rec.hook.xyz(arg=41)
+            assert res == [42]
+    """)
+    reprec.assertoutcome(passed=1)
@@ -0,0 +1,40 @@
+def test_functional(testdir):
+    testdir.makepyfile("""
+        def test_hello():
+            x = 3
+            assert x == 4
+    """)
+    result = testdir.runpytest()
+    assert "3 == 4" in result.stdout.str()
+    result = testdir.runpytest("--no-assert")
+    assert "3 == 4" not in result.stdout.str()
+
+def test_traceback_failure(testdir):
+    p1 = testdir.makepyfile("""
+        def g():
+            return 2
+        def f(x):
+            assert x == g()
+        def test_onefails():
+            f(3)
+    """)
+    result = testdir.runpytest(p1)
+    result.stdout.fnmatch_lines([
+        "*test_traceback_failure.py F",
+        "====* FAILURES *====",
+        "____*____",
+        "",
+        " def test_onefails():",
+        "> f(3)",
+        "",
+        "*test_*.py:6: ",
+        "_ _ _ *",
+        #"",
+        " def f(x):",
+        "> assert x == g()",
+        "E assert 3 == 2",
+        "E + where 2 = g()",
+        "",
+        "*test_traceback_failure.py:4: AssertionError"
+    ])
+
@@ -0,0 +1,82 @@
+import py
+from py.__.test.plugin.pytest_default import pytest_report_iteminfo
+
+def test_implied_different_sessions(tmpdir):
+    def x(*args):
+        config = py.test.config._reparse([tmpdir] + list(args))
+        try:
+            config.pluginmanager.do_configure(config)
+        except ValueError:
+            return Exception
+        return getattr(config._sessionclass, '__name__', None)
+    assert x() == None
+    assert x('-d') == 'DSession'
+    assert x('--dist=each') == 'DSession'
+    assert x('-n3') == 'DSession'
+    assert x('-f') == 'LooponfailingSession'
+
+def test_plugin_specify(testdir):
+    testdir.chdir()
+    config = py.test.raises(ImportError, """
+        testdir.parseconfig("-p", "nqweotexistent")
+    """)
+    #py.test.raises(ImportError,
+    # "config.pluginmanager.do_configure(config)"
+    #)
+
+def test_plugin_already_exists(testdir):
+    config = testdir.parseconfig("-p", "default")
+    assert config.option.plugins == ['default']
+    config.pluginmanager.do_configure(config)
+
+
+class TestDistOptions:
+    def test_getxspecs(self, testdir):
+        config = testdir.parseconfigure("--tx=popen", "--tx", "ssh=xyz")
+        xspecs = config.getxspecs()
+        assert len(xspecs) == 2
+        print(xspecs)
+        assert xspecs[0].popen
+        assert xspecs[1].ssh == "xyz"
+
+    def test_xspecs_multiplied(self, testdir):
+        xspecs = testdir.parseconfigure("--tx=3*popen",).getxspecs()
+        assert len(xspecs) == 3
+        assert xspecs[1].popen
+
+    def test_getrsyncdirs(self, testdir):
+        config = testdir.parseconfigure('--rsyncdir=' + str(testdir.tmpdir))
+        roots = config.getrsyncdirs()
+        assert len(roots) == 1 + 1
+        assert testdir.tmpdir in roots
+
+    def test_getrsyncdirs_with_conftest(self, testdir):
+        p = py.path.local()
+        for bn in 'x y z'.split():
+            p.mkdir(bn)
+        testdir.makeconftest("""
+            rsyncdirs= 'x',
+        """)
+        config = testdir.parseconfigure(testdir.tmpdir, '--rsyncdir=y', '--rsyncdir=z')
+        roots = config.getrsyncdirs()
+        assert len(roots) == 3 + 1
+        assert py.path.local('y') in roots
+        assert py.path.local('z') in roots
+        assert testdir.tmpdir.join('x') in roots
+
+def test_dist_options(testdir):
+    config = testdir.parseconfigure("-n 2")
+    assert config.option.dist == "load"
+    assert config.option.tx == ['popen'] * 2
+
+    config = testdir.parseconfigure("-d")
+    assert config.option.dist == "load"
+
+def test_pytest_report_iteminfo():
+    class FakeItem(object):
+
+        def reportinfo(self):
+            return "-reportinfo-"
+
+    res = pytest_report_iteminfo(FakeItem())
+    assert res == "-reportinfo-"
@@ -0,0 +1,103 @@
+from py.__.test.plugin.pytest_doctest import DoctestModule, DoctestTextfile
+
+class TestDoctests:
+
+    def test_collect_testtextfile(self, testdir):
+        testdir.maketxtfile(whatever="")
+        checkfile = testdir.maketxtfile(test_something="""
+            alskdjalsdk
+            >>> i = 5
+            >>> i-1
+            4
+        """)
+        for x in (testdir.tmpdir, checkfile):
+            #print "checking that %s returns custom items" % (x,)
+            items, reprec = testdir.inline_genitems(x)
+            assert len(items) == 1
+            assert isinstance(items[0], DoctestTextfile)
+
+    def test_collect_module(self, testdir):
+        path = testdir.makepyfile(whatever="#")
+        for p in (path, testdir.tmpdir):
+            items, reprec = testdir.inline_genitems(p, '--doctest-modules')
+            assert len(items) == 1
+            assert isinstance(items[0], DoctestModule)
+
+    def test_simple_doctestfile(self, testdir):
+        p = testdir.maketxtfile(test_doc="""
+            >>> x = 1
+            >>> x == 1
+            False
+        """)
+        reprec = testdir.inline_run(p)
+        reprec.assertoutcome(failed=1)
+
+    def test_doctest_unexpected_exception(self, testdir):
+        from py.__.test.outcome import Failed
+
+        p = testdir.maketxtfile("""
+            >>> i = 0
+            >>> i = 1
+            >>> x
+            2
+        """)
+        reprec = testdir.inline_run(p)
+        call = reprec.getcall("pytest_runtest_logreport")
+        assert call.report.failed
+        assert call.report.longrepr
+        # XXX
+        #testitem, = items
+        #excinfo = py.test.raises(Failed, "testitem.runtest()")
+        #repr = testitem.repr_failure(excinfo, ("", ""))
+        #assert repr.reprlocation
+
+    def test_doctestmodule(self, testdir):
+        p = testdir.makepyfile("""
+            '''
+            >>> x = 1
+            >>> x == 1
+            False
+
+            '''
+        """)
+        reprec = testdir.inline_run(p, "--doctest-modules")
+        reprec.assertoutcome(failed=1)
+
+    def test_doctestmodule_external(self, testdir):
+        p = testdir.makepyfile("""
+            #
+            def somefunc():
+                '''
+                >>> i = 0
+                >>> i + 1
+                2
+                '''
+        """)
+        result = testdir.runpytest(p, "--doctest-modules")
+        result.stdout.fnmatch_lines([
+            '004 *>>> i = 0',
+            '005 *>>> i + 1',
+            '*Expected:',
+            "* 2",
+            "*Got:",
+            "* 1",
+            "*:5: DocTestFailure"
+        ])
+
+
+    def test_txtfile_failing(self, testdir):
+        p = testdir.maketxtfile("""
+            >>> i = 0
+            >>> i + 1
+            2
+        """)
+        result = testdir.runpytest(p)
+        result.stdout.fnmatch_lines([
+            '001 >>> i = 0',
+            '002 >>> i + 1',
+            'Expected:',
+            " 2",
+            "Got:",
+            " 1",
+            "*test_txtfile_failing.txt:2: DocTestFailure"
+        ])
@@ -0,0 +1,12 @@
+def test_execnetplugin(testdir):
+    reprec = testdir.inline_runsource("""
+        import py
+        import sys
+        def test_hello():
+            sys._gw = py.execnet.PopenGateway()
+        def test_world():
+            assert hasattr(sys, '_gw')
+            assert sys._gw not in sys._gw._cleanup._activegateways
+
+    """, "-s", "--debug")
+    reprec.assertoutcome(passed=2)
@@ -0,0 +1,17 @@
+import py
+
+def test_functional(testdir):
+    py.test.importorskip("figleaf")
+    testdir.plugins.append("figleaf")
+    testdir.makepyfile("""
+        def f():
+            x = 42
+        def test_whatever():
+            pass
+    """)
+    result = testdir.runpytest('-F')
+    assert result.ret == 0
+    assert result.stdout.fnmatch_lines([
+        '*figleaf html*'
+    ])
+    #print result.stdout.str()
@@ -0,0 +1,12 @@
+import py
+
+def test_functional(testdir):
+    testdir.makepyfile("""
+        def test_pass():
+            pass
+    """)
+    testdir.runpytest("--hooklog=hook.log")
+    s = testdir.tmpdir.join("hook.log").read()
+    assert s.find("pytest_sessionstart") != -1
+    assert s.find("ItemTestReport") != -1
+    assert s.find("sessionfinish") != -1
@@ -0,0 +1,28 @@
+import py
+from py.__.test.plugin.pytest_keyword import Mark
+
+def test_pytest_mark_api():
+    mark = Mark()
+    py.test.raises(TypeError, "mark(x=3)")
+
+    def f(): pass
+    mark.hello(f)
+    assert f.hello
+
+    mark.world(x=3, y=4)(f)
+    assert f.world
+    assert f.world.x == 3
+    assert f.world.y == 4
+
+    py.test.raises(TypeError, "mark.some(x=3)(f=5)")
+
+def test_mark_plugin(testdir):
+    p = testdir.makepyfile("""
+        import py
+        pytest_plugins = "keyword"
+        @py.test.mark.hello
+        def test_hello():
+            assert hasattr(test_hello, 'hello')
+    """)
+    result = testdir.runpytest(p)
+    assert result.stdout.fnmatch_lines(["*passed*"])
@@ -0,0 +1,137 @@
import os, sys
import py
from py.__.test.plugin.pytest_monkeypatch import MonkeyPatch

def test_setattr():
    class A:
        x = 1
    monkeypatch = MonkeyPatch()
    monkeypatch.setattr(A, 'x', 2)
    assert A.x == 2
    monkeypatch.setattr(A, 'x', 3)
    assert A.x == 3
    monkeypatch.undo()
    assert A.x == 1

    A.x = 5
    monkeypatch.undo() # double-undo makes no modification
    assert A.x == 5

    monkeypatch.setattr(A, 'y', 3)
    assert A.y == 3
    monkeypatch.undo()
    assert not hasattr(A, 'y')

def test_delattr():
    class A:
        x = 1
    monkeypatch = MonkeyPatch()
    monkeypatch.delattr(A, 'x')
    assert not hasattr(A, 'x')
    monkeypatch.undo()
    assert A.x == 1

    monkeypatch = MonkeyPatch()
    monkeypatch.delattr(A, 'x')
    py.test.raises(AttributeError, "monkeypatch.delattr(A, 'y')")
    monkeypatch.delattr(A, 'y', raising=False)
    monkeypatch.setattr(A, 'x', 5)
    assert A.x == 5
    monkeypatch.undo()
    assert A.x == 1

def test_setitem():
    d = {'x': 1}
    monkeypatch = MonkeyPatch()
    monkeypatch.setitem(d, 'x', 2)
    monkeypatch.setitem(d, 'y', 1700)
    assert d['x'] == 2
    assert d['y'] == 1700
    monkeypatch.setitem(d, 'x', 3)
    assert d['x'] == 3
    monkeypatch.undo()
    assert d['x'] == 1
    assert 'y' not in d
    d['x'] = 5
    monkeypatch.undo()
    assert d['x'] == 5

def test_delitem():
    d = {'x': 1}
    monkeypatch = MonkeyPatch()
    monkeypatch.delitem(d, 'x')
    assert 'x' not in d
    monkeypatch.delitem(d, 'y', raising=False)
    py.test.raises(KeyError, "monkeypatch.delitem(d, 'y')")
    assert not d
    monkeypatch.setitem(d, 'y', 1700)
    assert d['y'] == 1700
    d['hello'] = 'world'
    monkeypatch.setitem(d, 'x', 1500)
    assert d['x'] == 1500
    monkeypatch.undo()
    assert d == {'hello': 'world', 'x': 1}

def test_setenv():
    monkeypatch = MonkeyPatch()
    monkeypatch.setenv('XYZ123', 2)
    import os
    assert os.environ['XYZ123'] == "2"
    monkeypatch.undo()
    assert 'XYZ123' not in os.environ

def test_delenv():
    name = 'xyz1234'
    assert name not in os.environ
    monkeypatch = MonkeyPatch()
    py.test.raises(KeyError, "monkeypatch.delenv(%r, raising=True)" % name)
    monkeypatch.delenv(name, raising=False)
    monkeypatch.undo()
    os.environ[name] = "1"
    try:
        monkeypatch = MonkeyPatch()
        monkeypatch.delenv(name)
        assert name not in os.environ
        monkeypatch.setenv(name, "3")
        assert os.environ[name] == "3"
        monkeypatch.undo()
        assert os.environ[name] == "1"
    finally:
        if name in os.environ:
            del os.environ[name]

def test_setenv_prepend():
    import os
    monkeypatch = MonkeyPatch()
    monkeypatch.setenv('XYZ123', 2, prepend="-")
    assert os.environ['XYZ123'] == "2"
    monkeypatch.setenv('XYZ123', 3, prepend="-")
    assert os.environ['XYZ123'] == "3-2"
    monkeypatch.undo()
    assert 'XYZ123' not in os.environ

def test_monkeypatch_plugin(testdir):
    reprec = testdir.inline_runsource("""
        pytest_plugins = 'pytest_monkeypatch',
        def test_method(monkeypatch):
            assert monkeypatch.__class__.__name__ == "MonkeyPatch"
    """)
    res = reprec.countoutcomes()
    assert tuple(res) == (1, 0, 0), res

def test_syspath_prepend():
    old = list(sys.path)
    try:
        monkeypatch = MonkeyPatch()
        monkeypatch.syspath_prepend('world')
        monkeypatch.syspath_prepend('hello')
        assert sys.path[0] == "hello"
        assert sys.path[1] == "world"
        monkeypatch.undo()
        assert sys.path == old
        monkeypatch.undo()
        assert sys.path == old
    finally:
        sys.path[:] = old
@@ -0,0 +1,47 @@

class TestPasting:
    def pytest_funcarg__pastebinlist(self, request):
        mp = request.getfuncargvalue("monkeypatch")
        pastebinlist = []
        class MockProxy:
            def newPaste(self, language, code):
                pastebinlist.append((language, code))
        plugin = request.config.pluginmanager.impname2plugin['pytest_pastebin']
        mp.setattr(plugin, 'getproxy', MockProxy)
        return pastebinlist

    def test_failed(self, testdir, pastebinlist):
        testpath = testdir.makepyfile("""
            import py
            def test_pass():
                pass
            def test_fail():
                assert 0
            def test_skip():
                py.test.skip("")
        """)
        reprec = testdir.inline_run(testpath, "--paste=failed")
        assert len(pastebinlist) == 1
        assert pastebinlist[0][0] == "python"
        s = pastebinlist[0][1]
        assert s.find("def test_fail") != -1
        assert reprec.countoutcomes() == [1,1,1]

    def test_all(self, testdir, pastebinlist):
        testpath = testdir.makepyfile("""
            import py
            def test_pass():
                pass
            def test_fail():
                assert 0
            def test_skip():
                py.test.skip("")
        """)
        reprec = testdir.inline_run(testpath, "--pastebin=all")
        assert reprec.countoutcomes() == [1,1,1]
        assert len(pastebinlist) == 1
        assert pastebinlist[0][0] == "python"
        s = pastebinlist[0][1]
        for x in 'test_fail test_skip skipped'.split():
            assert s.find(x), (s, x)
@@ -0,0 +1,55 @@
import py

class TestPDB:
    def pytest_funcarg__pdblist(self, request):
        monkeypatch = request.getfuncargvalue("monkeypatch")
        pdblist = []
        def mypdb(*args):
            pdblist.append(args)
        plugin = request.config.pluginmanager.impname2plugin['pytest_pdb']
        monkeypatch.setattr(plugin, 'post_mortem', mypdb)
        return pdblist

    def test_pdb_on_fail(self, testdir, pdblist):
        rep = testdir.inline_runsource1('--pdb', """
            def test_func():
                assert 0
        """)
        assert rep.failed
        assert len(pdblist) == 1
        tb = py.code.Traceback(pdblist[0][0])
        assert tb[-1].name == "test_func"

    def test_pdb_on_skip(self, testdir, pdblist):
        rep = testdir.inline_runsource1('--pdb', """
            import py
            def test_func():
                py.test.skip("hello")
        """)
        assert rep.skipped
        assert len(pdblist) == 0

    def test_pdb_interaction(self, testdir):
        p1 = testdir.makepyfile("""
            def test_1():
                i = 0
                assert i == 1
        """)
        child = testdir.spawn_pytest("--pdb %s" % p1)
        #child.expect(".*def test_1.*")
        child.expect(".*i = 0.*")
        child.expect("(Pdb)")
        child.sendeof()
        child.expect("1 failed")
        if child.isalive():
            child.wait()

    def test_incompatibility_messages(self, testdir):
        Error = py.test.config.Error
        py.test.raises(Error, "testdir.parseconfigure('--pdb', '--looponfail')")
        result = testdir.runpytest("--pdb", "-n", "3")
        assert result.ret != 0
        assert "incompatible" in result.stdout.str()
        result = testdir.runpytest("--pdb", "-d", "--tx", "popen")
        assert result.ret != 0
        assert "incompatible" in result.stdout.str()
@@ -0,0 +1,70 @@
import py
from py.__.test.plugin.pytest_pytester import LineMatcher, LineComp

def test_reportrecorder(testdir):
    registry = py._com.Registry()
    recorder = testdir.getreportrecorder(registry)
    assert not recorder.getfailures()
    item = testdir.getitem("def test_func(): pass")
    class rep:
        excinfo = None
        passed = False
        failed = True
        skipped = False
        when = "call"

    recorder.hook.pytest_runtest_logreport(report=rep)
    failures = recorder.getfailures()
    assert failures == [rep]
    failures = recorder.getfailures()
    assert failures == [rep]

    class rep:
        excinfo = None
        passed = False
        failed = False
        skipped = True
        when = "call"
    rep.passed = False
    rep.skipped = True
    recorder.hook.pytest_runtest_logreport(report=rep)

    modcol = testdir.getmodulecol("")
    rep = modcol.config.hook.pytest_make_collect_report(collector=modcol)
    rep.passed = False
    rep.failed = True
    rep.skipped = False
    recorder.hook.pytest_collectreport(report=rep)

    passed, skipped, failed = recorder.listoutcomes()
    assert not passed and skipped and failed

    numpassed, numskipped, numfailed = recorder.countoutcomes()
    assert numpassed == 0
    assert numskipped == 1
    assert numfailed == 1
    assert len(recorder.getfailedcollections()) == 1

    recorder.unregister()
    recorder.clear()
    recorder.hook.pytest_runtest_logreport(report=rep)
    py.test.raises(ValueError, "recorder.getfailures()")


def test_parseconfig(testdir):
    config1 = testdir.parseconfig()
    config2 = testdir.parseconfig()
    assert config2 != config1
    assert config1 != py.test.config

def test_testdir_runs_with_plugin(testdir):
    testdir.makepyfile("""
        pytest_plugins = "pytest_pytester"
        def test_hello(testdir):
            assert 1
    """)
    result = testdir.runpytest()
    assert result.stdout.fnmatch_lines([
        "*1 passed*"
    ])
@@ -0,0 +1,81 @@
import py
from py.__.test.plugin.pytest_recwarn import WarningsRecorder

def test_WarningRecorder():
    showwarning = py.std.warnings.showwarning
    rec = WarningsRecorder()
    assert py.std.warnings.showwarning != showwarning
    assert not rec.list
    py.std.warnings.warn_explicit("hello", UserWarning, "xyz", 13)
    assert len(rec.list) == 1
    py.std.warnings.warn(DeprecationWarning("hello"))
    assert len(rec.list) == 2
    warn = rec.pop()
    assert str(warn.message) == "hello"
    l = rec.list
    rec.clear()
    assert len(rec.list) == 0
    assert l is rec.list
    py.test.raises(AssertionError, "rec.pop()")
    rec.finalize()
    assert showwarning == py.std.warnings.showwarning

def test_recwarn_functional(testdir):
    reprec = testdir.inline_runsource("""
        pytest_plugins = 'pytest_recwarn',
        import warnings
        oldwarn = warnings.showwarning
        def test_method(recwarn):
            assert warnings.showwarning != oldwarn
            warnings.warn("hello")
            warn = recwarn.pop()
            assert isinstance(warn.message, UserWarning)
        def test_finalized():
            assert warnings.showwarning == oldwarn
    """)
    res = reprec.countoutcomes()
    assert tuple(res) == (2, 0, 0), res

#
# ============ test py.test.deprecated_call() ==============
#

def dep(i):
    if i == 0:
        py.std.warnings.warn("is deprecated", DeprecationWarning)
    return 42

reg = {}
def dep_explicit(i):
    if i == 0:
        py.std.warnings.warn_explicit("dep_explicit", category=DeprecationWarning,
                                      filename="hello", lineno=3)

def test_deprecated_call_raises():
    excinfo = py.test.raises(AssertionError,
                             "py.test.deprecated_call(dep, 3)")
    assert str(excinfo).find("did not produce") != -1

def test_deprecated_call():
    py.test.deprecated_call(dep, 0)

def test_deprecated_call_ret():
    ret = py.test.deprecated_call(dep, 0)
    assert ret == 42

def test_deprecated_call_preserves():
    r = py.std.warnings.onceregistry.copy()
    f = py.std.warnings.filters[:]
    test_deprecated_call_raises()
    test_deprecated_call()
    assert r == py.std.warnings.onceregistry
    assert f == py.std.warnings.filters

def test_deprecated_explicit_call_raises():
    py.test.raises(AssertionError,
                   "py.test.deprecated_call(dep_explicit, 3)")

def test_deprecated_explicit_call():
    py.test.deprecated_call(dep_explicit, 0)
    py.test.deprecated_call(dep_explicit, 0)
@@ -0,0 +1,137 @@
from py.__.test.plugin.pytest_restdoc import deindent

def test_deindent():
    assert deindent('foo') == 'foo'
    assert deindent('foo\n  bar') == 'foo\n  bar'
    assert deindent('  foo\n  bar\n') == 'foo\nbar\n'
    assert deindent('  foo\n\n  bar\n') == 'foo\n\nbar\n'
    assert deindent('  foo\n    bar\n') == 'foo\n  bar\n'
    assert deindent('    foo\n  bar\n') == '  foo\nbar\n'

class TestApigenLinkRole:
    disabled = True

    # these tests are moved here from the former py/doc/conftest.py
    def test_resolve_linkrole(self):
        from py.__.doc.conftest import get_apigen_relpath
        apigen_relpath = get_apigen_relpath()

        assert resolve_linkrole('api', 'py.foo.bar', False) == (
            'py.foo.bar', apigen_relpath + 'api/foo.bar.html')
        assert resolve_linkrole('api', 'py.foo.bar()', False) == (
            'py.foo.bar()', apigen_relpath + 'api/foo.bar.html')
        assert resolve_linkrole('api', 'py', False) == (
            'py', apigen_relpath + 'api/index.html')
        py.test.raises(AssertionError, 'resolve_linkrole("api", "foo.bar")')
        assert resolve_linkrole('source', 'py/foo/bar.py', False) == (
            'py/foo/bar.py', apigen_relpath + 'source/foo/bar.py.html')
        assert resolve_linkrole('source', 'py/foo/', False) == (
            'py/foo/', apigen_relpath + 'source/foo/index.html')
        assert resolve_linkrole('source', 'py/', False) == (
            'py/', apigen_relpath + 'source/index.html')
        py.test.raises(AssertionError, 'resolve_linkrole("source", "/foo/bar/")')

    def test_resolve_linkrole_check_api(self):
        assert resolve_linkrole('api', 'py.test.ensuretemp')
        py.test.raises(AssertionError, "resolve_linkrole('api', 'py.foo.baz')")

    def test_resolve_linkrole_check_source(self):
        assert resolve_linkrole('source', 'py/path/common.py')
        py.test.raises(AssertionError,
                       "resolve_linkrole('source', 'py/foo/bar.py')")


class TestDoctest:
    def pytest_funcarg__testdir(self, request):
        testdir = request.getfuncargvalue("testdir")
        assert request.module.__name__ == __name__
        testdir.makepyfile(confrest="from py.__.rest.resthtml import Project")
        for p in testdir.plugins:
            if p == globals():
                break
        else:
            testdir.plugins.append(globals())
        return testdir

    def test_doctest_extra_exec(self, testdir):
        xtxt = testdir.maketxtfile(x="""
            hello::
                .. >>> raise ValueError
                   >>> None
        """)
        reprec = testdir.inline_run(xtxt)
        passed, skipped, failed = reprec.countoutcomes()
        assert failed == 1

    def test_doctest_basic(self, testdir):
        xtxt = testdir.maketxtfile(x="""
            ..
                >>> from os.path import abspath

            hello world

               >>> assert abspath
               >>> i=3
               >>> print (i)
               3

            yes yes

                >>> i
                3

            end
        """)
        reprec = testdir.inline_run(xtxt)
        passed, skipped, failed = reprec.countoutcomes()
        assert failed == 0
        assert passed + skipped == 2

    def test_doctest_eol(self, testdir):
        ytxt = testdir.maketxtfile(y=".. >>> 1 + 1\r\n 2\r\n\r\n")
        reprec = testdir.inline_run(ytxt)
        passed, skipped, failed = reprec.countoutcomes()
        assert failed == 0
        assert passed + skipped == 2

    def test_doctest_indentation(self, testdir):
        footxt = testdir.maketxtfile(foo=
            '..\n >>> print ("foo\\n bar")\n foo\n bar\n')
        reprec = testdir.inline_run(footxt)
        passed, skipped, failed = reprec.countoutcomes()
        assert failed == 0
        assert skipped + passed == 2

    def test_js_ignore(self, testdir):
        xtxt = testdir.maketxtfile(xtxt="""
            `blah`_

            .. _`blah`: javascript:some_function()
        """)
        reprec = testdir.inline_run(xtxt)
        passed, skipped, failed = reprec.countoutcomes()
        assert failed == 0
        assert skipped + passed == 3

    def test_pytest_doctest_prepare_content(self, testdir):
        l = []
        class MyPlugin:
            def pytest_doctest_prepare_content(self, content):
                l.append(content)
                return content.replace("False", "True")

        testdir.plugins.append(MyPlugin())

        xtxt = testdir.maketxtfile(x="""
            hello:

                >>> 2 == 2
                False

        """)
        reprec = testdir.inline_run(xtxt)
        assert len(l) == 1
        passed, skipped, failed = reprec.countoutcomes()
        assert passed >= 1
        assert not failed
        assert skipped <= 1
@@ -0,0 +1,153 @@
import py
import os
from py.__.test.plugin.pytest_resultlog import generic_path, ResultLog
from py.__.test.collect import Node, Item, FSCollector

def test_generic_path():
    p1 = Node('a')
    assert p1.fspath is None
    p2 = Node('B', parent=p1)
    p3 = Node('()', parent = p2)
    item = Item('c', parent = p3)

    res = generic_path(item)
    assert res == 'a.B().c'

    p0 = FSCollector('proj/test')
    p1 = FSCollector('proj/test/a', parent=p0)
    p2 = Node('B', parent=p1)
    p3 = Node('()', parent = p2)
    p4 = Node('c', parent=p3)
    item = Item('[1]', parent = p4)

    res = generic_path(item)
    assert res == 'test/a:B().c[1]'

def test_write_log_entry():
    reslog = ResultLog(None)
    reslog.logfile = py.io.TextIO()
    reslog.write_log_entry('name', '.', '')
    entry = reslog.logfile.getvalue()
    assert entry[-1] == '\n'
    entry_lines = entry.splitlines()
    assert len(entry_lines) == 1
    assert entry_lines[0] == '. name'

    reslog.logfile = py.io.TextIO()
    reslog.write_log_entry('name', 's', 'Skipped')
    entry = reslog.logfile.getvalue()
    assert entry[-1] == '\n'
    entry_lines = entry.splitlines()
    assert len(entry_lines) == 2
    assert entry_lines[0] == 's name'
    assert entry_lines[1] == ' Skipped'

    reslog.logfile = py.io.TextIO()
    reslog.write_log_entry('name', 's', 'Skipped\n')
    entry = reslog.logfile.getvalue()
    assert entry[-1] == '\n'
    entry_lines = entry.splitlines()
    assert len(entry_lines) == 2
    assert entry_lines[0] == 's name'
    assert entry_lines[1] == ' Skipped'

    reslog.logfile = py.io.TextIO()
    longrepr = ' tb1\n tb 2\nE tb3\nSome Error'
    reslog.write_log_entry('name', 'F', longrepr)
    entry = reslog.logfile.getvalue()
    assert entry[-1] == '\n'
    entry_lines = entry.splitlines()
    assert len(entry_lines) == 5
    assert entry_lines[0] == 'F name'
    assert entry_lines[1:] == [' '+line for line in longrepr.splitlines()]


class TestWithFunctionIntegration:
    # XXX (hpk) i think that the resultlog plugin should
    # provide a Parser object so that one can remain
    # ignorant regarding formatting details.
    def getresultlog(self, testdir, arg):
        resultlog = testdir.tmpdir.join("resultlog")
        testdir.plugins.append("resultlog")
        args = ["--resultlog=%s" % resultlog] + [arg]
        testdir.runpytest(*args)
        return [x for x in resultlog.readlines(cr=0) if x]

    def test_collection_report(self, testdir):
        ok = testdir.makepyfile(test_collection_ok="")
        skip = testdir.makepyfile(test_collection_skip="import py ; py.test.skip('hello')")
        fail = testdir.makepyfile(test_collection_fail="XXX")
        lines = self.getresultlog(testdir, ok)
        assert not lines

        lines = self.getresultlog(testdir, skip)
        assert len(lines) == 2
        assert lines[0].startswith("S ")
        assert lines[0].endswith("test_collection_skip.py")
        assert lines[1].startswith(" ")
        assert lines[1].endswith("test_collection_skip.py:1: Skipped: 'hello'")

        lines = self.getresultlog(testdir, fail)
        assert lines
        assert lines[0].startswith("F ")
        assert lines[0].endswith("test_collection_fail.py"), lines[0]
        for x in lines[1:]:
            assert x.startswith(" ")
        assert "XXX" in "".join(lines[1:])

    def test_log_test_outcomes(self, testdir):
        mod = testdir.makepyfile(test_mod="""
            import py
            def test_pass(): pass
            def test_skip(): py.test.skip("hello")
            def test_fail(): raise ValueError("val")
        """)
        lines = self.getresultlog(testdir, mod)
        assert len(lines) >= 3
        assert lines[0].startswith(". ")
        assert lines[0].endswith("test_pass")
        assert lines[1].startswith("s "), lines[1]
        assert lines[1].endswith("test_skip")
        assert lines[2].find("hello") != -1

        assert lines[3].startswith("F ")
        assert lines[3].endswith("test_fail")
        tb = "".join(lines[4:])
        assert tb.find("ValueError") != -1

    def test_internal_exception(self):
        # they are produced for example by a teardown failing
        # at the end of the run
        try:
            raise ValueError
        except ValueError:
            excinfo = py.code.ExceptionInfo()
        reslog = ResultLog(py.io.TextIO())
        reslog.pytest_internalerror(excinfo.getrepr())
        entry = reslog.logfile.getvalue()
        entry_lines = entry.splitlines()

        assert entry_lines[0].startswith('! ')
        assert os.path.basename(__file__)[:-1] in entry_lines[0] #.py/.pyc
        assert entry_lines[-1][0] == ' '
        assert 'ValueError' in entry

def test_generic(testdir, LineMatcher):
    testdir.plugins.append("resultlog")
    testdir.makepyfile("""
        import py
        def test_pass():
            pass
        def test_fail():
            assert 0
        def test_skip():
            py.test.skip("")
    """)
    testdir.runpytest("--resultlog=result.log")
    lines = testdir.tmpdir.join("result.log").readlines(cr=0)
    LineMatcher(lines).fnmatch_lines([
        ". *:test_pass",
        "F *:test_fail",
        "s *:test_skip",
    ])
Some files were not shown because too many files have changed in this diff.