* use py.builtin._getimself instead of getattr(..., '__self__') everywhere
* fix logging to work with 3k, implement buffering manually
* fix unicode capturing issue - re-introduce EncodedFile for <3K file writes

--HG--
branch : trunk
commit c1fcf9c4d8
parent 43b8bd7df7
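Note: the first bullet replaces ad-hoc im_self/__self__ lookups with a single helper. A minimal standalone sketch of the compatibility idea behind py.builtin._getimself (the class C and the asserts below are illustrative only, not part of this change):

    import sys

    def _getimself(function):
        # Bound methods expose their instance as 'im_self' on Python 2
        # and as '__self__' on Python 3; plain functions have neither.
        attr = '__self__' if sys.version_info >= (3, 0) else 'im_self'
        return getattr(function, attr, None)

    class C(object):
        def method(self):
            pass

    assert _getimself(C().method) is not None   # bound method -> its instance
    assert _getimself(lambda: None) is None     # plain function -> None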
@@ -151,6 +151,7 @@ initpkg(__name__,
     'builtin._totext' : ('./builtin/builtin31.py', '_totext'),
     'builtin._isbytes' : ('./builtin/builtin31.py', '_isbytes'),
     'builtin._istext' : ('./builtin/builtin31.py', '_istext'),
+    'builtin._getimself' : ('./builtin/builtin31.py', '_getimself'),
     'builtin.builtins' : ('./builtin/builtin31.py', 'builtins'),
     'builtin.execfile' : ('./builtin/builtin31.py', 'execfile'),
     'builtin.callable' : ('./builtin/builtin31.py', 'callable'),

@@ -75,7 +75,8 @@ class Registry:
         l = []
         if plugins is None:
             plugins = self._plugins
-        for plugin in list(plugins) + list(extra):
+        candidates = list(plugins) + list(extra)
+        for plugin in candidates:
             try:
                 l.append(getattr(plugin, attrname))
             except AttributeError:

@@ -105,7 +106,7 @@ class HookRelay:
         return multicall.execute()

 class HookCaller:
-    def __init__(self, hookrelay, name, firstresult, extralookup=()):
+    def __init__(self, hookrelay, name, firstresult, extralookup=None):
         self.hookrelay = hookrelay
         self.name = name
         self.firstresult = firstresult

@@ -19,6 +19,9 @@ if sys.version_info >= (3, 0):
     def _istext(x):
         return isinstance(x, str)

+    def _getimself(function):
+        return getattr(function, '__self__', None)
+
     def execfile(fn, globs=None, locs=None):
         if globs is None:
             back = sys._getframe(1)

@@ -51,6 +54,9 @@ else:
     def _istext(x):
         return isinstance(x, unicode)

+    def _getimself(function):
+        return getattr(function, 'im_self', None)
+
     import __builtin__ as builtins
     def print_(*args, **kwargs):
         """ minimal backport of py3k print statement. """

@@ -181,6 +181,7 @@ class Message:


 def _setupmessages():
+    # XXX use metaclass for registering

     class CHANNEL_OPEN(Message):
         def received(self, gateway):

@@ -211,14 +212,13 @@ def _setupmessages():
         def received(self, gateway):
             gateway._channelfactory._local_last_message(self.channelid)

-    classes = [x for x in locals().values() if hasattr(x, '__bases__')]
-    classes.sort(lambda x,y : cmp(x.__name__, y.__name__))
-    i = 0
-    for cls in classes:
+    classes = [CHANNEL_OPEN, CHANNEL_NEW, CHANNEL_DATA,
+               CHANNEL_CLOSE, CHANNEL_CLOSE_ERROR, CHANNEL_LAST_MESSAGE]
+
+    for i, cls in enumerate(classes):
         Message._types[i] = cls
         cls.msgtype = i
         setattr(Message, cls.__name__, cls)
-        i+=1

 _setupmessages()

@@ -1,9 +1,12 @@
 import py, os, stat
-from Queue import Queue
 try:
     from hashlib import md5
 except ImportError:
     from md5 import md5
+try:
+    from queue import Queue
+except ImportError:
+    from Queue import Queue

 class RSync(object):
     """ This class allows to send a directory structure (recursively)

@@ -96,13 +96,37 @@ def dupfile(f, mode=None, buffering=0, raising=False, encoding=None):
         return f
     newfd = os.dup(fd)
     mode = mode and mode or f.mode
-    if encoding is not None and sys.version_info >= (3,0):
-        mode = mode.replace("b", "")
-        buffering = True
+    if sys.version_info >= (3,0):
+        if encoding is not None:
+            mode = mode.replace("b", "")
+            buffering = True
         return os.fdopen(newfd, mode, buffering, encoding, closefd=False)
     else:
-        return os.fdopen(newfd, mode, buffering)
+        f = os.fdopen(newfd, mode, buffering)
+        if encoding is not None:
+            return EncodedFile(f, encoding)
+        return f
+
+class EncodedFile(object):
+    def __init__(self, _stream, encoding):
+        self._stream = _stream
+        self.encoding = encoding
+
+    def write(self, obj):
+        if isinstance(obj, unicode):
+            obj = obj.encode(self.encoding)
+        elif isinstance(obj, str):
+            pass
+        else:
+            obj = str(obj)
+        self._stream.write(obj)
+
+    def writelines(self, linelist):
+        data = ''.join(linelist)
+        self.write(data)
+
+    def __getattr__(self, name):
+        return getattr(self._stream, name)

 class Capture(object):
     def call(cls, func, *args, **kwargs):
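Note: the re-introduced EncodedFile wrapper above is what makes unicode writes safe on the pre-3.0 branch of dupfile: unicode objects are encoded before being handed to the underlying byte stream. A rough Python-2-only usage sketch (the BytesIO target and sample strings are illustrative, and it assumes the EncodedFile class from the hunk above is in scope):

    from io import BytesIO

    out = BytesIO()
    f = EncodedFile(out, 'utf-8')      # wrapper added in the hunk above
    f.write(u'b\u00f6y\n')             # unicode -> encoded to UTF-8 bytes
    f.writelines([u'a\n', u'b\n'])     # joined, then routed through write()
    assert out.getvalue() == 'b\xc3\xb6y\na\nb\n'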
@@ -92,12 +92,12 @@ class KeywordMapper:
         """ set a consumer for a set of keywords. """
         # normalize to tuples
         if isinstance(keywords, str):
-            keywords = tuple(map(None, keywords.split()))
+            keywords = tuple(filter(None, keywords.split()))
         elif hasattr(keywords, '_keywords'):
             keywords = keywords._keywords
         elif not isinstance(keywords, tuple):
             raise TypeError("key %r is not a string or tuple" % (keywords,))
-        if consumer is not None and not callable(consumer):
+        if consumer is not None and not py.builtin.callable(consumer):
             if not hasattr(consumer, 'write'):
                 raise TypeError(
                     "%r should be None, callable or file-like" % (consumer,))

@@ -123,11 +123,10 @@ def getstate():
 #

 class File(object):
-    """ log consumer wrapping a file(-like) object
-    """
+    """ log consumer wrapping a file(-like) object """
     def __init__(self, f):
         assert hasattr(f, 'write')
-        assert isinstance(f, file) or not hasattr(f, 'open')
+        #assert isinstance(f, file) or not hasattr(f, 'open')
         self._file = f

     def __call__(self, msg):

@@ -135,19 +134,18 @@ class File(object):
         self._file.write(str(msg) + "\n")

 class Path(object):
-    """ log consumer able to write log messages into
-    """
-    def __init__(self, filename, append=False, delayed_create=False,
-                 buffering=1):
+    """ log consumer that opens and writes to a Path """
+    def __init__(self, filename, append=False,
+                 delayed_create=False, buffering=False):
         self._append = append
-        self._filename = filename
+        self._filename = str(filename)
         self._buffering = buffering
         if not delayed_create:
             self._openfile()

     def _openfile(self):
         mode = self._append and 'a' or 'w'
-        f = open(str(self._filename), mode, buffering=self._buffering)
+        f = open(self._filename, mode)
         self._file = f

     def __call__(self, msg):

@@ -155,6 +153,8 @@ class Path(object):
         if not hasattr(self, "_file"):
             self._openfile()
         self._file.write(str(msg) + "\n")
+        if not self._buffering:
+            self._file.flush()

 def STDOUT(msg):
     """ consumer that writes to sys.stdout """
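Note: with buffering=False as the new default, py.log.Path no longer leans on open()'s buffering argument (whose 0/1 semantics do not carry over cleanly to 3k) and instead flushes after every write. The same pattern in isolation (FlushingFile and the temporary path are hypothetical, not part of the change):

    import os, tempfile

    class FlushingFile(object):
        """ write-through consumer: flush after each message unless buffering. """
        def __init__(self, path, buffering=False):
            self._file = open(path, 'w')
            self._buffering = buffering

        def __call__(self, msg):
            self._file.write(str(msg) + "\n")
            if not self._buffering:
                self._file.flush()   # manual buffering, as in py.log.Path above

    path = os.path.join(tempfile.mkdtemp(), "example.log")
    log = FlushingFile(path)
    log("hello")   # hits the disk immediately because buffering is False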
@@ -167,12 +167,6 @@ def STDERR(msg):
 class Syslog:
     """ consumer that writes to the syslog daemon """
-
-    for priority in "LOG_EMERG LOG_ALERT LOG_CRIT LOG_ERR LOG_WARNING LOG_NOTICE LOG_INFO LOG_DEBUG".split():
-        try:
-            exec("%s = py.std.syslog.%s" % (priority, priority))
-        except AttributeError:
-            pass

     def __init__(self, priority = None):
         if priority is None:
             priority = self.LOG_INFO

@@ -181,3 +175,10 @@ class Syslog:
     def __call__(self, msg):
         """ write a message to the log """
         py.std.syslog.syslog(self.priority, str(msg))
+
+for _prio in "EMERG ALERT CRIT ERR WARNING NOTICE INFO DEBUG".split():
+    _prio = "LOG_" + _prio
+    try:
+        setattr(Syslog, _prio, getattr(py.std.syslog, _prio))
+    except AttributeError:
+        pass

@@ -117,14 +117,15 @@ class TestLogConsumer:
         assert out.strip() == '[xyz] hello'

     def test_log_file(self):
-        custom_log = tempdir.join('log.out')
-        py.log.setconsumer("default", open(str(custom_log), 'w', buffering=0))
+        customlog = tempdir.join('log.out')
+        py.log.setconsumer("default", open(str(customlog), 'w', buffering=1))
         py.log.Producer("default")("hello world #1")
-        assert custom_log.readlines() == ['[default] hello world #1\n']
+        assert customlog.readlines() == ['[default] hello world #1\n']

-        py.log.setconsumer("default", py.log.Path(custom_log, buffering=0))
+        py.log.setconsumer("default", py.log.Path(customlog, buffering=False))
         py.log.Producer("default")("hello world #2")
-        assert custom_log.readlines() == ['[default] hello world #2\n'] # no append by default!
+        res = customlog.readlines()
+        assert res == ['[default] hello world #2\n'] # no append by default!

     def test_log_file_append_mode(self):
         logfilefn = tempdir.join('log_append.out')

@@ -11,7 +11,7 @@ def test_apiwarn_functional():
     py.builtin.print_("out", out)
     py.builtin.print_("err", err)
     assert err.find("x.y.z") != -1
-    lno = test_apiwarn_functional.func_code.co_firstlineno + 2
+    lno = py.code.getrawcode(test_apiwarn_functional).co_firstlineno + 2
     exp = "%s:%s" % (mypath, lno)
     assert err.find(exp) != -1

@@ -23,7 +23,7 @@ def test_stacklevel():
     capture = py.io.StdCapture()
     f()
     out, err = capture.reset()
-    lno = test_stacklevel.func_code.co_firstlineno + 6
+    lno = py.code.getrawcode(test_stacklevel).co_firstlineno + 6
     warning = str(err)
     assert warning.find(":%s" % lno) != -1

@@ -38,7 +38,7 @@ def test_stacklevel_initpkg_with_resolve(testdir):
     capture = py.io.StdCapture()
     mod.__getattr__()
     out, err = capture.reset()
-    lno = test_stacklevel_initpkg_with_resolve.func_code.co_firstlineno + 9
+    lno = py.code.getrawcode(test_stacklevel_initpkg_with_resolve).co_firstlineno + 9
     warning = str(err)
     assert warning.find(":%s" % lno) != -1

@@ -48,7 +48,7 @@ def test_stacklevel_initpkg_no_resolve():
     capture = py.io.StdCapture()
     f()
     out, err = capture.reset()
-    lno = test_stacklevel_initpkg_no_resolve.func_code.co_firstlineno + 2
+    lno = py.code.getrawcode(test_stacklevel_initpkg_no_resolve).co_firstlineno + 2
     warning = str(err)
     assert warning.find(":%s" % lno) != -1

@@ -60,7 +60,7 @@ def test_function():
     py.builtin.print_("out", out)
     py.builtin.print_("err", err)
     assert err.find("x.y.z") != -1
-    lno = test_function.func_code.co_firstlineno
+    lno = py.code.getrawcode(test_function).co_firstlineno
     exp = "%s:%s" % (mypath, lno)
     assert err.find(exp) != -1

@@ -31,7 +31,7 @@ def _apiwarn(startversion, msg, stacklevel=1, function=None):
 def warn(msg, stacklevel=1, function=None):
     if function is not None:
         filename = py.std.inspect.getfile(function)
-        lineno = function.func_code.co_firstlineno
+        lineno = py.code.getrawcode(function).co_firstlineno
     else:
         try:
             caller = sys._getframe(stacklevel)

@@ -316,7 +316,7 @@ class Collector(Node):
         if method is not None and method != Collector.run:
             warnoldcollect(function=method)
         names = self.run()
-        return filter(None, [self.join(name) for name in names])
+        return [x for x in [self.join(name) for name in names] if x]

     def run(self):
         """ DEPRECATED: returns a list of names available from this collector.

@@ -2,18 +2,13 @@ import py

 from py.test.collect import Function

-if py.std.sys.version_info > (3, 0):
-    _self = "__self__"
-else:
-    _self = "im_self"
-
 class TestCaseUnit(Function):
     """ compatibility Unit executor for TestCase methods
     honouring setUp and tearDown semantics.
     """
     def runtest(self, _deprecated=None):
         boundmethod = self.obj
-        instance = getattr(boundmethod, _self)
+        instance = py.builtin._getimself(boundmethod)
         instance.setUp()
         try:
             boundmethod()

@@ -8,8 +8,10 @@ import py
 from py.__.test.session import Session
 from py.__.test import outcome
 from py.__.test.dist.nodemanage import NodeManager
-import Queue
+try:
+    import queue
+except ImportError:
+    import Queue as queue

 debug_file = None # open('/tmp/loop.log', 'w')
 def debug(*args):

@@ -72,7 +74,7 @@ class DSession(Session):
     MAXITEMSPERHOST = 15

     def __init__(self, config):
-        self.queue = Queue.Queue()
+        self.queue = queue.Queue()
         self.node2pending = {}
         self.item2nodes = {}
         super(DSession, self).__init__(config=config)

@@ -107,7 +109,7 @@ class DSession(Session):
             try:
                 eventcall = self.queue.get(timeout=2.0)
                 break
-            except Queue.Empty:
+            except queue.Empty:
                 continue
         loopstate.dowork = True

@@ -14,17 +14,24 @@ def pytest_generate_tests(metafunc):
         except ImportError:
             pass
         else:
-            metafunc.addcall(funcargs={'picklemod': cPpickle})
+            metafunc.addcall(funcargs={'picklemod': cPickle})
+    elif "obj" in metafunc.funcargnames and "proto" in metafunc.funcargnames:
+        a1 = A()
+        a2 = A()
+        a2.a1 = a1
+        for proto in (0,1,2, -1):
+            for obj in {1:2}, [1,2,3], a1, a2:
+                metafunc.addcall(funcargs=dict(obj=obj, proto=proto))

-def xxx_test_underlying_basic_pickling_mechanisms(picklemod):
+def test_underlying_basic_pickling_mechanisms(picklemod):
     f1 = py.io.TextIO()
     f2 = py.io.TextIO()

-    pickler1 = picklingmod.Pickler(f1)
-    unpickler1 = picklingmod.Unpickler(f2)
+    pickler1 = picklemod.Pickler(f1)
+    unpickler1 = picklemod.Unpickler(f2)

-    pickler2 = picklingmod.Pickler(f2)
-    unpickler2 = picklingmod.Unpickler(f1)
+    pickler2 = picklemod.Pickler(f2)
+    unpickler2 = picklemod.Unpickler(f1)

     #pickler1.memo = unpickler1.memo = {}
     #pickler2.memo = unpickler2.memo = {}

@@ -50,14 +57,6 @@ def xxx_test_underlying_basic_pickling_mechanisms(picklemod):
 class A:
     pass

-def pytest_generate_tests(metafunc):
-    if "obj" in metafunc.funcargnames and "proto" in metafunc.funcargnames:
-        a1 = A()
-        a2 = A()
-        a2.a1 = a1
-        for proto in (0,1,2, -1):
-            for obj in {1:2}, [1,2,3], a1, a2:
-                metafunc.addcall(funcargs=dict(obj=obj, proto=proto))

 def test_pickle_and_back_IS_same(obj, proto):
     p1 = ImmutablePickler(uneven=False, protocol=proto)

@@ -83,8 +83,9 @@ class FuncargRequest:
         self._pyfuncitem = pyfuncitem
         self.function = pyfuncitem.obj
         self.module = pyfuncitem.getparent(py.test.collect.Module).obj
-        self.cls = getattr(self.function, 'im_class', None)
-        self.instance = getattr(self.function, 'im_self', None)
+        clscol = pyfuncitem.getparent(py.test.collect.Class)
+        self.cls = clscol and clscol.obj or None
+        self.instance = py.builtin._getimself(self.function)
         self.config = pyfuncitem.config
         self.fspath = pyfuncitem.fspath
         if hasattr(pyfuncitem, '_requestparam'):

@@ -99,7 +100,8 @@ class FuncargRequest:
     def _fillfuncargs(self):
         argnames = getfuncargnames(self.function)
         if argnames:
-            assert not getattr(self._pyfuncitem, '_args', None), "yielded functions cannot have funcargs"
+            assert not getattr(self._pyfuncitem, '_args', None), (
+                "yielded functions cannot have funcargs")
             for argname in argnames:
                 if argname not in self._pyfuncitem.funcargs:
                     self._pyfuncitem.funcargs[argname] = self.getfuncargvalue(argname)

@@ -151,7 +151,7 @@ class TmpTestdir:
         # used from runner functional tests
         item = self.getitem(source)
         # the test class where we are called from wants to provide the runner
-        testclassinstance = self.request.function.im_self
+        testclassinstance = py.builtin._getimself(self.request.function)
         runner = testclassinstance.getrunner()
         return runner(item)

@@ -3,6 +3,7 @@
 """

 import py
+from py.builtin import print_

 def pytest_addoption(parser):
     group = parser.addgroup("resultlog", "resultlog plugin options")

@@ -51,9 +52,9 @@ class ResultLog(object):
         self.logfile = logfile # preferably line buffered

     def write_log_entry(self, testpath, shortrepr, longrepr):
-        print >>self.logfile, "%s %s" % (shortrepr, testpath)
+        print_("%s %s" % (shortrepr, testpath), file=self.logfile)
         for line in longrepr.splitlines():
-            print >>self.logfile, " %s" % line
+            print_(" %s" % line, file=self.logfile)

     def log_outcome(self, node, shortrepr, longrepr):
         testpath = generic_path(node)
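Note: the resultlog plugin drops the 2.x-only "print >>file, ..." statement in favour of the print_ backport imported above. A small sketch of the substitution (the stream and the sample strings are illustrative only):

    import py
    from py.builtin import print_

    stream = py.std.sys.stderr
    # old, Python-2-only spelling:   print >>stream, "shortrepr", "testpath"
    print_("shortrepr", "testpath", file=stream)   # works on 2.x and on 3k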
@@ -161,7 +162,7 @@ class TestWithFunctionIntegration:
         testdir.plugins.append("resultlog")
         args = ["--resultlog=%s" % resultlog] + [arg]
         testdir.runpytest(*args)
-        return filter(None, resultlog.readlines(cr=0))
+        return [x for x in resultlog.readlines(cr=0) if x]

     def test_collection_report(self, testdir):
         ok = testdir.makepyfile(test_collection_ok="")

@@ -40,7 +40,7 @@ class UnitTestCaseInstance(py.test.collect.Instance):
         l = []
         for name in names:
             callobj = getattr(self.obj, name)
-            if callable(callobj):
+            if py.builtin.callable(callobj):
                 l.append(UnitTestFunction(name, parent=self))
         return l

@@ -65,11 +65,11 @@ class UnitTestFunction(py.test.collect.Function):
         target(*args)

     def setup(self):
-        instance = self.obj.im_self
+        instance = py.builtin._getimself(self.obj)
         instance.setUp()

     def teardown(self):
-        instance = self.obj.im_self
+        instance = py.builtin._getimself(self.obj)
         instance.tearDown()

@@ -56,14 +56,16 @@ class TestCaptureManager:
 @py.test.mark.multi(method=['fd', 'sys'])
 def test_capturing_unicode(testdir, method):
     if sys.version_info >= (3,0):
-        py.test.skip("test not applicable on 3k")
+        obj = "'b\u00f6y'"
+    else:
+        obj = "u'\u00f6y'"
     testdir.makepyfile("""
         # taken from issue 227 from nosetests
         def test_unicode():
             import sys
             print (sys.stdout)
-            print u'b\\u00f6y'
-    """)
+            print (%s)
+    """ % obj)
     result = testdir.runpytest("--capture=%s" % method)
     result.stdout.fnmatch_lines([
         "*1 passed*"

@@ -17,6 +17,7 @@ a tree of collectors and test items that this modules provides::

 """
 import py
+import inspect
 from py.__.test.collect import configproperty, warnoldcollect
 pydir = py.path.local(py.__file__).dirpath()
 from py.__.test import funcargs

@@ -100,7 +101,7 @@ class PyCollectorMixin(PyobjMixin, py.test.collect.Collector):
         # NB. we avoid random getattrs and peek in the __dict__ instead
         d = {}
         dicts = [getattr(self.obj, '__dict__', {})]
-        for basecls in py.std.inspect.getmro(self.obj.__class__):
+        for basecls in inspect.getmro(self.obj.__class__):
             dicts.append(basecls.__dict__)
         seen = {}
         for dic in dicts:

@@ -140,7 +141,7 @@ class PyCollectorMixin(PyobjMixin, py.test.collect.Collector):

     def _istestclasscandidate(self, name, obj):
         if self.classnamefilter(name) and \
-           py.std.inspect.isclass(obj):
+           inspect.isclass(obj):
             if hasinit(obj):
                 # XXX WARN
                 return False

@@ -149,8 +150,6 @@ class PyCollectorMixin(PyobjMixin, py.test.collect.Collector):

     def _genfunctions(self, name, funcobj):
         module = self.getparent(Module).obj
-        # due to _buildname2items funcobj is the raw function, we need
-        # to work to get at the class
         clscol = self.getparent(Class)
         cls = clscol and clscol.obj or None
         metafunc = funcargs.Metafunc(funcobj, config=self.config,

@@ -163,15 +162,11 @@ class PyCollectorMixin(PyobjMixin, py.test.collect.Collector):
         return funcargs.FunctionCollector(name=name,
             parent=self, calls=metafunc._calls)

-if py.std.sys.version_info > (3, 0):
-    _code_attr = "__code__"
-else:
-    _code_attr = "func_code"
-
 def is_generator(func):
     try:
-        return (getattr(func, _code_attr).co_flags & 32) # generator function
-    except AttributeError: # c / builtin functions have no func_code
+        return py.code.getrawcode(func).co_flags & 32 # generator function
+    except AttributeError: # builtin functions have no bytecode
+        # assume them to not be generators
         return False

 class Module(py.test.collect.File, PyCollectorMixin):
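Note: is_generator() now reaches the code object through py.code.getrawcode() instead of the removed _code_attr switch. A version-agnostic sketch of what such a lookup boils down to (this local getrawcode is a simplified stand-in for the py.code helper, which may handle more cases):

    def getrawcode(obj):
        # Python 3 spells it __code__, Python 2 spells it func_code.
        return getattr(obj, '__code__', None) or getattr(obj, 'func_code', None)

    def is_generator(func):
        try:
            return bool(getrawcode(func).co_flags & 32)   # CO_GENERATOR flag
        except AttributeError:   # builtins carry no code object
            return False

    def gen():
        yield 1

    assert is_generator(gen)
    assert not is_generator(len)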
@@ -242,7 +237,7 @@ class FunctionMixin(PyobjMixin):

     def setup(self):
         """ perform setup for this test function. """
-        if py.std.inspect.ismethod(self.obj):
+        if inspect.ismethod(self.obj):
             name = 'setup_method'
         else:
             name = 'setup_function'

@@ -257,7 +252,7 @@ class FunctionMixin(PyobjMixin):

     def teardown(self):
         """ perform teardown for this test function. """
-        if hasattr(self.obj, 'im_self'):
+        if inspect.ismethod(self.obj):
             name = 'teardown_method'
         else:
             name = 'teardown_function'

@@ -1,4 +1,4 @@
-import py
+import py, sys
 from py.__.test import funcargs

 def test_getfuncargnames():

@@ -14,7 +14,8 @@ def test_getfuncargnames():
         def f(self, arg1, arg2="hello"):
             pass
     assert funcargs.getfuncargnames(A().f) == ['arg1']
-    assert funcargs.getfuncargnames(A.f) == ['arg1']
+    if sys.version_info < (3,0):
+        assert funcargs.getfuncargnames(A.f) == ['arg1']

 class TestFillFuncArgs:
     def test_funcarg_lookupfails(self, testdir):

@@ -361,7 +362,7 @@ class TestGenfuncFunctional:
             "*2 passed in*",
         ])

-    def test_addcall_with_funcargs_two(self, testdir):
+    def test_addcall_with_two_funcargs_generators(self, testdir):
         testdir.makeconftest("""
             def pytest_generate_tests(metafunc):
                 assert "arg1" in metafunc.funcargnames

@@ -26,7 +26,7 @@ class Test_genitems:

     def test_subdir_conftest_error(self, testdir):
         tmp = testdir.tmpdir
-        tmp.ensure("sub", "conftest.py").write("raise SyntaxError\n")
+        tmp.ensure("sub", "conftest.py").write("raise SyntaxError()\n")
         items, reprec = testdir.inline_genitems(tmp)
         collectionfailures = reprec.getfailedcollections()
         assert len(collectionfailures) == 1

@@ -153,7 +153,7 @@ class TestConfigPickling:
         assert col2.listnames() == col.listnames()

     def test_config_and_collector_pickling(self, testdir):
-        from cPickle import Pickler, Unpickler
+        from pickle import Pickler, Unpickler
         tmpdir = testdir.tmpdir
         dir1 = tmpdir.ensure("somedir", dir=1)
         config = testdir.parseconfig()

@@ -1,5 +1,8 @@

-import thread
+try:
+    from _thread import get_ident
+except ImportError:
+    from thread import get_ident

 class ThreadOut(object):
     """ A file like object that diverts writing operations

@@ -51,14 +54,13 @@ class ThreadOut(object):
         setattr(obj, attrname, self._oldout)

     def setwritefunc(self, writefunc, tid=None):
-        assert callable(writefunc)
         if tid is None:
-            tid = thread.get_ident()
+            tid = get_ident()
         self._tid2out[tid] = [0, writefunc]

     def delwritefunc(self, tid=None, ignoremissing=True):
         if tid is None:
-            tid = thread.get_ident()
+            tid = get_ident()
         try:
             del self._tid2out[tid]
         except KeyError:

@@ -66,7 +68,7 @@ class ThreadOut(object):
             raise

     def _get(self):
-        tid = thread.get_ident()
+        tid = get_ident()
         try:
             return self._tid2out[tid]
         except KeyError:

@@ -1,8 +1,11 @@
-import Queue
 import threading
 import time
 import sys
 import py
+try:
+    import queue
+except ImportError:
+    import Queue as queue

 ERRORMARKER = object()

@@ -14,7 +17,7 @@ class Reply(object):
     _excinfo = None
     def __init__(self, task):
         self.task = task
-        self._queue = Queue.Queue()
+        self._queue = queue.Queue()

     def _set(self, result):
         self._queue.put(result)

@@ -31,7 +34,7 @@ class Reply(object):
         while 1:
             try:
                 return self._queue.get_nowait()
-            except Queue.Empty:
+            except queue.Empty:
                 remaining = endtime - time.time()
                 if remaining <= 0: #time is over and no element arrived
                     raise IOError("timeout waiting for task %r" %(self.task,))

@@ -59,7 +62,7 @@ class Reply(object):
 class WorkerThread(threading.Thread):
     def __init__(self, pool):
         threading.Thread.__init__(self)
-        self._queue = Queue.Queue()
+        self._queue = queue.Queue()
         self._pool = pool
         self.setDaemon(1)

@@ -149,7 +152,7 @@ class WorkerPool(object):
         """
         if not self._shuttingdown:
             self._shuttingdown = True
-            for t in self._alive.keys():
+            for t in list(self._alive):
                 t.stop()

     def join(self, timeout=None):

@@ -158,7 +161,7 @@ class WorkerPool(object):
         deadline = delta = None
         if timeout is not None:
             deadline = time.time() + timeout
-        for thread in self._alive.keys():
+        for thread in list(self._alive):
             if deadline:
                 delta = deadline - time.time()
                 if delta <= 0:

@@ -1,13 +1,14 @@

 import py
 import sys
+from py.__.thread.pool import queue

 WorkerPool = py._thread.WorkerPool
 ThreadOut = py._thread.ThreadOut

 def test_some():
     pool = WorkerPool()
-    q = py.std.Queue.Queue()
+    q = queue.Queue()
     num = 4

     def f(i):

@@ -66,7 +67,7 @@ def test_maxthreads():

 def test_join_timeout():
     pool = WorkerPool()
-    q = py.std.Queue.Queue()
+    q = queue.Queue()
     def f():
         q.get()
     reply = pool.dispatch(f)