remove pylib things and move things to new pytest namespace

--HG--
branch : trunk
holger krekel 2010-10-07 11:59:00 +02:00
parent f488da5cc8
commit d1aff902d5
157 changed files with 176 additions and 15061 deletions

View File

@@ -17,7 +17,7 @@ syntax:glob
build/
dist/
py.egg-info
*.egg-info
issue/
env/
3rdparty/

View File

@@ -259,7 +259,7 @@ class PluginDoc(RestWriter):
warn("missing docstring", func)
def emit_options(self, plugin):
from py._test.parseopt import Parser
from pytest.parseopt import Parser
options = []
parser = Parser(processopt=options.append)
if hasattr(plugin, 'pytest_addoption'):

View File

@@ -1,95 +0,0 @@
import py
import sys
pytest_plugins = '_pytest doctest pytester'.split()
collect_ignore = ['build', 'doc/_build']
rsyncdirs = ['conftest.py', 'bin', 'py', 'doc', 'testing']
import os, py
pid = os.getpid()
def pytest_addoption(parser):
group = parser.getgroup("pylib", "py lib testing options")
group.addoption('--sshhost',
action="store", dest="sshhost", default=None,
help=("ssh xspec for ssh functional tests. "))
group.addoption('--runslowtests',
action="store_true", dest="runslowtests", default=False,
help=("run slow tests"))
group.addoption('--lsof',
action="store_true", dest="lsof", default=False,
help=("run FD checks if lsof is available"))
def pytest_configure(config):
if config.getvalue("lsof"):
try:
out = py.process.cmdexec("lsof -p %d" % pid)
except py.process.cmdexec.Error:
pass
else:
config._numfiles = len([x for x in out.split("\n") if "REG" in x])
def pytest_unconfigure(config, __multicall__):
if not hasattr(config, '_numfiles'):
return
__multicall__.execute()
out2 = py.process.cmdexec("lsof -p %d" % pid)
len2 = len([x for x in out2.split("\n") if "REG" in x])
assert len2 < config._numfiles + 7, out2
def pytest_funcarg__sshhost(request):
val = request.config.getvalue("sshhost")
if val:
return val
py.test.skip("need --sshhost option")
def pytest_generate_tests(metafunc):
multi = getattr(metafunc.function, 'multi', None)
if multi is not None:
assert len(multi.kwargs) == 1
for name, l in multi.kwargs.items():
for val in l:
metafunc.addcall(funcargs={name: val})
elif 'anypython' in metafunc.funcargnames:
for name in ('python2.4', 'python2.5', 'python2.6',
'python2.7', 'python3.1', 'pypy-c', 'jython'):
metafunc.addcall(id=name, param=name)
# XXX copied from execnet's conftest.py - needs to be merged
winpymap = {
'python2.7': r'C:\Python27\python.exe',
'python2.6': r'C:\Python26\python.exe',
'python2.5': r'C:\Python25\python.exe',
'python2.4': r'C:\Python24\python.exe',
'python3.1': r'C:\Python31\python.exe',
}
def getexecutable(name, cache={}):
try:
return cache[name]
except KeyError:
executable = py.path.local.sysfind(name)
if executable:
if name == "jython":
import subprocess
popen = subprocess.Popen([str(executable), "--version"],
universal_newlines=True, stderr=subprocess.PIPE)
out, err = popen.communicate()
if not err or "2.5" not in err:
executable = None
cache[name] = executable
return executable
def pytest_funcarg__anypython(request):
name = request.param
executable = getexecutable(name)
if executable is None:
if sys.platform == "win32":
executable = winpymap.get(name, None)
if executable:
executable = py.path.local(executable)
if executable.check():
return executable
py.test.skip("no %s found" % (name,))
return executable
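For illustration, a minimal sketch of a test module that consumes the hooks and funcargs defined above; the test names and parametrized values are made up, only the `multi` keyword marker, the `anypython` funcarg and the `--sshhost` option come from this conftest.

import py

@py.test.mark.multi(arg=[1, 2, 3])      # pytest_generate_tests() above adds one call per value
def test_multi_values(arg):
    assert arg in (1, 2, 3)

def test_anypython_exists(anypython):   # skipped when no matching interpreter is found
    assert anypython.check()

def test_needs_ssh(sshhost):            # skipped unless --sshhost is given
    assert sshhost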

View File

@@ -1,8 +0,0 @@
import py
def pytest_runtest_call(item, __multicall__):
cap = py.io.StdCapture()
try:
return __multicall__.execute()
finally:
outerr = cap.reset()

View File

@@ -1,374 +0,0 @@
"""XXX in progress: resultdb plugin for database logging of test results.
Saves test results to a datastore.
XXX this needs to be merged with the resultlog plugin.
Also mixes in some early ideas about an archive abstraction for test
results.
"""
import py
py.test.skip("XXX needs to be merged with resultlog")
from pytest_resultlog import ResultLog
def pytest_addoption(parser):
group = parser.addgroup("resultdb", "resultdb plugin options")
group.addoption('--resultdb', action="store", dest="resultdb",
metavar="path",
help="path to the file to store test results.")
group.addoption('--resultdb_format', action="store",
dest="resultdbformat", default='json',
help="data format (json, sqlite)")
def pytest_configure(config):
# XXX using config.XYZ is not good
if config.getvalue('resultdb'):
if config.option.resultdb:
# local import so missing module won't crash py.test
try:
import sqlite3
except ImportError:
raise config.Error('Could not import sqlite3 module')
try:
import simplejson
except ImportError:
raise config.Error('Could not import simplejson module')
if config.option.resultdbformat.lower() == 'json':
resultdb = ResultDB(JSONResultArchive,
config.option.resultdb)
elif config.option.resultdbformat.lower() == 'sqlite':
resultdb = ResultDB(SQLiteResultArchive,
config.option.resultdb)
else:
raise config.Error('Unknown --resultdb_format: %s' %
config.option.resultdbformat)
config.pluginmanager.register(resultdb)
class JSONResultArchive(object):
def __init__(self, archive_path):
self.archive_path = archive_path
import simplejson
self.simplejson = simplejson
def init_db(self):
if os.path.exists(self.archive_path):
data_file = open(self.archive_path)
archive = self.simplejson.load(data_file)
self.archive = archive
else:
self.archive = []
self._flush()
def append_data(self, data):
runid = py.std.uuid.uuid4()
for item in data:
item = item.copy()
item['runid'] = str(runid)
self.archive.append(item)
self._flush()
def get_all_data(self):
return self.archive
def _flush(self):
data_file = open(self.archive_path, 'w')
self.simplejson.dump(self.archive, data_file)
data_file.close()
class SQLiteResultArchive(object):
def __init__(self, archive_path):
self.archive_path = archive_path
import sqlite3
self.sqlite3 = sqlite3
def init_db(self):
if not os.path.exists(self.archive_path):
conn = self.sqlite3.connect(self.archive_path)
cursor = conn.cursor()
try:
cursor.execute(SQL_CREATE_TABLES)
conn.commit()
finally:
cursor.close()
conn.close()
def append_data(self, data):
flat_data = []
runid = py.std.uuid.uuid4()
for item in data:
item = item.copy()
item['runid'] = str(runid)
flat_data.append(self.flatten(item))
conn = self.sqlite3.connect(self.archive_path)
cursor = conn.cursor()
cursor.executemany(SQL_INSERT_DATA, flat_data)
conn.commit()
cursor.close()
conn.close()
def get_all_data(self):
conn = self.sqlite3.connect(self.archive_path)
conn.row_factory = self.sqlite3.Row
cursor = conn.cursor()
cursor.execute(SQL_SELECT_DATA)
data = cursor.fetchall()
cursor.close()
conn.close()
data = [self.unflatten(item) for item in data]
return data
def flatten(self, item):
return (item.get('runid', None),
item.get('name', None),
item.get('passed', False),
item.get('skipped', False),
item.get('failed', False),
item.get('shortrepr', None),
item.get('longrepr', None),
item.get('fspath', None),
item.get('itemname', None),
)
def unflatten(self, item):
names = ("runid name passed skipped failed shortrepr "
"longrepr fspath itemname").split()
d = {}
for i, name in enumerate(names):
d[name] = item[i]
return d
class ResultDB(ResultLog):
def __init__(self, cls, db_path):
self.archive = cls(db_path)
self.archive.init_db()
def write_log_entry(self, testpath, shortrepr, longrepr):
data = {}
event_excludes = ['colitem', 'longrepr']
for item in vars(event).keys():
if item not in event_excludes:
data[item] = getattr(event, item)
# use the locally calculated longrepr & shortrepr
data['longrepr'] = longrepr
data['shortrepr'] = shortrepr
data['testpath'] = unicode(testpath)
self.archive.append_data([data])
SQL_CREATE_TABLES = """
create table pytest_results (
runid varchar(36),
name varchar,
passed int,
skipped int,
failed int,
shortrepr varchar,
longrepr varchar,
fspath varchar,
itemname varchar
);
"""
SQL_INSERT_DATA = """
insert into pytest_results (
runid,
name,
passed,
skipped,
failed,
shortrepr,
longrepr,
fspath,
itemname)
values (?, ?, ?, ?, ?, ?, ?, ?, ?);
"""
SQL_SELECT_DATA = """
select
runid,
name,
passed,
skipped,
failed,
shortrepr,
longrepr,
fspath,
itemname
from pytest_results;
"""
# ===============================================================================
#
# plugin tests
#
# ===============================================================================
import os, StringIO
class BaseResultArchiveTests(object):
cls = None
def setup_class(cls):
# XXX refactor setup into a funcarg?
cls.tempdb = "test_tempdb"
def test_init_db(self, testdir):
tempdb_path = unicode(testdir.tmpdir.join(self.tempdb))
archive = self.cls(tempdb_path)
archive.init_db()
assert os.path.exists(tempdb_path)
def test_db_insert(self, testdir):
tempdb_path = unicode(testdir.tmpdir.join(self.tempdb))
archive = self.cls(tempdb_path)
archive.init_db()
assert len(archive.get_all_data()) == 0
data = [{'name': 'tmppackage/test_whatever.py:test_hello',
'fspath': '/Users/brian/work/tmppackage/test_whatever.py',
'name': 'test_hello',
'longrepr': '',
'passed': True,
'shortrepr': '.'
}]
archive.append_data(data)
result = archive.get_all_data()
print result
assert len(result) == 1
for key, value in data[0].items():
assert value == result[0][key]
assert 'runid' in result[0]
# make sure the data is persisted
tempdb_path = unicode(testdir.tmpdir.join(self.tempdb))
archive = self.cls(tempdb_path)
archive.init_db()
assert len(archive.get_all_data()) == 1
class TestJSONResultArchive(BaseResultArchiveTests):
cls = JSONResultArchive
def setup_method(self, method):
py.test.importorskip("simplejson")
class TestSQLiteResultArchive(BaseResultArchiveTests):
cls = SQLiteResultArchive
def setup_method(self, method):
py.test.importorskip("sqlite3")
def test_init_db_sql(self, testdir):
py.test.importorskip("sqlite3")
tempdb_path = unicode(testdir.tmpdir.join(self.tempdb))
archive = self.cls(tempdb_path)
archive.init_db()
assert os.path.exists(tempdb_path)
# is table in the database?
import sqlite3
conn = sqlite3.connect(tempdb_path)
cursor = conn.cursor()
cursor.execute("""SELECT name FROM sqlite_master
ORDER BY name;""")
tables = cursor.fetchall()
cursor.close()
conn.close()
assert len(tables) == 1
def verify_archive_item_shape(item):
names = ("runid name passed skipped failed shortrepr "
"longrepr fspath itemname").split()
for name in names:
assert name in item
class TestWithFunctionIntegration:
def getarchive(self, testdir, arg):
py.test.importorskip("sqlite3")
py.test.importorskip("simplejson")
resultdb = testdir.tmpdir.join("resultdb")
args = ["--resultdb=%s" % resultdb, "--resultdb_format=sqlite"] + [arg]
testdir.runpytest(*args)
assert resultdb.check(file=1)
archive = SQLiteResultArchive(unicode(resultdb))
archive.init_db()
return archive
def test_collection_report(self, testdir):
py.test.skip("Needs a rewrite for db version.")
ok = testdir.makepyfile(test_collection_ok="")
skip = testdir.makepyfile(test_collection_skip="import py ; py.test.skip('hello')")
fail = testdir.makepyfile(test_collection_fail="XXX")
lines = self.getresultdb(testdir, ok)
assert not lines
lines = self.getresultdb(testdir, skip)
assert len(lines) == 2
assert lines[0].startswith("S ")
assert lines[0].endswith("test_collection_skip.py")
assert lines[1].startswith(" ")
assert lines[1].endswith("test_collection_skip.py:1: Skipped: 'hello'")
lines = self.getresultdb(testdir, fail)
assert lines
assert lines[0].startswith("F ")
assert lines[0].endswith("test_collection_fail.py"), lines[0]
for x in lines[1:]:
assert x.startswith(" ")
assert "XXX" in "".join(lines[1:])
def test_log_test_outcomes(self, testdir):
mod = testdir.makepyfile(test_mod="""
import py
def test_pass(): pass
def test_skip(): py.test.skip("hello")
def test_fail(): raise ValueError("val")
""")
archive = self.getarchive(testdir, mod)
data = archive.get_all_data()
for item in data:
verify_archive_item_shape(item)
assert len(data) == 3
assert len([item for item in data if item['passed'] == True]) == 1
assert len([item for item in data if item['skipped'] == True]) == 1
assert len([item for item in data if item['failed'] == True]) == 1
def test_internal_exception(self):
py.test.skip("Needs a rewrite for db version.")
# they are produced for example by a teardown failing
# at the end of the run
try:
raise ValueError
except ValueError:
excinfo = py.code.ExceptionInfo()
reslog = ResultDB(StringIO.StringIO())
reslog.pytest_internalerror(excinfo.getrepr)
entry = reslog.logfile.getvalue()
entry_lines = entry.splitlines()
assert entry_lines[0].startswith('! ')
assert os.path.basename(__file__)[:-1] in entry_lines[0] #.py/.pyc
assert entry_lines[-1][0] == ' '
assert 'ValueError' in entry
def test_generic(testdir):
testdir.makepyfile("""
import py
def test_pass():
pass
def test_fail():
assert 0
def test_skip():
py.test.skip("")
""")
testdir.runpytest("--resultdb=result.sqlite")
#testdir.tmpdir.join("result.sqlite")

View File

@@ -1,190 +0,0 @@
"""
Allows testing twisted applications with pytest.
Notes: twisted's asynchronous behavior may influence the order of test functions.
TODO:
+ credits to Ralf Schmitt, see: http://twistedmatrix.com/pipermail/twisted-python/2007-February/014872.html
+ get the tests to work
"""
import sys
try:
from twisted.internet import reactor, defer
from twisted.python import failure, log
except ImportError:
print "To use the twisted option you have to install twisted."
sys.exit(10)
try:
from greenlet import greenlet
except ImportError:
print "Since pylib 1.0 greenlet are removed and separately packaged: " \
"http://pypi.python.org/pypi/greenlet"
sys.exit(10)
def _start_twisted_logging():
"""Enables twisted internal logging"""
class Logger(object):
"""late-bound sys.stdout"""
def write(self, msg):
sys.stdout.write(msg)
def flush(self):
sys.stdout.flush()
# sys.stdout will be changed by py.test later.
log.startLogging(Logger(), setStdout=0)
def _run_twisted(logging=False):
"""Start twisted mainloop and initialize recursive calling of doit()."""
# make twisted copy traceback...
failure.Failure.cleanFailure = lambda *args: None
if logging:
_start_twisted_logging()
def fix_signal_handling():
# see http://twistedmatrix.com/trac/ticket/733
import signal
if hasattr(signal, "siginterrupt"):
signal.siginterrupt(signal.SIGCHLD, False)
def start():
fix_signal_handling()
doit(None)
# recursively called for each test-function/method via done()
def doit(val): # val always None
# switch context to wait that wrapper() passes back to test-method
res = gr_tests.switch(val)
if res is None:
reactor.stop()
return
def done(res):
reactor.callLater(0.0, doit, None) # recursive call of doit()
def err(res):
reactor.callLater(0.0, doit, res)
# the test-function *may* return a deferred
# here the test-function will actually be called
# done() finalizes a test by ensuring the recursive invocation
# of doit()
defer.maybeDeferred(res).addCallback(done).addErrback(err)
# initially preparing the calling of doit() and starting the reactor
reactor.callLater(0.0, start)
reactor.run()
def pytest_addoption(parser):
group = parser.addgroup('twisted options')
group.addoption('--twisted-logging', action='store_true', default=False,
dest='twisted_logging',
help="switch on twisted internal logging")
def pytest_configure(config):
twisted_logging = config.getvalue("twisted_logging")
gr_twisted.switch(twisted_logging)
def pytest_unconfigure(config):
gr_twisted.switch(None)
def pytest_pyfunc_call(pyfuncitem):
# XXX1 kwargs?
# XXX2 we want to delegate actual call to next plugin
# (which may want to produce test coverage, etc.)
res = gr_twisted.switch(lambda: pyfuncitem.call())
if res:
res.raiseException()
return True # indicates that we performed the function call
gr_twisted = greenlet(_run_twisted)
gr_tests = greenlet.getcurrent()
# ===============================================================================
# plugin tests
# ===============================================================================
def test_generic(testdir):
testdir.makepyfile('''
def test_pass():
pass
from twisted.internet import defer, reactor
from twisted.python import failure
from twisted.python import log
def test_no_deferred():
assert True is True
def test_deferred():
log.msg("test_deferred() called")
d = defer.Deferred()
def done():
log.msg("test_deferred.done() CALLBACK DONE")
d.callback(None)
reactor.callLater(2.5, done)
log.msg("test_deferred() returning deferred: %r" % (d,))
return d
def test_deferred2():
log.msg("test_deferred2() called")
d = defer.Deferred()
def done():
log.msg("test_deferred2.done() CALLBACK DONE")
d.callback(None)
reactor.callLater(2.5, done)
log.msg("test_deferred2() returning deferred: %r" % (d,))
return d
def test_deferred4():
log.msg("test_deferred4() called")
from twisted.web.client import getPage
def printContents(contents):
assert contents == ""
deferred = getPage('http://twistedmatrix.com/')
deferred.addCallback(printContents)
return deferred
def test_deferred3():
log.msg("test_deferred3() called")
d = defer.Deferred()
def done():
log.msg("test_deferred3.done() CALLBACK DONE")
d.callback(None)
reactor.callLater(2.5, done)
log.msg("test_deferred3() returning deferred: %r" % (d,))
return d
class TestTwistedSetupMethod:
def setup_method(self, method):
log.msg("TestTwistedSetupMethod.setup_method() called")
def test_deferred(self):
log.msg("TestTwistedSetupMethod.test_deferred() called")
d = defer.Deferred()
def done():
log.msg("TestTwistedSetupMethod.test_deferred() CALLBACK DONE")
d.callback(None)
reactor.callLater(2.5, done)
log.msg("TestTwistedSetupMethod.test_deferred() returning deferred: %r" % (d,))
return d
def test_defer_fail():
def fun():
log.msg("provoking NameError")
rsdfg
return defer.maybeDeferred(fun)
''')
testdir.runpytest("-T")
# XXX: what to do?
# s = testdir.tmpdir.join("event.log").read()
# assert s.find("TestrunFinish") != -1

View File

@@ -1,2 +0,0 @@
pygreen: experimental IO and execnet operations through greenlets

View File

@@ -1,20 +0,0 @@
"""
this little helper allows running tests multiple times
in the same process, which is useful for running tests from
a console.
NOTE: since 1.3.1 you can just call py.test.cmdline.main()
multiple times - no special logic is needed.
"""
import py, sys
def pytest(argv=None):
if argv is None:
argv = []
try:
sys.argv[1:] = argv
py.cmdline.pytest()
except SystemExit:
pass
# we need to reset the global py.test.config object
py.test.config = py.test.config.__class__()
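A short sketch of the approach the NOTE above recommends instead of this helper; the test file name is a placeholder.

import py

# each call to py.test.cmdline.main() builds a fresh config and returns the exit status
for i in range(2):
    exitcode = py.test.cmdline.main(["-q", "test_something.py"])
    print("run %d finished with exit status %d" % (i, exitcode))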

View File

@@ -1,6 +1,6 @@
import py
from py._plugin.pytest_restdoc import convert_rest_html, strip_html_header
from pytest.plugin.pytest_restdoc import convert_rest_html, strip_html_header
html = py.xml.html

View File

@@ -1,122 +0,0 @@
=============================================================================
Channel implementation notes
=============================================================================
The public API of channels makes them appear either opened or closed.
When a channel is closed, we can't send any more items, and it will not
receive any more items than already queued.
Callbacks make the situation slightly more subtle. Callbacks are
attached to the ChannelFactory object, so that Channel objects can be
garbage-collected and still leave behind an active callback that can
continue to receive items.
The CHANNEL_CLOSE message is sent when a channel id is about to be removed
from the ChannelFactory, which means when the Channel object has been
garbage-collected *and* there is no callback any more.
If a Channel object is garbage-collected but the ChannelFactory has a
callback for it, a CHANNEL_LAST_MESSAGE message is sent. It is only useful
if both sides' Channel objects have an associated callback. In this
situation, CHANNEL_LAST_MESSAGE allows its receiver to un-register its own
callback; if/when in addition the receiver side also loses the last
reference to its Channel object, the Channel is closed. So in this particular
situation both sides must forget about the Channel object for it to be
automatically closed.
gateway <---> channelfactory ---> {id: weakref(channel)}
                             ---> {id: callback}
State and invariants of Channel objects
---------------------------------------
_channels and _callbacks are dictionaries on the ChannelFactory.
Other attributes are on the Channel objects.
All states are valid at any time (even with multithreading) unless
marked with {E}, which means that they may be temporary invalid.
They are eventually restored.
States ("sendonly" means opened but won't receive any more items):
opened              sendonly        closed              deleted
==================  ==============  ==================  ===============
not _closed         not _closed     _closed             <no ref left>
not _receiveclosed  _receiveclosed  {E} _receiveclosed
In the presence of callbacks, "deleted" implies neither "closed" nor "sendonly".
It only means that no more items can be sent. The (logical) channel can
continue to receive data via the call-back even if the channel object no
longer exists.
The two kinds of channels, with or without callback:
items read by receive()        has a callback
=============================  =======================================
_items is a Queue              _items is None
id not in _callbacks           state==opened: id in _callbacks
{E} state==sendonly: there is  {E} state!=opened: id not in _callbacks
    an ENDMARKER in _items
{E} state==closed: there is
    an ENDMARKER in _items
Callback calls should be considered asynchronous. The channel can be in any
state and change its state while the callback runs.
The ChannelFactory's WeakValueDictionary _channels maps some ids to their
channel object, depending on their state:
opened           sendonly    closed      deleted
===============  ==========  ==========  ===============
id in _channels  {E} not in  {E} not in  not in
All received RemoteErrors are handled exactly once: they are normally
re-raised once in waitclose() or receive(). If it is not possible, they are
at the moment dumped to stderr. (XXX should use logging/tracing)
Only channels in {E} "closed" state can hold RemoteErrors.
Methods:
* close() returns with the channel in "closed" state
* send() either send the data or raise if "closed"
* receive() wait for the next item. If no item left and the state
changes to non-"opened", raise
* waitclose() wait for a non-"opened" state
Assuming the channel is connected and the connection is alive, the local state
eventually influences the state of the corresponding remote channel object:
  local  |  opened   sendonly   closed   deleted
remote   |
=======================================================
         |
opened   |    ok       n/a       (1)      (2)
         |
sendonly |    n/a      n/a       n/a      ok
         |
closed   |    (1)      n/a       ok       ok
         |
deleted  |    (2)      ok        ok       ok
(1) The side with the closed channel object must send a CHANNEL_CLOSE message,
which will eventually put the other side's channel in "closed" state if
it is still "opened".
(2) If the deleted channel has no callback, this is equivalent to (1).
Otherwise, the side with the deleted channel must send a
CHANNEL_LAST_MESSAGE, which will eventually put the other side's channel in
"sendonly" state if it is still "opened".
n/a These configurations should never occur.
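For illustration, here is how the two kinds of channels described above look
from user code, assuming the standalone execnet package (the remote snippets
are placeholders)::

    import execnet
    gw = execnet.makegateway()

    # "items read by receive()": _items is a Queue on this side
    ch = gw.remote_exec("channel.send(42)")
    assert ch.receive() == 42

    # "has a callback": _items is None and items go to the callback,
    # which can keep receiving even after we drop our Channel reference
    results = []
    ch2 = gw.remote_exec("channel.send(1); channel.send(2)")
    ch2.setcallback(results.append)
    ch2.waitclose()
    gw.exit()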

View File

@@ -1,26 +0,0 @@
Execnet / Path combination
I think the nice code in this directory
should be refactored so that you can use
it like this:
rp = gateway.get_remote_path(relpath)
and relpath could be absolute, relative (should
follow remote-platform syntax) or None/"." (the
current dir on the other side).
The tricky part probably is defining sensible
setup/teardown semantics with respect to
starting the "Path" server on the other side;
we at least don't want to have multiple threads
that serve path requests, and maybe we want
to be able to explicitly shut down a once-started
RemotePath server (not sure though).
For a single-threaded py.execnet it might be helpful to be
able to install new network messages (which are lower level
than remote_exec() and work with callbacks, so don't follow
the nice "synchronous" programming model that you get with
threads/greenlets/tasklets).
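As a very rough sketch (everything below is hypothetical except
gateway.remote_exec(), channel.send() and channel.receive()), the proposed
helper could start out as something like:

def get_remote_path(gateway, relpath=None):
    # ask the other side to resolve relpath against its current directory;
    # a real RemotePath server would keep the channel open for more requests
    source = ("import os\n"
              "relpath = channel.receive()\n"
              "channel.send(os.path.abspath(relpath or '.'))\n")
    channel = gateway.remote_exec(source)
    channel.send(relpath)
    return channel.receive()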

View File

@@ -1,81 +0,0 @@
URL escaping in Subversion
==========================
A quick document describing the rules (as far as we've found out, that is) that
apply to quoting of URLs and file paths in Subversion. Handling quoting
properly is a bit of a challenge, since different rules apply for file paths
and URLs, and those rules aren't entirely clear in either case.
What follows is a list of semi-random notes that need to be taken into
consideration when implementing proper quoting in the 'py lib'.
**DISCLAIMER**: currently the idea is just to have this document around as a
TODO list for implementation, not sure what will happen to it in the future...
Don't consider it part of the py lib documentation, and do understand it may be
incomplete or even incorrect...
* SVN deals with remote objects using URLs and local ones using paths
URL related notes
-----------------
* URLs follow (almost) normal `URL encoding rules`_
characters that aren't allowed in URL paths (such as :, @, %, etc.) should
be replaced with a % sign followed by the hexadecimal value of the character
(two-digit hex)
an exception (the only one I could find so far) is the drive letter in a file
URL on Windows; the following path was required to get a file 'bar' from a
repo in 'c:\\foo'::
file:///c:/foo/bar
* URLs always have / as separator
on Windows, the \\ characters in paths will have to be replaced with a /;
also (see above), if the path contains a drive letter, a / should be prepended
* ignore casing on Windows?
since Windows is case-insensitive, it may make sense to consider ignoring
case on that platform(?)
* long file names
don't even want to go there... `filed an issue on this in the tracker`_...
Path related notes
------------------
* all characters that are supported in paths by any operating system seem to
be supported by SVN
basically SVN doesn't think about platforms that aren't capable of using
certain characters: it will happily allow you to check a file with a name
containing a backslash (\\) in, resulting in a repo that isn't usable in
Windows anymore (you'll get a nasty message explaining how your local
checkout is broken on checking it out)...
I think py.path.svn* should take the approach of not allowing the characters
that will result in failing checkouts on Windows. These characters are (I
think, list taken from `some website`_)::
* | \ / : < > ?
This would mean that both svnwc and svnurl should fail on initializing when
the path (or the path part of the URL) contains one of these characters. Also
join() and other functions that take (parts of) paths as arguments should
check for, and fail on, these characters.
* paths don't require encoding
normally paths don't have to be encoded, however @ can confuse SVN in certain
cases; a workaround is to add @HEAD after the path (also works for relative
paths, I encountered this doing an SVN info on a file called 'bar@baz', in
the end the command 'svn info bar@baz@HEAD' worked)
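For illustration, the URL encoding rule above expressed in Python (Python 2
urllib spelling; the paths are made up)::

    >>> import urllib
    >>> urllib.quote("/tags/release@1.0", safe="/")
    '/tags/release%401.0'
    >>> "file:///" + r"c:\foo\bar".replace("\\", "/")
    'file:///c:/foo/bar'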
.. _`filed an issue on this in the tracker`: https://codespeak.net/issue/py-dev/issue38
.. _`URL encoding rules`: http://en.wikipedia.org/wiki/Percent-encoding
.. _`some website`: http://linuxboxadmin.com/articles/filefriction.php

View File

@@ -1,174 +0,0 @@
"""
py.test and pylib: rapid testing and development utils
this module uses apipkg.py for lazy-loading sub modules
and classes. The initpkg-dictionary below specifies
name->value mappings where value can be another namespace
dictionary or an import path.
(c) Holger Krekel and others, 2004-2010
"""
__version__ = version = "1.4.0a1"
import py.apipkg
py.apipkg.initpkg(__name__, dict(
# access to all standard lib modules
std = '._std:std',
# access to all posix errno's as classes
error = '._error:error',
_pydir = '.__metainfo:pydir',
version = 'py:__version__', # backward compatibility
cmdline = {
'pytest': '._cmdline.pytest:main',
'pylookup': '._cmdline.pylookup:main',
'pycountloc': '._cmdline.pycountloc:main',
'pycleanup': '._cmdline.pycleanup:main',
'pywhich' : '._cmdline.pywhich:main',
'pysvnwcrevert' : '._cmdline.pysvnwcrevert:main',
'pyconvert_unittest' : '._cmdline.pyconvert_unittest:main',
},
test = {
# helpers for use from test functions or collectors
'__onfirstaccess__' : '._test.config:onpytestaccess',
'__doc__' : '._test:__doc__',
# configuration/initialization related test api
'config' : '._test.config:config_per_process',
'ensuretemp' : '._test.config:ensuretemp',
'collect': {
'Collector' : '._test.collect:Collector',
'Directory' : '._test.collect:Directory',
'File' : '._test.collect:File',
'Item' : '._test.collect:Item',
},
'cmdline': {
'main' : '._test.session:main', # backward compat
},
},
# hook into the top-level standard library
process = {
'__doc__' : '._process:__doc__',
'cmdexec' : '._process.cmdexec:cmdexec',
'kill' : '._process.killproc:kill',
'ForkedFunc' : '._process.forkedfunc:ForkedFunc',
},
path = {
'__doc__' : '._path:__doc__',
'svnwc' : '._path.svnwc:SvnWCCommandPath',
'svnurl' : '._path.svnurl:SvnCommandPath',
'local' : '._path.local:LocalPath',
'SvnAuth' : '._path.svnwc:SvnAuth',
},
# some nice slightly magic APIs
magic = {
'invoke' : '._code.oldmagic:invoke',
'revoke' : '._code.oldmagic:revoke',
'patch' : '._code.oldmagic:patch',
'revert' : '._code.oldmagic:revert',
'autopath' : '._path.local:autopath',
'AssertionError' : '._code.oldmagic2:AssertionError',
},
# python inspection/code-generation API
code = {
'__doc__' : '._code:__doc__',
'compile' : '._code.source:compile_',
'Source' : '._code.source:Source',
'Code' : '._code.code:Code',
'Frame' : '._code.code:Frame',
'ExceptionInfo' : '._code.code:ExceptionInfo',
'Traceback' : '._code.code:Traceback',
'getfslineno' : '._code.source:getfslineno',
'getrawcode' : '._code.code:getrawcode',
'patch_builtins' : '._code.code:patch_builtins',
'unpatch_builtins' : '._code.code:unpatch_builtins',
'_AssertionError' : '._code.assertion:AssertionError',
'_reinterpret_old' : '._code.assertion:reinterpret_old',
'_reinterpret' : '._code.assertion:reinterpret',
'_reprcompare' : '._code.assertion:_reprcompare',
},
# backports and additions of builtins
builtin = {
'__doc__' : '._builtin:__doc__',
'enumerate' : '._builtin:enumerate',
'reversed' : '._builtin:reversed',
'sorted' : '._builtin:sorted',
'any' : '._builtin:any',
'set' : '._builtin:set',
'frozenset' : '._builtin:frozenset',
'BaseException' : '._builtin:BaseException',
'GeneratorExit' : '._builtin:GeneratorExit',
'_sysex' : '._builtin:_sysex',
'print_' : '._builtin:print_',
'_reraise' : '._builtin:_reraise',
'_tryimport' : '._builtin:_tryimport',
'exec_' : '._builtin:exec_',
'_basestring' : '._builtin:_basestring',
'_totext' : '._builtin:_totext',
'_isbytes' : '._builtin:_isbytes',
'_istext' : '._builtin:_istext',
'_getimself' : '._builtin:_getimself',
'_getfuncdict' : '._builtin:_getfuncdict',
'_getcode' : '._builtin:_getcode',
'builtins' : '._builtin:builtins',
'execfile' : '._builtin:execfile',
'callable' : '._builtin:callable',
},
# input-output helping
io = {
'__doc__' : '._io:__doc__',
'dupfile' : '._io.capture:dupfile',
'TextIO' : '._io.capture:TextIO',
'BytesIO' : '._io.capture:BytesIO',
'FDCapture' : '._io.capture:FDCapture',
'StdCapture' : '._io.capture:StdCapture',
'StdCaptureFD' : '._io.capture:StdCaptureFD',
'TerminalWriter' : '._io.terminalwriter:TerminalWriter',
'ansi_print' : '._io.terminalwriter:ansi_print',
'get_terminal_width' : '._io.terminalwriter:get_terminal_width',
'saferepr' : '._io.saferepr:saferepr',
},
# small and mean xml/html generation
xml = {
'__doc__' : '._xmlgen:__doc__',
'html' : '._xmlgen:html',
'Tag' : '._xmlgen:Tag',
'raw' : '._xmlgen:raw',
'Namespace' : '._xmlgen:Namespace',
'escape' : '._xmlgen:escape',
},
log = {
# logging API ('producers' and 'consumers' connected via keywords)
'__doc__' : '._log:__doc__',
'_apiwarn' : '._log.warning:_apiwarn',
'Producer' : '._log.log:Producer',
'setconsumer' : '._log.log:setconsumer',
'_setstate' : '._log.log:setstate',
'_getstate' : '._log.log:getstate',
'Path' : '._log.log:Path',
'STDOUT' : '._log.log:STDOUT',
'STDERR' : '._log.log:STDERR',
'Syslog' : '._log.log:Syslog',
},
# compatibility modules (deprecated)
compat = {
'__doc__' : '._compat:__doc__',
'doctest' : '._compat.dep_doctest:doctest',
'optparse' : '._compat.dep_optparse:optparse',
'textwrap' : '._compat.dep_textwrap:textwrap',
'subprocess' : '._compat.dep_subprocess:subprocess',
},
))
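For illustration, a stripped-down version of the same lazy-export mechanism,
assuming the standalone apipkg distribution; the package and entry names below
are made up, only apipkg.initpkg() is real API.

# mypkg/__init__.py -- hypothetical package exporting one lazily imported name
import apipkg
apipkg.initpkg(__name__, dict(
    tool = {
        # 'mypkg.tool.run' triggers the import of mypkg._impl.run on first access
        'run': '._impl.run:main',
    },
))

Importing mypkg itself stays cheap; mypkg._impl.run is only imported when
mypkg.tool.run is first touched.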

View File

@@ -1,2 +0,0 @@
import py
pydir = py.path.local(py.__file__).dirpath()

View File

@@ -1,225 +0,0 @@
import sys
try:
reversed = reversed
except NameError:
def reversed(sequence):
"""reversed(sequence) -> reverse iterator over values of the sequence
Return a reverse iterator
"""
if hasattr(sequence, '__reversed__'):
return sequence.__reversed__()
if not hasattr(sequence, '__getitem__'):
raise TypeError("argument to reversed() must be a sequence")
return reversed_iterator(sequence)
class reversed_iterator(object):
def __init__(self, seq):
self.seq = seq
self.remaining = len(seq)
def __iter__(self):
return self
def next(self):
i = self.remaining
if i > 0:
i -= 1
item = self.seq[i]
self.remaining = i
return item
raise StopIteration
def __length_hint__(self):
return self.remaining
try:
any = any
except NameError:
def any(iterable):
for x in iterable:
if x:
return True
return False
try:
sorted = sorted
except NameError:
builtin_cmp = cmp # need to use cmp as keyword arg
def sorted(iterable, cmp=None, key=None, reverse=0):
use_cmp = None
if key is not None:
if cmp is None:
def use_cmp(x, y):
return builtin_cmp(x[0], y[0])
else:
def use_cmp(x, y):
return cmp(x[0], y[0])
l = [(key(element), element) for element in iterable]
else:
if cmp is not None:
use_cmp = cmp
l = list(iterable)
if use_cmp is not None:
l.sort(use_cmp)
else:
l.sort()
if reverse:
l.reverse()
if key is not None:
return [element for (_, element) in l]
return l
try:
set, frozenset = set, frozenset
except NameError:
from sets import set, frozenset
# pass through
enumerate = enumerate
try:
BaseException = BaseException
except NameError:
BaseException = Exception
try:
GeneratorExit = GeneratorExit
except NameError:
class GeneratorExit(Exception):
""" This exception is never raised, it is there to make it possible to
write code compatible with CPython 2.5 even in lower CPython
versions."""
pass
GeneratorExit.__module__ = 'exceptions'
_sysex = (KeyboardInterrupt, SystemExit, MemoryError, GeneratorExit)
if sys.version_info >= (3, 0):
exec ("print_ = print ; exec_=exec")
import builtins
# some backward compatibility helpers
_basestring = str
def _totext(obj, encoding=None):
if isinstance(obj, bytes):
obj = obj.decode(encoding)
elif not isinstance(obj, str):
obj = str(obj)
return obj
def _isbytes(x):
return isinstance(x, bytes)
def _istext(x):
return isinstance(x, str)
def _getimself(function):
return getattr(function, '__self__', None)
def _getfuncdict(function):
return getattr(function, "__dict__", None)
def _getcode(function):
return getattr(function, "__code__", None)
def execfile(fn, globs=None, locs=None):
if globs is None:
back = sys._getframe(1)
globs = back.f_globals
locs = back.f_locals
del back
elif locs is None:
locs = globs
fp = open(fn, "rb")
try:
source = fp.read()
finally:
fp.close()
co = compile(source, fn, "exec", dont_inherit=True)
exec_(co, globs, locs)
def callable(obj):
return hasattr(obj, "__call__")
else:
import __builtin__ as builtins
_totext = unicode
_basestring = basestring
execfile = execfile
callable = callable
def _isbytes(x):
return isinstance(x, str)
def _istext(x):
return isinstance(x, unicode)
def _getimself(function):
return getattr(function, 'im_self', None)
def _getfuncdict(function):
return getattr(function, "__dict__", None)
def _getcode(function):
try:
return getattr(function, "__code__")
except AttributeError:
return getattr(function, "func_code", None)
def print_(*args, **kwargs):
""" minimal backport of py3k print statement. """
sep = ' '
if 'sep' in kwargs:
sep = kwargs.pop('sep')
end = '\n'
if 'end' in kwargs:
end = kwargs.pop('end')
file = 'file' in kwargs and kwargs.pop('file') or sys.stdout
if kwargs:
args = ", ".join([str(x) for x in kwargs])
raise TypeError("invalid keyword arguments: %s" % args)
at_start = True
for x in args:
if not at_start:
file.write(sep)
file.write(str(x))
at_start = False
file.write(end)
def exec_(obj, globals=None, locals=None):
""" minimal backport of py3k exec statement. """
__tracebackhide__ = True
if globals is None:
frame = sys._getframe(1)
globals = frame.f_globals
if locals is None:
locals = frame.f_locals
elif locals is None:
locals = globals
exec2(obj, globals, locals)
if sys.version_info >= (3,0):
def _reraise(cls, val, tb):
__tracebackhide__ = True
assert hasattr(val, '__traceback__')
raise val
else:
exec ("""
def _reraise(cls, val, tb):
__tracebackhide__ = True
raise cls, val, tb
def exec2(obj, globals, locals):
__tracebackhide__ = True
exec obj in globals, locals
""")
def _tryimport(*names):
""" return the first successfully imported module. """
assert names
for name in names:
try:
return __import__(name, None, None, '__doc__')
except ImportError:
excinfo = sys.exc_info()
_reraise(*excinfo)

View File

@@ -1,86 +0,0 @@
#!/usr/bin/env python
"""\
py.cleanup [PATH] ...
Delete typical python development-related files recursively under the specified PATH (which defaults to the current working directory). Don't follow links and don't recurse into directories with a dot. Optionally remove setup.py-related files and empty
directories.
"""
import py
import sys, subprocess
def main():
parser = py.std.optparse.OptionParser(usage=__doc__)
parser.add_option("-e", metavar="ENDING",
dest="endings", default=[".pyc", "$py.class"], action="append",
help=("(multi) recursively remove files with the given ending."
" '.pyc' and '$py.class' are in the default list."))
parser.add_option("-d", action="store_true", dest="removedir",
help="remove empty directories.")
parser.add_option("-s", action="store_true", dest="setup",
help="remove 'build' and 'dist' directories next to setup.py files")
parser.add_option("-a", action="store_true", dest="all",
help="synonym for '-s -d -e pip-log.txt'")
parser.add_option("-n", "--dryrun", dest="dryrun", default=False,
action="store_true",
help="don't actually delete but display would-be-removed filenames.")
(options, args) = parser.parse_args()
Cleanup(options, args).main()
class Cleanup:
def __init__(self, options, args):
if not args:
args = ["."]
self.options = options
self.args = [py.path.local(x) for x in args]
if options.all:
options.setup = True
options.removedir = True
options.endings.append("pip-log.txt")
def main(self):
if self.options.setup:
for arg in self.args:
self.setupclean(arg)
for path in self.args:
py.builtin.print_("cleaning path", path,
"of extensions", self.options.endings)
for x in path.visit(self.shouldremove, self.recursedir):
self.remove(x)
if self.options.removedir:
for x in path.visit(lambda x: x.check(dir=1), self.recursedir):
if not x.listdir():
self.remove(x)
def shouldremove(self, p):
for ending in self.options.endings:
if p.basename.endswith(ending):
return True
def recursedir(self, path):
return path.check(dotfile=0, link=0)
def remove(self, path):
if not path.check():
return
if self.options.dryrun:
py.builtin.print_("would remove", path)
else:
py.builtin.print_("removing", path)
path.remove()
def XXXcallsetup(self, setup, *args):
old = setup.dirpath().chdir()
try:
subprocess.call([sys.executable, str(setup)] + list(args))
finally:
old.chdir()
def setupclean(self, path):
for x in path.visit("setup.py", self.recursedir):
basepath = x.dirpath()
self.remove(basepath / "build")
self.remove(basepath / "dist")

View File

@@ -1,253 +0,0 @@
import re
import sys
try:
import parser
except ImportError:
parser = None
d={}
# d is the dictionary of unittest changes, keyed to the old name
# used by unittest.
# d[old][0] is the new replacement function.
# d[old][1] is the operator you will substitute, or '' if there is none.
# d[old][2] is the possible number of arguments to the unittest
# function.
# Old Unittest Name new name operator # of args
d['assertRaises'] = ('raises', '', ['Any'])
d['fail'] = ('raise AssertionError', '', [0,1])
d['assert_'] = ('assert', '', [1,2])
d['failIf'] = ('assert not', '', [1,2])
d['assertEqual'] = ('assert', ' ==', [2,3])
d['failIfEqual'] = ('assert not', ' ==', [2,3])
d['assertIn'] = ('assert', ' in', [2,3])
d['assertNotIn'] = ('assert', ' not in', [2,3])
d['assertNotEqual'] = ('assert', ' !=', [2,3])
d['failUnlessEqual'] = ('assert', ' ==', [2,3])
d['assertAlmostEqual'] = ('assert round', ' ==', [2,3,4])
d['failIfAlmostEqual'] = ('assert not round', ' ==', [2,3,4])
d['assertNotAlmostEqual'] = ('assert round', ' !=', [2,3,4])
d['failUnlessAlmostEquals'] = ('assert round', ' ==', [2,3,4])
# the list of synonyms
d['failUnlessRaises'] = d['assertRaises']
d['failUnless'] = d['assert_']
d['assertEquals'] = d['assertEqual']
d['assertNotEquals'] = d['assertNotEqual']
d['assertAlmostEquals'] = d['assertAlmostEqual']
d['assertNotAlmostEquals'] = d['assertNotAlmostEqual']
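# For illustration, a few rewrites the table above produces (the input lines
# are hypothetical; the output is what rewrite_utest() below generates):
#   self.assertEqual(a, b)             ->  assert a == b
#   self.failIf(x, "boom")             ->  assert not x, "boom"
#   self.assertAlmostEqual(a, b, 3)    ->  assert round(a - b, 3) == 0
#   self.assertRaises(ValueError, f)   ->  raises(ValueError, f)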
# set up the regular expressions we will need
leading_spaces = re.compile(r'^(\s*)') # this never fails
pat = ''
for k in d.keys(): # this complicated pattern to match all unittests
pat += '|' + r'^(\s*)' + 'self.' + k + r'\(' # \tself.whatever(
old_names = re.compile(pat[1:])
linesep='\n' # nobody will really try to convert files not read
# in text mode, will they?
def blocksplitter(fp):
'''split a file into blocks that are headed by functions to rename'''
blocklist = []
blockstring = ''
for line in fp:
interesting = old_names.match(line)
if interesting :
if blockstring:
blocklist.append(blockstring)
blockstring = line # reset the block
else:
blockstring += line
blocklist.append(blockstring)
return blocklist
def rewrite_utest(block):
'''rewrite every block to use the new utest functions
returns the rewritten unittest, unless it ran into problems,
in which case it just returns the block unchanged.
'''
utest = old_names.match(block)
if not utest:
return block
old = utest.group(0).lstrip()[5:-1] # the name we want to replace
new = d[old][0] # the name of the replacement function
op = d[old][1] # the operator you will use , or '' if there is none.
possible_args = d[old][2] # a list of the number of arguments the
# unittest function could possibly take.
if possible_args == ['Any']: # just rename assertRaises & friends
return re.sub('self.'+old, new, block)
message_pos = possible_args[-1]
# the remaining unittests can have an optional message to print
# when they fail. It is always the last argument to the function.
try:
indent, argl, trailer = decompose_unittest(old, block)
except SyntaxError: # but we couldn't parse it!
return block
argnum = len(argl)
if argnum not in possible_args:
# sanity check - this one isn't real either
return block
elif argnum == message_pos:
message = argl[-1]
argl = argl[:-1]
else:
message = None
if argnum == 0 or (argnum == 1 and argnum == message_pos): # unittest fail()
string = ''
if message:
message = ' ' + message
elif message_pos == 4: # assertAlmostEqual & friends
try:
pos = argl[2].lstrip()
except IndexError:
pos = '7' # default if none is specified
string = '(%s -%s, %s)%s 0' % (argl[0], argl[1], pos, op )
else: # assert_, assertEquals and all the rest
string = ' ' + op.join(argl)
if message:
string = string + ',' + message
return indent + new + string + trailer
def decompose_unittest(old, block):
'''decompose the block into its component parts
returns indent, arglist, trailer
indent -- the indentation
arglist -- the arguments to the unittest function
trailer -- any extra junk after the closing paren, such as #comment
'''
indent = re.match(r'(\s*)', block).group()
pat = re.search('self.' + old + r'\(', block)
args, trailer = get_expr(block[pat.end():], ')')
arglist = break_args(args, [])
if arglist == ['']: # there weren't any
return indent, [], trailer
for i in range(len(arglist)):
try:
parser.expr(arglist[i].lstrip('\t '))
except SyntaxError:
if i == 0:
arglist[i] = '(' + arglist[i] + ')'
else:
arglist[i] = ' (' + arglist[i] + ')'
return indent, arglist, trailer
def break_args(args, arglist):
'''recursively break a string into a list of arguments'''
try:
first, rest = get_expr(args, ',')
if not rest:
return arglist + [first]
else:
return [first] + break_args(rest, arglist)
except SyntaxError:
return arglist + [args]
def get_expr(s, char):
'''split a string into an expression, and the rest of the string'''
pos=[]
for i in range(len(s)):
if s[i] == char:
pos.append(i)
if pos == []:
raise SyntaxError # we didn't find the expected char. Ick.
for p in pos:
# make the python parser do the hard work of deciding which comma
# splits the string into two expressions
try:
parser.expr('(' + s[:p] + ')')
return s[:p], s[p+1:]
except SyntaxError: # It's not an expression yet
pass
raise SyntaxError # We never found anything that worked.
def main():
import sys
import py
usage = "usage: %prog [-s [filename ...] | [-i | -c filename ...]]"
optparser = py.std.optparse.OptionParser(usage)
def select_output (option, opt, value, optparser, **kw):
if hasattr(optparser, 'output'):
optparser.error(
'Cannot combine -s -i and -c options. Use one only.')
else:
optparser.output = kw['output']
optparser.add_option("-s", "--stdout", action="callback",
callback=select_output,
callback_kwargs={'output':'stdout'},
help="send your output to stdout")
optparser.add_option("-i", "--inplace", action="callback",
callback=select_output,
callback_kwargs={'output':'inplace'},
help="overwrite files in place")
optparser.add_option("-c", "--copy", action="callback",
callback=select_output,
callback_kwargs={'output':'copy'},
help="copy files ... fn.py --> fn_cp.py")
options, args = optparser.parse_args()
output = getattr(optparser, 'output', 'stdout')
if output in ['inplace', 'copy'] and not args:
optparser.error(
'-i and -c option require at least one filename')
if not args:
s = ''
for block in blocksplitter(sys.stdin):
s += rewrite_utest(block)
sys.stdout.write(s)
else:
for infilename in args: # no error checking to see if we can open, etc.
infile = file(infilename)
s = ''
for block in blocksplitter(infile):
s += rewrite_utest(block)
if output == 'inplace':
outfile = file(infilename, 'w+')
elif output == 'copy': # yes, just go clobber any existing .cp
outfile = file (infilename[:-3]+ '_cp.py', 'w+')
else:
outfile = sys.stdout
outfile.write(s)
if __name__ == '__main__':
main()

View File

@@ -1,94 +0,0 @@
#!/usr/bin/env python
# hands on script to compute the non-empty Lines of Code
# for tests and non-test code
"""\
py.countloc [PATHS]
Count (non-empty) lines of python code and the number of python files recursively,
starting from a list of paths given on the command line (defaulting to the
current working directory). Distinguish between test files and normal ones and
report them separately.
"""
import py
def main():
parser = py.std.optparse.OptionParser(usage=__doc__)
(options, args) = parser.parse_args()
countloc(args)
def nodot(p):
return p.check(dotfile=0)
class FileCounter(object):
def __init__(self):
self.file2numlines = {}
self.numlines = 0
self.numfiles = 0
def addrecursive(self, directory, fil="*.py", rec=nodot):
for x in directory.visit(fil, rec):
self.addfile(x)
def addfile(self, fn, emptylines=False):
if emptylines:
s = len(fn.readlines())
else:
s = 0
for i in fn.readlines():
if i.strip():
s += 1
self.file2numlines[fn] = s
self.numfiles += 1
self.numlines += s
def getnumlines(self, fil):
numlines = 0
for path, value in self.file2numlines.items():
if fil(path):
numlines += value
return numlines
def getnumfiles(self, fil):
numfiles = 0
for path in self.file2numlines:
if fil(path):
numfiles += 1
return numfiles
def get_loccount(locations=None):
if locations is None:
locations = [py.path.local()]
counter = FileCounter()
for loc in locations:
counter.addrecursive(loc, '*.py', rec=nodot)
def istestfile(p):
return p.check(fnmatch='test_*.py')
isnottestfile = lambda x: not istestfile(x)
numfiles = counter.getnumfiles(isnottestfile)
numlines = counter.getnumlines(isnottestfile)
numtestfiles = counter.getnumfiles(istestfile)
numtestlines = counter.getnumlines(istestfile)
return counter, numfiles, numlines, numtestfiles, numtestlines
def countloc(paths=None):
if not paths:
paths = ['.']
locations = [py.path.local(x) for x in paths]
(counter, numfiles, numlines, numtestfiles,
numtestlines) = get_loccount(locations)
items = counter.file2numlines.items()
items.sort(lambda x,y: cmp(x[1], y[1]))
for x, y in items:
print("%3d %30s" % (y,x))
print("%30s %3d" %("number of testfiles", numtestfiles))
print("%30s %3d" %("number of non-empty testlines", numtestlines))
print("%30s %3d" %("number of files", numfiles))
print("%30s %3d" %("number of non-empty lines", numlines))

View File

@@ -1,85 +0,0 @@
#!/usr/bin/env python
"""\
py.lookup [search_directory] SEARCH_STRING [options]
Looks recursively at Python files for a SEARCH_STRING, starting from the
present working directory. Prints the line, with the filename and line-number
prepended."""
import sys, os
import py
from py.io import ansi_print, get_terminal_width
import re
def rec(p):
return p.check(dotfile=0)
parser = py.std.optparse.OptionParser(usage=__doc__)
parser.add_option("-i", "--ignore-case", action="store_true", dest="ignorecase",
help="ignore case distinctions")
parser.add_option("-C", "--context", action="store", type="int", dest="context",
default=0, help="How many lines of output to show")
terminal_width = get_terminal_width()
def find_indexes(search_line, string):
indexes = []
before = 0
while 1:
i = search_line.find(string, before)
if i == -1:
break
indexes.append(i)
before = i + len(string)
return indexes
def main():
(options, args) = parser.parse_args()
if len(args) == 2:
search_dir, string = args
search_dir = py.path.local(search_dir)
else:
search_dir = py.path.local()
string = args[0]
if options.ignorecase:
string = string.lower()
for x in search_dir.visit('*.py', rec):
# match filename directly
s = x.relto(search_dir)
if options.ignorecase:
s = s.lower()
if s.find(string) != -1:
sys.stdout.write("%s: filename matches %r" %(x, string) + "\n")
try:
s = x.read()
except py.error.ENOENT:
pass # whatever, probably broken link (ie emacs lock)
searchs = s
if options.ignorecase:
searchs = s.lower()
if searchs.find(string) != -1:
lines = s.splitlines()
if options.ignorecase:
searchlines = s.lower().splitlines()
else:
searchlines = lines
for i, (line, searchline) in enumerate(zip(lines, searchlines)):
indexes = find_indexes(searchline, string)
if not indexes:
continue
if not options.context:
sys.stdout.write("%s:%d: " %(x.relto(search_dir), i+1))
last_index = 0
for index in indexes:
sys.stdout.write(line[last_index: index])
ansi_print(line[index: index+len(string)],
file=sys.stdout, esc=31, newline=False)
last_index = index + len(string)
sys.stdout.write(line[last_index:] + "\n")
else:
context = (options.context)/2
for count in range(max(0, i-context), min(len(lines) - 1, i+context+1)):
print("%s:%d: %s" %(x.relto(search_dir), count+1, lines[count].rstrip()))
print("-" * terminal_width)

View File

@@ -1,55 +0,0 @@
#! /usr/bin/env python
"""\
py.svnwcrevert [options] WCPATH
Running this script and then 'svn up' puts the working copy WCPATH in a state
as clean as a fresh check-out.
WARNING: you'll lose all local changes, obviously!
This script deletes all files that have been modified
or that svn doesn't explicitly know about, including svn:ignored files
(like .pyc files, hint hint).
The goal of this script is to leave the working copy with some files and
directories possibly missing, but - most importantly - in a state where
the following 'svn up' won't just crash.
"""
import sys, py
def kill(p, root):
print('< %s' % (p.relto(root),))
p.remove(rec=1)
def svnwcrevert(path, root=None, precious=[]):
if root is None:
root = path
wcpath = py.path.svnwc(path)
try:
st = wcpath.status()
except ValueError: # typically, "bad char in wcpath"
kill(path, root)
return
for p in path.listdir():
if p.basename == '.svn' or p.basename in precious:
continue
wcp = py.path.svnwc(p)
if wcp not in st.unchanged and wcp not in st.external:
kill(p, root)
elif p.check(dir=1):
svnwcrevert(p, root)
# XXX add a functional test
parser = py.std.optparse.OptionParser(usage=__doc__)
parser.add_option("-p", "--precious",
action="append", dest="precious", default=[],
help="preserve files with this name")
def main():
opts, args = parser.parse_args()
if len(args) != 1:
parser.print_help()
sys.exit(2)
svnwcrevert(py.path.local(args[0]), precious=opts.precious)

View File

@@ -1,5 +0,0 @@
#!/usr/bin/env python
import py
def main(args=None):
raise SystemExit(py.test.cmdline.main(args))

View File

@@ -1,23 +0,0 @@
#!/usr/bin/env python
"""\
py.which [name]
print the location of the given python module or package name
"""
import sys
def main():
name = sys.argv[1]
try:
mod = __import__(name)
except ImportError:
sys.stderr.write("could not import: " + name + "\n")
else:
try:
location = mod.__file__
except AttributeError:
sys.stderr.write("module (has no __file__): " + str(mod) + "\n")
else:
print(location)

View File

@@ -1 +0,0 @@
""" python inspection/code generation API """

View File

@@ -1,345 +0,0 @@
"""
Find intermediate evaluation results in assert statements through builtin AST.
This should replace _assertionold.py eventually.
"""
import sys
import ast
import py
from py._code.assertion import _format_explanation, BuiltinAssertionError
if sys.platform.startswith("java") and sys.version_info < (2, 5, 2):
# See http://bugs.jython.org/issue1497
_exprs = ("BoolOp", "BinOp", "UnaryOp", "Lambda", "IfExp", "Dict",
"ListComp", "GeneratorExp", "Yield", "Compare", "Call",
"Repr", "Num", "Str", "Attribute", "Subscript", "Name",
"List", "Tuple")
_stmts = ("FunctionDef", "ClassDef", "Return", "Delete", "Assign",
"AugAssign", "Print", "For", "While", "If", "With", "Raise",
"TryExcept", "TryFinally", "Assert", "Import", "ImportFrom",
"Exec", "Global", "Expr", "Pass", "Break", "Continue")
_expr_nodes = set(getattr(ast, name) for name in _exprs)
_stmt_nodes = set(getattr(ast, name) for name in _stmts)
def _is_ast_expr(node):
return node.__class__ in _expr_nodes
def _is_ast_stmt(node):
return node.__class__ in _stmt_nodes
else:
def _is_ast_expr(node):
return isinstance(node, ast.expr)
def _is_ast_stmt(node):
return isinstance(node, ast.stmt)
class Failure(Exception):
"""Error found while interpreting AST."""
def __init__(self, explanation=""):
self.cause = sys.exc_info()
self.explanation = explanation
def interpret(source, frame, should_fail=False):
mod = ast.parse(source)
visitor = DebugInterpreter(frame)
try:
visitor.visit(mod)
except Failure:
failure = sys.exc_info()[1]
return getfailure(failure)
if should_fail:
return ("(assertion failed, but when it was re-run for "
"printing intermediate values, it did not fail. Suggestions: "
"compute assert expression before the assert or use --no-assert)")
def run(offending_line, frame=None):
if frame is None:
frame = py.code.Frame(sys._getframe(1))
return interpret(offending_line, frame)
def getfailure(failure):
explanation = _format_explanation(failure.explanation)
value = failure.cause[1]
if str(value):
lines = explanation.splitlines()
if not lines:
lines.append("")
lines[0] += " << %s" % (value,)
explanation = "\n".join(lines)
text = "%s: %s" % (failure.cause[0].__name__, explanation)
if text.startswith("AssertionError: assert "):
text = text[16:]
return text
operator_map = {
ast.BitOr : "|",
ast.BitXor : "^",
ast.BitAnd : "&",
ast.LShift : "<<",
ast.RShift : ">>",
ast.Add : "+",
ast.Sub : "-",
ast.Mult : "*",
ast.Div : "/",
ast.FloorDiv : "//",
ast.Mod : "%",
ast.Eq : "==",
ast.NotEq : "!=",
ast.Lt : "<",
ast.LtE : "<=",
ast.Gt : ">",
ast.GtE : ">=",
ast.Pow : "**",
ast.Is : "is",
ast.IsNot : "is not",
ast.In : "in",
ast.NotIn : "not in"
}
unary_map = {
ast.Not : "not %s",
ast.Invert : "~%s",
ast.USub : "-%s",
ast.UAdd : "+%s"
}
class DebugInterpreter(ast.NodeVisitor):
"""Interpret AST nodes to glean useful debugging information. """
def __init__(self, frame):
self.frame = frame
def generic_visit(self, node):
# Fallback when we don't have a special implementation.
if _is_ast_expr(node):
mod = ast.Expression(node)
co = self._compile(mod)
try:
result = self.frame.eval(co)
except Exception:
raise Failure()
explanation = self.frame.repr(result)
return explanation, result
elif _is_ast_stmt(node):
mod = ast.Module([node])
co = self._compile(mod, "exec")
try:
self.frame.exec_(co)
except Exception:
raise Failure()
return None, None
else:
raise AssertionError("can't handle %s" %(node,))
def _compile(self, source, mode="eval"):
return compile(source, "<assertion interpretation>", mode)
def visit_Expr(self, expr):
return self.visit(expr.value)
def visit_Module(self, mod):
for stmt in mod.body:
self.visit(stmt)
def visit_Name(self, name):
explanation, result = self.generic_visit(name)
# See if the name is local.
source = "%r in locals() is not globals()" % (name.id,)
co = self._compile(source)
try:
local = self.frame.eval(co)
except Exception:
# have to assume it isn't
local = False
if not local:
return name.id, result
return explanation, result
def visit_Compare(self, comp):
left = comp.left
left_explanation, left_result = self.visit(left)
for op, next_op in zip(comp.ops, comp.comparators):
next_explanation, next_result = self.visit(next_op)
op_symbol = operator_map[op.__class__]
explanation = "%s %s %s" % (left_explanation, op_symbol,
next_explanation)
source = "__exprinfo_left %s __exprinfo_right" % (op_symbol,)
co = self._compile(source)
try:
result = self.frame.eval(co, __exprinfo_left=left_result,
__exprinfo_right=next_result)
except Exception:
raise Failure(explanation)
if not result:
break
left_explanation, left_result = next_explanation, next_result
rcomp = py.code._reprcompare
if rcomp:
res = rcomp(op_symbol, left_result, next_result)
if res:
explanation = res
return explanation, result
def visit_BoolOp(self, boolop):
is_or = isinstance(boolop.op, ast.Or)
explanations = []
for operand in boolop.values:
explanation, result = self.visit(operand)
explanations.append(explanation)
if result == is_or:
break
name = is_or and " or " or " and "
explanation = "(" + name.join(explanations) + ")"
return explanation, result
def visit_UnaryOp(self, unary):
pattern = unary_map[unary.op.__class__]
operand_explanation, operand_result = self.visit(unary.operand)
explanation = pattern % (operand_explanation,)
co = self._compile(pattern % ("__exprinfo_expr",))
try:
result = self.frame.eval(co, __exprinfo_expr=operand_result)
except Exception:
raise Failure(explanation)
return explanation, result
def visit_BinOp(self, binop):
left_explanation, left_result = self.visit(binop.left)
right_explanation, right_result = self.visit(binop.right)
symbol = operator_map[binop.op.__class__]
explanation = "(%s %s %s)" % (left_explanation, symbol,
right_explanation)
source = "__exprinfo_left %s __exprinfo_right" % (symbol,)
co = self._compile(source)
try:
result = self.frame.eval(co, __exprinfo_left=left_result,
__exprinfo_right=right_result)
except Exception:
raise Failure(explanation)
return explanation, result
def visit_Call(self, call):
func_explanation, func = self.visit(call.func)
arg_explanations = []
ns = {"__exprinfo_func" : func}
arguments = []
for arg in call.args:
arg_explanation, arg_result = self.visit(arg)
arg_name = "__exprinfo_%s" % (len(ns),)
ns[arg_name] = arg_result
arguments.append(arg_name)
arg_explanations.append(arg_explanation)
for keyword in call.keywords:
arg_explanation, arg_result = self.visit(keyword.value)
arg_name = "__exprinfo_%s" % (len(ns),)
ns[arg_name] = arg_result
keyword_source = "%s=%%s" % (keyword.arg)
arguments.append(keyword_source % (arg_name,))
arg_explanations.append(keyword_source % (arg_explanation,))
if call.starargs:
arg_explanation, arg_result = self.visit(call.starargs)
arg_name = "__exprinfo_star"
ns[arg_name] = arg_result
arguments.append("*%s" % (arg_name,))
arg_explanations.append("*%s" % (arg_explanation,))
if call.kwargs:
arg_explanation, arg_result = self.visit(call.kwargs)
arg_name = "__exprinfo_kwds"
ns[arg_name] = arg_result
arguments.append("**%s" % (arg_name,))
arg_explanations.append("**%s" % (arg_explanation,))
args_explained = ", ".join(arg_explanations)
explanation = "%s(%s)" % (func_explanation, args_explained)
args = ", ".join(arguments)
source = "__exprinfo_func(%s)" % (args,)
co = self._compile(source)
try:
result = self.frame.eval(co, **ns)
except Exception:
raise Failure(explanation)
# Only show result explanation if it's not a builtin call or returns a
# bool.
if not isinstance(call.func, ast.Name) or \
not self._is_builtin_name(call.func):
source = "isinstance(__exprinfo_value, bool)"
co = self._compile(source)
try:
is_bool = self.frame.eval(co, __exprinfo_value=result)
except Exception:
is_bool = False
if not is_bool:
pattern = "%s\n{%s = %s\n}"
rep = self.frame.repr(result)
explanation = pattern % (rep, rep, explanation)
return explanation, result
def _is_builtin_name(self, name):
pattern = "%r not in globals() and %r not in locals()"
source = pattern % (name.id, name.id)
co = self._compile(source)
try:
return self.frame.eval(co)
except Exception:
return False
def visit_Attribute(self, attr):
if not isinstance(attr.ctx, ast.Load):
return self.generic_visit(attr)
source_explanation, source_result = self.visit(attr.value)
explanation = "%s.%s" % (source_explanation, attr.attr)
source = "__exprinfo_expr.%s" % (attr.attr,)
co = self._compile(source)
try:
result = self.frame.eval(co, __exprinfo_expr=source_result)
except Exception:
raise Failure(explanation)
explanation = "%s\n{%s = %s.%s\n}" % (self.frame.repr(result),
self.frame.repr(result),
source_explanation, attr.attr)
# Check if the attr is from an instance.
source = "%r in getattr(__exprinfo_expr, '__dict__', {})"
source = source % (attr.attr,)
co = self._compile(source)
try:
from_instance = self.frame.eval(co, __exprinfo_expr=source_result)
except Exception:
from_instance = True
if from_instance:
rep = self.frame.repr(result)
pattern = "%s\n{%s = %s\n}"
explanation = pattern % (rep, rep, explanation)
return explanation, result
def visit_Assert(self, assrt):
test_explanation, test_result = self.visit(assrt.test)
if test_explanation.startswith("False\n{False =") and \
test_explanation.endswith("\n"):
test_explanation = test_explanation[15:-2]
explanation = "assert %s" % (test_explanation,)
if not test_result:
try:
raise BuiltinAssertionError
except Exception:
raise Failure(explanation)
return explanation, test_result
def visit_Assign(self, assign):
value_explanation, value_result = self.visit(assign.value)
explanation = "... = %s" % (value_explanation,)
name = ast.Name("__exprinfo_expr", ast.Load(),
lineno=assign.value.lineno,
col_offset=assign.value.col_offset)
new_assign = ast.Assign(assign.targets, name, lineno=assign.lineno,
col_offset=assign.col_offset)
mod = ast.Module([new_assign])
co = self._compile(mod, "exec")
try:
self.frame.exec_(co, __exprinfo_expr=value_result)
except Exception:
raise Failure(explanation)
return explanation, value_result
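For orientation, a minimal sketch of how the reinterpretation entry points above (interpret/run) were typically driven; the py._code._assertionnew import path is an assumption taken from how the companion assertion module below references it, and the output comment is approximate:

from py._code._assertionnew import run   # pre-removal location (assumed)

def demo():
    x, y = 1, 2
    # re-evaluate the failing expression in this frame and return a
    # textual explanation with intermediate values filled in
    print(run("assert x == y"))   # roughly: "assert 1 == 2"

demo()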

View File

@@ -1,555 +0,0 @@
import py
import sys, inspect
from compiler import parse, ast, pycodegen
from py._code.assertion import BuiltinAssertionError, _format_explanation
passthroughex = py.builtin._sysex
class Failure:
def __init__(self, node):
self.exc, self.value, self.tb = sys.exc_info()
self.node = node
class View(object):
"""View base class.
If C is a subclass of View, then C(x) creates a proxy object around
the object x. The actual class of the proxy is not C in general,
but a *subclass* of C determined by the rules below. To avoid confusion
we call view class the class of the proxy (a subclass of C, so of View)
and object class the class of x.
Attributes and methods not found in the proxy are automatically read on x.
Other operations like setting attributes are performed on the proxy, as
determined by its view class. The object x is available from the proxy
as its __obj__ attribute.
The view class selection is determined by the __view__ tuples and the
optional __viewkey__ method. By default, the selected view class is the
most specific subclass of C whose __view__ mentions the class of x.
If no such subclass is found, the search proceeds with the parent
object classes. For example, C(True) will first look for a subclass
of C with __view__ = (..., bool, ...) and only if it doesn't find any
look for one with __view__ = (..., int, ...), and then ..., object,...
If everything fails the class C itself is considered to be the default.
Alternatively, the view class selection can be driven by another aspect
of the object x, instead of the class of x, by overriding __viewkey__.
See last example at the end of this module.
"""
_viewcache = {}
__view__ = ()
def __new__(rootclass, obj, *args, **kwds):
self = object.__new__(rootclass)
self.__obj__ = obj
self.__rootclass__ = rootclass
key = self.__viewkey__()
try:
self.__class__ = self._viewcache[key]
except KeyError:
self.__class__ = self._selectsubclass(key)
return self
def __getattr__(self, attr):
# attributes not found in the normal hierarchy rooted on View
# are looked up in the object's real class
return getattr(self.__obj__, attr)
def __viewkey__(self):
return self.__obj__.__class__
def __matchkey__(self, key, subclasses):
if inspect.isclass(key):
keys = inspect.getmro(key)
else:
keys = [key]
for key in keys:
result = [C for C in subclasses if key in C.__view__]
if result:
return result
return []
def _selectsubclass(self, key):
subclasses = list(enumsubclasses(self.__rootclass__))
for C in subclasses:
if not isinstance(C.__view__, tuple):
C.__view__ = (C.__view__,)
choices = self.__matchkey__(key, subclasses)
if not choices:
return self.__rootclass__
elif len(choices) == 1:
return choices[0]
else:
# combine the multiple choices
return type('?', tuple(choices), {})
def __repr__(self):
return '%s(%r)' % (self.__rootclass__.__name__, self.__obj__)
def enumsubclasses(cls):
for subcls in cls.__subclasses__():
for subsubclass in enumsubclasses(subcls):
yield subsubclass
yield cls
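To make the dispatch concrete, here is a minimal hedged sketch of the View machinery defined above; Shape, IntView and StrView are invented illustration names, not part of the removed module:

class Shape(View):
    pass

class IntView(Shape):
    __view__ = int
    def describe(self):
        return "int %d" % (self.__obj__,)

class StrView(Shape):
    __view__ = str
    def describe(self):
        return "str %r" % (self.__obj__,)

# View.__new__ consults __view__ to pick the proxy class for the wrapped object
assert Shape(3).describe() == "int 3"
assert Shape("hi").describe() == "str 'hi'"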
class Interpretable(View):
"""A parse tree node with a few extra methods."""
explanation = None
def is_builtin(self, frame):
return False
def eval(self, frame):
# fall-back for unknown expression nodes
try:
expr = ast.Expression(self.__obj__)
expr.filename = '<eval>'
self.__obj__.filename = '<eval>'
co = pycodegen.ExpressionCodeGenerator(expr).getCode()
result = frame.eval(co)
except passthroughex:
raise
except:
raise Failure(self)
self.result = result
self.explanation = self.explanation or frame.repr(self.result)
def run(self, frame):
# fall-back for unknown statement nodes
try:
expr = ast.Module(None, ast.Stmt([self.__obj__]))
expr.filename = '<run>'
co = pycodegen.ModuleCodeGenerator(expr).getCode()
frame.exec_(co)
except passthroughex:
raise
except:
raise Failure(self)
def nice_explanation(self):
return _format_explanation(self.explanation)
class Name(Interpretable):
__view__ = ast.Name
def is_local(self, frame):
source = '%r in locals() is not globals()' % self.name
try:
return frame.is_true(frame.eval(source))
except passthroughex:
raise
except:
return False
def is_global(self, frame):
source = '%r in globals()' % self.name
try:
return frame.is_true(frame.eval(source))
except passthroughex:
raise
except:
return False
def is_builtin(self, frame):
source = '%r not in locals() and %r not in globals()' % (
self.name, self.name)
try:
return frame.is_true(frame.eval(source))
except passthroughex:
raise
except:
return False
def eval(self, frame):
super(Name, self).eval(frame)
if not self.is_local(frame):
self.explanation = self.name
class Compare(Interpretable):
__view__ = ast.Compare
def eval(self, frame):
expr = Interpretable(self.expr)
expr.eval(frame)
for operation, expr2 in self.ops:
if hasattr(self, 'result'):
# shortcutting in chained expressions
if not frame.is_true(self.result):
break
expr2 = Interpretable(expr2)
expr2.eval(frame)
self.explanation = "%s %s %s" % (
expr.explanation, operation, expr2.explanation)
source = "__exprinfo_left %s __exprinfo_right" % operation
try:
self.result = frame.eval(source,
__exprinfo_left=expr.result,
__exprinfo_right=expr2.result)
except passthroughex:
raise
except:
raise Failure(self)
expr = expr2
class And(Interpretable):
__view__ = ast.And
def eval(self, frame):
explanations = []
for expr in self.nodes:
expr = Interpretable(expr)
expr.eval(frame)
explanations.append(expr.explanation)
self.result = expr.result
if not frame.is_true(expr.result):
break
self.explanation = '(' + ' and '.join(explanations) + ')'
class Or(Interpretable):
__view__ = ast.Or
def eval(self, frame):
explanations = []
for expr in self.nodes:
expr = Interpretable(expr)
expr.eval(frame)
explanations.append(expr.explanation)
self.result = expr.result
if frame.is_true(expr.result):
break
self.explanation = '(' + ' or '.join(explanations) + ')'
# == Unary operations ==
keepalive = []
for astclass, astpattern in {
ast.Not : 'not __exprinfo_expr',
ast.Invert : '(~__exprinfo_expr)',
}.items():
class UnaryArith(Interpretable):
__view__ = astclass
def eval(self, frame, astpattern=astpattern):
expr = Interpretable(self.expr)
expr.eval(frame)
self.explanation = astpattern.replace('__exprinfo_expr',
expr.explanation)
try:
self.result = frame.eval(astpattern,
__exprinfo_expr=expr.result)
except passthroughex:
raise
except:
raise Failure(self)
keepalive.append(UnaryArith)
# == Binary operations ==
for astclass, astpattern in {
ast.Add : '(__exprinfo_left + __exprinfo_right)',
ast.Sub : '(__exprinfo_left - __exprinfo_right)',
ast.Mul : '(__exprinfo_left * __exprinfo_right)',
ast.Div : '(__exprinfo_left / __exprinfo_right)',
ast.Mod : '(__exprinfo_left % __exprinfo_right)',
ast.Power : '(__exprinfo_left ** __exprinfo_right)',
}.items():
class BinaryArith(Interpretable):
__view__ = astclass
def eval(self, frame, astpattern=astpattern):
left = Interpretable(self.left)
left.eval(frame)
right = Interpretable(self.right)
right.eval(frame)
self.explanation = (astpattern
.replace('__exprinfo_left', left .explanation)
.replace('__exprinfo_right', right.explanation))
try:
self.result = frame.eval(astpattern,
__exprinfo_left=left.result,
__exprinfo_right=right.result)
except passthroughex:
raise
except:
raise Failure(self)
keepalive.append(BinaryArith)
class CallFunc(Interpretable):
__view__ = ast.CallFunc
def is_bool(self, frame):
source = 'isinstance(__exprinfo_value, bool)'
try:
return frame.is_true(frame.eval(source,
__exprinfo_value=self.result))
except passthroughex:
raise
except:
return False
def eval(self, frame):
node = Interpretable(self.node)
node.eval(frame)
explanations = []
vars = {'__exprinfo_fn': node.result}
source = '__exprinfo_fn('
for a in self.args:
if isinstance(a, ast.Keyword):
keyword = a.name
a = a.expr
else:
keyword = None
a = Interpretable(a)
a.eval(frame)
argname = '__exprinfo_%d' % len(vars)
vars[argname] = a.result
if keyword is None:
source += argname + ','
explanations.append(a.explanation)
else:
source += '%s=%s,' % (keyword, argname)
explanations.append('%s=%s' % (keyword, a.explanation))
if self.star_args:
star_args = Interpretable(self.star_args)
star_args.eval(frame)
argname = '__exprinfo_star'
vars[argname] = star_args.result
source += '*' + argname + ','
explanations.append('*' + star_args.explanation)
if self.dstar_args:
dstar_args = Interpretable(self.dstar_args)
dstar_args.eval(frame)
argname = '__exprinfo_kwds'
vars[argname] = dstar_args.result
source += '**' + argname + ','
explanations.append('**' + dstar_args.explanation)
self.explanation = "%s(%s)" % (
node.explanation, ', '.join(explanations))
if source.endswith(','):
source = source[:-1]
source += ')'
try:
self.result = frame.eval(source, **vars)
except passthroughex:
raise
except:
raise Failure(self)
if not node.is_builtin(frame) or not self.is_bool(frame):
r = frame.repr(self.result)
self.explanation = '%s\n{%s = %s\n}' % (r, r, self.explanation)
class Getattr(Interpretable):
__view__ = ast.Getattr
def eval(self, frame):
expr = Interpretable(self.expr)
expr.eval(frame)
source = '__exprinfo_expr.%s' % self.attrname
try:
self.result = frame.eval(source, __exprinfo_expr=expr.result)
except passthroughex:
raise
except:
raise Failure(self)
self.explanation = '%s.%s' % (expr.explanation, self.attrname)
# if the attribute comes from the instance, its value is interesting
source = ('hasattr(__exprinfo_expr, "__dict__") and '
'%r in __exprinfo_expr.__dict__' % self.attrname)
try:
from_instance = frame.is_true(
frame.eval(source, __exprinfo_expr=expr.result))
except passthroughex:
raise
except:
from_instance = True
if from_instance:
r = frame.repr(self.result)
self.explanation = '%s\n{%s = %s\n}' % (r, r, self.explanation)
# == Re-interpretation of full statements ==
class Assert(Interpretable):
__view__ = ast.Assert
def run(self, frame):
test = Interpretable(self.test)
test.eval(frame)
# simplify 'assert False where False = ...'
if (test.explanation.startswith('False\n{False = ') and
test.explanation.endswith('\n}')):
test.explanation = test.explanation[15:-2]
# print the result as 'assert <explanation>'
self.result = test.result
self.explanation = 'assert ' + test.explanation
if not frame.is_true(test.result):
try:
raise BuiltinAssertionError
except passthroughex:
raise
except:
raise Failure(self)
class Assign(Interpretable):
__view__ = ast.Assign
def run(self, frame):
expr = Interpretable(self.expr)
expr.eval(frame)
self.result = expr.result
self.explanation = '... = ' + expr.explanation
# fall-back-run the rest of the assignment
ass = ast.Assign(self.nodes, ast.Name('__exprinfo_expr'))
mod = ast.Module(None, ast.Stmt([ass]))
mod.filename = '<run>'
co = pycodegen.ModuleCodeGenerator(mod).getCode()
try:
frame.exec_(co, __exprinfo_expr=expr.result)
except passthroughex:
raise
except:
raise Failure(self)
class Discard(Interpretable):
__view__ = ast.Discard
def run(self, frame):
expr = Interpretable(self.expr)
expr.eval(frame)
self.result = expr.result
self.explanation = expr.explanation
class Stmt(Interpretable):
__view__ = ast.Stmt
def run(self, frame):
for stmt in self.nodes:
stmt = Interpretable(stmt)
stmt.run(frame)
def report_failure(e):
explanation = e.node.nice_explanation()
if explanation:
explanation = ", in: " + explanation
else:
explanation = ""
sys.stdout.write("%s: %s%s\n" % (e.exc.__name__, e.value, explanation))
def check(s, frame=None):
if frame is None:
frame = sys._getframe(1)
frame = py.code.Frame(frame)
expr = parse(s, 'eval')
assert isinstance(expr, ast.Expression)
node = Interpretable(expr.node)
try:
node.eval(frame)
except passthroughex:
raise
except Failure:
e = sys.exc_info()[1]
report_failure(e)
else:
if not frame.is_true(node.result):
sys.stderr.write("assertion failed: %s\n" % node.nice_explanation())
###########################################################
# API / Entry points
# #########################################################
def interpret(source, frame, should_fail=False):
module = Interpretable(parse(source, 'exec').node)
#print "got module", module
if isinstance(frame, py.std.types.FrameType):
frame = py.code.Frame(frame)
try:
module.run(frame)
except Failure:
e = sys.exc_info()[1]
return getfailure(e)
except passthroughex:
raise
except:
import traceback
traceback.print_exc()
if should_fail:
return ("(assertion failed, but when it was re-run for "
"printing intermediate values, it did not fail. Suggestions: "
"compute assert expression before the assert or use --nomagic)")
else:
return None
def getmsg(excinfo):
if isinstance(excinfo, tuple):
excinfo = py.code.ExceptionInfo(excinfo)
#frame, line = gettbline(tb)
#frame = py.code.Frame(frame)
#return interpret(line, frame)
tb = excinfo.traceback[-1]
source = str(tb.statement).strip()
x = interpret(source, tb.frame, should_fail=True)
if not isinstance(x, str):
raise TypeError("interpret returned non-string %r" % (x,))
return x
def getfailure(e):
explanation = e.node.nice_explanation()
if str(e.value):
lines = explanation.split('\n')
lines[0] += " << %s" % (e.value,)
explanation = '\n'.join(lines)
text = "%s: %s" % (e.exc.__name__, explanation)
if text.startswith('AssertionError: assert '):
text = text[16:]
return text
def run(s, frame=None):
if frame is None:
frame = sys._getframe(1)
frame = py.code.Frame(frame)
module = Interpretable(parse(s, 'exec').node)
try:
module.run(frame)
except Failure:
e = sys.exc_info()[1]
report_failure(e)
if __name__ == '__main__':
# example:
def f():
return 5
def g():
return 3
def h(x):
return 'never'
check("f() * g() == 5")
check("not f()")
check("not (f() and g() or 0)")
check("f() == g()")
i = 4
check("i == f()")
check("len(f()) == 0")
check("isinstance(2+3+4, float)")
run("x = i")
check("x == 5")
run("assert not f(), 'oops'")
run("a, b, c = 1, 2")
run("a, b, c = f()")
check("max([f(),g()]) == 4")
check("'hello'[g()] == 'h'")
run("'guk%d' % h(f())")

View File

@@ -1,88 +0,0 @@
import sys
import py
BuiltinAssertionError = py.builtin.builtins.AssertionError
_reprcompare = None # if set, will be called by assert reinterp for comparison ops
def _format_explanation(explanation):
"""This formats an explanation
Normally all embedded newlines are escaped, however there are
three exceptions: \n{, \n} and \n~. The first two are intended
to cover nested explanations, see function and attribute explanations
for examples (.visit_Call(), visit_Attribute()). The last one is
for when one explanation needs to span multiple lines, e.g. when
displaying diffs.
"""
raw_lines = (explanation or '').split('\n')
# escape newlines not followed by {, } and ~
lines = [raw_lines[0]]
for l in raw_lines[1:]:
if l.startswith('{') or l.startswith('}') or l.startswith('~'):
lines.append(l)
else:
lines[-1] += '\\n' + l
result = lines[:1]
stack = [0]
stackcnt = [0]
for line in lines[1:]:
if line.startswith('{'):
if stackcnt[-1]:
s = 'and '
else:
s = 'where '
stack.append(len(result))
stackcnt[-1] += 1
stackcnt.append(0)
result.append(' +' + ' '*(len(stack)-1) + s + line[1:])
elif line.startswith('}'):
assert line.startswith('}')
stack.pop()
stackcnt.pop()
result[stack[-1]] += line[1:]
else:
assert line.startswith('~')
result.append(' '*len(stack) + line[1:])
assert len(stack) == 1
return '\n'.join(result)
class AssertionError(BuiltinAssertionError):
def __init__(self, *args):
BuiltinAssertionError.__init__(self, *args)
if args:
try:
self.msg = str(args[0])
except py.builtin._sysex:
raise
except:
self.msg = "<[broken __repr__] %s at %0xd>" %(
args[0].__class__, id(args[0]))
else:
f = py.code.Frame(sys._getframe(1))
try:
source = f.statement
source = str(source.deindent()).strip()
except py.error.ENOENT:
source = None
# this can also occur during reinterpretation, when the
# co_filename is set to "<run>".
if source:
self.msg = reinterpret(source, f, should_fail=True)
if not self.args:
self.args = (self.msg,)
else:
self.msg = None
if sys.version_info > (3, 0):
AssertionError.__module__ = "builtins"
reinterpret_old = "old reinterpretation not available for py3"
else:
from py._code._assertionold import interpret as reinterpret_old
if sys.version_info >= (2, 6) or (sys.platform.startswith("java")):
from py._code._assertionnew import interpret as reinterpret
else:
reinterpret = reinterpret_old
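As a quick illustration of the \n{ / \n} convention described in the _format_explanation docstring above (the input string is invented):

nested = "False\n{False = isfile('x')\n}"
print(_format_explanation(nested))
# prints:
# False
#  + where False = isfile('x')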

View File

@@ -1,719 +0,0 @@
import py
import sys, os.path
builtin_repr = repr
reprlib = py.builtin._tryimport('repr', 'reprlib')
class Code(object):
""" wrapper around Python code objects """
def __init__(self, rawcode):
rawcode = py.code.getrawcode(rawcode)
self.raw = rawcode
try:
self.filename = rawcode.co_filename
self.firstlineno = rawcode.co_firstlineno - 1
self.name = rawcode.co_name
except AttributeError:
raise TypeError("not a code object: %r" %(rawcode,))
def __eq__(self, other):
return self.raw == other.raw
def __ne__(self, other):
return not self == other
def path(self):
""" return a path object pointing to source code"""
p = py.path.local(self.raw.co_filename)
if not p.check():
# XXX maybe try harder like the weird logic
# in the standard lib [linecache.updatecache] does?
p = self.raw.co_filename
return p
path = property(path, None, None, "path of this code object")
def fullsource(self):
""" return a py.code.Source object for the full source file of the code
"""
from py._code import source
full, _ = source.findsource(self.raw)
return full
fullsource = property(fullsource, None, None,
"full source containing this code object")
def source(self):
""" return a py.code.Source object for the code object's source only
"""
# return source only for that part of code
return py.code.Source(self.raw)
def getargs(self):
""" return a tuple with the argument names for the code object
"""
# handy shortcut for getting args
raw = self.raw
return raw.co_varnames[:raw.co_argcount]
class Frame(object):
"""Wrapper around a Python frame holding f_locals and f_globals
in which expressions can be evaluated."""
def __init__(self, frame):
self.code = py.code.Code(frame.f_code)
self.lineno = frame.f_lineno - 1
self.f_globals = frame.f_globals
self.f_locals = frame.f_locals
self.raw = frame
def statement(self):
if self.code.fullsource is None:
return py.code.Source("")
return self.code.fullsource.getstatement(self.lineno)
statement = property(statement, None, None,
"statement this frame is at")
def eval(self, code, **vars):
""" evaluate 'code' in the frame
'vars' are optional additional local variables
returns the result of the evaluation
"""
f_locals = self.f_locals.copy()
f_locals.update(vars)
return eval(code, self.f_globals, f_locals)
def exec_(self, code, **vars):
""" exec 'code' in the frame
'vars' are optional additional local variables
"""
f_locals = self.f_locals.copy()
f_locals.update(vars)
py.builtin.exec_(code, self.f_globals, f_locals )
def repr(self, object):
""" return a 'safe' (non-recursive, one-line) string repr for 'object'
"""
return py.io.saferepr(object)
def is_true(self, object):
return object
def getargs(self):
""" return a list of tuples (name, value) for all arguments
"""
retval = []
for arg in self.code.getargs():
try:
retval.append((arg, self.f_locals[arg]))
except KeyError:
pass # this can occur when using Psyco
return retval
class TracebackEntry(object):
""" a single entry in a traceback """
exprinfo = None
def __init__(self, rawentry):
self._rawentry = rawentry
self.frame = py.code.Frame(rawentry.tb_frame)
# Ugh. 2.4 and 2.5 differ here when encountering
# multi-line statements. Not sure about the solution, but
# should be portable
self.lineno = rawentry.tb_lineno - 1
self.relline = self.lineno - self.frame.code.firstlineno
def __repr__(self):
return "<TracebackEntry %s:%d>" %(self.frame.code.path, self.lineno+1)
def statement(self):
""" return a py.code.Source object for the current statement """
source = self.frame.code.fullsource
return source.getstatement(self.lineno)
statement = property(statement, None, None,
"statement of this traceback entry.")
def path(self):
return self.frame.code.path
path = property(path, None, None, "path to the full source code")
def getlocals(self):
return self.frame.f_locals
locals = property(getlocals, None, None, "locals of underlying frame")
def reinterpret(self):
"""Reinterpret the failing statement and returns a detailed information
about what operations are performed."""
if self.exprinfo is None:
source = str(self.statement).strip()
x = py.code._reinterpret(source, self.frame, should_fail=True)
if not isinstance(x, str):
raise TypeError("interpret returned non-string %r" % (x,))
self.exprinfo = x
return self.exprinfo
def getfirstlinesource(self):
# on Jython this firstlineno can be -1 apparently
return max(self.frame.code.firstlineno, 0)
def getsource(self):
""" return failing source code. """
source = self.frame.code.fullsource
if source is None:
return None
start = self.getfirstlinesource()
end = self.lineno
try:
_, end = source.getstatementrange(end)
except IndexError:
end = self.lineno + 1
# heuristic to stop displaying source on e.g.
# if something: # assume this causes a NameError
# # _this_ lines and the one
# below we don't want from entry.getsource()
for i in range(self.lineno, end):
if source[i].rstrip().endswith(':'):
end = i + 1
break
return source[start:end]
source = property(getsource)
def ishidden(self):
""" return True if the current frame has a var __tracebackhide__
resolving to True
mostly for internal use
"""
try:
return self.frame.eval("__tracebackhide__")
except py.builtin._sysex:
raise
except:
return False
def __str__(self):
try:
fn = str(self.path)
except py.error.Error:
fn = '???'
name = self.frame.code.name
try:
line = str(self.statement).lstrip()
except KeyboardInterrupt:
raise
except:
line = "???"
return " File %r:%d in %s\n %s\n" %(fn, self.lineno+1, name, line)
def name(self):
return self.frame.code.raw.co_name
name = property(name, None, None, "co_name of underlying code")
class Traceback(list):
""" Traceback objects encapsulate and offer higher level
access to Traceback entries.
"""
Entry = TracebackEntry
def __init__(self, tb):
""" initialize from given python traceback object. """
if hasattr(tb, 'tb_next'):
def f(cur):
while cur is not None:
yield self.Entry(cur)
cur = cur.tb_next
list.__init__(self, f(tb))
else:
list.__init__(self, tb)
def cut(self, path=None, lineno=None, firstlineno=None, excludepath=None):
""" return a Traceback instance wrapping part of this Traceback
by providing any combination of path, lineno and firstlineno, the
first frame to start the to-be-returned traceback is determined.
This allows cutting the first part of a Traceback instance e.g.
for formatting reasons (removing some uninteresting bits that deal
with handling of the exception/traceback)
"""
for x in self:
code = x.frame.code
codepath = code.path
if ((path is None or codepath == path) and
(excludepath is None or not hasattr(codepath, 'relto') or
not codepath.relto(excludepath)) and
(lineno is None or x.lineno == lineno) and
(firstlineno is None or x.frame.code.firstlineno == firstlineno)):
return Traceback(x._rawentry)
return self
def __getitem__(self, key):
val = super(Traceback, self).__getitem__(key)
if isinstance(key, type(slice(0))):
val = self.__class__(val)
return val
def filter(self, fn=lambda x: not x.ishidden()):
""" return a Traceback instance with certain items removed
fn is a function that gets a single argument, a TracebackItem
instance, and should return True when the item should be added
to the Traceback, False when not
by default this removes all the TracebackItems which are hidden
(see ishidden() above)
"""
return Traceback(filter(fn, self))
def getcrashentry(self):
""" return last non-hidden traceback entry that lead
to the exception of a traceback.
"""
tb = self.filter()
if not tb:
tb = self
return tb[-1]
def recursionindex(self):
""" return the index of the frame/TracebackItem where recursion
originates if appropriate, None if no recursion occurred
"""
cache = {}
for i, entry in enumerate(self):
key = entry.frame.code.path, entry.lineno
#print "checking for recursion at", key
l = cache.setdefault(key, [])
if l:
f = entry.frame
loc = f.f_locals
for otherloc in l:
if f.is_true(f.eval(co_equal,
__recursioncache_locals_1=loc,
__recursioncache_locals_2=otherloc)):
return i
l.append(entry.frame.f_locals)
return None
co_equal = compile('__recursioncache_locals_1 == __recursioncache_locals_2',
'?', 'eval')
class ExceptionInfo(object):
""" wraps sys.exc_info() objects and offers
help for navigating the traceback.
"""
_striptext = ''
def __init__(self, tup=None, exprinfo=None):
# NB. all attributes are private! Subclasses or other
# ExceptionInfo-like classes may have different attributes.
if tup is None:
tup = sys.exc_info()
if exprinfo is None and isinstance(tup[1], py.code._AssertionError):
exprinfo = getattr(tup[1], 'msg', None)
if exprinfo is None:
exprinfo = str(tup[1])
if exprinfo and exprinfo.startswith('assert '):
self._striptext = 'AssertionError: '
self._excinfo = tup
self.type, self.value, tb = self._excinfo
self.typename = self.type.__name__
self.traceback = py.code.Traceback(tb)
def __repr__(self):
return "<ExceptionInfo %s tblen=%d>" % (self.typename, len(self.traceback))
def exconly(self, tryshort=False):
""" return the exception as a string
when 'tryshort' resolves to True, and the exception is a
py.code._AssertionError, only the actual exception part of
the exception representation is returned (so 'AssertionError: ' is
removed from the beginning)
"""
lines = py.std.traceback.format_exception_only(self.type, self.value)
text = ''.join(lines)
text = text.rstrip()
if tryshort:
if text.startswith(self._striptext):
text = text[len(self._striptext):]
return text
def errisinstance(self, exc):
""" return True if the exception is an instance of exc """
return isinstance(self.value, exc)
def _getreprcrash(self):
exconly = self.exconly(tryshort=True)
entry = self.traceback.getcrashentry()
path, lineno = entry.path, entry.lineno
reprcrash = ReprFileLocation(path, lineno+1, exconly)
return reprcrash
def getrepr(self, showlocals=False, style="long",
abspath=False, tbfilter=True, funcargs=False):
""" return str()able representation of this exception info.
showlocals: show locals per traceback entry
style: long|short|no|native traceback style
tbfilter: hide entries (where __tracebackhide__ is true)
"""
if style == 'native':
import traceback
return ''.join(traceback.format_exception(
self.type,
self.value,
self.traceback[0]._rawentry,
))
fmt = FormattedExcinfo(showlocals=showlocals, style=style,
abspath=abspath, tbfilter=tbfilter, funcargs=funcargs)
return fmt.repr_excinfo(self)
def __str__(self):
entry = self.traceback[-1]
loc = ReprFileLocation(entry.path, entry.lineno + 1, self.exconly())
return str(loc)
def __unicode__(self):
entry = self.traceback[-1]
loc = ReprFileLocation(entry.path, entry.lineno + 1, self.exconly())
return unicode(loc)
class FormattedExcinfo(object):
""" presenting information about failing Functions and Generators. """
# for traceback entries
flow_marker = ">"
fail_marker = "E"
def __init__(self, showlocals=False, style="long", abspath=True, tbfilter=True, funcargs=False):
self.showlocals = showlocals
self.style = style
self.tbfilter = tbfilter
self.funcargs = funcargs
self.abspath = abspath
def _getindent(self, source):
# figure out indent for given source
try:
s = str(source.getstatement(len(source)-1))
except KeyboardInterrupt:
raise
except:
try:
s = str(source[-1])
except KeyboardInterrupt:
raise
except:
return 0
return 4 + (len(s) - len(s.lstrip()))
def _getentrysource(self, entry):
source = entry.getsource()
if source is not None:
source = source.deindent()
return source
def _saferepr(self, obj):
return py.io.saferepr(obj)
def repr_args(self, entry):
if self.funcargs:
args = []
for argname, argvalue in entry.frame.getargs():
args.append((argname, self._saferepr(argvalue)))
return ReprFuncArgs(args)
def get_source(self, source, line_index=-1, excinfo=None, short=False):
""" return formatted and marked up source lines. """
lines = []
if source is None:
source = py.code.Source("???")
line_index = 0
if line_index < 0:
line_index += len(source)
for i in range(len(source)):
if i == line_index:
prefix = self.flow_marker + " "
else:
if short:
continue
prefix = " "
line = prefix + source[i]
lines.append(line)
if excinfo is not None:
indent = self._getindent(source)
lines.extend(self.get_exconly(excinfo, indent=indent, markall=True))
return lines
def get_exconly(self, excinfo, indent=4, markall=False):
lines = []
indent = " " * indent
# get the real exception information out
exlines = excinfo.exconly(tryshort=True).split('\n')
failindent = self.fail_marker + indent[1:]
for line in exlines:
lines.append(failindent + line)
if not markall:
failindent = indent
return lines
def repr_locals(self, locals):
if self.showlocals:
lines = []
keys = list(locals)
keys.sort()
for name in keys:
value = locals[name]
if name == '__builtins__':
lines.append("__builtins__ = <builtins>")
else:
# This formatting could all be handled by the
# _repr() function, which is only reprlib.Repr in
# disguise, so is very configurable.
str_repr = self._saferepr(value)
#if len(str_repr) < 70 or not isinstance(value,
# (list, tuple, dict)):
lines.append("%-10s = %s" %(name, str_repr))
#else:
# self._line("%-10s =\\" % (name,))
# # XXX
# py.std.pprint.pprint(value, stream=self.excinfowriter)
return ReprLocals(lines)
def repr_traceback_entry(self, entry, excinfo=None):
# excinfo is not None if this is the last tb entry
source = self._getentrysource(entry)
if source is None:
source = py.code.Source("???")
line_index = 0
else:
# entry.getfirstlinesource() can be -1, should be 0 on jython
line_index = entry.lineno - max(entry.getfirstlinesource(), 0)
lines = []
if self.style in ("short", "long"):
short = self.style == "short"
reprargs = None
if not short:
reprargs = self.repr_args(entry)
s = self.get_source(source, line_index, excinfo, short=short)
lines.extend(s)
if short:
message = "in %s" %(entry.name)
else:
message = excinfo and excinfo.typename or ""
path = self._makepath(entry.path)
filelocrepr = ReprFileLocation(path, entry.lineno+1, message)
localsrepr = None
if not short:
localsrepr = self.repr_locals(entry.locals)
return ReprEntry(lines, reprargs, localsrepr, filelocrepr, short)
if excinfo:
lines.extend(self.get_exconly(excinfo, indent=4))
return ReprEntry(lines, None, None, None, False)
def _makepath(self, path):
if not self.abspath:
np = py.path.local().bestrelpath(path)
if len(np) < len(str(path)):
path = np
return path
def repr_traceback(self, excinfo):
traceback = excinfo.traceback
if self.tbfilter:
traceback = traceback.filter()
recursionindex = None
if excinfo.errisinstance(RuntimeError):
recursionindex = traceback.recursionindex()
last = traceback[-1]
entries = []
extraline = None
for index, entry in enumerate(traceback):
einfo = (last == entry) and excinfo or None
reprentry = self.repr_traceback_entry(entry, einfo)
entries.append(reprentry)
if index == recursionindex:
extraline = "!!! Recursion detected (same locals & position)"
break
return ReprTraceback(entries, extraline, style=self.style)
def repr_excinfo(self, excinfo):
reprtraceback = self.repr_traceback(excinfo)
reprcrash = excinfo._getreprcrash()
return ReprExceptionInfo(reprtraceback, reprcrash)
class TerminalRepr:
def __str__(self):
s = self.__unicode__()
if sys.version_info[0] < 3:
s = s.encode('utf-8')
return s
def __unicode__(self):
l = []
tw = py.io.TerminalWriter(l.append)
self.toterminal(tw)
l = map(unicode_or_repr, l)
return "".join(l).strip()
def __repr__(self):
return "<%s instance at %0x>" %(self.__class__, id(self))
def unicode_or_repr(obj):
try:
return py.builtin._totext(obj)
except KeyboardInterrupt:
raise
except Exception:
return "<print-error: %r>" % py.io.saferepr(obj)
class ReprExceptionInfo(TerminalRepr):
def __init__(self, reprtraceback, reprcrash):
self.reprtraceback = reprtraceback
self.reprcrash = reprcrash
self.sections = []
def addsection(self, name, content, sep="-"):
self.sections.append((name, content, sep))
def toterminal(self, tw):
self.reprtraceback.toterminal(tw)
for name, content, sep in self.sections:
tw.sep(sep, name)
tw.line(content)
class ReprTraceback(TerminalRepr):
entrysep = "_ "
def __init__(self, reprentries, extraline, style):
self.reprentries = reprentries
self.extraline = extraline
self.style = style
def toterminal(self, tw):
sepok = False
for entry in self.reprentries:
if self.style == "long":
if sepok:
tw.sep(self.entrysep)
tw.line("")
sepok = True
entry.toterminal(tw)
if self.extraline:
tw.line(self.extraline)
class ReprEntry(TerminalRepr):
localssep = "_ "
def __init__(self, lines, reprfuncargs, reprlocals, filelocrepr, short):
self.lines = lines
self.reprfuncargs = reprfuncargs
self.reprlocals = reprlocals
self.reprfileloc = filelocrepr
self.short = short
def toterminal(self, tw):
if self.short:
self.reprfileloc.toterminal(tw)
for line in self.lines:
red = line.startswith("E ")
tw.line(line, bold=True, red=red)
#tw.line("")
return
if self.reprfuncargs:
self.reprfuncargs.toterminal(tw)
for line in self.lines:
red = line.startswith("E ")
tw.line(line, bold=True, red=red)
if self.reprlocals:
#tw.sep(self.localssep, "Locals")
tw.line("")
self.reprlocals.toterminal(tw)
if self.reprfileloc:
tw.line("")
self.reprfileloc.toterminal(tw)
def __str__(self):
return "%s\n%s\n%s" % ("\n".join(self.lines),
self.reprlocals,
self.reprfileloc)
class ReprFileLocation(TerminalRepr):
def __init__(self, path, lineno, message):
self.path = str(path)
self.lineno = lineno
self.message = message
def toterminal(self, tw):
# filename and lineno output for each entry,
# using an output format that most editors understand
msg = self.message
i = msg.find("\n")
if i != -1:
msg = msg[:i]
tw.line("%s:%s: %s" %(self.path, self.lineno, msg))
class ReprLocals(TerminalRepr):
def __init__(self, lines):
self.lines = lines
def toterminal(self, tw):
for line in self.lines:
tw.line(line)
class ReprFuncArgs(TerminalRepr):
def __init__(self, args):
self.args = args
def toterminal(self, tw):
if self.args:
linesofar = ""
for name, value in self.args:
ns = "%s = %s" %(name, value)
if len(ns) + len(linesofar) + 2 > tw.fullwidth:
if linesofar:
tw.line(linesofar)
linesofar = ns
else:
if linesofar:
linesofar += ", " + ns
else:
linesofar = ns
if linesofar:
tw.line(linesofar)
tw.line("")
oldbuiltins = {}
def patch_builtins(assertion=True, compile=True):
""" put compile and AssertionError builtins to Python's builtins. """
if assertion:
from py._code import assertion
l = oldbuiltins.setdefault('AssertionError', [])
l.append(py.builtin.builtins.AssertionError)
py.builtin.builtins.AssertionError = assertion.AssertionError
if compile:
l = oldbuiltins.setdefault('compile', [])
l.append(py.builtin.builtins.compile)
py.builtin.builtins.compile = py.code.compile
def unpatch_builtins(assertion=True, compile=True):
""" remove compile and AssertionError builtins from Python builtins. """
if assertion:
py.builtin.builtins.AssertionError = oldbuiltins['AssertionError'].pop()
if compile:
py.builtin.builtins.compile = oldbuiltins['compile'].pop()
def getrawcode(obj):
""" return code object for given function. """
obj = getattr(obj, 'im_func', obj)
obj = getattr(obj, 'func_code', obj)
obj = getattr(obj, 'f_code', obj)
obj = getattr(obj, '__code__', obj)
return obj
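For context, a rough sketch of how the py.code objects above were commonly combined (assuming the standalone py package; output comments are approximate):

import py

def fail():
    assert 0, "boom"

try:
    fail()
except AssertionError:
    excinfo = py.code.ExceptionInfo()              # wraps sys.exc_info()
    print(excinfo.exconly())                       # e.g. "AssertionError: boom"
    print(excinfo.traceback[-1].frame.code.name)   # "fail"
    print(excinfo.getrepr(style="short"))          # compact traceback rendering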

View File

@@ -1,62 +0,0 @@
""" deprecated module for turning on/off some features. """
import py
from py.builtin import builtins as cpy_builtin
def invoke(assertion=False, compile=False):
""" (deprecated) invoke magic, currently you can specify:
assertion patches the builtin AssertionError to try to give
more meaningful AssertionErrors, which by means
of deploying a mini-interpreter constructs
a useful error message.
"""
py.log._apiwarn("1.1",
"py.magic.invoke() is deprecated, use py.code.patch_builtins()",
stacklevel=2,
)
py.code.patch_builtins(assertion=assertion, compile=compile)
def revoke(assertion=False, compile=False):
""" (deprecated) revoke previously invoked magic (see invoke())."""
py.log._apiwarn("1.1",
"py.magic.revoke() is deprecated, use py.code.unpatch_builtins()",
stacklevel=2,
)
py.code.unpatch_builtins(assertion=assertion, compile=compile)
patched = {}
def patch(namespace, name, value):
""" (deprecated) rebind the 'name' on the 'namespace' to the 'value',
and remember the original value. Multiple
invocations to the same namespace/name pair will
remember a list of old values.
"""
py.log._apiwarn("1.1",
"py.magic.patch() is deprecated, in tests use monkeypatch funcarg.",
stacklevel=2,
)
nref = (namespace, name)
orig = getattr(namespace, name)
patched.setdefault(nref, []).append(orig)
setattr(namespace, name, value)
return orig
def revert(namespace, name):
""" (deprecated) revert to the orginal value the last patch modified.
Raise ValueError if no such original value exists.
"""
py.log._apiwarn("1.1",
"py.magic.revert() is deprecated, in tests use monkeypatch funcarg.",
stacklevel=2,
)
nref = (namespace, name)
if nref not in patched or not patched[nref]:
raise ValueError("No original value stored for %s.%s" % nref)
current = getattr(namespace, name)
orig = patched[nref].pop()
setattr(namespace, name, orig)
return current
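A small sketch of the deprecated patch/revert pair above, exposed as py.magic per the deprecation messages; os.path.sep is used purely as an example target:

import os
import py

orig = py.magic.patch(os.path, "sep", "/")   # rebind and remember the original
assert os.path.sep == "/"
py.magic.revert(os.path, "sep")              # restore the remembered value
assert os.path.sep == orig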

View File

@@ -1,6 +0,0 @@
import py
py.log._apiwarn("1.1", "py.magic.AssertionError is deprecated, use py.code._AssertionError", stacklevel=2)
from py.code import _AssertionError as AssertionError

View File

@@ -1,339 +0,0 @@
from __future__ import generators
import sys
import inspect, tokenize
import py
from types import ModuleType
cpy_compile = compile
try:
import _ast
from _ast import PyCF_ONLY_AST as _AST_FLAG
except ImportError:
_AST_FLAG = 0
_ast = None
class Source(object):
""" a immutable object holding a source code fragment,
possibly deindenting it.
"""
_compilecounter = 0
def __init__(self, *parts, **kwargs):
self.lines = lines = []
de = kwargs.get('deindent', True)
rstrip = kwargs.get('rstrip', True)
for part in parts:
if not part:
partlines = []
if isinstance(part, Source):
partlines = part.lines
elif isinstance(part, (tuple, list)):
partlines = [x.rstrip("\n") for x in part]
elif isinstance(part, py.builtin._basestring):
partlines = part.split('\n')
if rstrip:
while partlines:
if partlines[-1].strip():
break
partlines.pop()
else:
partlines = getsource(part, deindent=de).lines
if de:
partlines = deindent(partlines)
lines.extend(partlines)
def __eq__(self, other):
try:
return self.lines == other.lines
except AttributeError:
if isinstance(other, str):
return str(self) == other
return False
def __getitem__(self, key):
if isinstance(key, int):
return self.lines[key]
else:
if key.step not in (None, 1):
raise IndexError("cannot slice a Source with a step")
return self.__getslice__(key.start, key.stop)
def __len__(self):
return len(self.lines)
def __getslice__(self, start, end):
newsource = Source()
newsource.lines = self.lines[start:end]
return newsource
def strip(self):
""" return new source object with trailing
and leading blank lines removed.
"""
start, end = 0, len(self)
while start < end and not self.lines[start].strip():
start += 1
while end > start and not self.lines[end-1].strip():
end -= 1
source = Source()
source.lines[:] = self.lines[start:end]
return source
def putaround(self, before='', after='', indent=' ' * 4):
""" return a copy of the source object with
'before' and 'after' wrapped around it.
"""
before = Source(before)
after = Source(after)
newsource = Source()
lines = [ (indent + line) for line in self.lines]
newsource.lines = before.lines + lines + after.lines
return newsource
def indent(self, indent=' ' * 4):
""" return a copy of the source object with
all lines indented by the given indent-string.
"""
newsource = Source()
newsource.lines = [(indent+line) for line in self.lines]
return newsource
def getstatement(self, lineno):
""" return Source statement which contains the
given linenumber (counted from 0).
"""
start, end = self.getstatementrange(lineno)
return self[start:end]
def getstatementrange(self, lineno):
""" return (start, end) tuple which spans the minimal
statement region containing the given lineno.
"""
# XXX there must be a better way than these heuristics ...
# XXX there may even be better heuristics :-)
if not (0 <= lineno < len(self)):
raise IndexError("lineno out of range")
# 1. find the start of the statement
from codeop import compile_command
for start in range(lineno, -1, -1):
trylines = self.lines[start:lineno+1]
# quick hack to indent the source and get it as a string in one go
trylines.insert(0, 'def xxx():')
trysource = '\n '.join(trylines)
# ^ space here
try:
compile_command(trysource)
except (SyntaxError, OverflowError, ValueError):
continue
# 2. find the end of the statement
for end in range(lineno+1, len(self)+1):
trysource = self[start:end]
if trysource.isparseable():
return start, end
return start, end
def getblockend(self, lineno):
# XXX
lines = [x + '\n' for x in self.lines[lineno:]]
blocklines = inspect.getblock(lines)
#print blocklines
return lineno + len(blocklines) - 1
def deindent(self, offset=None):
""" return a new source object deindented by offset.
If offset is None then guess an indentation offset from
the first non-blank line. Subsequent lines which have a
lower indentation offset will be copied verbatim as
they are assumed to be part of multilines.
"""
# XXX maybe use the tokenizer to properly handle multiline
# strings etc.pp?
newsource = Source()
newsource.lines[:] = deindent(self.lines, offset)
return newsource
def isparseable(self, deindent=True):
""" return True if source is parseable, heuristically
deindenting it by default.
"""
try:
import parser
except ImportError:
syntax_checker = lambda x: compile(x, 'asd', 'exec')
else:
syntax_checker = parser.suite
if deindent:
source = str(self.deindent())
else:
source = str(self)
try:
#compile(source+'\n', "x", "exec")
syntax_checker(source+'\n')
except KeyboardInterrupt:
raise
except Exception:
return False
else:
return True
def __str__(self):
return "\n".join(self.lines)
def compile(self, filename=None, mode='exec',
flag=generators.compiler_flag,
dont_inherit=0, _genframe=None):
""" return compiled code object. if filename is None
invent an artificial filename which displays
the source/line position of the caller frame.
"""
if not filename or py.path.local(filename).check(file=0):
if _genframe is None:
_genframe = sys._getframe(1) # the caller
fn,lineno = _genframe.f_code.co_filename, _genframe.f_lineno
base = "<%d-codegen " % self._compilecounter
self.__class__._compilecounter += 1
if not filename:
filename = base + '%s:%d>' % (fn, lineno)
else:
filename = base + '%r %s:%d>' % (filename, fn, lineno)
source = "\n".join(self.lines) + '\n'
try:
co = cpy_compile(source, filename, mode, flag)
except SyntaxError:
ex = sys.exc_info()[1]
# re-represent syntax errors from parsing python strings
msglines = self.lines[:ex.lineno]
if ex.offset:
msglines.append(" "*ex.offset + '^')
msglines.append("syntax error probably generated here: %s" % filename)
newex = SyntaxError('\n'.join(msglines))
newex.offset = ex.offset
newex.lineno = ex.lineno
newex.text = ex.text
raise newex
else:
if flag & _AST_FLAG:
return co
lines = [(x + "\n") for x in self.lines]
if sys.version_info[0] >= 3:
# XXX py3's inspect.getsourcefile() checks for a module
# and a pep302 __loader__ ... we don't have a module
# at code compile-time so we need to fake it here
m = ModuleType("_pycodecompile_pseudo_module")
py.std.inspect.modulesbyfile[filename] = None
py.std.sys.modules[None] = m
m.__loader__ = 1
py.std.linecache.cache[filename] = (1, None, lines, filename)
return co
#
# public API shortcut functions
#
def compile_(source, filename=None, mode='exec', flags=
generators.compiler_flag, dont_inherit=0):
""" compile the given source to a raw code object,
and maintain an internal cache which allows later
retrieval of the source code for the code object
and any recursively created code objects.
"""
if _ast is not None and isinstance(source, _ast.AST):
# XXX should Source support having AST?
return cpy_compile(source, filename, mode, flags, dont_inherit)
_genframe = sys._getframe(1) # the caller
s = Source(source)
co = s.compile(filename, mode, flags, _genframe=_genframe)
return co
def getfslineno(obj):
try:
code = py.code.Code(obj)
except TypeError:
# fallback to
fn = (py.std.inspect.getsourcefile(obj) or
py.std.inspect.getfile(obj))
fspath = fn and py.path.local(fn) or None
if fspath:
try:
_, lineno = findsource(obj)
except IOError:
lineno = None
else:
lineno = None
else:
fspath = code.path
lineno = code.firstlineno
return fspath, lineno
#
# helper functions
#
def findsource(obj):
try:
sourcelines, lineno = py.std.inspect.findsource(obj)
except py.builtin._sysex:
raise
except:
return None, None
source = Source()
source.lines = [line.rstrip() for line in sourcelines]
return source, lineno
def getsource(obj, **kwargs):
obj = py.code.getrawcode(obj)
try:
strsrc = inspect.getsource(obj)
except IndentationError:
strsrc = "\"Buggy python version consider upgrading, cannot get source\""
assert isinstance(strsrc, str)
return Source(strsrc, **kwargs)
def deindent(lines, offset=None):
if offset is None:
for line in lines:
line = line.expandtabs()
s = line.lstrip()
if s:
offset = len(line)-len(s)
break
else:
offset = 0
if offset == 0:
return list(lines)
newlines = []
def readline_generator(lines):
for line in lines:
yield line + '\n'
while True:
yield ''
r = readline_generator(lines)
try:
readline = r.next
except AttributeError:
readline = r.__next__
try:
for _, _, (sline, _), (eline, _), _ in tokenize.generate_tokens(readline):
if sline > len(lines):
break # End of input reached
if sline > len(newlines):
line = lines[sline - 1].expandtabs()
if line.lstrip() and line[:offset].isspace():
line = line[offset:] # Deindent
newlines.append(line)
for i in range(sline, eline):
# Don't deindent continuing lines of
# multiline tokens (i.e. multiline strings)
newlines.append(lines[i])
except (IndentationError, tokenize.TokenError):
pass
# Add any lines we didn't see. E.g. if an exception was raised.
newlines.extend(lines[len(newlines):])
return newlines
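A brief hedged sketch of the Source API defined above, assuming the standalone py package; result comments are approximate:

import py

source = py.code.Source("""
    def f(x):
        return x + 1
""")
print(str(source))             # deindented source text of the fragment
print(source.getstatement(1))  # the statement containing (0-based) line 1
co = source.compile()          # code object under an invented filename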

View File

@@ -1,2 +0,0 @@
""" compatibility modules (taken from 2.4.4) """

View File

@@ -1,5 +0,0 @@
import py
py.log._apiwarn("1.1", "py.compat.doctest deprecated, use standard library version.",
stacklevel="apipkg")
doctest = py.std.doctest

View File

@@ -1,4 +0,0 @@
import py
py.log._apiwarn("1.1", "py.compat.optparse deprecated, use standard library version.", stacklevel="apipkg")
optparse = py.std.optparse

View File

@@ -1,5 +0,0 @@
import py
py.log._apiwarn("1.1", "py.compat.subprocess deprecated, use standard library version.",
stacklevel="apipkg")
subprocess = py.std.subprocess

View File

@@ -1,5 +0,0 @@
import py
py.log._apiwarn("1.1", "py.compat.textwrap deprecated, use standard library version.",
stacklevel="apipkg")
textwrap = py.std.textwrap

View File

@@ -1,83 +0,0 @@
"""
create errno-specific classes for IO or os calls.
"""
import sys, os, errno
class Error(EnvironmentError):
def __repr__(self):
return "%s.%s %r: %s " %(self.__class__.__module__,
self.__class__.__name__,
self.__class__.__doc__,
" ".join(map(str, self.args)),
#repr(self.args)
)
def __str__(self):
s = "[%s]: %s" %(self.__class__.__doc__,
" ".join(map(str, self.args)),
)
return s
_winerrnomap = {
2: errno.ENOENT,
3: errno.ENOENT,
17: errno.EEXIST,
22: errno.ENOTDIR,
267: errno.ENOTDIR,
5: errno.EACCES, # anything better?
}
class ErrorMaker(object):
""" lazily provides Exception classes for each possible POSIX errno
(as defined per the 'errno' module). All such instances
subclass EnvironmentError.
"""
Error = Error
_errno2class = {}
def __getattr__(self, name):
eno = getattr(errno, name)
cls = self._geterrnoclass(eno)
setattr(self, name, cls)
return cls
def _geterrnoclass(self, eno):
try:
return self._errno2class[eno]
except KeyError:
clsname = errno.errorcode.get(eno, "UnknownErrno%d" %(eno,))
errorcls = type(Error)(clsname, (Error,),
{'__module__':'py.error',
'__doc__': os.strerror(eno)})
self._errno2class[eno] = errorcls
return errorcls
def checked_call(self, func, *args, **kwargs):
""" call a function and raise an errno-exception if applicable. """
__tracebackhide__ = True
try:
return func(*args, **kwargs)
except self.Error:
raise
except EnvironmentError:
cls, value, tb = sys.exc_info()
if not hasattr(value, 'errno'):
raise
__tracebackhide__ = False
errno = value.errno
try:
if not isinstance(value, WindowsError):
raise NameError
except NameError:
# we are not on Windows, or we got a proper OSError
cls = self._geterrnoclass(errno)
else:
try:
cls = self._geterrnoclass(_winerrnomap[errno])
except KeyError:
raise value
raise cls("%s%r" % (func.__name__, args))
__tracebackhide__ = True
error = ErrorMaker()
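A short sketch of the errno mapping above in use; the missing path is invented:

import os
import py

try:
    py.error.checked_call(os.stat, "no/such/file")
except py.error.ENOENT:
    # checked_call turned the OSError with errno ENOENT into the lazily
    # generated errno-specific class py.error.ENOENT
    print("caught py.error.ENOENT")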

View File

@@ -1 +0,0 @@
""" input/output helping """

View File

@@ -1,352 +0,0 @@
import os
import sys
import py
import tempfile
try:
from io import StringIO
except ImportError:
from StringIO import StringIO
if sys.version_info < (3,0):
class TextIO(StringIO):
def write(self, data):
if not isinstance(data, unicode):
data = unicode(data, getattr(self, '_encoding', 'UTF-8'))
StringIO.write(self, data)
else:
TextIO = StringIO
try:
from io import BytesIO
except ImportError:
class BytesIO(StringIO):
def write(self, data):
if isinstance(data, unicode):
raise TypeError("not a byte value: %r" %(data,))
StringIO.write(self, data)
patchsysdict = {0: 'stdin', 1: 'stdout', 2: 'stderr'}
class FDCapture:
""" Capture IO to/from a given os-level filedescriptor. """
def __init__(self, targetfd, tmpfile=None, now=True, patchsys=False):
""" save targetfd descriptor, and open a new
temporary file there. If no tmpfile is
specified, a tempfile.TemporaryFile() will be opened
in text mode.
"""
self.targetfd = targetfd
if tmpfile is None and targetfd != 0:
f = tempfile.TemporaryFile('wb+')
tmpfile = dupfile(f, encoding="UTF-8")
f.close()
self.tmpfile = tmpfile
self._savefd = os.dup(self.targetfd)
if patchsys:
self._oldsys = getattr(sys, patchsysdict[targetfd])
if now:
self.start()
def start(self):
try:
os.fstat(self._savefd)
except OSError:
raise ValueError("saved filedescriptor not valid, "
"did you call start() twice?")
if self.targetfd == 0 and not self.tmpfile:
fd = os.open(devnullpath, os.O_RDONLY)
os.dup2(fd, 0)
os.close(fd)
if hasattr(self, '_oldsys'):
setattr(sys, patchsysdict[self.targetfd], DontReadFromInput())
else:
fd = self.tmpfile.fileno()
os.dup2(self.tmpfile.fileno(), self.targetfd)
if hasattr(self, '_oldsys'):
setattr(sys, patchsysdict[self.targetfd], self.tmpfile)
def done(self):
""" unpatch and clean up, returns the self.tmpfile (file object)
"""
os.dup2(self._savefd, self.targetfd)
os.close(self._savefd)
if self.targetfd != 0:
self.tmpfile.seek(0)
if hasattr(self, '_oldsys'):
setattr(sys, patchsysdict[self.targetfd], self._oldsys)
return self.tmpfile
def writeorg(self, data):
""" write a string to the original file descriptor
"""
tempfp = tempfile.TemporaryFile()
try:
os.dup2(self._savefd, tempfp.fileno())
tempfp.write(data)
finally:
tempfp.close()
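A small hedged sketch of driving the FDCapture class defined above; the comments describe the expected effect, not guaranteed output:

cap = FDCapture(1, patchsys=True)   # redirect fd 1 (and sys.stdout) right away
print("hello")                      # written into the temporary file
f = cap.done()                      # restore fd 1 / sys.stdout, get the file back
print(f.read())                     # roughly: "hello"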
def dupfile(f, mode=None, buffering=0, raising=False, encoding=None):
""" return a new open file object that's a duplicate of f
mode is duplicated if not given, 'buffering' controls
buffer size (defaulting to no buffering) and 'raising'
defines whether an exception is raised when an incompatible
file object is passed in (if raising is False, the file
object itself will be returned)
"""
try:
fd = f.fileno()
except AttributeError:
if raising:
raise
return f
newfd = os.dup(fd)
mode = mode and mode or f.mode
if sys.version_info >= (3,0):
if encoding is not None:
mode = mode.replace("b", "")
buffering = True
return os.fdopen(newfd, mode, buffering, encoding, closefd=True)
else:
f = os.fdopen(newfd, mode, buffering)
if encoding is not None:
return EncodedFile(f, encoding)
return f
class EncodedFile(object):
def __init__(self, _stream, encoding):
self._stream = _stream
self.encoding = encoding
def write(self, obj):
if isinstance(obj, unicode):
obj = obj.encode(self.encoding)
elif isinstance(obj, str):
pass
else:
obj = str(obj)
self._stream.write(obj)
def writelines(self, linelist):
data = ''.join(linelist)
self.write(data)
def __getattr__(self, name):
return getattr(self._stream, name)
class Capture(object):
def call(cls, func, *args, **kwargs):
""" return a (res, out, err) tuple where
out and err represent the output/error output
during function execution.
call the given function with args/kwargs
and capture output/error during its execution.
"""
so = cls()
try:
res = func(*args, **kwargs)
finally:
out, err = so.reset()
return res, out, err
call = classmethod(call)
def reset(self):
""" reset sys.stdout/stderr and return captured output as strings. """
outfile, errfile = self.done()
out, err = "", ""
if outfile and not outfile.closed:
out = outfile.read()
outfile.close()
if errfile and errfile != outfile and not errfile.closed:
err = errfile.read()
errfile.close()
return out, err
def suspend(self):
""" return current snapshot captures, memorize tempfiles. """
outerr = self.readouterr()
outfile, errfile = self.done()
return outerr
class StdCaptureFD(Capture):
""" This class allows to capture writes to FD1 and FD2
and may connect a NULL file to FD0 (and prevent
reads from sys.stdin). If any of the 0,1,2 file descriptors
is invalid it will not be captured.
"""
def __init__(self, out=True, err=True, mixed=False,
in_=True, patchsys=True, now=True):
self._options = locals()
self._save()
if now:
self.startall()
def _save(self):
in_ = self._options['in_']
out = self._options['out']
err = self._options['err']
mixed = self._options['mixed']
patchsys = self._options['patchsys']
if in_:
try:
self.in_ = FDCapture(0, tmpfile=None, now=False,
patchsys=patchsys)
except OSError:
pass
if out:
tmpfile = None
if hasattr(out, 'write'):
tmpfile = out
try:
self.out = FDCapture(1, tmpfile=tmpfile,
now=False, patchsys=patchsys)
self._options['out'] = self.out.tmpfile
except OSError:
pass
if err:
if out and mixed:
tmpfile = self.out.tmpfile
elif hasattr(err, 'write'):
tmpfile = err
else:
tmpfile = None
try:
self.err = FDCapture(2, tmpfile=tmpfile,
now=False, patchsys=patchsys)
self._options['err'] = self.err.tmpfile
except OSError:
pass
def startall(self):
if hasattr(self, 'in_'):
self.in_.start()
if hasattr(self, 'out'):
self.out.start()
if hasattr(self, 'err'):
self.err.start()
def resume(self):
""" resume capturing with original temp files. """
self.startall()
def done(self):
""" return (outfile, errfile) and stop capturing. """
outfile = errfile = None
if hasattr(self, 'out') and not self.out.tmpfile.closed:
outfile = self.out.done()
if hasattr(self, 'err') and not self.err.tmpfile.closed:
errfile = self.err.done()
if hasattr(self, 'in_'):
tmpfile = self.in_.done()
self._save()
return outfile, errfile
def readouterr(self):
""" return snapshot value of stdout/stderr capturings. """
l = []
for name in ('out', 'err'):
res = ""
if hasattr(self, name):
f = getattr(self, name).tmpfile
f.seek(0)
res = f.read()
f.truncate(0)
f.seek(0)
l.append(res)
return l
class StdCapture(Capture):
""" This class allows to capture writes to sys.stdout|stderr "in-memory"
and will raise errors on tries to read from sys.stdin. It only
modifies sys.stdout|stderr|stdin attributes and does not
touch underlying File Descriptors (use StdCaptureFD for that).
"""
def __init__(self, out=True, err=True, in_=True, mixed=False, now=True):
self._oldout = sys.stdout
self._olderr = sys.stderr
self._oldin = sys.stdin
if out and not hasattr(out, 'file'):
out = TextIO()
self.out = out
if err:
if mixed:
err = out
elif not hasattr(err, 'write'):
err = TextIO()
self.err = err
self.in_ = in_
if now:
self.startall()
def startall(self):
if self.out:
sys.stdout = self.out
if self.err:
sys.stderr = self.err
if self.in_:
sys.stdin = self.in_ = DontReadFromInput()
def done(self):
""" return (outfile, errfile) and stop capturing. """
outfile = errfile = None
if self.out and not self.out.closed:
sys.stdout = self._oldout
outfile = self.out
outfile.seek(0)
if self.err and not self.err.closed:
sys.stderr = self._olderr
errfile = self.err
errfile.seek(0)
if self.in_:
sys.stdin = self._oldin
return outfile, errfile
def resume(self):
""" resume capturing with original temp files. """
self.startall()
def readouterr(self):
""" return snapshot value of stdout/stderr capturings. """
out = err = ""
if self.out:
out = self.out.getvalue()
self.out.truncate(0)
self.out.seek(0)
if self.err:
err = self.err.getvalue()
self.err.truncate(0)
self.err.seek(0)
return out, err
class DontReadFromInput:
"""Temporary stub class. Ideally when stdin is accessed, the
capturing should be turned off, with possibly all data captured
so far sent to the screen. This should be configurable, though,
because in automated test runs it is better to crash than
hang indefinitely.
"""
def read(self, *args):
raise IOError("reading from stdin while output is captured")
readline = read
readlines = read
__iter__ = read
def fileno(self):
raise ValueError("redirected Stdin is pseudofile, has no fileno()")
def isatty(self):
return False
def close(self):
pass
try:
devnullpath = os.devnull
except AttributeError:
if os.name == 'nt':
devnullpath = 'NUL'
else:
devnullpath = '/dev/null'

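# Usage sketch for the capture helpers above (public spellings py.io.StdCapture
# and py.io.StdCaptureFD at the time of this commit):
import py

cap = py.io.StdCapture()        # swap sys.stdout/sys.stderr for in-memory buffers
print("hello")                  # lands in the capture buffer, not on the terminal
out, err = cap.reset()          # restore the real streams and return captured text
assert out == "hello\n" and err == ""

# StdCaptureFD behaves the same but dup()s the OS-level file descriptors 1 and 2,
# so output written by C extensions or subprocesses is captured as well.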
View File

@ -1,54 +0,0 @@
import py
import sys, os.path
builtin_repr = repr
reprlib = py.builtin._tryimport('repr', 'reprlib')
class SafeRepr(reprlib.Repr):
""" subclass of repr.Repr that limits the resulting size of repr()
and includes information on exceptions raised during the call.
"""
def repr(self, x):
return self._callhelper(reprlib.Repr.repr, self, x)
def repr_instance(self, x, level):
return self._callhelper(builtin_repr, x)
def _callhelper(self, call, x, *args):
try:
# Try the vanilla repr and make sure that the result is a string
s = call(x, *args)
except py.builtin._sysex:
raise
except:
cls, e, tb = sys.exc_info()
exc_name = getattr(cls, '__name__', 'unknown')
try:
exc_info = str(e)
except py.builtin._sysex:
raise
except:
exc_info = 'unknown'
return '<[%s("%s") raised in repr()] %s object at 0x%x>' % (
exc_name, exc_info, x.__class__.__name__, id(x))
else:
if len(s) > self.maxsize:
i = max(0, (self.maxsize-3)//2)
j = max(0, self.maxsize-3-i)
s = s[:i] + '...' + s[len(s)-j:]
return s
def saferepr(obj, maxsize=240):
""" return a size-limited safe repr-string for the given object.
Failing __repr__ functions of user instances will be represented
with a short exception info and 'saferepr' generally takes
care to never raise exceptions itself. This function is a wrapper
around the Repr/reprlib functionality of the standard 2.6 lib.
"""
# review exception handling
srepr = SafeRepr()
srepr.maxstring = maxsize
srepr.maxsize = maxsize
srepr.maxother = 160
return srepr.repr(obj)

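# Sketch of what saferepr() above guards against (public spelling py.io.saferepr):
import py

class Broken(object):
    def __repr__(self):
        raise RuntimeError("boom")

# the exception is embedded in the result instead of propagating, e.g.
# <[RuntimeError("boom") raised in repr()] Broken object at 0x...>
print(py.io.saferepr(Broken()))
print(py.io.saferepr("x" * 1000, maxsize=40))   # long reprs are truncated around '...'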
View File

@ -1,285 +0,0 @@
"""
Helper functions for writing to terminals and files.
"""
import sys, os
import py
win32_and_ctypes = False
if sys.platform == "win32":
try:
import ctypes
win32_and_ctypes = True
except ImportError:
pass
def _getdimensions():
import termios,fcntl,struct
call = fcntl.ioctl(1,termios.TIOCGWINSZ,"\000"*8)
height, width = struct.unpack("hhhh", call)[:2]
return height, width
def get_terminal_width():
try:
height, width = _getdimensions()
except py.builtin._sysex:
raise
except:
# FALLBACK
width = int(os.environ.get('COLUMNS', 80))
else:
# XXX the windows getdimensions may be bogus, let's sanify a bit
if width < 40:
width = 80
return width
terminal_width = get_terminal_width()
# XXX unify with _escaped func below
def ansi_print(text, esc, file=None, newline=True, flush=False):
if file is None:
file = sys.stderr
text = text.rstrip()
if esc and not isinstance(esc, tuple):
esc = (esc,)
if esc and sys.platform != "win32" and file.isatty():
text = (''.join(['\x1b[%sm' % cod for cod in esc]) +
text +
'\x1b[0m') # ANSI color code "reset"
if newline:
text += '\n'
if esc and win32_and_ctypes and file.isatty():
if 1 in esc:
bold = True
esc = tuple([x for x in esc if x != 1])
else:
bold = False
esctable = {() : FOREGROUND_WHITE, # normal
(31,): FOREGROUND_RED, # red
(32,): FOREGROUND_GREEN, # green
(33,): FOREGROUND_GREEN|FOREGROUND_RED, # yellow
(34,): FOREGROUND_BLUE, # blue
(35,): FOREGROUND_BLUE|FOREGROUND_RED, # purple
(36,): FOREGROUND_BLUE|FOREGROUND_GREEN, # cyan
(37,): FOREGROUND_WHITE, # white
(39,): FOREGROUND_WHITE, # reset
}
attr = esctable.get(esc, FOREGROUND_WHITE)
if bold:
attr |= FOREGROUND_INTENSITY
STD_OUTPUT_HANDLE = -11
STD_ERROR_HANDLE = -12
if file is sys.stderr:
handle = GetStdHandle(STD_ERROR_HANDLE)
else:
handle = GetStdHandle(STD_OUTPUT_HANDLE)
oldcolors = GetConsoleInfo(handle).wAttributes
attr |= (oldcolors & 0x0f0)
SetConsoleTextAttribute(handle, attr)
file.write(text)
SetConsoleTextAttribute(handle, oldcolors)
else:
file.write(text)
if flush:
file.flush()
def should_do_markup(file):
return hasattr(file, 'isatty') and file.isatty() \
and os.environ.get('TERM') != 'dumb' \
and not (sys.platform.startswith('java') and os._name == 'nt')
class TerminalWriter(object):
_esctable = dict(black=30, red=31, green=32, yellow=33,
blue=34, purple=35, cyan=36, white=37,
Black=40, Red=41, Green=42, Yellow=43,
Blue=44, Purple=45, Cyan=46, White=47,
bold=1, light=2, blink=5, invert=7)
# XXX deprecate stringio argument
def __init__(self, file=None, stringio=False, encoding=None):
if file is None:
if stringio:
self.stringio = file = py.io.TextIO()
else:
file = py.std.sys.stdout
if hasattr(file, 'encoding'):
encoding = file.encoding
elif hasattr(file, '__call__'):
file = WriteFile(file, encoding=encoding)
self.encoding = encoding
self._file = file
self.fullwidth = get_terminal_width()
self.hasmarkup = should_do_markup(file)
def _escaped(self, text, esc):
if esc and self.hasmarkup:
text = (''.join(['\x1b[%sm' % cod for cod in esc]) +
text +'\x1b[0m')
return text
def markup(self, text, **kw):
esc = []
for name in kw:
if name not in self._esctable:
raise ValueError("unknown markup: %r" %(name,))
if kw[name]:
esc.append(self._esctable[name])
return self._escaped(text, tuple(esc))
def sep(self, sepchar, title=None, fullwidth=None, **kw):
if fullwidth is None:
fullwidth = self.fullwidth
# the goal is to have the line be as long as possible
# under the condition that len(line) <= fullwidth
if title is not None:
# we want 2 + 2*len(fill) + len(title) <= fullwidth
# i.e. 2 + 2*len(sepchar)*N + len(title) <= fullwidth
# 2*len(sepchar)*N <= fullwidth - len(title) - 2
# N <= (fullwidth - len(title) - 2) // (2*len(sepchar))
N = (fullwidth - len(title) - 2) // (2*len(sepchar))
fill = sepchar * N
line = "%s %s %s" % (fill, title, fill)
else:
# we want len(sepchar)*N <= fullwidth
# i.e. N <= fullwidth // len(sepchar)
line = sepchar * (fullwidth // len(sepchar))
# in some situations there is room for an extra sepchar at the right,
# in particular if we consider that with a sepchar like "_ " the
# trailing space is not important at the end of the line
if len(line) + len(sepchar.rstrip()) <= fullwidth:
line += sepchar.rstrip()
self.line(line, **kw)
def write(self, s, **kw):
if s:
if not isinstance(self._file, WriteFile):
s = self._getbytestring(s)
if self.hasmarkup and kw:
s = self.markup(s, **kw)
self._file.write(s)
self._file.flush()
def _getbytestring(self, s):
# XXX review this and the whole logic
if self.encoding and sys.version_info[0] < 3 and isinstance(s, unicode):
return s.encode(self.encoding)
elif not isinstance(s, str):
try:
return str(s)
except UnicodeEncodeError:
return "<print-error '%s' object>" % type(s).__name__
return s
def line(self, s='', **kw):
self.write(s, **kw)
self.write('\n')
class Win32ConsoleWriter(TerminalWriter):
def write(self, s, **kw):
if s:
oldcolors = None
if self.hasmarkup and kw:
handle = GetStdHandle(STD_OUTPUT_HANDLE)
oldcolors = GetConsoleInfo(handle).wAttributes
default_bg = oldcolors & 0x00F0
attr = default_bg
if kw.pop('bold', False):
attr |= FOREGROUND_INTENSITY
if kw.pop('red', False):
attr |= FOREGROUND_RED
elif kw.pop('blue', False):
attr |= FOREGROUND_BLUE
elif kw.pop('green', False):
attr |= FOREGROUND_GREEN
else:
attr |= FOREGROUND_BLACK # (oldcolors & 0x0007)
SetConsoleTextAttribute(handle, attr)
if not isinstance(self._file, WriteFile):
s = self._getbytestring(s)
self._file.write(s)
self._file.flush()
if oldcolors:
SetConsoleTextAttribute(handle, oldcolors)
def line(self, s="", **kw):
self.write(s+"\n", **kw)
class WriteFile(object):
def __init__(self, writemethod, encoding=None):
self.encoding = encoding
self._writemethod = writemethod
def write(self, data):
if self.encoding:
data = data.encode(self.encoding)
self._writemethod(data)
def flush(self):
return
if win32_and_ctypes:
TerminalWriter = Win32ConsoleWriter
import ctypes
from ctypes import wintypes
# ctypes access to the Windows console
STD_OUTPUT_HANDLE = -11
STD_ERROR_HANDLE = -12
FOREGROUND_BLACK = 0x0000 # black text
FOREGROUND_BLUE = 0x0001 # text color contains blue.
FOREGROUND_GREEN = 0x0002 # text color contains green.
FOREGROUND_RED = 0x0004 # text color contains red.
FOREGROUND_WHITE = 0x0007
FOREGROUND_INTENSITY = 0x0008 # text color is intensified.
BACKGROUND_BLACK = 0x0000 # background color black
BACKGROUND_BLUE = 0x0010 # background color contains blue.
BACKGROUND_GREEN = 0x0020 # background color contains green.
BACKGROUND_RED = 0x0040 # background color contains red.
BACKGROUND_WHITE = 0x0070
BACKGROUND_INTENSITY = 0x0080 # background color is intensified.
SHORT = ctypes.c_short
class COORD(ctypes.Structure):
_fields_ = [('X', SHORT),
('Y', SHORT)]
class SMALL_RECT(ctypes.Structure):
_fields_ = [('Left', SHORT),
('Top', SHORT),
('Right', SHORT),
('Bottom', SHORT)]
class CONSOLE_SCREEN_BUFFER_INFO(ctypes.Structure):
_fields_ = [('dwSize', COORD),
('dwCursorPosition', COORD),
('wAttributes', wintypes.WORD),
('srWindow', SMALL_RECT),
('dwMaximumWindowSize', COORD)]
def GetStdHandle(kind):
return ctypes.windll.kernel32.GetStdHandle(kind)
SetConsoleTextAttribute = \
ctypes.windll.kernel32.SetConsoleTextAttribute
def GetConsoleInfo(handle):
info = CONSOLE_SCREEN_BUFFER_INFO()
ctypes.windll.kernel32.GetConsoleScreenBufferInfo(\
handle, ctypes.byref(info))
return info
def _getdimensions():
handle = GetStdHandle(STD_OUTPUT_HANDLE)
info = GetConsoleInfo(handle)
# Subtract one from the width, otherwise the cursor wraps
# and the ending \n causes an empty line to display.
return info.dwSize.Y, info.dwSize.X - 1

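# Usage sketch for TerminalWriter above (public spelling py.io.TerminalWriter);
# the markup keywords correspond to _esctable entries and are simply ignored
# when the target stream does not support markup:
import py

tw = py.io.TerminalWriter()
tw.sep("=", "test session starts")          # full-width separator with centered title
tw.line("1 passed", green=True, bold=True)  # colored/bold line when writing to a tty
tw.write("no newline here... ")
tw.line("done")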
View File

@ -1,2 +0,0 @@
""" logging API ('producers' and 'consumers' connected via keywords) """

View File

@ -1,186 +0,0 @@
"""
basic logging functionality based on a producer/consumer scheme.
XXX implement this API: (maybe put it into slogger.py?)
log = Logger(
info=py.log.STDOUT,
debug=py.log.STDOUT,
command=None)
log.info("hello", "world")
log.command("hello", "world")
log = Logger(info=Logger(something=...),
debug=py.log.STDOUT,
command=None)
"""
import py, sys
class Message(object):
def __init__(self, keywords, args):
self.keywords = keywords
self.args = args
def content(self):
return " ".join(map(str, self.args))
def prefix(self):
return "[%s] " % (":".join(self.keywords))
def __str__(self):
return self.prefix() + self.content()
class Producer(object):
""" (deprecated) Log producer API which sends messages to be logged
to a 'consumer' object, which then prints them to stdout,
stderr, files, etc. Used extensively by PyPy-1.1.
"""
Message = Message # to allow later customization
keywords2consumer = {}
def __init__(self, keywords, keywordmapper=None, **kw):
if hasattr(keywords, 'split'):
keywords = tuple(keywords.split())
self._keywords = keywords
if keywordmapper is None:
keywordmapper = default_keywordmapper
self._keywordmapper = keywordmapper
def __repr__(self):
return "<py.log.Producer %s>" % ":".join(self._keywords)
def __getattr__(self, name):
if '_' in name:
raise AttributeError(name)
producer = self.__class__(self._keywords + (name,))
setattr(self, name, producer)
return producer
def __call__(self, *args):
""" write a message to the appropriate consumer(s) """
func = self._keywordmapper.getconsumer(self._keywords)
if func is not None:
func(self.Message(self._keywords, args))
class KeywordMapper:
def __init__(self):
self.keywords2consumer = {}
def getstate(self):
return self.keywords2consumer.copy()
def setstate(self, state):
self.keywords2consumer.clear()
self.keywords2consumer.update(state)
def getconsumer(self, keywords):
""" return a consumer matching the given keywords.
tries to find the most suitable consumer by stripping keywords from
the end one by one; the first consumer registered for a remaining
keyword prefix is returned (falling back to the 'default' consumer).
"""
for i in range(len(keywords), 0, -1):
try:
return self.keywords2consumer[keywords[:i]]
except KeyError:
continue
return self.keywords2consumer.get('default', default_consumer)
def setconsumer(self, keywords, consumer):
""" set a consumer for a set of keywords. """
# normalize to tuples
if isinstance(keywords, str):
keywords = tuple(filter(None, keywords.split()))
elif hasattr(keywords, '_keywords'):
keywords = keywords._keywords
elif not isinstance(keywords, tuple):
raise TypeError("key %r is not a string or tuple" % (keywords,))
if consumer is not None and not py.builtin.callable(consumer):
if not hasattr(consumer, 'write'):
raise TypeError(
"%r should be None, callable or file-like" % (consumer,))
consumer = File(consumer)
self.keywords2consumer[keywords] = consumer
def default_consumer(msg):
""" the default consumer, prints the message to stdout (using 'print') """
sys.stderr.write(str(msg)+"\n")
default_keywordmapper = KeywordMapper()
def setconsumer(keywords, consumer):
default_keywordmapper.setconsumer(keywords, consumer)
def setstate(state):
default_keywordmapper.setstate(state)
def getstate():
return default_keywordmapper.getstate()
#
# Consumers
#
class File(object):
""" log consumer wrapping a file(-like) object """
def __init__(self, f):
assert hasattr(f, 'write')
#assert isinstance(f, file) or not hasattr(f, 'open')
self._file = f
def __call__(self, msg):
""" write a message to the log """
self._file.write(str(msg) + "\n")
if hasattr(self._file, 'flush'):
self._file.flush()
class Path(object):
""" log consumer that opens and writes to a Path """
def __init__(self, filename, append=False,
delayed_create=False, buffering=False):
self._append = append
self._filename = str(filename)
self._buffering = buffering
if not delayed_create:
self._openfile()
def _openfile(self):
mode = self._append and 'a' or 'w'
f = open(self._filename, mode)
self._file = f
def __call__(self, msg):
""" write a message to the log """
if not hasattr(self, "_file"):
self._openfile()
self._file.write(str(msg) + "\n")
if not self._buffering:
self._file.flush()
def STDOUT(msg):
""" consumer that writes to sys.stdout """
sys.stdout.write(str(msg)+"\n")
def STDERR(msg):
""" consumer that writes to sys.stderr """
sys.stderr.write(str(msg)+"\n")
class Syslog:
""" consumer that writes to the syslog daemon """
def __init__(self, priority = None):
if priority is None:
priority = self.LOG_INFO
self.priority = priority
def __call__(self, msg):
""" write a message to the log """
py.std.syslog.syslog(self.priority, str(msg))
for _prio in "EMERG ALERT CRIT ERR WARNING NOTICE INFO DEBUG".split():
_prio = "LOG_" + _prio
try:
setattr(Syslog, _prio, getattr(py.std.syslog, _prio))
except AttributeError:
pass

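# Usage sketch for the keyword-based producer/consumer scheme above (public
# spellings py.log.Producer, py.log.setconsumer, py.log.STDOUT):
import py

log = py.log.Producer("myapp")
py.log.setconsumer("myapp", py.log.STDOUT)   # route "myapp ..." messages to stdout
py.log.setconsumer("myapp debug", None)      # a None consumer silences that keyword chain
log("starting up")                           # prints "[myapp] starting up"
log.debug("noisy detail")                    # dropped: consumer for ('myapp', 'debug') is None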
View File

@ -1,76 +0,0 @@
import py, sys
class DeprecationWarning(DeprecationWarning):
def __init__(self, msg, path, lineno):
self.msg = msg
self.path = path
self.lineno = lineno
def __repr__(self):
return "%s:%d: %s" %(self.path, self.lineno+1, self.msg)
def __str__(self):
return self.msg
def _apiwarn(startversion, msg, stacklevel=2, function=None):
# below is mostly COPIED from python2.4/warnings.py's def warn()
# Get context information
if isinstance(stacklevel, str):
frame = sys._getframe(1)
level = 1
found = frame.f_code.co_filename.find(stacklevel) != -1
while frame:
co = frame.f_code
if co.co_filename.find(stacklevel) == -1:
if found:
stacklevel = level
break
else:
found = True
level += 1
frame = frame.f_back
else:
stacklevel = 1
msg = "%s (since version %s)" %(msg, startversion)
warn(msg, stacklevel=stacklevel+1, function=function)
def warn(msg, stacklevel=1, function=None):
if function is not None:
filename = py.std.inspect.getfile(function)
lineno = py.code.getrawcode(function).co_firstlineno
else:
try:
caller = sys._getframe(stacklevel)
except ValueError:
globals = sys.__dict__
lineno = 1
else:
globals = caller.f_globals
lineno = caller.f_lineno
if '__name__' in globals:
module = globals['__name__']
else:
module = "<string>"
filename = globals.get('__file__')
if filename:
fnl = filename.lower()
if fnl.endswith(".pyc") or fnl.endswith(".pyo"):
filename = filename[:-1]
elif fnl.endswith("$py.class"):
filename = filename.replace('$py.class', '.py')
else:
if module == "__main__":
try:
filename = sys.argv[0]
except AttributeError:
# embedded interpreters don't have sys.argv, see bug #839151
filename = '__main__'
if not filename:
filename = module
path = py.path.local(filename)
warning = DeprecationWarning(msg, path, lineno)
py.std.warnings.warn_explicit(warning, category=Warning,
filename=str(warning.path),
lineno=warning.lineno,
registry=py.std.warnings.__dict__.setdefault(
"__warningsregistry__", {})
)

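# Hedged sketch for the deprecation helper above (reachable as py.log._apiwarn
# at the time of this commit); stacklevel=2 attributes the warning to the caller
# of the deprecated function rather than to the function itself:
import py

def old_helper():
    py.log._apiwarn("1.1", "old_helper() is deprecated, use new_helper() instead",
                    stacklevel=2)
    return 42

old_helper()   # emits a DeprecationWarning "(since version 1.1)" at the call site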
View File

@ -1 +0,0 @@
""" unified file system api """

View File

@ -1,114 +0,0 @@
"""
This module contains multithread-safe cache implementations.
All Caches have
getorbuild(key, builder)
delentry(key)
methods and allow configuration when instantiating the cache class.
"""
from time import time as gettime
class BasicCache(object):
def __init__(self, maxentries=128):
self.maxentries = maxentries
self.prunenum = int(maxentries - maxentries/8)
self._dict = {}
def clear(self):
self._dict.clear()
def _getentry(self, key):
return self._dict[key]
def _putentry(self, key, entry):
self._prunelowestweight()
self._dict[key] = entry
def delentry(self, key, raising=False):
try:
del self._dict[key]
except KeyError:
if raising:
raise
def getorbuild(self, key, builder):
try:
entry = self._getentry(key)
except KeyError:
entry = self._build(key, builder)
self._putentry(key, entry)
return entry.value
def _prunelowestweight(self):
""" prune out entries with lowest weight. """
numentries = len(self._dict)
if numentries >= self.maxentries:
# evict according to entry's weight
items = [(entry.weight, key)
for key, entry in self._dict.items()]
items.sort()
index = numentries - self.prunenum
if index > 0:
for weight, key in items[:index]:
# in MT situations the element might be gone
self.delentry(key, raising=False)
class BuildcostAccessCache(BasicCache):
""" A BuildTime/Access-counting cache implementation.
the weight of a value is computed as the product of
num-accesses-of-a-value * time-to-build-the-value
The values with the least such weights are evicted
if the cache maxentries threshold is exceeded.
For implementation flexibility more than one object
might be evicted at a time.
"""
# time function to use for measuring build-times
def _build(self, key, builder):
start = gettime()
val = builder()
end = gettime()
return WeightedCountingEntry(val, end-start)
class WeightedCountingEntry(object):
def __init__(self, value, oneweight):
self._value = value
self.weight = self._oneweight = oneweight
def value(self):
self.weight += self._oneweight
return self._value
value = property(value)
class AgingCache(BasicCache):
""" This cache prunes out cache entries that are too old.
"""
def __init__(self, maxentries=128, maxseconds=10.0):
super(AgingCache, self).__init__(maxentries)
self.maxseconds = maxseconds
def _getentry(self, key):
entry = self._dict[key]
if entry.isexpired():
self.delentry(key)
raise KeyError(key)
return entry
def _build(self, key, builder):
val = builder()
entry = AgingEntry(val, gettime() + self.maxseconds)
return entry
class AgingEntry(object):
def __init__(self, value, expirationtime):
self.value = value
self.weight = expirationtime
def isexpired(self):
t = gettime()
return t >= self.weight

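# Usage sketch for the cache classes above (historically importable from
# py._path.cacheutil in the standalone py distribution); getorbuild() only
# invokes the builder on a cache miss:
from py._path.cacheutil import AgingCache

cache = AgingCache(maxentries=64, maxseconds=0.5)

def builder():
    print("building expensive value ...")
    return "value"

cache.getorbuild("key", builder)   # miss: builder runs, entry stored with an expiry time
cache.getorbuild("key", builder)   # hit: served from the cache until maxseconds elapse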
View File

@ -1,336 +0,0 @@
"""
"""
import os, sys
import py
class Checkers:
_depend_on_existence = 'exists', 'link', 'dir', 'file'
def __init__(self, path):
self.path = path
def dir(self):
raise NotImplementedError
def file(self):
raise NotImplementedError
def dotfile(self):
return self.path.basename.startswith('.')
def ext(self, arg):
if not arg.startswith('.'):
arg = '.' + arg
return self.path.ext == arg
def exists(self):
raise NotImplementedError
def basename(self, arg):
return self.path.basename == arg
def basestarts(self, arg):
return self.path.basename.startswith(arg)
def relto(self, arg):
return self.path.relto(arg)
def fnmatch(self, arg):
return FNMatcher(arg)(self.path)
def endswith(self, arg):
return str(self.path).endswith(arg)
def _evaluate(self, kw):
for name, value in kw.items():
invert = False
meth = None
try:
meth = getattr(self, name)
except AttributeError:
if name[:3] == 'not':
invert = True
try:
meth = getattr(self, name[3:])
except AttributeError:
pass
if meth is None:
raise TypeError(
"no %r checker available for %r" % (name, self.path))
try:
if py.code.getrawcode(meth).co_argcount > 1:
if (not meth(value)) ^ invert:
return False
else:
if bool(value) ^ bool(meth()) ^ invert:
return False
except (py.error.ENOENT, py.error.ENOTDIR):
for name in self._depend_on_existence:
if name in kw:
if kw.get(name):
return False
name = 'not' + name
if name in kw:
if not kw.get(name):
return False
return True
class NeverRaised(Exception):
pass
class PathBase(object):
""" shared implementation for filesystem path objects."""
Checkers = Checkers
def __div__(self, other):
return self.join(str(other))
__truediv__ = __div__ # py3k
def basename(self):
""" basename part of path. """
return self._getbyspec('basename')[0]
basename = property(basename, None, None, basename.__doc__)
def purebasename(self):
""" pure base name of the path."""
return self._getbyspec('purebasename')[0]
purebasename = property(purebasename, None, None, purebasename.__doc__)
def ext(self):
""" extension of the path (including the '.')."""
return self._getbyspec('ext')[0]
ext = property(ext, None, None, ext.__doc__)
def dirpath(self, *args, **kwargs):
""" return the directory Path of the current Path joined
with any given path arguments.
"""
return self.new(basename='').join(*args, **kwargs)
def read(self, mode='r'):
""" read and return a bytestring from reading the path. """
if sys.version_info < (2,3):
for x in 'u', 'U':
if x in mode:
mode = mode.replace(x, '')
f = self.open(mode)
try:
return f.read()
finally:
f.close()
def readlines(self, cr=1):
""" read and return a list of lines from the path. if cr is False, the
newline will be removed from the end of each line. """
if not cr:
content = self.read('rU')
return content.split('\n')
else:
f = self.open('rU')
try:
return f.readlines()
finally:
f.close()
def load(self):
""" (deprecated) return object unpickled from self.read() """
f = self.open('rb')
try:
return py.error.checked_call(py.std.pickle.load, f)
finally:
f.close()
def move(self, target):
""" move this path to target. """
if target.relto(self):
raise py.error.EINVAL(target,
"cannot move path into a subdirectory of itself")
try:
self.rename(target)
except py.error.EXDEV: # invalid cross-device link
self.copy(target)
self.remove()
def __repr__(self):
""" return a string representation of this path. """
return repr(str(self))
def check(self, **kw):
""" check a path for existence, or query its properties
without arguments, this returns True if the path exists (on the
filesystem), False if not
with (keyword only) arguments, the object compares the value
of the argument with the value of a property with the same name
(if it has one, else it raises a TypeError)
when for example the keyword argument 'ext' is '.py', this will
return True if self.ext == '.py', False otherwise
"""
if not kw:
kw = {'exists' : 1}
return self.Checkers(self)._evaluate(kw)
def relto(self, relpath):
""" return a string which is the relative part of the path
to the given 'relpath'.
"""
if not isinstance(relpath, (str, PathBase)):
raise TypeError("%r: not a string or path object" %(relpath,))
strrelpath = str(relpath)
if strrelpath and strrelpath[-1] != self.sep:
strrelpath += self.sep
#assert strrelpath[-1] == self.sep
#assert strrelpath[-2] != self.sep
strself = str(self)
if sys.platform == "win32" or getattr(os, '_name', None) == 'nt':
if os.path.normcase(strself).startswith(
os.path.normcase(strrelpath)):
return strself[len(strrelpath):]
elif strself.startswith(strrelpath):
return strself[len(strrelpath):]
return ""
def bestrelpath(self, dest):
""" return a string which is a relative path from self
(assumed to be a directory) to dest such that
self.join(bestrelpath) == dest; if no such
path can be determined, return dest.
"""
try:
if self == dest:
return os.curdir
base = self.common(dest)
if not base: # can be the case on windows
return str(dest)
self2base = self.relto(base)
reldest = dest.relto(base)
if self2base:
n = self2base.count(self.sep) + 1
else:
n = 0
l = [os.pardir] * n
if reldest:
l.append(reldest)
target = dest.sep.join(l)
return target
except AttributeError:
return str(dest)
def parts(self, reverse=False):
""" return a root-first list of all ancestor directories
plus the path itself.
"""
current = self
l = [self]
while 1:
last = current
current = current.dirpath()
if last == current:
break
l.insert(0, current)
if reverse:
l.reverse()
return l
def common(self, other):
""" return the common part shared with the other path
or None if there is no common part.
"""
last = None
for x, y in zip(self.parts(), other.parts()):
if x != y:
return last
last = x
return last
def __add__(self, other):
""" return new path object with 'other' added to the basename"""
return self.new(basename=self.basename+str(other))
def __cmp__(self, other):
""" return sort value (-1, 0, +1). """
try:
return cmp(self.strpath, other.strpath)
except AttributeError:
return cmp(str(self), str(other)) # self.path, other.path)
def __lt__(self, other):
try:
return self.strpath < other.strpath
except AttributeError:
return str(self) < str(other)
def visit(self, fil=None, rec=None, ignore=NeverRaised):
""" yields all paths below the current one
fil is a filter (glob pattern or callable); paths not matching
it will not be yielded. Defaults to None (everything is
returned).
rec is a filter (glob pattern or callable) that controls whether
a directory is descended into; defaults to None.
ignore is an exception class that is ignored when calling listdir()
on any of the paths (by default, all exceptions are reported).
"""
if isinstance(fil, str):
fil = FNMatcher(fil)
if rec:
if isinstance(rec, str):
rec = FNMatcher(rec)  # treat a string as a glob pattern for recursion too
elif not hasattr(rec, '__call__'):
rec = None
try:
entries = self.listdir()
except ignore:
return
dirs = [p for p in entries
if p.check(dir=1) and (rec is None or rec(p))]
for subdir in dirs:
for p in subdir.visit(fil=fil, rec=rec, ignore=ignore):
yield p
for p in entries:
if fil is None or fil(p):
yield p
def _sortlist(self, res, sort):
if sort:
if hasattr(sort, '__call__'):
res.sort(sort)
else:
res.sort()
def samefile(self, other):
""" return True if other refers to the same stat object as self. """
return self.strpath == str(other)
class FNMatcher:
def __init__(self, pattern):
self.pattern = pattern
def __call__(self, path):
"""return true if the basename/fullname matches the glob-'pattern'.
* matches everything
? matches any single character
[seq] matches any character in seq
[!seq] matches any char not in seq
if the pattern contains a path-separator then the full path
is used for pattern matching and a '*' is prepended to the
pattern.
if the pattern doesn't contain a path-separator the pattern
is only matched against the basename.
"""
pattern = self.pattern
if pattern.find(path.sep) == -1:
name = path.basename
else:
name = str(path) # path.strpath # XXX svn?
pattern = '*' + path.sep + pattern
from fnmatch import fnmatch
return fnmatch(name, pattern)

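# Sketch of the generic path API defined above, exercised through the concrete
# py.path.local implementation (the next removed file in this diff):
import py

p = py.path.local(".")
print(p.check(dir=1, dotfile=0))                  # keyword checks handled by the Checkers class
for x in p.visit(fil="*.py", rec=lambda d: d.basename != ".hg"):
    print(x.relto(p))                             # glob filter plus a callable recursion filter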
View File

@ -1,811 +0,0 @@
"""
local path implementation.
"""
import sys, os, stat, re, atexit
import py
from py._path import common
iswin32 = sys.platform == "win32" or (getattr(os, '_name', False) == 'nt')
class Stat(object):
def __getattr__(self, name):
return getattr(self._osstatresult, "st_" + name)
def __init__(self, path, osstatresult):
self.path = path
self._osstatresult = osstatresult
def owner(self):
if iswin32:
raise NotImplementedError("XXX win32")
import pwd
entry = py.error.checked_call(pwd.getpwuid, self.uid)
return entry[0]
owner = property(owner, None, None, "owner of path")
def group(self):
""" return group name of file. """
if iswin32:
raise NotImplementedError("XXX win32")
import grp
entry = py.error.checked_call(grp.getgrgid, self.gid)
return entry[0]
group = property(group)
class PosixPath(common.PathBase):
def chown(self, user, group, rec=0):
""" change ownership to the given user and group.
user and group may be specified by a number or
by a name. if rec is True change ownership
recursively.
"""
uid = getuserid(user)
gid = getgroupid(group)
if rec:
for x in self.visit(rec=lambda x: x.check(link=0)):
if x.check(link=0):
py.error.checked_call(os.chown, str(x), uid, gid)
py.error.checked_call(os.chown, str(self), uid, gid)
def readlink(self):
""" return value of a symbolic link. """
return py.error.checked_call(os.readlink, self.strpath)
def mklinkto(self, oldname):
""" posix style hard link to another name. """
py.error.checked_call(os.link, str(oldname), str(self))
def mksymlinkto(self, value, absolute=1):
""" create a symbolic link with the given value (pointing to another name). """
if absolute:
py.error.checked_call(os.symlink, str(value), self.strpath)
else:
base = self.common(value)
# with posix local paths '/' is always a common base
relsource = self.__class__(value).relto(base)
reldest = self.relto(base)
n = reldest.count(self.sep)
target = self.sep.join(('..', )*n + (relsource, ))
py.error.checked_call(os.symlink, target, self.strpath)
def samefile(self, other):
""" return True if other refers to the same stat object as self. """
return py.error.checked_call(os.path.samefile, str(self), str(other))
def getuserid(user):
import pwd
if not isinstance(user, int):
user = pwd.getpwnam(user)[2]
return user
def getgroupid(group):
import grp
if not isinstance(group, int):
group = grp.getgrnam(group)[2]
return group
FSBase = not iswin32 and PosixPath or common.PathBase
class LocalPath(FSBase):
""" object oriented interface to os.path and other local filesystem
related information.
"""
class ImportMismatchError(ImportError):
""" raised on pyimport() if there is a mismatch of __file__'s"""
sep = os.sep
class Checkers(common.Checkers):
def _stat(self):
try:
return self._statcache
except AttributeError:
try:
self._statcache = self.path.stat()
except py.error.ELOOP:
self._statcache = self.path.lstat()
return self._statcache
def dir(self):
return stat.S_ISDIR(self._stat().mode)
def file(self):
return stat.S_ISREG(self._stat().mode)
def exists(self):
return self._stat()
def link(self):
st = self.path.lstat()
return stat.S_ISLNK(st.mode)
def __new__(cls, path=None):
""" Initialize and return a local Path instance.
Path can be relative to the current directory.
If it is None then the current working directory is taken.
Note that Path instances always carry an absolute path.
Note also that passing in a local path object will simply return
the exact same path object. Use new() to get a new copy.
"""
if isinstance(path, common.PathBase):
if path.__class__ == cls:
return path
path = path.strpath
# initialize the path
self = object.__new__(cls)
if not path:
self.strpath = os.getcwd()
elif isinstance(path, py.builtin._basestring):
self.strpath = os.path.abspath(os.path.normpath(str(path)))
else:
raise ValueError("can only pass None, Path instances "
"or non-empty strings to LocalPath")
assert isinstance(self.strpath, str)
return self
def __hash__(self):
return hash(self.strpath)
def __eq__(self, other):
s1 = str(self)
s2 = str(other)
if iswin32:
s1 = s1.lower()
s2 = s2.lower()
return s1 == s2
def __ne__(self, other):
return not (self == other)
def __lt__(self, other):
return str(self) < str(other)
def remove(self, rec=1, ignore_errors=False):
""" remove a file or directory (or a directory tree if rec=1).
if ignore_errors is True, errors while removing directories will
be ignored.
"""
if self.check(dir=1, link=0):
if rec:
# force remove of readonly files on windows
if iswin32:
self.chmod(448, rec=1) # octal 0700
py.error.checked_call(py.std.shutil.rmtree, self.strpath,
ignore_errors=ignore_errors)
else:
py.error.checked_call(os.rmdir, self.strpath)
else:
if iswin32:
self.chmod(448) # octal 0700
py.error.checked_call(os.remove, self.strpath)
def computehash(self, hashtype="md5", chunksize=524288):
""" return hexdigest of hashvalue for this file. """
try:
try:
import hashlib as mod
except ImportError:
if hashtype == "sha1":
hashtype = "sha"
mod = __import__(hashtype)
hash = getattr(mod, hashtype)()
except (AttributeError, ImportError):
raise ValueError("Don't know how to compute %r hash" %(hashtype,))
f = self.open('rb')
try:
while 1:
buf = f.read(chunksize)
if not buf:
return hash.hexdigest()
hash.update(buf)
finally:
f.close()
def new(self, **kw):
""" create a modified version of this path.
the following keyword arguments modify various path parts:
a:/some/path/to/a/file.ext
|| drive
|-------------| dirname
|------| basename
|--| purebasename
|--| ext
"""
obj = object.__new__(self.__class__)
drive, dirname, basename, purebasename,ext = self._getbyspec(
"drive,dirname,basename,purebasename,ext")
if 'basename' in kw:
if 'purebasename' in kw or 'ext' in kw:
raise ValueError("invalid specification %r" % kw)
else:
pb = kw.setdefault('purebasename', purebasename)
try:
ext = kw['ext']
except KeyError:
pass
else:
if ext and not ext.startswith('.'):
ext = '.' + ext
kw['basename'] = pb + ext
kw.setdefault('drive', drive)
kw.setdefault('dirname', dirname)
kw.setdefault('sep', self.sep)
obj.strpath = os.path.normpath(
"%(drive)s%(dirname)s%(sep)s%(basename)s" % kw)
return obj
def _getbyspec(self, spec):
""" return a sequence of specified path parts. 'spec' is
a comma separated string containing path part names.
according to the following convention:
a:/some/path/to/a/file.ext
|| drive
|-------------| dirname
|------| basename
|--| purebasename
|--| ext
"""
res = []
parts = self.strpath.split(self.sep)
args = filter(None, spec.split(',') )
append = res.append
for name in args:
if name == 'drive':
append(parts[0])
elif name == 'dirname':
append(self.sep.join(['']+parts[1:-1]))
else:
basename = parts[-1]
if name == 'basename':
append(basename)
else:
i = basename.rfind('.')
if i == -1:
purebasename, ext = basename, ''
else:
purebasename, ext = basename[:i], basename[i:]
if name == 'purebasename':
append(purebasename)
elif name == 'ext':
append(ext)
else:
raise ValueError("invalid part specification %r" % name)
return res
def join(self, *args, **kwargs):
""" return a new path by appending all 'args' as path
components. if abs=1 is used restart from root if any
of the args is an absolute path.
"""
if not args:
return self
strpath = self.strpath
sep = self.sep
strargs = [str(x) for x in args]
if kwargs.get('abs', 0):
for i in range(len(strargs)-1, -1, -1):
if os.path.isabs(strargs[i]):
strpath = strargs[i]
strargs = strargs[i+1:]
break
for arg in strargs:
arg = arg.strip(sep)
if iswin32:
# allow unix style paths even on windows.
arg = arg.strip('/')
arg = arg.replace('/', sep)
if arg:
if not strpath.endswith(sep):
strpath += sep
strpath += arg
obj = self.new()
obj.strpath = os.path.normpath(strpath)
return obj
def open(self, mode='r'):
""" return an opened file with the given mode. """
return py.error.checked_call(open, self.strpath, mode)
def listdir(self, fil=None, sort=None):
""" list directory contents, possibly filter by the given fil func
and possibly sorted.
"""
if isinstance(fil, str):
fil = common.FNMatcher(fil)
res = []
for name in py.error.checked_call(os.listdir, self.strpath):
childurl = self.join(name)
if fil is None or fil(childurl):
res.append(childurl)
self._sortlist(res, sort)
return res
def size(self):
""" return size of the underlying file object """
return self.stat().size
def mtime(self):
""" return last modification time of the path. """
return self.stat().mtime
def copy(self, target, archive=False):
""" copy path to target."""
assert not archive, "XXX archive-mode not supported"
if self.check(file=1):
if target.check(dir=1):
target = target.join(self.basename)
assert self!=target
copychunked(self, target)
else:
def rec(p):
return p.check(link=0)
for x in self.visit(rec=rec):
relpath = x.relto(self)
newx = target.join(relpath)
newx.dirpath().ensure(dir=1)
if x.check(link=1):
newx.mksymlinkto(x.readlink())
elif x.check(file=1):
copychunked(x, newx)
elif x.check(dir=1):
newx.ensure(dir=1)
def rename(self, target):
""" rename this path to target. """
return py.error.checked_call(os.rename, str(self), str(target))
def dump(self, obj, bin=1):
""" pickle object into path location"""
f = self.open('wb')
try:
py.error.checked_call(py.std.pickle.dump, obj, f, bin)
finally:
f.close()
def mkdir(self, *args):
""" create & return the directory joined with args. """
p = self.join(*args)
py.error.checked_call(os.mkdir, str(p))
return p
def write(self, data, mode='w'):
""" write data into path. """
if 'b' in mode:
if not py.builtin._isbytes(data):
raise ValueError("can only process bytes")
else:
if not py.builtin._istext(data):
if not py.builtin._isbytes(data):
data = str(data)
else:
data = py.builtin._totext(data, sys.getdefaultencoding())
f = self.open(mode)
try:
f.write(data)
finally:
f.close()
def _ensuredirs(self):
parent = self.dirpath()
if parent == self:
return self
if parent.check(dir=0):
parent._ensuredirs()
if self.check(dir=0):
try:
self.mkdir()
except py.error.EEXIST:
# race condition: file/dir created by another thread/process.
# complain if it is not a dir
if self.check(dir=0):
raise
return self
def ensure(self, *args, **kwargs):
""" ensure that an args-joined path exists (by default as
a file). if you specify a keyword argument 'dir=True'
then the path is forced to be a directory path.
"""
p = self.join(*args)
if kwargs.get('dir', 0):
return p._ensuredirs()
else:
p.dirpath()._ensuredirs()
if not p.check(file=1):
p.open('w').close()
return p
def stat(self):
""" Return an os.stat() tuple. """
return Stat(self, py.error.checked_call(os.stat, self.strpath))
def lstat(self):
""" Return an os.lstat() tuple. """
return Stat(self, py.error.checked_call(os.lstat, self.strpath))
def setmtime(self, mtime=None):
""" set modification time for the given path. if 'mtime' is None
(the default) then the file's mtime is set to current time.
Note that the resolution for 'mtime' is platform dependent.
"""
if mtime is None:
return py.error.checked_call(os.utime, self.strpath, mtime)
try:
return py.error.checked_call(os.utime, self.strpath, (-1, mtime))
except py.error.EINVAL:
return py.error.checked_call(os.utime, self.strpath, (self.atime(), mtime))
def chdir(self):
""" change directory to self and return old current directory """
old = self.__class__()
py.error.checked_call(os.chdir, self.strpath)
return old
def realpath(self):
""" return a new path which contains no symbolic links."""
return self.__class__(os.path.realpath(self.strpath))
def atime(self):
""" return last access time of the path. """
return self.stat().atime
def __repr__(self):
return 'local(%r)' % self.strpath
def __str__(self):
""" return string representation of the Path. """
return self.strpath
def pypkgpath(self, pkgname=None):
""" return the path's package path by looking for the given
pkgname. If pkgname is None then look for the last
directory upwards which still contains an __init__.py
and whose basename is python-importable.
Return None if a pkgpath can not be determined.
"""
pkgpath = None
for parent in self.parts(reverse=True):
if pkgname is None:
if parent.check(file=1):
continue
if not isimportable(parent.basename):
break
if parent.join('__init__.py').check():
pkgpath = parent
continue
return pkgpath
else:
if parent.basename == pkgname:
return parent
return pkgpath
def _prependsyspath(self, path):
s = str(path)
if s != sys.path[0]:
#print "prepending to sys.path", s
sys.path.insert(0, s)
def chmod(self, mode, rec=0):
""" change permissions to the given mode. If mode is an
integer it directly encodes the os-specific modes.
if rec is True perform recursively.
"""
if not isinstance(mode, int):
raise TypeError("mode %r must be an integer" % (mode,))
if rec:
for x in self.visit(rec=rec):
py.error.checked_call(os.chmod, str(x), mode)
py.error.checked_call(os.chmod, str(self), mode)
def pyimport(self, modname=None, ensuresyspath=True):
""" return path as an imported python module.
if modname is None, look for the containing package
and construct an according module name.
The module will be put/looked up in sys.modules.
"""
if not self.check():
raise py.error.ENOENT(self)
#print "trying to import", self
pkgpath = None
if modname is None:
pkgpath = self.pypkgpath()
if pkgpath is not None:
if ensuresyspath:
self._prependsyspath(pkgpath.dirpath())
pkg = __import__(pkgpath.basename, None, None, [])
names = self.new(ext='').relto(pkgpath.dirpath())
names = names.split(self.sep)
if names and names[-1] == "__init__":
names.pop()
modname = ".".join(names)
else:
# no package scope, still make it possible
if ensuresyspath:
self._prependsyspath(self.dirpath())
modname = self.purebasename
mod = __import__(modname, None, None, ['__doc__'])
modfile = mod.__file__
if modfile[-4:] in ('.pyc', '.pyo'):
modfile = modfile[:-1]
elif modfile.endswith('$py.class'):
modfile = modfile[:-9] + '.py'
if modfile.endswith("__init__.py"):
if self.basename != "__init__.py":
modfile = modfile[:-12]
if not self.samefile(modfile):
raise self.ImportMismatchError(modname, modfile, self)
return mod
else:
try:
return sys.modules[modname]
except KeyError:
# we have a custom modname, do a pseudo-import
mod = py.std.types.ModuleType(modname)
mod.__file__ = str(self)
sys.modules[modname] = mod
try:
py.builtin.execfile(str(self), mod.__dict__)
except:
del sys.modules[modname]
raise
return mod
def sysexec(self, *argv, **popen_opts):
""" return stdout text from executing a system child process,
where the 'self' path points to executable.
The process is directly invoked and not through a system shell.
"""
from subprocess import Popen, PIPE
argv = map(str, argv)
popen_opts['stdout'] = popen_opts['stderr'] = PIPE
proc = Popen([str(self)] + list(argv), **popen_opts)
stdout, stderr = proc.communicate()
ret = proc.wait()
if py.builtin._isbytes(stdout):
stdout = py.builtin._totext(stdout, sys.getdefaultencoding())
if ret != 0:
if py.builtin._isbytes(stderr):
stderr = py.builtin._totext(stderr, sys.getdefaultencoding())
raise py.process.cmdexec.Error(ret, ret, str(self),
stdout, stderr,)
return stdout
def sysfind(cls, name, checker=None):
""" return a path object found by looking at the systems
underlying PATH specification. If the checker is not None
it will be invoked to filter matching paths. If a binary
cannot be found, None is returned
Note: This probably does not work on plain win32 systems
but may work on cygwin.
"""
if os.path.isabs(name):
p = py.path.local(name)
if p.check(file=1):
return p
else:
if iswin32:
paths = py.std.os.environ['Path'].split(';')
if '' not in paths and '.' not in paths:
paths.append('.')
try:
systemroot = os.environ['SYSTEMROOT']
except KeyError:
pass
else:
paths = [re.sub('%SystemRoot%', systemroot, path)
for path in paths]
tryadd = '', '.exe', '.com', '.bat' # XXX add more?
else:
paths = py.std.os.environ['PATH'].split(':')
tryadd = ('',)
for x in paths:
for addext in tryadd:
p = py.path.local(x).join(name, abs=True) + addext
try:
if p.check(file=1):
if checker:
if not checker(p):
continue
return p
except py.error.EACCES:
pass
return None
sysfind = classmethod(sysfind)
def _gethomedir(cls):
try:
x = os.environ['HOME']
except KeyError:
x = os.environ["HOMEDRIVE"] + os.environ['HOMEPATH']
return cls(x)
_gethomedir = classmethod(_gethomedir)
#"""
#special class constructors for local filesystem paths
#"""
def get_temproot(cls):
""" return the system's temporary directory
(where tempfiles are usually created in)
"""
return py.path.local(py.std.tempfile.gettempdir())
get_temproot = classmethod(get_temproot)
def mkdtemp(cls, rootdir=None):
""" return a Path object pointing to a fresh new temporary directory
(which we created ourselves).
"""
import tempfile
if rootdir is None:
rootdir = cls.get_temproot()
return cls(py.error.checked_call(tempfile.mkdtemp, dir=str(rootdir)))
mkdtemp = classmethod(mkdtemp)
def make_numbered_dir(cls, prefix='session-', rootdir=None, keep=3,
lock_timeout = 172800): # two days
""" return unique directory with a number greater than the current
maximum one. The number is assumed to start directly after prefix.
if keep is true directories with a number less than (maxnum-keep)
will be removed.
"""
if rootdir is None:
rootdir = cls.get_temproot()
def parse_num(path):
""" parse the number out of a path (if it matches the prefix) """
bn = path.basename
if bn.startswith(prefix):
try:
return int(bn[len(prefix):])
except ValueError:
pass
# compute the maximum number currently in use with the
# prefix
lastmax = None
while True:
maxnum = -1
for path in rootdir.listdir():
num = parse_num(path)
if num is not None:
maxnum = max(maxnum, num)
# make the new directory
try:
udir = rootdir.mkdir(prefix + str(maxnum+1))
except py.error.EEXIST:
# race condition: another thread/process created the dir
# in the meantime. Try counting again
if lastmax == maxnum:
raise
lastmax = maxnum
continue
break
# put a .lock file in the new directory that will be removed at
# process exit
if lock_timeout:
lockfile = udir.join('.lock')
mypid = os.getpid()
if hasattr(lockfile, 'mksymlinkto'):
lockfile.mksymlinkto(str(mypid))
else:
lockfile.write(str(mypid))
def try_remove_lockfile():
# in a fork() situation, only the last process should
# remove the .lock, otherwise the other processes run the
# risk of seeing their temporary dir disappear. For now
# we remove the .lock in the parent only (i.e. we assume
# that the children finish before the parent).
if os.getpid() != mypid:
return
try:
lockfile.remove()
except py.error.Error:
pass
atexit.register(try_remove_lockfile)
# prune old directories
if keep:
for path in rootdir.listdir():
num = parse_num(path)
if num is not None and num <= (maxnum - keep):
lf = path.join('.lock')
try:
t1 = lf.lstat().mtime
t2 = lockfile.lstat().mtime
if not lock_timeout or abs(t2-t1) < lock_timeout:
continue # skip directories still locked
except py.error.Error:
pass # assume that it means that there is no 'lf'
try:
path.remove(rec=1)
except KeyboardInterrupt:
raise
except: # this might be py.error.Error, WindowsError ...
pass
# make link...
try:
username = os.environ['USER'] #linux, et al
except KeyError:
try:
username = os.environ['USERNAME'] #windows
except KeyError:
username = 'current'
src = str(udir)
dest = src[:src.rfind('-')] + '-' + username
try:
os.unlink(dest)
except OSError:
pass
try:
os.symlink(src, dest)
except (OSError, AttributeError): # AttributeError on win32
pass
return udir
make_numbered_dir = classmethod(make_numbered_dir)
def copychunked(src, dest):
chunksize = 524288 # half a meg of bytes
fsrc = src.open('rb')
try:
fdest = dest.open('wb')
try:
while 1:
buf = fsrc.read(chunksize)
if not buf:
break
fdest.write(buf)
finally:
fdest.close()
finally:
fsrc.close()
def autopath(globs=None):
""" (deprecated) return the (local) path of the "current" file pointed to by globals or - if it is none - alternatively the callers frame globals.
the path will always point to a .py file or to None.
the path will have the following payload:
pkgdir is the last parent directory path containing __init__.py
"""
py.log._apiwarn("1.1", "py.magic.autopath deprecated, "
"use py.path.local(__file__) and maybe pypkgpath/pyimport().")
if globs is None:
globs = sys._getframe(1).f_globals
try:
__file__ = globs['__file__']
except KeyError:
if not sys.argv[0]:
raise ValueError("cannot compute autopath in interactive mode")
__file__ = os.path.abspath(sys.argv[0])
ret = py.path.local(__file__)
if ret.ext in ('.pyc', '.pyo'):
ret = ret.new(ext='.py')
current = pkgdir = ret.dirpath()
while 1:
if current.join('__init__.py').check():
pkgdir = current
current = current.dirpath()
if pkgdir != current:
continue
elif str(current) not in sys.path:
sys.path.insert(0, str(current))
break
ret.pkgdir = pkgdir
return ret
def isimportable(name):
if name:
if not (name[0].isalpha() or name[0] == '_'):
return False
name= name.replace("_", '')
return not name or name.isalnum()

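# Hedged sketch of a few LocalPath conveniences defined above (public spelling
# py.path.local); the file names used here are made up for illustration:
import py

tmp = py.path.local.make_numbered_dir(prefix="demo-", keep=3)   # numbered dir with .lock handling
modfile = tmp.ensure("sub", "demo_mod.py")                      # creates sub/ plus an empty file
modfile.write("ANSWER = 42\n")
mod = modfile.pyimport()                                        # import the file as module 'demo_mod'
print(mod.ANSWER)                                               # -> 42
print(py.path.local.sysfind("python"))                          # PATH lookup, or None if not found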
View File

@ -1,378 +0,0 @@
"""
module defining a subversion path object based on the external
command 'svn'. This modules aims to work with svn 1.3 and higher
but might also interact well with earlier versions.
"""
import os, sys, time, re
import py
from py import path, process
from py._path import common
from py._path import svnwc as svncommon
from py._path.cacheutil import BuildcostAccessCache, AgingCache
DEBUG=False
class SvnCommandPath(svncommon.SvnPathBase):
""" path implementation that offers access to (possibly remote) subversion
repositories. """
_lsrevcache = BuildcostAccessCache(maxentries=128)
_lsnorevcache = AgingCache(maxentries=1000, maxseconds=60.0)
def __new__(cls, path, rev=None, auth=None):
self = object.__new__(cls)
if isinstance(path, cls):
rev = path.rev
auth = path.auth
path = path.strpath
svncommon.checkbadchars(path)
path = path.rstrip('/')
self.strpath = path
self.rev = rev
self.auth = auth
return self
def __repr__(self):
if self.rev == -1:
return 'svnurl(%r)' % self.strpath
else:
return 'svnurl(%r, %r)' % (self.strpath, self.rev)
def _svnwithrev(self, cmd, *args):
""" execute an svn command, append our own url and revision """
if self.rev is None:
return self._svnwrite(cmd, *args)
else:
args = ['-r', self.rev] + list(args)
return self._svnwrite(cmd, *args)
def _svnwrite(self, cmd, *args):
""" execute an svn command, append our own url """
l = ['svn %s' % cmd]
args = ['"%s"' % self._escape(item) for item in args]
l.extend(args)
l.append('"%s"' % self._encodedurl())
# fixing the locale because we can't otherwise parse
string = " ".join(l)
if DEBUG:
print("execing %s" % string)
out = self._svncmdexecauth(string)
return out
def _svncmdexecauth(self, cmd):
""" execute an svn command 'as is' """
cmd = svncommon.fixlocale() + cmd
if self.auth is not None:
cmd += ' ' + self.auth.makecmdoptions()
return self._cmdexec(cmd)
def _cmdexec(self, cmd):
try:
out = process.cmdexec(cmd)
except py.process.cmdexec.Error:
e = sys.exc_info()[1]
if (e.err.find('File Exists') != -1 or
e.err.find('File already exists') != -1):
raise py.error.EEXIST(self)
raise
return out
def _svnpopenauth(self, cmd):
""" execute an svn command, return a pipe for reading stdin """
cmd = svncommon.fixlocale() + cmd
if self.auth is not None:
cmd += ' ' + self.auth.makecmdoptions()
return self._popen(cmd)
def _popen(self, cmd):
return os.popen(cmd)
def _encodedurl(self):
return self._escape(self.strpath)
def _norev_delentry(self, path):
auth = self.auth and self.auth.makecmdoptions() or None
self._lsnorevcache.delentry((str(path), auth))
def open(self, mode='r'):
""" return an opened file with the given mode. """
if mode not in ("r", "rU",):
raise ValueError("mode %r not supported" % (mode,))
assert self.check(file=1) # svn cat returns an empty file otherwise
if self.rev is None:
return self._svnpopenauth('svn cat "%s"' % (
self._escape(self.strpath), ))
else:
return self._svnpopenauth('svn cat -r %s "%s"' % (
self.rev, self._escape(self.strpath)))
def dirpath(self, *args, **kwargs):
""" return the directory path of the current path joined
with any given path arguments.
"""
l = self.strpath.split(self.sep)
if len(l) < 4:
raise py.error.EINVAL(self, "base is not valid")
elif len(l) == 4:
return self.join(*args, **kwargs)
else:
return self.new(basename='').join(*args, **kwargs)
# modifying methods (cache must be invalidated)
def mkdir(self, *args, **kwargs):
""" create & return the directory joined with args.
pass a 'msg' keyword argument to set the commit message.
"""
commit_msg = kwargs.get('msg', "mkdir by py lib invocation")
createpath = self.join(*args)
createpath._svnwrite('mkdir', '-m', commit_msg)
self._norev_delentry(createpath.dirpath())
return createpath
def copy(self, target, msg='copied by py lib invocation'):
""" copy path to target with checkin message msg."""
if getattr(target, 'rev', None) is not None:
raise py.error.EINVAL(target, "revisions are immutable")
self._svncmdexecauth('svn copy -m "%s" "%s" "%s"' %(msg,
self._escape(self), self._escape(target)))
self._norev_delentry(target.dirpath())
def rename(self, target, msg="renamed by py lib invocation"):
""" rename this path to target with checkin message msg. """
if getattr(self, 'rev', None) is not None:
raise py.error.EINVAL(self, "revisions are immutable")
self._svncmdexecauth('svn move -m "%s" --force "%s" "%s"' %(
msg, self._escape(self), self._escape(target)))
self._norev_delentry(self.dirpath())
self._norev_delentry(self)
def remove(self, rec=1, msg='removed by py lib invocation'):
""" remove a file or directory (or a directory tree if rec=1) with
checkin message msg."""
if self.rev is not None:
raise py.error.EINVAL(self, "revisions are immutable")
self._svncmdexecauth('svn rm -m "%s" "%s"' %(msg, self._escape(self)))
self._norev_delentry(self.dirpath())
def export(self, topath):
""" export to a local path
topath should not exist prior to calling this; returns a
py.path.local instance
"""
topath = py.path.local(topath)
args = ['"%s"' % (self._escape(self),),
'"%s"' % (self._escape(topath),)]
if self.rev is not None:
args = ['-r', str(self.rev)] + args
self._svncmdexecauth('svn export %s' % (' '.join(args),))
return topath
def ensure(self, *args, **kwargs):
""" ensure that an args-joined path exists (by default as
a file). If you specify a keyword argument 'dir=True'
then the path is forced to be a directory path.
"""
if getattr(self, 'rev', None) is not None:
raise py.error.EINVAL(self, "revisions are immutable")
target = self.join(*args)
dir = kwargs.get('dir', 0)
for x in target.parts(reverse=True):
if x.check():
break
else:
raise py.error.ENOENT(target, "has no valid base!")
if x == target:
if not x.check(dir=dir):
raise dir and py.error.ENOTDIR(x) or py.error.EISDIR(x)
return x
tocreate = target.relto(x)
basename = tocreate.split(self.sep, 1)[0]
tempdir = py.path.local.mkdtemp()
try:
tempdir.ensure(tocreate, dir=dir)
cmd = 'svn import -m "%s" "%s" "%s"' % (
"ensure %s" % self._escape(tocreate),
self._escape(tempdir.join(basename)),
x.join(basename)._encodedurl())
self._svncmdexecauth(cmd)
self._norev_delentry(x)
finally:
tempdir.remove()
return target
# end of modifying methods
def _propget(self, name):
res = self._svnwithrev('propget', name)
return res[:-1] # strip trailing newline
def _proplist(self):
res = self._svnwithrev('proplist')
lines = res.split('\n')
lines = [x.strip() for x in lines[1:]]
return svncommon.PropListDict(self, lines)
def info(self):
""" return an Info structure with svn-provided information. """
parent = self.dirpath()
nameinfo_seq = parent._listdir_nameinfo()
bn = self.basename
for name, info in nameinfo_seq:
if name == bn:
return info
raise py.error.ENOENT(self)
def _listdir_nameinfo(self):
""" return sequence of name-info directory entries of self """
def builder():
try:
res = self._svnwithrev('ls', '-v')
except process.cmdexec.Error:
e = sys.exc_info()[1]
if e.err.find('non-existent in that revision') != -1:
raise py.error.ENOENT(self, e.err)
elif e.err.find('File not found') != -1:
raise py.error.ENOENT(self, e.err)
elif e.err.find('not part of a repository')!=-1:
raise py.error.ENOENT(self, e.err)
elif e.err.find('Unable to open')!=-1:
raise py.error.ENOENT(self, e.err)
elif e.err.lower().find('method not allowed')!=-1:
raise py.error.EACCES(self, e.err)
raise py.error.Error(e.err)
lines = res.split('\n')
nameinfo_seq = []
for lsline in lines:
if lsline:
info = InfoSvnCommand(lsline)
if info._name != '.': # svn 1.5 produces '.' dirs,
nameinfo_seq.append((info._name, info))
nameinfo_seq.sort()
return nameinfo_seq
auth = self.auth and self.auth.makecmdoptions() or None
if self.rev is not None:
return self._lsrevcache.getorbuild((self.strpath, self.rev, auth),
builder)
else:
return self._lsnorevcache.getorbuild((self.strpath, auth),
builder)
def listdir(self, fil=None, sort=None):
""" list directory contents, possibly filter by the given fil func
and possibly sorted.
"""
if isinstance(fil, str):
fil = common.FNMatcher(fil)
nameinfo_seq = self._listdir_nameinfo()
if len(nameinfo_seq) == 1:
name, info = nameinfo_seq[0]
if name == self.basename and info.kind == 'file':
#if not self.check(dir=1):
raise py.error.ENOTDIR(self)
paths = [self.join(name) for (name, info) in nameinfo_seq]
if fil:
paths = [x for x in paths if fil(x)]
self._sortlist(paths, sort)
return paths
def log(self, rev_start=None, rev_end=1, verbose=False):
""" return a list of LogEntry instances for this path.
rev_start is the starting revision (defaulting to the first one).
rev_end is the last revision (defaulting to HEAD).
if verbose is True, then the LogEntry instances also know which files changed.
"""
assert self.check() #make it simpler for the pipe
rev_start = rev_start is None and "HEAD" or rev_start
rev_end = rev_end is None and "HEAD" or rev_end
if rev_start == "HEAD" and rev_end == 1:
rev_opt = ""
else:
rev_opt = "-r %s:%s" % (rev_start, rev_end)
verbose_opt = verbose and "-v" or ""
xmlpipe = self._svnpopenauth('svn log --xml %s %s "%s"' %
(rev_opt, verbose_opt, self.strpath))
from xml.dom import minidom
tree = minidom.parse(xmlpipe)
result = []
for logentry in filter(None, tree.firstChild.childNodes):
if logentry.nodeType == logentry.ELEMENT_NODE:
result.append(svncommon.LogEntry(logentry))
return result
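# Editor's usage sketch (illustrative, not part of the original module):
# SvnCommandPath backs py.path.svnurl(); every operation shells out to the
# "svn" command line client, so running this needs svn installed and a
# reachable repository.  The URL below is hypothetical.
if __name__ == '__main__':
    url = py.path.svnurl("http://example.org/svn/repo/trunk")
    for entry in url.listdir():
        print("%s rev=%s" % (entry.basename, entry.info().created_rev))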
#01234567890123456789012345678901234567890123467
# 2256 hpk 165 Nov 24 17:55 __init__.py
# XXX spotted by Guido, SVN 1.3.0 has different alignment, breaks the code!!!
# 1312 johnny 1627 May 05 14:32 test_decorators.py
#
class InfoSvnCommand:
# the '0?' part in the middle is an indication of whether the resource is
# locked, see 'svn help ls'
lspattern = re.compile(
r'^ *(?P<rev>\d+) +(?P<author>.+?) +(0? *(?P<size>\d+))? '
'*(?P<date>\w+ +\d{2} +[\d:]+) +(?P<file>.*)$')
def __init__(self, line):
# this is a typical line from 'svn ls http://...'
#_ 1127 jum 0 Jul 13 15:28 branch/
match = self.lspattern.match(line)
data = match.groupdict()
self._name = data['file']
if self._name[-1] == '/':
self._name = self._name[:-1]
self.kind = 'dir'
else:
self.kind = 'file'
#self.has_props = l.pop(0) == 'P'
self.created_rev = int(data['rev'])
self.last_author = data['author']
self.size = data['size'] and int(data['size']) or 0
self.mtime = parse_time_with_missing_year(data['date'])
self.time = self.mtime * 1000000
def __eq__(self, other):
return self.__dict__ == other.__dict__
#____________________________________________________
#
# helper functions
#____________________________________________________
def parse_time_with_missing_year(timestr):
""" analyze the time part from a single line of "svn ls -v"
the svn output doesn't show the year makes the 'timestr'
ambigous.
"""
import calendar
t_now = time.gmtime()
tparts = timestr.split()
month = time.strptime(tparts.pop(0), '%b')[1]
day = time.strptime(tparts.pop(0), '%d')[2]
last = tparts.pop(0) # year or hour:minute
try:
if ":" in last:
raise ValueError()
year = time.strptime(last, '%Y')[0]
hour = minute = 0
except ValueError:
hour, minute = time.strptime(last, '%H:%M')[3:5]
year = t_now[0]
t_result = (year, month, day, hour, minute, 0,0,0,0)
if t_result > t_now:
year -= 1
t_result = (year, month, day, hour, minute, 0,0,0,0)
return calendar.timegm(t_result)
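# Editor's sketch (illustrative, not part of the original module): how a
# typical "svn ls -v" line is parsed by InfoSvnCommand together with the year
# heuristic above; "Nov 24 17:55" gets the current year assigned, while
# "May 05 2008" carries its own year.
if __name__ == '__main__':
    example = InfoSvnCommand("   2256 hpk         165 Nov 24 17:55 __init__.py")
    print("%s %s rev=%d size=%d" % (example._name, example.kind,
                                    example.created_rev, example.size))
    print(parse_time_with_missing_year("May 05 2008"))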
class PathEntry:
def __init__(self, ppart):
self.strpath = ppart.firstChild.nodeValue.encode('UTF-8')
self.action = ppart.getAttribute('action').encode('UTF-8')
if self.action == 'A':
self.copyfrom_path = ppart.getAttribute('copyfrom-path').encode('UTF-8')
if self.copyfrom_path:
self.copyfrom_rev = int(ppart.getAttribute('copyfrom-rev'))

File diff suppressed because it is too large

View File

@ -1 +0,0 @@
#

View File

@ -1 +0,0 @@
""" high-level sub-process handling """

View File

@ -1,53 +0,0 @@
"""
"""
import os, sys
import subprocess
import py
from subprocess import Popen, PIPE
def cmdexec(cmd):
""" return unicode output of executing 'cmd' in a separate process.
raise cmdexec.ExecutionFailed exception if the command failed.
the exception will provide an 'err' attribute containing
the error-output from the command.
if the subprocess module does not provide proper encoding/unicode strings,
sys.getdefaultencoding() will be used; if that does not exist, 'UTF-8'.
"""
process = subprocess.Popen(cmd, shell=True,
universal_newlines=True,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = process.communicate()
if sys.version_info[0] < 3: # on py3 we get unicode strings, on py2 not
try:
default_encoding = sys.getdefaultencoding() # jython may not have it
except AttributeError:
default_encoding = sys.stdout.encoding or 'UTF-8'
out = unicode(out, process.stdout.encoding or default_encoding)
err = unicode(err, process.stderr.encoding or default_encoding)
status = process.poll()
if status:
raise ExecutionFailed(status, status, cmd, out, err)
return out
class ExecutionFailed(py.error.Error):
def __init__(self, status, systemstatus, cmd, out, err):
Exception.__init__(self)
self.status = status
self.systemstatus = systemstatus
self.cmd = cmd
self.err = err
self.out = out
def __str__(self):
return "ExecutionFailed: %d %s\n%s" %(self.status, self.cmd, self.err)
# export the exception under the name 'py.process.cmdexec.Error'
cmdexec.Error = ExecutionFailed
try:
ExecutionFailed.__module__ = 'py.process.cmdexec'
ExecutionFailed.__name__ = 'Error'
except (AttributeError, TypeError):
pass
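# Editor's usage sketch (illustrative, not part of the original module):
# cmdexec returns the decoded stdout of a shell command and raises
# cmdexec.Error (an alias for ExecutionFailed, set up above) on a non-zero exit.
if __name__ == '__main__':
    print(cmdexec("echo hello").strip())
    try:
        cmdexec("exit 3")
    except cmdexec.Error:
        err = sys.exc_info()[1]
        print("failed with status %s: %r" % (err.status, err.err))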

View File

@ -1,108 +0,0 @@
"""
ForkedFunc provides a way to run a function in a forked process
and get at its return value, stdout and stderr output as well
as signals and exit statuses.
XXX see if tempdir handling is sane
"""
import py
import os
import sys
import marshal
class ForkedFunc(object):
EXITSTATUS_EXCEPTION = 3
def __init__(self, fun, args=None, kwargs=None, nice_level=0):
if args is None:
args = []
if kwargs is None:
kwargs = {}
self.fun = fun
self.args = args
self.kwargs = kwargs
self.tempdir = tempdir = py.path.local.mkdtemp()
self.RETVAL = tempdir.ensure('retval')
self.STDOUT = tempdir.ensure('stdout')
self.STDERR = tempdir.ensure('stderr')
pid = os.fork()
if pid: # in parent process
self.pid = pid
else: # in child process
self._child(nice_level)
def _child(self, nice_level):
# right now we need to call a function, but first we need to
# map all IO that might happen
# make sure sys.stdout points to file descriptor one
sys.stdout = stdout = self.STDOUT.open('w')
sys.stdout.flush()
fdstdout = stdout.fileno()
if fdstdout != 1:
os.dup2(fdstdout, 1)
sys.stderr = stderr = self.STDERR.open('w')
fdstderr = stderr.fileno()
if fdstderr != 2:
os.dup2(fdstderr, 2)
retvalf = self.RETVAL.open("wb")
EXITSTATUS = 0
try:
if nice_level:
os.nice(nice_level)
try:
retval = self.fun(*self.args, **self.kwargs)
retvalf.write(marshal.dumps(retval))
except:
excinfo = py.code.ExceptionInfo()
stderr.write(excinfo.exconly())
EXITSTATUS = self.EXITSTATUS_EXCEPTION
finally:
stdout.close()
stderr.close()
retvalf.close()
os.close(1)
os.close(2)
os._exit(EXITSTATUS)
def waitfinish(self, waiter=os.waitpid):
pid, systemstatus = waiter(self.pid, 0)
if systemstatus:
if os.WIFSIGNALED(systemstatus):
exitstatus = os.WTERMSIG(systemstatus) + 128
else:
exitstatus = os.WEXITSTATUS(systemstatus)
#raise ExecutionFailed(status, systemstatus, cmd,
# ''.join(out), ''.join(err))
else:
exitstatus = 0
signal = systemstatus & 0x7f
if not exitstatus and not signal:
retval = self.RETVAL.open('rb')
try:
retval_data = retval.read()
finally:
retval.close()
retval = marshal.loads(retval_data)
else:
retval = None
stdout = self.STDOUT.read()
stderr = self.STDERR.read()
self._removetemp()
return Result(exitstatus, signal, retval, stdout, stderr)
def _removetemp(self):
if self.tempdir.check():
self.tempdir.remove()
def __del__(self):
self._removetemp()
class Result(object):
def __init__(self, exitstatus, signal, retval, stdout, stderr):
self.exitstatus = exitstatus
self.signal = signal
self.retval = retval
self.out = stdout
self.err = stderr
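# Editor's usage sketch (illustrative, not part of the original module): run a
# callable in a forked child and collect its return value and captured output.
# POSIX only, since it relies on os.fork().
if __name__ == '__main__':
    def compute():
        print("working in the child process")
        return 6 * 7
    result = ForkedFunc(compute).waitfinish()
    print("retval=%r exitstatus=%r out=%r" % (
        result.retval, result.exitstatus, result.out.strip()))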

View File

@ -1,23 +0,0 @@
import py
import os, sys
if sys.platform == "win32" or getattr(os, '_name', '') == 'nt':
try:
import ctypes
except ImportError:
def dokill(pid):
py.process.cmdexec("taskkill /F /PID %d" %(pid,))
else:
def dokill(pid):
PROCESS_TERMINATE = 1
handle = ctypes.windll.kernel32.OpenProcess(
PROCESS_TERMINATE, False, pid)
ctypes.windll.kernel32.TerminateProcess(handle, -1)
ctypes.windll.kernel32.CloseHandle(handle)
else:
def dokill(pid):
os.kill(pid, 15)
def kill(pid):
""" kill process by id. """
dokill(pid)
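# Editor's usage sketch (illustrative, not part of the original module):
# terminate a freshly spawned child process via the platform-specific dokill
# selected above.
if __name__ == '__main__':
    import subprocess, time
    child = subprocess.Popen([sys.executable, "-c", "import time; time.sleep(30)"])
    kill(child.pid)
    time.sleep(0.5)
    print("child exit code: %r" % (child.poll(),))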

View File

@ -1,18 +0,0 @@
import sys
class Std(object):
""" makes top-level python modules available as an attribute,
importing them on first access.
"""
def __init__(self):
self.__dict__ = sys.modules
def __getattr__(self, name):
try:
m = __import__(name)
except ImportError:
raise AttributeError("py.std: could not import %s" % name)
return m
std = Std()
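# Editor's usage sketch (illustrative, not part of the original module):
# attribute access on 'std' imports the named top-level module on demand.
if __name__ == '__main__':
    print(std.os.getcwd())
    print(std.textwrap.fill("modules are imported lazily on first attribute access"))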

View File

@ -1 +0,0 @@
""" assertion and py.test helper API."""

View File

@ -1,244 +0,0 @@
"""
module for generating and serializing xml and html structures
by using simple python objects.
(c) holger krekel, holger at merlinux eu. 2009
"""
import py
import sys, re
if sys.version_info >= (3,0):
def u(s):
return s
def unicode(x):
if hasattr(x, '__unicode__'):
return x.__unicode__()
return str(x)
else:
def u(s):
return unicode(s)
unicode = unicode
class NamespaceMetaclass(type):
def __getattr__(self, name):
if name[:1] == '_':
raise AttributeError(name)
if self == Namespace:
raise ValueError("Namespace class is abstract")
tagspec = self.__tagspec__
if tagspec is not None and name not in tagspec:
raise AttributeError(name)
classattr = {}
if self.__stickyname__:
classattr['xmlname'] = name
cls = type(name, (self.__tagclass__,), classattr)
setattr(self, name, cls)
return cls
class Tag(list):
class Attr(object):
def __init__(self, **kwargs):
self.__dict__.update(kwargs)
def __init__(self, *args, **kwargs):
super(Tag, self).__init__(args)
self.attr = self.Attr(**kwargs)
def __unicode__(self):
return self.unicode(indent=0)
__str__ = __unicode__
def unicode(self, indent=2):
l = []
SimpleUnicodeVisitor(l.append, indent).visit(self)
return "".join(l)
def __repr__(self):
name = self.__class__.__name__
return "<%r tag object %d>" % (name, id(self))
Namespace = NamespaceMetaclass('Namespace', (object, ), {
'__tagspec__': None,
'__tagclass__': Tag,
'__stickyname__': False,
})
class HtmlTag(Tag):
def unicode(self, indent=2):
l = []
HtmlVisitor(l.append, indent, shortempty=False).visit(self)
return u("").join(l)
# exported plain html namespace
class html(Namespace):
__tagclass__ = HtmlTag
__stickyname__ = True
__tagspec__ = dict([(x,1) for x in (
'a,abbr,acronym,address,applet,area,b,bdo,big,blink,'
'blockquote,body,br,button,caption,center,cite,code,col,'
'colgroup,comment,dd,del,dfn,dir,div,dl,dt,em,embed,'
'fieldset,font,form,frameset,h1,h2,h3,h4,h5,h6,head,html,'
'i,iframe,img,input,ins,kbd,label,legend,li,link,listing,'
'map,marquee,menu,meta,multicol,nobr,noembed,noframes,'
'noscript,object,ol,optgroup,option,p,pre,q,s,script,'
'select,small,span,strike,strong,style,sub,sup,table,'
'tbody,td,textarea,tfoot,th,thead,title,tr,tt,u,ul,xmp,'
'base,basefont,frame,hr,isindex,param,samp,var'
).split(',') if x])
class Style(object):
def __init__(self, **kw):
for x, y in kw.items():
x = x.replace('_', '-')
setattr(self, x, y)
class raw(object):
"""just a box that can contain a unicode string that will be
included directly in the output"""
def __init__(self, uniobj):
self.uniobj = uniobj
class SimpleUnicodeVisitor(object):
""" recursive visitor to write unicode. """
def __init__(self, write, indent=0, curindent=0, shortempty=True):
self.write = write
self.cache = {}
self.visited = {} # for detection of recursion
self.indent = indent
self.curindent = curindent
self.parents = []
self.shortempty = shortempty # short empty tags or not
def visit(self, node):
""" dispatcher on node's class/bases name. """
cls = node.__class__
try:
visitmethod = self.cache[cls]
except KeyError:
for subclass in cls.__mro__:
visitmethod = getattr(self, subclass.__name__, None)
if visitmethod is not None:
break
else:
visitmethod = self.object
self.cache[cls] = visitmethod
visitmethod(node)
def object(self, obj):
#self.write(obj)
self.write(escape(unicode(obj)))
def raw(self, obj):
self.write(obj.uniobj)
def list(self, obj):
assert id(obj) not in self.visited
self.visited[id(obj)] = 1
map(self.visit, obj)
def Tag(self, tag):
assert id(tag) not in self.visited
try:
tag.parent = self.parents[-1]
except IndexError:
tag.parent = None
self.visited[id(tag)] = 1
tagname = getattr(tag, 'xmlname', tag.__class__.__name__)
if self.curindent and not self._isinline(tagname):
self.write("\n" + u(' ') * self.curindent)
if tag:
self.curindent += self.indent
self.write(u('<%s%s>') % (tagname, self.attributes(tag)))
self.parents.append(tag)
for x in tag:
self.visit(x)
self.parents.pop()
self.write(u('</%s>') % tagname)
self.curindent -= self.indent
else:
nameattr = tagname+self.attributes(tag)
if self._issingleton(tagname):
self.write(u('<%s/>') % (nameattr,))
else:
self.write(u('<%s></%s>') % (nameattr, tagname))
def attributes(self, tag):
# serialize attributes
attrlist = dir(tag.attr)
attrlist.sort()
l = []
for name in attrlist:
res = self.repr_attribute(tag.attr, name)
if res is not None:
l.append(res)
l.extend(self.getstyle(tag))
return u("").join(l)
def repr_attribute(self, attrs, name):
if name[:2] != '__':
value = getattr(attrs, name)
if name.endswith('_'):
name = name[:-1]
return ' %s="%s"' % (name, escape(unicode(value)))
def getstyle(self, tag):
""" return attribute list suitable for styling. """
try:
styledict = tag.style.__dict__
except AttributeError:
return []
else:
stylelist = [x+': ' + y for x,y in styledict.items()]
return [u(' style="%s"') % u('; ').join(stylelist)]
def _issingleton(self, tagname):
"""can (and will) be overridden in subclasses"""
return self.shortempty
def _isinline(self, tagname):
"""can (and will) be overridden in subclasses"""
return False
class HtmlVisitor(SimpleUnicodeVisitor):
single = dict([(x, 1) for x in
('br,img,area,param,col,hr,meta,link,base,'
'input,frame').split(',')])
inline = dict([(x, 1) for x in
('a abbr acronym b basefont bdo big br cite code dfn em font '
'i img input kbd label q s samp select small span strike '
'strong sub sup textarea tt u var'.split(' '))])
def repr_attribute(self, attrs, name):
if name == 'class_':
value = getattr(attrs, name)
if value is None:
return
return super(HtmlVisitor, self).repr_attribute(attrs, name)
def _issingleton(self, tagname):
return tagname in self.single
def _isinline(self, tagname):
return tagname in self.inline
class _escape:
def __init__(self):
self.escape = {
u('"') : u('&quot;'), u('<') : u('&lt;'), u('>') : u('&gt;'),
u('&') : u('&amp;'), u("'") : u('&apos;'),
}
self.charef_rex = re.compile(u("|").join(self.escape.keys()))
def _replacer(self, match):
return self.escape[match.group(0)]
def __call__(self, ustring):
""" xml-escape the given unicode string. """
ustring = unicode(ustring)
return self.charef_rex.sub(self._replacer, ustring)
escape = _escape()
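# Editor's usage sketch (illustrative, not part of the original module): build
# a small HTML fragment with the exported 'html' namespace and serialize it.
if __name__ == '__main__':
    page = html.html(
        html.head(html.title("greetings")),
        html.body(
            html.h1("hello"),
            html.p("generated by the tag classes above", class_="intro")))
    print(page.unicode(indent=2))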

View File

@ -1,107 +0,0 @@
"""
apipkg: control the exported namespace of a python package.
see http://pypi.python.org/pypi/apipkg
(c) holger krekel, 2009 - MIT license
"""
import os
import sys
from types import ModuleType
__version__ = "1.0b7"
def initpkg(pkgname, exportdefs):
""" initialize given package from the export definitions. """
oldmod = sys.modules[pkgname]
d = {}
f = getattr(oldmod, '__file__', None)
if f:
f = os.path.abspath(f)
d['__file__'] = f
d['__version__'] = getattr(oldmod, '__version__', '0')
if hasattr(oldmod, '__loader__'):
d['__loader__'] = oldmod.__loader__
if hasattr(oldmod, '__path__'):
d['__path__'] = [os.path.abspath(p) for p in oldmod.__path__]
oldmod.__dict__.update(d)
mod = ApiModule(pkgname, exportdefs, implprefix=pkgname, attr=d)
sys.modules[pkgname] = mod
def importobj(modpath, attrname):
module = __import__(modpath, None, None, ['__doc__'])
return getattr(module, attrname)
class ApiModule(ModuleType):
def __init__(self, name, importspec, implprefix=None, attr=None):
self.__name__ = name
self.__all__ = [x for x in importspec if x != '__onfirstaccess__']
self.__map__ = {}
self.__implprefix__ = implprefix or name
if attr:
for name, val in attr.items():
#print "setting", self.__name__, name, val
setattr(self, name, val)
for name, importspec in importspec.items():
if isinstance(importspec, dict):
subname = '%s.%s'%(self.__name__, name)
apimod = ApiModule(subname, importspec, implprefix)
sys.modules[subname] = apimod
setattr(self, name, apimod)
else:
modpath, attrname = importspec.split(':')
if modpath[0] == '.':
modpath = implprefix + modpath
if name == '__doc__':
self.__doc__ = importobj(modpath, attrname)
else:
self.__map__[name] = (modpath, attrname)
def __repr__(self):
l = []
if hasattr(self, '__version__'):
l.append("version=" + repr(self.__version__))
if hasattr(self, '__file__'):
l.append('from ' + repr(self.__file__))
if l:
return '<ApiModule %r %s>' % (self.__name__, " ".join(l))
return '<ApiModule %r>' % (self.__name__,)
def __makeattr(self, name):
"""lazily compute value for name or raise AttributeError if unknown."""
#print "makeattr", self.__name__, name
target = None
if '__onfirstaccess__' in self.__map__:
target = self.__map__.pop('__onfirstaccess__')
importobj(*target)()
try:
modpath, attrname = self.__map__[name]
except KeyError:
if target is not None and name != '__onfirstaccess__':
# retry, onfirstaccess might have set attrs
return getattr(self, name)
raise AttributeError(name)
else:
result = importobj(modpath, attrname)
setattr(self, name, result)
try:
del self.__map__[name]
except KeyError:
pass # in a recursive-import situation a double-del can happen
return result
__getattr__ = __makeattr
def __dict__(self):
# force all the content of the module to be loaded when __dict__ is read
dictdescr = ModuleType.__dict__['__dict__']
dict = dictdescr.__get__(self)
if dict is not None:
hasattr(self, 'some')
for name in self.__all__:
try:
self.__makeattr(name)
except AttributeError:
pass
return dict
__dict__ = property(__dict__)
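# Editor's usage sketch (illustrative, not part of the original module): wire a
# stdlib function into a lazy namespace to show how ApiModule resolves names on
# first attribute access; 'lazydemo' is a made-up module name.
if __name__ == '__main__':
    lazy = ApiModule('lazydemo', {'fill': 'textwrap:fill'})
    sys.modules['lazydemo'] = lazy   # what initpkg() does for real packages
    print(lazy.fill("resolved lazily from textwrap on first attribute access"))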

View File

@ -1,6 +0,0 @@
if __name__ == '__main__':
import sys, py
sys.exit(py.test.cmdline.main())
# for more API entry points see the 'tests' definition
# in __init__.py

pytest/__init__.py Normal file
View File

@ -0,0 +1,15 @@
"""
extensible functional and unit testing with Python.
(c) Holger Krekel and others, 2004-2010
"""
__version__ = "1.4.0a1"
#__all__ = ['collect']
import pytest.collect
import pytest.config
from pytest import collect
def __main__():
from pytest.session import main
raise SystemExit(main())
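# Editor's note (illustrative, not part of the new file): the console scripts
# generated from setup.py point at "pytest:__main__", so an installed
# "py.test" script does roughly:
#     import pytest
#     pytest.__main__()    # parses sys.argv[1:] and raises SystemExit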

View File

@ -1,5 +1,5 @@
import py, os
from py._test.pluginmanager import PluginManager
from pytest.pluginmanager import PluginManager
import optparse
class Parser:

View File

@ -1,6 +1,6 @@
import py
from py._test.pluginmanager import HookRelay
from pytest.pluginmanager import HookRelay
def pytest_funcarg___pytest(request):
return PytestArg(request)

View File

@ -4,7 +4,7 @@ import sys
import py
def pytest_cmdline_main(config):
from py._test.session import Session
from pytest.session import Session
return Session(config).main()
def pytest_perform_collection(session):

View File

@ -8,7 +8,7 @@ import re
import inspect
import time
from fnmatch import fnmatch
from py._test.config import Config as pytestConfig
from pytest.config import Config as pytestConfig
from py.builtin import print_
def pytest_addoption(parser):
@ -149,7 +149,7 @@ class TmpTestdir:
return p
def getnode(self, config, arg):
from py._test.session import Collection
from pytest.session import Collection
collection = Collection(config)
return collection.getbyid(collection._normalizearg(arg))[0]
@ -161,7 +161,7 @@ class TmpTestdir:
def inline_genitems(self, *args):
#config = self.parseconfig(*args)
from py._test.session import Collection
from pytest.session import Collection
config = self.parseconfigure(*args)
rec = self.getreportrecorder(config)
items = Collection(config).perform_collect()
@ -223,7 +223,7 @@ class TmpTestdir:
""" this is used from tests that want to re-invoke parse(). """
if not args:
args = [self.tmpdir]
from py._test import config
from pytest import config
oldconfig = config.config_per_process # py.test.config
try:
c = config.config_per_process = py.test.config = pytestConfig()

View File

@ -4,7 +4,8 @@ Python related collection nodes.
import py
import inspect
import sys
from py._test.collect import configproperty, warnoldcollect
import pytest
from pytest.collect import configproperty, warnoldcollect
from py._code.code import TerminalRepr
def pytest_addoption(parser):
@ -19,13 +20,10 @@ def pytest_cmdline_main(config):
return 0
def pytest_namespace():
# XXX rather return than set directly
py.test.collect.Module = Module
py.test.collect.Class = Class
py.test.collect.Instance = Instance
py.test.collect.Function = Function
py.test.collect.Generator = Generator
py.test.collect._fillfuncargs = fillfuncargs
return {'collect': {
'Module': Module, 'Class': Class, 'Instance': Instance,
'Function': Function, 'Generator': Generator,
'_fillfuncargs': fillfuncargs}}
def pytest_funcarg__pytestconfig(request):
""" the pytest config object with access to command line opts."""
@ -134,7 +132,7 @@ class PyobjMixin(object):
modpath = self.getmodpath()
return fspath, lineno, modpath
class PyCollectorMixin(PyobjMixin, py.test.collect.Collector):
class PyCollectorMixin(PyobjMixin, pytest.collect.Collector):
Class = configproperty('Class')
Instance = configproperty('Instance')
Function = configproperty('Function')
@ -171,7 +169,7 @@ class PyCollectorMixin(PyobjMixin, py.test.collect.Collector):
return l
def _deprecated_join(self, name):
if self.__class__.join != py.test.collect.Collector.join:
if self.__class__.join != pytest.collect.Collector.join:
warnoldcollect()
return self.join(name)
@ -206,7 +204,7 @@ class PyCollectorMixin(PyobjMixin, py.test.collect.Collector):
l.append(function)
return l
class Module(py.test.collect.File, PyCollectorMixin):
class Module(pytest.collect.File, PyCollectorMixin):
def _getobj(self):
return self._memoizedcall('_obj', self._importtestmodule)
@ -236,10 +234,10 @@ class Module(py.test.collect.File, PyCollectorMixin):
if getattr(self.obj, 'disabled', 0):
py.log._apiwarn(">1.1.1", "%r uses 'disabled' which is deprecated, "
"use pytestmark=..., see pytest_skipping plugin" % (self.obj,))
py.test.skip("%r is disabled" %(self.obj,))
pytest.skip("%r is disabled" %(self.obj,))
if hasattr(self.obj, 'setup_module'):
#XXX: nose compat hack, move to nose plugin
# if it takes a positional arg, its probably a py.test style one
# if it takes a positional arg, it's probably a pytest style one
# so we pass the current module object
if inspect.getargspec(self.obj.setup_module)[0]:
self.obj.setup_module(self.obj)
@ -256,7 +254,7 @@ class Module(py.test.collect.File, PyCollectorMixin):
else:
self.obj.teardown_module()
class Class(PyCollectorMixin, py.test.collect.Collector):
class Class(PyCollectorMixin, pytest.collect.Collector):
def collect(self):
l = self._deprecated_collect()
@ -268,7 +266,7 @@ class Class(PyCollectorMixin, py.test.collect.Collector):
if getattr(self.obj, 'disabled', 0):
py.log._apiwarn(">1.1.1", "%r uses 'disabled' which is deprecated, "
"use pytestmark=..., see pytest_skipping plugin" % (self.obj,))
py.test.skip("%r is disabled" %(self.obj,))
pytest.skip("%r is disabled" %(self.obj,))
setup_class = getattr(self.obj, 'setup_class', None)
if setup_class is not None:
setup_class = getattr(setup_class, 'im_func', setup_class)
@ -280,7 +278,7 @@ class Class(PyCollectorMixin, py.test.collect.Collector):
teardown_class = getattr(teardown_class, 'im_func', teardown_class)
teardown_class(self.obj)
class Instance(PyCollectorMixin, py.test.collect.Collector):
class Instance(PyCollectorMixin, pytest.collect.Collector):
def _getobj(self):
return self.parent.obj()
@ -367,7 +365,7 @@ class FuncargLookupErrorRepr(TerminalRepr):
tw.line()
tw.line("%s:%d" % (self.filename, self.firstlineno+1))
class Generator(FunctionMixin, PyCollectorMixin, py.test.collect.Collector):
class Generator(FunctionMixin, PyCollectorMixin, pytest.collect.Collector):
def collect(self):
# test generators are seen as collectors but they also
# invoke setup/teardown on popular request
@ -406,7 +404,7 @@ class Generator(FunctionMixin, PyCollectorMixin, py.test.collect.Collector):
# Test Items
#
_dummy = object()
class Function(FunctionMixin, py.test.collect.Item):
class Function(FunctionMixin, pytest.collect.Item):
""" a Function Item is responsible for setting up
and executing a Python callable test object.
"""
@ -494,10 +492,10 @@ def fillfuncargs(function):
def getplugins(node, withpy=False): # might by any node
plugins = node.config._getmatchingplugins(node.fspath)
if withpy:
mod = node.getparent(py.test.collect.Module)
mod = node.getparent(pytest.collect.Module)
if mod is not None:
plugins.append(mod.obj)
inst = node.getparent(py.test.collect.Instance)
inst = node.getparent(pytest.collect.Instance)
if inst is not None:
plugins.append(inst.obj)
return plugins
@ -546,8 +544,8 @@ class FuncargRequest:
def __init__(self, pyfuncitem):
self._pyfuncitem = pyfuncitem
self.function = pyfuncitem.obj
self.module = pyfuncitem.getparent(py.test.collect.Module).obj
clscol = pyfuncitem.getparent(py.test.collect.Class)
self.module = pyfuncitem.getparent(pytest.collect.Module).obj
clscol = pyfuncitem.getparent(pytest.collect.Class)
self.cls = clscol and clscol.obj or None
self.instance = py.builtin._getimself(self.function)
self.config = pyfuncitem.config
@ -572,7 +570,7 @@ class FuncargRequest:
def applymarker(self, marker):
""" apply a marker to a test function invocation.
The 'marker' must be created with py.test.mark.* XYZ.
The 'marker' must be created with pytest.mark.* XYZ.
"""
if not isinstance(marker, py.test.mark.XYZ.__class__):
raise ValueError("%r is not a py.test.mark.* object")
@ -662,7 +660,7 @@ class FuncargRequest:
raise self.LookupError(msg)
def showfuncargs(config):
from py._test.session import Collection
from pytest.session import Collection
collection = Collection(config)
firstid = collection._normalizearg(config.args[0])
colitem = collection.getbyid(firstid)[0]

View File

@ -4,7 +4,7 @@ managing loading and interacting with pytest plugins.
import py
import sys
import inspect
from py._plugin import hookspec
from pytest.plugin import hookspec
default_plugins = (
"default terminal python runner pdb capture mark skipping tmpdir monkeypatch "
@ -138,9 +138,9 @@ class PluginManager(object):
mod = importplugin(modname)
except KeyboardInterrupt:
raise
except py.test.skip.Exception:
e = py.std.sys.exc_info()[1]
self._hints.append("skipped plugin %r: %s" %((modname, e.msg)))
#except py.test.skip.Exception:
# e = py.std.sys.exc_info()[1]
# self._hints.append("skipped plugin %r: %s" %((modname, e.msg)))
else:
check_old_use(mod, modname)
self.register(mod)
@ -176,11 +176,21 @@ class PluginManager(object):
mc = MultiCall(methods, {'parser': parser})
mc.execute()
def _setns(self, obj, dic):
for name, value in dic.items():
if isinstance(value, dict):
self._setns(getattr(obj, name), value)
else:
#print "setting", name, value
setattr(obj, name, value)
if hasattr(obj, '__all__'):
py.test.__all__.append(name)
def pytest_plugin_registered(self, plugin):
dic = self.call_plugin(plugin, "pytest_namespace", {}) or {}
for name, value in dic.items():
setattr(py.test, name, value)
py.test.__all__.append(name)
import pytest
if dic:
self._setns(pytest, dic)
if hasattr(self, '_config'):
self.call_plugin(plugin, "pytest_addoption",
{'parser': self._config._parser})
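# Editor's sketch (illustrative, not part of the diff): with the _setns hook
# above, a plugin's pytest_namespace() result is grafted onto the pytest
# module.  A hook like the following (dummy class for illustration) extends
# the existing pytest.collect namespace once the plugin is registered:
class DemoItem(object):
    pass
def pytest_namespace():
    return {'collect': {'DemoItem': DemoItem}}
# afterwards pytest.collect.DemoItem refers to the class above.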
@ -211,6 +221,7 @@ def canonical_importname(name):
return name
def importplugin(importspec):
#print "importing", importspec
try:
return __import__(importspec)
except ImportError:
@ -218,7 +229,7 @@ def importplugin(importspec):
if str(e).find(importspec) == -1:
raise
try:
return __import__("py._plugin.%s" %(importspec),
return __import__("pytest.plugin.%s" %(importspec),
None, None, '__doc__')
except ImportError:
e = py.std.sys.exc_info()[1]

View File

@ -7,6 +7,7 @@
import py
import os, sys
import pytest.config
#
# main entry point
@ -15,7 +16,7 @@ import os, sys
def main(args=None):
if args is None:
args = sys.argv[1:]
config = py.test.config
config = pytest.config.Config()
config.parse(args)
try:
exitstatus = config.hook.pytest_cmdline_main(config=config)
@ -23,7 +24,6 @@ def main(args=None):
e = sys.exc_info()[1]
sys.stderr.write("ERROR: %s\n" %(e.args[0],))
exitstatus = EXIT_INTERNALERROR
py.test.config = py.test.config.__class__()
return exitstatus
# exitcodes for the command line
@ -93,7 +93,7 @@ class Collection:
self.topdir = gettopdir(self.config.args)
self._argfspaths = [py.path.local(decodearg(x)[0])
for x in self.config.args]
x = py.test.collect.Directory(fspath=self.topdir,
x = pytest.collect.Directory(fspath=self.topdir,
config=config, collection=self)
self._topcollector = x.consider_dir(self.topdir)
self._topcollector.parent = None
@ -180,12 +180,12 @@ class Collection:
else:
name = None
for node in matching:
if isinstance(node, py.test.collect.Item):
if isinstance(node, pytest.collect.Item):
if name is None:
node.ihook.pytest_log_itemcollect(item=node)
result.append(node)
continue
assert isinstance(node, py.test.collect.Collector)
assert isinstance(node, pytest.collect.Collector)
node.ihook.pytest_collectstart(collector=node)
rep = node.ihook.pytest_make_collect_report(collector=node)
#print "matching", rep.result, "against name", name

View File

@ -5,11 +5,7 @@ if sys.version_info >= (3,0):
from setuptools import setup
long_description = """
py.test and pylib: rapid testing and development utils
- `py.test`_: cross-project testing tool with many advanced features
- `py.path`_: path abstractions over local and subversion files
- `py.code`_: dynamic code compile and traceback printing support
cross-project testing tool with many advanced features
Platforms: Linux, Win32, OSX
@ -27,8 +23,8 @@ Mailing lists and more contact points: http://pylib.org/contact.html
"""
def main():
setup(
name='py',
description='py.test and pylib: rapid testing and development utils.',
name='pytest',
description='py.test: simple testing with Python',
long_description = long_description,
version= '1.4.0a1',
url='http://pylib.org',
@ -48,36 +44,20 @@ def main():
'Topic :: Utilities',
'Programming Language :: Python',
'Programming Language :: Python :: 3'],
packages=['py',
'py._plugin',
'py._cmdline',
'py._code',
'py._compat',
'py._io',
'py._log',
'py._path',
'py._process',
'py._test',
],
packages=['pytest', 'pytest.plugin', ],
zip_safe=False,
)
def cmdline_entrypoints(versioninfo, platform, basename):
target = 'pytest:__main__'
if platform.startswith('java'):
points = {'py.test-jython': 'py.cmdline:pytest',
'py.which-jython': 'py.cmdline:pywhich'}
points = {'py.test-jython': target}
else:
if basename.startswith("pypy"):
points = {'py.test-%s' % basename: 'py.cmdline:pytest',
'py.which-%s' % basename: 'py.cmdline:pywhich',}
points = {'py.test-%s' % basename: target}
else: # cpython
points = {
'py.test-%s.%s' % versioninfo[:2] : 'py.cmdline:pytest',
'py.which-%s.%s' % versioninfo[:2] : 'py.cmdline:pywhich'
}
for x in ['py.cleanup', 'py.convert_unittest', 'py.countloc',
'py.lookup', 'py.svnwcrevert', 'py.which', 'py.test']:
points[x] = "py.cmdline:%s" % x.replace('.','')
points = {'py.test-%s.%s' % versioninfo[:2] : target,}
points['py.test'] = target
return points
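# Editor's illustrative check (not part of the original setup.py): on CPython
# every entry point now targets pytest:__main__, e.g.
#   cmdline_entrypoints((2, 6, 5), "linux2", "python")
#   == {'py.test-2.6': 'pytest:__main__', 'py.test': 'pytest:__main__'}
# the old py.which/py.cleanup/py.lookup/... scripts are no longer installed.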
def make_entry_points():

View File

@ -1 +0,0 @@
#

View File

@ -1,103 +0,0 @@
import sys, py
pytest_plugins = "pytest_pytester"
@py.test.mark.multi(name=[x for x in dir(py.cmdline) if x[0] != "_"])
def test_cmdmain(name, pytestconfig):
main = getattr(py.cmdline, name)
assert py.builtin.callable(main)
assert name[:2] == "py"
if not pytestconfig.getvalue("notoolsonpath"):
scriptname = "py." + name[2:]
assert py.path.local.sysfind(scriptname), scriptname
class TestPyLookup:
def test_basic(self, testdir):
p = testdir.makepyfile(hello="def x(): pass")
result = testdir.runpybin("py.lookup", "pass")
result.stdout.fnmatch_lines(
['%s:*def x(): pass' %(p.basename)]
)
def test_search_in_filename(self, testdir):
p = testdir.makepyfile(hello="def x(): pass")
result = testdir.runpybin("py.lookup", "hello")
result.stdout.fnmatch_lines(
['*%s:*' %(p.basename)]
)
def test_with_explicit_path(self, testdir):
sub1 = testdir.mkdir("things")
sub2 = testdir.mkdir("foo")
sub1.join("pyfile.py").write("def stuff(): pass")
searched = sub2.join("other.py")
searched.write("stuff = x")
result = testdir.runpybin("py.lookup", sub2.basename, "stuff")
result.stdout.fnmatch_lines(
["%s:1: stuff = x" % (searched.basename,)]
)
class TestPyCleanup:
def test_basic(self, testdir, tmpdir):
p = tmpdir.ensure("hello.py")
result = testdir.runpybin("py.cleanup", tmpdir)
assert result.ret == 0
assert p.check()
pyc = p.new(ext='pyc')
pyc.ensure()
pyclass = p.new(basename=p.basename + '$py.class')
result = testdir.runpybin("py.cleanup", tmpdir)
assert not pyc.check()
assert not pyclass.check()
def test_dir_remove_simple(self, testdir, tmpdir):
subdir = tmpdir.mkdir("subdir")
p = subdir.ensure("file")
result = testdir.runpybin("py.cleanup", "-d", tmpdir)
assert result.ret == 0
assert subdir.check()
p.remove()
p = tmpdir.mkdir("hello")
result = testdir.runpybin("py.cleanup", tmpdir, '-d')
assert result.ret == 0
assert not subdir.check()
@py.test.mark.multi(opt=["-s"])
def test_remove_setup_simple(self, testdir, tmpdir, opt):
subdir = tmpdir.mkdir("subdir")
p = subdir.ensure("setup.py")
subdir.mkdir("build").ensure("hello", "world.py")
egg1 = subdir.mkdir("something.egg-info")
egg1.mkdir("whatever")
okbuild = subdir.mkdir("preserved1").mkdir("build")
egg2 = subdir.mkdir("preserved2").mkdir("other.egg-info")
subdir.mkdir("dist")
result = testdir.runpybin("py.cleanup", opt, subdir)
assert result.ret == 0
assert okbuild.check()
assert egg1.check()
assert egg2.check()
assert subdir.join("preserved1").check()
assert subdir.join("preserved2").check()
assert not subdir.join("build").check()
assert not subdir.join("dist").check()
def test_remove_all(self, testdir, tmpdir):
tmpdir.ensure("setup.py")
tmpdir.ensure("build", "xyz.py")
tmpdir.ensure("dist", "abc.py")
piplog = tmpdir.ensure("preserved2", "pip-log.txt")
tmpdir.ensure("hello.egg-info")
setup = tmpdir.ensure("setup.py")
tmpdir.ensure("src/a/b")
x = tmpdir.ensure("src/x.py")
x2 = tmpdir.ensure("src/x.pyc")
x3 = tmpdir.ensure("src/x$py.class")
result = testdir.runpybin("py.cleanup", "-a", tmpdir)
assert result.ret == 0
assert len(tmpdir.listdir()) == 3
assert setup.check()
assert x.check()
assert not x2.check()
assert not x3.check()
assert not piplog.check()

View File

@ -1,415 +0,0 @@
import py
py.test.importorskip("parser")
from py._cmdline.pyconvert_unittest import rewrite_utest
class Test_UTestConvert:
def testall(self):
assert rewrite_utest("badger badger badger") == (
"badger badger badger")
assert rewrite_utest(
"self.assertRaises(excClass, callableObj, *args, **kwargs)"
) == (
"raises(excClass, callableObj, *args, **kwargs)"
)
assert rewrite_utest(
"""
self.failUnlessRaises(TypeError, func, 42, **{'arg1': 23})
"""
) == (
"""
raises(TypeError, func, 42, **{'arg1': 23})
"""
)
assert rewrite_utest(
"""
self.assertRaises(TypeError,
func,
mushroom)
"""
) == (
"""
raises(TypeError,
func,
mushroom)
"""
)
assert rewrite_utest("self.fail()") == "raise AssertionError"
assert rewrite_utest("self.fail('mushroom, mushroom')") == (
"raise AssertionError, 'mushroom, mushroom'")
assert rewrite_utest("self.assert_(x)") == "assert x"
assert rewrite_utest("self.failUnless(func(x)) # XXX") == (
"assert func(x) # XXX")
assert rewrite_utest(
"""
self.assert_(1 + f(y)
+ z) # multiline, keep parentheses
"""
) == (
"""
assert (1 + f(y)
+ z) # multiline, keep parentheses
"""
)
assert rewrite_utest("self.assert_(0, 'badger badger')") == (
"assert 0, 'badger badger'")
assert rewrite_utest("self.assert_(0, '''badger badger''')") == (
"assert 0, '''badger badger'''")
assert rewrite_utest(
r"""
self.assert_(0,
'Meet the badger.\n')
"""
) == (
r"""
assert 0, (
'Meet the badger.\n')
"""
)
assert rewrite_utest(
r"""
self.failIf(0 + 0
+ len('badger\n')
+ 0, '''badger badger badger badger
mushroom mushroom
Snake! Ooh a snake!
''') # multiline, must move the parens
"""
) == (
r"""
assert not (0 + 0
+ len('badger\n')
+ 0), '''badger badger badger badger
mushroom mushroom
Snake! Ooh a snake!
''' # multiline, must move the parens
"""
)
assert rewrite_utest("self.assertEquals(0, 0)") == (
"assert 0 == 0")
assert rewrite_utest(
r"""
self.assertEquals(0,
'Run away from the snake.\n')
"""
) == (
r"""
assert 0 == (
'Run away from the snake.\n')
"""
)
assert rewrite_utest(
"""
self.assertEquals(badger + 0
+ mushroom
+ snake, 0)
"""
) == (
"""
assert (badger + 0
+ mushroom
+ snake) == 0
"""
)
assert rewrite_utest(
"""
self.assertNotEquals(badger + 0
+ mushroom
+ snake,
mushroom
- badger)
"""
) == (
"""
assert (badger + 0
+ mushroom
+ snake) != (
mushroom
- badger)
"""
)
assert rewrite_utest(
"""
self.assertEquals(badger(),
mushroom()
+ snake(mushroom)
- badger())
"""
) == (
"""
assert badger() == (
mushroom()
+ snake(mushroom)
- badger())
"""
)
assert rewrite_utest("self.failIfEqual(0, 0)") == (
"assert not 0 == 0")
assert rewrite_utest("self.failUnlessEqual(0, 0)") == (
"assert 0 == 0")
assert rewrite_utest(
"""
self.failUnlessEqual(mushroom()
+ mushroom()
+ mushroom(), '''badger badger badger
badger badger badger badger
badger badger badger badger
''') # multiline, must move the parens
"""
) == (
"""
assert (mushroom()
+ mushroom()
+ mushroom()) == '''badger badger badger
badger badger badger badger
badger badger badger badger
''' # multiline, must move the parens
"""
)
assert rewrite_utest(
"""
self.assertEquals('''snake snake snake
snake snake snake''', mushroom)
"""
) == (
"""
assert '''snake snake snake
snake snake snake''' == mushroom
"""
)
assert rewrite_utest(
"""
self.assertEquals(badger(),
snake(), 'BAD BADGER')
"""
) == (
"""
assert badger() == (
snake()), 'BAD BADGER'
"""
)
assert rewrite_utest(
"""
self.assertNotEquals(badger(),
snake()+
snake(), 'POISONOUS MUSHROOM!\
Ai! I ate a POISONOUS MUSHROOM!!')
"""
) == (
"""
assert badger() != (
snake()+
snake()), 'POISONOUS MUSHROOM!\
Ai! I ate a POISONOUS MUSHROOM!!'
"""
)
assert rewrite_utest(
"""
self.assertEquals(badger(),
snake(), '''BAD BADGER
BAD BADGER
BAD BADGER'''
)
"""
) == (
"""
assert badger() == (
snake()), ( '''BAD BADGER
BAD BADGER
BAD BADGER'''
)
"""
)
assert rewrite_utest(
"""
self.assertEquals('''BAD BADGER
BAD BADGER
BAD BADGER''', '''BAD BADGER
BAD BADGER
BAD BADGER''')
"""
) == (
"""
assert '''BAD BADGER
BAD BADGER
BAD BADGER''' == '''BAD BADGER
BAD BADGER
BAD BADGER'''
"""
)
assert rewrite_utest(
"""
self.assertEquals('''GOOD MUSHROOM
GOOD MUSHROOM
GOOD MUSHROOM''',
'''GOOD MUSHROOM
GOOD MUSHROOM
GOOD MUSHROOM''',
''' FAILURE
FAILURE
FAILURE''')
"""
) == (
"""
assert '''GOOD MUSHROOM
GOOD MUSHROOM
GOOD MUSHROOM''' == (
'''GOOD MUSHROOM
GOOD MUSHROOM
GOOD MUSHROOM'''), (
''' FAILURE
FAILURE
FAILURE''')
"""
)
assert rewrite_utest(
"""
self.assertAlmostEquals(first, second, 5, 'A Snake!')
"""
) == (
"""
assert round(first - second, 5) == 0, 'A Snake!'
"""
)
assert rewrite_utest(
"""
self.assertAlmostEquals(first, second, 120)
"""
) == (
"""
assert round(first - second, 120) == 0
"""
)
assert rewrite_utest(
"""
self.assertAlmostEquals(first, second)
"""
) == (
"""
assert round(first - second, 7) == 0
"""
)
assert rewrite_utest(
"""
self.assertAlmostEqual(first, second, 5, '''A Snake!
Ohh A Snake! A Snake!!
''')
"""
) == (
"""
assert round(first - second, 5) == 0, '''A Snake!
Ohh A Snake! A Snake!!
'''
"""
)
assert rewrite_utest(
"""
self.assertNotAlmostEqual(first, second, 5, 'A Snake!')
"""
) == (
"""
assert round(first - second, 5) != 0, 'A Snake!'
"""
)
assert rewrite_utest(
"""
self.failIfAlmostEqual(first, second, 5, 'A Snake!')
"""
) == (
"""
assert not round(first - second, 5) == 0, 'A Snake!'
"""
)
assert rewrite_utest(
"""
self.failIfAlmostEqual(first, second, 5, 6, 7, 'Too Many Args')
"""
) == (
"""
self.failIfAlmostEqual(first, second, 5, 6, 7, 'Too Many Args')
"""
)
assert rewrite_utest(
"""
self.failUnlessAlmostEquals(first, second, 5, 'A Snake!')
"""
) == (
"""
assert round(first - second, 5) == 0, 'A Snake!'
"""
)
assert rewrite_utest(
"""
self.assertAlmostEquals(now do something reasonable ..()
oops, I am inside a comment as a ''' string, and the fname was
mentioned in passing, leaving us with something that isn't an
expression ... will this blow up?
"""
) == (
"""
self.assertAlmostEquals(now do something reasonable ..()
oops, I am inside a comment as a ''' string, and the fname was
mentioned in passing, leaving us with something that isn't an
expression ... will this blow up?
"""
)
assert rewrite_utest(
"""
self.failUnless('__builtin__' in modules, "An entry for __builtin__ "
"is not in sys.modules.")
"""
) == (
"""
assert '__builtin__' in modules, ( "An entry for __builtin__ "
"is not in sys.modules.")
"""
)
# two unittests on the same line separated by a semi-colon are
# only half-converted. Just so you know.
assert rewrite_utest(
"""
self.assertEquals(0, 0); self.assertEquals(1, 1) #not 2 per line!
"""
) == (
"""
assert 0 == 0; self.assertEquals(1, 1) #not 2 per line!
"""
)
if __name__ == '__main__':
unittest.main()

View File

@ -1 +0,0 @@
#

View File

@ -1,253 +0,0 @@
import py
def exvalue():
return py.std.sys.exc_info()[1]
def setup_module(mod):
py.code.patch_builtins(assertion=True, compile=False)
def teardown_module(mod):
py.code.unpatch_builtins(assertion=True, compile=False)
def f():
return 2
def test_assert():
try:
assert f() == 3
except AssertionError:
e = exvalue()
s = str(e)
assert s.startswith('assert 2 == 3\n')
def test_assert_with_explicit_message():
try:
assert f() == 3, "hello"
except AssertionError:
e = exvalue()
assert e.msg == 'hello'
def test_assert_within_finally():
class A:
def f():
pass
excinfo = py.test.raises(TypeError, """
try:
A().f()
finally:
i = 42
""")
s = excinfo.exconly()
assert s.find("takes no argument") != -1
#def g():
# A.f()
#excinfo = getexcinfo(TypeError, g)
#msg = getmsg(excinfo)
#assert msg.find("must be called with A") != -1
def test_assert_multiline_1():
try:
assert (f() ==
3)
except AssertionError:
e = exvalue()
s = str(e)
assert s.startswith('assert 2 == 3\n')
def test_assert_multiline_2():
try:
assert (f() == (4,
3)[-1])
except AssertionError:
e = exvalue()
s = str(e)
assert s.startswith('assert 2 ==')
def test_in():
try:
assert "hi" in [1, 2]
except AssertionError:
e = exvalue()
s = str(e)
assert s.startswith("assert 'hi' in")
def test_is():
try:
assert 1 is 2
except AssertionError:
e = exvalue()
s = str(e)
assert s.startswith("assert 1 is 2")
@py.test.mark.skipif("sys.version_info < (2,6)")
def test_attrib():
class Foo(object):
b = 1
i = Foo()
try:
assert i.b == 2
except AssertionError:
e = exvalue()
s = str(e)
assert s.startswith("assert 1 == 2")
@py.test.mark.skipif("sys.version_info < (2,6)")
def test_attrib_inst():
class Foo(object):
b = 1
try:
assert Foo().b == 2
except AssertionError:
e = exvalue()
s = str(e)
assert s.startswith("assert 1 == 2")
def test_assert_non_string_message():
class A:
def __str__(self):
return "hello"
try:
assert 0 == 1, A()
except AssertionError:
e = exvalue()
assert e.msg == "hello"
def test_assert_keyword_arg():
def f(x=3):
return False
try:
assert f(x=5)
except AssertionError:
e = exvalue()
assert "x=5" in e.msg
# These tests should both fail, but should fail nicely...
class WeirdRepr:
def __repr__(self):
return '<WeirdRepr\nsecond line>'
def bug_test_assert_repr():
v = WeirdRepr()
try:
assert v == 1
except AssertionError:
e = exvalue()
assert e.msg.find('WeirdRepr') != -1
assert e.msg.find('second line') != -1
assert 0
def test_assert_non_string():
try:
assert 0, ['list']
except AssertionError:
e = exvalue()
assert e.msg.find("list") != -1
def test_assert_implicit_multiline():
try:
x = [1,2,3]
assert x != [1,
2, 3]
except AssertionError:
e = exvalue()
assert e.msg.find('assert [1, 2, 3] !=') != -1
def test_assert_with_brokenrepr_arg():
class BrokenRepr:
def __repr__(self): 0 / 0
e = AssertionError(BrokenRepr())
if e.msg.find("broken __repr__") == -1:
py.test.fail("broken __repr__ not handle correctly")
def test_multiple_statements_per_line():
try:
a = 1; assert a == 2
except AssertionError:
e = exvalue()
assert "assert 1 == 2" in e.msg
def test_power():
try:
assert 2**3 == 7
except AssertionError:
e = exvalue()
assert "assert (2 ** 3) == 7" in e.msg
class TestView:
def setup_class(cls):
cls.View = py.test.importorskip("py._code._assertionold").View
def test_class_dispatch(self):
### Use a custom class hierarchy with existing instances
class Picklable(self.View):
pass
class Simple(Picklable):
__view__ = object
def pickle(self):
return repr(self.__obj__)
class Seq(Picklable):
__view__ = list, tuple, dict
def pickle(self):
return ';'.join(
[Picklable(item).pickle() for item in self.__obj__])
class Dict(Seq):
__view__ = dict
def pickle(self):
return Seq.pickle(self) + '!' + Seq(self.values()).pickle()
assert Picklable(123).pickle() == '123'
assert Picklable([1,[2,3],4]).pickle() == '1;2;3;4'
assert Picklable({1:2}).pickle() == '1!2'
def test_viewtype_class_hierarchy(self):
# Use a custom class hierarchy based on attributes of existing instances
class Operation:
"Existing class that I don't want to change."
def __init__(self, opname, *args):
self.opname = opname
self.args = args
existing = [Operation('+', 4, 5),
Operation('getitem', '', 'join'),
Operation('setattr', 'x', 'y', 3),
Operation('-', 12, 1)]
class PyOp(self.View):
def __viewkey__(self):
return self.opname
def generate(self):
return '%s(%s)' % (self.opname, ', '.join(map(repr, self.args)))
class PyBinaryOp(PyOp):
__view__ = ('+', '-', '*', '/')
def generate(self):
return '%s %s %s' % (self.args[0], self.opname, self.args[1])
codelines = [PyOp(op).generate() for op in existing]
assert codelines == ["4 + 5", "getitem('', 'join')",
"setattr('x', 'y', 3)", "12 - 1"]
def test_underscore_api():
py.code._AssertionError
py.code._reinterpret_old # used by pypy
py.code._reinterpret
@py.test.mark.skipif("sys.version_info < (2,6)")
def test_assert_customizable_reprcompare(monkeypatch):
monkeypatch.setattr(py.code, '_reprcompare', lambda *args: 'hello')
try:
assert 3 == 4
except AssertionError:
e = exvalue()
s = str(e)
assert "hello" in s

View File

@ -1,106 +0,0 @@
import py
import sys
def test_ne():
code1 = py.code.Code(compile('foo = "bar"', '', 'exec'))
assert code1 == code1
code2 = py.code.Code(compile('foo = "baz"', '', 'exec'))
assert code2 != code1
def test_code_gives_back_name_for_not_existing_file():
name = 'abc-123'
co_code = compile("pass\n", name, 'exec')
assert co_code.co_filename == name
code = py.code.Code(co_code)
assert str(code.path) == name
assert code.fullsource is None
def test_code_with_class():
class A:
pass
py.test.raises(TypeError, "py.code.Code(A)")
if True:
def x():
pass
def test_code_fullsource():
code = py.code.Code(x)
full = code.fullsource
assert 'test_code_fullsource()' in str(full)
def test_code_source():
code = py.code.Code(x)
src = code.source()
expected = """def x():
pass"""
assert str(src) == expected
def test_frame_getsourcelineno_myself():
def func():
return sys._getframe(0)
f = func()
f = py.code.Frame(f)
source, lineno = f.code.fullsource, f.lineno
assert source[lineno].startswith(" return sys._getframe(0)")
def test_getstatement_empty_fullsource():
def func():
return sys._getframe(0)
f = func()
f = py.code.Frame(f)
prop = f.code.__class__.fullsource
try:
f.code.__class__.fullsource = None
assert f.statement == py.code.Source("")
finally:
f.code.__class__.fullsource = prop
def test_code_from_func():
co = py.code.Code(test_frame_getsourcelineno_myself)
assert co.firstlineno
assert co.path
def test_builtin_patch_unpatch(monkeypatch):
cpy_builtin = py.builtin.builtins
comp = cpy_builtin.compile
def mycompile(*args, **kwargs):
return comp(*args, **kwargs)
class Sub(AssertionError):
pass
monkeypatch.setattr(cpy_builtin, 'AssertionError', Sub)
monkeypatch.setattr(cpy_builtin, 'compile', mycompile)
py.code.patch_builtins()
assert cpy_builtin.AssertionError != Sub
assert cpy_builtin.compile != mycompile
py.code.unpatch_builtins()
assert cpy_builtin.AssertionError is Sub
assert cpy_builtin.compile == mycompile
def test_unicode_handling():
value = py.builtin._totext('\xc4\x85\xc4\x87\n', 'utf-8').encode('utf8')
def f():
raise Exception(value)
excinfo = py.test.raises(Exception, f)
s = str(excinfo)
if sys.version_info[0] < 3:
u = unicode(excinfo)
def test_unicode_or_repr():
from py._code.code import unicode_or_repr
assert unicode_or_repr('hello') == "hello"
if sys.version_info[0] < 3:
s = unicode_or_repr('\xf6\xc4\x85')
else:
s = eval("unicode_or_repr(b'\\f6\\xc4\\x85')")
assert 'print-error' in s
assert 'c4' in s
class A:
def __repr__(self):
raise ValueError()
s = unicode_or_repr(A())
assert 'print-error' in s
assert 'ValueError' in s

Some files were not shown because too many files have changed in this diff