[svn r37264] create the new development trunk
--HG-- branch : trunk
commit 5992a8ef21
@@ -0,0 +1,45 @@
py lib Copyright holders, 2003-2005
=======================================

Except when otherwise stated (look for LICENSE files or information at
the beginning of each file) the files in the 'py' directory are
copyrighted by one or more of the following people and organizations:

Holger Krekel <hpk@trillke.net>
merlinux GmbH, Germany <office@merlinux.de>
Armin Rigo <arigo@tunes.org>
Carl Friedrich Bolz <cfbolz@merlinux.de>
Maciek Fijalkowski <fijal@genesilico.pl>
Guido Wesdorp <guido@merlinux.de>
Jan Balster <jan@balster.info>

Contributors include::

Ian Bicking <ianb@colorstudy.com>
Grig Gheorghiu <grig@gheorghiu.net>
Bob Ippolito <bob@redivi.com>
Christian Tismer <tismer@stackless.com>
Samuele Pedroni <pedronis@strakt.com>

Except when otherwise stated (look for LICENSE files or information at
the beginning of each file) all files in the 'py' directory are
licensed under the MIT license:

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

@@ -0,0 +1,129 @@
"""\
the py lib is a development support library featuring
py.test, an interactive testing tool which supports
unit-testing with practically no boilerplate.
"""
from initpkg import initpkg

version = "0.8.80-alpha2"

initpkg(__name__,
    description = "py.test and the py lib",
    revision = int('$LastChangedRevision: 37056 $'.split(':')[1][:-1]),
    lastchangedate = '$LastChangedDate: 2007-01-20 13:11:01 +0100 (Sat, 20 Jan 2007) $',
    version = version,
    url = "http://codespeak.net/py",
    download_url = "http://codespeak.net/download/py/%s.tar.gz" %(version,),
    license = "MIT license",
    platforms = ['unix', 'linux', 'cygwin'],
    author = "holger krekel & others",
    author_email = "py-dev@codespeak.net",
    long_description = globals()['__doc__'],

    exportdefs = {
        '_dist.setup' : ('./misc/_dist.py', 'setup'),

        # helpers for use from test functions or collectors
        'test.raises' : ('./test/raises.py', 'raises'),
        'test.deprecated_call' : ('./test/deprecate.py', 'deprecated_call'),
        'test.skip' : ('./test/item.py', 'skip'),
        'test.fail' : ('./test/item.py', 'fail'),
        'test.exit' : ('./test/session.py', 'exit'),
        'test.compat.TestCase' : ('./test/compat.py', 'TestCase'),

        # configuration/initialization related test api
        'test.config' : ('./test/config.py', 'config'),
        'test.ensuretemp' : ('./test/config.py', 'ensuretemp'),
        'test.cmdline.main' : ('./test/cmdline.py', 'main'),

        # for customization of collecting/running tests
        'test.collect.Collector' : ('./test/collect.py', 'Collector'),
        'test.collect.Directory' : ('./test/collect.py', 'Directory'),
        'test.collect.Module' : ('./test/collect.py', 'Module'),
        'test.collect.DoctestFile' : ('./test/collect.py', 'DoctestFile'),
        'test.collect.Class' : ('./test/collect.py', 'Class'),
        'test.collect.Instance' : ('./test/collect.py', 'Instance'),
        'test.collect.Generator' : ('./test/collect.py', 'Generator'),
        'test.Item' : ('./test/item.py', 'Item'),
        'test.Function' : ('./test/item.py', 'Function'),

        # thread related API (still in early design phase)
        '_thread.WorkerPool' : ('./thread/pool.py', 'WorkerPool'),
        '_thread.NamedThreadPool' : ('./thread/pool.py', 'NamedThreadPool'),
        '_thread.ThreadOut' : ('./thread/io.py', 'ThreadOut'),

        # hook into the top-level standard library
        'std' : ('./misc/std.py', 'std'),

        'process.cmdexec' : ('./process/cmdexec.py', 'cmdexec'),

        # path implementations
        'path.svnwc' : ('./path/svn/wccommand.py', 'SvnWCCommandPath'),
        'path.svnurl' : ('./path/svn/urlcommand.py', 'SvnCommandPath'),
        'path.local' : ('./path/local/local.py', 'LocalPath'),
        'path.extpy' : ('./path/extpy/extpy.py', 'Extpy'),

        # some nice slightly magic APIs
        'magic.greenlet' : ('./magic/greenlet.py', 'greenlet'),
        'magic.invoke' : ('./magic/invoke.py', 'invoke'),
        'magic.revoke' : ('./magic/invoke.py', 'revoke'),
        'magic.patch' : ('./magic/patch.py', 'patch'),
        'magic.revert' : ('./magic/patch.py', 'revert'),
        'magic.autopath' : ('./magic/autopath.py', 'autopath'),
        'magic.AssertionError' : ('./magic/assertion.py', 'AssertionError'),

        # python inspection/code-generation API
        'code.compile' : ('./code/source.py', 'compile_'),
        'code.Source' : ('./code/source.py', 'Source'),
        'code.Code' : ('./code/code.py', 'Code'),
        'code.Frame' : ('./code/frame.py', 'Frame'),
        'code.ExceptionInfo' : ('./code/excinfo.py', 'ExceptionInfo'),
        'code.Traceback' : ('./code/traceback2.py', 'Traceback'),

        # backports and additions of builtins
        'builtin.enumerate' : ('./builtin/enumerate.py', 'enumerate'),
        'builtin.reversed' : ('./builtin/reversed.py', 'reversed'),
        'builtin.sorted' : ('./builtin/sorted.py', 'sorted'),
        'builtin.BaseException' : ('./builtin/exception.py', 'BaseException'),
        'builtin.set' : ('./builtin/set.py', 'set'),
        'builtin.frozenset' : ('./builtin/set.py', 'frozenset'),

        # gateways into remote contexts
        'execnet.SocketGateway' : ('./execnet/register.py', 'SocketGateway'),
        'execnet.PopenGateway' : ('./execnet/register.py', 'PopenGateway'),
        'execnet.SshGateway' : ('./execnet/register.py', 'SshGateway'),

        # input-output helping
        'io.dupfile' : ('./io/dupfile.py', 'dupfile'),
        'io.FDCapture' : ('./io/capture.py', 'FDCapture'),
        'io.OutErrCapture' : ('./io/capture.py', 'OutErrCapture'),
        'io.callcapture' : ('./io/capture.py', 'callcapture'),

        # error module, defining all errno's as Classes
        'error' : ('./misc/error.py', 'error'),

        # small and mean xml/html generation
        'xml.html' : ('./xmlobj/html.py', 'html'),
        'xml.Tag' : ('./xmlobj/xml.py', 'Tag'),
        'xml.raw' : ('./xmlobj/xml.py', 'raw'),
        'xml.Namespace' : ('./xmlobj/xml.py', 'Namespace'),
        'xml.escape' : ('./xmlobj/misc.py', 'escape'),

        # logging API ('producers' and 'consumers' connected via keywords)
        'log.Producer' : ('./log/producer.py', 'Producer'),
        'log.default' : ('./log/producer.py', 'default'),
        'log._getstate' : ('./log/producer.py', '_getstate'),
        'log._setstate' : ('./log/producer.py', '_setstate'),
        'log.setconsumer' : ('./log/consumer.py', 'setconsumer'),
        'log.Path' : ('./log/consumer.py', 'Path'),
        'log.STDOUT' : ('./log/consumer.py', 'STDOUT'),
        'log.STDERR' : ('./log/consumer.py', 'STDERR'),
        'log.Syslog' : ('./log/consumer.py', 'Syslog'),
        'log.get' : ('./log/logger.py', 'get'),

        # compatibility modules (taken from 2.4.4)
        'compat.doctest' : ('./compat/doctest.py', '*'),
        'compat.optparse' : ('./compat/optparse.py', '*'),
        'compat.textwrap' : ('./compat/textwrap.py', '*'),
        'compat.subprocess' : ('./compat/subprocess.py', '*'),
    })
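The `exportdefs` mapping above is what `initpkg` uses to build the lazily-imported public `py.*` namespace: each key becomes an attribute path under `py`, resolved on first access to the object named in the `(relative file, name)` pair. A minimal usage sketch of one exported helper (the failing expression is purely illustrative):

    import py

    def test_divide_by_zero():
        # 'test.raises' is exported above from ./test/raises.py
        py.test.raises(ZeroDivisionError, "1 / 0")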

@@ -0,0 +1,11 @@
function loadloc() {
    /* load iframe content using # part of the url */
    var loc = document.location.toString();
    if (loc.indexOf('#') == -1) {
        return;
    };
    var chunks = loc.split('#');
    var anchor = chunks[chunks.length - 1];
    var iframe = document.getElementsByTagName('iframe')[0];
    iframe.src = anchor;
};

@@ -0,0 +1,55 @@
""" run 'py.test --apigen=<this script>' to get documentation exported

    exports to /tmp/output by default, set the environment variable
    'APIGEN_TARGET' to override
"""

import os
import py
from py.__.apigen import htmlgen
from py.__.apigen import linker
from py.__.apigen import project

def import_pkgdir(pkgdir):
    if pkgdir.check(dir=True):
        return pkgdir.join('__init__.py').getpymodule()
    else:
        # XXX not sure if this is ever used normally...
        return pkgdir.getpymodule()

def get_documentable_items(pkgdir):
    rootmod = import_pkgdir(pkgdir)
    if hasattr(rootmod, '__package__'):
        return rootmod
    # XXX fix non-initpkg situations(?)
    return {}

def build(pkgdir, dsa):
    l = linker.Linker()
    proj = project.Project()

    if 'APIGEN_TARGET' in os.environ:
        targetdir = py.path.local(os.environ['APIGEN_TARGET'])
    else:
        targetdir = pkgdir.dirpath().join('apigen')
    targetdir.ensure(dir=True)

    all_names = dsa._get_names(filter=lambda x, y: True)
    namespace_tree = htmlgen.create_namespace_tree(all_names)
    apb = htmlgen.ApiPageBuilder(targetdir, l, dsa, pkgdir)
    spb = htmlgen.SourcePageBuilder(targetdir, l, pkgdir)

    ns_data = apb.prepare_namespace_pages(namespace_tree)
    class_names = dsa.get_class_names()
    class_data, method_data = apb.prepare_class_pages(namespace_tree,
                                                      class_names)
    function_names = dsa.get_function_names()
    func_data = apb.prepare_function_pages(namespace_tree, function_names)
    source_data = spb.prepare_pages(pkgdir)

    apb.build_namespace_pages(ns_data, proj)
    apb.build_class_pages(class_data, proj)
    apb.build_method_pages(method_data, proj)
    apb.build_function_pages(func_data, proj)
    spb.build_pages(source_data, proj, pkgdir)
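The docstring leans on the APIGEN_TARGET environment variable; a small sketch of how `build()` resolves the output directory (the package path is illustrative, not part of this commit):

    import os
    import py

    pkgdir = py.path.local('py')   # hypothetical checkout of the package
    # mirror build()'s target-directory selection
    if 'APIGEN_TARGET' in os.environ:
        targetdir = py.path.local(os.environ['APIGEN_TARGET'])
    else:
        targetdir = pkgdir.dirpath().join('apigen')
    targetdir.ensure(dir=True)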

@@ -0,0 +1,10 @@
import py

Option = py.test.config.Option
option = py.test.config.addoptions("apigen test options",
    Option('', '--webcheck',
           action="store_true", dest="webcheck", default=False,
           help="run XHTML validation tests"
    ),
)
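For reference, a test would typically guard the expensive XHTML validation behind this flag; a hedged sketch (the `option` object is the one created by this conftest, the import style and test body are assumptions):

    import py
    from conftest import option   # object returned by addoptions() above

    def test_generated_page_is_valid_xhtml():
        if not option.webcheck:
            py.test.skip("use --webcheck to enable XHTML validation")
        # ... hypothetical validation of a generated page would go here ...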

@@ -0,0 +1,567 @@
import py
import inspect
from py.__.apigen.layout import LayoutPage
from py.__.apigen.source import browser as source_browser
from py.__.apigen.source import html as source_html
from py.__.apigen.tracer.description import is_private
from py.__.apigen.rest.genrest import split_of_last_part
from py.__.apigen.linker import relpath

sorted = py.builtin.sorted
html = py.xml.html
raw = py.xml.raw

# HTML related stuff
class H(html):
    class Description(html.div):
        style = html.Style(margin_left='15em')

    class NamespaceDescription(Description):
        pass

    class NamespaceItem(html.div):
        pass

    class NamespaceDef(html.h1):
        pass

    class ClassDescription(Description):
        pass

    class ClassDef(html.h1):
        pass

    class MethodDescription(Description):
        pass

    class MethodDef(html.h2):
        pass

    class FunctionDescription(Description):
        pass

    class FunctionDef(html.h2):
        pass

    class ParameterDescription(html.div):
        pass

    class Docstring(html.div):
        style = html.Style(white_space='pre', min_height='3em')

    class Navigation(html.div):
        style = html.Style(min_height='99%', float='left', margin_top='1.2em',
                           overflow='auto', width='15em', white_space='nowrap')

    class NavigationItem(html.div):
        pass

    class BaseDescription(html.a):
        pass

    class SourceDef(html.div):
        pass

    class NonPythonSource(html.pre):
        style = html.Style(margin_left='15em')

    class DirList(html.div):
        style = html.Style(margin_left='15em')

    class DirListItem(html.div):
        pass

    class ValueDescList(html.ul):
        def __init__(self, *args, **kwargs):
            super(H.ValueDescList, self).__init__(*args, **kwargs)

    class ValueDescItem(html.li):
        pass

def get_param_htmldesc(linker, func):
    """ get the html for the parameters of a function """
    import inspect
    # XXX copy and modify formatargspec to produce html
    return H.em(inspect.formatargspec(*inspect.getargspec(func)))

def build_navitem_html(linker, name, linkid, indent, selected):
    href = linker.get_lazyhref(linkid)
    navitem = H.NavigationItem((indent * 2 * u'\xa0'), H.a(name, href=href))
    if selected:
        navitem.attr.class_ = 'selected'
    return navitem

# some helper functionality
def source_dirs_files(fspath):
    """ returns a tuple (dirs, files) for fspath

        dirs are all the subdirs, files are the files which are interesting
        in building source documentation for a Python code tree (basically all
        normal files excluding .pyc and .pyo ones)

        all files and dirs that have a name starting with . are considered
        hidden
    """
    dirs = []
    files = []
    for child in fspath.listdir():
        if child.basename.startswith('.'):
            continue
        if child.check(dir=True):
            dirs.append(child)
        elif child.check(file=True):
            if child.ext in ['.pyc', '.pyo']:
                continue
            files.append(child)
    return sorted(dirs), sorted(files)

def create_namespace_tree(dotted_names):
    """ creates a tree (in dict form) from a set of dotted names
    """
    ret = {}
    for dn in dotted_names:
        path = dn.split('.')
        for i in xrange(len(path)):
            ns = '.'.join(path[:i])
            itempath = '.'.join(path[:i + 1])
            if ns not in ret:
                ret[ns] = []
            if itempath not in ret[ns]:
                ret[ns].append(itempath)
    return ret

def wrap_page(project, title, contentel, navel, outputpath, stylesheeturl):
    page = LayoutPage(project, title, nav=navel, encoding='UTF-8',
                      stylesheeturl=stylesheeturl)
    page.set_content(contentel)
    here = py.magic.autopath().dirpath()
    style = here.join('style.css').read()
    outputpath.join('style.css').write(style)
    return page

# the PageBuilder classes take care of producing the docs (using the stuff
# above)
class AbstractPageBuilder(object):
    def write_page(self, title, reltargetpath, project, tag, nav):
        targetpath = self.base.join(reltargetpath)
        stylesheeturl = relpath('%s/' % (targetpath.dirpath(),),
                                self.base.join('style.css').strpath)
        page = wrap_page(project, title,
                         tag, nav, self.base, stylesheeturl)
        content = self.linker.call_withbase(reltargetpath, page.unicode)
        targetpath.ensure()
        targetpath.write(content.encode("utf8"))

class SourcePageBuilder(AbstractPageBuilder):
    """ builds the html for a source docs page """
    def __init__(self, base, linker, projroot):
        self.base = base
        self.linker = linker
        self.projroot = projroot

    def build_navigation(self, fspath):
        nav = H.Navigation()
        relpath = fspath.relto(self.projroot)
        path = relpath.split('/')
        indent = 0
        # build links to parents
        for i in xrange(len(path)):
            dirpath = '/'.join(path[:i])
            abspath = self.projroot.join(dirpath).strpath
            if i == 0:
                text = 'root'
            else:
                text = path[i-1]
            nav.append(build_navitem_html(self.linker, text, abspath,
                                          indent, False))
            indent += 1
        # build siblings or children and self
        if fspath.check(dir=True):
            # we're a dir, build ourselves and our children
            dirpath = fspath
            nav.append(build_navitem_html(self.linker, dirpath.basename,
                                          dirpath.strpath, indent, True))
            indent += 1
        elif fspath.strpath == self.projroot.strpath:
            dirpath = fspath
        else:
            # we're a file, build our parent's children only
            dirpath = fspath.dirpath()
        diritems, fileitems = source_dirs_files(dirpath)
        for dir in diritems:
            nav.append(build_navitem_html(self.linker, dir.basename,
                                          dir.strpath, indent, False))
        for file in fileitems:
            selected = (fspath.check(file=True) and
                        file.basename == fspath.basename)
            nav.append(build_navitem_html(self.linker, file.basename,
                                          file.strpath, indent, selected))
        return nav

    re = py.std.re
    _reg_body = re.compile(r'<body[^>]*>(.*)</body>', re.S)
    def build_python_page(self, fspath):
        mod = source_browser.parse_path(fspath)
        # XXX let's cheat a bit here... there should be a different function
        # using the linker, and returning a proper py.xml.html element,
        # at some point
        html = source_html.create_html(mod)
        snippet = self._reg_body.search(html).group(1)
        tag = H.SourceDef(raw(snippet))
        nav = self.build_navigation(fspath)
        return tag, nav

    def build_dir_page(self, fspath):
        tag = H.DirList()
        dirs, files = source_dirs_files(fspath)
        tag.append(H.h2('directories'))
        for path in dirs:
            tag.append(H.DirListItem(H.a(path.basename,
                       href=self.linker.get_lazyhref(str(path)))))
        tag.append(H.h2('files'))
        for path in files:
            tag.append(H.DirListItem(H.a(path.basename,
                       href=self.linker.get_lazyhref(str(path)))))
        nav = self.build_navigation(fspath)
        return tag, nav

    def build_nonpython_page(self, fspath):
        try:
            tag = H.NonPythonSource(unicode(fspath.read(), 'utf-8'))
        except UnicodeError:
            tag = H.NonPythonSource('no source available (binary file?)')
        nav = self.build_navigation(fspath)
        return tag, nav

    def prepare_pages(self, base):
        passed = []
        for fspath in [base] + list(base.visit()):
            if fspath.ext in ['.pyc', '.pyo']:
                continue
            relfspath = fspath.relto(base)
            if relfspath.find('/.') > -1:
                # skip hidden dirs and files
                continue
            elif fspath.check(dir=True):
                if relfspath != '':
                    relfspath += '/'
                reloutputpath = 'source/%sindex.html' % (relfspath,)
            else:
                reloutputpath = "source/%s.html" % (relfspath,)
            outputpath = self.base.join(reloutputpath)
            self.linker.set_link(str(fspath), reloutputpath)
            passed.append((fspath, outputpath))
        return passed

    def build_pages(self, data, project, base):
        """ build syntax-colored source views """
        for fspath, outputpath in data:
            if fspath.check(ext='.py'):
                try:
                    tag, nav = self.build_python_page(fspath)
                except (KeyboardInterrupt, SystemError):
                    raise
                except: # XXX strange stuff going wrong at times... need to fix
                    exc, e, tb = py.std.sys.exc_info()
                    print '%s - %s' % (exc, e)
                    print
                    print ''.join(py.std.traceback.format_tb(tb))
                    print '-' * 79
                    del tb
                    tag, nav = self.build_nonpython_page(fspath)
            elif fspath.check(dir=True):
                tag, nav = self.build_dir_page(fspath)
            else:
                tag, nav = self.build_nonpython_page(fspath)
            title = 'sources for %s' % (fspath.basename,)
            reltargetpath = outputpath.relto(self.base)
            self.write_page(title, reltargetpath, project, tag, nav)

class ApiPageBuilder(AbstractPageBuilder):
    """ builds the html for an api docs page """
    def __init__(self, base, linker, dsa, projroot):
        self.base = base
        self.linker = linker
        self.dsa = dsa
        self.projroot = projroot
        self.projpath = py.path.local(projroot)

    def build_callable_view(self, dotted_name):
        """ build the html for a class method """
        # XXX we may want to have seperate
        func = self.dsa.get_obj(dotted_name)
        docstring = func.__doc__
        localname = func.__name__
        argdesc = get_param_htmldesc(self.linker, func)
        valuedesc = self.build_callable_value_description(dotted_name)

        sourcefile = inspect.getsourcefile(func)
        callable_source = self.dsa.get_function_source(dotted_name)
        is_in_pkg = py.path.local(sourcefile).relto(self.projpath)
        # i assume they're both either available or unavailable(XXX ?)
        if is_in_pkg and sourcefile and callable_source:
            csource = H.div(H.br(),
                            H.a('origin: %s' % (sourcefile,),
                                href=self.linker.get_lazyhref(sourcefile)),
                            H.br(),
                            H.SourceDef(H.pre(callable_source)))
        elif not is_in_pkg and sourcefile and callable_source:
            csource = H.div(H.br(),
                            H.em('origin: %s' % (sourcefile,)),
                            H.br(),
                            H.SourceDef(H.pre(callable_source)))
        else:
            csource = H.SourceDef('could not get source file')

        snippet = H.FunctionDescription(
            H.FunctionDef(localname, argdesc),
            valuedesc,
            H.Docstring(docstring or H.em('no docstring available')),
            csource,
        )

        return snippet

    def build_class_view(self, dotted_name):
        """ build the html for a class """
        cls = self.dsa.get_obj(dotted_name)
        # XXX is this a safe check?
        try:
            sourcefile = inspect.getsourcefile(cls)
        except TypeError:
            sourcelink = 'builtin file, no source available'
        else:
            if sourcefile is None:
                sourcelink = H.div('no source available')
            else:
                if sourcefile[-1] in ['o', 'c']:
                    sourcefile = sourcefile[:-1]
                sourcelink = H.div(H.a('view source',
                    href=self.linker.get_lazyhref(sourcefile)))

        docstring = cls.__doc__
        methods = self.dsa.get_class_methods(dotted_name)
        basehtml = []
        bases = self.dsa.get_possible_base_classes(dotted_name)
        for base in bases:
            try:
                obj = self.dsa.get_obj(base.name)
            except KeyError:
                basehtml.append(base.name)
            else:
                href = self.linker.get_lazyhref(base.name)
                basehtml.append(H.BaseDescription(base.name, href=href))
            basehtml.append(',')
        if basehtml:
            basehtml.pop()
        basehtml.append('):')
        if not hasattr(cls, '__name__'):
            clsname = 'instance of %s' % (cls.__class__.__name__,)
        else:
            clsname = cls.__name__
        snippet = H.ClassDescription(
            # XXX bases HTML
            H.ClassDef('%s(' % (clsname,), *basehtml),
            H.Docstring(docstring or H.em('no docstring available')),
            sourcelink,
        )
        if methods:
            snippet.append(H.h2('methods:'))
            for method in methods:
                snippet += self.build_callable_view('%s.%s' % (dotted_name,
                                                               method))
        # XXX properties
        return snippet

    def build_namespace_view(self, namespace_dotted_name, item_dotted_names):
        """ build the html for a namespace (module) """
        try:
            obj = self.dsa.get_obj(namespace_dotted_name)
        except KeyError:
            docstring = None
        else:
            docstring = obj.__doc__
        snippet = H.NamespaceDescription(
            H.NamespaceDef(namespace_dotted_name),
            H.Docstring(docstring or H.em('no docstring available'))
        )
        for dotted_name in item_dotted_names:
            itemname = dotted_name.split('.')[-1]
            if is_private(itemname):
                continue
            snippet.append(
                H.NamespaceItem(
                    H.a(itemname,
                        href=self.linker.get_lazyhref(dotted_name)
                    )
                )
            )
        return snippet

    def prepare_class_pages(self, namespace_tree, classes_dotted_names):
        passed = []
        methodsdata = []
        for dotted_name in classes_dotted_names:
            parent_dotted_name, _ = split_of_last_part(dotted_name)
            try:
                sibling_dotted_names = namespace_tree[parent_dotted_name]
            except KeyError:
                # no siblings (built-in module or sth)
                sibling_dotted_names = []
            tag = self.build_class_view(dotted_name)
            nav = self.build_navigation(parent_dotted_name,
                                        sibling_dotted_names, dotted_name)
            reltargetpath = "api/%s.html" % (dotted_name,)
            self.linker.set_link(dotted_name, reltargetpath)
            passed.append((dotted_name, tag, nav, reltargetpath))
            method_dotted_names = ['%s.%s' % (dotted_name, method_name) for
                                   method_name in
                                   self.dsa.get_class_methods(dotted_name)]
            methodsdata += self.prepare_method_pages(namespace_tree,
                                                     method_dotted_names)
        return passed, methodsdata

    def build_class_pages(self, data, project):
        """ build the full api pages for a set of classes """
        for dotted_name, tag, nav, reltargetpath in data:
            title = 'api documentation for %s' % (dotted_name,)
            self.write_page(title, reltargetpath, project, tag, nav)

    def prepare_method_pages(self, namespace_tree, method_dotted_names):
        # XXX note that even though these pages are still built, there's no nav
        # pointing to them anymore...
        passed = []
        for dotted_name in method_dotted_names:
            parent_dotted_name, _ = split_of_last_part(dotted_name)
            module_dotted_name, _ = split_of_last_part(parent_dotted_name)
            sibling_dotted_names = namespace_tree[module_dotted_name]
            tag = self.build_callable_view(dotted_name)
            nav = self.build_navigation(parent_dotted_name,
                                        sibling_dotted_names, dotted_name)
            reltargetpath = "api/%s.html" % (dotted_name,)
            self.linker.set_link(dotted_name, reltargetpath)
            passed.append((dotted_name, tag, nav, reltargetpath))
        return passed

    def build_method_pages(self, data, project):
        for dotted_name, tag, nav, reltargetpath in data:
            title = 'api documentation for %s' % (dotted_name,)
            self.write_page(title, reltargetpath, project, tag, nav)

    def prepare_function_pages(self, namespace_tree, method_dotted_names):
        passed = []
        for dotted_name in method_dotted_names:
            # XXX should we create a build_function_view instead?
            parent_dotted_name, _ = split_of_last_part(dotted_name)
            sibling_dotted_names = namespace_tree[parent_dotted_name]
            tag = self.build_callable_view(dotted_name)
            nav = self.build_navigation(parent_dotted_name,
                                        sibling_dotted_names, dotted_name)
            reltargetpath = "api/%s.html" % (dotted_name,)
            self.linker.set_link(dotted_name, reltargetpath)
            passed.append((dotted_name, tag, nav, reltargetpath))
        return passed

    def build_function_pages(self, data, project):
        for dotted_name, tag, nav, reltargetpath in data:
            title = 'api documentation for %s' % (dotted_name,)
            self.write_page(title, reltargetpath, project, tag, nav)

    def prepare_namespace_pages(self, namespace_tree):
        passed = []
        module_name = self.dsa.get_module_name().split('/')[-1]

        names = namespace_tree.keys()
        names.sort()
        for dotted_name in names:
            subitem_dotted_names = namespace_tree[dotted_name]
            tag = self.build_namespace_view(dotted_name, subitem_dotted_names)
            nav = self.build_navigation(dotted_name, subitem_dotted_names,
                                        dotted_name)
            if dotted_name == '':
                reltargetpath = 'api/index.html'
            else:
                reltargetpath = 'api/%s.html' % (dotted_name,)
            self.linker.set_link(dotted_name, reltargetpath)
            passed.append((dotted_name, tag, nav, reltargetpath))
        return passed

    def build_namespace_pages(self, data, project):
        for dotted_name, tag, nav, reltargetpath in data:
            if dotted_name == '':
                dotted_name = self.dsa.get_module_name().split('/')[-1]
            title = 'index of %s namespace' % (dotted_name,)
            self.write_page(title, reltargetpath, project, tag, nav)

    def build_navigation(self, dotted_name, item_dotted_names, selection):
        navitems = []

        # top namespace, index.html
        module_name = self.dsa.get_module_name().split('/')[-1]
        navitems.append(build_navitem_html(self.linker, module_name, '', 0,
                                           (selection == '')))

        indent = 1
        path = dotted_name.split('.')
        if dotted_name != '':
            # build html for each item in path to dotted_name item
            for i in xrange(len(path)):
                name = path[i]
                item_dotted_name = '.'.join(path[:i+1])
                selected = (selection == item_dotted_name)
                navitems.append(build_navitem_html(self.linker, name,
                                                   item_dotted_name, indent,
                                                   selected))
                indent += 1

        # build sub items of dotted_name item
        for item_dotted_name in py.builtin.sorted(item_dotted_names):
            itemname = item_dotted_name.split('.')[-1]
            if is_private(itemname):
                continue
            selected = (item_dotted_name == selection)
            navitems.append(build_navitem_html(self.linker, itemname,
                                               item_dotted_name, indent,
                                               selected))
        return H.Navigation(*navitems)

    def build_callable_value_description(self, dotted_name):
        args, retval = self.dsa.get_function_signature(dotted_name)
        valuedesc = H.ValueDescList()
        for name, _type in args + [('return value', retval)]:
            l = self.process_type_link(_type)
            items = []
            next = "%s :: " % name
            for item in l:
                if isinstance(item, str):
                    next += item
                else:
                    if next:
                        items.append(next)
                        next = ""
                    items.append(item)
            if next:
                items.append(next)
            valuedesc.append(H.ValueDescItem(*items))
        return H.div(H.div('where:'), valuedesc)

    def process_type_link(self, _type):
        # now we do simple type dispatching and provide a link in this case
        lst = []
        data = self.dsa.get_type_desc(_type)
        if not data:
            for i in _type.striter():
                if isinstance(i, str):
                    lst.append(i)
                else:
                    lst += self.process_type_link(i)
            return lst
        name, _desc_type, is_degenerated = data
        if not is_degenerated:
            linktarget = self.linker.get_lazyhref(name)
            lst.append(H.a(str(_type), href=linktarget))
        else:
            # we should provide here some way of linking to sourcegen directly
            lst.append(name)
        return lst
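To make the namespace-tree shape concrete, here is a small sketch of what `create_namespace_tree` returns for a couple of dotted names, derived directly from the function's logic above (the input names are illustrative):

    from py.__.apigen.htmlgen import create_namespace_tree

    tree = create_namespace_tree(['py.test.raises', 'py.path.local'])
    assert tree[''] == ['py']                      # root namespace
    assert tree['py'] == ['py.test', 'py.path']    # direct children
    assert tree['py.test'] == ['py.test.raises']
    assert tree['py.path'] == ['py.path.local']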

@@ -0,0 +1,23 @@
""" layout definition for generating api/source documents

    this is the place where customization can be done
"""

import py
from py.__.doc.confrest import Page

class LayoutPage(Page):
    """ this provides the layout and style information """

    def __init__(self, *args, **kwargs):
        self.nav = kwargs.pop('nav')
        super(LayoutPage, self).__init__(*args, **kwargs)

    def set_content(self, contentel):
        self.contentspace.append(contentel)

    def fill(self):
        super(LayoutPage, self).fill()
        self.menubar[:] = []
        self.menubar.append(self.nav)

@@ -0,0 +1,75 @@
import py
html = py.xml.html

def getrelfspath(dotted_name):
    # XXX need to make sure its imported on non-py lib
    return eval(dotted_name, {"py": py})

class LazyHref(object):
    def __init__(self, linker, linkid):
        self._linker = linker
        self._linkid = linkid

    def __unicode__(self):
        return unicode(self._linker.get_target(self._linkid))

class Linker(object):
    fromlocation = None

    def __init__(self):
        self._linkid2target = {}

    def get_lazyhref(self, linkid):
        return LazyHref(self, linkid)

    def set_link(self, linkid, target):
        assert linkid not in self._linkid2target, \
               'linkid %r already used' % (linkid,)
        self._linkid2target[linkid] = target

    def get_target(self, linkid):
        linktarget = self._linkid2target[linkid]
        if self.fromlocation is not None:
            linktarget = relpath(self.fromlocation, linktarget)
        return linktarget

    def call_withbase(self, base, func, *args, **kwargs):
        assert self.fromlocation is None
        self.fromlocation = base
        try:
            return func(*args, **kwargs)
        finally:
            del self.fromlocation

def relpath(p1, p2, sep='/', back='..'):
    if (p1.startswith(sep) ^ p2.startswith(sep)):
        raise ValueError("mixed absolute relative path: %r -> %r" %(p1, p2))
    fromlist = p1.split(sep)
    tolist = p2.split(sep)

    # AA
    # AA BB -> AA/BB
    #
    # AA BB
    # AA CC -> CC
    #
    # AA BB
    # AA    -> ../AA

    diffindex = 0
    for x1, x2 in zip(fromlist, tolist):
        if x1 != x2:
            break
        diffindex += 1
    commonindex = diffindex - 1

    fromlist_diff = fromlist[diffindex:]
    tolist_diff = tolist[diffindex:]

    if not fromlist_diff:
        return sep.join(tolist[commonindex:])
    backcount = len(fromlist_diff)
    if tolist_diff:
        return sep.join([back,]*(backcount-1) + tolist_diff)
    return sep.join([back,]*(backcount) + tolist[commonindex:])
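The comment table inside `relpath` reads as executable examples; a short sketch (the stylesheet case mirrors how `AbstractPageBuilder.write_page` uses it, with an illustrative page name):

    from py.__.apigen.linker import relpath

    assert relpath('AA', 'AA/BB') == 'AA/BB'      # target below the source
    assert relpath('AA/BB', 'AA/CC') == 'CC'      # sibling of the source file
    assert relpath('AA/BB', 'AA') == '../AA'      # target above the source
    # typical use: a stylesheet link relative to a generated page
    assert relpath('api/py.test.html', 'style.css') == '../style.css'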

@@ -0,0 +1,65 @@
""" this contains the code that actually builds the pages using layout.py

    building the docs happens in two passes: the first one takes care of
    collecting contents and navigation items, the second builds the actual
    HTML
"""

import py
from layout import LayoutPage

class Project(py.__.doc.confrest.Project):
    """ a full project

        this takes care of storing information on the first pass, and building
        pages + indexes on the second
    """

    def __init__(self):
        self.content_items = {}

    def add_item(self, path, content):
        """ add a single item (page)

            path is a (relative) path to the object, used for building links
            and navigation

            content is an instance of some py.xml.html item
        """
        assert path not in self.content_items, 'duplicate path %s' % (path,)
        self.content_items[path] = content

    def build(self, outputpath):
        """ convert the tree to actual HTML

            uses the LayoutPage class below for each page and takes care of
            building index documents for the root and each sub directory
        """
        opath = py.path.local(outputpath)
        opath.ensure(dir=True)
        paths = self.content_items.keys()
        paths.sort()
        for path in paths:
            # build the page using the LayoutPage class
            page = self.Page(self, path, stylesheeturl=self.stylesheet)
            page.contentspace.append(self.content_items[path])
            ipath = opath.join(path)
            if not ipath.dirpath().check():
                # XXX create index.html(?)
                ipath.ensure(file=True)
            ipath.write(page.unicode().encode(self.encoding))

    def process(self, txtpath):
        """ this allows using the project from confrest """
        # XXX not interesting yet, but who knows later (because of the
        # cool nav)

if __name__ == '__main__':
    # XXX just to have an idea of how to use this...
    proj = Project()
    here = py.path.local('.')
    for fpath in here.visit():
        if fpath.check(file=True):
            proj.add_item(fpath, convert_to_html_somehow(fpath))
    proj.build()

@@ -0,0 +1,524 @@

""" Generating ReST output (raw, not python)
out of data that we know about function calls
"""

import py
import sys
import re

from py.__.apigen.tracer.docstorage import DocStorageAccessor
from py.__.rest.rst import * # XXX Maybe we should list it here
from py.__.apigen.tracer import model
from py.__.rest.transform import RestTransformer

def split_of_last_part(name):
    name = name.split(".")
    return ".".join(name[:-1]), name[-1]

class AbstractLinkWriter(object):
    """ Class implementing writing links to source code.
    There should exist various classes for that, different for Trac,
    different for CVSView, etc.
    """
    def getlinkobj(self, obj, name):
        return None

    def getlink(self, filename, lineno, funcname):
        raise NotImplementedError("Abstract link writer")

    def getpkgpath(self, filename):
        # XXX: very simple thing
        path = py.path.local(filename).dirpath()
        while 1:
            try:
                path.join('__init__.py').stat()
                path = path.dirpath()
            except py.error.ENOENT:
                return path

class ViewVC(AbstractLinkWriter):
    """ Link writer for ViewVC version control viewer
    """
    def __init__(self, basepath):
        # XXX: should try to guess from a working copy of svn
        self.basepath = basepath

    def getlink(self, filename, lineno, funcname):
        path = str(self.getpkgpath(filename))
        assert filename.startswith(path), (
            "%s does not belong to package %s" % (filename, path))
        relname = filename[len(path):]
        if relname.endswith('.pyc'):
            relname = relname[:-1]
        sep = py.std.os.sep
        if sep != '/':
            relname = relname.replace(sep, '/')
        return ('%s:%s' % (filename, lineno),
                self.basepath + relname[1:] + '?view=markup')

class SourceView(AbstractLinkWriter):
    def __init__(self, baseurl):
        self.baseurl = baseurl
        if self.baseurl.endswith("/"):
            self.baseurl = baseurl[:-1]

    def getlink(self, filename, lineno, funcname):
        if filename.endswith('.pyc'):
            filename = filename[:-1]
        if filename is None:
            return "<UNKNOWN>:%s" % funcname, ""
        pkgpath = self.getpkgpath(filename)
        if not filename.startswith(str(pkgpath)):
            # let's leave it
            return "<UNKNOWN>:%s" % funcname, ""

        relname = filename[len(str(pkgpath)):]
        if relname.endswith('.pyc'):
            relname = relname[:-1]
        sep = py.std.os.sep
        if sep != '/':
            relname = relname.replace(sep, '/')
        return "%s:%s" % (relname, funcname),\
               "%s%s#%s" % (self.baseurl, relname, funcname)

    def getlinkobj(self, name, obj):
        try:
            filename = sys.modules[obj.__module__].__file__
            return self.getlink(filename, 0, name)
        except AttributeError:
            return None

class DirectPaste(AbstractLinkWriter):
    """ No-link writer (inliner)
    """
    def getlink(self, filename, lineno, funcname):
        return ('%s:%s' % (filename, lineno), "")

class DirectFS(AbstractLinkWriter):
    """ Creates links to the files on the file system (for local docs)
    """
    def getlink(self, filename, lineno, funcname):
        return ('%s:%s' % (filename, lineno), 'file://%s' % (filename,))

class PipeWriter(object):
    def __init__(self, output=sys.stdout):
        self.output = output

    def write_section(self, name, rest):
        text = "Contents of file %s.txt:" % (name,)
        self.output.write(text + "\n")
        self.output.write("=" * len(text) + "\n")
        self.output.write("\n")
        self.output.write(rest.text() + "\n")

    def getlink(self, type, targetname, targetfilename):
        return '%s.txt' % (targetfilename,)

class DirWriter(object):
    def __init__(self, directory=None):
        if directory is None:
            self.directory = py.test.ensuretemp("rstoutput")
        else:
            self.directory = py.path.local(directory)

    def write_section(self, name, rest):
        filename = '%s.txt' % (name,)
        self.directory.ensure(filename).write(rest.text())

    def getlink(self, type, targetname, targetfilename):
        # we assume the result will get converted to HTML...
        return '%s.html' % (targetfilename,)

class FileWriter(object):
    def __init__(self, fpath):
        self.fpath = fpath
        self.fp = fpath.open('w+')
        self._defined_targets = []

    def write_section(self, name, rest):
        self.fp.write(rest.text())
        self.fp.flush()

    def getlink(self, type, targetname, targetbasename):
        # XXX problem: because of docutils' named anchor generation scheme,
        # a method Foo.__init__ would clash with Foo.init (underscores are
        # removed)
        if targetname in self._defined_targets:
            return None
        self._defined_targets.append(targetname)
        targetname = targetname.lower().replace('.', '-').replace('_', '-')
        while '--' in targetname:
            targetname = targetname.replace('--', '-')
        if targetname.startswith('-'):
            targetname = targetname[1:]
        if targetname.endswith('-'):
            targetname = targetname[:-1]
        return '#%s-%s' % (type, targetname)

class HTMLDirWriter(object):
    def __init__(self, indexhandler, filehandler, directory=None):
        self.indexhandler = indexhandler
        self.filehandler = filehandler
        if directory is None:
            self.directory = py.test.ensuretemp('dirwriter')
        else:
            self.directory = py.path.local(directory)

    def write_section(self, name, rest):
        if name == 'index':
            handler = self.indexhandler
        else:
            handler = self.filehandler
        h = handler(name)
        t = RestTransformer(rest)
        t.parse(h)
        self.directory.ensure('%s.html' % (name,)).write(h.html)

    def getlink(self, type, targetname, targetfilename):
        return '%s.html' % (targetfilename,)

class RestGen(object):
    def __init__(self, dsa, linkgen, writer=PipeWriter()):
        #assert isinstance(linkgen, DirectPaste), (
        #    "Cannot use different linkgen by now")
        self.dsa = dsa
        self.linkgen = linkgen
        self.writer = writer
        self.tracebacks = {}

    def write(self):
        """write the data to the writer"""
        modlist = self.get_module_list()
        classlist = self.get_class_list(module='')
        funclist = self.get_function_list()
        modlist.insert(0, ['', classlist, funclist])

        indexrest = self.build_index([t[0] for t in modlist])
        self.writer.write_section('index', Rest(*indexrest))

        self.build_modrest(modlist)

    def build_modrest(self, modlist):
        modrest = self.build_modules(modlist)
        for name, rest, classlist, funclist in modrest:
            mname = name
            if mname == '':
                mname = self.dsa.get_module_name()
            self.writer.write_section('module_%s' % (mname,),
                                      Rest(*rest))
            for cname, crest, cfunclist in classlist:
                self.writer.write_section('class_%s' % (cname,),
                                          Rest(*crest))
                for fname, frest, tbdata in cfunclist:
                    self.writer.write_section('method_%s' % (fname,),
                                              Rest(*frest))
                    for tbname, tbrest in tbdata:
                        self.writer.write_section('traceback_%s' % (tbname,),
                                                  Rest(*tbrest))
            for fname, frest, tbdata in funclist:
                self.writer.write_section('function_%s' % (fname,),
                                          Rest(*frest))
                for tbname, tbrest in tbdata:
                    self.writer.write_section('traceback_%s' % (tbname,),
                                              Rest(*tbrest))

    def build_classrest(self, classlist):
        classrest = self.build_classes(classlist)
        for cname, rest, cfunclist in classrest:
            self.writer.write_section('class_%s' % (cname,),
                                      Rest(*rest))
            for fname, rest in cfunclist:
                self.writer.write_section('method_%s' % (fname,),
                                          Rest(*rest))

    def build_funcrest(self, funclist):
        funcrest = self.build_functions(funclist)
        for fname, rest, tbdata in funcrest:
            self.writer.write_section('function_%s' % (fname,),
                                      Rest(*rest))
            for tbname, tbrest in tbdata:
                self.writer.write_section('traceback_%s' % (tbname,),
                                          Rest(*tbrest))

    def build_index(self, modules):
        rest = [Title('index', abovechar='=', belowchar='=')]
        rest.append(Title('exported modules:', belowchar='='))
        for module in modules:
            mtitle = module
            if module == '':
                module = self.dsa.get_module_name()
                mtitle = '%s (top-level)' % (module,)
            linktarget = self.writer.getlink('module', module,
                                             'module_%s' % (module,))
            rest.append(ListItem(Link(mtitle, linktarget)))
        return rest

    def build_modules(self, modules):
        ret = []
        for module, classes, functions in modules:
            mname = module
            if mname == '':
                mname = self.dsa.get_module_name()
            rest = [Title('module: %s' % (mname,), abovechar='=',
                          belowchar='='),
                    Title('index:', belowchar='=')]
            if classes:
                rest.append(Title('classes:', belowchar='^'))
                for cls, bases, cfunclist in classes:
                    linktarget = self.writer.getlink('class', cls,
                                                     'class_%s' % (cls,))
                    rest.append(ListItem(Link(cls, linktarget)))
            classrest = self.build_classes(classes)
            if functions:
                rest.append(Title('functions:', belowchar='^'))
                for func in functions:
                    if module:
                        func = '%s.%s' % (module, func)
                    linktarget = self.writer.getlink('function',
                                                     func,
                                                     'function_%s' % (func,))
                    rest.append(ListItem(Link(func, linktarget)))
            funcrest = self.build_functions(functions, module, False)
            ret.append((module, rest, classrest, funcrest))
        return ret

    def build_classes(self, classes):
        ret = []
        for cls, bases, functions in classes:
            rest = [Title('class: %s' % (cls,), belowchar='='),
                    LiteralBlock(self.dsa.get_doc(cls))]
            # link to source
            link_to_class = self.linkgen.getlinkobj(cls, self.dsa.get_obj(cls))
            if link_to_class:
                rest.append(Paragraph(Text("source: "), Link(*link_to_class)))

            if bases:
                rest.append(Title('base classes:', belowchar='^'))
                for base in bases:
                    rest.append(ListItem(self.make_class_link(base)))
            if functions:
                rest.append(Title('functions:', belowchar='^'))
                for (func, origin) in functions:
                    linktarget = self.writer.getlink('method',
                                                     '%s.%s' % (cls, func),
                                                     'method_%s.%s' % (cls,
                                                                       func))
                    rest.append(ListItem(Link('%s.%s' % (cls, func),
                                              linktarget)))
            funcrest = self.build_functions(functions, cls, True)
            ret.append((cls, rest, funcrest))
        return ret

    def build_functions(self, functions, parent='', methods=False):
        ret = []
        for function in functions:
            origin = None
            if methods:
                function, origin = function
            if parent:
                function = '%s.%s' % (parent, function)
            rest, tbrest = self.write_function(function, origin=origin,
                                               ismethod=methods)
            ret.append((function, rest, tbrest))
        return ret

    def get_module_list(self):
        visited = []
        ret = []
        for name in self.dsa.get_class_names():
            if '.' in name:
                module, classname = split_of_last_part(name)
                if module in visited:
                    continue
                visited.append(module)
                ret.append((module, self.get_class_list(module),
                            self.get_function_list(module)))
        return ret

    def get_class_list(self, module):
        ret = []
        for name in self.dsa.get_class_names():
            classname = name
            if '.' in name:
                classmodule, classname = split_of_last_part(name)
                if classmodule != module:
                    continue
            elif module != '':
                continue
            bases = self.dsa.get_possible_base_classes(name)
            ret.append((name, bases, self.get_method_list(name)))
        return ret

    def get_function_list(self, module=''):
        ret = []
        for name in self.dsa.get_function_names():
            funcname = name
            if '.' in name:
                funcpath, funcname = split_of_last_part(name)
                if funcpath != module:
                    continue
            elif module != '':
                continue
            ret.append(funcname)
        return ret

    def get_method_list(self, classname):
        methodnames = self.dsa.get_class_methods(classname)
        return [(mn, self.dsa.get_method_origin('%s.%s' % (classname, mn)))
                for mn in methodnames]

    def process_type_link(self, _type):
        # now we do simple type dispatching and provide a link in this case
        lst = []
        data = self.dsa.get_type_desc(_type)
        if not data:
            for i in _type.striter():
                if isinstance(i, str):
                    lst.append(i)
                else:
                    lst += self.process_type_link(i)
            return lst
        name, _desc_type, is_degenerated = data
        if not is_degenerated:
            linktarget = self.writer.getlink(_desc_type, name,
                                             '%s_%s' % (_desc_type, name))
            lst.append(Link(str(_type), linktarget))
        else:
            # we should provide here some way of linking to sourcegen directly
            lst.append(name)
        return lst

    def write_function(self, functionname, origin=None, ismethod=False,
                       belowchar='-'):
        # XXX I think the docstring should either be split on \n\n and cleaned
        # from indentation, or treated as ReST too (although this is obviously
        # dangerous for non-ReST docstrings)...
        if ismethod:
            title = Title('method: %s' % (functionname,), belowchar=belowchar)
        else:
            title = Title('function: %s' % (functionname,),
                          belowchar=belowchar)

        lst = [title, LiteralBlock(self.dsa.get_doc(functionname)),
               LiteralBlock(self.dsa.get_function_definition(functionname))]
        link_to_function = self.linkgen.getlinkobj(functionname, self.dsa.get_obj(functionname))
        if link_to_function:
            lst.insert(1, Paragraph(Text("source: "), Link(*link_to_function)))

        opar = Paragraph(Strong('origin'), ":")
        if origin:
            opar.add(self.make_class_link(origin))
        else:
            opar.add(Text('<UNKNOWN>'))
        lst.append(opar)

        lst.append(Paragraph(Strong("where"), ":"))
        args, retval = self.dsa.get_function_signature(functionname)
        for name, _type in args + [('return value', retval)]:
            l = self.process_type_link(_type)
            items = []
            next = "%s :: " % name
            for item in l:
                if isinstance(item, str):
                    next += item
                else:
                    if next:
                        items.append(Text(next))
                        next = ""
                    items.append(item)
            if next:
                items.append(Text(next))
            lst.append(ListItem(*items))

        local_changes = self.dsa.get_function_local_changes(functionname)
        if local_changes:
            lst.append(Paragraph(Strong('changes in __dict__ after execution'), ":"))
            for k, changeset in local_changes.iteritems():
                lst.append(ListItem('%s: %s' % (k, ', '.join(changeset))))

        exceptions = self.dsa.get_function_exceptions(functionname)
        if exceptions:
            lst.append(Paragraph(Strong('exceptions that might appear during '
                                        'execution'), ":"))
            for exc in exceptions:
                lst.append(ListItem(exc))
        # XXX: right now we leave it alone

        # XXX missing implementation of dsa.get_function_location()
        #filename, lineno = self.dsa.get_function_location(functionname)
        #linkname, linktarget = self.linkgen.getlink(filename, lineno)
        #if linktarget:
        #    lst.append(Paragraph("Function source: ",
        #               Link(linkname, linktarget)))
        #else:
        source = self.dsa.get_function_source(functionname)
        if source:
            lst.append(Paragraph(Strong('function source'), ":"))
            lst.append(LiteralBlock(source))

        # call sites..
        call_sites = self.dsa.get_function_callpoints(functionname)
        tbrest = []
        if call_sites:
            call_site_title = Title("call sites:", belowchar='+')
            lst.append(call_site_title)

        # we have to think differently here. I would go for:
        # 1. A quick'n'dirty statement where call has appeared first
        #    (topmost)
        # 2. Link to short traceback
        # 3. Link to long traceback
        for call_site, _ in call_sites:
            fdata, tbdata = self.call_site_link(functionname, call_site)
            lst += fdata
            tbrest.append(tbdata)

        return lst, tbrest

    def call_site_link(self, functionname, call_site):
        tbid, tbrest = self.gen_traceback(functionname, call_site)
        tbname = '%s.%s' % (functionname, tbid)
        linktarget = self.writer.getlink('traceback',
                                         tbname,
                                         'traceback_%s' % (tbname,))
        frest = [Paragraph("called in %s" % call_site[0].filename),
                 Paragraph(Link("traceback %s" % (tbname,),
                                linktarget))]
        return frest, (tbname, tbrest)

    def gen_traceback(self, funcname, call_site):
        tbid = len(self.tracebacks.setdefault(funcname, []))
        self.tracebacks[funcname].append(call_site)
        tbrest = [Title('traceback for %s' % (funcname,))]
        for line in call_site:
            lineno = line.lineno - line.firstlineno
            linkname, linktarget = self.linkgen.getlink(line.filename,
                                                        line.lineno + 1,
                                                        funcname)
            if linktarget:
                tbrest.append(Paragraph(Link(linkname, linktarget)))
            else:
                tbrest.append(Paragraph(linkname))
            try:
                source = line.source
            except IOError:
                source = "*cannot get source*"
            mangled = []
            for i, sline in enumerate(str(source).split('\n')):
                if i == lineno:
                    line = '-> %s' % (sline,)
                else:
                    line = '   %s' % (sline,)
                mangled.append(line)
            tbrest.append(LiteralBlock('\n'.join(mangled)))
        return tbid, tbrest

    def make_class_link(self, desc):
        if not desc or desc.is_degenerated:
            # create dummy link here, or no link at all
            return Strong(desc.name)
        else:
            linktarget = self.writer.getlink('class', desc.name,
                                             'class_%s' % (desc.name,))
            return Link(desc.name, linktarget)
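A quick sketch of the `split_of_last_part` helper that the section and page naming above depends on (the dotted names are illustrative):

    from py.__.apigen.rest.genrest import split_of_last_part

    assert split_of_last_part('py.test.raises') == ('py.test', 'raises')
    assert split_of_last_part('toplevel') == ('', 'toplevel')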

@@ -0,0 +1,84 @@
from py.__.rest.transform import HTMLHandler, entitize
from py.xml import html, raw

class PageHandler(HTMLHandler):
    def startDocument(self):
        super(PageHandler, self).startDocument()
        self.head.append(html.link(type='text/css', rel='stylesheet',
                                   href='style.css'))
        title = self.title[0]
        breadcrumb = ''.join([unicode(el) for el in self.breadcrumb(title)])
        self.body.append(html.div(raw(breadcrumb), class_='breadcrumb'))

    def handleLink(self, text, target):
        self.tagstack[-1].append(html.a(text, href=target,
                                        target='content'))

    def breadcrumb(self, title):
        if title != 'index':
            type, path = title.split('_', 1)
            path = path.split('.')
            module = None
            cls = None
            func = None
            meth = None
            if type == 'module':
                module = '.'.join(path)
            elif type == 'class':
                module = '.'.join(path[:-1])
                cls = path[-1]
            elif type == 'method':
                module = '.'.join(path[:-2])
                cls = path[-2]
                meth = path[-1]
            else:
                module = '.'.join(path[:-1])
                func = path[-1]
            if module:
                yield html.a(module, href='module_%s.html' % (module,))
                if type != 'module':
                    yield u'.'
            if cls:
                s = cls
                if module:
                    s = '%s.%s' % (module, cls)
                yield html.a(cls, href='class_%s.html' % (s,))
                if type != 'class':
                    yield u'.'
            if meth:
                s = '%s.%s' % (cls, meth)
                if module:
                    s = '%s.%s.%s' % (module, cls, meth)
                yield html.a(meth, href='method_%s.html' % (s,))
            if func:
                s = func
                if module:
                    s = '%s.%s' % (module, func)
                yield html.a(func, href='function_%s.html' % (s,))

class IndexHandler(PageHandler):
    ignore_text = False

    def startDocument(self):
        super(IndexHandler, self).startDocument()
        self.head.append(html.script(type='text/javascript', src='apigen.js'))
        self._push(html.div(id='sidebar'))

    def endDocument(self):
        maindiv = html.div(id="main")
        maindiv.append(html.div(id="breadcrumb"))
        maindiv.append(html.iframe(name='content', id='content',
                                   src='module_py.html'))
        self.body.append(maindiv)

    def startTitle(self, depth):
        self.ignore_text = True

    def endTitle(self, depth):
        self.ignore_text = False

    def handleText(self, text):
        if self.ignore_text:
            return
        super(IndexHandler, self).handleText(text)
@ -0,0 +1,10 @@
|
|||
class SomeClass(object):
|
||||
"""Some class definition"""
|
||||
|
||||
def __init__(self, a):
|
||||
self.a = a
|
||||
|
||||
def method(self, a, b, c):
|
||||
"""method docstring"""
|
||||
return a + b + c
|
||||
|
|
@ -0,0 +1,21 @@
|
|||
from somemodule import SomeClass
|
||||
|
||||
class SomeSubClass(SomeClass):
|
||||
"""Some subclass definition"""
|
||||
|
||||
def fun(a, b, c):
|
||||
"""Some docstring
|
||||
|
||||
Let's make it span a couple of lines to be interesting...
|
||||
|
||||
Note:
|
||||
|
||||
* rest
|
||||
* should
|
||||
* be
|
||||
* supported
|
||||
* or
|
||||
* ignored...
|
||||
"""
|
||||
return "d"
|
||||
|
|
@ -0,0 +1,41 @@
|
|||
import py
|
||||
from py.__.apigen.rest.htmlhandlers import PageHandler
|
||||
|
||||
def test_breadcrumb():
|
||||
h = PageHandler()
|
||||
for fname, expected in [
|
||||
('module_py', '<a href="module_py.html">py</a>'),
|
||||
('module_py.test',
|
||||
'<a href="module_py.test.html">py.test</a>'),
|
||||
('class_py.test',
|
||||
('<a href="module_py.html">py</a>.'
|
||||
'<a href="class_py.test.html">test</a>')),
|
||||
('class_py.test.foo',
|
||||
('<a href="module_py.test.html">py.test</a>.'
|
||||
'<a href="class_py.test.foo.html">foo</a>')),
|
||||
('class_py.test.foo.bar',
|
||||
('<a href="module_py.test.foo.html">py.test.foo</a>.'
|
||||
'<a href="class_py.test.foo.bar.html">bar</a>')),
|
||||
('function_foo', '<a href="function_foo.html">foo</a>'),
|
||||
('function_foo.bar',
|
||||
('<a href="module_foo.html">foo</a>.'
|
||||
'<a href="function_foo.bar.html">bar</a>')),
|
||||
('function_foo.bar.baz',
|
||||
('<a href="module_foo.bar.html">foo.bar</a>.'
|
||||
'<a href="function_foo.bar.baz.html">baz</a>')),
|
||||
('method_foo.bar',
|
||||
('<a href="class_foo.html">foo</a>.'
|
||||
'<a href="method_foo.bar.html">bar</a>')),
|
||||
('method_foo.bar.baz',
|
||||
('<a href="module_foo.html">foo</a>.'
|
||||
'<a href="class_foo.bar.html">bar</a>.'
|
||||
'<a href="method_foo.bar.baz.html">baz</a>')),
|
||||
('method_foo.bar.baz.qux',
|
||||
('<a href="module_foo.bar.html">foo.bar</a>.'
|
||||
'<a href="class_foo.bar.baz.html">baz</a>.'
|
||||
'<a href="method_foo.bar.baz.qux.html">qux</a>')),
|
||||
]:
|
||||
html = ''.join([unicode(el) for el in h.breadcrumb(fname)])
|
||||
print fname
|
||||
print html
|
||||
assert html == expected
|
|
@ -0,0 +1,486 @@
|
|||
|
||||
""" tests document generation
|
||||
"""
|
||||
|
||||
import py
|
||||
from StringIO import StringIO
|
||||
|
||||
from py.__.apigen.rest.genrest import ViewVC, RestGen, PipeWriter, \
|
||||
DirWriter, FileWriter, \
|
||||
DirectPaste, DirectFS, \
|
||||
HTMLDirWriter, SourceView
|
||||
from py.__.apigen.tracer.tracer import Tracer
|
||||
from py.__.apigen.tracer.docstorage import DocStorage, DocStorageAccessor
|
||||
from py.__.apigen.tracer.permastore import PermaDocStorage
|
||||
import pickle
|
||||
|
||||
from py.__.apigen.tracer.testing.runtest import cut_pyc
|
||||
from py.__.doc.conftest import genlinkchecks
|
||||
from py.__.rest.rst import Rest, Paragraph
|
||||
from py.__.rest.transform import HTMLHandler
|
||||
# XXX: UUuuuuuuuuuuuuuuuuuuuuuuu, dangerous import
|
||||
|
||||
sorted = py.builtin.sorted
|
||||
|
||||
def _nl(s):
|
||||
"""normalize newlines (converting to \n)"""
|
||||
s = s.replace('\r\n', '\n')
|
||||
s = s.replace('\r', '\n')
|
||||
return s
|
||||
|
||||
def setup_module(mod):
|
||||
mod.temppath = py.test.ensuretemp('restgen')
|
||||
|
||||
def fun_():
|
||||
pass
|
||||
|
||||
class SomeClass(object):
|
||||
"""Some class definition"""
|
||||
|
||||
def __init__(self, a):
|
||||
self.a = a
|
||||
|
||||
def method(self, a, b, c):
|
||||
"""method docstring"""
|
||||
return a + b + c
|
||||
|
||||
class SomeSubClass(SomeClass):
|
||||
"""Some subclass definition"""
|
||||
|
||||
def fun(a, b, c):
|
||||
"""Some docstring
|
||||
|
||||
Let's make it span a couple of lines to be interesting...
|
||||
|
||||
Note:
|
||||
|
||||
* rest
|
||||
* should
|
||||
* be
|
||||
* supported
|
||||
* or
|
||||
* ignored...
|
||||
"""
|
||||
return "d"
|
||||
|
||||
def test_direct_link():
|
||||
fname = cut_pyc(__file__)
|
||||
title, link = DirectPaste().getlink(fname, 2, "")
|
||||
assert title == '%s:%s' % (fname, 2)
|
||||
assert link == ''
|
||||
|
||||
def test_viewvc_link():
|
||||
vcview = ViewVC("http://codespeak.net/viewvc/")
|
||||
fname = cut_pyc(__file__)
|
||||
title, link = vcview.getlink(fname, 0, "")
|
||||
assert title == '%s:%s' % (fname, 0)
|
||||
assert link == ('http://codespeak.net/viewvc/py/apigen/rest/'
|
||||
'testing/test_rest.py?view=markup')
|
||||
|
||||
def test_fs_link():
|
||||
title, link = DirectFS().getlink('/foo/bar/baz.py', 100, "func")
|
||||
assert title == '/foo/bar/baz.py:100'
|
||||
assert link == 'file:///foo/bar/baz.py'
|
||||
|
||||
class WriterTest(object):
|
||||
def get_filled_writer(self, writerclass, *args, **kwargs):
|
||||
dw = writerclass(*args, **kwargs)
|
||||
dw.write_section('foo', Rest(Paragraph('foo data')))
|
||||
dw.write_section('bar', Rest(Paragraph('bar data')))
|
||||
return dw
|
||||
|
||||
class TestDirWriter(WriterTest):
|
||||
def test_write_section(self):
|
||||
tempdir = temppath.ensure('dirwriter', dir=True)
|
||||
dw = self.get_filled_writer(DirWriter, tempdir)
|
||||
fpaths = tempdir.listdir('*.txt')
|
||||
assert len(fpaths) == 2
|
||||
assert sorted([f.basename for f in fpaths]) == ['bar.txt', 'foo.txt']
|
||||
assert _nl(tempdir.join('foo.txt').read()) == 'foo data\n'
|
||||
assert _nl(tempdir.join('bar.txt').read()) == 'bar data\n'
|
||||
|
||||
def test_getlink(self):
|
||||
dw = DirWriter(temppath.join('dirwriter_getlink'))
|
||||
link = dw.getlink('function', 'Foo.bar', 'method_foo_bar')
|
||||
assert link == 'method_foo_bar.html'
|
||||
|
||||
class TestFileWriter(WriterTest):
|
||||
def test_write_section(self):
|
||||
tempfile = temppath.ensure('filewriter', file=True)
|
||||
fw = self.get_filled_writer(FileWriter, tempfile)
|
||||
data = tempfile.read()
|
||||
assert len(data)
|
||||
|
||||
def test_getlink(self):
|
||||
fw = FileWriter(temppath.join('filewriter_getlink'))
|
||||
link = fw.getlink('function', 'Foo.bar', 'method_foo_bar')
|
||||
assert link == '#function-foo-bar'
|
||||
# only produce the same link target once...
|
||||
link = fw.getlink('function', 'Foo.bar', 'method_foo_bar')
|
||||
assert link is None
|
||||
link = fw.getlink('function', 'Foo.__init__', 'method_foo___init__')
|
||||
assert link == '#function-foo-init'
|
||||
|
||||
class TestPipeWriter(WriterTest):
|
||||
def test_write_section(self):
|
||||
s = StringIO()
|
||||
pw = self.get_filled_writer(PipeWriter, s)
|
||||
data = s.getvalue()
|
||||
assert len(data)
|
||||
|
||||
def test_getlink(self):
|
||||
pw = PipeWriter(StringIO())
|
||||
link = pw.getlink('function', 'Foo.bar', 'method_foo_bar')
|
||||
assert link == 'method_foo_bar.txt'
|
||||
|
||||
class TestHTMLDirWriter(WriterTest):
|
||||
def test_write_section(self):
|
||||
tempdir = temppath.ensure('htmldirwriter', dir=1)
|
||||
hdw = self.get_filled_writer(HTMLDirWriter, HTMLHandler, HTMLHandler,
|
||||
tempdir)
|
||||
assert tempdir.join('foo.html').check(file=1)
|
||||
assert tempdir.join('bar.html').check(file=1)
|
||||
assert tempdir.join('foo.html').read().startswith('<html>')
|
||||
|
||||
class TestRest(object):
|
||||
def get_filled_docstorage(self):
|
||||
descs = {'SomeClass': SomeClass,
|
||||
'SomeSubClass': SomeSubClass,
|
||||
'fun':fun}
|
||||
ds = DocStorage().from_dict(descs)
|
||||
t = Tracer(ds)
|
||||
t.start_tracing()
|
||||
s1 = SomeClass("a")
|
||||
fun(1, 2, s1)
|
||||
s2 = SomeSubClass("b")
|
||||
s2.method(1,2,3)
|
||||
fun(1, 3, s2)
|
||||
t.end_tracing()
|
||||
return DocStorageAccessor(ds)
|
||||
|
||||
def get_filled_docstorage_modules(self):
|
||||
import somemodule
|
||||
import someothermodule
|
||||
descs = {
|
||||
'somemodule.SomeClass': somemodule.SomeClass,
|
||||
'someothermodule.SomeSubClass': someothermodule.SomeSubClass,
|
||||
'someothermodule.fun': someothermodule.fun,
|
||||
}
|
||||
ds = DocStorage().from_dict(descs)
|
||||
t = Tracer(ds)
|
||||
t.start_tracing()
|
||||
s1 = somemodule.SomeClass("a")
|
||||
someothermodule.fun(1, 2, s1)
|
||||
s2 = someothermodule.SomeSubClass("b")
|
||||
s2.method(1, 2, 3)
|
||||
someothermodule.fun(1, 3, s2)
|
||||
t.end_tracing()
|
||||
return DocStorageAccessor(ds)
|
||||
|
||||
def check_rest(self, tempdir):
|
||||
from py.__.misc import rest
|
||||
for path in tempdir.listdir('*.txt'):
|
||||
try:
|
||||
rest.process(path)
|
||||
except ImportError:
|
||||
py.test.skip('skipping rest generation because docutils is '
|
||||
'not installed (this is a partial skip, the rest '
|
||||
'of the test was successful)')
|
||||
for path in tempdir.listdir('*.txt'):
|
||||
for item, arg1, arg2, arg3 in genlinkchecks(path):
|
||||
item(arg1, arg2, arg3)
|
||||
|
||||
def test_generation_simple_api(self):
|
||||
ds = self.get_filled_docstorage()
|
||||
lg = DirectPaste()
|
||||
tempdir = temppath.ensure("simple_api", dir=True)
|
||||
r = RestGen(ds, lg, DirWriter(tempdir))
|
||||
r.write()
|
||||
basenames = [p.basename for p in tempdir.listdir('*.txt')]
|
||||
expected = [
|
||||
'class_SomeClass.txt',
|
||||
'class_SomeSubClass.txt',
|
||||
'function_fun.txt',
|
||||
'index.txt',
|
||||
'method_SomeClass.__init__.txt',
|
||||
'method_SomeClass.method.txt',
|
||||
'method_SomeSubClass.__init__.txt',
|
||||
'method_SomeSubClass.method.txt',
|
||||
'module_Unknown module.txt',
|
||||
'traceback_SomeClass.__init__.0.txt',
|
||||
'traceback_SomeSubClass.__init__.0.txt',
|
||||
'traceback_SomeSubClass.method.0.txt',
|
||||
'traceback_fun.0.txt',
|
||||
'traceback_fun.1.txt',
|
||||
]
|
||||
print sorted(basenames)
|
||||
assert sorted(basenames) == expected
|
||||
# now we check out...
|
||||
self.check_rest(tempdir)
|
||||
tempdir = temppath.ensure("simple_api_ps", dir=True)
|
||||
if 0:
|
||||
ps = PermaDocStorage(ds)
|
||||
r = RestGen(ps, lg, DirWriter(tempdir))
|
||||
r.write()
|
||||
basenames = [p.basename for p in tempdir.listdir('*.txt')]
|
||||
assert sorted(basenames) == expected
|
||||
self.check_rest(tempdir)
|
||||
pickle.dumps(ps)
|
||||
|
||||
def test_generation_modules(self):
|
||||
ds = self.get_filled_docstorage_modules()
|
||||
lg = DirectPaste()
|
||||
tempdir = temppath.ensure('module_api', dir=True)
|
||||
r = RestGen(ds, lg, DirWriter(tempdir))
|
||||
r.write()
|
||||
basenames = [p.basename for p in tempdir.listdir('*.txt')]
|
||||
expected = [
|
||||
'class_somemodule.SomeClass.txt',
|
||||
'class_someothermodule.SomeSubClass.txt',
|
||||
'function_someothermodule.fun.txt',
|
||||
'index.txt',
|
||||
'method_somemodule.SomeClass.__init__.txt',
|
||||
'method_somemodule.SomeClass.method.txt',
|
||||
'method_someothermodule.SomeSubClass.__init__.txt',
|
||||
'method_someothermodule.SomeSubClass.method.txt',
|
||||
'module_Unknown module.txt',
|
||||
'module_somemodule.txt',
|
||||
'module_someothermodule.txt',
|
||||
'traceback_somemodule.SomeClass.__init__.0.txt',
|
||||
'traceback_someothermodule.SomeSubClass.__init__.0.txt',
|
||||
'traceback_someothermodule.SomeSubClass.method.0.txt',
|
||||
'traceback_someothermodule.fun.0.txt',
|
||||
'traceback_someothermodule.fun.1.txt',
|
||||
]
|
||||
print sorted(basenames)
|
||||
assert sorted(basenames) == expected
|
||||
|
||||
def test_check_internal_links(self):
|
||||
ds = self.get_filled_docstorage()
|
||||
lg = DirectFS()
|
||||
tempdir = temppath.ensure('internal_links', dir=True)
|
||||
r = RestGen(ds, lg, DirWriter(tempdir))
|
||||
r.write()
|
||||
index = tempdir.join('module_Unknown module.txt')
|
||||
assert index.check(file=True)
|
||||
data = _nl(index.read())
|
||||
assert data.find('.. _`fun`: function_fun.html\n') > -1
|
||||
assert data.find('.. _`fun`: #function-fun\n') == -1
|
||||
|
||||
tempfile = temppath.ensure('internal_links.txt',
|
||||
file=True)
|
||||
r = RestGen(ds, lg, FileWriter(tempfile))
|
||||
r.write()
|
||||
data = _nl(tempfile.read())
|
||||
assert data.find('.. _`fun`: #function-fun\n') > -1
|
||||
assert data.find('.. _`fun`: function_fun.html') == -1
|
||||
tempfile = temppath.ensure("internal_links_ps.txt", file=True)
|
||||
if 0:
|
||||
ps = PermaDocStorage(ds)
|
||||
r = RestGen(ps, lg, FileWriter(tempfile))
|
||||
r.write()
|
||||
data = _nl(tempfile.read())
|
||||
assert data.find('.. _`fun`: #function-fun\n') > -1
|
||||
assert data.find('.. _`fun`: function_fun.html') == -1
|
||||
pickle.dumps(ps)
|
||||
|
||||
def test_check_section_order(self):
|
||||
# we use the previous method's data
|
||||
tempfile = temppath.join('internal_links.txt')
|
||||
if not tempfile.check():
|
||||
py.test.skip('depends on previous test, which failed')
|
||||
data = _nl(tempfile.read())
|
||||
# index should be above the rest
|
||||
assert data.find('classes\\:') > -1
|
||||
assert data.find('classes\\:') < data.find('function\\: fun')
|
||||
assert data.find('classes\\:') < data.find(
|
||||
'class\\: SomeClass')
|
||||
# function definitions should be above class ones
|
||||
assert data.find('function\\: fun') > data.find('class\\: SomeClass')
|
||||
# class method definitions should be below the class defs
|
||||
assert data.find('class\\: SomeClass') < data.find(
|
||||
'method\\: SomeClass.method')
|
||||
# __init__ should be above other methods
|
||||
assert data.find('method\\: SomeClass.\\_\\_init\\_\\_') > -1
|
||||
assert data.find('method\\: SomeClass.\\_\\_init\\_\\_') < data.find(
|
||||
'method\\: SomeClass.method')
|
||||
# base class info
|
||||
assert py.std.re.search(r'class\\\: SomeSubClass.*'
|
||||
r'base classes\\\:\n\^+[\n ]+\* `SomeClass`_.*'
|
||||
r'`SomeSubClass.__init__',
|
||||
data, py.std.re.S)
|
||||
|
||||
def test_som_fun(self):
|
||||
descs = {'fun_': fun_}
|
||||
ds = DocStorage().from_dict(descs)
|
||||
t = Tracer(ds)
|
||||
t.start_tracing()
|
||||
fun_()
|
||||
t.end_tracing()
|
||||
lg = DirectPaste()
|
||||
tempdir = temppath.ensure("some_fun", dir=True)
|
||||
r = RestGen(DocStorageAccessor(ds), lg, DirWriter(tempdir))
|
||||
r.write()
|
||||
self.check_rest(tempdir)
|
||||
|
||||
def test_function_source(self):
|
||||
def blah():
|
||||
a = 3
|
||||
b = 4
|
||||
return a + b
|
||||
|
||||
descs = {'blah': blah}
|
||||
ds = DocStorage().from_dict(descs)
|
||||
t = Tracer(ds)
|
||||
t.start_tracing()
|
||||
blah()
|
||||
t.end_tracing()
|
||||
lg = DirectPaste()
|
||||
tempdir = temppath.ensure("function_source", dir=True)
|
||||
r = RestGen(DocStorageAccessor(ds), lg, DirWriter(tempdir))
|
||||
r.write()
|
||||
assert tempdir.join("function_blah.txt").read().find("a = 3") != -1
|
||||
self.check_rest(tempdir)
|
||||
ps = DocStorageAccessor(ds)
|
||||
r = RestGen(ps, lg, DirWriter(tempdir))
|
||||
r.write()
|
||||
assert tempdir.join("function_blah.txt").read().find("a = 3") != -1
|
||||
|
||||
def test_function_arguments(self):
|
||||
def blah(a, b, c):
|
||||
return "axx"
|
||||
|
||||
class C:
|
||||
pass
|
||||
|
||||
descs = {'blah':blah}
|
||||
ds = DocStorage().from_dict(descs)
|
||||
t = Tracer(ds)
|
||||
t.start_tracing()
|
||||
blah(3, "x", C())
|
||||
t.end_tracing()
|
||||
lg = DirectPaste()
|
||||
tempdir = temppath.ensure("function_args", dir=True)
|
||||
r = RestGen(DocStorageAccessor(ds), lg, DirWriter(tempdir))
|
||||
r.write()
|
||||
source = tempdir.join("function_blah.txt").read()
|
||||
call_point = source.find("call sites\:")
|
||||
assert call_point != -1
|
||||
assert source.find("a \:\: <Int>") < call_point
|
||||
assert source.find("b \:\: <String>") < call_point
|
||||
assert source.find("c \:\: <Instance of Class C>") < call_point
|
||||
self.check_rest(tempdir)
|
||||
|
||||
def test_class_typedefs(self):
|
||||
class A(object):
|
||||
def __init__(self, x):
|
||||
pass
|
||||
|
||||
def a(self):
|
||||
pass
|
||||
|
||||
class B(A):
|
||||
def __init__(self, y):
|
||||
pass
|
||||
|
||||
def xxx(x):
|
||||
return x
|
||||
|
||||
descs = {'A': A, 'B': B, 'xxx':xxx}
|
||||
ds = DocStorage().from_dict(descs)
|
||||
t = Tracer(ds)
|
||||
t.start_tracing()
|
||||
xxx(A(3))
|
||||
xxx(B("f"))
|
||||
t.end_tracing()
|
||||
lg = DirectPaste()
|
||||
tempdir = temppath.ensure("classargs", dir=True)
|
||||
r = RestGen(DocStorageAccessor(ds), lg, DirWriter(tempdir))
|
||||
r.write()
|
||||
source = tempdir.join("function_xxx.txt").read()
|
||||
call_point = source.find("call sites\:")
|
||||
assert call_point != -1
|
||||
print source
|
||||
assert -1 < source.find("x \:\: <Instance of AnyOf( `Class B`_ , "
|
||||
"`Class A`_ )>") < call_point
|
||||
source = tempdir.join('method_B.a.txt').read()
|
||||
assert source.find('**origin** \: `A`_') > -1
|
||||
self.check_rest(tempdir)
|
||||
|
||||
def test_exc_raising(self):
|
||||
def x():
|
||||
try:
|
||||
1/0
|
||||
except:
|
||||
pass
|
||||
|
||||
descs = {'x':x}
|
||||
ds = DocStorage().from_dict(descs)
|
||||
t = Tracer(ds)
|
||||
t.start_tracing()
|
||||
x()
|
||||
t.end_tracing()
|
||||
lg = DirectPaste()
|
||||
tempdir = temppath.ensure("exc_raising", dir=True)
|
||||
r = RestGen(DocStorageAccessor(ds), lg, DirWriter(tempdir))
|
||||
r.write()
|
||||
source = tempdir.join('function_x.txt').open().read()
|
||||
assert source.find('ZeroDivisionError') < source.find('call sites\:')
|
||||
|
||||
|
||||
def test_nonexist_origin(self):
|
||||
class A:
|
||||
def method(self):
|
||||
pass
|
||||
|
||||
class B(A):
|
||||
pass
|
||||
|
||||
descs = {'B':B}
|
||||
ds = DocStorage().from_dict(descs)
|
||||
t = Tracer(ds)
|
||||
t.start_tracing()
|
||||
B().method()
|
||||
t.end_tracing()
|
||||
lg = DirectPaste()
|
||||
tempdir = temppath.ensure("nonexit_origin", dir=True)
|
||||
r = RestGen(DocStorageAccessor(ds), lg, DirWriter(tempdir))
|
||||
r.write()
|
||||
self.check_rest(tempdir)
|
||||
|
||||
def test_sourceview(self):
|
||||
class A:
|
||||
def method(self):
|
||||
pass
|
||||
|
||||
descs = {'A':A}
|
||||
ds = DocStorage().from_dict(descs)
|
||||
t = Tracer(ds)
|
||||
t.start_tracing()
|
||||
A().method()
|
||||
t.end_tracing()
|
||||
lg = SourceView('http://localhost:8000')
|
||||
tempdir = temppath.ensure("sourceview", dir=True)
|
||||
r = RestGen(DocStorageAccessor(ds), lg, DirWriter(tempdir))
|
||||
r.write()
|
||||
self.check_rest(tempdir)
|
||||
assert tempdir.join('traceback_A.method.0.txt').open().read().find(
|
||||
'.. _`/py/apigen/rest/testing/test\_rest.py\:A.method`: http://localhost:8000/py/apigen/rest/testing/test_rest.py#A.method') != -1
|
||||
|
||||
def test_sourceview_fun(self):
|
||||
def f():
|
||||
pass
|
||||
|
||||
descs = {'f':f}
|
||||
ds = DocStorage().from_dict(descs)
|
||||
t = Tracer(ds)
|
||||
t.start_tracing()
|
||||
f()
|
||||
t.end_tracing()
|
||||
tempdir = temppath.ensure("sourceview_fun", dir=True)
|
||||
lg = SourceView('http://localhost:8000')
|
||||
r = RestGen(DocStorageAccessor(ds), lg, DirWriter(tempdir))
|
||||
r.write()
|
||||
self.check_rest(tempdir)
|
||||
assert tempdir.join('function_f.txt').open().read().find(
|
||||
'.. _`/py/apigen/rest/testing/test\_rest.py\:f`: http://localhost:8000/py/apigen/rest/testing/test_rest.py#f') != -1
|
|
@ -0,0 +1,143 @@
|
|||
|
||||
""" source browser using compiler module
|
||||
|
||||
WARNING!!!
|
||||
|
||||
This is a very simple and very silly attempt to do so.
|
||||
|
||||
"""
|
||||
|
||||
from compiler import parse, ast
|
||||
import py
|
||||
|
||||
from py.__.path.common import PathBase
|
||||
|
||||
blockers = [ast.Function, ast.Class]
|
||||
|
||||
class BaseElem(object):
|
||||
def listnames(self):
|
||||
if getattr(self, 'parent', None):
|
||||
return self.parent.listnames() + '.' + self.name
|
||||
return self.name
|
||||
|
||||
class Module(BaseElem):
|
||||
def __init__(self, path, _dict):
|
||||
self.path = path
|
||||
self.dict = _dict
|
||||
|
||||
def __getattr__(self, attr):
|
||||
try:
|
||||
return self.dict[attr]
|
||||
except KeyError:
|
||||
raise AttributeError(attr)
|
||||
|
||||
def get_children(self):
|
||||
values = self.dict.values()
|
||||
all = values[:]
|
||||
for v in values:
|
||||
all += v.get_children()
|
||||
return all
|
||||
|
||||
def get_endline(start, lst):
|
||||
l = lst[::-1]
|
||||
for i in l:
|
||||
if i.lineno:
|
||||
return i.lineno
|
||||
end_ch = get_endline(None, i.getChildNodes())
|
||||
if end_ch:
|
||||
return end_ch
|
||||
return start
|
||||
|
||||
class Function(BaseElem):
|
||||
def __init__(self, name, parent, firstlineno, endlineno):
|
||||
self.firstlineno = firstlineno
|
||||
self.endlineno = endlineno
|
||||
self.name = name
|
||||
self.parent = parent
|
||||
|
||||
def get_children(self):
|
||||
return []
|
||||
|
||||
class Method(BaseElem):
|
||||
def __init__(self, name, parent, firstlineno, endlineno):
|
||||
self.name = name
|
||||
self.firstlineno = firstlineno
|
||||
self.endlineno = endlineno
|
||||
self.parent = parent
|
||||
|
||||
def function_from_ast(ast, cls_ast, cls=Function):
|
||||
startline = ast.lineno
|
||||
endline = get_endline(startline, ast.getChildNodes())
|
||||
assert endline
|
||||
return cls(ast.name, cls_ast, startline, endline)
|
||||
|
||||
def class_from_ast(cls_ast):
|
||||
bases = [i.name for i in cls_ast.bases if isinstance(i, ast.Name)]
|
||||
# XXX
|
||||
methods = {}
|
||||
startline = cls_ast.lineno
|
||||
name = cls_ast.name
|
||||
endline = get_endline(startline, cls_ast.getChildNodes())
|
||||
cls = Class(name, startline, endline, bases, [])
|
||||
cls.methods = dict([(i.name, function_from_ast(i, cls, Method)) for i in \
|
||||
cls_ast.code.nodes if isinstance(i, ast.Function)])
|
||||
return cls
|
||||
|
||||
class Class(BaseElem):
|
||||
def __init__(self, name, firstlineno, endlineno, bases, methods):
|
||||
self.bases = bases
|
||||
self.firstlineno = firstlineno
|
||||
self.endlineno = endlineno
|
||||
self.name = name
|
||||
self.methods = methods
|
||||
|
||||
def __getattr__(self, attr):
|
||||
try:
|
||||
return self.methods[attr]
|
||||
except KeyError:
|
||||
raise AttributeError(attr)
|
||||
|
||||
def get_children(self):
|
||||
return self.methods.values()
|
||||
|
||||
def dir_nodes(st):
|
||||
""" List all the subnodes, which are not blockers
|
||||
"""
|
||||
res = []
|
||||
for i in st.getChildNodes():
|
||||
res.append(i)
|
||||
if not i.__class__ in blockers:
|
||||
res += dir_nodes(i)
|
||||
return res
|
||||
|
||||
def update_mod_dict(imp_mod, mod_dict):
|
||||
# make sure that things that are in mod_dict, and not in imp_mod,
|
||||
# are not shown
|
||||
for key, value in mod_dict.items():
|
||||
if not hasattr(imp_mod, key):
|
||||
del mod_dict[key]
|
||||
|
||||
def parse_path(path):
|
||||
if not isinstance(path, PathBase):
|
||||
path = py.path.local(path)
|
||||
buf = path.open().read()
|
||||
st = parse(buf)
|
||||
# first go - we get all functions and classes defined on top-level
|
||||
nodes = dir_nodes(st)
|
||||
function_ast = [i for i in nodes if isinstance(i, ast.Function)]
|
||||
classes_ast = [i for i in nodes if isinstance(i, ast.Class)]
|
||||
mod_dict = dict([(i.name, function_from_ast(i, None)) for i in function_ast]
|
||||
+ [(i.name, class_from_ast(i)) for i in classes_ast])
|
||||
# we check all the elements, if they're really there
|
||||
try:
|
||||
mod = path.pyimport()
|
||||
except (KeyboardInterrupt, SystemExit):
|
||||
raise
|
||||
except: # catch all other import problems generically
|
||||
# XXX some import problem: we probably should not
|
||||
# pretend to have an empty module
|
||||
pass
|
||||
else:
|
||||
update_mod_dict(mod, mod_dict)
|
||||
return Module(path, mod_dict)
|
||||
|
|
@ -0,0 +1,174 @@
|
|||
""" simple Python syntax coloring """
|
||||
|
||||
import re
|
||||
|
||||
class PythonSchema(object):
|
||||
""" contains information for syntax coloring """
|
||||
comment = [('#', '\n')]
|
||||
multiline_string = ['"""', "'''"]
|
||||
string = ['"""', "'''", '"', "'"]
|
||||
# XXX not complete
|
||||
keyword = ['for', 'if', 'not', 'then', 'else', 'while', 'from', 'import',
|
||||
'try', 'except', 'finally', 'raise', 'print', 'exec', 'eval',
|
||||
'break', 'in', 'assert', 'None']
|
||||
alt_keyword = ['def', 'class', 'return', 'pass', 'yield']
|
||||
|
||||
class Token(object):
|
||||
data = None
|
||||
type = 'unknown'
|
||||
|
||||
def __init__(self, data, type='unknown'):
|
||||
self.data = data
|
||||
self.type = type
|
||||
|
||||
def __repr__(self):
|
||||
return '<Token type="%s" %r>' % (self.type, self.data)
|
||||
|
||||
def __eq__(self, other):
|
||||
return self.data == other.data and self.type == other.type
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self.__eq__(other)
|
||||
|
||||
class Tokenizer(object):
|
||||
""" when fed lists strings, it will return tokens with type info
|
||||
|
||||
very naive tokenizer, state is recorded for multi-line strings, etc.
|
||||
"""
|
||||
|
||||
_re_word = re.compile('[\w_]+')
|
||||
_re_space = re.compile('\s+')
|
||||
_re_number = re.compile('[\d\.]*\d[\d\.]*l?', re.I)
|
||||
_re_rest = re.compile('[^\w\s\d\'"]+') # XXX cheating a bit with the quotes
|
||||
|
||||
# these will be filled using the schema
|
||||
_re_strings_full = None
|
||||
_re_strings_multiline = None
|
||||
_re_strings_comments = None
|
||||
|
||||
def __init__(self, schema):
|
||||
self.schema = schema
|
||||
self._inside_multiline = False
|
||||
|
||||
self._re_strings_full = []
|
||||
self._re_strings_multiline = []
|
||||
self._re_strings_empty = []
|
||||
for d in schema.string + schema.multiline_string:
|
||||
self._re_strings_full.append(
|
||||
re.compile(r'%s[^\\%s]*(\\.[^\\%s]*)+%s' % (d, d, d, d)))
|
||||
self._re_strings_full.append(
|
||||
re.compile(r'%s[^\\%s]+(\\.[^\\%s]*)*%s' % (d, d, d, d)))
|
||||
self._re_strings_empty.append(re.compile('%s%s' % (d, d)))
|
||||
for d in schema.multiline_string:
|
||||
self._re_strings_multiline.append((re.compile('%s.*' % (d,), re.S),
|
||||
re.compile('.*?%s' % (d,))))
|
||||
# no multi-line comments in Python... phew :)
|
||||
self._re_comments = []
|
||||
for start, end in schema.comment:
|
||||
self._re_comments.append(re.compile('%s.*?%s' % (start, end)))
|
||||
|
||||
def tokenize(self, data):
|
||||
if self._inside_multiline:
|
||||
m = self._inside_multiline.match(data)
|
||||
if not m:
|
||||
yield Token(data, 'string')
|
||||
data = ''
|
||||
else:
|
||||
s = m.group(0)
|
||||
data = data[len(s):]
|
||||
self._inside_multiline = False
|
||||
yield Token(s, 'string')
|
||||
while data:
|
||||
for f in [self._check_full_strings, self._check_multiline_strings,
|
||||
self._check_empty_strings, self._check_comments,
|
||||
self._check_number, self._check_space, self._check_word,
|
||||
self._check_rest]:
|
||||
data, t = f(data)
|
||||
if t:
|
||||
yield t
|
||||
break
|
||||
else:
|
||||
raise ValueError(
|
||||
'no token found in %r (bug in tokenizer)' % (data,))
|
||||
|
||||
def _check_full_strings(self, data):
|
||||
token = None
|
||||
for r in self._re_strings_full:
|
||||
m = r.match(data)
|
||||
if m:
|
||||
s = m.group(0)
|
||||
data = data[len(s):]
|
||||
token = Token(s, type='string')
|
||||
break
|
||||
return data, token
|
||||
|
||||
def _check_multiline_strings(self, data):
|
||||
token = None
|
||||
for start, end in self._re_strings_multiline:
|
||||
m = start.match(data)
|
||||
if m:
|
||||
s = m.group(0)
|
||||
data = ''
|
||||
self._inside_multiline = end
|
||||
token = Token(s, 'string')
|
||||
break
|
||||
return data, token
|
||||
|
||||
def _check_empty_strings(self, data):
|
||||
token = None
|
||||
for r in self._re_strings_empty:
|
||||
m = r.match(data)
|
||||
if m:
|
||||
s = m.group(0)
|
||||
data = data[len(s):]
|
||||
token = Token(s, type='string')
|
||||
break
|
||||
return data, token
|
||||
|
||||
|
||||
def _check_comments(self, data):
|
||||
# fortunately we don't have to deal with multi-line comments
|
||||
token = None
|
||||
for r in self._re_comments:
|
||||
m = r.match(data)
|
||||
if m:
|
||||
s = m.group(0)
|
||||
data = data[len(s):]
|
||||
token = Token(s, 'comment')
|
||||
break
|
||||
return data, token
|
||||
|
||||
def _check_word(self, data):
|
||||
m = self._re_word.match(data)
|
||||
if m:
|
||||
s = m.group(0)
|
||||
type = 'word'
|
||||
if s in self.schema.keyword:
|
||||
type = 'keyword'
|
||||
elif s in self.schema.alt_keyword:
|
||||
type = 'alt_keyword'
|
||||
return data[len(s):], Token(s, type)
|
||||
return data, None
|
||||
|
||||
def _check_space(self, data):
|
||||
m = self._re_space.match(data)
|
||||
if m:
|
||||
s = m.group(0)
|
||||
return data[len(s):], Token(s, 'whitespace')
|
||||
return data, None
|
||||
|
||||
def _check_number(self, data):
|
||||
m = self._re_number.match(data)
|
||||
if m:
|
||||
s = m.group(0)
|
||||
return data[len(s):], Token(s, 'number')
|
||||
return data, None
|
||||
|
||||
def _check_rest(self, data):
|
||||
m = self._re_rest.match(data)
|
||||
if m:
|
||||
s = m.group(0)
|
||||
return data[len(s):], Token(s, 'unknown')
|
||||
return data, None
|
||||
|
||||
|
|
@ -0,0 +1,250 @@
|
|||
|
||||
""" html - generating ad-hoc html out of source browser
|
||||
"""
|
||||
|
||||
from py.xml import html, raw
|
||||
from compiler import ast
|
||||
import time
|
||||
from py.__.apigen.source.color import Tokenizer, PythonSchema
|
||||
|
||||
class HtmlEnchanter(object):
|
||||
reserved_words = ['if', 'for', 'return', 'yield']
|
||||
|
||||
def __init__(self, mod):
|
||||
self.mod = mod
|
||||
self.create_caches()
|
||||
|
||||
def create_caches(self):
|
||||
mod = self.mod
|
||||
linecache = {}
|
||||
for item in mod.get_children():
|
||||
linecache[item.firstlineno] = item
|
||||
self.linecache = linecache
|
||||
|
||||
def enchant_row(self, num, row):
|
||||
# add some information to the row, like functions defined on that
|
||||
# line, etc.
|
||||
try:
|
||||
item = self.linecache[num]
|
||||
# XXX: this should not be an assertion, rather a check, but we want to
|
||||
# know if stuff is working
|
||||
pos = row.find(item.name)
|
||||
assert pos != -1
|
||||
end = len(item.name) + pos
|
||||
chunk = html.a(row[pos:end], href="#" + item.listnames(),
|
||||
name=item.listnames())
|
||||
return [row[:pos], chunk, row[end:]]
|
||||
except KeyError:
|
||||
return [row] # no more info
|
||||
|
||||
class HTMLDocument(object):
|
||||
def __init__(self, tokenizer=None):
|
||||
self.html = root = html.html()
|
||||
self.head = head = self.create_head()
|
||||
root.append(head)
|
||||
self.body = body = self.create_body()
|
||||
root.append(body)
|
||||
self.table, self.tbody = table, tbody = self.create_table()
|
||||
body.append(table)
|
||||
|
||||
if tokenizer is None:
|
||||
tokenizer = Tokenizer(PythonSchema)
|
||||
self.tokenizer = tokenizer
|
||||
|
||||
def create_head(self):
|
||||
return html.head(
|
||||
html.title('source view'),
|
||||
html.style("""
|
||||
body, td {
|
||||
background-color: #FFF;
|
||||
color: black;
|
||||
font-family: monospace, Monaco;
|
||||
}
|
||||
|
||||
table, tr {
|
||||
margin: 0px;
|
||||
padding: 0px;
|
||||
border-width: 0px;
|
||||
}
|
||||
|
||||
a {
|
||||
color: blue;
|
||||
font-weight: bold;
|
||||
text-decoration: none;
|
||||
}
|
||||
|
||||
a:hover {
|
||||
color: #005;
|
||||
}
|
||||
|
||||
.lineno {
|
||||
text-align: right;
|
||||
color: #555;
|
||||
width: 3em;
|
||||
padding-right: 1em;
|
||||
border: 0px solid black;
|
||||
border-right-width: 1px;
|
||||
}
|
||||
|
||||
.code {
|
||||
padding-left: 1em;
|
||||
white-space: pre;
|
||||
}
|
||||
|
||||
.comment {
|
||||
color: purple;
|
||||
}
|
||||
|
||||
.string {
|
||||
color: #777;
|
||||
}
|
||||
|
||||
.keyword {
|
||||
color: blue;
|
||||
}
|
||||
|
||||
.alt_keyword {
|
||||
color: green;
|
||||
}
|
||||
|
||||
""", type='text/css'),
|
||||
)
|
||||
|
||||
def create_body(self):
|
||||
return html.body()
|
||||
|
||||
def create_table(self):
|
||||
table = html.table(cellpadding='0', cellspacing='0')
|
||||
tbody = html.tbody()
|
||||
table.append(tbody)
|
||||
return table, tbody
|
||||
|
||||
def prepare_line(self, text):
|
||||
""" adds html formatting to text items (list)
|
||||
|
||||
only processes items if they're of a string type (or unicode)
|
||||
"""
|
||||
ret = []
|
||||
for item in text:
|
||||
if type(item) in [str, unicode]:
|
||||
tokens = self.tokenizer.tokenize(item)
|
||||
for t in tokens:
|
||||
if t.type in ['keyword', 'alt_keyword', 'number',
|
||||
'string', 'comment']:
|
||||
ret.append(html.span(t.data, class_=t.type))
|
||||
else:
|
||||
ret.append(t.data)
|
||||
else:
|
||||
ret.append(item)
|
||||
return ret
|
||||
|
||||
def add_row(self, lineno, text):
|
||||
if text == ['']:
|
||||
text = [raw(' ')]
|
||||
else:
|
||||
text = self.prepare_line(text)
|
||||
self.tbody.append(html.tr(html.td(str(lineno), class_='lineno'),
|
||||
html.td(class_='code', *text)))
|
||||
|
||||
def __unicode__(self):
|
||||
# XXX don't like to use indent=0 here, but otherwise py.xml's indentation
|
||||
# messes up the html inside the table cells (which preserve whitespace)
|
||||
return self.html.unicode(indent=0)
|
||||
|
||||
def create_html(mod):
|
||||
# returns the rendered page as a unicode string
|
||||
#*[html.tr(html.td(i.name)) for i in mod.get_children()]
|
||||
lines = mod.path.open().readlines()
|
||||
|
||||
enchanter = HtmlEnchanter(mod)
|
||||
doc = HTMLDocument()
|
||||
for i, row in enumerate(lines):
|
||||
row = enchanter.enchant_row(i + 1, row)
|
||||
doc.add_row(i + 1, row)
|
||||
return unicode(doc)
|
||||
|
||||
style = html.style("""
|
||||
|
||||
body, p, td {
|
||||
background-color: #FFF;
|
||||
color: black;
|
||||
font-family: monospace, Monaco;
|
||||
}
|
||||
|
||||
td.type {
|
||||
width: 2em;
|
||||
}
|
||||
|
||||
td.name {
|
||||
width: 30em;
|
||||
}
|
||||
|
||||
td.mtime {
|
||||
width: 13em;
|
||||
}
|
||||
|
||||
td.size {
|
||||
text-align: right;
|
||||
}
|
||||
|
||||
""")
|
||||
|
||||
def create_dir_html(path, href_prefix=''):
|
||||
h = html.html(
|
||||
html.head(
|
||||
html.title('directory listing of %s' % (path,)),
|
||||
style,
|
||||
),
|
||||
)
|
||||
body = html.body(
|
||||
html.h1('directory listing of %s' % (path,)),
|
||||
)
|
||||
h.append(body)
|
||||
table = html.table()
|
||||
body.append(table)
|
||||
tbody = html.tbody()
|
||||
table.append(tbody)
|
||||
items = list(path.listdir())
|
||||
items.sort(key=lambda p: p.basename)
|
||||
items.sort(key=lambda p: not p.check(dir=True))
|
||||
for fpath in items:
|
||||
tr = html.tr()
|
||||
tbody.append(tr)
|
||||
td1 = html.td(fpath.check(dir=True) and 'D' or 'F', class_='type')
|
||||
tr.append(td1)
|
||||
href = fpath.basename
|
||||
if href_prefix:
|
||||
href = '%s%s' % (href_prefix, href)
|
||||
if fpath.check(dir=True):
|
||||
href += '/'
|
||||
td2 = html.td(html.a(fpath.basename, href=href), class_='name')
|
||||
tr.append(td2)
|
||||
td3 = html.td(time.strftime('%Y-%m-%d %H:%M:%S',
|
||||
time.gmtime(fpath.mtime())), class_='mtime')
|
||||
tr.append(td3)
|
||||
if fpath.check(dir=True):
|
||||
size = ''
|
||||
unit = ''
|
||||
else:
|
||||
size = fpath.size()
|
||||
unit = 'B'
|
||||
for u in ['kB', 'MB', 'GB', 'TB']:
|
||||
if size > 1024:
|
||||
size = round(size / 1024.0, 2)
|
||||
unit = u
|
||||
td4 = html.td('%s %s' % (size, unit), class_='size')
|
||||
tr.append(td4)
|
||||
return unicode(h)
|
||||
|
||||
def create_unknown_html(path):
|
||||
h = html.html(
|
||||
html.head(
|
||||
html.title('Cannot display page'),
|
||||
style,
|
||||
),
|
||||
html.body(
|
||||
html.p('The data URL (%s) does not contain Python code.' % (path,))
|
||||
),
|
||||
)
|
||||
return h.unicode()
|
||||
|
|
@ -0,0 +1,29 @@
|
|||
#!/usr/bin/python
|
||||
|
||||
import cgitb;cgitb.enable()
|
||||
import path
|
||||
import py
|
||||
from py.__.apigen.source.browser import parse_path
|
||||
from py.__.apigen.source.html import create_html, create_dir_html, \
|
||||
create_unknown_html
|
||||
|
||||
BASE_URL='http://codespeak.net/svn/py/dist'
|
||||
def cgi_main():
|
||||
import os
|
||||
reqpath = os.environ.get('PATH_INFO', '')
|
||||
path = py.path.svnurl('%s%s' % (BASE_URL, reqpath))
|
||||
if not path.check():
|
||||
return create_unknown_html(path)
|
||||
if path.check(file=True):
|
||||
return unicode(create_html(parse_path(path)))
|
||||
elif path.check(dir=True):
|
||||
prefix = ''
|
||||
if not reqpath:
|
||||
prefix = 'index.cgi/'
|
||||
return create_dir_html(path, href_prefix=prefix)
|
||||
else:
|
||||
return create_unknown_html(path)
|
||||
|
||||
print 'Content-Type: text/html; charset=UTF-8'
|
||||
print
|
||||
print cgi_main()
|
|
@ -0,0 +1,2 @@
|
|||
import os, sys
|
||||
sys.path = ['/'.join(os.path.dirname(__file__).split(os.sep)[:-3])] + sys.path
|
|
@ -0,0 +1,48 @@
|
|||
|
||||
""" web server for displaying source
|
||||
"""
|
||||
|
||||
import py
|
||||
try:
|
||||
from pypy.translator.js.examples import server
|
||||
except ImportError:
|
||||
py.test.skip("PyPy not found")
|
||||
from py.__.apigen.source.browser import parse_path
|
||||
from py.__.apigen.source.html import create_html, create_dir_html, create_unknown_html
|
||||
from py.xml import html
|
||||
|
||||
class Handler(server.TestHandler):
|
||||
BASE_URL='http://codespeak.net/svn/py/dist'
|
||||
|
||||
def __getattr__(self, attr):
|
||||
if attr == 'index':
|
||||
attr = ''
|
||||
url = self.BASE_URL + "/" + attr
|
||||
if url.endswith('_py'):
|
||||
url = url[:-3] + '.py'
|
||||
path = py.path.svnurl(url)
|
||||
if not path.check():
|
||||
def f(rev=None):
|
||||
return create_unknown_html(path)
|
||||
f.exposed = True
|
||||
f.func_name = attr
|
||||
return f
|
||||
def f(rev='HEAD'):
|
||||
path = py.path.svnurl(url, rev)
|
||||
# some try.. except.. here
|
||||
if path.check(file=True):
|
||||
return unicode(create_html(parse_path(path)))
|
||||
elif path.check(dir=True):
|
||||
return create_dir_html(path)
|
||||
else:
|
||||
return create_unknown_html(path)
|
||||
f.exposed = True
|
||||
f.func_name = attr
|
||||
return f
|
||||
|
||||
def _main():
|
||||
server.start_server(handler=Handler)
|
||||
|
||||
if __name__ == '__main__':
|
||||
_main()
|
||||
|
|
@ -0,0 +1,80 @@
|
|||
|
||||
""" test source browser abilities
|
||||
"""
|
||||
|
||||
from py.__.apigen.source.browser import parse_path, Class, Function, Method
|
||||
import py
|
||||
|
||||
def test_browser():
|
||||
tmp = py.test.ensuretemp("sourcebrowser")
|
||||
tmp.ensure("a.py").write(py.code.Source("""
|
||||
def f():
|
||||
pass
|
||||
|
||||
def g():
|
||||
pass
|
||||
|
||||
class X:
|
||||
pass
|
||||
|
||||
class Z(object):
|
||||
x = 1
|
||||
def zzz(self):
|
||||
1
|
||||
2
|
||||
3
|
||||
4
|
||||
"""))
|
||||
mod = parse_path(tmp.join("a.py"))
|
||||
assert isinstance(mod.g, Function)
|
||||
assert isinstance(mod.Z, Class)
|
||||
py.test.raises(AttributeError, "mod.zzz")
|
||||
assert mod.g.firstlineno == 5
|
||||
assert mod.g.name == "g"
|
||||
assert mod.g.endlineno == 6
|
||||
assert mod.X.firstlineno == 8
|
||||
assert mod.X.endlineno == 9
|
||||
assert mod.Z.bases == ["object"]
|
||||
assert isinstance(mod.Z.zzz, Method)
|
||||
assert mod.Z.zzz.firstlineno == 13
|
||||
assert mod.Z.zzz.endlineno == 17
|
||||
|
||||
def test_if_browser():
|
||||
tmp = py.test.ensuretemp("sourcebrowser")
|
||||
tmp.ensure("b.py").write(py.code.Source("""
|
||||
if 1:
|
||||
def f():
|
||||
pass
|
||||
if 0:
|
||||
def g():
|
||||
pass
|
||||
"""))
|
||||
mod = parse_path(tmp.join("b.py"))
|
||||
assert isinstance(mod.f, Function)
|
||||
py.test.raises(AttributeError, 'mod.g')
|
||||
|
||||
def test_bases():
|
||||
tmp = py.test.ensuretemp("sourcebrowser")
|
||||
tmp.ensure("c.py").write(py.code.Source("""
|
||||
import py
|
||||
class Dir(py.test.collect.Directory):
|
||||
pass
|
||||
"""))
|
||||
mod = parse_path(tmp.join("c.py"))
|
||||
# if it does not raise it's ok for now
|
||||
#
|
||||
|
||||
def test_importing_goes_wrong():
|
||||
tmp = py.test.ensuretemp("sourcebrowserimport")
|
||||
tmp.ensure("x.py").write(py.code.Source("""
|
||||
import aslkdjaslkdjasdl
|
||||
"""))
|
||||
mod = parse_path(tmp.join("x.py"))
|
||||
|
||||
tmp.ensure("y.py").write(py.code.Source("""
|
||||
raise KeyboardInterrupt
|
||||
"""))
|
||||
py.test.raises(KeyboardInterrupt, 'parse_path(tmp.join("y.py"))')
|
||||
|
||||
# if it does not raise it's ok for now
|
||||
#
|
|
@ -0,0 +1,77 @@
|
|||
import py
|
||||
from py.__.apigen.source.color import Tokenizer, Token, PythonSchema
|
||||
|
||||
class TestTokenizer(object):
|
||||
def tokens(self, data):
|
||||
t = Tokenizer(PythonSchema)
|
||||
return list(t.tokenize(data))
|
||||
|
||||
def test_word(self):
|
||||
assert self.tokens('foo') == [Token('foo', type='word')]
|
||||
assert self.tokens('_1_word') == [Token('_1_word', type='word')]
|
||||
|
||||
def test_keyword(self):
|
||||
assert 'if' in PythonSchema.keyword
|
||||
assert self.tokens('see if it works') == [Token('see', type='word'),
|
||||
Token(' ',
|
||||
type='whitespace'),
|
||||
Token('if', type='keyword'),
|
||||
Token(' ',
|
||||
type='whitespace'),
|
||||
Token('it', type='word'),
|
||||
Token(' ',
|
||||
type='whitespace'),
|
||||
Token('works', type='word')]
|
||||
|
||||
def test_space(self):
|
||||
assert self.tokens(' ') == [Token(' ', type='whitespace')]
|
||||
assert self.tokens(' \n') == [Token(' \n', type='whitespace')]
|
||||
|
||||
def test_number(self):
|
||||
# XXX incomplete
|
||||
assert self.tokens('1') == [Token('1', type='number')]
|
||||
assert self.tokens('1.1') == [Token('1.1', type='number')]
|
||||
assert self.tokens('.1') == [Token('.1', type='number')]
|
||||
assert self.tokens('1.') == [Token('1.', type='number')]
|
||||
assert self.tokens('1.1l') == [Token('1.1l', type='number')]
|
||||
|
||||
def test_printable(self):
|
||||
assert self.tokens('.') == [Token('.', 'unknown')]
|
||||
assert self.tokens(';#$@\n') == [Token(';#$@', type='unknown'),
|
||||
Token('\n', type='whitespace')]
|
||||
|
||||
def test_comment(self):
|
||||
assert self.tokens('# foo\n') == [Token('# foo\n', type='comment')]
|
||||
assert self.tokens('foo # bar\n') == [Token('foo', type='word'),
|
||||
Token(' ', type='whitespace'),
|
||||
Token('# bar\n', type='comment')]
|
||||
|
||||
def test_string_simple(self):
|
||||
assert self.tokens('"foo"') == [Token('"foo"', type='string')]
|
||||
assert self.tokens('"foo"\'bar\'') == [Token('"foo"', type='string'),
|
||||
Token("'bar'", type='string')]
|
||||
|
||||
def test_string_escape(self):
|
||||
assert self.tokens('"foo \\" bar"') == [Token('"foo \\" bar"',
|
||||
type='string')]
|
||||
|
||||
def test_string_multiline(self):
|
||||
t = Tokenizer(PythonSchema)
|
||||
res = list(t.tokenize('"""foo\n'))
|
||||
assert res == [Token('"""foo\n', type='string')]
|
||||
res = list(t.tokenize('bar\n'))
|
||||
assert res == [Token('bar\n', type='string')]
|
||||
res = list(t.tokenize('"""\n'))
|
||||
assert res == [Token('"""', type='string'),
|
||||
Token('\n', type='whitespace')]
|
||||
# tricky problem: the following line must not put the tokenizer in
|
||||
# 'multiline state'...
|
||||
res = list(t.tokenize('"""foo"""'))
|
||||
assert res == [Token('"""foo"""', type='string')]
|
||||
res = list(t.tokenize('bar'))
|
||||
assert res == [Token('bar', type='word')]
|
||||
|
||||
def test_string_following_printable(self):
|
||||
assert self.tokens('."foo"') == [Token('.', type='unknown'),
|
||||
Token('"foo"', type='string')]
|
||||
|
|
@ -0,0 +1,149 @@
|
|||
|
||||
""" test of html generation
|
||||
"""
|
||||
|
||||
from py.__.apigen.source.html import create_html, HTMLDocument
|
||||
from py.__.apigen.source.browser import parse_path
|
||||
from py.xml import html
|
||||
|
||||
import py
|
||||
import os
|
||||
|
||||
def create_html_and_show(path):
|
||||
mod = parse_path(path)
|
||||
html = create_html(mod)
|
||||
testfile = py.test.ensuretemp("htmloutput").ensure("test.html")
|
||||
testfile.write(unicode(html))
|
||||
return testfile
|
||||
|
||||
def test_basic():
|
||||
tmp = py.test.ensuretemp("sourcehtml")
|
||||
inp = tmp.ensure("one.py")
|
||||
inp.write(py.code.Source("""
|
||||
def func_one():
|
||||
pass
|
||||
|
||||
def func_two(x, y):
|
||||
x = 1
|
||||
y = 2
|
||||
return x + y
|
||||
|
||||
class B:
|
||||
pass
|
||||
|
||||
class A(B):
|
||||
def meth1(self):
|
||||
pass
|
||||
|
||||
def meth2(self):
|
||||
pass
|
||||
"""))
|
||||
|
||||
testfile = create_html_and_show(inp)
|
||||
data = testfile.open().read()
|
||||
assert data.find('<a href="#func_one"') != -1
|
||||
assert data.find('<a href="#func_two"') != -1
|
||||
assert data.find('<a href="#B"') != -1
|
||||
assert data.find('<a href="#A"') != -1
|
||||
assert data.find('<a href="#A.meth1"') != -1
|
||||
|
||||
class _HTMLDocument(HTMLDocument):
|
||||
def __init__(self):
|
||||
pass
|
||||
|
||||
class TestHTMLDocument(object):
|
||||
def test_head(self):
|
||||
doc = _HTMLDocument()
|
||||
head = doc.create_head()
|
||||
assert isinstance(head, html.head)
|
||||
rendered = unicode(head)
|
||||
assert rendered.find('<title>source view</title>') > -1
|
||||
assert py.std.re.search('<style type="text/css">[^<]+</style>',
|
||||
rendered)
|
||||
|
||||
def test_body(self):
|
||||
doc = _HTMLDocument()
|
||||
body = doc.create_body()
|
||||
assert unicode(body) == '<body></body>'
|
||||
|
||||
def test_table(self):
|
||||
doc = _HTMLDocument()
|
||||
table, tbody = doc.create_table()
|
||||
assert isinstance(table, html.table)
|
||||
assert isinstance(tbody, html.tbody)
|
||||
assert tbody == table[0]
|
||||
|
||||
def prepare_line(self, line, doc=None):
|
||||
if doc is None:
|
||||
doc = HTMLDocument()
|
||||
l = doc.prepare_line(line)
|
||||
return ''.join([unicode(i) for i in l])
|
||||
|
||||
def test_prepare_line_basic(self):
|
||||
result = self.prepare_line(['see if this works'])
|
||||
assert result == 'see <span class="keyword">if</span> this works'
|
||||
result = self.prepare_line(['see if this ',
|
||||
html.a('works', name='works'),' too'])
|
||||
assert result == ('see <span class="keyword">if</span> this '
|
||||
'<a name="works">works</a> too')
|
||||
result = self.prepare_line(['see if something else works'])
|
||||
assert result == ('see <span class="keyword">if</span> something '
|
||||
'<span class="keyword">else</span> works')
|
||||
result = self.prepare_line(['see if something ',
|
||||
html.a('else', name='else'), ' works too'])
|
||||
assert result == ('see <span class="keyword">if</span> something '
|
||||
'<a name="else">else</a> works too')
|
||||
|
||||
def test_prepare_line_strings(self):
|
||||
result = self.prepare_line(['foo = "bar"'])
|
||||
assert result == 'foo = <span class="string">"bar"</span>'
|
||||
|
||||
result = self.prepare_line(['"spam"'])
|
||||
assert result == '<span class="string">"spam"</span>'
|
||||
|
||||
# test multiline strings
|
||||
doc = HTMLDocument()
|
||||
result = self.prepare_line(['"""start of multiline'], doc)
|
||||
assert result == ('<span class="string">"""start of '
|
||||
'multiline</span>')
|
||||
# doc should now be in 'string mode'
|
||||
result = self.prepare_line(['see if it doesn\'t touch this'], doc)
|
||||
assert result == ('<span class="string">see if it doesn't touch '
|
||||
'this</span>')
|
||||
result = self.prepare_line(['"""'], doc)
|
||||
assert result == '<span class="string">"""</span>'
|
||||
result = self.prepare_line(['see if it colours this again'], doc)
|
||||
assert result == ('see <span class="keyword">if</span> it colours '
|
||||
'this again')
|
||||
|
||||
def test_add_row(self):
|
||||
doc = HTMLDocument()
|
||||
doc.add_row(1, ['""" this is a foo implementation """'])
|
||||
doc.add_row(2, [''])
|
||||
doc.add_row(3, ['class ', html.a('Foo', name='Foo'), ':'])
|
||||
doc.add_row(4, [' pass'])
|
||||
tbody = doc.tbody
|
||||
assert len(tbody) == 4
|
||||
assert unicode(tbody[0][0]) == '<td class="lineno">1</td>'
|
||||
assert unicode(tbody[0][1]) == ('<td class="code">'
|
||||
'<span class="string">'
|
||||
'""" '
|
||||
'this is a foo implementation '
|
||||
'"""'
|
||||
'</span></td>')
|
||||
assert unicode(tbody[1][1]) == '<td class="code"> </td>'
|
||||
assert unicode(tbody[2][1]) == ('<td class="code">'
|
||||
'<span class="alt_keyword">class'
|
||||
'</span> '
|
||||
'<a name="Foo">Foo</a>:</td>')
|
||||
assert unicode(tbody[3][1]) == ('<td class="code"> '
|
||||
'<span class="alt_keyword">pass'
|
||||
'</span></td>')
|
||||
|
||||
def test_unicode(self):
|
||||
doc = HTMLDocument()
|
||||
h = unicode(doc)
|
||||
print h
|
||||
assert py.std.re.match(r'<html>\s*<head>\s*<title>[^<]+</title>'
|
||||
'.*</body>\w*</html>$', h, py.std.re.S)
|
||||
|
|
@ -0,0 +1,105 @@
|
|||
#sidebar {
|
||||
width: 9em;
|
||||
float: left;
|
||||
vertical-align: top;
|
||||
margin-top: 0.5em;
|
||||
}
|
||||
|
||||
#main {
|
||||
margin-left: 10em;
|
||||
}
|
||||
|
||||
#content {
|
||||
border: 0px;
|
||||
height: 95%;
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
#breadcrumb {
|
||||
height: 5%;
|
||||
display: none;
|
||||
}
|
||||
|
||||
#logo {
|
||||
/* float: left; */
|
||||
}
|
||||
|
||||
#logo img {
|
||||
border-width: 0px;
|
||||
}
|
||||
|
||||
#menubar a {
|
||||
text-decoration: none;
|
||||
color: blue;
|
||||
}
|
||||
|
||||
#menubar div.selected a {
|
||||
color: purple;
|
||||
}
|
||||
|
||||
.project_title {
|
||||
font-size: 2em;
|
||||
font-weight: bold;
|
||||
}
|
||||
|
||||
body, div, p, h1, h2, h3, h4 {
|
||||
font-family: Trebuchet MS, Verdana, Arial;
|
||||
background-color: #FFE;
|
||||
color: black;
|
||||
}
|
||||
|
||||
a {
|
||||
color: #006;
|
||||
text-decoration: none;
|
||||
}
|
||||
|
||||
ul {
|
||||
padding-left: 0em;
|
||||
margin-top: 0px;
|
||||
}
|
||||
|
||||
ul li {
|
||||
list-style-type: none;
|
||||
}
|
||||
|
||||
.code a {
|
||||
color: blue;
|
||||
font-weight: bold;
|
||||
text-decoration: none;
|
||||
}
|
||||
|
||||
a:hover {
|
||||
color: #005;
|
||||
}
|
||||
|
||||
.lineno {
|
||||
text-align: right;
|
||||
color: #555;
|
||||
width: 3em;
|
||||
padding-right: 1em;
|
||||
border: 0px solid black;
|
||||
border-right-width: 1px;
|
||||
}
|
||||
|
||||
.code {
|
||||
padding-left: 1em;
|
||||
white-space: pre;
|
||||
font-family: monospace, Monaco;
|
||||
}
|
||||
|
||||
.comment {
|
||||
color: purple;
|
||||
}
|
||||
|
||||
.string {
|
||||
color: #777;
|
||||
}
|
||||
|
||||
.keyword {
|
||||
color: blue;
|
||||
}
|
||||
|
||||
.alt_keyword {
|
||||
color: green;
|
||||
}
|
||||
|
|
@ -0,0 +1,360 @@
|
|||
import py
|
||||
html = py.xml.html
|
||||
from py.__.apigen.linker import Linker
|
||||
from py.__.apigen.htmlgen import *
|
||||
from py.__.apigen.tracer.docstorage import DocStorage, DocStorageAccessor
|
||||
from py.__.apigen.tracer.tracer import Tracer
|
||||
from py.__.apigen.project import Project
|
||||
from py.__.test.web import webcheck
|
||||
from py.__.apigen.conftest import option
|
||||
|
||||
def run_string_sequence_test(data, seq):
|
||||
currpos = -1
|
||||
for s in seq:
|
||||
newpos = data.find(s)
|
||||
if currpos >= newpos:
|
||||
if newpos == -1:
|
||||
message = 'not found'
|
||||
else:
|
||||
message = 'unexpected position: %s' % (newpos,)
|
||||
py.test.fail('string %r: %s' % (s, message))
|
||||
currpos = newpos
|
||||
|
||||
def setup_fs_project():
|
||||
temp = py.test.ensuretemp('apigen_example')
|
||||
temp.ensure("pkg/func.py").write(py.code.Source("""\
|
||||
def func(arg1):
|
||||
"docstring"
|
||||
"""))
|
||||
temp.ensure('pkg/someclass.py').write(py.code.Source("""\
|
||||
class SomeClass(object):
|
||||
" docstring someclass "
|
||||
def __init__(self, somevar):
|
||||
self.somevar = somevar
|
||||
|
||||
def get_somevar(self):
|
||||
" get_somevar docstring "
|
||||
return self.somevar
|
||||
SomeInstance = SomeClass(10)
|
||||
"""))
|
||||
temp.ensure('pkg/somesubclass.py').write(py.code.Source("""\
|
||||
from someclass import SomeClass
|
||||
class SomeSubClass(SomeClass):
|
||||
" docstring somesubclass "
|
||||
def get_somevar(self):
|
||||
return self.somevar + 1
|
||||
"""))
|
||||
temp.ensure('pkg/somenamespace.py').write(py.code.Source("""\
|
||||
def foo():
|
||||
return 'bar'
|
||||
def baz(qux):
|
||||
return qux
|
||||
"""))
|
||||
temp.ensure("pkg/__init__.py").write(py.code.Source("""\
|
||||
from py.initpkg import initpkg
|
||||
initpkg(__name__, exportdefs = {
|
||||
'main.sub.func': ("./func.py", "func"),
|
||||
'main.SomeClass': ('./someclass.py', 'SomeClass'),
|
||||
'main.SomeInstance': ('./someclass.py', 'SomeInstance'),
|
||||
'main.SomeSubClass': ('./somesubclass.py', 'SomeSubClass'),
|
||||
'main.SomeSubClass': ('./somesubclass.py', 'SomeSubClass'),
|
||||
'other': ('./somenamespace.py', '*'),
|
||||
})
|
||||
"""))
|
||||
return temp, 'pkg'
|
||||
|
||||
def get_dsa(fsroot, pkgname):
|
||||
py.std.sys.path.insert(0, str(fsroot))
|
||||
pkg = __import__(pkgname)
|
||||
ds = DocStorage()
|
||||
ds.from_pkg(pkg)
|
||||
dsa = DocStorageAccessor(ds)
|
||||
return ds, dsa
|
||||
|
||||
def _checkhtml(htmlstring):
|
||||
if isinstance(htmlstring, unicode):
|
||||
htmlstring = htmlstring.encode('UTF-8', 'replace')
|
||||
assert isinstance(htmlstring, str)
|
||||
if option.webcheck:
|
||||
webcheck.check_html(htmlstring)
|
||||
else:
|
||||
py.test.skip("pass --webcheck to validate html produced in tests "
|
||||
"(partial skip: the test has succeeded up until here)")
|
||||
|
||||
def _checkhtmlsnippet(htmlstring):
|
||||
# XXX wrap page around snippet and validate
|
||||
pass
|
||||
#newstring = """<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"
|
||||
#"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">\n""" + unicode(h)
|
||||
#_checkhtml(newstring)
|
||||
|
||||
class AbstractBuilderTest(object):
|
||||
def setup_class(cls):
|
||||
cls.fs_root, cls.pkg_name = setup_fs_project()
|
||||
cls.ds, cls.dsa = get_dsa(cls.fs_root, cls.pkg_name)
|
||||
cls.project = Project()
|
||||
|
||||
def setup_method(self, meth):
|
||||
class LinkerForTests(Linker):
|
||||
def get_target(self, linkid):
|
||||
try:
|
||||
return super(LinkerForTests, self).get_target(linkid)
|
||||
except KeyError:
|
||||
return 'unknown_link_%s' % (linkid,)
|
||||
self.base = base = py.test.ensuretemp('%s_%s' % (
|
||||
self.__class__.__name__, meth.im_func.func_name))
|
||||
self.linker = linker = LinkerForTests()
|
||||
self.apb = ApiPageBuilder(base, linker, self.dsa, self.fs_root)
|
||||
self.spb = SourcePageBuilder(base, linker, self.fs_root)
|
||||
self.namespace_tree = create_namespace_tree(['main.sub',
|
||||
'main.sub.func',
|
||||
'main.SomeClass',
|
||||
'main.SomeSubClass',
|
||||
'main.SomeInstance',
|
||||
'other.foo',
|
||||
'other.bar'])
|
||||
|
||||
class TestApiPageBuilder(AbstractBuilderTest):
|
||||
def test_build_callable_view(self):
|
||||
ds, dsa = get_dsa(self.fs_root, self.pkg_name)
|
||||
t = Tracer(ds)
|
||||
t.start_tracing()
|
||||
pkg = __import__(self.pkg_name)
|
||||
pkg.main.sub.func(10)
|
||||
pkg.main.sub.func(pkg.main.SomeClass(10))
|
||||
t.end_tracing()
|
||||
apb = ApiPageBuilder(self.base, self.linker, dsa, self.fs_root)
|
||||
snippet = apb.build_callable_view('main.sub.func')
|
||||
html = snippet.unicode()
|
||||
print html
|
||||
run_string_sequence_test(html, [
|
||||
'arg1 :: AnyOf(',
|
||||
'href="',
|
||||
'Class SomeClass',
|
||||
'Int>',
|
||||
'return value :: <None>',
|
||||
'origin: %s' % (self.fs_root.join('pkg/func.py'),),
|
||||
'def func(arg1):',
|
||||
])
|
||||
_checkhtmlsnippet(html)
|
||||
|
||||
def test_build_function_pages(self):
|
||||
data = self.apb.prepare_function_pages(self.namespace_tree,
|
||||
['main.sub.func'])
|
||||
self.apb.build_function_pages(data, self.project)
|
||||
funcfile = self.base.join('api/main.sub.func.html')
|
||||
assert funcfile.check()
|
||||
html = funcfile.read()
|
||||
_checkhtml(html)
|
||||
|
||||
def test_build_class_view(self):
|
||||
snippet = self.apb.build_class_view('main.SomeClass')
|
||||
html = snippet.unicode()
|
||||
_checkhtmlsnippet(html)
|
||||
|
||||
def test_build_class_pages(self):
|
||||
data, methodsdata = self.apb.prepare_class_pages(self.namespace_tree,
|
||||
['main.SomeClass',
|
||||
'main.SomeSubClass'])
|
||||
self.apb.build_class_pages(data, self.project)
|
||||
clsfile = self.base.join('api/main.SomeClass.html')
|
||||
assert clsfile.check()
|
||||
html = clsfile.read()
|
||||
_checkhtml(html)
|
||||
|
||||
def test_build_class_pages_instance(self):
|
||||
data, methodsdata = self.apb.prepare_class_pages(self.namespace_tree,
|
||||
['main.SomeClass',
|
||||
'main.SomeSubClass',
|
||||
'main.SomeInstance'])
|
||||
self.apb.build_class_pages(data, self.project)
|
||||
clsfile = self.base.join('api/main.SomeInstance.html')
|
||||
assert clsfile.check()
|
||||
html = clsfile.read()
|
||||
print html
|
||||
run_string_sequence_test(html, [
|
||||
'instance of SomeClass()',
|
||||
])
|
||||
|
||||
def test_build_class_pages_nav_links(self):
|
||||
data, methodsdata = self.apb.prepare_class_pages(self.namespace_tree,
|
||||
['main.SomeSubClass',
|
||||
'main.SomeClass'])
|
||||
# fake some stuff that would be built from other methods
|
||||
self.linker.set_link('', 'api/index.html')
|
||||
self.linker.set_link('main', 'api/main.html')
|
||||
self.apb.build_class_pages(data, self.project)
|
||||
clsfile = self.base.join('api/main.SomeClass.html')
|
||||
assert clsfile.check()
|
||||
html = clsfile.read()
|
||||
print html
|
||||
run_string_sequence_test(html, [
|
||||
'href="../style.css"',
|
||||
'href="index.html">pkg',
|
||||
'href="main.html">main',
|
||||
'href="main.SomeClass.html">SomeClass',
|
||||
'href="main.SomeSubClass.html">SomeSubClass',
|
||||
])
|
||||
assert not 'href="main.sub.func.html"' in html
|
||||
py.test.skip('WIP from here')
|
||||
assert 'href="main.sub.html">sub' in html
|
||||
_checkhtml(html)
|
||||
|
||||
def test_build_class_pages_base_link(self):
|
||||
data, methodsdata = self.apb.prepare_class_pages(self.namespace_tree,
|
||||
['main.SomeSubClass',
|
||||
'main.SomeClass'])
|
||||
self.apb.build_class_pages(data, self.project)
|
||||
clsfile = self.base.join('api/main.SomeSubClass.html')
|
||||
assert clsfile.check()
|
||||
html = clsfile.read()
|
||||
print html
|
||||
run_string_sequence_test(html, [
|
||||
'href="../style.css"',
|
||||
'href="main.SomeClass.html">main.SomeClass',
|
||||
])
|
||||
_checkhtml(html)
|
||||
|
||||
def test_source_links(self):
|
||||
data, methodsdata = self.apb.prepare_class_pages(self.namespace_tree,
|
||||
['main.SomeSubClass',
|
||||
'main.SomeClass'])
|
||||
sourcedata = self.spb.prepare_pages(self.fs_root)
|
||||
self.apb.build_class_pages(data, self.project)
|
||||
self.spb.build_pages(sourcedata, self.project, self.fs_root)
|
||||
funchtml = self.base.join('api/main.SomeClass.html').read()
|
||||
assert funchtml.find('href="../source/pkg/someclass.py.html"') > -1
|
||||
_checkhtml(funchtml)
|
||||
|
||||
def test_build_namespace_pages(self):
|
||||
data = self.apb.prepare_namespace_pages(self.namespace_tree)
|
||||
self.apb.build_namespace_pages(data, self.project)
|
||||
mainfile = self.base.join('api/main.html')
|
||||
assert mainfile.check()
|
||||
html = mainfile.read()
|
||||
print html
|
||||
run_string_sequence_test(html, [
|
||||
'index of main namespace',
|
||||
])
|
||||
otherfile = self.base.join('api/other.html')
|
||||
assert otherfile.check()
|
||||
otherhtml = otherfile.read()
|
||||
print otherhtml
|
||||
run_string_sequence_test(otherhtml, [
|
||||
'index of other namespace',
|
||||
])
|
||||
_checkhtml(html)
|
||||
_checkhtml(otherhtml)
|
||||
|
||||
def test_build_namespace_pages_index(self):
|
||||
data = self.apb.prepare_namespace_pages(self.namespace_tree)
|
||||
self.apb.build_namespace_pages(data, self.project)
|
||||
pkgfile = self.base.join('api/index.html')
|
||||
assert pkgfile.check()
|
||||
html = pkgfile.read()
|
||||
assert 'index of project pkg namespace' in html
|
||||
_checkhtml(html)
|
||||
|
||||
def test_build_namespace_pages_subnamespace(self):
|
||||
data = self.apb.prepare_namespace_pages(self.namespace_tree)
|
||||
self.apb.build_namespace_pages(data, self.project)
|
||||
subfile = self.base.join('api/main.sub.html')
|
||||
assert subfile.check()
|
||||
html = subfile.read()
|
||||
_checkhtml(html)
|
||||
|
||||
def test_build_function_api_pages_nav(self):
|
||||
data = self.apb.prepare_function_pages(self.namespace_tree,
|
||||
['main.sub.func'])
|
||||
self.linker.set_link('', 'api/index.html')
|
||||
self.linker.set_link('main', 'api/main.html')
|
||||
self.linker.set_link('main.sub', 'api/main.sub.html')
|
||||
self.apb.build_function_pages(data, self.project)
|
||||
funcfile = self.base.join('api/main.sub.func.html')
|
||||
html = funcfile.read()
|
||||
print html
|
||||
run_string_sequence_test(html, [
|
||||
'<a href="index.html">',
|
||||
'<a href="main.html">',
|
||||
'<a href="main.sub.html">',
|
||||
'<a href="main.sub.func.html">',
|
||||
])
|
||||
_checkhtml(html)
|
||||
|
||||
def test_build_function_navigation(self):
|
||||
self.apb.prepare_namespace_pages(self.namespace_tree)
|
||||
self.apb.prepare_function_pages(self.namespace_tree, ['main.sub.func'])
|
||||
nav = self.apb.build_navigation('main.sub', ['main.sub.func'],
|
||||
'main.sub.func')
|
||||
html = nav.unicode(indent=0)
|
||||
print html.encode('UTF-8')
|
||||
assert (u'<div><a href="api/index.html">pkg</a></div>'
|
||||
u'<div>\xa0\xa0<a href="api/main.html">main</a></div>'
|
||||
u'<div>\xa0\xa0\xa0\xa0'
|
||||
u'<a href="api/main.sub.html">sub</a></div>'
|
||||
u'<div class="selected">\xa0\xa0\xa0\xa0\xa0\xa0'
|
||||
u'<a href="api/main.sub.func.html">func</a></div>'
|
||||
) in html
|
||||
|
||||
def test_build_root_namespace_view(self):
|
||||
data = self.apb.prepare_namespace_pages(self.namespace_tree)
|
||||
self.apb.build_namespace_pages(data, self.project)
|
||||
rootfile = self.base.join('api/index.html')
|
||||
assert rootfile.check()
|
||||
html = rootfile.read()
|
||||
assert '<a href="main.html">' in html
|
||||
_checkhtml(html)
|
||||
|
||||
class TestSourcePageBuilder(AbstractBuilderTest):
|
||||
def test_build_pages(self):
|
||||
data = self.spb.prepare_pages(self.fs_root)
|
||||
self.spb.build_pages(data, self.project, self.fs_root)
|
||||
somesource = self.base.join('source/pkg/func.py.html').read()
|
||||
_checkhtml(somesource)
|
||||
|
||||
def test_build_pages_nav(self):
|
||||
data = self.spb.prepare_pages(self.fs_root)
|
||||
self.spb.build_pages(data, self.project, self.fs_root)
|
||||
funcsource = self.base.join('source/pkg/func.py.html')
|
||||
assert funcsource.check(file=True)
|
||||
html = funcsource.read()
|
||||
print html
|
||||
run_string_sequence_test(html, [
|
||||
'href="../../style.css"',
|
||||
'<a href="../index.html">root</a>',
|
||||
'<a href="index.html">pkg</a>',
|
||||
'<a href="someclass.py.html">someclass.py</a>',
|
||||
'<a href="somesubclass.py.html">somesubclass.py</a>',
|
||||
])
|
||||
|
||||
def test_build_dir_page(self):
|
||||
data = self.spb.prepare_pages(self.fs_root)
|
||||
self.spb.build_pages(data, self.project, self.fs_root)
|
||||
pkgindex = self.base.join('source/pkg/index.html')
|
||||
assert pkgindex.check(file=True)
|
||||
html = pkgindex.read()
|
||||
print html
|
||||
run_string_sequence_test(html, [
|
||||
'href="../../style.css"',
|
||||
'<a href="../index.html">root</a>',
|
||||
'<a href="index.html">pkg</a>',
|
||||
'<a href="func.py.html">func.py</a>',
|
||||
'<a href="someclass.py.html">someclass.py</a>',
|
||||
'<a href="somesubclass.py.html">somesubclass.py</a>',
|
||||
'<h2>directories</h2>',
|
||||
'<h2>files</h2>'])
|
||||
_checkhtml(html)
|
||||
|
||||
def test_build_navigation_root(self):
|
||||
self.spb.prepare_pages(self.fs_root)
|
||||
nav = self.spb.build_navigation(self.fs_root.join('pkg'))
|
||||
html = nav.unicode(indent=0)
|
||||
print html.encode('UTF-8')
|
||||
run_string_sequence_test(html, [
|
||||
'href="source/index.html">root',
|
||||
'href="source/pkg/index.html">pkg',
|
||||
'href="source/pkg/func.py.html">func.py',
|
||||
'href="source/pkg/someclass.py.html">someclass.py',
|
||||
'href="source/pkg/somesubclass.py.html">somesubclass.py',
|
||||
])
|
||||
|
|
@ -0,0 +1,116 @@
|
|||
""" functional test for apigen.py
|
||||
|
||||
script to build api + source docs from py.test
|
||||
"""
|
||||
|
||||
import py
|
||||
|
||||
def setup_fs_project():
|
||||
temp = py.test.ensuretemp('apigen_functional')
|
||||
temp.ensure("pkg/func.py").write(py.code.Source("""\
|
||||
def func(arg1):
|
||||
"docstring"
|
||||
|
||||
def func_2(arg1, arg2):
|
||||
return arg1(arg2)
|
||||
"""))
|
||||
temp.ensure('pkg/sometestclass.py').write(py.code.Source("""\
|
||||
class SomeTestClass(object):
|
||||
" docstring sometestclass "
|
||||
def __init__(self, somevar):
|
||||
self.somevar = somevar
|
||||
|
||||
def get_somevar(self):
|
||||
" get_somevar docstring "
|
||||
return self.somevar
|
||||
"""))
|
||||
temp.ensure('pkg/sometestsubclass.py').write(py.code.Source("""\
|
||||
from sometestclass import SomeTestClass
|
||||
class SomeTestSubClass(SomeTestClass):
|
||||
" docstring sometestsubclass "
|
||||
def get_somevar(self):
|
||||
return self.somevar + 1
|
||||
"""))
|
||||
temp.ensure('pkg/somenamespace.py').write(py.code.Source("""\
|
||||
def foo():
|
||||
return 'bar'
|
||||
def baz(qux):
|
||||
return qux
|
||||
"""))
|
||||
temp.ensure("pkg/__init__.py").write(py.code.Source("""\
|
||||
from py.initpkg import initpkg
|
||||
initpkg(__name__, exportdefs = {
|
||||
'main.sub.func': ("./func.py", "func"),
|
||||
'main.func': ("./func.py", "func_2"),
|
||||
'main.SomeTestClass': ('./sometestclass.py', 'SomeTestClass'),
|
||||
'main.SomeTestSubClass': ('./sometestsubclass.py',
|
||||
'SomeTestSubClass'),
|
||||
})
|
||||
"""))
|
||||
temp.ensure('pkg/test/test_pkg.py').write(py.code.Source("""\
|
||||
import py
|
||||
py.std.sys.path.insert(0,
|
||||
py.magic.autopath().dirpath().dirpath().dirpath().strpath)
|
||||
import pkg
|
||||
|
||||
# this mainly exists to provide some data to the tracer
|
||||
def test_pkg():
|
||||
s = pkg.main.SomeTestClass(10)
|
||||
assert s.get_somevar() == 10
|
||||
s = pkg.main.SomeTestClass('10')
|
||||
assert s.get_somevar() == '10'
|
||||
s = pkg.main.SomeTestSubClass(10)
|
||||
assert s.get_somevar() == 11
|
||||
s = pkg.main.SomeTestSubClass('10')
|
||||
py.test.raises(TypeError, 's.get_somevar()')
|
||||
assert pkg.main.sub.func(10) is None
|
||||
assert pkg.main.sub.func(20) is None
|
||||
s = pkg.main.func(pkg.main.SomeTestClass, 10)
|
||||
assert isinstance(s, pkg.main.SomeTestClass)
|
||||
"""))
|
||||
return temp, 'pkg'
|
||||
|
||||
def test_apigen_functional():
|
||||
fs_root, package_name = setup_fs_project()
|
||||
tempdir = py.test.ensuretemp('test_apigen_functional_results')
|
||||
parentdir = py.magic.autopath().dirpath().dirpath()
|
||||
pkgdir = fs_root.join('pkg')
|
||||
try:
|
||||
output = py.process.cmdexec('APIGEN_TARGET="%s" py.test --session=L '
|
||||
'--apigen="%s/apigen.py" "%s"' % (
|
||||
tempdir, parentdir, pkgdir))
|
||||
except py.error.Error, e:
|
||||
print e.out
|
||||
raise
|
||||
assert output.lower().find('traceback') == -1
|
||||
|
||||
# just some quick content checks
|
||||
apidir = tempdir.join('api')
|
||||
assert apidir.check(dir=True)
|
||||
sometestclass_api = apidir.join('main.SomeTestClass.html')
|
||||
assert sometestclass_api.check(file=True)
|
||||
html = sometestclass_api.read()
|
||||
assert '<a href="main.SomeTestClass.html">SomeTestClass</a>' in html
|
||||
# XXX not linking to method files anymore
|
||||
#sometestclass_init_api = apidir.join('main.SomeTestClass.__init__.html')
|
||||
#assert sometestclass_init_api.check(file=True)
|
||||
#assert sometestclass_init_api.read().find(
|
||||
# '<a href="main.SomeTestClass.__init__.html">__init__</a>') > -1
|
||||
namespace_api = apidir.join('main.html')
|
||||
assert namespace_api.check(file=True)
|
||||
html = namespace_api.read()
|
||||
assert '<a href="main.SomeTestClass.html">SomeTestClass</a>' in html
|
||||
|
||||
sourcedir = tempdir.join('source')
|
||||
assert sourcedir.check(dir=True)
|
||||
sometestclass_source = sourcedir.join('sometestclass.py.html')
|
||||
assert sometestclass_source.check(file=True)
|
||||
html = sometestclass_source.read()
|
||||
assert '<div class="project_title">sources for sometestclass.py</div>' in html
|
||||
|
||||
# XXX later...
|
||||
#index = sourcedir.join('index.html')
|
||||
#assert index.check(file=True)
|
||||
#html = index.read()
|
||||
#assert '<a href="main/index.html">main</a>' in html
|
||||
|
|
@ -0,0 +1,45 @@
|
|||
import py
|
||||
from py.__.apigen import htmlgen
|
||||
from py.__.apigen.linker import Linker
|
||||
|
||||
def test_create_namespace_tree():
|
||||
tree = htmlgen.create_namespace_tree(['foo.bar.baz'])
|
||||
assert tree == {'': ['foo'],
|
||||
'foo': ['foo.bar'],
|
||||
'foo.bar': ['foo.bar.baz']}
|
||||
tree = htmlgen.create_namespace_tree(['foo.bar.baz', 'foo.bar.qux'])
|
||||
assert tree == {'': ['foo'],
|
||||
'foo': ['foo.bar'],
|
||||
'foo.bar': ['foo.bar.baz', 'foo.bar.qux']}
|
||||
tree = htmlgen.create_namespace_tree(['pkg.sub.func',
|
||||
'pkg.SomeClass',
|
||||
'pkg.SomeSubClass'])
|
||||
assert tree == {'': ['pkg'],
|
||||
'pkg.sub': ['pkg.sub.func'],
|
||||
'pkg': ['pkg.sub', 'pkg.SomeClass',
|
||||
'pkg.SomeSubClass']}
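# The following is only an illustrative sketch of the behaviour asserted
# above -- it is NOT the real htmlgen.create_namespace_tree, just a reference
# re-implementation documenting the expected mapping from dotted names to a
# parent -> children tree (with '' acting as the implicit root).
def _create_namespace_tree_sketch(dotted_names):
    tree = {}
    for dotted_name in dotted_names:
        parts = dotted_name.split('.')
        for i in range(len(parts)):
            parent = '.'.join(parts[:i])        # '' for top-level names
            child = '.'.join(parts[:i + 1])
            children = tree.setdefault(parent, [])
            if child not in children:
                children.append(child)          # keep insertion order
    return tree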
|
||||
|
||||
def test_build_navitem_html():
|
||||
l = Linker()
|
||||
l.set_link('spam.eggs.foo', 'foo.html')
|
||||
h = htmlgen.build_navitem_html(l, 'foo', 'spam.eggs.foo', 0, False)
|
||||
assert unicode(h) == u'<div><a href="foo.html">foo</a></div>'
|
||||
h = htmlgen.build_navitem_html(l, 'bar', 'spam.eggs.foo', 1, True)
|
||||
assert unicode(h) == (u'<div class="selected">\xa0\xa0'
|
||||
u'<a href="foo.html">bar</a></div>')
|
||||
|
||||
def test_source_dirs_files():
|
||||
temp = py.test.ensuretemp('test_source_dirs_files')
|
||||
temp.join('dir').ensure(dir=True)
|
||||
temp.join('dir/file1.py').ensure(file=True)
|
||||
temp.join('dir/file2.pyc').ensure(file=True)
|
||||
temp.join('dir/file3.c').ensure(file=True)
|
||||
temp.join('dir/.hidden_file').ensure(file=True)
|
||||
temp.join('dir/sub').ensure(dir=True)
|
||||
temp.join('dir/.hidden_dir').ensure(dir=True)
|
||||
dirs, files = htmlgen.source_dirs_files(temp.join('dir'))
|
||||
dirnames = py.builtin.sorted([d.basename for d in dirs])
|
||||
filenames = py.builtin.sorted([f.basename for f in files])
|
||||
assert dirnames == ['sub']
|
||||
assert filenames == ['file1.py', 'file3.c']
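# Minimal sketch of what source_dirs_files() is expected to do, based only on
# the assertions above (hidden entries and compiled .pyc files are skipped);
# the real implementation lives in py.__.apigen.htmlgen and may differ.
def _source_dirs_files_sketch(fspath):
    dirs = []
    files = []
    for child in fspath.listdir():
        if child.basename.startswith('.'):
            continue                        # hidden files and directories
        if child.check(dir=True):
            dirs.append(child)
        elif not child.basename.endswith('.pyc'):
            files.append(child)
    return dirs, files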
|
||||
|
|
@ -0,0 +1,44 @@
|
|||
import py
|
||||
from py.__.apigen.linker import Linker, getrelfspath, relpath
|
||||
|
||||
class TestLinker(object):
|
||||
def test_get_target(self):
|
||||
linker = Linker()
|
||||
lazyhref = linker.get_lazyhref('py.path.local')
|
||||
linker.set_link('py.path.local', 'py/path/local.html')
|
||||
relpath = linker.get_target('py.path.local')
|
||||
assert relpath == 'py/path/local.html'
|
||||
|
||||
def test_target_relative(self):
|
||||
linker = Linker()
|
||||
lazyhref = linker.get_lazyhref('py.path.local')
|
||||
linker.set_link('py.path.local', 'py/path/local.html')
|
||||
relpath = linker.call_withbase('py/index.html',
|
||||
linker.get_target, 'py.path.local')
|
||||
assert relpath == 'path/local.html'
|
||||
|
||||
|
||||
|
||||
testspec = [
|
||||
'a a/b a/b',
|
||||
'/a /a/b a/b',
|
||||
'a b b',
|
||||
'/a /b b',
|
||||
'a/b c/d ../c/d',
|
||||
'/a/b /c/d ../c/d',
|
||||
'a/b a ../a',
|
||||
'/a/b /a ../a',
|
||||
]
|
||||
|
||||
def gen_check(frompath, topath, expected):
|
||||
result = relpath(frompath, topath)
|
||||
print "linking", frompath, "to", topath
|
||||
assert result == expected
|
||||
|
||||
def test_gen_check():
|
||||
for line in testspec:
|
||||
frompath, topath, expected = line.split()
|
||||
yield gen_check, frompath, topath, expected
|
||||
|
||||
def test_check_incompatible():
|
||||
py.test.raises(ValueError, "relpath('/a', 'b')")
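# Reference model of the relpath() semantics exercised by testspec above (not
# the linker.py implementation): both arguments are *file* paths and the
# result is the path to `topath` relative to the directory holding `frompath`.
def _relpath_sketch(frompath, topath, sep='/'):
    if frompath.startswith(sep) != topath.startswith(sep):
        raise ValueError("cannot relate absolute and relative paths")
    fromparts = frompath.split(sep)[:-1]    # directory containing frompath
    toparts = topath.split(sep)
    # strip common leading directories, but never the final target component
    while fromparts and len(toparts) > 1 and fromparts[0] == toparts[0]:
        fromparts.pop(0)
        toparts.pop(0)
    return sep.join(['..'] * len(fromparts) + toparts)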
|
|
@ -0,0 +1,68 @@

* format docstrings more nicely (with tests)

* have the API function view be as informative as possible
  without having to go to the "single method" view
  (do we even need a single method view?), for example:

  browsing the class views (and clicking on methods)
  should always make it obvious which class is being
  viewed.  method views (when navigating there through
  the class view) should also have the source there

* have class-level attributes be displayed

* use "inherited" docstrings, i.e. for

    class A:
        def meth(self):
            "doc1"
    class B(A):
        def meth(self):
            pass

  B.meth should display the A.meth docstring, probably
  with special formatting (italics or so).

* factor out some common code in the build_* functions

* refactor the apigen/rsession interaction to become
  cleaner (e.g. apigen's get_documentable_items should
  be separately tested and the caller should not need
  to guess what it will get, I think)

* look out for and streamline all apigen/source-viewer
  documentation into one document

* consider automating dependencies (a slightly more concrete
  sketch follows at the end of this list):

  e.g. something like queue_render(page, fspath, linker, ...)
  would defer the rendering until later.  then a loop does:

    maxlength = len(queue)
    while queue:
        page, fspath, linker, ... = queue.get()
        # fill outputpath/link here or even earlier
        if all_links_resolve(page, linker):
            render it and write to the filesystem
            maxlength = len(queue)
        else:
            queue.append(...)
            maxlength -= 1
            if maxlength <= 0:
                print "ERROR: seems I can't make progress"
                print "unresolved links follow: "
                ...
                print "unresolved pages/fspaths:"
                ...
                XXX maybe: print "filling linker with dummy hrefs, and rendering anyway"
                ...
                raise ...

* also we might have a support function for tests that
  fills the linker with "dummy hrefs" for certain types
  like source links

* XXX list more here
|
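  A slightly more concrete version of the loop above (all helpers here --
  page.links(), page.render(), linker.resolves() -- are hypothetical and only
  illustrate the intended control flow, they do not exist yet):

    def drain_render_queue(queue, linker):
        budget = len(queue)             # a full pass without progress aborts
        while queue:
            page = queue.pop(0)
            unresolved = [l for l in page.links() if not linker.resolves(l)]
            if not unresolved:
                page.render()           # write to the filesystem
                budget = len(queue)     # progress made, reset the budget
            else:
                queue.append(page)      # retry later
                budget -= 1
                if budget <= 0:
                    raise ValueError("cannot make progress, unresolved "
                                     "links: %r" % (unresolved,))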
|
@ -0,0 +1,358 @@
|
|||
|
||||
import py
|
||||
from py.__.apigen.tracer import model
|
||||
|
||||
import types
|
||||
import inspect
|
||||
import copy
|
||||
|
||||
MAX_CALL_SITES = 20
|
||||
|
||||
set = py.builtin.set
|
||||
|
||||
def is_private(name):
|
||||
return name.startswith('_') and not name.startswith('__')
|
||||
|
||||
class CallFrame(object):
|
||||
def __init__(self, frame):
|
||||
self.filename = frame.code.raw.co_filename
|
||||
self.lineno = frame.lineno
|
||||
self.firstlineno = frame.code.firstlineno
|
||||
self.source = frame.code.source()
|
||||
|
||||
def _getval(self):
|
||||
return (self.filename, self.lineno)
|
||||
|
||||
def __hash__(self):
|
||||
return hash(self._getval())
|
||||
|
||||
def __eq__(self, other):
|
||||
return self._getval() == other._getval()
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
||||
class CallStack(object):
|
||||
def __init__(self, tb):
|
||||
#if isinstance(tb, py.code.Traceback):
|
||||
# self.tb = tb
|
||||
#else:
|
||||
# self.tb = py.code.Traceback(tb)
|
||||
self.tb = [CallFrame(frame) for frame in tb]
|
||||
|
||||
#def _getval(self):
|
||||
# return [(frame.code.raw.co_filename, frame.lineno+1) for frame
|
||||
# in self]
|
||||
|
||||
def __hash__(self):
|
||||
return hash(tuple(self.tb))
|
||||
|
||||
def __eq__(self, other):
|
||||
return self.tb == other.tb
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
||||
#def __getattr__(self, attr):
|
||||
# return getattr(self.tb, attr)
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self.tb)
|
||||
|
||||
def __getitem__(self, item):
|
||||
return self.tb[item]
|
||||
|
||||
def __len__(self):
|
||||
return len(self.tb)
|
||||
|
||||
def __cmp__(self, other):
|
||||
return cmp(self.tb, other.tb)
|
||||
|
||||
def cut_stack(stack, frame, upward_frame=None):
|
||||
if hasattr(frame, 'raw'):
|
||||
frame = frame.raw
|
||||
if upward_frame:
|
||||
if hasattr(upward_frame, 'raw'):
|
||||
upward_frame = upward_frame.raw
|
||||
return CallStack([py.code.Frame(i) for i in stack[stack.index(frame):\
|
||||
stack.index(upward_frame)+1]])
|
||||
return CallStack([py.code.Frame(i) for i in stack[stack.index(frame):]])
|
||||
|
||||
##class CallSite(object):
|
||||
## def __init__(self, filename, lineno):
|
||||
## self.filename = filename
|
||||
## self.lineno = lineno
|
||||
##
|
||||
## def get_tuple(self):
|
||||
## return self.filename, self.lineno
|
||||
##
|
||||
## def __hash__(self):
|
||||
## return hash((self.filename, self.lineno))
|
||||
##
|
||||
## def __eq__(self, other):
|
||||
## return (self.filename, self.lineno) == (other.filename, other.lineno)
|
||||
##
|
||||
## def __ne__(self, other):
|
||||
## return not self == other
|
||||
##
|
||||
## def __cmp__(self, other):
|
||||
## if self.filename < other.filename:
|
||||
## return -1
|
||||
## if self.filename > other.filename:
|
||||
## return 1
|
||||
## if self.lineno < other.lineno:
|
||||
## return -1
|
||||
## if self.lineno > other.lineno:
|
||||
## return 1
|
||||
## return 0
|
||||
|
||||
class NonHashableObject(object):
|
||||
def __init__(self, cls):
|
||||
self.cls = cls
|
||||
|
||||
def __hash__(self):
|
||||
raise NotImplementedError("Object of type %s are unhashable" % self.cls)
|
||||
|
||||
class Desc(object):
|
||||
def __init__(self, name, pyobj, **kwargs):
|
||||
self.pyobj = pyobj
|
||||
self.is_degenerated = False
|
||||
self.name = name
|
||||
if type(self) is Desc:
|
||||
# do not override property...
|
||||
self.code = NonHashableObject(self.__class__) # dummy thing that makes code unhashable
|
||||
# we make new base class instead of using pypy's one because
|
||||
# of type restrictions of pypy descs
|
||||
|
||||
def __hash__(self):
|
||||
return hash(self.code)
|
||||
|
||||
def __eq__(self, other):
|
||||
if isinstance(other, Desc):
|
||||
return self.code == other.code
|
||||
if isinstance(other, types.CodeType):
|
||||
return self.code == other
|
||||
if isinstance(other, tuple) and len(other) == 2:
|
||||
return self.code == other
|
||||
return False
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
# This set of functions will not work on Desc, because we need to
|
||||
# define code somehow
|
||||
|
||||
class FunctionDesc(Desc):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(FunctionDesc, self).__init__(*args, **kwargs)
|
||||
self.inputcells = [model.s_ImpossibleValue for i in xrange(self.\
|
||||
code.co_argcount)]
|
||||
self.call_sites = {}
|
||||
self.keep_frames = kwargs.get('keep_frames', False)
|
||||
self.frame_copier = kwargs.get('frame_copier', lambda x:x)
|
||||
self.retval = model.s_ImpossibleValue
|
||||
self.exceptions = {}
|
||||
|
||||
def consider_call(self, inputcells):
|
||||
for cell_num, cell in enumerate(inputcells):
|
||||
self.inputcells[cell_num] = model.unionof(cell, self.inputcells[cell_num])
|
||||
|
||||
def consider_call_site(self, frame, cut_frame):
|
||||
if len(self.call_sites) > MAX_CALL_SITES:
|
||||
return
|
||||
stack = [i[0] for i in inspect.stack()]
|
||||
cs = cut_stack(stack, frame, cut_frame)
|
||||
self.call_sites[cs] = cs
|
||||
|
||||
def consider_exception(self, exc, value):
|
||||
self.exceptions[exc] = True
|
||||
|
||||
def get_call_sites(self):
|
||||
# convenient accessor for various data which we keep there
|
||||
if not self.keep_frames:
|
||||
return [(key, val) for key, val in self.call_sites.iteritems()]
|
||||
else:
|
||||
lst = []
|
||||
for key, val in self.call_sites.iteritems():
|
||||
for frame in val:
|
||||
lst.append((key, frame))
|
||||
return lst
|
||||
|
||||
def consider_return(self, arg):
|
||||
self.retval = model.unionof(arg, self.retval)
|
||||
|
||||
def consider_start_locals(self, frame):
|
||||
pass
|
||||
|
||||
def consider_end_locals(self, frame):
|
||||
pass
|
||||
|
||||
def getcode(self):
|
||||
return self.pyobj.func_code
|
||||
code = property(getcode)
|
||||
|
||||
def get_local_changes(self):
|
||||
return {}
|
||||
|
||||
class ClassDesc(Desc):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(ClassDesc, self).__init__(*args, **kwargs)
|
||||
self.fields = {}
|
||||
# we'll gather information about methods and possibly
|
||||
# other variables encountered here
|
||||
|
||||
def getcode(self):
|
||||
# This is a hack. We try to return something as close to our __init__
# as possible while still being a hashable object
|
||||
if hasattr(self.pyobj, '__init__'):
|
||||
if hasattr(self.pyobj.__init__, 'im_func') and \
|
||||
hasattr(self.pyobj.__init__.im_func, 'func_code'):
|
||||
result = self.pyobj.__init__.im_func.func_code
|
||||
else:
|
||||
result = self.pyobj.__init__
|
||||
else:
|
||||
result = self.pyobj
|
||||
try:
|
||||
hash(result)
|
||||
except (KeyboardInterrupt, SystemExit):
|
||||
raise
|
||||
except: # XXX UUuuuu bare except here. What can it really raise???
|
||||
try:
|
||||
hash(self.pyobj)
|
||||
result = self.pyobj
|
||||
except:
|
||||
result = self
|
||||
return result
|
||||
code = property(getcode)
|
||||
|
||||
def consider_call(self, inputcells):
|
||||
if '__init__' in self.fields:
|
||||
md = self.fields['__init__']
|
||||
else:
|
||||
md = MethodDesc(self.name + '.__init__', self.pyobj.__init__)
|
||||
self.fields['__init__'] = md
|
||||
md.consider_call(inputcells)
|
||||
|
||||
def consider_return(self, arg):
|
||||
pass # we *know* what return value we do have
|
||||
|
||||
def consider_exception(self, exc, value):
|
||||
if '__init__' in self.fields:
|
||||
md = self.fields['__init__']
|
||||
else:
|
||||
md = MethodDesc(self.name + '.__init__', self.pyobj.__init__)
|
||||
self.fields['__init__'] = md
|
||||
md.consider_exception(exc, value)
|
||||
|
||||
def consider_start_locals(self, frame):
|
||||
if '__init__' in self.fields:
|
||||
md = self.fields['__init__']
|
||||
md.consider_start_locals(frame)
|
||||
|
||||
def consider_end_locals(self, frame):
|
||||
if '__init__' in self.fields:
|
||||
md = self.fields['__init__']
|
||||
md.consider_end_locals(frame)
|
||||
|
||||
def consider_call_site(self, frame, cut_frame):
|
||||
self.fields['__init__'].consider_call_site(frame, cut_frame)
|
||||
|
||||
def add_method_desc(self, name, methoddesc):
|
||||
self.fields[name] = methoddesc
|
||||
|
||||
def getfields(self):
|
||||
# return fields of values that have been used
|
||||
l = [i for i, v in self.fields.iteritems() if not is_private(i)]
|
||||
return l
|
||||
|
||||
def getbases(self):
|
||||
bases = []
|
||||
tovisit = [self.pyobj]
|
||||
while tovisit:
|
||||
current = tovisit.pop()
|
||||
if current is not self.pyobj:
|
||||
bases.append(current)
|
||||
tovisit += [b for b in current.__bases__ if b not in bases]
|
||||
return bases
|
||||
bases = property(getbases)
|
||||
|
||||
## def has_code(self, code):
|
||||
## # check __init__ method
|
||||
## return self.pyobj.__init__.im_func.func_code is code
|
||||
##
|
||||
## def consider_call(self, inputcells):
|
||||
## # special thing, make MethodDesc for __init__
|
||||
##
|
||||
##
|
||||
class MethodDesc(FunctionDesc):
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(MethodDesc, self).__init__(*args, **kwargs)
|
||||
self.old_dict = {}
|
||||
self.changeset = {}
|
||||
|
||||
# right now it's no different from FunctionDesc; only the code property differs
|
||||
def getcode(self):
|
||||
return self.pyobj.im_func.func_code
|
||||
code = property(getcode)
|
||||
## def has_code(self, code):
|
||||
## return self.pyobj.im_func.func_code is code
|
||||
|
||||
def __hash__(self):
|
||||
return hash((self.code, self.pyobj.im_class))
|
||||
|
||||
def __eq__(self, other):
|
||||
if isinstance(other, tuple):
|
||||
return self.code is other[0] and self.pyobj.im_class is other[1]
|
||||
if isinstance(other, MethodDesc):
|
||||
return self.pyobj is other.pyobj
|
||||
return False
|
||||
|
||||
def consider_start_locals(self, frame):
|
||||
# XXX recursion issues?
|
||||
obj = frame.f_locals[self.pyobj.im_func.func_code.co_varnames[0]]
|
||||
try:
|
||||
if not obj:
|
||||
# static method
|
||||
return
|
||||
except AttributeError:
|
||||
return
|
||||
self.old_dict = self.perform_dict_copy(obj.__dict__)
|
||||
|
||||
def perform_dict_copy(self, d):
|
||||
if d is None:
|
||||
return {}
|
||||
return d.copy()
|
||||
|
||||
def consider_end_locals(self, frame):
|
||||
obj = frame.f_locals[self.pyobj.im_func.func_code.co_varnames[0]]
|
||||
try:
|
||||
if not obj:
|
||||
# static method
|
||||
return
|
||||
except AttributeError:
|
||||
return
|
||||
# store the local changes
|
||||
# update self.changeset
|
||||
self.update_changeset(obj.__dict__)
|
||||
|
||||
def get_local_changes(self):
|
||||
return self.changeset
|
||||
|
||||
def set_changeset(changeset, key, value):
|
||||
if key not in changeset:
|
||||
changeset[key] = set([value])
|
||||
else:
|
||||
changeset[key].add(value)
|
||||
set_changeset = staticmethod(set_changeset)
|
||||
|
||||
def update_changeset(self, new_dict):
|
||||
changeset = self.changeset
|
||||
for k, v in self.old_dict.iteritems():
|
||||
if k not in new_dict:
|
||||
self.set_changeset(changeset, k, "deleted")
|
||||
elif new_dict[k] != v:
|
||||
self.set_changeset(changeset, k, "changed")
|
||||
for k, v in new_dict.iteritems():
|
||||
if k not in self.old_dict:
|
||||
self.set_changeset(changeset, k, "created")
|
||||
return changeset
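# Illustration (not part of the tracer itself): given an instance dict before
# and after a method call, update_changeset() records per-attribute events,
# e.g. with
#
#   old_dict = {'a': 1, 'b': 2}    and    new_dict = {'a': 1, 'b': 3, 'c': 4}
#
# the resulting changeset is {'b': set(['changed']), 'c': set(['created'])};
# a key present in old_dict but missing from new_dict is recorded as 'deleted'.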
|
||||
|
|
@ -0,0 +1,334 @@
|
|||
|
||||
""" This module is keeping track about API informations as well as
|
||||
providing some interface to easily access stored data
|
||||
"""
|
||||
|
||||
import py
|
||||
import sys
|
||||
import types
|
||||
import inspect
|
||||
|
||||
from py.__.apigen.tracer.description import FunctionDesc, ClassDesc, \
|
||||
MethodDesc, Desc
|
||||
|
||||
from py.__.apigen.tracer import model
|
||||
|
||||
sorted = py.builtin.sorted
|
||||
|
||||
class DocStorage(object):
|
||||
""" Class storing info about API
|
||||
"""
|
||||
def consider_call(self, frame, caller_frame, upward_cut_frame=None):
|
||||
assert isinstance(frame, py.code.Frame)
|
||||
desc = self.find_desc(frame.code, frame.raw.f_locals)
|
||||
if desc:
|
||||
self.generalize_args(desc, frame)
|
||||
desc.consider_call_site(caller_frame, upward_cut_frame)
|
||||
desc.consider_start_locals(frame)
|
||||
|
||||
def generalize_args(self, desc, frame):
|
||||
args = [arg for key, arg in frame.getargs()]
|
||||
#self.call_stack.append((desc, args))
|
||||
desc.consider_call([model.guess_type(arg) for arg in args])
|
||||
|
||||
def generalize_retval(self, desc, arg):
|
||||
desc.consider_return(model.guess_type(arg))
|
||||
|
||||
def consider_return(self, frame, arg):
|
||||
assert isinstance(frame, py.code.Frame)
|
||||
desc = self.find_desc(frame.code, frame.raw.f_locals)
|
||||
if desc:
|
||||
self.generalize_retval(desc, arg)
|
||||
desc.consider_end_locals(frame)
|
||||
|
||||
def consider_exception(self, frame, arg):
|
||||
desc = self.find_desc(frame.code, frame.raw.f_locals)
|
||||
if desc:
|
||||
exc_class, value, _ = arg
|
||||
desc.consider_exception(exc_class, value)
|
||||
|
||||
def find_desc(self, code, locals):
|
||||
try:
|
||||
# argh, very fragile specialcasing
|
||||
return self.desc_cache[(code.raw,
|
||||
locals[code.raw.co_varnames[0]].__class__)]
|
||||
except (KeyError, IndexError, AttributeError): # XXX hrmph
|
||||
return self.desc_cache.get(code.raw, None)
|
||||
#for desc in self.descs.values():
|
||||
# if desc.has_code(frame.code.raw):
|
||||
# return desc
|
||||
#return None
|
||||
|
||||
def make_cache(self):
|
||||
self.desc_cache = {}
|
||||
for key, desc in self.descs.iteritems():
|
||||
self.desc_cache[desc] = desc
|
||||
|
||||
def from_dict(self, _dict, keep_frames = False):
|
||||
self.descs = {}
|
||||
for key, val in _dict.iteritems():
|
||||
to_key, to_val = self.make_desc(key, val)
|
||||
if to_key:
|
||||
self.descs[to_key] = to_val
|
||||
self.make_cache()
|
||||
# XXX
|
||||
return self
|
||||
|
||||
# XXX: This function is slowly becoming outdated and might even go away at some
|
||||
# point. The question is whether we want to use tracer.magic or not
|
||||
# at all
|
||||
def add_desc(self, name, value, **kwargs):
|
||||
key = name
|
||||
count = 1
|
||||
while key in self.descs:
|
||||
key = "%s_%d" % (name, count)
|
||||
count += 1
|
||||
key, desc = self.make_desc(key, value, **kwargs)
|
||||
if key:
|
||||
self.descs[key] = desc
|
||||
self.desc_cache[desc] = desc
|
||||
return desc
|
||||
else:
|
||||
return None
|
||||
|
||||
def make_desc(self, key, value, add_desc=True, **kwargs):
|
||||
if isinstance(value, types.FunctionType):
|
||||
desc = FunctionDesc(key, value, **kwargs)
|
||||
elif isinstance(value, (types.ObjectType, types.ClassType)):
|
||||
desc = ClassDesc(key, value, **kwargs)
|
||||
# XXX: This is the special case when we do not have __init__
|
||||
# in dir(value) for some unknown reason. Need to investigate it
|
||||
for name in dir(value) + ['__init__']:
|
||||
field = getattr(value, name, None)
|
||||
if isinstance(field, types.MethodType) and \
|
||||
isinstance(field.im_func, types.FunctionType):
|
||||
real_name = key + '.' + name
|
||||
md = MethodDesc(real_name, field)
|
||||
if add_desc: # XXX hack
|
||||
self.descs[real_name] = md
|
||||
desc.add_method_desc(name, md)
|
||||
# Some other fields as well?
|
||||
elif isinstance(value, types.MethodType):
|
||||
desc = MethodDesc(key, value, **kwargs)
|
||||
else:
|
||||
desc = Desc(key, value, **kwargs)
|
||||
return (key, desc) # How to do it better? I want a desc to be a key
|
||||
# value, but I cannot get full object if I do a lookup
|
||||
|
||||
def from_pkg(self, module, keep_frames=False):
|
||||
self.module = module
|
||||
defs = module.__package__.exportdefs
|
||||
d = {}
|
||||
for key, value in defs.iteritems():
|
||||
chain = key.split('.')
|
||||
base = module
|
||||
for elem in chain:
|
||||
base = getattr(base, elem)
|
||||
if value[1] == '*':
|
||||
d.update(self.get_star_import_tree(base, key))
|
||||
else:
|
||||
d[key] = base
|
||||
self.from_dict(d, keep_frames)
|
||||
# XXX
|
||||
return self
|
||||
|
||||
def get_star_import_tree(self, module, modname):
|
||||
""" deal with '*' entries in an initpkg situation """
|
||||
ret = {}
|
||||
modpath = py.path.local(inspect.getsourcefile(module))
|
||||
pkgpath = module.__package__.getpath()
|
||||
for objname in dir(module):
|
||||
if objname.startswith('_'):
|
||||
continue # also skip __*__ attributes
|
||||
obj = getattr(module, objname)
|
||||
if (isinstance(obj, types.ClassType) or
|
||||
isinstance(obj, types.ObjectType)):
|
||||
try:
|
||||
sourcefile_object = py.path.local(
|
||||
inspect.getsourcefile(obj))
|
||||
except TypeError:
|
||||
continue
|
||||
else:
|
||||
if sourcefile_object.strpath != modpath.strpath:
|
||||
# not in this package
|
||||
continue
|
||||
dotted_name = '%s.%s' % (modname, objname)
|
||||
ret[dotted_name] = obj
|
||||
return ret
|
||||
|
||||
def from_module(self, func):
|
||||
raise NotImplementedError("From module")
|
||||
|
||||
class AbstractDocStorageAccessor(object):
|
||||
def __init__(self):
|
||||
raise NotImplementedError("Purely virtual object")
|
||||
|
||||
def get_function_names(self):
|
||||
""" Returning names of all functions
|
||||
"""
|
||||
|
||||
def get_class_names(self):
|
||||
""" Returning names of all classess
|
||||
"""
|
||||
|
||||
def get_doc(self, name):
|
||||
""" Returning __doc__ of a function
|
||||
"""
|
||||
|
||||
def get_function_definition(self, name):
|
||||
""" Returns definition of a function (source)
|
||||
"""
|
||||
|
||||
def get_function_signature(self, name):
|
||||
""" Returns types of a function
|
||||
"""
|
||||
|
||||
def get_function_callpoints(self, name):
|
||||
""" Returns list of all callpoints
|
||||
"""
|
||||
|
||||
def get_module_name(self):
|
||||
pass
|
||||
|
||||
def get_class_methods(self, name):
|
||||
""" Returns all methods of a class
|
||||
"""
|
||||
|
||||
#def get_object_info(self, key):
|
||||
#
|
||||
|
||||
def get_module_info(self):
|
||||
""" Returns module information
|
||||
"""
|
||||
|
||||
class DocStorageAccessor(AbstractDocStorageAccessor):
|
||||
""" Set of helper functions to access DocStorage, separated in different
|
||||
class to keep abstraction
|
||||
"""
|
||||
def __init__(self, ds):
|
||||
self.ds = ds
|
||||
|
||||
def _get_names(self, filter):
|
||||
return [i for i, desc in self.ds.descs.iteritems() if filter(i, desc)]
|
||||
|
||||
def get_function_names(self):
|
||||
return sorted(self._get_names(lambda i, desc: type(desc) is
|
||||
FunctionDesc))
|
||||
|
||||
def get_class_names(self):
|
||||
return sorted(self._get_names(lambda i, desc: isinstance(desc,
|
||||
ClassDesc)))
|
||||
|
||||
#def get_function(self, name):
|
||||
# return self.ds.descs[name].pyobj
|
||||
|
||||
def get_doc(self, name):
|
||||
return self.ds.descs[name].pyobj.__doc__ or "*Not documented*"
|
||||
|
||||
def get_function_definition(self, name):
|
||||
desc = self.ds.descs[name]
|
||||
assert isinstance(desc, FunctionDesc)
|
||||
code = py.code.Code(desc.code)
|
||||
return code.fullsource[code.firstlineno]
|
||||
|
||||
def get_function_signature(self, name):
|
||||
desc = self.ds.descs[name]
|
||||
# we return pairs of (name, type) here
|
||||
names = desc.pyobj.func_code.co_varnames[
|
||||
:desc.pyobj.func_code.co_argcount]
|
||||
types = desc.inputcells
|
||||
return zip(names, types), desc.retval
|
||||
|
||||
def get_function_source(self, name):
|
||||
desc = self.ds.descs[name]
|
||||
try:
|
||||
return str(py.code.Source(desc.pyobj))
|
||||
except IOError:
|
||||
return "Cannot get source"
|
||||
|
||||
def get_function_callpoints(self, name):
|
||||
# return list of tuple (filename, fileline, frame)
|
||||
return self.ds.descs[name].get_call_sites()
|
||||
|
||||
def get_function_local_changes(self, name):
|
||||
return self.ds.descs[name].get_local_changes()
|
||||
|
||||
def get_function_exceptions(self, name):
|
||||
return sorted([i.__name__ for i in self.ds.descs[name].exceptions.keys()])
|
||||
|
||||
def get_module_name(self):
|
||||
if hasattr(self.ds, 'module'):
|
||||
return self.ds.module.__name__
|
||||
return "Unknown module"
|
||||
|
||||
def get_class_methods(self, name):
|
||||
desc = self.ds.descs[name]
|
||||
assert isinstance(desc, ClassDesc)
|
||||
return sorted(desc.getfields())
|
||||
|
||||
def get_module_info(self):
|
||||
module = getattr(self.ds, 'module', None)
|
||||
if module is None:
|
||||
return "Lack of module info"
|
||||
try:
|
||||
retval = module.__doc__ or "*undocumented*"
|
||||
retval = module.__package__.description
|
||||
retval = module.__package__.long_description
|
||||
except AttributeError:
|
||||
pass
|
||||
return retval
|
||||
|
||||
def get_type_desc(self, _type):
|
||||
# XXX We provide only classes here
|
||||
if not isinstance(_type, model.SomeClass):
|
||||
return None
|
||||
# XXX we might want to cache it at some point
|
||||
for key, desc in self.ds.descs.iteritems():
|
||||
if desc.pyobj == _type.cls:
|
||||
return key, 'class', desc.is_degenerated
|
||||
return None
|
||||
|
||||
def get_method_origin(self, name):
|
||||
method = self.ds.descs[name].pyobj
|
||||
cls = method.im_class
|
||||
if not cls.__bases__:
|
||||
return self.desc_from_pyobj(cls, cls.__name__)
|
||||
curr = cls
|
||||
while curr:
|
||||
for base in curr.__bases__:
|
||||
basefunc = getattr(base, method.im_func.func_name, None)
|
||||
if (basefunc is not None and hasattr(basefunc, 'im_func') and
|
||||
hasattr(basefunc.im_func, 'func_code') and
|
||||
basefunc.im_func.func_code is
|
||||
method.im_func.func_code):
|
||||
curr = base
|
||||
break
|
||||
else:
|
||||
break
|
||||
return self.desc_from_pyobj(curr, curr.__name__)
|
||||
|
||||
def get_possible_base_classes(self, name):
|
||||
cls = self.ds.descs[name].pyobj
|
||||
if not hasattr(cls, '__bases__'):
|
||||
return []
|
||||
retval = []
|
||||
for base in cls.__bases__:
|
||||
desc = self.desc_from_pyobj(base, base.__name__)
|
||||
if desc is not None:
|
||||
retval.append(desc)
|
||||
return retval
|
||||
|
||||
def desc_from_pyobj(self, pyobj, name):
|
||||
for desc in self.ds.descs.values():
|
||||
if isinstance(desc, ClassDesc) and desc.pyobj is pyobj:
|
||||
return desc
|
||||
# otherwise create empty desc
|
||||
key, desc = self.ds.make_desc(name, pyobj, False)
|
||||
#self.ds.descs[key] = desc
|
||||
desc.is_degenerated = True
|
||||
# and make sure we'll not try to link to it directly
|
||||
return desc
|
||||
|
||||
def get_obj(self, name):
|
||||
return self.ds.descs[name].pyobj
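# Typical usage, mirroring what the apigen tests do (``mypkg`` is a
# placeholder for any initpkg-style package, not a real module):
#
#   from py.__.apigen.tracer.tracer import Tracer
#
#   ds = DocStorage().from_pkg(mypkg)
#   tracer = Tracer(ds)
#   tracer.start_tracing()
#   mypkg.main.sub.func(42)         # traced calls feed the descriptions
#   tracer.end_tracing()
#
#   dsa = DocStorageAccessor(ds)
#   dsa.get_function_names()        # -> sorted list of known function names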
|
||||
|
|
@ -0,0 +1,63 @@
|
|||
|
||||
""" magic - some operations which helps to extend PDB with some magic data.
|
||||
Actually there is only explicit tracking of data, might be extended to
|
||||
automatic at some point.
|
||||
"""
|
||||
|
||||
# some magic stuff to have a singleton DocStorage, but initialised explicitly
|
||||
|
||||
import weakref
|
||||
|
||||
import py
|
||||
from py.__.apigen.tracer.docstorage import DocStorage
|
||||
from py.__.apigen.tracer.tracer import Tracer
|
||||
import sys
|
||||
|
||||
class DocStorageKeeper(object):
|
||||
doc_storage = DocStorage()
|
||||
doc_storage.tracer = Tracer(doc_storage)
|
||||
doc_storage.from_dict({})
|
||||
|
||||
def set_storage(cl, ds):
|
||||
cl.doc_storage = ds
|
||||
cl.doc_storage.tracer = Tracer(ds)
|
||||
set_storage = classmethod(set_storage)
|
||||
|
||||
def get_storage():
|
||||
return DocStorageKeeper.doc_storage
|
||||
|
||||
def stack_copier(frame):
|
||||
# copy all stack, not only frame
|
||||
num = 0
|
||||
gather = False
|
||||
stack = []
|
||||
try:
|
||||
while 1:
|
||||
if gather:
|
||||
stack.append(py.code.Frame(sys._getframe(num)))
|
||||
else:
|
||||
if sys._getframe(num) is frame.raw:
|
||||
gather = True
|
||||
num += 1
|
||||
except ValueError:
|
||||
pass
|
||||
return stack
|
||||
|
||||
def trace(keep_frames=False, frame_copier=lambda x:x):
|
||||
def decorator(fun):
|
||||
ds = get_storage()
|
||||
# in case we do not have this function inside doc storage, we
|
||||
# want to have it
|
||||
desc = ds.find_desc(py.code.Code(fun.func_code), {})  # no frame locals available here
|
||||
if desc is None:
|
||||
desc = ds.add_desc(fun.func_name, fun, keep_frames=keep_frames,
|
||||
frame_copier=frame_copier)
|
||||
|
||||
def wrapper(*args, **kwargs):
|
||||
ds.tracer.start_tracing()
|
||||
retval = fun(*args, **kwargs)
|
||||
ds.tracer.end_tracing()
|
||||
return retval
|
||||
|
||||
return wrapper
|
||||
return decorator
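# Example use of the trace() decorator (``add`` is a made-up function, shown
# only to illustrate the intended call pattern): every call to the decorated
# function is traced into the shared DocStorage returned by get_storage().
#
#   @trace()
#   def add(a, b):
#       return a + b
#
#   add(1, 2)                       # records argument and return value types
#   ds = get_storage()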
|
|
@ -0,0 +1,331 @@
|
|||
|
||||
""" model - type system model for apigen
|
||||
"""
|
||||
|
||||
# we implement all the types which are in the types.*, naming
|
||||
# scheme after pypy's
|
||||
|
||||
import py
|
||||
import types
|
||||
|
||||
set = py.builtin.set
|
||||
|
||||
|
||||
# __extend__ and pairtype?
|
||||
class SomeObject(object):
|
||||
typedef = types.ObjectType
|
||||
|
||||
def __repr__(self):
|
||||
return "<%s>" % self.__class__.__name__[4:]
|
||||
|
||||
def unionof(self, other):
|
||||
if isinstance(other, SomeImpossibleValue):
|
||||
return self
|
||||
if isinstance(other, SomeUnion):
|
||||
return other.unionof(self)
|
||||
if self == other:
|
||||
return self
|
||||
return SomeUnion([self, other])
|
||||
|
||||
def gettypedef(self):
|
||||
return self.typedef
|
||||
|
||||
def __hash__(self):
|
||||
return hash(self.__class__)
|
||||
|
||||
def __eq__(self, other):
|
||||
return self.__class__ == other.__class__
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
||||
# this is to provide possibility of eventually linking some stuff
|
||||
def striter(self):
|
||||
yield str(self)
|
||||
|
||||
class SomeUnion(object):
|
||||
# empty typedef
|
||||
def __init__(self, possibilities):
|
||||
self.possibilities = set(possibilities)
|
||||
|
||||
def unionof(self, other):
|
||||
if isinstance(other, SomeUnion):
|
||||
return SomeUnion(self.possibilities.union(other.possibilities))
|
||||
return SomeUnion(list(self.possibilities) + [other])
|
||||
|
||||
def __eq__(self, other):
|
||||
if type(other) is not SomeUnion:
|
||||
return False
|
||||
return self.possibilities == other.possibilities
|
||||
|
||||
def __ne__(self, other):
|
||||
return not self == other
|
||||
|
||||
def __repr__(self):
|
||||
return "AnyOf(%s)" % ", ".join([str(i) for i in list(self.possibilities)])
|
||||
|
||||
def gettypedef(self):
|
||||
return (None, None)
|
||||
|
||||
def striter(self):
|
||||
yield "AnyOf("
|
||||
for num, i in enumerate(self.possibilities):
|
||||
yield i
|
||||
if num != len(self.possibilities) - 1:
|
||||
yield ", "
|
||||
yield ")"
|
||||
|
||||
class SomeBoolean(SomeObject):
|
||||
typedef = types.BooleanType
|
||||
|
||||
class SomeBuffer(SomeObject):
|
||||
typedef = types.BufferType
|
||||
|
||||
class SomeBuiltinFunction(SomeObject):
|
||||
typedef = types.BuiltinFunctionType
|
||||
|
||||
#class SomeBuiltinMethod(SomeObject):
|
||||
# typedef = types.BuiltinMethodType
|
||||
|
||||
class SomeClass(SomeObject):
|
||||
typedef = types.ClassType
|
||||
|
||||
def __init__(self, cls):
|
||||
self.cls = cls
|
||||
self.name = cls.__name__
|
||||
self.id = id(cls)
|
||||
|
||||
def __getstate__(self):
|
||||
return (self.name, self.id)
|
||||
|
||||
def __setstate__(self, state):
|
||||
self.name, self.id = state
|
||||
self.cls = None
|
||||
|
||||
def __hash__(self):
|
||||
return hash("Class") ^ hash(self.id)
|
||||
|
||||
def __eq__(self, other):
|
||||
if type(other) is not SomeClass:
|
||||
return False
|
||||
return self.id == other.id
|
||||
|
||||
def unionof(self, other):
|
||||
if type(other) is not SomeClass or self.id != other.id:
|
||||
return super(SomeClass, self).unionof(other)
|
||||
return self
|
||||
|
||||
def __repr__(self):
|
||||
return "Class %s" % self.name
|
||||
|
||||
class SomeCode(SomeObject):
|
||||
typedef = types.CodeType
|
||||
|
||||
class SomeComplex(SomeObject):
|
||||
typedef = types.ComplexType
|
||||
|
||||
class SomeDictProxy(SomeObject):
|
||||
typedef = types.DictProxyType
|
||||
|
||||
class SomeDict(SomeObject):
|
||||
typedef = types.DictType
|
||||
|
||||
class SomeEllipsis(SomeObject):
|
||||
typedef = types.EllipsisType
|
||||
|
||||
class SomeFile(SomeObject):
|
||||
typedef = types.FileType
|
||||
|
||||
class SomeFloat(SomeObject):
|
||||
typedef = types.FloatType
|
||||
|
||||
class SomeFrame(SomeObject):
|
||||
typedef = types.FrameType
|
||||
|
||||
class SomeFunction(SomeObject):
|
||||
typedef = types.FunctionType
|
||||
|
||||
class SomeGenerator(SomeObject):
|
||||
typedef = types.GeneratorType
|
||||
|
||||
class SomeInstance(SomeObject):
|
||||
def __init__(self, classdef):
|
||||
self.classdef = classdef
|
||||
|
||||
def __hash__(self):
|
||||
return hash("SomeInstance") ^ hash(self.classdef)
|
||||
|
||||
def __eq__(self, other):
|
||||
if type(other) is not SomeInstance:
|
||||
return False
|
||||
return other.classdef == self.classdef
|
||||
|
||||
def unionof(self, other):
|
||||
if type(other) is not SomeInstance:
|
||||
return super(SomeInstance, self).unionof(other)
|
||||
if self.classdef == other.classdef:
|
||||
return self
|
||||
return SomeInstance(unionof(self.classdef, other.classdef))
|
||||
|
||||
def __repr__(self):
|
||||
return "<Instance of %s>" % str(self.classdef)
|
||||
|
||||
def striter(self):
|
||||
yield "<Instance of "
|
||||
yield self.classdef
|
||||
yield ">"
|
||||
|
||||
typedef = types.InstanceType
|
||||
|
||||
class SomeInt(SomeObject):
|
||||
typedef = types.IntType
|
||||
|
||||
class SomeLambda(SomeObject):
|
||||
typedef = types.LambdaType
|
||||
|
||||
class SomeList(SomeObject):
|
||||
typedef = types.ListType
|
||||
|
||||
class SomeLong(SomeObject):
|
||||
typedef = types.LongType
|
||||
|
||||
class SomeMethod(SomeObject):
|
||||
typedef = types.MethodType
|
||||
|
||||
class SomeModule(SomeObject):
|
||||
typedef = types.ModuleType
|
||||
|
||||
class SomeNone(SomeObject):
|
||||
typedef = types.NoneType
|
||||
|
||||
class SomeNotImplemented(SomeObject):
|
||||
typedef = types.NotImplementedType
|
||||
|
||||
class SomeObject(SomeObject):
|
||||
typedef = types.ObjectType
|
||||
|
||||
class SomeSlice(SomeObject):
|
||||
typedef = types.SliceType
|
||||
|
||||
class SomeString(SomeObject):
|
||||
typedef = types.StringType
|
||||
|
||||
class SomeTraceback(SomeObject):
|
||||
typedef = types.TracebackType
|
||||
|
||||
class SomeTuple(SomeObject):
|
||||
typedef = types.TupleType
|
||||
|
||||
class SomeType(SomeObject):
|
||||
typedef = types.TypeType
|
||||
|
||||
class SomeUnboundMethod(SomeObject):
|
||||
typedef = types.UnboundMethodType
|
||||
|
||||
class SomeUnicode(SomeObject):
|
||||
typedef = types.UnicodeType
|
||||
|
||||
class SomeXRange(SomeObject):
|
||||
typedef = types.XRangeType
|
||||
|
||||
class SomeImpossibleValue(SomeObject):
|
||||
def unionof(self, other):
|
||||
return other
|
||||
|
||||
def __repr__(self):
|
||||
return "<UNKNOWN>"
|
||||
|
||||
s_ImpossibleValue = SomeImpossibleValue()
|
||||
s_None = SomeNone()
|
||||
s_Ellipsis = SomeEllipsis()
|
||||
|
||||
def guess_type(x):
|
||||
# this is mostly a copy of immutablevalue
|
||||
if hasattr(x, 'im_self') and x.im_self is None:
|
||||
x = x.im_func
|
||||
assert not hasattr(x, 'im_self')
|
||||
tp = type(x)
|
||||
if tp is bool:
|
||||
result = SomeBoolean()
|
||||
elif tp is int:
|
||||
result = SomeInt()
|
||||
elif issubclass(tp, str):
|
||||
result = SomeString()
|
||||
elif tp is unicode:
|
||||
result = SomeUnicode()
|
||||
elif tp is tuple:
|
||||
result = SomeTuple()
|
||||
#result = SomeTuple(items = [self.immutablevalue(e, need_const) for e in x])
|
||||
elif tp is float:
|
||||
result = SomeFloat()
|
||||
elif tp is list:
|
||||
#else:
|
||||
# listdef = ListDef(self, s_ImpossibleValue)
|
||||
# for e in x:
|
||||
# listdef.generalize(self.annotation_from_example(e))
|
||||
result = SomeList()
|
||||
elif tp is dict:
|
||||
## dictdef = DictDef(self,
|
||||
## s_ImpossibleValue,
|
||||
## s_ImpossibleValue,
|
||||
## is_r_dict = tp is r_dict)
|
||||
## if tp is r_dict:
|
||||
## s_eqfn = self.immutablevalue(x.key_eq)
|
||||
## s_hashfn = self.immutablevalue(x.key_hash)
|
||||
## dictdef.dictkey.update_rdict_annotations(s_eqfn,
|
||||
## s_hashfn)
|
||||
## for ek, ev in x.iteritems():
|
||||
## dictdef.generalize_key(self.annotation_from_example(ek))
|
||||
## dictdef.generalize_value(self.annotation_from_example(ev))
|
||||
result = SomeDict()
|
||||
elif tp is types.ModuleType:
|
||||
result = SomeModule()
|
||||
elif callable(x):
|
||||
#if hasattr(x, '__self__') and x.__self__ is not None:
|
||||
# # for cases like 'l.append' where 'l' is a global constant list
|
||||
# s_self = self.immutablevalue(x.__self__, need_const)
|
||||
# result = s_self.find_method(x.__name__)
|
||||
# if result is None:
|
||||
# result = SomeObject()
|
||||
#elif hasattr(x, 'im_self') and hasattr(x, 'im_func'):
|
||||
# # on top of PyPy, for cases like 'l.append' where 'l' is a
|
||||
# # global constant list, the find_method() returns non-None
|
||||
# s_self = self.immutablevalue(x.im_self, need_const)
|
||||
# result = s_self.find_method(x.im_func.__name__)
|
||||
#else:
|
||||
# result = None
|
||||
#if result is None:
|
||||
# if (self.annotator.policy.allow_someobjects
|
||||
# and getattr(x, '__module__', None) == '__builtin__'
|
||||
# # XXX note that the print support functions are __builtin__
|
||||
# and tp not in (types.FunctionType, types.MethodType)):
|
||||
## result = SomeObject()
|
||||
# result.knowntype = tp # at least for types this needs to be correct
|
||||
# else:
|
||||
# result = SomePBC([self.getdesc(x)])
|
||||
if tp is types.BuiltinFunctionType or tp is types.BuiltinMethodType:
|
||||
result = SomeBuiltinFunction()
|
||||
elif hasattr(x, 'im_func'):
|
||||
result = SomeMethod()
|
||||
elif hasattr(x, 'func_code'):
|
||||
result = SomeFunction()
|
||||
elif hasattr(x, '__class__'):
|
||||
if x.__class__ is type:
|
||||
result = SomeClass(x)
|
||||
else:
|
||||
result = SomeInstance(SomeClass(x.__class__))
|
||||
elif tp is types.ClassType:
|
||||
result = SomeClass(x)
|
||||
elif x is None:
|
||||
return s_None
|
||||
elif hasattr(x, '__class__'):
|
||||
result = SomeInstance(SomeClass(x.__class__))
|
||||
else:
|
||||
result = SomeObject()
|
||||
# XXX here we might want to consider stuff like
|
||||
# buffer, slice, etc. etc. Let's leave it for now
|
||||
return result
|
||||
|
||||
def unionof(first, other):
|
||||
return first.unionof(other)
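# Quick illustration of the model (reprs shown as comments; the order of the
# AnyOf() members may vary because SomeUnion stores a set):
#
#   guess_type(3)                               # -> <Int>
#   guess_type("abc")                           # -> <String>
#   guess_type(3).unionof(guess_type(4))        # -> <Int>  (equal types collapse)
#   unionof(guess_type(3), guess_type("abc"))   # -> AnyOf(<Int>, <String>)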
|
|
@ -0,0 +1,106 @@
|
|||
import py
|
||||
|
||||
class DescPlaceholder(object):
|
||||
pass
|
||||
|
||||
class ClassPlaceholder(object):
|
||||
pass
|
||||
|
||||
class SerialisableClassDesc(object):
|
||||
def __init__(self, original_desc):
|
||||
self.is_degenerated = original_desc.is_degenerated
|
||||
self.name = original_desc.name
|
||||
|
||||
class PermaDocStorage(object):
|
||||
""" Picklable version of docstorageaccessor
|
||||
"""
|
||||
function_fields = ['source', 'signature', 'definition', 'callpoints',
|
||||
'local_changes', 'exceptions']
|
||||
|
||||
def __init__(self, dsa):
|
||||
""" Initialise from original doc storage accessor
|
||||
"""
|
||||
self.names = {}
|
||||
self.module_info = dsa.get_module_info()
|
||||
self.module_name = dsa.get_module_name()
|
||||
self._save_functions(dsa)
|
||||
self._save_classes(dsa)
|
||||
|
||||
def _save_functions(self, dsa):
|
||||
names = dsa.get_function_names()
|
||||
self.function_names = names
|
||||
for name in names:
|
||||
self._save_function(dsa, name)
|
||||
|
||||
def _save_function(self, dsa, name):
|
||||
ph = DescPlaceholder()
|
||||
ph.__doc__ = dsa.get_doc(name)
|
||||
for field in self.function_fields:
|
||||
setattr(ph, field, getattr(dsa, 'get_function_%s' % field)(name))
|
||||
self.names[name] = ph
|
||||
return ph
|
||||
|
||||
def _save_classes(self, dsa):
|
||||
names = dsa.get_class_names()
|
||||
self.class_names = names
|
||||
for name in names:
|
||||
ph = ClassPlaceholder()
|
||||
ph.__doc__ = dsa.get_doc(name)
|
||||
methods = dsa.get_class_methods(name)
|
||||
ph.methods = methods
|
||||
ph.base_classes = [SerialisableClassDesc(i) for i in
|
||||
dsa.get_possible_base_classes(name)]
|
||||
|
||||
for method in methods:
|
||||
method_name = name + "." + method
|
||||
mh = self._save_function(dsa, name + "." + method)
|
||||
mh.origin = SerialisableClassDesc(dsa.get_method_origin(
|
||||
method_name))
|
||||
self.names[name] = ph
|
||||
|
||||
def get_class_methods(self, name):
|
||||
desc = self.names[name]
|
||||
assert isinstance(desc, ClassPlaceholder)
|
||||
return desc.methods
|
||||
|
||||
def get_doc(self, name):
|
||||
return self.names[name].__doc__
|
||||
|
||||
def get_module_info(self):
|
||||
return self.module_info
|
||||
|
||||
def get_module_name(self):
|
||||
return self.module_name
|
||||
|
||||
def get_class_names(self):
|
||||
return self.class_names
|
||||
|
||||
def get_function_names(self):
|
||||
return self.function_names
|
||||
|
||||
def get_method_origin(self, name):
|
||||
# returns a DESCRIPTION of the method's origin, so that we know where
# to write it
|
||||
return self.names[name].origin
|
||||
|
||||
def get_possible_base_classes(self, name):
|
||||
# returns list of descs of base classes
|
||||
return self.names[name].base_classes
|
||||
|
||||
# These are placeholders to provide something more reliable
|
||||
def get_type_desc(self, _type):
|
||||
return None
|
||||
|
||||
#def get_obj(self, name):
|
||||
# This is quite hairy, get rid of it soon
|
||||
# # returns a pyobj
|
||||
# pass
|
||||
|
||||
for field in PermaDocStorage.function_fields:
|
||||
d = {"field": field}
|
||||
func_name = "get_function_%s" % (field, )
|
||||
exec py.code.Source("""
|
||||
def %s(self, name, field=field):
|
||||
return getattr(self.names[name], field)
|
||||
""" % (func_name, )).compile() in d
|
||||
setattr(PermaDocStorage, func_name, d[func_name])
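# Intended usage (a sketch; the tests only construct PermaDocStorage behind an
# ``if 0:`` guard): snapshot a traced DocStorageAccessor into a picklable
# object and restore it later.
#
#   import pickle
#   pds = PermaDocStorage(dsa)              # dsa is a DocStorageAccessor
#   data = pickle.dumps(pds)
#   restored = pickle.loads(data)
#   restored.get_function_names()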
|
|
@ -0,0 +1,13 @@
|
|||
import py
|
||||
|
||||
from py.__.initpkg import initpkg
|
||||
|
||||
initpkg(__name__,
|
||||
description="test package",
|
||||
exportdefs = {
|
||||
'pak.mod.one': ('./pak/mod.py', 'one'),
|
||||
'pak.mod.two': ('./pak/mod.py', 'nottwo'),
|
||||
'notpak.notmod.notclass': ('./pak/mod.py', 'cls'),
|
||||
'somenamespace': ('./pak/somenamespace.py', '*'),
|
||||
})
|
||||
|
|
@ -0,0 +1,10 @@
|
|||
class cls(object):
|
||||
def __init__(self, x):
|
||||
self.x = x
|
||||
|
||||
def one(x):
|
||||
return x+3
|
||||
|
||||
def nottwo():
|
||||
pass
|
||||
|
|
@ -0,0 +1,5 @@
|
|||
def foo(x):
|
||||
return x + 1
|
||||
|
||||
def bar(x):
|
||||
return x + 2
|
|
@ -0,0 +1,5 @@
|
|||
|
||||
def cut_pyc(f_name):
|
||||
if f_name.endswith('.pyc'):
|
||||
return f_name[:-1]
|
||||
return f_name
|
|
@ -0,0 +1,28 @@
|
|||
|
||||
""" Some additional tests about descriptions
|
||||
"""
|
||||
|
||||
from py.__.apigen.tracer.description import *
|
||||
|
||||
class A:
|
||||
pass
|
||||
|
||||
class B(object):
|
||||
def __init__(self):
|
||||
pass
|
||||
|
||||
class C(object):
|
||||
pass
|
||||
|
||||
class D:
|
||||
def __init__(self):
|
||||
pass
|
||||
|
||||
def test_getcode():
|
||||
assert hash(ClassDesc("a", A).code)
|
||||
assert hash(ClassDesc("b", B).code)
|
||||
assert hash(ClassDesc("c", C).code)
|
||||
assert hash(ClassDesc("d", D).code)
|
||||
|
||||
def test_eq():
|
||||
assert ClassDesc('a', A) == ClassDesc('a', A)
|
|
@ -0,0 +1,420 @@
|
|||
|
||||
""" test doc generation
|
||||
"""
|
||||
|
||||
import py
|
||||
import sys
|
||||
|
||||
#try:
|
||||
from py.__.apigen.tracer.tracer import DocStorage, Tracer
|
||||
from py.__.apigen.tracer.docstorage import DocStorageAccessor
|
||||
from py.__.apigen.tracer.testing.runtest import cut_pyc
|
||||
from py.__.apigen.tracer.description import FunctionDesc
|
||||
from py.__.apigen.tracer import model
|
||||
from py.__.apigen.tracer.permastore import PermaDocStorage
|
||||
# from pypy.annotation import model
|
||||
#except ImportError, s:
|
||||
# py.test.skip("Cannot import: %s" % str(s))
|
||||
|
||||
#def setup_module(mod):
|
||||
# data_path = py.path.local(mod.__file__).dirpath().join("data")
|
||||
# sys.path.insert(0, str(data_path))
|
||||
|
||||
# XXX: the PermaDocStorage checks below are partially disabled (see the 'if 0:' blocks)
|
||||
|
||||
sorted = py.builtin.sorted
|
||||
set = py.builtin.set
|
||||
|
||||
def fun(a, b, c):
|
||||
"Some docstring"
|
||||
return "d"
|
||||
|
||||
def test_basic():
|
||||
descs = {"fun":fun}
|
||||
ds = DocStorage().from_dict(descs)
|
||||
t = Tracer(ds)
|
||||
t.start_tracing()
|
||||
fun(1, ("g", 3), 8)
|
||||
fun(2., ("a", 1.), "a")
|
||||
t.end_tracing()
|
||||
desc = ds.descs['fun']
|
||||
inputcells = desc.inputcells
|
||||
assert len(inputcells) == 3
|
||||
assert isinstance(inputcells[0], model.SomeUnion)
|
||||
assert isinstance(inputcells[1], model.SomeTuple)
|
||||
assert isinstance(inputcells[2], model.SomeUnion)
|
||||
assert isinstance(desc.retval, model.SomeString)
|
||||
cs = sorted(desc.call_sites.keys())
|
||||
assert len(cs) == 2
|
||||
f_name = cut_pyc(__file__)
|
||||
assert len(cs[0]) == 1
|
||||
assert len(cs[1]) == 1
|
||||
assert cs[1][0].filename == f_name
|
||||
# lines are counted from 0
|
||||
num = test_basic.func_code.co_firstlineno
|
||||
assert cs[1][0].lineno == num + 4 or cs[1][0].lineno == num + 5
|
||||
assert cs[0][0].filename == f_name
|
||||
assert cs[0][0].lineno == num + 5 or cs[0][0].lineno == num + 4
|
||||
if 0:
|
||||
pds = PermaDocStorage(DocStorageAccessor(ds))
|
||||
assert pds.get_function_names() == ['fun']
|
||||
sig = pds.get_function_signature('fun')
|
||||
assert sig[0][0][0] == 'a'
|
||||
assert isinstance(sig[0][0][1], model.SomeUnion)
|
||||
assert len(pds.get_function_callpoints('fun')) == 2
|
||||
|
||||
class AClass(object):
|
||||
""" Class docstring
|
||||
"""
|
||||
def __init__(self, b="blah"):
|
||||
pass
|
||||
|
||||
def exposed_method(self, a, b, c):
|
||||
""" method docstring
|
||||
"""
|
||||
return self._hidden_method()
|
||||
|
||||
def _hidden_method(self):
|
||||
""" should not appear
|
||||
"""
|
||||
return "z"
|
||||
|
||||
class ANotherClass(AClass):
|
||||
def another_exposed_method(self, a):
|
||||
# no docstring
|
||||
return a
|
||||
|
||||
def test_class():
|
||||
descs = {'AClass':AClass}
|
||||
ds = DocStorage().from_dict(descs)
|
||||
t = Tracer(ds)
|
||||
t.start_tracing()
|
||||
s = AClass()
|
||||
s.exposed_method(1, 2., [1,2,3])
|
||||
t.end_tracing()
|
||||
desc = ds.descs['AClass']
|
||||
inputcells = desc.fields['__init__'].inputcells
|
||||
assert len(inputcells) == 2
|
||||
assert isinstance(inputcells[0], model.SomeInstance)
|
||||
#assert inputcells[0].classdef.classdesc.pyobj is SomeClass
|
||||
# XXX: should work
|
||||
assert isinstance(inputcells[1], model.SomeString)
|
||||
f_name = __file__
|
||||
if f_name.endswith('.pyc'):
|
||||
f_name = f_name[:-1]
|
||||
cs = sorted(desc.fields['__init__'].call_sites.keys())
|
||||
assert len(cs) == 1
|
||||
assert len(cs[0]) == 1
|
||||
assert cs[0][0].filename == f_name
|
||||
assert cs[0][0].lineno == test_class.func_code.co_firstlineno + 4
|
||||
# method check
|
||||
assert sorted(desc.getfields()) == ['__init__', 'exposed_method']
|
||||
inputcells = desc.fields['exposed_method'].inputcells
|
||||
assert len(inputcells) == 4
|
||||
assert isinstance(inputcells[0], model.SomeInstance)
|
||||
#assert inputcells[0].classdef.classdesc.pyobj is SomeClass
|
||||
# XXX should work
|
||||
assert isinstance(inputcells[1], model.SomeInt)
|
||||
assert isinstance(inputcells[2], model.SomeFloat)
|
||||
assert isinstance(inputcells[3], model.SomeList)
|
||||
assert isinstance(desc.fields['exposed_method'].retval, model.SomeString)
|
||||
if 0:
|
||||
pds = PermaDocStorage(DocStorageAccessor(ds))
|
||||
assert pds.get_class_names() == ['AClass']
|
||||
assert len(pds.get_function_signature('AClass.exposed_method')[0]) == 4
|
||||
|
||||
def other_fun():
|
||||
pass
|
||||
|
||||
def test_add_desc():
|
||||
ds = DocStorage().from_dict({})
|
||||
ds.add_desc("one", fun)
|
||||
ds.add_desc("one", other_fun)
|
||||
assert sorted(ds.descs.keys()) == ["one", "one_1"]
|
||||
assert isinstance(ds.descs["one"], FunctionDesc)
|
||||
assert isinstance(ds.descs["one_1"], FunctionDesc)
|
||||
assert ds.descs["one"].pyobj is fun
|
||||
assert ds.descs["one_1"].pyobj is other_fun
|
||||
assert ds.desc_cache[ds.descs["one"]] is ds.descs["one"]
|
||||
assert ds.desc_cache[ds.descs["one_1"]] is ds.descs["one_1"]
|
||||
|
||||
def test_while_call():
|
||||
ds = DocStorage().from_dict({"other_fun":other_fun})
|
||||
t = Tracer(ds)
|
||||
t.start_tracing()
|
||||
for x in xrange(8):
|
||||
other_fun()
|
||||
t.end_tracing()
|
||||
desc = ds.descs["other_fun"]
|
||||
assert len(desc.call_sites.keys()) == 1
|
||||
#assert isinstance(desc.call_sites.values()[0][0], py.code.Frame)
|
||||
if 0:
|
||||
pds = PermaDocStorage(DocStorageAccessor(ds))
|
||||
assert len(pds.get_function_callpoints("other_fun")) == 1
|
||||
|
||||
class A(object):
|
||||
def method(self, x):
|
||||
self.x = x
|
||||
|
||||
class B:
|
||||
def method(self, x):
|
||||
self.x = x
|
||||
|
||||
def test_without_init():
|
||||
ds = DocStorage().from_dict({'A':A, 'B':B})
|
||||
t = Tracer(ds)
|
||||
t.start_tracing()
|
||||
x = A()
|
||||
y = B()
|
||||
x.method(3)
|
||||
y.method(4)
|
||||
t.end_tracing()
|
||||
assert isinstance(ds.descs['A'].fields['method'].inputcells[1],
|
||||
model.SomeInt)
|
||||
assert isinstance(ds.descs['B'].fields['method'].inputcells[1],
|
||||
model.SomeInt)
|
||||
if 0:
|
||||
pds = PermaDocStorage(DocStorageAccessor(ds))
|
||||
|
||||
def test_local_changes():
|
||||
class testclass(object):
|
||||
def __init__(self):
|
||||
self.foo = 0
|
||||
def bar(self, x):
|
||||
self.foo = x
|
||||
ds = DocStorage().from_dict({'testclass': testclass})
|
||||
t = Tracer(ds)
|
||||
t.start_tracing()
|
||||
c = testclass()
|
||||
c.bar(1)
|
||||
t.end_tracing()
|
||||
desc = ds.descs['testclass']
|
||||
methdesc = desc.fields['bar']
|
||||
#assert methdesc.old_dict != methdesc.new_dict
|
||||
assert methdesc.get_local_changes() == {'foo': set(['changed'])}
|
||||
return ds
|
||||
|
||||
def test_local_changes_nochange():
|
||||
class testclass(object):
|
||||
def __init__(self):
|
||||
self.foo = 0
|
||||
def bar(self, x):
|
||||
self.foo = x
|
||||
ds = DocStorage().from_dict({'testclass': testclass})
|
||||
t = Tracer(ds)
|
||||
t.start_tracing()
|
||||
c = testclass()
|
||||
t.end_tracing()
|
||||
desc = ds.descs['testclass']
|
||||
methdesc = desc.fields['bar']
|
||||
assert methdesc.get_local_changes() == {}
|
||||
return ds
|
||||
|
||||
def test_multiple_classes_with_same_init():
|
||||
class A:
|
||||
def __init__(self, x):
|
||||
self.x = x
|
||||
|
||||
class B(A):
|
||||
pass
|
||||
|
||||
ds = DocStorage().from_dict({'A':A, 'B':B})
|
||||
t = Tracer(ds)
|
||||
t.start_tracing()
|
||||
c = A(3)
|
||||
d = B(4)
|
||||
t.end_tracing()
|
||||
assert len(ds.descs['A'].fields['__init__'].call_sites) == 1
|
||||
assert len(ds.descs['B'].fields['__init__'].call_sites) == 1
|
||||
return ds
|
||||
|
||||
def test_exception_raise():
|
||||
def x():
|
||||
1/0
|
||||
|
||||
def y():
|
||||
try:
|
||||
x()
|
||||
except ZeroDivisionError:
|
||||
pass
|
||||
|
||||
def z():
|
||||
y()
|
||||
|
||||
ds = DocStorage().from_dict({'x':x, 'y':y, 'z':z})
|
||||
t = Tracer(ds)
|
||||
t.start_tracing()
|
||||
z()
|
||||
t.end_tracing()
|
||||
assert ds.descs['x'].exceptions.keys() == [ZeroDivisionError]
|
||||
assert ds.descs['y'].exceptions.keys() == [ZeroDivisionError]
|
||||
assert ds.descs['z'].exceptions.keys() == []
|
||||
return ds
|
||||
|
||||
def test_subclass():
|
||||
descs = {'ANotherClass': ANotherClass}
|
||||
ds = DocStorage().from_dict(descs)
|
||||
t = Tracer(ds)
|
||||
t.start_tracing()
|
||||
s = ANotherClass('blah blah')
|
||||
s.another_exposed_method(1)
|
||||
t.end_tracing()
|
||||
desc = ds.descs['ANotherClass']
|
||||
assert len(desc.fields) == 4
|
||||
inputcells = desc.fields['__init__'].inputcells
|
||||
assert len(inputcells) == 2
|
||||
inputcells = desc.fields['another_exposed_method'].inputcells
|
||||
assert len(inputcells) == 2
|
||||
bases = desc.bases
|
||||
assert len(bases) == 2
|
||||
return ds
|
||||
|
||||
def test_bases():
|
||||
class A:
|
||||
pass
|
||||
|
||||
class B:
|
||||
pass
|
||||
|
||||
class C(A,B):
|
||||
pass
|
||||
|
||||
ds = DocStorage().from_dict({'C':C, 'B':B})
|
||||
dsa = DocStorageAccessor(ds)
|
||||
for desc in dsa.get_possible_base_classes('C'):
|
||||
assert desc is ds.descs['B'] or desc.is_degenerated
|
||||
return ds
|
||||
|
||||
def test_desc_from_pyobj():
|
||||
class A:
|
||||
pass
|
||||
|
||||
class B(A):
|
||||
pass
|
||||
|
||||
ds = DocStorage().from_dict({'A': A, 'B': B})
|
||||
dsa = DocStorageAccessor(ds)
|
||||
assert dsa.desc_from_pyobj(A, 'A') is ds.descs['A']
|
||||
return ds
|
||||
|
||||
def test_method_origin():
|
||||
class A:
|
||||
def foo(self):
|
||||
pass
|
||||
|
||||
class B(A):
|
||||
def bar(self):
|
||||
pass
|
||||
|
||||
class C(B):
|
||||
pass
|
||||
|
||||
ds = DocStorage().from_dict({'C': C, 'B': B})
|
||||
dsa = DocStorageAccessor(ds)
|
||||
origin = dsa.get_method_origin('C.bar')
|
||||
assert origin is ds.descs['B']
|
||||
return ds
|
||||
|
||||
def test_multiple_methods():
|
||||
class A(object):
|
||||
def meth(self):
|
||||
pass
|
||||
|
||||
class B(A):
|
||||
pass
|
||||
|
||||
class C(A):
|
||||
pass
|
||||
|
||||
ds = DocStorage().from_dict({'C':C, 'B':B})
|
||||
dsa = DocStorageAccessor(ds)
|
||||
t = Tracer(ds)
|
||||
t.start_tracing()
|
||||
B().meth()
|
||||
C().meth()
|
||||
t.end_tracing()
|
||||
assert len(ds.descs['B'].fields['meth'].call_sites) == 1
|
||||
assert len(ds.descs['C'].fields['meth'].call_sites) == 1
|
||||
return ds
|
||||
|
||||
def test_is_private():
|
||||
# XXX implicit test, but so are the rest :|
|
||||
class Foo(object):
|
||||
def foo(self):
|
||||
pass
|
||||
def _foo(self):
|
||||
pass
|
||||
def __foo(self):
|
||||
pass
|
||||
def trigger__foo(self):
|
||||
self.__foo()
|
||||
def __foo__(self):
|
||||
pass
|
||||
|
||||
ds = DocStorage().from_dict({'Foo': Foo})
|
||||
dsa = DocStorageAccessor(ds)
|
||||
t = Tracer(ds)
|
||||
t.start_tracing()
|
||||
f = Foo()
|
||||
f.foo()
|
||||
f._foo()
|
||||
f.trigger__foo()
|
||||
f.__foo__()
|
||||
t.end_tracing()
|
||||
assert sorted(ds.descs['Foo'].getfields()) == ['__foo__', 'foo',
|
||||
'trigger__foo']
|
||||
|
||||
def setup_fs_project():
|
||||
temp = py.test.ensuretemp('test_get_initpkg_star_items')
|
||||
temp.ensure("pkg/func.py").write(py.code.Source("""\
|
||||
def func(arg1):
|
||||
"docstring"
|
||||
"""))
|
||||
temp.ensure('pkg/someclass.py').write(py.code.Source("""\
|
||||
class SomeClass(object):
|
||||
" docstring someclass "
|
||||
def __init__(self, somevar):
|
||||
self.somevar = somevar
|
||||
|
||||
def get_somevar(self):
|
||||
" get_somevar docstring "
|
||||
return self.somevar
|
||||
SomeInstance = SomeClass(10)
|
||||
"""))
|
||||
temp.ensure('pkg/somesubclass.py').write(py.code.Source("""\
|
||||
from someclass import SomeClass
|
||||
class SomeSubClass(SomeClass):
|
||||
" docstring somesubclass "
|
||||
def get_somevar(self):
|
||||
return self.somevar + 1
|
||||
"""))
|
||||
temp.ensure('pkg/somenamespace.py').write(py.code.Source("""\
|
||||
from pkg.main.sub import func
|
||||
|
||||
def foo():
|
||||
return 'bar'
|
||||
def baz(qux):
|
||||
return qux
|
||||
"""))
|
||||
temp.ensure("pkg/__init__.py").write(py.code.Source("""\
|
||||
from py.initpkg import initpkg
|
||||
initpkg(__name__, exportdefs = {
|
||||
'main.sub.func': ("./func.py", "func"),
|
||||
'main.SomeClass': ('./someclass.py', 'SomeClass'),
|
||||
'main.SomeInstance': ('./someclass.py', 'SomeInstance'),
|
||||
'main.SomeSubClass': ('./somesubclass.py', 'SomeSubClass'),
|
||||
'other': ('./somenamespace.py', '*'),
|
||||
})
|
||||
"""))
|
||||
return temp, 'pkg'
|
||||
|
||||
def test_get_initpkg_star_items():
|
||||
pkgdir, pkgname = setup_fs_project()
|
||||
py.std.sys.path.insert(0, str(pkgdir))
|
||||
pkg = __import__(pkgname)
|
||||
ds = DocStorage().from_pkg(pkg)
|
||||
sit = ds.get_star_import_tree(pkg.other, 'pkg.other')
|
||||
print sit
|
||||
assert sorted(sit.keys()) == ['pkg.other.baz', 'pkg.other.foo']
|
||||
|
|
@ -0,0 +1,48 @@
|
|||
|
||||
""" test magic abilities of tracer
|
||||
"""
|
||||
|
||||
import py
|
||||
py.test.skip("These features has been disabled")
|
||||
|
||||
from py.__.apigen.tracer.magic import trace, get_storage, stack_copier, \
|
||||
DocStorageKeeper
|
||||
from py.__.apigen.tracer.docstorage import DocStorage
|
||||
from py.__.apigen.tracer import model
|
||||
|
||||
#def setup_function(f):
|
||||
# DocStorageKeeper.set_storage(DocStorage().from_dict({}))
|
||||
|
||||
def fun(a, b, c):
|
||||
return "a"
|
||||
fun = trace()(fun)
|
||||
|
||||
def test_magic():
|
||||
fun(1, 2, 3)
|
||||
|
||||
ds = get_storage()
|
||||
assert 'fun' in ds.descs
|
||||
assert len(ds.descs.keys()) == 2
|
||||
desc = ds.descs['fun']
|
||||
inputcells = desc.inputcells
|
||||
assert isinstance(inputcells[0], model.SomeInt)
|
||||
assert isinstance(inputcells[1], model.SomeInt)
|
||||
assert isinstance(inputcells[2], model.SomeInt)
|
||||
assert isinstance(desc.retval, model.SomeString)
|
||||
|
||||
def g(x):
|
||||
return f(x)
|
||||
|
||||
def f(x):
|
||||
return x + 3
|
||||
f = trace(keep_frames=True, frame_copier=stack_copier)(f)
|
||||
|
||||
def test_fancy_copier():
|
||||
g(1)
|
||||
|
||||
ds = get_storage()
|
||||
assert 'f' in ds.descs
|
||||
desc = ds.descs['f']
|
||||
stack = desc.call_sites.values()[0][0]
|
||||
assert str(stack[0].statement) == ' return f(x)'
|
||||
assert str(stack[1].statement) == ' g(1)'
|
|
@ -0,0 +1,112 @@
|
|||
|
||||
""" test_model - our (very simple) type system
|
||||
model tests
|
||||
"""
|
||||
|
||||
from py.__.apigen.tracer.model import *
|
||||
|
||||
import types
|
||||
import py
|
||||
|
||||
def check_guess(val, t):
|
||||
assert isinstance(guess_type(val), t)
|
||||
|
||||
def test_basic():
|
||||
""" This tests checks every object that we might want
|
||||
to track
|
||||
"""
|
||||
check_guess(3, SomeInt)
|
||||
check_guess(3., SomeFloat)
|
||||
check_guess(True, SomeBoolean)
|
||||
check_guess(lambda x: None, SomeFunction)
|
||||
|
||||
class A:
|
||||
pass
|
||||
|
||||
check_guess(A, SomeClass)
|
||||
check_guess(A(), SomeInstance)
|
||||
|
||||
class B(object):
|
||||
def meth(self):
|
||||
pass
|
||||
|
||||
class C(object):
|
||||
def __call__(self):
|
||||
pass
|
||||
|
||||
check_guess(B, SomeClass)
|
||||
check_guess(B.meth, SomeFunction)
|
||||
check_guess(B(), SomeInstance)
|
||||
check_guess(B().meth, SomeMethod)
|
||||
check_guess([1], SomeList)
|
||||
check_guess(None, SomeNone)
|
||||
check_guess((1,), SomeTuple)
|
||||
check_guess(C(), SomeInstance)
|
||||
import sys
|
||||
check_guess(sys, SomeModule)
|
||||
check_guess({}, SomeDict)
|
||||
check_guess(sys.exc_info, SomeBuiltinFunction)
|
||||
|
||||
def test_anyof():
|
||||
def check_lst(lst):
|
||||
a = guess_type(lst[0])
|
||||
for i in lst[1:]:
|
||||
a = unionof(a, guess_type(i))
|
||||
d = dict([(i, True) for i in a.possibilities])
|
||||
assert len(a.possibilities) == len(d)
|
||||
for i in a.possibilities:
|
||||
assert not isinstance(i, SomeUnion)
|
||||
return a
|
||||
|
||||
class C(object):
|
||||
pass
|
||||
|
||||
ret = check_lst([3, 4, 3., "aa"])
|
||||
assert len(ret.possibilities) == 3
|
||||
ret = check_lst([3, 4, 3.])
|
||||
ret2 = check_lst([1, "aa"])
|
||||
ret3 = unionof(ret, ret2)
|
||||
assert len(ret3.possibilities) == 3
|
||||
ret = check_lst([3, 1.])
|
||||
ret = unionof(ret, guess_type("aa"))
|
||||
ret = unionof(guess_type("aa"), ret)
|
||||
ret = unionof(guess_type(C()), ret)
|
||||
ret = unionof(ret, guess_type("aa"))
|
||||
ret = unionof(ret, guess_type(C()))
|
||||
assert len(ret.possibilities) == 4
|
||||
|
||||
def test_union():
|
||||
class A(object):
|
||||
pass
|
||||
|
||||
class B(object):
|
||||
pass
|
||||
|
||||
f = guess_type(A).unionof(guess_type(A))
|
||||
assert isinstance(f, SomeClass)
|
||||
assert f.cls is A
|
||||
f = guess_type(A).unionof(guess_type(B)).unionof(guess_type(A))
|
||||
assert isinstance(f, SomeUnion)
|
||||
assert len(f.possibilities) == 2
|
||||
f = guess_type(A()).unionof(guess_type(A()))
|
||||
assert isinstance(f, SomeInstance)
|
||||
assert isinstance(f.classdef, SomeClass)
|
||||
assert f.classdef.cls is A
|
||||
f = guess_type(B()).unionof(guess_type(A())).unionof(guess_type(B()))
|
||||
assert isinstance(f, SomeInstance)
|
||||
assert isinstance(f.classdef, SomeUnion)
|
||||
assert len(f.classdef.possibilities) == 2
|
||||
|
||||
def test_striter():
|
||||
class A(object):
|
||||
pass
|
||||
|
||||
class B(object):
|
||||
pass
|
||||
|
||||
g = guess_type(A).unionof(guess_type(A()))
|
||||
l = py.builtin.sorted(list(g.striter()))
|
||||
assert l[4] == "AnyOf("
|
||||
assert isinstance(l[0], SomeClass)
|
||||
assert l[3] == ", "
|
||||
assert isinstance(l[1], SomeInstance)
|
|
@ -0,0 +1,51 @@
|
|||
|
||||
""" some tests for from_package
|
||||
"""
|
||||
|
||||
from py.__.apigen.tracer.docstorage import DocStorage
|
||||
from py.__.apigen.tracer.tracer import Tracer
|
||||
from py.__.apigen.tracer import model
|
||||
import sys
|
||||
import py
|
||||
|
||||
def setup_module(mod):
|
||||
sys.path.insert(0, str(py.path.local(__file__).dirpath().join("package")))
|
||||
import submodule
|
||||
mod.submodule = submodule
|
||||
|
||||
def teardown_module(mod):
|
||||
sys.path = sys.path[1:]
|
||||
|
||||
class TestFullModule(object):
|
||||
def setup_class(cls):
|
||||
cls.ds = DocStorage().from_pkg(submodule)
|
||||
cls.tracer = Tracer(cls.ds)
|
||||
|
||||
def test_init(self):
|
||||
ds = self.ds
|
||||
print sorted(ds.descs.keys())
|
||||
assert len(ds.descs) == 6
|
||||
assert py.builtin.sorted(ds.descs.keys()) == [
|
||||
'notpak.notmod.notclass', 'notpak.notmod.notclass.__init__',
|
||||
'pak.mod.one', 'pak.mod.two', 'somenamespace.bar',
|
||||
'somenamespace.foo']
|
||||
|
||||
def test_simple_call(self):
|
||||
ds = self.ds
|
||||
self.tracer.start_tracing()
|
||||
submodule.pak.mod.one(3)
|
||||
self.tracer.end_tracing()
|
||||
desc = self.ds.descs['pak.mod.one']
|
||||
assert isinstance(desc.retval, model.SomeInt)
|
||||
assert isinstance(desc.inputcells[0], model.SomeInt)
|
||||
|
||||
def test_call_class(self):
|
||||
ds = self.ds
|
||||
self.tracer.start_tracing()
|
||||
c = submodule.notpak.notmod.notclass(3)
|
||||
self.tracer.end_tracing()
|
||||
desc = self.ds.descs['notpak.notmod.notclass']
|
||||
methdesc = desc.fields['__init__']
|
||||
assert isinstance(methdesc.inputcells[0], model.SomeInstance)
|
||||
assert isinstance(methdesc.inputcells[1], model.SomeInt)
|
||||
|
|
@ -0,0 +1,55 @@
|
|||
|
||||
""" simple tracer for API generation
|
||||
"""
|
||||
|
||||
import py
|
||||
import sys
|
||||
import types
|
||||
|
||||
from py.__.apigen.tracer.description import FunctionDesc
|
||||
from py.__.apigen.tracer.docstorage import DocStorage
|
||||
|
||||
class UnionError(Exception):
|
||||
pass
|
||||
|
||||
class NoValue(object):
|
||||
pass
|
||||
|
||||
class Tracer(object):
|
||||
""" Basic tracer object, used for gathering additional info
|
||||
about API functions
|
||||
"""
|
||||
def __init__(self, docstorage):
|
||||
self.docstorage = docstorage
|
||||
self.tracing = False
|
||||
|
||||
_locals = {}
|
||||
def _tracer(self, frame, event, arg):
|
||||
|
||||
        # perform the actual tracing
|
||||
frame = py.code.Frame(frame)
|
||||
if event == 'call':
|
||||
assert arg is None
|
||||
try:
|
||||
self.docstorage.consider_call(frame,
|
||||
py.code.Frame(sys._getframe(2)),
|
||||
self.frame)
|
||||
except ValueError:
|
||||
self.docstorage.consider_call(frame, None, self.frame)
|
||||
elif event == 'return':
|
||||
self.docstorage.consider_return(frame, arg)
|
||||
elif event == 'exception':
|
||||
self.docstorage.consider_exception(frame, arg)
|
||||
return self._tracer
|
||||
|
||||
def start_tracing(self):
|
||||
if self.tracing:
|
||||
return
|
||||
self.tracing = True
|
||||
self.frame = py.code.Frame(sys._getframe(1))
|
||||
sys.settrace(self._tracer)
|
||||
|
||||
def end_tracing(self):
|
||||
self.tracing = False
|
||||
sys.settrace(None)
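# Illustrative sketch only (hypothetical helper, not used by Tracer): the
# callback passed to sys.settrace receives (frame, event, arg) for events
# such as 'call', 'line', 'return' and 'exception'; returning the callback
# from a 'call' event keeps tracing inside the new frame, which is what
# Tracer._tracer relies on above.
def _settrace_sketch():
    events = []
    def recorder(frame, event, arg):
        # record every traced event together with the function name
        events.append((event, frame.f_code.co_name))
        return recorder
    def target():
        return 42
    sys.settrace(recorder)
    try:
        target()
    finally:
        sys.settrace(None)
    return events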
|
||||
|
|
@ -0,0 +1,37 @@
|
|||
#!/usr/bin/env python
|
||||
|
||||
#
|
||||
# find and import a version of 'py'
|
||||
#
|
||||
import sys
|
||||
import os
|
||||
from os.path import dirname as opd, exists, join, basename, abspath
|
||||
|
||||
def searchpy(current):
|
||||
while 1:
|
||||
last = current
|
||||
initpy = join(current, '__init__.py')
|
||||
if not exists(initpy):
|
||||
pydir = join(current, 'py')
|
||||
# recognize py-package and ensure it is importable
|
||||
if exists(pydir) and exists(join(pydir, '__init__.py')):
|
||||
#for p in sys.path:
|
||||
# if p == current:
|
||||
# return True
|
||||
if current != sys.path[0]: # if we are already first, then ok
|
||||
print >>sys.stderr, "inserting into sys.path:", current
|
||||
sys.path.insert(0, current)
|
||||
return True
|
||||
current = opd(current)
|
||||
if last == current:
|
||||
return False
|
||||
|
||||
if not searchpy(abspath(os.curdir)):
|
||||
if not searchpy(opd(abspath(sys.argv[0]))):
|
||||
if not searchpy(opd(__file__)):
|
||||
pass # let's hope it is just on sys.path
|
||||
|
||||
import py
|
||||
|
||||
if __name__ == '__main__':
|
||||
print "py lib is at", py.__file__
|
|
@ -0,0 +1,183 @@
|
|||
#!/usr/bin/env python
|
||||
|
||||
from _findpy import py
|
||||
import sys
|
||||
|
||||
pydir = py.path.local(py.__file__).dirpath()
|
||||
rootdir = pydir.dirpath()
|
||||
|
||||
def gen_manifest():
|
||||
pywc = py.path.svnwc(pydir)
|
||||
status = pywc.status(rec=True)
|
||||
#assert not status.modified
|
||||
#assert not status.deleted
|
||||
#assert not status.added
|
||||
versioned = dict([(x.localpath,1) for x in status.allpath()])
|
||||
|
||||
l = []
|
||||
for x in rootdir.visit(None, lambda x: x.basename != '.svn'):
|
||||
if x.check(file=1):
|
||||
names = [y.basename for y in x.parts()]
|
||||
if '.svn' in names:
|
||||
l.append(x)
|
||||
elif x in versioned:
|
||||
l.append(x)
|
||||
l.append(rootdir / "setup.py")
|
||||
l = [x.relto(rootdir) for x in l]
|
||||
l.append("")
|
||||
s = "\n".join(l)
|
||||
return s
|
||||
|
||||
def trace(arg):
|
||||
lines = str(arg).split('\n')
|
||||
prefix = "[trace] "
|
||||
prefix = "* "
|
||||
indent = len(prefix)
|
||||
ispace = " " * indent
|
||||
lines = [ispace + line for line in lines]
|
||||
if lines:
|
||||
lines[0] = prefix + lines[0][indent:]
|
||||
for line in lines:
|
||||
print >>py.std.sys.stdout, line
|
||||
|
||||
def make_distfiles(tmpdir):
|
||||
""" return distdir with tar.gz and zipfile. """
|
||||
manifest = tmpdir.join('MANIFEST')
|
||||
trace("generating %s" %(manifest,))
|
||||
content = gen_manifest()
|
||||
manifest.write(content)
|
||||
trace("wrote %d files into manifest file" %len(content.split('\n')))
|
||||
|
||||
distdir = tmpdir.ensure('dist', dir=1)
|
||||
oldir = rootdir.chdir()
|
||||
try:
|
||||
trace("invoking sdist, generating into %s" % (distdir,))
|
||||
py._dist.setup(py, script_name="setup.py",
|
||||
script_args=('-q', 'sdist', '--no-prune',
|
||||
'-m', str(manifest),
|
||||
'--formats=gztar,zip',
|
||||
'-d', str(distdir)))
|
||||
py._dist.setup(py, script_name="setup.py",
|
||||
script_args=('-q', 'bdist_wininst',
|
||||
#'-m', str(manifest),
|
||||
'-d', str(distdir)))
|
||||
finally:
|
||||
oldir.chdir()
|
||||
return distdir
|
||||
|
||||
|
||||
def pytest(unpacked):
|
||||
trace("py-testing %s" % unpacked)
|
||||
old = unpacked.chdir()
|
||||
try:
|
||||
import os
|
||||
os.system("python py/bin/py.test py")
|
||||
finally:
|
||||
old.chdir()
|
||||
|
||||
def unpackremotetar(tmpdir, strurl):
|
||||
import tarfile, urllib
|
||||
f = urllib.urlopen(strurl)
|
||||
basename = strurl.split('/')[-1]
|
||||
target = tmpdir.join(basename)
|
||||
trace("downloading to %s" %(target,))
|
||||
target.write(f.read())
|
||||
|
||||
trace("extracting to %s" %(target,))
|
||||
old = tmpdir.chdir()
|
||||
try:
|
||||
py.process.cmdexec("tar zxf %s" %(target,))
|
||||
finally:
|
||||
old.chdir()
|
||||
prefix = '.tar.gz'
|
||||
assert basename.endswith(prefix)
|
||||
stripped = basename[:-len(prefix)]
|
||||
unpacked = tmpdir.join(stripped)
|
||||
assert unpacked
|
||||
return unpacked
|
||||
|
||||
def checksvnworks(unpacked):
|
||||
pywc = py.path.svnwc(unpacked.join('py'))
|
||||
trace("checking versioning works: %s" %(pywc,))
|
||||
status = pywc.status(rec=True)
|
||||
assert not status.modified
|
||||
assert not status.deleted
|
||||
assert not status.unknown
|
||||
|
||||
def pytest_remote(address, url):
|
||||
gw = py.execnet.SshGateway(address)
|
||||
basename = url[url.rfind('/')+1:]
|
||||
purebasename = basename[:-len('.tar.gz')]
|
||||
|
||||
def mytrace(x, l=[]):
|
||||
l.append(x)
|
||||
if x.endswith('\n'):
|
||||
trace("".join(l))
|
||||
l[:] = []
|
||||
|
||||
channel = gw.remote_exec(stdout=mytrace, stderr=sys.stderr, source="""
|
||||
url = %(url)r
|
||||
basename = %(basename)r
|
||||
purebasename = %(purebasename)r
|
||||
import os, urllib
|
||||
f = urllib.urlopen(url)
|
||||
print "reading from", url
|
||||
s = f.read()
|
||||
f.close()
|
||||
f = open(basename, 'w')
|
||||
f.write(s)
|
||||
f.close()
|
||||
if os.path.exists(purebasename):
|
||||
import shutil
|
||||
shutil.rmtree(purebasename)
|
||||
os.system("tar zxf %%s" %% (basename,))
|
||||
print "unpacked", purebasename
|
||||
os.chdir(purebasename)
|
||||
print "testing at %(address)s ..."
|
||||
#os.system("python py/bin/py.test py")
|
||||
import commands
|
||||
status, output = commands.getstatusoutput("python py/bin/py.test py")
|
||||
#print output
|
||||
print "status:", status
|
||||
|
||||
""" % locals())
|
||||
channel.waitclose(200.0)
|
||||
|
||||
if __name__ == '__main__':
|
||||
py.magic.invoke(assertion=True)
|
||||
version = py.std.sys.argv[1]
|
||||
assert py.__package__.version == version, (
|
||||
"py package has version %s\nlocation: %s" %
|
||||
(py.__package__.version, pydir))
|
||||
|
||||
tmpdir = py.path.local.get_temproot().join('makepyrelease-%s' % version)
|
||||
if tmpdir.check():
|
||||
trace("removing %s" %(tmpdir,))
|
||||
tmpdir.remove()
|
||||
tmpdir.mkdir()
|
||||
trace("using tmpdir %s" %(tmpdir,))
|
||||
|
||||
distdir = make_distfiles(tmpdir)
|
||||
targz = distdir.join('py-%s.tar.gz' % version)
|
||||
zip = distdir.join('py-%s.zip' % version)
|
||||
files = distdir.listdir()
|
||||
for fn in files:
|
||||
assert fn.check(file=1)
|
||||
|
||||
remotedir = 'codespeak.net://www/codespeak.net/htdocs/download/py/'
|
||||
source = distdir # " ".join([str(x) for x in files])
|
||||
trace("rsyncing %(source)s to %(remotedir)s" % locals())
|
||||
py.process.cmdexec("rsync -avz %(source)s/ %(remotedir)s" % locals())
|
||||
|
||||
ddir = tmpdir.ensure('download', dir=1)
|
||||
URL = py.__package__.download_url # 'http://codespeak.net/download/py/'
|
||||
unpacked = unpackremotetar(ddir, URL)
|
||||
assert unpacked == ddir.join("py-%s" % (version,))
|
||||
|
||||
#checksvnworks(unpacked)
|
||||
#pytest(unpacked)
|
||||
|
||||
pytest_remote('test@codespeak.net', py.__package__.download_url)
|
||||
|
||||
|
||||
|
|
@ -0,0 +1,25 @@
|
|||
#!/usr/bin/env python
|
||||
|
||||
"""\
|
||||
py.cleanup [PATH]
|
||||
|
||||
Delete .pyc files recursively, starting from PATH (which defaults to the current
|
||||
working directory). Don't follow links and don't recurse into directories whose
|
||||
a ".".
|
||||
"""
|
||||
from _findpy import py
|
||||
import py
|
||||
from py.compat import optparse
|
||||
|
||||
parser = optparse.OptionParser(usage=__doc__)
|
||||
|
||||
if __name__ == '__main__':
|
||||
(options, args) = parser.parse_args()
|
||||
|
||||
if len(args) >= 1:
|
||||
        path = py.path.local(args[0])
|
||||
else:
|
||||
path = py.path.local()
|
||||
print "cleaning path", path
|
||||
for x in path.visit('*.pyc', lambda x: x.check(dotfile=0, link=0)):
|
||||
x.remove()
|
|
@ -0,0 +1,23 @@
|
|||
#!/usr/bin/env python
|
||||
|
||||
# hands-on script to compute the non-empty Lines of Code
|
||||
# for tests and non-test code
|
||||
|
||||
"""\
|
||||
py.countloc [PATHS]
|
||||
|
||||
Count (non-empty) lines of python code and number of python files recursively
|
||||
starting from a list of paths given on the command line (starting from the
|
||||
current working directory). Distinguish between test files and normal ones and
|
||||
report them separately.
|
||||
"""
|
||||
from _findpy import py
|
||||
import py
|
||||
from py.compat import optparse
|
||||
from py.__.misc.cmdline.countloc import countloc
|
||||
|
||||
parser = optparse.OptionParser(usage=__doc__)
|
||||
|
||||
if __name__ == '__main__':
|
||||
(options, args) = parser.parse_args()
|
||||
countloc(args)
|
|
@ -0,0 +1,73 @@
|
|||
#!/usr/bin/env python
|
||||
|
||||
"""\
|
||||
py.lookup SEARCH_STRING [options]
|
||||
|
||||
Looks recursively at Python files for a SEARCH_STRING, starting from the
|
||||
present working directory. Prints the line, with the filename and line-number
|
||||
prepended."""
|
||||
|
||||
import sys, os
|
||||
sys.path.insert(0, os.path.dirname(__file__))
|
||||
from _findpy import py
|
||||
from py.__.misc.terminal_helper import ansi_print, terminal_width
|
||||
import re
|
||||
|
||||
curdir = py.path.local()
|
||||
def rec(p):
|
||||
return p.check(dotfile=0)
|
||||
|
||||
optparse = py.compat.optparse
|
||||
|
||||
parser = optparse.OptionParser(usage=__doc__)
|
||||
parser.add_option("-i", "--ignore-case", action="store_true", dest="ignorecase",
|
||||
help="ignore case distinctions")
|
||||
parser.add_option("-C", "--context", action="store", type="int", dest="context",
|
||||
default=0, help="How many lines of output to show")
|
||||
|
||||
def find_indexes(search_line, string):
|
||||
indexes = []
|
||||
before = 0
|
||||
while 1:
|
||||
i = search_line.find(string, before)
|
||||
if i == -1:
|
||||
break
|
||||
indexes.append(i)
|
||||
before = i + len(string)
|
||||
return indexes
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
(options, args) = parser.parse_args()
|
||||
string = args[0]
|
||||
if options.ignorecase:
|
||||
string = string.lower()
|
||||
for x in curdir.visit('*.py', rec):
|
||||
s = x.read()
|
||||
searchs = s
|
||||
if options.ignorecase:
|
||||
searchs = s.lower()
|
||||
        if searchs.find(string) != -1:
|
||||
lines = s.splitlines()
|
||||
if options.ignorecase:
|
||||
searchlines = s.lower().splitlines()
|
||||
else:
|
||||
searchlines = lines
|
||||
for i, (line, searchline) in enumerate(zip(lines, searchlines)):
|
||||
indexes = find_indexes(searchline, string)
|
||||
if not indexes:
|
||||
continue
|
||||
if not options.context:
|
||||
sys.stdout.write("%s:%d: " %(x.relto(curdir), i+1))
|
||||
last_index = 0
|
||||
for index in indexes:
|
||||
sys.stdout.write(line[last_index: index])
|
||||
ansi_print(line[index: index+len(string)],
|
||||
file=sys.stdout, esc=31, newline=False)
|
||||
last_index = index + len(string)
|
||||
sys.stdout.write(line[last_index:] + "\n")
|
||||
else:
|
||||
context = (options.context)/2
|
||||
for count in range(max(0, i-context), min(len(lines) - 1, i+context+1)):
|
||||
print "%s:%d: %s" %(x.relto(curdir), count+1, lines[count].rstrip())
|
||||
print "-" * terminal_width
|
|
@ -0,0 +1,79 @@
|
|||
#!/usr/bin/env python
|
||||
"""
|
||||
invoke
|
||||
|
||||
py.rest filename1.txt directory
|
||||
|
||||
to generate html files from ReST.
|
||||
|
||||
It is also possible to generate pdf files using the --topdf option.
|
||||
|
||||
http://docutils.sourceforge.net/docs/user/rst/quickref.html
|
||||
|
||||
"""
|
||||
|
||||
import os, sys
|
||||
from _findpy import py
|
||||
from py.__.misc import rest
|
||||
from py.__.rest import directive
|
||||
from py.__.rest.latex import process_rest_file, process_configfile
|
||||
|
||||
|
||||
if hasattr(sys.stdout, 'fileno') and os.isatty(sys.stdout.fileno()):
|
||||
def log(msg):
|
||||
print msg
|
||||
else:
|
||||
def log(msg):
|
||||
pass
|
||||
|
||||
optparse = py.compat.optparse
|
||||
|
||||
parser = optparse.OptionParser(usage=__doc__)
|
||||
parser.add_option("--topdf", action="store_true", dest="topdf", default=False,
|
||||
help="generate pdf files")
|
||||
parser.add_option("--stylesheet", dest="stylesheet", default=None,
|
||||
help="use specified latex style sheet")
|
||||
parser.add_option("--debug", action="store_true", dest="debug",
|
||||
default=False,
|
||||
help="print debug output and don't delete files")
|
||||
|
||||
|
||||
if __name__=='__main__':
|
||||
(options, args) = parser.parse_args()
|
||||
|
||||
if len(args) == 0:
|
||||
filenames = [py.path.svnwc()]
|
||||
else:
|
||||
filenames = [py.path.svnwc(x) for x in args]
|
||||
|
||||
if options.topdf:
|
||||
directive.set_backend_and_register_directives("latex")
|
||||
|
||||
for p in filenames:
|
||||
if not p.check():
|
||||
log("path %s not found, ignoring" % p)
|
||||
continue
|
||||
def fil(p):
|
||||
return p.check(fnmatch='*.txt', versioned=True)
|
||||
def rec(p):
|
||||
return p.check(dotfile=0)
|
||||
if p.check(dir=1):
|
||||
for x in p.visit(fil, rec):
|
||||
rest.process(x)
|
||||
elif p.check(file=1):
|
||||
if p.ext == ".rst2pdfconfig":
|
||||
directive.set_backend_and_register_directives("latex")
|
||||
process_configfile(p, options.debug)
|
||||
else:
|
||||
if options.topdf:
|
||||
cfg = p.new(ext=".rst2pdfconfig")
|
||||
if cfg.check():
|
||||
print "using config file %s" % (cfg, )
|
||||
process_configfile(cfg, options.debug)
|
||||
else:
|
||||
process_rest_file(p.localpath,
|
||||
options.stylesheet,
|
||||
options.debug)
|
||||
else:
|
||||
rest.process(p)
|
||||
|
|
@ -0,0 +1,4 @@
|
|||
#!/usr/bin/env python
|
||||
|
||||
from _findpy import py
|
||||
py.test.cmdline.main()
|
|
@ -0,0 +1,3 @@
|
|||
@echo off
|
||||
python "%~dp0\py.test" %*
|
||||
|
|
@ -0,0 +1,2 @@
|
|||
@echo off
|
||||
python "%~dp0\..\py.cleanup" %*
|
|
@ -0,0 +1,2 @@
|
|||
@echo off
|
||||
python "%~dp0\..\py.countloc" %*
|
|
@ -0,0 +1,2 @@
|
|||
@echo off
|
||||
python "%~dp0\..\py.lookup" %*
|
|
@ -0,0 +1,2 @@
|
|||
@echo off
|
||||
python "%~dp0\..\py.rest" %*
|
|
@ -0,0 +1,2 @@
|
|||
@echo off
|
||||
python "%~dp0\..\py.test" %*
|
|
@ -0,0 +1 @@
|
|||
#
|
|
@ -0,0 +1,9 @@
|
|||
from __future__ import generators
|
||||
try:
|
||||
enumerate = enumerate
|
||||
except NameError:
|
||||
def enumerate(iterable):
|
||||
i = 0
|
||||
for x in iterable:
|
||||
yield i, x
|
||||
i += 1
|
|
@ -0,0 +1,4 @@
|
|||
try:
|
||||
BaseException = BaseException
|
||||
except NameError:
|
||||
BaseException = Exception
|
|
@ -0,0 +1,37 @@
|
|||
from __future__ import generators
|
||||
try:
|
||||
reversed = reversed
|
||||
except NameError:
|
||||
|
||||
def reversed(sequence):
|
||||
"""reversed(sequence) -> reverse iterator over values of the sequence
|
||||
|
||||
Return a reverse iterator
|
||||
"""
|
||||
if hasattr(sequence, '__reversed__'):
|
||||
return sequence.__reversed__()
|
||||
if not hasattr(sequence, '__getitem__'):
|
||||
raise TypeError("argument to reversed() must be a sequence")
|
||||
return reversed_iterator(sequence)
|
||||
|
||||
|
||||
class reversed_iterator(object):
|
||||
|
||||
def __init__(self, seq):
|
||||
self.seq = seq
|
||||
self.remaining = len(seq)
|
||||
|
||||
def __iter__(self):
|
||||
return self
|
||||
|
||||
def next(self):
|
||||
i = self.remaining
|
||||
if i > 0:
|
||||
i -= 1
|
||||
item = self.seq[i]
|
||||
self.remaining = i
|
||||
return item
|
||||
raise StopIteration
|
||||
|
||||
def __length_hint__(self):
|
||||
return self.remaining
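# Quick sanity sketch (hypothetical helper, never called): whichever branch
# above was taken, 'reversed' should behave like the builtin for sequences.
def _reversed_sketch():
    assert list(reversed([1, 2, 3])) == [3, 2, 1]
    assert "".join(reversed("abc")) == "cba"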
|
|
@ -0,0 +1,587 @@
|
|||
"""Classes to represent arbitrary sets (including sets of sets).
|
||||
|
||||
This module implements sets using dictionaries whose values are
|
||||
ignored. The usual operations (union, intersection, deletion, etc.)
|
||||
are provided as both methods and operators.
|
||||
|
||||
Important: sets are not sequences! While they support 'x in s',
|
||||
'len(s)', and 'for x in s', none of those operations are unique for
|
||||
sequences; for example, mappings support all three as well. The
|
||||
characteristic operation for sequences is subscripting with small
|
||||
integers: s[i], for i in range(len(s)). Sets don't support
|
||||
subscripting at all. Also, sequences allow multiple occurrences and
|
||||
their elements have a definite order; sets on the other hand don't
|
||||
record multiple occurrences and don't remember the order of element
|
||||
insertion (which is why they don't support s[i]).
|
||||
|
||||
The following classes are provided:
|
||||
|
||||
BaseSet -- All the operations common to both mutable and immutable
|
||||
sets. This is an abstract class, not meant to be directly
|
||||
instantiated.
|
||||
|
||||
Set -- Mutable sets, subclass of BaseSet; not hashable.
|
||||
|
||||
ImmutableSet -- Immutable sets, subclass of BaseSet; hashable.
|
||||
|
||||
|
||||
_TemporarilyImmutableSet -- A wrapper around a Set, hashable,
|
||||
giving the same hash value as the immutable set equivalent
|
||||
would have. Do not use this class directly.
|
||||
|
||||
Only hashable objects can be added to a Set. In particular, you cannot
|
||||
really add a Set as an element to another Set; if you try, what is
|
||||
actually added is an ImmutableSet built from it (it compares equal to
|
||||
the one you tried adding).
|
||||
|
||||
When you ask if `x in y' where x is a Set and y is a Set or
|
||||
ImmutableSet, x is wrapped into a _TemporarilyImmutableSet z, and
|
||||
what's tested is actually `z in y'.
|
||||
|
||||
"""
|
||||
|
||||
# Code history:
|
||||
#
|
||||
# - Greg V. Wilson wrote the first version, using a different approach
|
||||
# to the mutable/immutable problem, and inheriting from dict.
|
||||
#
|
||||
# - Alex Martelli modified Greg's version to implement the current
|
||||
# Set/ImmutableSet approach, and make the data an attribute.
|
||||
#
|
||||
# - Guido van Rossum rewrote much of the code, made some API changes,
|
||||
# and cleaned up the docstrings.
|
||||
#
|
||||
# - Raymond Hettinger added a number of speedups and other
|
||||
# improvements.
|
||||
|
||||
from __future__ import generators
|
||||
try:
|
||||
from itertools import ifilter, ifilterfalse
|
||||
except ImportError:
|
||||
# Code to make the module run under Py2.2
|
||||
def ifilter(predicate, iterable):
|
||||
if predicate is None:
|
||||
def predicate(x):
|
||||
return x
|
||||
for x in iterable:
|
||||
if predicate(x):
|
||||
yield x
|
||||
def ifilterfalse(predicate, iterable):
|
||||
if predicate is None:
|
||||
def predicate(x):
|
||||
return x
|
||||
for x in iterable:
|
||||
if not predicate(x):
|
||||
yield x
|
||||
try:
|
||||
True, False
|
||||
except NameError:
|
||||
True, False = (0==0, 0!=0)
|
||||
|
||||
__all__ = ['BaseSet', 'Set', 'ImmutableSet']
|
||||
|
||||
class BaseSet(object):
|
||||
"""Common base class for mutable and immutable sets."""
|
||||
|
||||
__slots__ = ['_data']
|
||||
|
||||
# Constructor
|
||||
|
||||
def __init__(self):
|
||||
"""This is an abstract class."""
|
||||
# Don't call this from a concrete subclass!
|
||||
if self.__class__ is BaseSet:
|
||||
raise TypeError, ("BaseSet is an abstract class. "
|
||||
"Use Set or ImmutableSet.")
|
||||
|
||||
# Standard protocols: __len__, __repr__, __str__, __iter__
|
||||
|
||||
def __len__(self):
|
||||
"""Return the number of elements of a set."""
|
||||
return len(self._data)
|
||||
|
||||
def __repr__(self):
|
||||
"""Return string representation of a set.
|
||||
|
||||
This looks like 'Set([<list of elements>])'.
|
||||
"""
|
||||
return self._repr()
|
||||
|
||||
# __str__ is the same as __repr__
|
||||
__str__ = __repr__
|
||||
|
||||
def _repr(self, sorted=False):
|
||||
elements = self._data.keys()
|
||||
if sorted:
|
||||
elements.sort()
|
||||
return '%s(%r)' % (self.__class__.__name__, elements)
|
||||
|
||||
def __iter__(self):
|
||||
"""Return an iterator over the elements or a set.
|
||||
|
||||
This is the keys iterator for the underlying dict.
|
||||
"""
|
||||
return self._data.iterkeys()
|
||||
|
||||
# Three-way comparison is not supported. However, because __eq__ is
|
||||
# tried before __cmp__, if Set x == Set y, x.__eq__(y) returns True and
|
||||
# then cmp(x, y) returns 0 (Python doesn't actually call __cmp__ in this
|
||||
# case).
|
||||
|
||||
def __cmp__(self, other):
|
||||
raise TypeError, "can't compare sets using cmp()"
|
||||
|
||||
# Equality comparisons using the underlying dicts. Mixed-type comparisons
|
||||
# are allowed here, where Set == z for non-Set z always returns False,
|
||||
# and Set != z always True. This allows expressions like "x in y" to
|
||||
# give the expected result when y is a sequence of mixed types, not
|
||||
# raising a pointless TypeError just because y contains a Set, or x is
|
||||
    # a Set and y contains a non-set ("in" invokes only __eq__).
|
||||
# Subtle: it would be nicer if __eq__ and __ne__ could return
|
||||
# NotImplemented instead of True or False. Then the other comparand
|
||||
# would get a chance to determine the result, and if the other comparand
|
||||
# also returned NotImplemented then it would fall back to object address
|
||||
# comparison (which would always return False for __eq__ and always
|
||||
# True for __ne__). However, that doesn't work, because this type
|
||||
# *also* implements __cmp__: if, e.g., __eq__ returns NotImplemented,
|
||||
# Python tries __cmp__ next, and the __cmp__ here then raises TypeError.
|
||||
|
||||
def __eq__(self, other):
|
||||
if isinstance(other, BaseSet):
|
||||
return self._data == other._data
|
||||
else:
|
||||
return False
|
||||
|
||||
def __ne__(self, other):
|
||||
if isinstance(other, BaseSet):
|
||||
return self._data != other._data
|
||||
else:
|
||||
return True
|
||||
|
||||
# Copying operations
|
||||
|
||||
def copy(self):
|
||||
"""Return a shallow copy of a set."""
|
||||
result = self.__class__()
|
||||
result._data.update(self._data)
|
||||
return result
|
||||
|
||||
__copy__ = copy # For the copy module
|
||||
|
||||
def __deepcopy__(self, memo):
|
||||
"""Return a deep copy of a set; used by copy module."""
|
||||
# This pre-creates the result and inserts it in the memo
|
||||
# early, in case the deep copy recurses into another reference
|
||||
# to this same set. A set can't be an element of itself, but
|
||||
# it can certainly contain an object that has a reference to
|
||||
# itself.
|
||||
from copy import deepcopy
|
||||
result = self.__class__()
|
||||
memo[id(self)] = result
|
||||
data = result._data
|
||||
value = True
|
||||
for elt in self:
|
||||
data[deepcopy(elt, memo)] = value
|
||||
return result
|
||||
|
||||
# Standard set operations: union, intersection, both differences.
|
||||
# Each has an operator version (e.g. __or__, invoked with |) and a
|
||||
# method version (e.g. union).
|
||||
# Subtle: Each pair requires distinct code so that the outcome is
|
||||
# correct when the type of other isn't suitable. For example, if
|
||||
# we did "union = __or__" instead, then Set().union(3) would return
|
||||
# NotImplemented instead of raising TypeError (albeit that *why* it
|
||||
# raises TypeError as-is is also a bit subtle).
|
||||
|
||||
def __or__(self, other):
|
||||
"""Return the union of two sets as a new set.
|
||||
|
||||
(I.e. all elements that are in either set.)
|
||||
"""
|
||||
if not isinstance(other, BaseSet):
|
||||
return NotImplemented
|
||||
return self.union(other)
|
||||
|
||||
def union(self, other):
|
||||
"""Return the union of two sets as a new set.
|
||||
|
||||
(I.e. all elements that are in either set.)
|
||||
"""
|
||||
result = self.__class__(self)
|
||||
result._update(other)
|
||||
return result
|
||||
|
||||
def __and__(self, other):
|
||||
"""Return the intersection of two sets as a new set.
|
||||
|
||||
(I.e. all elements that are in both sets.)
|
||||
"""
|
||||
if not isinstance(other, BaseSet):
|
||||
return NotImplemented
|
||||
return self.intersection(other)
|
||||
|
||||
def intersection(self, other):
|
||||
"""Return the intersection of two sets as a new set.
|
||||
|
||||
(I.e. all elements that are in both sets.)
|
||||
"""
|
||||
if not isinstance(other, BaseSet):
|
||||
other = Set(other)
|
||||
if len(self) <= len(other):
|
||||
little, big = self, other
|
||||
else:
|
||||
little, big = other, self
|
||||
common = ifilter(big._data.has_key, little)
|
||||
return self.__class__(common)
|
||||
|
||||
def __xor__(self, other):
|
||||
"""Return the symmetric difference of two sets as a new set.
|
||||
|
||||
(I.e. all elements that are in exactly one of the sets.)
|
||||
"""
|
||||
if not isinstance(other, BaseSet):
|
||||
return NotImplemented
|
||||
return self.symmetric_difference(other)
|
||||
|
||||
def symmetric_difference(self, other):
|
||||
"""Return the symmetric difference of two sets as a new set.
|
||||
|
||||
(I.e. all elements that are in exactly one of the sets.)
|
||||
"""
|
||||
result = self.__class__()
|
||||
data = result._data
|
||||
value = True
|
||||
selfdata = self._data
|
||||
try:
|
||||
otherdata = other._data
|
||||
except AttributeError:
|
||||
otherdata = Set(other)._data
|
||||
for elt in ifilterfalse(otherdata.has_key, selfdata):
|
||||
data[elt] = value
|
||||
for elt in ifilterfalse(selfdata.has_key, otherdata):
|
||||
data[elt] = value
|
||||
return result
|
||||
|
||||
def __sub__(self, other):
|
||||
"""Return the difference of two sets as a new Set.
|
||||
|
||||
(I.e. all elements that are in this set and not in the other.)
|
||||
"""
|
||||
if not isinstance(other, BaseSet):
|
||||
return NotImplemented
|
||||
return self.difference(other)
|
||||
|
||||
def difference(self, other):
|
||||
"""Return the difference of two sets as a new Set.
|
||||
|
||||
(I.e. all elements that are in this set and not in the other.)
|
||||
"""
|
||||
result = self.__class__()
|
||||
data = result._data
|
||||
try:
|
||||
otherdata = other._data
|
||||
except AttributeError:
|
||||
otherdata = Set(other)._data
|
||||
value = True
|
||||
for elt in ifilterfalse(otherdata.has_key, self):
|
||||
data[elt] = value
|
||||
return result
|
||||
|
||||
# Membership test
|
||||
|
||||
def __contains__(self, element):
|
||||
"""Report whether an element is a member of a set.
|
||||
|
||||
(Called in response to the expression `element in self'.)
|
||||
"""
|
||||
try:
|
||||
return element in self._data
|
||||
except TypeError:
|
||||
transform = getattr(element, "__as_temporarily_immutable__", None)
|
||||
if transform is None:
|
||||
raise # re-raise the TypeError exception we caught
|
||||
return transform() in self._data
|
||||
|
||||
# Subset and superset test
|
||||
|
||||
def issubset(self, other):
|
||||
"""Report whether another set contains this set."""
|
||||
self._binary_sanity_check(other)
|
||||
if len(self) > len(other): # Fast check for obvious cases
|
||||
return False
|
||||
for elt in ifilterfalse(other._data.has_key, self):
|
||||
return False
|
||||
return True
|
||||
|
||||
def issuperset(self, other):
|
||||
"""Report whether this set contains another set."""
|
||||
self._binary_sanity_check(other)
|
||||
if len(self) < len(other): # Fast check for obvious cases
|
||||
return False
|
||||
for elt in ifilterfalse(self._data.has_key, other):
|
||||
return False
|
||||
return True
|
||||
|
||||
# Inequality comparisons using the is-subset relation.
|
||||
__le__ = issubset
|
||||
__ge__ = issuperset
|
||||
|
||||
def __lt__(self, other):
|
||||
self._binary_sanity_check(other)
|
||||
return len(self) < len(other) and self.issubset(other)
|
||||
|
||||
def __gt__(self, other):
|
||||
self._binary_sanity_check(other)
|
||||
return len(self) > len(other) and self.issuperset(other)
|
||||
|
||||
# Assorted helpers
|
||||
|
||||
def _binary_sanity_check(self, other):
|
||||
# Check that the other argument to a binary operation is also
|
||||
# a set, raising a TypeError otherwise.
|
||||
if not isinstance(other, BaseSet):
|
||||
raise TypeError, "Binary operation only permitted between sets"
|
||||
|
||||
def _compute_hash(self):
|
||||
# Calculate hash code for a set by xor'ing the hash codes of
|
||||
# the elements. This ensures that the hash code does not depend
|
||||
# on the order in which elements are added to the set. This is
|
||||
# not called __hash__ because a BaseSet should not be hashable;
|
||||
# only an ImmutableSet is hashable.
|
||||
result = 0
|
||||
for elt in self:
|
||||
result ^= hash(elt)
|
||||
return result
|
||||
|
||||
def _update(self, iterable):
|
||||
# The main loop for update() and the subclass __init__() methods.
|
||||
data = self._data
|
||||
|
||||
# Use the fast update() method when a dictionary is available.
|
||||
if isinstance(iterable, BaseSet):
|
||||
data.update(iterable._data)
|
||||
return
|
||||
|
||||
value = True
|
||||
|
||||
if type(iterable) in (list, tuple, xrange):
|
||||
# Optimized: we know that __iter__() and next() can't
|
||||
# raise TypeError, so we can move 'try:' out of the loop.
|
||||
it = iter(iterable)
|
||||
while True:
|
||||
try:
|
||||
for element in it:
|
||||
data[element] = value
|
||||
return
|
||||
except TypeError:
|
||||
transform = getattr(element, "__as_immutable__", None)
|
||||
if transform is None:
|
||||
raise # re-raise the TypeError exception we caught
|
||||
data[transform()] = value
|
||||
else:
|
||||
# Safe: only catch TypeError where intended
|
||||
for element in iterable:
|
||||
try:
|
||||
data[element] = value
|
||||
except TypeError:
|
||||
transform = getattr(element, "__as_immutable__", None)
|
||||
if transform is None:
|
||||
raise # re-raise the TypeError exception we caught
|
||||
data[transform()] = value
|
||||
|
||||
|
||||
class ImmutableSet(BaseSet):
|
||||
"""Immutable set class."""
|
||||
|
||||
__slots__ = ['_hashcode']
|
||||
|
||||
# BaseSet + hashing
|
||||
|
||||
def __init__(self, iterable=None):
|
||||
"""Construct an immutable set from an optional iterable."""
|
||||
self._hashcode = None
|
||||
self._data = {}
|
||||
if iterable is not None:
|
||||
self._update(iterable)
|
||||
|
||||
def __hash__(self):
|
||||
if self._hashcode is None:
|
||||
self._hashcode = self._compute_hash()
|
||||
return self._hashcode
|
||||
|
||||
def __getstate__(self):
|
||||
return self._data, self._hashcode
|
||||
|
||||
def __setstate__(self, state):
|
||||
self._data, self._hashcode = state
|
||||
|
||||
class Set(BaseSet):
|
||||
""" Mutable set class."""
|
||||
|
||||
__slots__ = []
|
||||
|
||||
# BaseSet + operations requiring mutability; no hashing
|
||||
|
||||
def __init__(self, iterable=None):
|
||||
"""Construct a set from an optional iterable."""
|
||||
self._data = {}
|
||||
if iterable is not None:
|
||||
self._update(iterable)
|
||||
|
||||
def __getstate__(self):
|
||||
        # pickle support: only the underlying data dict needs to be stored
|
||||
return self._data,
|
||||
|
||||
def __setstate__(self, data):
|
||||
self._data, = data
|
||||
|
||||
def __hash__(self):
|
||||
"""A Set cannot be hashed."""
|
||||
# We inherit object.__hash__, so we must deny this explicitly
|
||||
raise TypeError, "Can't hash a Set, only an ImmutableSet."
|
||||
|
||||
# In-place union, intersection, differences.
|
||||
# Subtle: The xyz_update() functions deliberately return None,
|
||||
# as do all mutating operations on built-in container types.
|
||||
# The __xyz__ spellings have to return self, though.
|
||||
|
||||
def __ior__(self, other):
|
||||
"""Update a set with the union of itself and another."""
|
||||
self._binary_sanity_check(other)
|
||||
self._data.update(other._data)
|
||||
return self
|
||||
|
||||
def union_update(self, other):
|
||||
"""Update a set with the union of itself and another."""
|
||||
self._update(other)
|
||||
|
||||
def __iand__(self, other):
|
||||
"""Update a set with the intersection of itself and another."""
|
||||
self._binary_sanity_check(other)
|
||||
self._data = (self & other)._data
|
||||
return self
|
||||
|
||||
def intersection_update(self, other):
|
||||
"""Update a set with the intersection of itself and another."""
|
||||
if isinstance(other, BaseSet):
|
||||
self &= other
|
||||
else:
|
||||
self._data = (self.intersection(other))._data
|
||||
|
||||
def __ixor__(self, other):
|
||||
"""Update a set with the symmetric difference of itself and another."""
|
||||
self._binary_sanity_check(other)
|
||||
self.symmetric_difference_update(other)
|
||||
return self
|
||||
|
||||
def symmetric_difference_update(self, other):
|
||||
"""Update a set with the symmetric difference of itself and another."""
|
||||
data = self._data
|
||||
value = True
|
||||
if not isinstance(other, BaseSet):
|
||||
other = Set(other)
|
||||
if self is other:
|
||||
self.clear()
|
||||
for elt in other:
|
||||
if elt in data:
|
||||
del data[elt]
|
||||
else:
|
||||
data[elt] = value
|
||||
|
||||
def __isub__(self, other):
|
||||
"""Remove all elements of another set from this set."""
|
||||
self._binary_sanity_check(other)
|
||||
self.difference_update(other)
|
||||
return self
|
||||
|
||||
def difference_update(self, other):
|
||||
"""Remove all elements of another set from this set."""
|
||||
data = self._data
|
||||
if not isinstance(other, BaseSet):
|
||||
other = Set(other)
|
||||
if self is other:
|
||||
self.clear()
|
||||
for elt in ifilter(data.has_key, other):
|
||||
del data[elt]
|
||||
|
||||
# Python dict-like mass mutations: update, clear
|
||||
|
||||
def update(self, iterable):
|
||||
"""Add all values from an iterable (such as a list or file)."""
|
||||
self._update(iterable)
|
||||
|
||||
def clear(self):
|
||||
"""Remove all elements from this set."""
|
||||
self._data.clear()
|
||||
|
||||
# Single-element mutations: add, remove, discard
|
||||
|
||||
def add(self, element):
|
||||
"""Add an element to a set.
|
||||
|
||||
This has no effect if the element is already present.
|
||||
"""
|
||||
try:
|
||||
self._data[element] = True
|
||||
except TypeError:
|
||||
transform = getattr(element, "__as_immutable__", None)
|
||||
if transform is None:
|
||||
raise # re-raise the TypeError exception we caught
|
||||
self._data[transform()] = True
|
||||
|
||||
def remove(self, element):
|
||||
"""Remove an element from a set; it must be a member.
|
||||
|
||||
If the element is not a member, raise a KeyError.
|
||||
"""
|
||||
try:
|
||||
del self._data[element]
|
||||
except TypeError:
|
||||
transform = getattr(element, "__as_temporarily_immutable__", None)
|
||||
if transform is None:
|
||||
raise # re-raise the TypeError exception we caught
|
||||
del self._data[transform()]
|
||||
|
||||
def discard(self, element):
|
||||
"""Remove an element from a set if it is a member.
|
||||
|
||||
If the element is not a member, do nothing.
|
||||
"""
|
||||
try:
|
||||
self.remove(element)
|
||||
except KeyError:
|
||||
pass
|
||||
|
||||
def pop(self):
|
||||
"""Remove and return an arbitrary set element."""
|
||||
return self._data.popitem()[0]
|
||||
|
||||
def __as_immutable__(self):
|
||||
# Return a copy of self as an immutable set
|
||||
return ImmutableSet(self)
|
||||
|
||||
def __as_temporarily_immutable__(self):
|
||||
# Return self wrapped in a temporarily immutable set
|
||||
return _TemporarilyImmutableSet(self)
|
||||
|
||||
|
||||
class _TemporarilyImmutableSet(BaseSet):
|
||||
# Wrap a mutable set as if it was temporarily immutable.
|
||||
# This only supplies hashing and equality comparisons.
|
||||
|
||||
def __init__(self, set):
|
||||
self._set = set
|
||||
self._data = set._data # Needed by ImmutableSet.__eq__()
|
||||
|
||||
def __hash__(self):
|
||||
return self._set._compute_hash()
|
||||
|
||||
|
||||
try:
|
||||
set, frozenset = set, frozenset
|
||||
except NameError:
|
||||
try:
|
||||
from sets import Set as set, ImmutableSet as frozenset
|
||||
except ImportError:
|
||||
set = Set
|
||||
frozenset = ImmutableSet
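# Compatibility note (added commentary): the try/except chain above prefers
# the builtin set/frozenset (Python 2.4+), then the stdlib 'sets' module
# (Python 2.3), and finally the pure-Python Set/ImmutableSet defined above.
# A minimal usage sketch, assuming the py.builtin export used by the tests:
#
#     from py.builtin import set, frozenset
#     assert set([1, 2]) | set([2, 3]) == set([1, 2, 3])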
@ -0,0 +1,31 @@
|
|||
builtin_cmp = cmp # keep a reference: the 'cmp' keyword argument below shadows the builtin
|
||||
|
||||
def _sorted(iterable, cmp=None, key=None, reverse=0):
|
||||
use_cmp = None
|
||||
if key is not None:
|
||||
if cmp is None:
|
||||
def use_cmp(x, y):
|
||||
return builtin_cmp(x[0], y[0])
|
||||
else:
|
||||
def use_cmp(x, y):
|
||||
return cmp(x[0], y[0])
|
||||
l = [(key(element), element) for element in iterable]
|
||||
else:
|
||||
if cmp is not None:
|
||||
use_cmp = cmp
|
||||
l = list(iterable)
|
||||
#print l
|
||||
if use_cmp is not None:
|
||||
l.sort(use_cmp)
|
||||
else:
|
||||
l.sort()
|
||||
if reverse:
|
||||
l.reverse()
|
||||
if key is not None:
|
||||
return [element for (_, element) in l]
|
||||
return l
|
||||
|
||||
try:
|
||||
sorted = sorted
|
||||
except NameError:
|
||||
sorted = _sorted
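# Note on the fallback (added commentary): _sorted() emulates the Python 2.4
# builtin sorted() for Python 2.3.  When 'key' is given, each element is
# decorated as (key(element), element) so that the comparison -- builtin cmp
# or a user-supplied one -- only ever sees the keys.  Illustrative sketch:
#
#     assert _sorted([3, 1, 2]) == [1, 2, 3]
#     assert _sorted("bca", key=str.upper, reverse=True) == ['c', 'b', 'a']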
@ -0,0 +1 @@
|
|||
#
|
|
@ -0,0 +1,7 @@
|
|||
import sys
|
||||
import py
|
||||
|
||||
def test_enumerate():
|
||||
l = [0,1,2]
|
||||
for i,x in py.builtin.enumerate(l):
|
||||
assert i == x
|
|
@ -0,0 +1,13 @@
|
|||
import py
|
||||
|
||||
def test_BaseException():
|
||||
assert issubclass(IndexError, py.builtin.BaseException)
|
||||
assert issubclass(Exception, py.builtin.BaseException)
|
||||
assert issubclass(KeyboardInterrupt, py.builtin.BaseException)
|
||||
|
||||
class MyRandomClass(object):
|
||||
pass
|
||||
assert not issubclass(MyRandomClass, py.builtin.BaseException)
|
||||
|
||||
assert py.builtin.BaseException.__module__ == 'exceptions'
|
||||
assert Exception.__name__ == 'Exception'
|
|
@ -0,0 +1,14 @@
|
|||
from py.builtin import reversed
|
||||
from py.test import raises
|
||||
|
||||
def test_reversed():
|
||||
r = reversed("hello")
|
||||
assert iter(r) is r
|
||||
assert r.next() == "o"
|
||||
assert r.next() == "l"
|
||||
assert r.next() == "l"
|
||||
assert r.next() == "e"
|
||||
assert r.next() == "h"
|
||||
raises(StopIteration, r.next)
|
||||
assert list(reversed(list(reversed("hello")))) == ['h','e','l','l','o']
|
||||
raises(TypeError, reversed, reversed("hello"))
|
|
@ -0,0 +1,14 @@
|
|||
# some small tests to see whether sets are there and work
|
||||
|
||||
from py.builtin import set, frozenset
|
||||
|
||||
def test_simple():
|
||||
s = set([1, 2, 3, 4])
|
||||
assert s == set([3, 4, 2, 1])
|
||||
s1 = s.union(set([5, 6]))
|
||||
assert 5 in s1
|
||||
assert 1 in s1
|
||||
|
||||
def test_frozenset():
|
||||
s = set([frozenset([0, 1]), frozenset([1, 0])])
|
||||
assert len(s) == 1
|
|
@ -0,0 +1,25 @@
|
|||
from __future__ import generators
|
||||
import py
|
||||
from py.__.builtin.sorted import _sorted
|
||||
|
||||
def test_sorted():
|
||||
for s in [_sorted, py.builtin.sorted]:
|
||||
def test():
|
||||
assert s([3, 2, 1]) == [1, 2, 3]
|
||||
assert s([1, 2, 3], reverse=True) == [3, 2, 1]
|
||||
l = s([1, 2, 3, 4, 5, 6], key=lambda x: x % 2)
|
||||
assert l == [2, 4, 6, 1, 3, 5]
|
||||
l = s([1, 2, 3, 4], cmp=lambda x, y: -cmp(x, y))
|
||||
assert l == [4, 3, 2, 1]
|
||||
l = s([1, 2, 3, 4], cmp=lambda x, y: -cmp(x, y),
|
||||
key=lambda x: x % 2)
|
||||
assert l == [1, 3, 2, 4]
|
||||
|
||||
def compare(x, y):
|
||||
assert type(x) == str
|
||||
assert type(y) == str
|
||||
return cmp(x, y)
|
||||
data = 'The quick Brown fox Jumped over The lazy Dog'.split()
|
||||
s(data, cmp=compare, key=str.lower)
|
||||
yield test
|
||||
|
|
@ -0,0 +1,14 @@
|
|||
import py
|
||||
|
||||
class Directory(py.test.collect.Directory):
|
||||
# XXX decide in which situations/platforms we want
# to run the tests in this directory
|
||||
#def recfilter(self, path):
|
||||
# if py.std.sys.platform == 'linux2':
|
||||
# if path.basename == 'greenlet':
|
||||
# return False
|
||||
# return super(Directory, self).recfilter(path)
|
||||
|
||||
#def run(self):
|
||||
# py.test.skip("c-extension testing needs platform selection")
|
||||
pass
|
|
@ -0,0 +1,13 @@
|
|||
|
||||
This code is by Armin Rigo with the core pieces by Christian Tismer.
|
||||
These pieces are:
|
||||
|
||||
- slp_platformselect.h
|
||||
- switch_*.h, included from the previous one.
|
||||
|
||||
All additional credits for the general idea of stack switching also go to
|
||||
Christian. In other words, if it works it is thanks to Christian's work,
|
||||
and if it crashes it is because of a bug of mine :-)
|
||||
|
||||
|
||||
-- Armin
|
|
@ -0,0 +1,148 @@
|
|||
import thread, sys
|
||||
|
||||
__all__ = ['greenlet', 'main', 'getcurrent']
|
||||
|
||||
|
||||
class greenlet(object):
|
||||
__slots__ = ('run', '_controller')
|
||||
|
||||
def __init__(self, run=None, parent=None):
|
||||
if run is not None:
|
||||
self.run = run
|
||||
if parent is not None:
|
||||
self.parent = parent
|
||||
|
||||
def switch(self, *args):
|
||||
global _passaround_
|
||||
_passaround_ = None, args, None
|
||||
self._controller.switch(self)
|
||||
exc, val, tb = _passaround_
|
||||
del _passaround_
|
||||
if exc is None:
|
||||
if isinstance(val, tuple) and len(val) == 1:
|
||||
return val[0]
|
||||
else:
|
||||
return val
|
||||
else:
|
||||
raise exc, val, tb
|
||||
|
||||
def __nonzero__(self):
|
||||
return self._controller.isactive()
|
||||
|
||||
def __new__(cls, *args, **kwds):
|
||||
self = object.__new__(cls)
|
||||
self._controller = _Controller()
|
||||
return self
|
||||
|
||||
def __del__(self):
|
||||
#print 'DEL:', self
|
||||
if self._controller.parent is None:
|
||||
return # don't kill the main greenlet
|
||||
while self._controller.isactive():
|
||||
self._controller.kill(self)
|
||||
|
||||
def getparent(self):
|
||||
return self._controller.parent
|
||||
|
||||
def setparent(self, nparent):
|
||||
if not isinstance(nparent, greenlet):
|
||||
raise TypeError, "parent must be a greenlet"
|
||||
p = nparent
|
||||
while p is not None:
|
||||
if p is self:
|
||||
raise ValueError, "cyclic parent chain"
|
||||
p = p._controller.parent
|
||||
self._controller.parent = nparent
|
||||
|
||||
parent = property(getparent, setparent)
|
||||
del getparent
|
||||
del setparent
|
||||
|
||||
|
||||
class _Controller:
|
||||
# Controllers are separated from greenlets to allow greenlets to be
|
||||
# deallocated while running, when their last reference goes away.
|
||||
# Care is taken to keep only references to controllers in thread's
|
||||
# frames' local variables.
|
||||
|
||||
# _Controller.parent: the parent greenlet.
|
||||
# _Controller.lock: the lock used for synchronization
|
||||
# it is not set before the greenlet starts
|
||||
# it is None after the greenlet stops
|
||||
|
||||
def __init__(self):
|
||||
self.parent = _current_
|
||||
|
||||
def isactive(self):
|
||||
return getattr(self, 'lock', None) is not None
|
||||
|
||||
def switch(self, target):
|
||||
previous = _current_._controller
|
||||
self.switch_no_wait(target)
|
||||
# wait until someone releases this thread's lock
|
||||
previous.lock.acquire()
|
||||
|
||||
def switch_no_wait(self, target):
|
||||
# lock tricks: each greenlet has its own lock which is almost always
|
||||
# in 'acquired' state:
|
||||
# * the current greenlet runs with its lock acquired
|
||||
# * all other greenlets wait on their own lock's acquire() call
|
||||
global _current_
|
||||
try:
|
||||
while 1:
|
||||
_current_ = target
|
||||
lock = self.lock
|
||||
if lock is not None:
|
||||
break
|
||||
target = self.parent
|
||||
self = target._controller
|
||||
except AttributeError:
|
||||
# start the new greenlet
|
||||
lock = self.lock = thread.allocate_lock()
|
||||
lock.acquire()
|
||||
thread.start_new_thread(self.run_thread, (target.run,))
|
||||
else:
|
||||
# release (re-enable) the target greenlet's thread
|
||||
lock.release()
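# Added commentary: in this pure-Python emulation every greenlet is backed by
# a real OS thread plus a dedicated lock.  Only one lock is ever released at
# a time, so only one thread makes progress -- which is what produces the
# cooperative, greenlet-style switching described in the comments above.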
|
||||
|
||||
def run_thread(self, run):
|
||||
#print 'ENTERING', self
|
||||
global _passaround_
|
||||
exc, val, tb = _passaround_
|
||||
if exc is None:
|
||||
try:
|
||||
result = run(*val)
|
||||
except SystemExit, e:
|
||||
_passaround_ = None, (e,), None
|
||||
except:
|
||||
_passaround_ = sys.exc_info()
|
||||
else:
|
||||
_passaround_ = None, (result,), None
|
||||
self.lock = None
|
||||
#print 'LEAVING', self
|
||||
self.switch_no_wait(self.parent)
|
||||
|
||||
def kill(self, target):
|
||||
# see comments in greenlet.c:green_dealloc()
|
||||
global _passaround_
|
||||
self._parent_ = _current_
|
||||
_passaround_ = SystemExit, None, None
|
||||
self.switch(target)
|
||||
exc, val, tb = _passaround_
|
||||
del _passaround_
|
||||
if exc is not None:
|
||||
if val is None:
|
||||
print >> sys.stderr, "Exception", "%s" % (exc,),
|
||||
else:
|
||||
print >> sys.stderr, "Exception", "%s: %s" % (exc, val),
|
||||
print >> sys.stderr, "in", self, "ignored"
|
||||
|
||||
|
||||
_current_ = None
|
||||
main = greenlet()
|
||||
main._controller.lock = thread.allocate_lock()
|
||||
main._controller.lock.acquire()
|
||||
_current_ = main
|
||||
|
||||
def getcurrent():
|
||||
return _current_
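# Usage sketch for this emulation (added commentary, mirroring the C
# greenlet's semantics): switching into a greenlet starts its 'run' callable,
# and values passed to switch() travel across to whichever greenlet resumes
# next.
#
#     def child(x):
#         return main.switch(x + 1)   # hands the incremented value back
#     g = greenlet(child)
#     # g.switch(41) would suspend the main greenlet until child switches
#     # back, then return 42.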
|
|
@ -0,0 +1,977 @@
|
|||
#include "greenlet.h"
|
||||
#include "structmember.h"
|
||||
|
||||
|
||||
/* XXX major open bugs:
|
||||
XXX - no GC. Unfinished greenlets won't be deallocated if they
|
||||
XXX contain a cycle to themselves from anywhere in their frame stack.
|
||||
*/
|
||||
|
||||
/***********************************************************
|
||||
|
||||
A PyGreenlet is a range of C stack addresses that must be
|
||||
saved and restored in such a way that the full range of the
|
||||
stack contains valid data when we switch to it.
|
||||
|
||||
Stack layout for a greenlet:
|
||||
|
||||
| ^^^ |
|
||||
| older data |
|
||||
| |
|
||||
stack_stop . |_______________|
|
||||
. | |
|
||||
. | greenlet data |
|
||||
. | in stack |
|
||||
. * |_______________| . . _____________ stack_copy + stack_saved
|
||||
. | | | |
|
||||
. | data | |greenlet data|
|
||||
. | unrelated | | saved |
|
||||
. | to | | in heap |
|
||||
stack_start . | this | . . |_____________| stack_copy
|
||||
| greenlet |
|
||||
| |
|
||||
| newer data |
|
||||
| vvv |
|
||||
|
||||
|
||||
Note that a greenlet's stack data is typically partly at its correct
|
||||
place in the stack, and partly saved away in the heap, but always in
|
||||
the above configuration: two blocks, the more recent one in the heap
|
||||
and the older one still in the stack (either block may be empty).
|
||||
|
||||
Greenlets are chained: each points to the previous greenlet, which is
|
||||
the one that owns the data currently in the C stack above my
|
||||
stack_stop. The currently running greenlet is the first element of
|
||||
this chain. The main (initial) greenlet is the last one. Greenlets
|
||||
whose stack is entirely in the heap can be skipped from the chain.
|
||||
|
||||
The chain is not related to execution order, but only to the order
|
||||
in which bits of C stack happen to belong to greenlets at a particular
|
||||
point in time.
|
||||
|
||||
The main greenlet doesn't have a stack_stop: it is responsible for the
|
||||
complete rest of the C stack, and we don't know where it begins. We
|
||||
use (char*) -1, the largest possible address.
|
||||
|
||||
States:
|
||||
stack_stop == NULL && stack_start == NULL: did not start yet
|
||||
stack_stop != NULL && stack_start == NULL: already finished
|
||||
stack_stop != NULL && stack_start != NULL: active
|
||||
|
||||
The running greenlet's stack_start is undefined but not NULL.
|
||||
|
||||
***********************************************************/
|
||||
|
||||
/*** global state ***/
|
||||
|
||||
/* In the presence of multithreading, this is a bit tricky:
|
||||
|
||||
- ts_current always stores a reference to a greenlet, but it is
|
||||
not really the current greenlet after a thread switch occurred.
|
||||
|
||||
- each *running* greenlet uses its run_info field to know which
|
||||
thread it is attached to. A greenlet can only run in the thread
|
||||
where it was created. This run_info is a ref to tstate->dict.
|
||||
|
||||
- the thread state dict is used to save and restore ts_current,
|
||||
using the dictionary key 'ts_curkey'.
|
||||
*/
|
||||
|
||||
static PyGreenlet* ts_current;
|
||||
static PyGreenlet* ts_origin;
|
||||
static PyGreenlet* ts_target;
|
||||
static PyObject* ts_passaround;
|
||||
|
||||
/***********************************************************/
|
||||
/* Thread-aware routines, switching global variables when needed */
|
||||
|
||||
#define STATE_OK (ts_current->run_info == PyThreadState_GET()->dict \
|
||||
|| !green_updatecurrent())
|
||||
|
||||
static PyObject* ts_curkey;
|
||||
static PyObject* ts_delkey;
|
||||
static PyObject* PyExc_GreenletError;
|
||||
static PyObject* PyExc_GreenletExit;
|
||||
|
||||
static PyGreenlet* green_create_main(void)
|
||||
{
|
||||
PyGreenlet* gmain;
|
||||
PyObject* dict = PyThreadState_GetDict();
|
||||
if (dict == NULL) {
|
||||
if (!PyErr_Occurred())
|
||||
PyErr_NoMemory();
|
||||
return NULL;
|
||||
}
|
||||
|
||||
/* create the main greenlet for this thread */
|
||||
gmain = (PyGreenlet*) PyType_GenericAlloc(&PyGreen_Type, 0);
|
||||
if (gmain == NULL)
|
||||
return NULL;
|
||||
gmain->stack_start = (char*) 1;
|
||||
gmain->stack_stop = (char*) -1;
|
||||
gmain->run_info = dict;
|
||||
Py_INCREF(dict);
|
||||
return gmain;
|
||||
}
|
||||
|
||||
static int green_updatecurrent(void)
|
||||
{
|
||||
PyThreadState* tstate;
|
||||
PyGreenlet* next;
|
||||
PyGreenlet* previous;
|
||||
PyObject* deleteme;
|
||||
|
||||
/* save ts_current as the current greenlet of its own thread */
|
||||
previous = ts_current;
|
||||
if (PyDict_SetItem(previous->run_info, ts_curkey, (PyObject*) previous))
|
||||
return -1;
|
||||
|
||||
/* get ts_current from the active tstate */
|
||||
tstate = PyThreadState_GET();
|
||||
if (tstate->dict && (next =
|
||||
(PyGreenlet*) PyDict_GetItem(tstate->dict, ts_curkey))) {
|
||||
/* found -- remove it, to avoid keeping a ref */
|
||||
Py_INCREF(next);
|
||||
if (PyDict_SetItem(tstate->dict, ts_curkey, Py_None))
|
||||
PyErr_Clear();
|
||||
}
|
||||
else {
|
||||
/* first time we see this tstate */
|
||||
next = green_create_main();
|
||||
if (next == NULL)
|
||||
return -1;
|
||||
}
|
||||
ts_current = next;
|
||||
Py_DECREF(previous);
|
||||
/* green_dealloc() cannot delete greenlets from other threads, so
|
||||
it stores them in the thread dict; delete them now. */
|
||||
deleteme = PyDict_GetItem(tstate->dict, ts_delkey);
|
||||
if (deleteme != NULL) {
|
||||
PyList_SetSlice(deleteme, 0, INT_MAX, NULL);
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
static PyObject* green_statedict(PyGreenlet* g)
|
||||
{
|
||||
while (!PyGreen_STARTED(g))
|
||||
g = g->parent;
|
||||
return g->run_info;
|
||||
}
|
||||
|
||||
/***********************************************************/
|
||||
|
||||
static int g_save(PyGreenlet* g, char* stop)
|
||||
{
|
||||
/* Save more of g's stack into the heap -- at least up to 'stop'
|
||||
|
||||
g->stack_stop |________|
|
||||
| |
|
||||
| __ stop . . . . .
|
||||
| | ==> . .
|
||||
|________| _______
|
||||
| | | |
|
||||
| | | |
|
||||
g->stack_start | | |_______| g->stack_copy
|
||||
|
||||
*/
|
||||
long sz1 = g->stack_saved;
|
||||
long sz2 = stop - g->stack_start;
|
||||
assert(g->stack_start != NULL);
|
||||
if (sz2 > sz1) {
|
||||
char* c = PyMem_Realloc(g->stack_copy, sz2);
|
||||
if (!c) {
|
||||
PyErr_NoMemory();
|
||||
return -1;
|
||||
}
|
||||
memcpy(c+sz1, g->stack_start+sz1, sz2-sz1);
|
||||
g->stack_copy = c;
|
||||
g->stack_saved = sz2;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
static void slp_restore_state(void)
|
||||
{
|
||||
PyGreenlet* g = ts_target;
|
||||
|
||||
/* Restore the heap copy back into the C stack */
|
||||
if (g->stack_saved != 0) {
|
||||
memcpy(g->stack_start, g->stack_copy, g->stack_saved);
|
||||
PyMem_Free(g->stack_copy);
|
||||
g->stack_copy = NULL;
|
||||
g->stack_saved = 0;
|
||||
}
|
||||
if (ts_current->stack_stop == g->stack_stop)
|
||||
g->stack_prev = ts_current->stack_prev;
|
||||
else
|
||||
g->stack_prev = ts_current;
|
||||
}
|
||||
|
||||
static int slp_save_state(char* stackref)
|
||||
{
|
||||
/* must free all the C stack up to target_stop */
|
||||
char* target_stop = ts_target->stack_stop;
|
||||
assert(ts_current->stack_saved == 0);
|
||||
if (ts_current->stack_start == NULL)
|
||||
ts_current = ts_current->stack_prev; /* not saved if dying */
|
||||
else
|
||||
ts_current->stack_start = stackref;
|
||||
|
||||
while (ts_current->stack_stop < target_stop) {
|
||||
/* ts_current is entirely within the area to free */
|
||||
if (g_save(ts_current, ts_current->stack_stop))
|
||||
return -1; /* XXX */
|
||||
ts_current = ts_current->stack_prev;
|
||||
}
|
||||
if (ts_current != ts_target) {
|
||||
if (g_save(ts_current, target_stop))
|
||||
return -1; /* XXX */
|
||||
}
|
||||
return 0;
|
||||
}
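/* Added note: slp_save_state() walks the stack_prev chain from the current
   greenlet, using g_save() to spill into the heap every greenlet whose data
   still occupies the C-stack region that the target greenlet is about to
   restore over. */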
|
||||
|
||||
|
||||
/*
|
||||
* the following macros are spliced into the OS/compiler
|
||||
* specific code, in order to simplify maintenance.
|
||||
*/
|
||||
|
||||
#define SLP_SAVE_STATE(stackref, stsizediff) \
|
||||
stackref += STACK_MAGIC; \
|
||||
if (slp_save_state((char*)stackref)) return -1; \
|
||||
if (!PyGreen_ACTIVE(ts_target)) return 1; \
|
||||
stsizediff = ts_target->stack_start - (char*)stackref
|
||||
|
||||
#define SLP_RESTORE_STATE() \
|
||||
slp_restore_state()
|
||||
|
||||
|
||||
#define SLP_EVAL
|
||||
#include "slp_platformselect.h"
|
||||
|
||||
#ifndef STACK_MAGIC
|
||||
#error "greenlet needs to be ported to this platform,\
|
||||
or taught how to detect your compiler properly."
|
||||
#endif
|
||||
|
||||
|
||||
/* This is a trick to prevent the compiler from inlining or
|
||||
removing the frames */
|
||||
int (*_PyGreen_slp_switch) (void);
|
||||
int (*_PyGreen_switchstack) (void);
|
||||
void (*_PyGreen_initialstub) (void*);
|
||||
|
||||
static int g_switchstack(void)
|
||||
{
|
||||
/* perform a stack switch according to some global variables
|
||||
that must be set before:
|
||||
- ts_current: current greenlet (holds a reference)
|
||||
- ts_target: greenlet to switch to
|
||||
- ts_passaround: NULL if PyErr_Occurred(),
|
||||
else a tuple of args sent to ts_target (holds a reference)
|
||||
*/
|
||||
int err;
|
||||
{ /* save state */
|
||||
PyThreadState* tstate = PyThreadState_GET();
|
||||
ts_current->recursion_depth = tstate->recursion_depth;
|
||||
ts_current->top_frame = tstate->frame;
|
||||
}
|
||||
ts_origin = ts_current;
|
||||
err = _PyGreen_slp_switch();
|
||||
if (err < 0) { /* error */
|
||||
Py_XDECREF(ts_passaround);
|
||||
ts_passaround = NULL;
|
||||
}
|
||||
else {
|
||||
PyThreadState* tstate = PyThreadState_GET();
|
||||
tstate->recursion_depth = ts_target->recursion_depth;
|
||||
tstate->frame = ts_target->top_frame;
|
||||
ts_target->top_frame = NULL;
|
||||
ts_current = ts_target;
|
||||
Py_INCREF(ts_target);
|
||||
Py_DECREF(ts_origin);
|
||||
}
|
||||
return err;
|
||||
}
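/* Added note: after a successful g_switchstack() execution continues on the
   target greenlet's stack; the Python thread state (recursion depth and
   current frame) has been handed over to ts_target, which becomes the new
   ts_current.  On failure the passaround tuple is dropped so the caller
   sees the error. */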
|
||||
|
||||
static PyObject* g_switch(PyGreenlet* target, PyObject* args)
|
||||
{
|
||||
/* _consumes_ a reference to the args tuple,
|
||||
and returns a new tuple reference */
|
||||
|
||||
/* check ts_current */
|
||||
if (!STATE_OK) {
|
||||
Py_DECREF(args);
|
||||
return NULL;
|
||||
}
|
||||
if (green_statedict(target) != ts_current->run_info) {
|
||||
PyErr_SetString(PyExc_GreenletError,
|
||||
"cannot switch to a different thread");
|
||||
Py_DECREF(args);
|
||||
return NULL;
|
||||
}
|
||||
ts_passaround = args;
|
||||
|
||||
/* find the real target by ignoring dead greenlets,
|
||||
and if necessary starting a greenlet. */
|
||||
while (1) {
|
||||
if (PyGreen_ACTIVE(target)) {
|
||||
ts_target = target;
|
||||
_PyGreen_switchstack();
|
||||
return ts_passaround;
|
||||
}
|
||||
if (!PyGreen_STARTED(target)) {
|
||||
void* dummymarker;
|
||||
ts_target = target;
|
||||
_PyGreen_initialstub(&dummymarker);
|
||||
return ts_passaround;
|
||||
}
|
||||
target = target->parent;
|
||||
}
|
||||
}
|
||||
|
||||
static PyObject *g_handle_exit(PyObject *result)
|
||||
{
|
||||
if (result == NULL &&
|
||||
PyErr_ExceptionMatches(PyExc_GreenletExit)) {
|
||||
/* catch and ignore GreenletExit */
|
||||
PyObject *exc, *val, *tb;
|
||||
PyErr_Fetch(&exc, &val, &tb);
|
||||
if (val == NULL) {
|
||||
Py_INCREF(Py_None);
|
||||
val = Py_None;
|
||||
}
|
||||
result = val;
|
||||
Py_DECREF(exc);
|
||||
Py_XDECREF(tb);
|
||||
}
|
||||
if (result != NULL) {
|
||||
/* package the result into a 1-tuple */
|
||||
PyObject* r = result;
|
||||
result = PyTuple_New(1);
|
||||
if (result)
|
||||
PyTuple_SET_ITEM(result, 0, r);
|
||||
else
|
||||
Py_DECREF(r);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
static void g_initialstub(void* mark)
|
||||
{
|
||||
int err;
|
||||
PyObject* o;
|
||||
|
||||
/* ts_target.run is the object to call in the new greenlet */
|
||||
PyObject* run = PyObject_GetAttrString((PyObject*) ts_target, "run");
|
||||
if (run == NULL) {
|
||||
Py_XDECREF(ts_passaround);
|
||||
ts_passaround = NULL;
|
||||
return;
|
||||
}
|
||||
/* now use run_info to store the statedict */
|
||||
o = ts_target->run_info;
|
||||
ts_target->run_info = green_statedict(ts_target->parent);
|
||||
Py_INCREF(ts_target->run_info);
|
||||
Py_XDECREF(o);
|
||||
|
||||
/* start the greenlet */
|
||||
ts_target->stack_start = NULL;
|
||||
ts_target->stack_stop = (char*) mark;
|
||||
if (ts_current->stack_start == NULL) /* ts_current is dying */
|
||||
ts_target->stack_prev = ts_current->stack_prev;
|
||||
else
|
||||
ts_target->stack_prev = ts_current;
|
||||
ts_target->top_frame = NULL;
|
||||
ts_target->recursion_depth = PyThreadState_GET()->recursion_depth;
|
||||
err = _PyGreen_switchstack();
|
||||
/* returns twice!
|
||||
The 1st time with err=1: we are in the new greenlet
|
||||
The 2nd time with err=0: back in the caller's greenlet
|
||||
*/
|
||||
if (err == 1) {
|
||||
/* in the new greenlet */
|
||||
PyObject* args;
|
||||
PyObject* result;
|
||||
ts_current->stack_start = (char*) 1; /* running */
|
||||
|
||||
args = ts_passaround;
|
||||
if (args == NULL) /* pending exception */
|
||||
result = NULL;
|
||||
else {
|
||||
/* call g.run(*args) */
|
||||
result = PyEval_CallObject(run, args);
|
||||
Py_DECREF(args);
|
||||
}
|
||||
Py_DECREF(run);
|
||||
result = g_handle_exit(result);
|
||||
/* jump back to parent */
|
||||
ts_current->stack_start = NULL; /* dead */
|
||||
g_switch(ts_current->parent, result);
|
||||
/* must not return from here! */
|
||||
Py_FatalError("XXX memory exhausted at a very bad moment");
|
||||
}
|
||||
/* back in the parent */
|
||||
}
|
||||
|
||||
|
||||
/***********************************************************/
|
||||
|
||||
|
||||
static PyObject* green_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
|
||||
{
|
||||
PyObject* o;
|
||||
if (!STATE_OK)
|
||||
return NULL;
|
||||
|
||||
o = type->tp_alloc(type, 0);
|
||||
if (o != NULL) {
|
||||
Py_INCREF(ts_current);
|
||||
((PyGreenlet*) o)->parent = ts_current;
|
||||
}
|
||||
return o;
|
||||
}
|
||||
|
||||
static int green_setrun(PyGreenlet* self, PyObject* nparent, void* c);
|
||||
static int green_setparent(PyGreenlet* self, PyObject* nparent, void* c);
|
||||
|
||||
static int green_init(PyGreenlet *self, PyObject *args, PyObject *kwds)
|
||||
{
|
||||
PyObject *run = NULL;
|
||||
PyObject* nparent = NULL;
|
||||
static char *kwlist[] = {"run", "parent", 0};
|
||||
if (!PyArg_ParseTupleAndKeywords(args, kwds, "|OO:green", kwlist,
|
||||
&run, &nparent))
|
||||
return -1;
|
||||
|
||||
if (run != NULL) {
|
||||
if (green_setrun(self, run, NULL))
|
||||
return -1;
|
||||
}
|
||||
if (nparent != NULL)
|
||||
return green_setparent(self, nparent, NULL);
|
||||
return 0;
|
||||
}
|
||||
|
||||
static int kill_greenlet(PyGreenlet* self)
|
||||
{
|
||||
/* Cannot raise an exception to kill the greenlet if
|
||||
it is not running in the same thread! */
|
||||
if (self->run_info == PyThreadState_GET()->dict) {
|
||||
/* The dying greenlet cannot be a parent of ts_current
|
||||
because the 'parent' field chain would hold a
|
||||
reference */
|
||||
PyObject* result;
|
||||
Py_INCREF(ts_current);
|
||||
self->parent = ts_current;
|
||||
/* Send the greenlet a GreenletExit exception. */
|
||||
PyErr_SetNone(PyExc_GreenletExit);
|
||||
result = g_switch(self, NULL);
|
||||
if (result == NULL)
|
||||
return -1;
|
||||
Py_DECREF(result);
|
||||
return 0;
|
||||
}
|
||||
else {
|
||||
/* Not the same thread! Temporarily save the greenlet
|
||||
into its thread's ts_delkey list. */
|
||||
PyObject* lst;
|
||||
lst = PyDict_GetItem(self->run_info, ts_delkey);
|
||||
if (lst == NULL) {
|
||||
lst = PyList_New(0);
|
||||
if (lst == NULL || PyDict_SetItem(self->run_info,
|
||||
ts_delkey, lst) < 0)
|
||||
return -1;
|
||||
}
|
||||
if (PyList_Append(lst, (PyObject*) self) < 0)
|
||||
return -1;
|
||||
if (!STATE_OK) /* to force ts_delkey to be reconsidered */
|
||||
return -1;
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
static void green_dealloc(PyGreenlet* self)
|
||||
{
|
||||
PyObject *error_type, *error_value, *error_traceback;
|
||||
|
||||
Py_XDECREF(self->parent);
|
||||
self->parent = NULL;
|
||||
if (PyGreen_ACTIVE(self)) {
|
||||
/* Hacks hacks hacks copied from instance_dealloc() */
|
||||
/* Temporarily resurrect the greenlet. */
|
||||
assert(self->ob_refcnt == 0);
|
||||
self->ob_refcnt = 1;
|
||||
/* Save the current exception, if any. */
|
||||
PyErr_Fetch(&error_type, &error_value, &error_traceback);
|
||||
if (kill_greenlet(self) < 0) {
|
||||
PyErr_WriteUnraisable((PyObject*) self);
|
||||
/* XXX what else should we do? */
|
||||
}
|
||||
/* Restore the saved exception. */
|
||||
PyErr_Restore(error_type, error_value, error_traceback);
|
||||
/* Undo the temporary resurrection; can't use DECREF here,
|
||||
* it would cause a recursive call.
|
||||
*/
|
||||
assert(self->ob_refcnt > 0);
|
||||
--self->ob_refcnt;
|
||||
if (self->ob_refcnt == 0 && PyGreen_ACTIVE(self)) {
|
||||
/* Not resurrected, but still not dead!
|
||||
XXX what else should we do? we complain. */
|
||||
PyObject* f = PySys_GetObject("stderr");
|
||||
if (f != NULL) {
|
||||
PyFile_WriteString("GreenletExit did not kill ",
|
||||
f);
|
||||
PyFile_WriteObject((PyObject*) self, f, 0);
|
||||
PyFile_WriteString("\n", f);
|
||||
}
|
||||
Py_INCREF(self); /* leak! */
|
||||
}
|
||||
if (self->ob_refcnt != 0) {
|
||||
/* Resurrected! */
|
||||
int refcnt = self->ob_refcnt;
|
||||
_Py_NewReference((PyObject*) self);
|
||||
self->ob_refcnt = refcnt;
|
||||
#ifdef COUNT_ALLOCS
|
||||
--self->ob_type->tp_frees;
|
||||
--self->ob_type->tp_allocs;
|
||||
#endif
|
||||
return;
|
||||
}
|
||||
}
|
||||
if (self->weakreflist != NULL)
|
||||
PyObject_ClearWeakRefs((PyObject *) self);
|
||||
Py_XDECREF(self->run_info);
|
||||
self->ob_type->tp_free((PyObject*) self);
|
||||
}
|
||||
|
||||
static PyObject* single_result(PyObject* results)
|
||||
{
|
||||
if (results != NULL && PyTuple_Check(results) &&
|
||||
PyTuple_GET_SIZE(results) == 1) {
|
||||
PyObject *result = PyTuple_GET_ITEM(results, 0);
|
||||
Py_INCREF(result);
|
||||
Py_DECREF(results);
|
||||
return result;
|
||||
}
|
||||
else
|
||||
return results;
|
||||
}
|
||||
|
||||
static PyObject *
|
||||
throw_greenlet(PyGreenlet* self, PyObject* typ, PyObject* val, PyObject* tb)
|
||||
{
|
||||
/* Note: _consumes_ a reference to typ, val, tb */
|
||||
PyObject *result = NULL;
|
||||
PyErr_Restore(typ, val, tb);
|
||||
if (PyGreen_STARTED(self) && !PyGreen_ACTIVE(self)) {
|
||||
/* dead greenlet: turn GreenletExit into a regular return */
|
||||
result = g_handle_exit(result);
|
||||
}
|
||||
return single_result(g_switch(self, result));
|
||||
}
|
||||
|
||||
PyDoc_STRVAR(switch_doc,
|
||||
"switch([val]) -> switch execution to greenlet optionally passing a value, "
|
||||
"return value passed when switching back");
|
||||
|
||||
static PyObject* green_switch(PyGreenlet* self, PyObject* args)
|
||||
{
|
||||
Py_INCREF(args);
|
||||
return single_result(g_switch(self, args));
|
||||
}
|
||||
|
||||
#ifndef PyExceptionClass_Check /* Python < 2.5 */
|
||||
# define PyExceptionClass_Check PyClass_Check
|
||||
#endif
|
||||
#ifndef PyExceptionInstance_Check /* Python < 2.5 */
|
||||
# define PyExceptionInstance_Check PyInstance_Check
|
||||
#endif
|
||||
#ifndef PyExceptionInstance_Class /* Python < 2.5 */
|
||||
# define PyExceptionInstance_Class(x) \
|
||||
((PyObject*)((PyInstanceObject*)(x))->in_class)
|
||||
#endif
|
||||
|
||||
PyDoc_STRVAR(throw_doc,
|
||||
"throw(typ[,val[,tb]]) -> raise exception in greenlet, return value passed "
|
||||
"when switching back");
|
||||
|
||||
static PyObject* green_throw(PyGreenlet* self, PyObject* args)
|
||||
{
|
||||
PyObject *typ = PyExc_GreenletExit;
|
||||
PyObject *val = NULL;
|
||||
PyObject *tb = NULL;
|
||||
|
||||
if (!PyArg_ParseTuple(args, "|OOO:throw", &typ, &val, &tb))
|
||||
return NULL;
|
||||
|
||||
/* First, check the traceback argument, replacing None with
|
||||
NULL. */
|
||||
if (tb == Py_None)
|
||||
tb = NULL;
|
||||
else if (tb != NULL && !PyTraceBack_Check(tb)) {
|
||||
PyErr_SetString(PyExc_TypeError,
|
||||
"throw() third argument must be a traceback object");
|
||||
return NULL;
|
||||
}
|
||||
|
||||
Py_INCREF(typ);
|
||||
Py_XINCREF(val);
|
||||
Py_XINCREF(tb);
|
||||
|
||||
if (PyExceptionClass_Check(typ)) {
|
||||
PyErr_NormalizeException(&typ, &val, &tb);
|
||||
}
|
||||
|
||||
else if (PyExceptionInstance_Check(typ)) {
|
||||
/* Raising an instance. The value should be a dummy. */
|
||||
if (val && val != Py_None) {
|
||||
PyErr_SetString(PyExc_TypeError,
|
||||
"instance exception may not have a separate value");
|
||||
goto failed_throw;
|
||||
}
|
||||
else {
|
||||
/* Normalize to raise <class>, <instance> */
|
||||
Py_XDECREF(val);
|
||||
val = typ;
|
||||
typ = PyExceptionInstance_Class(typ);
|
||||
Py_INCREF(typ);
|
||||
}
|
||||
}
|
||||
else {
|
||||
/* Not something you can raise. throw() fails. */
|
||||
PyErr_Format(PyExc_TypeError,
|
||||
"exceptions must be classes, or instances, not %s",
|
||||
typ->ob_type->tp_name);
|
||||
goto failed_throw;
|
||||
}
|
||||
|
||||
return throw_greenlet(self, typ, val, tb);
|
||||
|
||||
failed_throw:
|
||||
/* Didn't use our arguments, so restore their original refcounts */
|
||||
Py_DECREF(typ);
|
||||
Py_XDECREF(val);
|
||||
Py_XDECREF(tb);
|
||||
return NULL;
|
||||
}
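/* Python-level sketch of switch()/throw() (added commentary; the import path
   matches the py.magic export used by the tests in this package):

       from py.magic import greenlet
       def worker(x):
           return x * 2
       g = greenlet(worker)
       assert g.switch(21) == 42   # runs worker(21); its return value comes back

   throw() behaves like switch() except that, instead of passing a value, it
   raises the given exception (GreenletExit by default) at the point where
   the target greenlet is suspended. */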
|
||||
|
||||
static int green_nonzero(PyGreenlet* self)
|
||||
{
|
||||
return PyGreen_ACTIVE(self);
|
||||
}
|
||||
|
||||
static PyObject* green_getdead(PyGreenlet* self, void* c)
|
||||
{
|
||||
PyObject* res;
|
||||
if (PyGreen_ACTIVE(self) || !PyGreen_STARTED(self))
|
||||
res = Py_False;
|
||||
else
|
||||
res = Py_True;
|
||||
Py_INCREF(res);
|
||||
return res;
|
||||
}
|
||||
|
||||
static PyObject* green_getrun(PyGreenlet* self, void* c)
|
||||
{
|
||||
if (PyGreen_STARTED(self) || self->run_info == NULL) {
|
||||
PyErr_SetString(PyExc_AttributeError, "run");
|
||||
return NULL;
|
||||
}
|
||||
Py_INCREF(self->run_info);
|
||||
return self->run_info;
|
||||
}
|
||||
|
||||
static int green_setrun(PyGreenlet* self, PyObject* nrun, void* c)
|
||||
{
|
||||
PyObject* o;
|
||||
if (PyGreen_STARTED(self)) {
|
||||
PyErr_SetString(PyExc_AttributeError,
|
||||
"run cannot be set "
|
||||
"after the start of the greenlet");
|
||||
return -1;
|
||||
}
|
||||
o = self->run_info;
|
||||
self->run_info = nrun;
|
||||
Py_XINCREF(nrun);
|
||||
Py_XDECREF(o);
|
||||
return 0;
|
||||
}
|
||||
|
||||
static PyObject* green_getparent(PyGreenlet* self, void* c)
|
||||
{
|
||||
PyObject* result = self->parent ? (PyObject*) self->parent : Py_None;
|
||||
Py_INCREF(result);
|
||||
return result;
|
||||
}
|
||||
|
||||
static int green_setparent(PyGreenlet* self, PyObject* nparent, void* c)
|
||||
{
|
||||
PyGreenlet* p;
|
||||
if (nparent == NULL) {
|
||||
PyErr_SetString(PyExc_AttributeError, "can't delete attribute");
|
||||
return -1;
|
||||
}
|
||||
if (!PyGreen_Check(nparent)) {
|
||||
PyErr_SetString(PyExc_TypeError, "parent must be a greenlet");
|
||||
return -1;
|
||||
}
|
||||
for (p=(PyGreenlet*) nparent; p; p=p->parent) {
|
||||
if (p == self) {
|
||||
PyErr_SetString(PyExc_ValueError, "cyclic parent chain");
|
||||
return -1;
|
||||
}
|
||||
}
|
||||
p = self->parent;
|
||||
self->parent = (PyGreenlet*) nparent;
|
||||
Py_INCREF(nparent);
|
||||
Py_DECREF(p);
|
||||
return 0;
|
||||
}
|
||||
|
||||
static PyObject* green_getframe(PyGreenlet* self, void* c)
|
||||
{
|
||||
PyObject* result = self->top_frame ? (PyObject*) self->top_frame : Py_None;
|
||||
Py_INCREF(result);
|
||||
return result;
|
||||
}
|
||||
|
||||
|
||||
/***********************************************************/
|
||||
/* C interface */
|
||||
|
||||
PyObject* PyGreen_New(PyObject* run, PyObject* parent)
|
||||
{
|
||||
PyGreenlet* o;
|
||||
if (!PyGreen_Check(parent)) {
|
||||
PyErr_SetString(PyExc_TypeError, "parent must be a greenlet");
|
||||
return NULL;
|
||||
}
|
||||
o = (PyGreenlet*) PyType_GenericAlloc(&PyGreen_Type, 0);
|
||||
if (o == NULL)
|
||||
return NULL;
|
||||
Py_INCREF(run);
|
||||
o->run_info = run;
|
||||
Py_INCREF(parent);
|
||||
o->parent = (PyGreenlet*) parent;
|
||||
return (PyObject*) o;
|
||||
}
|
||||
|
||||
PyObject* PyGreen_Current(void)
|
||||
{
|
||||
if (!STATE_OK)
|
||||
return NULL;
|
||||
return (PyObject*) ts_current;
|
||||
}
|
||||
|
||||
PyObject* PyGreen_Switch(PyObject* g, PyObject* value)
|
||||
{
|
||||
PyGreenlet *self;
|
||||
if (!PyGreen_Check(g)) {
|
||||
PyErr_BadInternalCall();
|
||||
return NULL;
|
||||
}
|
||||
self = (PyGreenlet*) g;
|
||||
Py_XINCREF(value);
|
||||
if (PyGreen_STARTED(self) && !PyGreen_ACTIVE(self))
|
||||
value = g_handle_exit(value);
|
||||
return single_result(g_switch(self, value));
|
||||
}
|
||||
|
||||
int PyGreen_SetParent(PyObject* g, PyObject* nparent)
|
||||
{
|
||||
if (!PyGreen_Check(g)) {
|
||||
PyErr_BadInternalCall();
|
||||
return -1;
|
||||
}
|
||||
return green_setparent((PyGreenlet*) g, nparent, NULL);
|
||||
}
|
||||
|
||||
/***********************************************************/
|
||||
|
||||
|
||||
static PyMethodDef green_methods[] = {
|
||||
{"switch", (PyCFunction)green_switch, METH_VARARGS, switch_doc},
|
||||
{"throw", (PyCFunction)green_throw, METH_VARARGS, throw_doc},
|
||||
{NULL, NULL} /* sentinel */
|
||||
};
|
||||
|
||||
static PyGetSetDef green_getsets[] = {
|
||||
{"run", (getter)green_getrun,
|
||||
(setter)green_setrun, /*XXX*/ NULL},
|
||||
{"parent", (getter)green_getparent,
|
||||
(setter)green_setparent, /*XXX*/ NULL},
|
||||
{"gr_frame", (getter)green_getframe,
|
||||
NULL, /*XXX*/ NULL},
|
||||
{"dead", (getter)green_getdead,
|
||||
NULL, /*XXX*/ NULL},
|
||||
{NULL}
|
||||
};
|
||||
|
||||
static PyNumberMethods green_as_number = {
|
||||
NULL, /* nb_add */
|
||||
NULL, /* nb_subtract */
|
||||
NULL, /* nb_multiply */
|
||||
NULL, /* nb_divide */
|
||||
NULL, /* nb_remainder */
|
||||
NULL, /* nb_divmod */
|
||||
NULL, /* nb_power */
|
||||
NULL, /* nb_negative */
|
||||
NULL, /* nb_positive */
|
||||
NULL, /* nb_absolute */
|
||||
(inquiry)green_nonzero, /* nb_nonzero */
|
||||
};
|
||||
|
||||
PyTypeObject PyGreen_Type = {
|
||||
PyObject_HEAD_INIT(NULL)
|
||||
0,
|
||||
"greenlet.greenlet",
|
||||
sizeof(PyGreenlet),
|
||||
0,
|
||||
(destructor)green_dealloc, /* tp_dealloc */
|
||||
0, /* tp_print */
|
||||
0, /* tp_getattr */
|
||||
0, /* tp_setattr */
|
||||
0, /* tp_compare */
|
||||
0, /* tp_repr */
|
||||
&green_as_number, /* tp_as_number */
0, /* tp_as_sequence */
0, /* tp_as_mapping */
|
||||
0, /* tp_hash */
|
||||
0, /* tp_call */
|
||||
0, /* tp_str */
|
||||
0, /* tp_getattro */
|
||||
0, /* tp_setattro */
|
||||
0, /* tp_as_buffer */
|
||||
Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /* tp_flags */
|
||||
"greenlet(run=None, parent=None)\n\
|
||||
Create a new greenlet object (without running it). \"run\" is the\n\
|
||||
callable to invoke, and \"parent\" is the parent greenlet, which\n\
|
||||
defaults to the current greenlet.", /* tp_doc */
|
||||
0, /* tp_traverse */
|
||||
0, /* tp_clear */
|
||||
0, /* tp_richcompare */
|
||||
offsetof(PyGreenlet, weakreflist), /* tp_weaklistoffset */
|
||||
0, /* tp_iter */
|
||||
0, /* tp_iternext */
|
||||
green_methods, /* tp_methods */
|
||||
0, /* tp_members */
|
||||
green_getsets, /* tp_getset */
|
||||
0, /* tp_base */
|
||||
0, /* tp_dict */
|
||||
0, /* tp_descr_get */
|
||||
0, /* tp_descr_set */
|
||||
0, /* tp_dictoffset */
|
||||
(initproc)green_init, /* tp_init */
|
||||
0, /* tp_alloc */
|
||||
green_new, /* tp_new */
|
||||
};
|
||||
/* XXX need GC support */
|
||||
|
||||
|
||||
static PyObject* mod_getcurrent(PyObject* self)
|
||||
{
|
||||
if (!STATE_OK)
|
||||
return NULL;
|
||||
Py_INCREF(ts_current);
|
||||
return (PyObject*) ts_current;
|
||||
}
|
||||
|
||||
static PyMethodDef GreenMethods[] = {
|
||||
{"getcurrent", (PyCFunction)mod_getcurrent, METH_NOARGS,
|
||||
"greenlet.getcurrent()\n\
|
||||
Returns the current greenlet (i.e. the one which called this\n\
|
||||
function)."},
|
||||
{NULL, NULL} /* Sentinel */
|
||||
};
|
||||
|
||||
static char* copy_on_greentype[] = {
|
||||
"getcurrent", "error", "GreenletExit", NULL
|
||||
};
|
||||
|
||||
void initgreenlet(void)
|
||||
{
|
||||
PyObject* m;
|
||||
PyObject* greenletexit_doc;
|
||||
PyObject* greenletexit_dict;
|
||||
PyObject* greenleterror_doc;
|
||||
PyObject* greenleterror_dict;
|
||||
int error;
|
||||
char** p;
|
||||
_PyGreen_switchstack = g_switchstack;
|
||||
_PyGreen_slp_switch = slp_switch;
|
||||
_PyGreen_initialstub = g_initialstub;
|
||||
m = Py_InitModule("greenlet", GreenMethods);
|
||||
|
||||
ts_curkey = PyString_InternFromString("__greenlet_ts_curkey");
|
||||
ts_delkey = PyString_InternFromString("__greenlet_ts_delkey");
|
||||
if (ts_curkey == NULL || ts_delkey == NULL)
|
||||
return;
|
||||
if (PyType_Ready(&PyGreen_Type) < 0)
|
||||
return;
|
||||
|
||||
greenleterror_dict = PyDict_New();
|
||||
if (greenleterror_dict == NULL)
|
||||
return;
|
||||
greenleterror_doc = PyString_FromString("internal greenlet error");
|
||||
if (greenleterror_doc == NULL) {
|
||||
Py_DECREF(greenleterror_dict);
|
||||
return;
|
||||
}
|
||||
|
||||
error = PyDict_SetItemString(greenleterror_dict, "__doc__", greenleterror_doc);
|
||||
Py_DECREF(greenleterror_doc);
|
||||
if (error == -1) {
|
||||
Py_DECREF(greenleterror_dict);
|
||||
return;
|
||||
}
|
||||
|
||||
PyExc_GreenletError = PyErr_NewException("py.magic.greenlet.error", NULL, greenleterror_dict);
|
||||
Py_DECREF(greenleterror_dict);
|
||||
if (PyExc_GreenletError == NULL)
|
||||
return;
|
||||
|
||||
greenletexit_dict = PyDict_New();
|
||||
if (greenletexit_dict == NULL)
|
||||
return;
|
||||
greenletexit_doc = PyString_FromString("greenlet.GreenletExit\n\
|
||||
This special exception does not propagate to the parent greenlet; it\n\
|
||||
can be used to kill a single greenlet.\n");
|
||||
if (greenletexit_doc == NULL) {
|
||||
Py_DECREF(greenletexit_dict);
|
||||
return;
|
||||
}
|
||||
|
||||
error = PyDict_SetItemString(greenletexit_dict, "__doc__", greenletexit_doc);
|
||||
Py_DECREF(greenletexit_doc);
|
||||
if (error == -1) {
|
||||
Py_DECREF(greenletexit_dict);
|
||||
return;
|
||||
}
|
||||
|
||||
PyExc_GreenletExit = PyErr_NewException("py.magic.greenlet.GreenletExit",
|
||||
NULL, greenletexit_dict);
|
||||
Py_DECREF(greenletexit_dict);
|
||||
if (PyExc_GreenletExit == NULL)
|
||||
return;
|
||||
|
||||
ts_current = green_create_main();
|
||||
if (ts_current == NULL)
|
||||
return;
|
||||
|
||||
Py_INCREF(&PyGreen_Type);
|
||||
PyModule_AddObject(m, "greenlet", (PyObject*) &PyGreen_Type);
|
||||
Py_INCREF(PyExc_GreenletError);
|
||||
PyModule_AddObject(m, "error", PyExc_GreenletError);
|
||||
Py_INCREF(PyExc_GreenletExit);
|
||||
PyModule_AddObject(m, "GreenletExit", PyExc_GreenletExit);
|
||||
|
||||
/* also publish module-level data as attributes of the greentype. */
|
||||
for (p=copy_on_greentype; *p; p++) {
|
||||
PyObject* o = PyObject_GetAttrString(m, *p);
|
||||
if (!o) continue;
|
||||
PyDict_SetItemString(PyGreen_Type.tp_dict, *p, o);
|
||||
Py_DECREF(o);
|
||||
}
|
||||
}
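/* Added summary: module initialisation publishes the greenlet type plus the
   'error' and 'GreenletExit' exceptions, and the copy_on_greentype loop above
   additionally copies getcurrent/error/GreenletExit onto the type itself, so
   they are reachable both as module attributes and as greenlet.<name>. */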
|
|
@ -0,0 +1,41 @@
|
|||
|
||||
/* Greenlet object interface */
|
||||
|
||||
#ifndef Py_GREENLETOBJECT_H
|
||||
#define Py_GREENLETOBJECT_H
|
||||
#ifdef __cplusplus
|
||||
extern "C" {
|
||||
#endif
|
||||
|
||||
#include <Python.h>
|
||||
|
||||
typedef struct _greenlet {
|
||||
PyObject_HEAD
|
||||
char* stack_start;
|
||||
char* stack_stop;
|
||||
char* stack_copy;
|
||||
long stack_saved;
|
||||
struct _greenlet* stack_prev;
|
||||
struct _greenlet* parent;
|
||||
PyObject* run_info;
|
||||
struct _frame* top_frame;
|
||||
int recursion_depth;
|
||||
PyObject* weakreflist;
|
||||
} PyGreenlet;
|
||||
|
||||
extern PyTypeObject PyGreen_Type;
|
||||
|
||||
#define PyGreen_Check(op) PyObject_TypeCheck(op, &PyGreen_Type)
|
||||
#define PyGreen_STARTED(op) (((PyGreenlet*)(op))->stack_stop != NULL)
|
||||
#define PyGreen_ACTIVE(op) (((PyGreenlet*)(op))->stack_start != NULL)
|
||||
#define PyGreen_GET_PARENT(op) (((PyGreenlet*)(op))->parent)
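/* Added note on the state encoding used by the macros above (it mirrors the
   "States" table in greenlet.c): stack_stop == NULL means the greenlet was
   never started, stack_stop != NULL with stack_start == NULL means it has
   finished, and both non-NULL means it is active. */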
|
||||
|
||||
PyObject* PyGreen_New(PyObject* run, PyObject* parent);
|
||||
PyObject* PyGreen_Current(void);
|
||||
PyObject* PyGreen_Switch(PyObject* g, PyObject* args); /* g.switch(*args) */
|
||||
int PyGreen_SetParent(PyObject* g, PyObject* nparent); /* g.parent = ... */
|
||||
|
||||
#ifdef __cplusplus
|
||||
}
|
||||
#endif
|
||||
#endif /* !Py_GREENLETOBJECT_H */
|
|
@ -0,0 +1,8 @@
|
|||
from distutils.core import setup
|
||||
from distutils.extension import Extension
|
||||
|
||||
setup ( name = "greenlet",
|
||||
version = "0.1",
|
||||
ext_modules=[Extension(name = 'greenlet',
|
||||
sources = ['greenlet.c'])]
|
||||
)
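# Build sketch (added commentary): the extension is typically compiled in
# place with the standard distutils command, e.g.
#
#     python setup.py build_ext --inplace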
|
|
@ -0,0 +1,21 @@
|
|||
/*
|
||||
* Platform Selection for Stackless Python
|
||||
*/
|
||||
|
||||
#if defined(MS_WIN32) && !defined(MS_WIN64) && defined(_M_IX86)
|
||||
#include "switch_x86_msvc.h" /* MS Visual Studio on X86 */
|
||||
#elif defined(__GNUC__) && defined(__amd64__)
|
||||
#include "switch_amd64_unix.h" /* gcc on amd64 */
|
||||
#elif defined(__GNUC__) && defined(__i386__)
|
||||
#include "switch_x86_unix.h" /* gcc on X86 */
|
||||
#elif defined(__GNUC__) && defined(__PPC__) && defined(__linux__)
|
||||
#include "switch_ppc_unix.h" /* gcc on PowerPC */
|
||||
#elif defined(__GNUC__) && defined(__ppc__) && defined(__APPLE__)
|
||||
#include "switch_ppc_macosx.h" /* Apple MacOS X on PowerPC */
|
||||
#elif defined(__GNUC__) && defined(sparc) && defined(sun)
|
||||
#include "switch_sparc_sun_gcc.h" /* SunOS sparc with gcc */
|
||||
#elif defined(__GNUC__) && defined(__s390__) && defined(__linux__)
|
||||
#include "switch_s390_unix.h" /* Linux/S390 */
|
||||
#elif defined(__GNUC__) && defined(__s390x__) && defined(__linux__)
|
||||
#include "switch_s390_unix.h" /* Linux/S390 zSeries (identical) */
|
||||
#endif
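/* Added note: on any platform not matched above, STACK_MAGIC stays undefined
   and greenlet.c aborts compilation with its explicit #error, so unsupported
   targets fail loudly at build time rather than misbehaving at runtime. */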
|
|
@ -0,0 +1,66 @@
|
|||
/*
|
||||
* this is the internal transfer function.
|
||||
*
|
||||
* HISTORY
|
||||
* 01-Apr-04 Hye-Shik Chang <perky@FreeBSD.org>
|
||||
* Ported from i386 to amd64.
|
||||
* 24-Nov-02 Christian Tismer <tismer@tismer.com>
|
||||
* needed to add another magic constant to ensure
* that f in slp_eval_frame(PyFrameObject *f)
* gets included into the saved stack area.
* STACK_REFPLUS will probably be 1 in most cases.
|
||||
* 17-Sep-02 Christian Tismer <tismer@tismer.com>
|
||||
* after virtualizing stack save/restore, the
|
||||
* stack size shrunk a bit. Needed to introduce
|
||||
* an adjustment STACK_MAGIC per platform.
|
||||
* 15-Sep-02 Gerd Woetzel <gerd.woetzel@GMD.DE>
|
||||
* slightly changed framework for spark
|
||||
* 31-Avr-02 Armin Rigo <arigo@ulb.ac.be>
|
||||
* Added ebx, esi and edi register-saves.
|
||||
* 01-Mar-02 Samual M. Rushing <rushing@ironport.com>
|
||||
* Ported from i386.
|
||||
*/
|
||||
|
||||
#define STACK_REFPLUS 1
|
||||
|
||||
#ifdef SLP_EVAL
|
||||
|
||||
/* #define STACK_MAGIC 3 */
|
||||
/* the above works fine with gcc 2.96, but 2.95.3 wants this */
|
||||
#define STACK_MAGIC 0
|
||||
|
||||
#define REGS_TO_SAVE "rdx", "rbx", "r12", "r13", "r14", "r15"
|
||||
|
||||
|
||||
static int
|
||||
slp_switch(void)
|
||||
{
|
||||
register long *stackref, stsizediff;
|
||||
__asm__ volatile ("" : : : REGS_TO_SAVE);
|
||||
__asm__ ("movq %%rsp, %0" : "=g" (stackref));
|
||||
{
|
||||
SLP_SAVE_STATE(stackref, stsizediff);
|
||||
__asm__ volatile (
|
||||
"addq %0, %%rsp\n"
|
||||
"addq %0, %%rbp\n"
|
||||
:
|
||||
: "r" (stsizediff)
|
||||
);
|
||||
SLP_RESTORE_STATE();
|
||||
}
|
||||
__asm__ volatile ("" : : : REGS_TO_SAVE);
|
||||
return 0;
|
||||
}
|
||||
|
||||
#endif
|
||||
|
||||
/*
|
||||
* further self-processing support
|
||||
*/
|
||||
|
||||
/*
|
||||
* if you want to add self-inspection tools, place them
|
||||
* here. See the x86_msvc for the necessary defines.
|
||||
* These features are highly experimental and not
|
||||
* essential yet.
|
||||
*/
|
|
@ -0,0 +1,80 @@
|
|||
/*
|
||||
* this is the internal transfer function.
|
||||
*
|
||||
* HISTORY
|
||||
* 07-Sep-05 (py-dev mailing list discussion)
|
||||
* removed 'r31' from the register-saved. !!!! WARNING !!!!
|
||||
* It means that this file can no longer be compiled statically!
|
||||
* It is now only suitable as part of a dynamic library!
|
||||
* 14-Jan-04 Bob Ippolito <bob@redivi.com>
|
||||
* added cr2-cr4 to the registers to be saved.
|
||||
* Open questions: Should we save FP registers?
|
||||
* What about vector registers?
|
||||
* Differences between darwin and unix?
|
||||
* 24-Nov-02 Christian Tismer <tismer@tismer.com>
|
||||
* needed to add another magic constant to ensure
* that f in slp_eval_frame(PyFrameObject *f)
* gets included into the saved stack area.
* STACK_REFPLUS will probably be 1 in most cases.
|
||||
* 17-Sep-02 Christian Tismer <tismer@tismer.com>
|
||||
* after virtualizing stack save/restore, the
|
||||
* stack size shrunk a bit. Needed to introduce
|
||||
* an adjustment STACK_MAGIC per platform.
|
||||
* 15-Sep-02 Gerd Woetzel <gerd.woetzel@GMD.DE>
|
||||
* slightly changed framework for sparc
|
||||
* 29-Jun-02 Christian Tismer <tismer@tismer.com>
|
||||
* Added register 13-29, 31 saves. The same way as
|
||||
* Armin Rigo did for the x86_unix version.
|
||||
* This seems to be now fully functional!
|
||||
* 04-Mar-02 Hye-Shik Chang <perky@fallin.lv>
|
||||
* Ported from i386.
|
||||
*/
|
||||
|
||||
#define STACK_REFPLUS 1
|
||||
|
||||
#ifdef SLP_EVAL
|
||||
|
||||
#define STACK_MAGIC 3
|
||||
|
||||
/* !!!!WARNING!!!! need to add "r31" in the next line if this header file
|
||||
* is meant to be compiled non-dynamically!
|
||||
*/
|
||||
#define REGS_TO_SAVE "r13", "r14", "r15", "r16", "r17", "r18", "r19", "r20", \
|
||||
"r21", "r22", "r23", "r24", "r25", "r26", "r27", "r28", "r29", \
|
||||
"cr2", "cr3", "cr4"
|
||||
|
||||
static int
|
||||
slp_switch(void)
|
||||
{
|
||||
register int *stackref, stsizediff;
|
||||
__asm__ volatile ("" : : : REGS_TO_SAVE);
|
||||
__asm__ ("; asm block 2\n\tmr %0, r1" : "=g" (stackref) : );
|
||||
{
|
||||
SLP_SAVE_STATE(stackref, stsizediff);
|
||||
__asm__ volatile (
|
||||
"; asm block 3\n"
|
||||
"\tmr r11, %0\n"
|
||||
"\tadd r1, r1, r11\n"
|
||||
"\tadd r30, r30, r11\n"
|
||||
: /* no outputs */
|
||||
: "g" (stsizediff)
|
||||
: "r11"
|
||||
);
|
||||
SLP_RESTORE_STATE();
|
||||
}
|
||||
__asm__ volatile ("" : : : REGS_TO_SAVE);
|
||||
return 0;
|
||||
}
|
||||
|
||||
#endif
|
||||
|
||||
/*
|
||||
* further self-processing support
|
||||
*/
|
||||
|
||||
/*
|
||||
* if you want to add self-inspection tools, place them
|
||||
* here. See the x86_msvc for the necessary defines.
|
||||
* These features are highly experimental and not
|
||||
* essential yet.
|
||||
*/
|
|
@ -0,0 +1,80 @@
|
|||
/*
|
||||
* this is the internal transfer function.
|
||||
*
|
||||
* HISTORY
|
||||
* 07-Sep-05 (py-dev mailing list discussion)
|
||||
* removed 'r31' from the register-saved. !!!! WARNING !!!!
|
||||
* It means that this file can no longer be compiled statically!
|
||||
* It is now only suitable as part of a dynamic library!
|
||||
* 14-Jan-04 Bob Ippolito <bob@redivi.com>
|
||||
* added cr2-cr4 to the registers to be saved.
|
||||
* Open questions: Should we save FP registers?
|
||||
* What about vector registers?
|
||||
* Differences between darwin and unix?
|
||||
* 24-Nov-02 Christian Tismer <tismer@tismer.com>
|
||||
* needed to add another magic constant to ensure
* that f in slp_eval_frame(PyFrameObject *f)
* gets included into the saved stack area.
* STACK_REFPLUS will probably be 1 in most cases.
|
||||
* 04-Oct-02 Gustavo Niemeyer <niemeyer@conectiva.com>
|
||||
* Ported from MacOS version.
|
||||
* 17-Sep-02 Christian Tismer <tismer@tismer.com>
|
||||
* after virtualizing stack save/restore, the
|
||||
* stack size shrunk a bit. Needed to introduce
|
||||
* an adjustment STACK_MAGIC per platform.
|
||||
* 15-Sep-02 Gerd Woetzel <gerd.woetzel@GMD.DE>
|
||||
* slightly changed framework for sparc
|
||||
* 29-Jun-02 Christian Tismer <tismer@tismer.com>
|
||||
* Added register 13-29, 31 saves. The same way as
|
||||
* Armin Rigo did for the x86_unix version.
|
||||
* This seems to be now fully functional!
|
||||
* 04-Mar-02 Hye-Shik Chang <perky@fallin.lv>
|
||||
* Ported from i386.
|
||||
*/
|
||||
|
||||
#define STACK_REFPLUS 1
|
||||
|
||||
#ifdef SLP_EVAL
|
||||
|
||||
#define STACK_MAGIC 3
|
||||
|
||||
/* !!!!WARNING!!!! need to add "r31" in the next line if this header file
|
||||
* is meant to be compiled non-dynamically!
|
||||
*/
|
||||
#define REGS_TO_SAVE "r13", "r14", "r15", "r16", "r17", "r18", "r19", "r20", \
|
||||
"r21", "r22", "r23", "r24", "r25", "r26", "r27", "r28", "r29", \
|
||||
"cr2", "cr3", "cr4"
|
||||
static int
|
||||
slp_switch(void)
|
||||
{
|
||||
register int *stackref, stsizediff;
|
||||
__asm__ volatile ("" : : : REGS_TO_SAVE);
|
||||
__asm__ ("mr %0, 1" : "=g" (stackref) : );
|
||||
{
|
||||
SLP_SAVE_STATE(stackref, stsizediff);
|
||||
__asm__ volatile (
|
||||
"mr 11, %0\n"
|
||||
"add 1, 1, 11\n"
|
||||
"add 30, 30, 11\n"
|
||||
: /* no outputs */
|
||||
: "g" (stsizediff)
|
||||
: "11"
|
||||
);
|
||||
SLP_RESTORE_STATE();
|
||||
}
|
||||
__asm__ volatile ("" : : : REGS_TO_SAVE);
|
||||
return 0;
|
||||
}
|
||||
|
||||
#endif
|
||||
|
||||
/*
|
||||
* further self-processing support
|
||||
*/
|
||||
|
||||
/*
|
||||
* if you want to add self-inspection tools, place them
|
||||
* here. See the x86_msvc for the necessary defines.
|
||||
* These features are highly experimental and not
|
||||
* essential yet.
|
||||
*/
|
|
@ -0,0 +1,54 @@
|
|||
/*
|
||||
* this is the internal transfer function.
|
||||
*
|
||||
* HISTORY
|
||||
* 24-Nov-02 Christian Tismer <tismer@tismer.com>
|
||||
* needed to add another magic constant to ensure
* that f in slp_eval_frame(PyFrameObject *f)
* gets included into the saved stack area.
* STACK_REFPLUS will probably be 1 in most cases.
|
||||
* 06-Oct-02 Gustavo Niemeyer <niemeyer@conectiva.com>
|
||||
* Ported to Linux/S390.
|
||||
*/
|
||||
|
||||
#define STACK_REFPLUS 1
|
||||
|
||||
#ifdef SLP_EVAL
|
||||
|
||||
#define STACK_MAGIC 0
|
||||
|
||||
#define REGS_TO_SAVE "r0", "r1", "r2", "r3", "r4", "r5", "r6", "r14", \
|
||||
"f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7", \
|
||||
"f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15"
|
||||
|
||||
static int
|
||||
slp_switch(void)
|
||||
{
|
||||
register int *stackref, stsizediff;
|
||||
__asm__ volatile ("" : : : REGS_TO_SAVE);
|
||||
__asm__ ("lr %0, 15" : "=g" (stackref) : );
|
||||
{
|
||||
SLP_SAVE_STATE(stackref, stsizediff);
|
||||
__asm__ volatile (
|
||||
"ar 15, %0"
|
||||
: /* no outputs */
|
||||
: "g" (stsizediff)
|
||||
);
|
||||
SLP_RESTORE_STATE();
|
||||
}
|
||||
__asm__ volatile ("" : : : REGS_TO_SAVE);
|
||||
return 0;
|
||||
}
|
||||
|
||||
#endif
|
||||
|
||||
/*
|
||||
* further self-processing support
|
||||
*/
|
||||
|
||||
/*
|
||||
* if you want to add self-inspection tools, place them
|
||||
* here. See the x86_msvc for the necessary defines.
|
||||
* These features are highly experimental and not
|
||||
* essential yet.
|
||||
*/
|
|
@ -0,0 +1,85 @@
|
|||
/*
|
||||
* this is the internal transfer function.
|
||||
*
|
||||
* HISTORY
|
||||
* 24-Nov-02 Christian Tismer <tismer@tismer.com>
|
||||
* needed to add another magic constant to ensure
* that f in slp_eval_frame(PyFrameObject *f)
* gets included into the saved stack area.
* STACK_REFPLUS will probably be 1 in most cases.
|
||||
* 17-Sep-02 Christian Tismer <tismer@tismer.com>
|
||||
* after virtualizing stack save/restore, the
|
||||
* stack size shrunk a bit. Needed to introduce
|
||||
* an adjustment STACK_MAGIC per platform.
|
||||
* 15-Sep-02 Gerd Woetzel <gerd.woetzel@GMD.DE>
|
||||
* added support for SunOS sparc with gcc
|
||||
*/
|
||||
|
||||
#define STACK_REFPLUS 1
|
||||
|
||||
#ifdef SLP_EVAL
|
||||
|
||||
#include <sys/trap.h>
|
||||
|
||||
#define STACK_MAGIC 0
|
||||
|
||||
static int
|
||||
slp_switch(void)
|
||||
{
|
||||
register int *stackref, stsizediff;
|
||||
|
||||
/* Put the stack pointer into stackref */
|
||||
|
||||
/* Sparc special: at first, flush register windows
|
||||
*/
|
||||
__asm__ volatile (
|
||||
"ta %1\n\t"
|
||||
"mov %%sp, %0"
|
||||
: "=r" (stackref) : "i" (ST_FLUSH_WINDOWS));
|
||||
|
||||
{ /* You shalt put SLP_SAVE_STATE into a local block */
|
||||
|
||||
SLP_SAVE_STATE(stackref, stsizediff);
|
||||
|
||||
/* Increment stack and frame pointer by stsizediff */
|
||||
|
||||
/* Sparc special: at first load new return address.
|
||||
This cannot be done later, because the stack
|
||||
might be overwritten again just after SLP_RESTORE_STATE
|
||||
has finished. BTW: All other registers (l0-l7 and i0-i5)
|
||||
might be clobbered too.
|
||||
*/
|
||||
__asm__ volatile (
|
||||
"ld [%0+60], %%i7\n\t"
|
||||
"add %1, %%sp, %%sp\n\t"
|
||||
"add %1, %%fp, %%fp"
|
||||
: : "r" (_cst->stack), "r" (stsizediff)
|
||||
: "%l0", "%l1", "%l2", "%l3", "%l4", "%l5", "%l6", "%l7",
|
||||
"%i0", "%i1", "%i2", "%i3", "%i4", "%i5");
|
||||
|
||||
SLP_RESTORE_STATE();
|
||||
|
||||
/* Run far away as fast as possible, don't look back at the sins.
|
||||
* The LORD rained down burning sulfur on Sodom and Gomorra ...
|
||||
*/
|
||||
|
||||
/* Sparc special: Must make it *very* clear to the CPU that
|
||||
it shouldn't look back into the register windows
|
||||
*/
|
||||
__asm__ volatile ( "ta %0" : : "i" (ST_CLEAN_WINDOWS));
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
#endif
|
||||
|
||||
/*
|
||||
* further self-processing support
|
||||
*/
|
||||
|
||||
/*
|
||||
* if you want to add self-inspection tools, place them
|
||||
* here. See the x86_msvc for the necessary defines.
|
||||
* These features are highly experimental and not
|
||||
* essential yet.
|
||||
*/
|
|
@ -0,0 +1,73 @@
|
|||
/*
|
||||
* this is the internal transfer function.
|
||||
*
|
||||
* HISTORY
|
||||
* 24-Nov-02 Christian Tismer <tismer@tismer.com>
|
||||
* needed to add another magic constant to ensure
* that f in slp_eval_frame(PyFrameObject *f)
* gets included into the saved stack area.
* STACK_REFPLUS will probably be 1 in most cases.
|
||||
* 26-Sep-02 Christian Tismer <tismer@tismer.com>
|
||||
* again as a result of virtualized stack access,
|
||||
* the compiler used less registers. Needed to
|
||||
* explicit mention registers in order to get them saved.
|
||||
* Thanks to Jeff Senn for pointing this out and help.
|
||||
* 17-Sep-02 Christian Tismer <tismer@tismer.com>
|
||||
* after virtualizing stack save/restore, the
|
||||
* stack size shrunk a bit. Needed to introduce
|
||||
* an adjustment STACK_MAGIC per platform.
|
||||
* 15-Sep-02 Gerd Woetzel <gerd.woetzel@GMD.DE>
|
||||
* slightly changed framework for sparc
|
||||
* 01-Mar-02 Christian Tismer <tismer@tismer.com>
|
||||
* Initial final version after lots of iterations for i386.
|
||||
*/
|
||||
|
||||
#define alloca _alloca
|
||||
|
||||
#define STACK_REFPLUS 1
|
||||
|
||||
#ifdef SLP_EVAL
|
||||
|
||||
#define STACK_MAGIC 0
|
||||
|
||||
static int
|
||||
slp_switch(void)
|
||||
{
|
||||
register int *stackref, stsizediff;
|
||||
__asm mov stackref, esp;
|
||||
/* modify EBX, ESI and EDI in order to get them preserved */
|
||||
__asm mov ebx, ebx;
|
||||
__asm xchg esi, edi;
|
||||
{
|
||||
SLP_SAVE_STATE(stackref, stsizediff);
|
||||
__asm {
|
||||
mov eax, stsizediff
|
||||
add esp, eax
|
||||
add ebp, eax
|
||||
}
|
||||
SLP_RESTORE_STATE();
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
#endif
|
||||
|
||||
/*
|
||||
* further self-processing support
|
||||
*/
|
||||
|
||||
/* we have IsBadReadPtr available, so we can peek at objects */
|
||||
#define STACKLESS_SPY
|
||||
|
||||
#ifdef IMPLEMENT_STACKLESSMODULE
|
||||
#include "Windows.h"
|
||||
#define CANNOT_READ_MEM(p, bytes) IsBadReadPtr(p, bytes)
|
||||
|
||||
static int IS_ON_STACK(void*p)
|
||||
{
|
||||
int stackref;
|
||||
int stackbase = ((int)&stackref) & 0xfffff000;
|
||||
return (int)p >= stackbase && (int)p < stackbase + 0x00100000;
|
||||
}
|
||||
|
||||
#endif
|
|
@ -0,0 +1,69 @@
/*
 * this is the internal transfer function.
 *
 * HISTORY
 * 07-Sep-05 (py-dev mailing list discussion)
 *      removed 'ebx' from the saved registers. !!!! WARNING !!!!
 *      It means that this file can no longer be compiled statically!
 *      It is now only suitable as part of a dynamic library!
 * 24-Nov-02 Christian Tismer <tismer@tismer.com>
 *      needed to add another magic constant to ensure
 *      that f in slp_eval_frame(PyFrameObject *f)
 *      gets included into the saved stack area.
 *      STACK_REFPLUS will probably be 1 in most cases.
 * 17-Sep-02 Christian Tismer <tismer@tismer.com>
 *      after virtualizing stack save/restore, the
 *      stack size shrunk a bit. Needed to introduce
 *      an adjustment STACK_MAGIC per platform.
 * 15-Sep-02 Gerd Woetzel <gerd.woetzel@GMD.DE>
 *      slightly changed framework for sparc
 * 31-Avr-02 Armin Rigo <arigo@ulb.ac.be>
 *      Added ebx, esi and edi register-saves.
 * 01-Mar-02 Samual M. Rushing <rushing@ironport.com>
 *      Ported from i386.
 */

#define STACK_REFPLUS 1

#ifdef SLP_EVAL

/* #define STACK_MAGIC 3 */
/* the above works fine with gcc 2.96, but 2.95.3 wants this */
#define STACK_MAGIC 0

static int
slp_switch(void)
{
    register int *stackref, stsizediff;
    /* !!!!WARNING!!!! need to add "ebx" in the next line, as well as in the
     * last line of this function, if this header file is meant to be compiled
     * non-dynamically!
     */
    __asm__ volatile ("" : : : "esi", "edi");
    __asm__ ("movl %%esp, %0" : "=g" (stackref));
    {
        SLP_SAVE_STATE(stackref, stsizediff);
        __asm__ volatile (
            "addl %0, %%esp\n"
            "addl %0, %%ebp\n"
            :
            : "r" (stsizediff)
            );
        SLP_RESTORE_STATE();
    }
    __asm__ volatile ("" : : : "esi", "edi");
    return 0;
}

#endif

/*
 * further self-processing support
 */

/*
 * if you want to add self-inspection tools, place them
 * here. See the x86_msvc for the necessary defines.
 * These features are highly experimental and not
 * essential yet.
 */
@ -0,0 +1,58 @@
import py
try:
    from py.magic import greenlet
except RuntimeError, e:
    py.test.skip(str(e))


class genlet(greenlet):

    def __init__(self, *args, **kwds):
        self.args = args
        self.kwds = kwds

    def run(self):
        fn, = self.fn
        fn(*self.args, **self.kwds)

    def __iter__(self):
        return self

    def next(self):
        self.parent = greenlet.getcurrent()
        result = self.switch()
        if self:
            return result
        else:
            raise StopIteration

def Yield(value):
    g = greenlet.getcurrent()
    while not isinstance(g, genlet):
        if g is None:
            raise RuntimeError, 'yield outside a genlet'
        g = g.parent
    g.parent.switch(value)

def generator(func):
    class generator(genlet):
        fn = (func,)
    return generator

# ____________________________________________________________

def test_generator():
    seen = []

    def g(n):
        for i in range(n):
            seen.append(i)
            Yield(i)
    g = generator(g)

    for k in range(3):
        for j in g(5):
            seen.append(j)

    assert seen == 3 * [0, 0, 1, 1, 2, 2, 3, 3, 4, 4]
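For illustration only, not part of this changeset: a minimal usage sketch of the generator()/Yield() helpers defined in the test file above, assuming py.magic.greenlet is importable and that the helpers are in scope. Each Yield() switches back to the consuming greenlet with a value, and the wrapped function is exhausted once it returns, raising StopIteration like a plain generator.

    # illustrative sketch only -- assumes the genlet/Yield/generator helpers above
    def countdown(n):
        while n > 0:
            Yield(n)        # switch back to the consumer with the current value
            n -= 1
    countdown = generator(countdown)    # wrap the plain function into an iterator

    collected = []
    for value in countdown(3):          # each next() switches into the genlet
        collected.append(value)
    assert collected == [3, 2, 1]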
@ -0,0 +1,161 @@
from __future__ import generators
import py
try:
    from py.magic import greenlet
except (ImportError, RuntimeError), e:
    py.test.skip(str(e))

class genlet(greenlet):

    def __init__(self, *args, **kwds):
        self.args = args
        self.kwds = kwds
        self.child = None

    def run(self):
        fn, = self.fn
        fn(*self.args, **self.kwds)

    def __iter__(self):
        return self

    def set_child(self, child):
        self.child = child

    def next(self):
        if self.child:
            child = self.child
            while child.child:
                tmp = child
                child = child.child
                tmp.child = None

            result = child.switch()
        else:
            self.parent = greenlet.getcurrent()
            result = self.switch()

        if self:
            return result
        else:
            raise StopIteration

def Yield(value, level = 1):
    g = greenlet.getcurrent()

    while level != 0:
        if not isinstance(g, genlet):
            raise RuntimeError, 'yield outside a genlet'
        if level > 1:
            g.parent.set_child(g)
        g = g.parent
        level -= 1

    g.switch(value)

def Genlet(func):
    class Genlet(genlet):
        fn = (func,)
    return Genlet

# ____________________________________________________________

def g1(n, seen):
    for i in range(n):
        seen.append(i+1)
        yield i

def g2(n, seen):
    for i in range(n):
        seen.append(i+1)
        Yield(i)

g2 = Genlet(g2)

def nested(i):
    Yield(i)

def g3(n, seen):
    for i in range(n):
        seen.append(i+1)
        nested(i)
g3 = Genlet(g3)

def test_genlet_simple():

    for g in [g1, g2, g3]:
        seen = []
        for k in range(3):
            for j in g(5, seen):
                seen.append(j)

        assert seen == 3 * [1, 0, 2, 1, 3, 2, 4, 3, 5, 4]

def test_genlet_bad():
    try:
        Yield(10)
    except RuntimeError:
        pass

test_genlet_bad()
test_genlet_simple()
test_genlet_bad()

def a(n):
    if n == 0:
        return
    for ii in ax(n-1):
        Yield(ii)
    Yield(n)
ax = Genlet(a)

def test_nested_genlets():
    seen = []
    for ii in ax(5):
        seen.append(ii)

test_nested_genlets()

def perms(l):
    if len(l) > 1:
        for e in l:
            # No syntactical sugar for generator expressions
            [Yield([e] + p) for p in perms([x for x in l if x!=e])]
    else:
        Yield(l)

perms = Genlet(perms)

def test_perms():
    gen_perms = perms(range(4))
    permutations = list(gen_perms)
    assert len(permutations) == 4*3*2*1
    assert [0,1,2,3] in permutations
    assert [3,2,1,0] in permutations
    res = []
    for ii in zip(perms(range(4)), perms(range(3))):
        res.append(ii)
    # XXX Test to make sure we are working as a generator expression
test_perms()


def gr1(n):
    for ii in range(1, n):
        Yield(ii)
        Yield(ii * ii, 2)

gr1 = Genlet(gr1)

def gr2(n, seen):
    for ii in gr1(n):
        seen.append(ii)

gr2 = Genlet(gr2)

def test_layered_genlets():
    seen = []
    for ii in gr2(5, seen):
        seen.append(ii)
    assert seen == [1, 1, 2, 4, 3, 9, 4, 16]

test_layered_genlets()
@ -0,0 +1,144 @@
import py
try:
    from py.magic import greenlet
except (ImportError, RuntimeError), e:
    py.test.skip(str(e))

import sys, gc
from py.test import raises
try:
    import thread, threading
except ImportError:
    thread = None

def test_simple():
    lst = []
    def f():
        lst.append(1)
        greenlet.getcurrent().parent.switch()
        lst.append(3)
    g = greenlet(f)
    lst.append(0)
    g.switch()
    lst.append(2)
    g.switch()
    lst.append(4)
    assert lst == range(5)

def test_threads():
    if not thread:
        py.test.skip("this is a test about threads")
    success = []
    def f():
        test_simple()
        success.append(True)
    ths = [threading.Thread(target=f) for i in range(10)]
    for th in ths:
        th.start()
    for th in ths:
        th.join()
    assert len(success) == len(ths)


class SomeError(Exception):
    pass

def fmain(seen):
    try:
        greenlet.getcurrent().parent.switch()
    except:
        seen.append(sys.exc_info()[0])
        raise
    raise SomeError

def test_exception():
    seen = []
    g1 = greenlet(fmain)
    g2 = greenlet(fmain)
    g1.switch(seen)
    g2.switch(seen)
    g2.parent = g1
    assert seen == []
    raises(SomeError, g2.switch)
    assert seen == [SomeError]
    g2.switch()
    assert seen == [SomeError]

def send_exception(g, exc):
    # note: send_exception(g, exc) can now be done with g.throw(exc).
    # the purpose of this test is to explicitly check the propagation rules.
    def crasher(exc):
        raise exc
    g1 = greenlet(crasher, parent=g)
    g1.switch(exc)

def test_send_exception():
    seen = []
    g1 = greenlet(fmain)
    g1.switch(seen)
    raises(KeyError, "send_exception(g1, KeyError)")
    assert seen == [KeyError]

def test_dealloc():
    seen = []
    g1 = greenlet(fmain)
    g2 = greenlet(fmain)
    g1.switch(seen)
    g2.switch(seen)
    assert seen == []
    del g1
    gc.collect()
    assert seen == [greenlet.GreenletExit]
    del g2
    gc.collect()
    assert seen == [greenlet.GreenletExit, greenlet.GreenletExit]

def test_dealloc_other_thread():
    if not thread:
        py.test.skip("this is a test about threads")
    seen = []
    someref = []
    lock = thread.allocate_lock()
    lock.acquire()
    lock2 = thread.allocate_lock()
    lock2.acquire()
    def f():
        g1 = greenlet(fmain)
        g1.switch(seen)
        someref.append(g1)
        del g1
        gc.collect()
        lock.release()
        lock2.acquire()
        greenlet()   # trigger release
        lock.release()
        lock2.acquire()
    t = threading.Thread(target=f)
    t.start()
    lock.acquire()
    assert seen == []
    assert len(someref) == 1
    del someref[:]
    gc.collect()
    # g1 is not released immediately because it's from another thread
    assert seen == []
    lock2.release()
    lock.acquire()
    assert seen == [greenlet.GreenletExit]
    lock2.release()
    t.join()

def test_frame():
    def f1():
        f = sys._getframe(0)
        assert f.f_back is None
        greenlet.getcurrent().parent.switch(f)
        return "meaning of life"
    g = greenlet(f1)
    frame = g.switch()
    assert frame is g.gr_frame
    assert g
    next = g.switch()
    assert not g
    assert next == "meaning of life"
    assert g.gr_frame is None
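For illustration only, not part of this changeset: a minimal sketch of the switching pattern the tests above rely on, assuming the same py.magic.greenlet API (greenlet(func), g.switch(value), greenlet.getcurrent().parent). A value passed to switch() becomes the return value of the switch() call that resumed the other side, and a greenlet's return value is handed to its parent.

    from py.magic import greenlet    # assumed available, as in the tests above

    def worker(x):
        # runs in its own greenlet; its parent is the greenlet that created it
        y = greenlet.getcurrent().parent.switch(x + 1)   # hand x+1 back, wait for y
        return y * 2                                     # returned to the parent

    g = greenlet(worker)
    assert g.switch(10) == 11      # first switch starts worker(10); it hands back 11
    assert g.switch(5) == 10       # worker resumes with y=5, returns 10 and dies
    assert g.dead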
@ -0,0 +1,66 @@
import py
try:
    from py.magic import greenlet
except (ImportError, RuntimeError), e:
    py.test.skip(str(e))


class RGreenletBunch:

    def __init__(self, gateway):
        self.channel = gateway.remote_exec('''
            from py.magic import greenlet
            glob = {"greenlet": greenlet}
            gids = {}
            while True:
                key, code, args = channel.receive()
                if args is not None:
                    if code is not None:
                        def run(code=code):
                            exec code in glob, {}
                        gids[key] = greenlet(run)
                    result = gids[key].switch(*args)
                    channel.send(result)
                else:
                    del gids[key]
        ''')

    def greenlet(self, code):
        return RGreenlet(self, code)


class RGreenlet:

    def __init__(self, bunch, code):
        self.channel = bunch.channel
        self.code = str(py.code.Source(code))

    def switch(self, *args):
        self.channel.send((id(self), self.code, args))
        self.code = None     # only send over the code the first time
        return self.channel.receive()

    def __del__(self):
        if self.code is None:
            self.channel.send((id(self), None, None))


def test_rgreenlet():
    gw = py.execnet.PopenGateway()
    bunch = RGreenletBunch(gw)
    g = bunch.greenlet('''
        x = greenlet.getcurrent().parent.switch(42)
        y = greenlet.getcurrent().parent.switch(x+1)
        greenlet.getcurrent().parent.switch(y+2)
        import os
        greenlet.getcurrent().parent.switch(os.getpid())
    ''')
    result = g.switch()
    assert result == 42
    result = g.switch(102)
    assert result == 103
    result = g.switch(-93)
    assert result == -91
    import os
    result = g.switch()
    assert result != os.getpid()
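For illustration only, not part of this changeset: the remote-greenlet helper above is built on py.execnet channels; below is a minimal sketch of that underlying send/receive round trip, assuming py.execnet.PopenGateway works as it does in test_rgreenlet.

    import py

    gw = py.execnet.PopenGateway()        # spawn a subprocess interpreter
    channel = gw.remote_exec('''
        # runs remotely: echo one received item back, incremented
        channel.send(channel.receive() + 1)
    ''')
    channel.send(41)
    assert channel.receive() == 42
    gw.exit()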
@ -0,0 +1,97 @@
import py
try:
    from py.magic import greenlet
except (ImportError, RuntimeError), e:
    py.test.skip(str(e))

def switch(*args):
    return greenlet.getcurrent().parent.switch(*args)

def test_class():
    def f():
        try:
            switch("ok")
        except RuntimeError:
            switch("ok")
            return
        switch("fail")

    g = greenlet(f)
    res = g.switch()
    assert res == "ok"
    res = g.throw(RuntimeError)
    assert res == "ok"

def test_val():
    def f():
        try:
            switch("ok")
        except RuntimeError, val:
            if str(val) == "ciao":
                switch("ok")
                return
        switch("fail")

    g = greenlet(f)
    res = g.switch()
    assert res == "ok"
    res = g.throw(RuntimeError("ciao"))
    assert res == "ok"

    g = greenlet(f)
    res = g.switch()
    assert res == "ok"
    res = g.throw(RuntimeError, "ciao")
    assert res == "ok"

def test_kill():
    def f():
        switch("ok")
        switch("fail")

    g = greenlet(f)
    res = g.switch()
    assert res == "ok"
    res = g.throw()
    assert isinstance(res, greenlet.GreenletExit)
    assert g.dead
    res = g.throw()    # immediately eaten by the already-dead greenlet
    assert isinstance(res, greenlet.GreenletExit)

def test_throw_goes_to_original_parent():
    main = greenlet.getcurrent()
    def f1():
        try:
            main.switch("f1 ready to catch")
        except IndexError:
            return "caught"
        else:
            return "normal exit"
    def f2():
        main.switch("from f2")

    g1 = greenlet(f1)
    g2 = greenlet(f2, parent=g1)
    py.test.raises(IndexError, g2.throw, IndexError)
    assert g2.dead
    assert g1.dead

    g1 = greenlet(f1)
    g2 = greenlet(f2, parent=g1)
    res = g1.switch()
    assert res == "f1 ready to catch"
    res = g2.throw(IndexError)
    assert res == "caught"
    assert g2.dead
    assert g1.dead

    g1 = greenlet(f1)
    g2 = greenlet(f2, parent=g1)
    res = g1.switch()
    assert res == "f1 ready to catch"
    res = g2.switch()
    assert res == "from f2"
    res = g2.throw(IndexError)
    assert res == "caught"
    assert g2.dead
    assert g1.dead
@ -0,0 +1 @@
#