import py
import os
import inspect

from py.__.apigen.layout import LayoutPage
from py.__.apigen.source import browser as source_browser
from py.__.apigen.source import html as source_html
from py.__.apigen.source import color as source_color
from py.__.apigen.tracer.description import is_private
from py.__.apigen.rest.genrest import split_of_last_part
from py.__.apigen.linker import relpath
from py.__.apigen.html import H

sorted = py.builtin.sorted
html = py.xml.html
raw = py.xml.raw

def is_navigateable(name):
    """ return True if a name should show up in the navigation """
    return (not is_private(name) and name != '__doc__')

def show_property(name):
    """ decide whether an attribute name should be shown as a property """
    if not name.startswith('_'):
        return True
    if name.startswith('__') and name.endswith('__'):
        # XXX do we need to skip more manually here?
        if (name not in dir(object) and
                name not in ['__doc__', '__dict__', '__name__', '__module__',
                             '__weakref__']):
            return True
    return False

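# A rough illustration of the filter above (informal, not executed on
# import): public names are shown, single-underscore names are hidden, and
# dunder names are shown only when they are neither defined on object nor in
# the explicit skip list, e.g.:
#
#   show_property('count')        -> True
#   show_property('_cache')       -> False
#   show_property('__init__')     -> False  (defined on object)
#   show_property('__weakref__')  -> False  (explicitly skipped)
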
def deindent(text, linesep='\n'):
    """ de-indent a string

        can be used to de-indent Python docstrings: the first line is always
        stripped of its leading whitespace, and the rest of the text is
        de-indented by the indentation of its least indented (non-empty) line
    """
    lines = text.strip().split(linesep)
    normalized = []
    min_indent = None
    normalized.append(lines[0].strip())
    # replace tabs with spaces, turn lines containing only whitespace into
    # empty ones, and find out what the smallest indentation is
    for line in lines[1:]:
        line = line.replace('\t', ' ' * 4)
        stripped = line.strip()
        if not stripped:
            normalized.append('')
        else:
            rstripped = line.rstrip()
            indent = len(rstripped) - len(stripped)
            if min_indent is None or indent < min_indent:
                min_indent = indent
            normalized.append(line)
    ret = [normalized[0]]
    for line in normalized[1:]:
        if not line:
            ret.append(line)
        else:
            ret.append(line[min_indent:])
    return '%s\n' % (linesep.join(ret),)

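# Informal doctest-style sketch of deindent() (values made up, not executed
# on import): the first line loses all leading whitespace, the remaining
# lines lose the smallest indentation found among the non-empty ones:
#
#   >>> deindent('  first\n      second\n        third\n')
#   'first\nsecond\n  third\n'
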
def get_linesep(s, default='\n'):
    """ return the line separator of a string

        returns 'default' if no separator can be found
    """
    for sep in ('\r\n', '\r', '\n'):
        if sep in s:
            return sep
    return default

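# Informal examples of get_linesep() (not executed on import):
#
#   >>> get_linesep('foo\r\nbar')
#   '\r\n'
#   >>> get_linesep('no line ending here')
#   '\n'
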
def get_param_htmldesc(linker, func):
    """ get the html for the parameters of a function """
    # XXX copy and modify formatargspec to produce html
    return inspect.formatargspec(*inspect.getargspec(func))

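# For a plain Python 2 function, the getargspec/formatargspec combination
# above renders the signature as a string; a hypothetical example (names
# made up, not executed on import):
#
#   >>> def f(self, name, default=None): pass
#   >>> inspect.formatargspec(*inspect.getargspec(f))
#   '(self, name, default=None)'
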
# some helper functionality
def source_dirs_files(fspath):
    """ returns a tuple (dirs, files) for fspath

        dirs are all the subdirs, files are the files which are interesting
        in building source documentation for a Python code tree (basically all
        normal files excluding .pyc and .pyo ones)

        all files and dirs that have a name starting with . are considered
        hidden
    """
    dirs = []
    files = []
    for child in fspath.listdir():
        if child.basename.startswith('.'):
            continue
        if child.check(dir=True):
            dirs.append(child)
        elif child.check(file=True):
            if child.ext in ['.pyc', '.pyo']:
                continue
            files.append(child)
    return sorted(dirs), sorted(files)

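# Sketch of what source_dirs_files() returns for a hypothetical directory
# containing 'sub/', 'mod.py', 'mod.pyc' and '.hidden' (not executed here):
# the .pyc file and the hidden entry are skipped, so the result is the
# sorted pair ([<py.path.local 'sub'>], [<py.path.local 'mod.py'>]).
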
def create_namespace_tree(dotted_names):
    """ creates a tree (in dict form) from a set of dotted names
    """
    ret = {}
    for dn in dotted_names:
        path = dn.split('.')
        for i in xrange(len(path)):
            ns = '.'.join(path[:i])
            itempath = '.'.join(path[:i + 1])
            if ns not in ret:
                ret[ns] = []
            if itempath not in ret[ns]:
                ret[ns].append(itempath)
    return ret

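# Informal example of create_namespace_tree() (not executed on import):
# create_namespace_tree(['py.path.local']) produces a dict mapping
#
#   ''        -> ['py']
#   'py'      -> ['py.path']
#   'py.path' -> ['py.path.local']
#
# i.e. every namespace level maps to the dotted names directly below it,
# with the root namespace stored under the empty string.
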
def wrap_page(project, title, contentel, navel, relbase, basepath,
              pageclass):
    page = pageclass(project, title, nav=navel, encoding='UTF-8',
                     relpath=relbase)
    page.set_content(contentel)
    page.setup_scripts_styles(basepath)
    return page

def enumerate_and_color(codelines, firstlineno, enc):
    snippet = H.SourceCode()
    tokenizer = source_color.Tokenizer(source_color.PythonSchema)
    for i, line in enumerate(codelines):
        try:
            snippet.add_line(i + firstlineno + 1,
                             source_html.prepare_line([line], tokenizer, enc))
        except py.error.ENOENT:
            # error reading source code: give up on coloring the rest and
            # return what we have so far
            break
    return snippet

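# enumerate_and_color() is used below to render both whole modules and
# function snippets; a hypothetical call (not executed here) such as
# enumerate_and_color(['def f():', '    pass'], 10, 'utf-8') yields an
# H.SourceCode tag whose two lines are numbered 11 and 12 and are run
# through the Python syntax colorer.
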
def get_obj(pkg, dotted_name):
    """ retrieve an object from a package by (relative) dotted name """
    full_dotted_name = '%s.%s' % (pkg.__name__, dotted_name)
    if dotted_name == '':
        return pkg
    path = dotted_name.split('.')
    ret = pkg
    for item in path:
        marker = []
        ret = getattr(ret, item, marker)
        if ret is marker:
            raise NameError('can not access %s in %s' % (item,
                                                         full_dotted_name))
    return ret

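# Informal examples of get_obj() (not executed on import):
# get_obj(py, 'path.local') walks the attribute path and returns the
# py.path.local class, get_obj(py, '') returns the py package itself, and a
# missing attribute anywhere along the path raises NameError.
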
# the PageBuilder classes take care of producing the docs (using the stuff
# above)
class AbstractPageBuilder(object):
    pageclass = LayoutPage

    def write_page(self, title, reltargetpath, tag, nav):
        targetpath = self.base.join(reltargetpath)
        relbase = relpath('%s%s' % (targetpath.dirpath(), targetpath.sep),
                          self.base.strpath + '/')
        page = wrap_page(self.project, title, tag, nav, relbase, self.base,
                         self.pageclass)
        # we write the page with _temporary_ hrefs here, they need to be
        # replaced by the TempLinker later
        content = page.unicode()
        targetpath.ensure()
        targetpath.write(content.encode("utf8"))

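# A hypothetical use of write_page() (names made up): a builder whose
# self.base points at the output directory could call
# self.write_page('api documentation for py.path', 'api/py.path.html',
# tag, nav) to render the page and write it as UTF-8 encoded HTML to
# base/api/py.path.html, with the temporary hrefs still to be resolved by
# the TempLinker afterwards.
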
class SourcePageBuilder(AbstractPageBuilder):
    """ builds the html for a source docs page """
    def __init__(self, base, linker, projroot, project, capture=None,
                 pageclass=LayoutPage):
        self.base = base
        self.linker = linker
        self.projroot = projroot
        self.project = project
        self.capture = capture
        self.pageclass = pageclass

    def build_navigation(self, fspath):
        nav = H.Navigation(class_='sidebar')
        relpath = fspath.relto(self.projroot)
        path = relpath.split(os.path.sep)
        indent = 0
        # build links to parents
        if relpath != '':
            for i in xrange(len(path)):
                dirpath = os.path.sep.join(path[:i])
                abspath = self.projroot.join(dirpath).strpath
                if i == 0:
                    text = self.projroot.basename
                else:
                    text = path[i-1]
                nav.append(H.NavigationItem(self.linker, abspath, text,
                                            indent, False))
                indent += 1
        # build siblings or children and self
        if fspath.check(dir=True):
            # we're a dir, build ourselves and our children
            dirpath = fspath
            nav.append(H.NavigationItem(self.linker, dirpath.strpath,
                                        dirpath.basename, indent, True))
            indent += 1
        elif fspath.strpath == self.projroot.strpath:
            dirpath = fspath
        else:
            # we're a file, build our parent's children only
            dirpath = fspath.dirpath()
        diritems, fileitems = source_dirs_files(dirpath)
        for dir in diritems:
            nav.append(H.NavigationItem(self.linker, dir.strpath, dir.basename,
                                        indent, False))
        for file in fileitems:
            selected = (fspath.check(file=True) and
                        file.basename == fspath.basename)
            nav.append(H.NavigationItem(self.linker, file.strpath,
                                        file.basename, indent, selected))
        return nav

    re = py.std.re
    _reg_body = re.compile(r'<body[^>]*>(.*)</body>', re.S)

    def build_python_page(self, fspath):
        # XXX two reads of the same file here... not very bad (disk caches
        # and such) but also not very nice...
        enc = source_html.get_module_encoding(fspath.strpath)
        source = fspath.read()
        sep = get_linesep(source)
        colored = enumerate_and_color(source.split(sep), 0, enc)
        tag = H.SourceDef(colored)
        nav = self.build_navigation(fspath)
        return tag, nav

    def build_dir_page(self, fspath):
        dirs, files = source_dirs_files(fspath)
        dirs = [(p.basename, self.linker.get_lazyhref(str(p))) for p in dirs]
        files = [(p.basename, self.linker.get_lazyhref(str(p))) for p in files]
        tag = H.DirList(dirs, files)
        nav = self.build_navigation(fspath)
        return tag, nav

    def build_nonpython_page(self, fspath):
        try:
            tag = H.NonPythonSource(unicode(fspath.read(), 'utf-8'))
        except UnicodeError:
            tag = H.NonPythonSource('no source available (binary file?)')
        nav = self.build_navigation(fspath)
        return tag, nav

    def build_pages(self, base):
        for fspath in [base] + list(base.visit()):
            if fspath.ext in ['.pyc', '.pyo']:
                continue
            relfspath = fspath.relto(base)
            if relfspath.find('%s.' % (os.path.sep,)) > -1:
                # skip hidden dirs and files
                continue
            elif fspath.check(dir=True):
                if relfspath != '':
                    relfspath += os.path.sep
                reloutputpath = 'source%s%sindex.html' % (os.path.sep,
                                                          relfspath)
            else:
                reloutputpath = "source%s%s.html" % (os.path.sep, relfspath)
            reloutputpath = reloutputpath.replace(os.path.sep, '/')
            outputpath = self.base.join(reloutputpath)
            self.linker.set_link(str(fspath), reloutputpath)
            self.build_page(fspath, outputpath, base)

    def build_page(self, fspath, outputpath, base):
        """ build syntax-colored source views """
        if fspath.check(ext='.py'):
            try:
                tag, nav = self.build_python_page(fspath)
            except (KeyboardInterrupt, SystemExit):
                raise
            except: # XXX strange stuff going wrong at times... need to fix
                exc, e, tb = py.std.sys.exc_info()
                print '%s - %s' % (exc, e)
                print
                print ''.join(py.std.traceback.format_tb(tb))
                print '-' * 79
                del tb
                tag, nav = self.build_nonpython_page(fspath)
        elif fspath.check(dir=True):
            tag, nav = self.build_dir_page(fspath)
        else:
            tag, nav = self.build_nonpython_page(fspath)
        title = 'sources for %s' % (fspath.basename,)
        reltargetpath = outputpath.relto(self.base).replace(os.path.sep,
                                                            '/')
        self.write_page(title, reltargetpath, tag, nav)

class ApiPageBuilder(AbstractPageBuilder):
    """ builds the html for an api docs page """
    def __init__(self, base, linker, dsa, projroot, namespace_tree, project,
                 capture=None, pageclass=LayoutPage):
        self.base = base
        self.linker = linker
        self.dsa = dsa
        self.projroot = projroot
        self.projpath = py.path.local(projroot)
        self.namespace_tree = namespace_tree
        self.project = project
        self.capture = capture
        self.pageclass = pageclass

        pkgname = self.dsa.get_module_name().split('/')[-1]
        self.pkg = __import__(pkgname)

    def build_callable_view(self, dotted_name):
        """ build the html for a function or method """
        # XXX we may want to have separate views for functions and methods
        func = get_obj(self.pkg, dotted_name)
        docstring = func.__doc__
        if docstring:
            docstring = deindent(docstring)
        localname = func.__name__
        argdesc = get_param_htmldesc(self.linker, func)
        valuedesc = self.build_callable_signature_description(dotted_name)

        sourcefile = inspect.getsourcefile(func)
        callable_source = self.dsa.get_function_source(dotted_name)
        # I assume they're both either available or unavailable (XXX ?)
        is_in_pkg = self.is_in_pkg(sourcefile)
        href = None
        text = 'could not get to source file'
        colored = []
        if sourcefile and callable_source:
            enc = source_html.get_module_encoding(sourcefile)
            tokenizer = source_color.Tokenizer(source_color.PythonSchema)
            firstlineno = func.func_code.co_firstlineno
            sep = get_linesep(callable_source)
            org = callable_source.split(sep)
            colored = [enumerate_and_color(org, firstlineno, enc)]
            text = 'source: %s' % (sourcefile,)
            if is_in_pkg:
                href = self.linker.get_lazyhref(sourcefile)

        csource = H.SourceSnippet(text, href, colored)
        callstack = self.dsa.get_function_callpoints(dotted_name)
        csitems = []
        for cs, _ in callstack:
            csitems.append(self.build_callsite(dotted_name, cs))
        snippet = H.FunctionDescription(localname, argdesc, docstring,
                                        valuedesc, csource, csitems)

        return snippet

    def build_class_view(self, dotted_name):
        """ build the html for a class """
        cls = get_obj(self.pkg, dotted_name)
        # XXX is this a safe check?
        try:
            sourcefile = inspect.getsourcefile(cls)
        except TypeError:
            sourcefile = None

        docstring = cls.__doc__
        if docstring:
            docstring = deindent(docstring)
        if not hasattr(cls, '__name__'):
            clsname = 'instance of %s' % (cls.__class__.__name__,)
        else:
            clsname = cls.__name__
        bases = self.build_bases(dotted_name)
        properties = self.build_properties(cls)
        methods = self.build_methods(dotted_name)

        if sourcefile is None:
            sourcelink = H.div('no source available')
        else:
            if sourcefile[-1] in ['o', 'c']:
                # link to the .py file rather than the compiled .pyc/.pyo
                sourcefile = sourcefile[:-1]
            sourcelink = H.div(H.a('view source',
                               href=self.linker.get_lazyhref(sourcefile)))

        snippet = H.ClassDescription(
            # XXX bases HTML
            H.ClassDef(clsname, bases, docstring, sourcelink,
                       properties, methods),
        )

        return snippet

    def build_bases(self, dotted_name):
        ret = []
        bases = self.dsa.get_possible_base_classes(dotted_name)
        for base in bases:
            try:
                obj = self.dsa.get_obj(base.name)
            except KeyError:
                ret.append((base.name, None))
            else:
                href = self.linker.get_lazyhref(base.name)
                ret.append((base.name, href))
        return ret

    def build_properties(self, cls):
        properties = []
        for attr in dir(cls):
            val = getattr(cls, attr)
            if show_property(attr) and not callable(val):
                if isinstance(val, property):
                    val = '<property object (dynamically calculated value)>'
                properties.append((attr, val))
        properties.sort(key=lambda a: a[0]) # sort on name
        return properties

    def build_methods(self, dotted_name):
        ret = []
        methods = self.dsa.get_class_methods(dotted_name)
        # make sure __init__ is documented first
        if '__init__' in methods:
            methods.remove('__init__')
            methods.insert(0, '__init__')
        for method in methods:
            ret += self.build_callable_view('%s.%s' % (dotted_name,
                                                       method))
        return ret

    def build_namespace_view(self, namespace_dotted_name, item_dotted_names):
        """ build the html for a namespace (module) """
        obj = get_obj(self.pkg, namespace_dotted_name)
        docstring = obj.__doc__
        snippet = H.NamespaceDescription(
            H.NamespaceDef(namespace_dotted_name),
            H.Docstring(docstring or '*no docstring available*')
        )
        for dotted_name in sorted(item_dotted_names):
            itemname = dotted_name.split('.')[-1]
            if not is_navigateable(itemname):
                continue
            snippet.append(
                H.NamespaceItem(
                    H.a(itemname,
                        href=self.linker.get_lazyhref(dotted_name)
                    )
                )
            )
        return snippet

    def build_class_pages(self, classes_dotted_names):
        passed = []
        for dotted_name in sorted(classes_dotted_names):
            parent_dotted_name, _ = split_of_last_part(dotted_name)
            try:
                sibling_dotted_names = self.namespace_tree[parent_dotted_name]
            except KeyError:
                # no siblings (built-in module or sth)
                sibling_dotted_names = []
            tag = H.Content(self.build_class_view(dotted_name))
            nav = self.build_navigation(dotted_name, False)
            reltargetpath = "api/%s.html" % (dotted_name,)
            self.linker.set_link(dotted_name, reltargetpath)
            title = 'api documentation for %s' % (dotted_name,)
            self.write_page(title, reltargetpath, tag, nav)
        return passed

    def build_function_pages(self, method_dotted_names):
        passed = []
        for dotted_name in sorted(method_dotted_names):
            # XXX should we create a build_function_view instead?
            parent_dotted_name, _ = split_of_last_part(dotted_name)
            sibling_dotted_names = self.namespace_tree[parent_dotted_name]
            tag = H.Content(self.build_callable_view(dotted_name))
            nav = self.build_navigation(dotted_name, False)
            reltargetpath = "api/%s.html" % (dotted_name,)
            self.linker.set_link(dotted_name, reltargetpath)
            title = 'api documentation for %s' % (dotted_name,)
            self.write_page(title, reltargetpath, tag, nav)
        return passed

    def build_namespace_pages(self):
        passed = []
        module_name = self.dsa.get_module_name().split('/')[-1]

        names = self.namespace_tree.keys()
        names.sort()
        function_names = self.dsa.get_function_names()
        class_names = self.dsa.get_class_names()
        for dotted_name in sorted(names):
            if dotted_name in function_names or dotted_name in class_names:
                continue
            subitem_dotted_names = self.namespace_tree[dotted_name]
            tag = H.Content(self.build_namespace_view(dotted_name,
                                                      subitem_dotted_names))
            nav = self.build_navigation(dotted_name, True)
            if dotted_name == '':
                reltargetpath = 'api/index.html'
            else:
                reltargetpath = 'api/%s.html' % (dotted_name,)
            self.linker.set_link(dotted_name, reltargetpath)
            if dotted_name == '':
                dotted_name = self.dsa.get_module_name().split('/')[-1]
            title = 'index of %s namespace' % (dotted_name,)
            self.write_page(title, reltargetpath, tag, nav)
        return passed

    def build_navigation(self, dotted_name, build_children=True):
        navitems = []

        # top namespace, index.html
        module_name = self.dsa.get_module_name().split('/')[-1]
        navitems.append(H.NavigationItem(self.linker, '', module_name, 0,
                                         True))

        def build_nav_level(dotted_name, depth=1):
            navitems = []
            path = dotted_name.split('.')[:depth]
            siblings = self.namespace_tree.get('.'.join(path[:-1]))
            for dn in sorted(siblings):
                selected = dn == '.'.join(path)
                sibpath = dn.split('.')
                sibname = sibpath[-1]
                if not is_navigateable(sibname):
                    continue
                navitems.append(H.NavigationItem(self.linker, dn, sibname,
                                                 depth, selected))
                if selected:
                    lastlevel = dn.count('.') == dotted_name.count('.')
                    if not lastlevel:
                        navitems += build_nav_level(dotted_name, depth+1)
                    elif lastlevel and build_children:
                        # XXX hack
                        navitems += build_nav_level('%s.' % (dotted_name,),
                                                    depth+1)
            return navitems

        navitems += build_nav_level(dotted_name)
        return H.Navigation(class_='sidebar', *navitems)

    def build_callable_signature_description(self, dotted_name):
        args, retval = self.dsa.get_function_signature(dotted_name)
        valuedesc = H.ValueDescList()
        for name, _type in args:
            valuedesc.append(self.build_sig_value_description(name, _type))
        if retval:
            retval = self.process_type_link(retval)
        ret = H.div(H.div('arguments:'), valuedesc, H.div('return value:'),
                    retval or 'None')
        return ret

    def build_sig_value_description(self, name, _type):
        l = self.process_type_link(_type)
        items = []
        next = "%s: " % name
        for item in l:
            if isinstance(item, str):
                next += item
            else:
                if next:
                    items.append(next)
                    next = ""
                items.append(item)
        if next:
            items.append(next)
        return H.ValueDescItem(*items)

    def process_type_link(self, _type):
        # now we do simple type dispatching and provide a link in this case
        lst = []
        data = self.dsa.get_type_desc(_type)
        if not data:
            for i in _type.striter():
                if isinstance(i, str):
                    lst.append(i)
                else:
                    lst += self.process_type_link(i)
            return lst
        name, _desc_type, is_degenerated = data
        if not is_degenerated:
            linktarget = self.linker.get_lazyhref(name)
            lst.append(H.a(str(_type), href=linktarget))
        else:
            # degenerated types are not expected here; if this ever triggers
            # we should provide some way of linking to sourcegen directly
            # (e.g. lst.append(name)) instead of failing
            raise IOError('do not think we ever get here?')
        return lst

    def is_in_pkg(self, sourcefile):
        return py.path.local(sourcefile).relto(self.projpath)

    def build_callsite(self, functionname, call_site):
        tbtag = self.gen_traceback(functionname, reversed(call_site))
        return H.CallStackItem(call_site[0].filename, call_site[0].lineno + 1,
                               tbtag)

    _reg_source = py.std.re.compile(r'([^>]*)<(.*)>')

    def gen_traceback(self, funcname, call_site):
        tbdiv = H.div()
        for frame in call_site:
            lineno = frame.lineno - frame.firstlineno
            source = frame.source
            sourcefile = frame.filename

            tokenizer = source_color.Tokenizer(source_color.PythonSchema)
            mangled = []

            source = str(source)
            sep = get_linesep(source)
            for i, sline in enumerate(source.split(sep)):
                if i == lineno:
                    l = '-> %s' % (sline,)
                else:
                    l = '   %s' % (sline,)
                mangled.append(l)
            if sourcefile:
                linktext = '%s - line %s' % (sourcefile, frame.lineno + 1)
                # skip py.code.Source objects and source files outside of the
                # package
                is_code_source = self._reg_source.match(sourcefile)
                if (not is_code_source and self.is_in_pkg(sourcefile) and
                        py.path.local(sourcefile).check()):
                    enc = source_html.get_module_encoding(sourcefile)
                    href = self.linker.get_lazyhref(sourcefile)
                    sourcelink = H.a(linktext, href=href)
                else:
                    enc = 'latin-1'
                    sourcelink = H.div(linktext)
                colored = [enumerate_and_color(mangled,
                                               frame.firstlineno, enc)]
            else:
                sourcelink = H.div('source unknown (%s)' % (sourcefile,))
                colored = mangled[:]
            tbdiv.append(sourcelink)
            tbdiv.append(H.div(*colored))
        return tbdiv