[svn r37602] Fixed support for files ending on a comment rather than a newline, and fixed a unicode() call so it is not applied to objects that are already unicode.

--HG--
branch : trunk
guido 2007-01-30 15:46:57 +01:00
parent 0b985318fa
commit 2f8325e277
3 changed files with 9 additions and 3 deletions

Changed file 1 of 3:

@@ -4,7 +4,7 @@ import re
 
 class PythonSchema(object):
     """ contains information for syntax coloring """
-    comment = [('#', '\n')]
+    comment = [('#', '\n'), ('#', '$')]
     multiline_string = ['"""', "'''"]
     string = ['"""', "'''", '"', "'"]
     # XXX not complete
@@ -125,7 +125,6 @@ class Tokenizer(object):
                 break
         return data, token
 
-
     def _check_comments(self, data):
         # fortunately we don't have to deal with multi-line comments
         token = None

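The new ('#', '$') delimiter pair is what makes the end-on-a-comment case work: read as a regex-style anchor, '$' lets a comment terminate at the end of the input as well as at a newline. A minimal sketch of the idea, assuming the (start, end) pairs end up in a pattern along these lines (match_comment and the exact regex are illustrative, not the tokenizer's actual code):

    import re

    # Illustrative sketch only: assumes the (start, end) comment delimiters
    # are compiled into a regex where '$' means "end of input".
    def match_comment(data):
        # '#' opens the comment; it runs up to a newline or, failing that,
        # to the end of the string, so data ending on a comment still matches.
        m = re.match(r"#[^\n]*(?:\n|$)", data)
        return m.group(0) if m else None

    assert match_comment('# foo\nbar') == '# foo\n'
    assert match_comment('# foo') == '# foo'  # no trailing newline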
Changed file 2 of 3:

@@ -46,7 +46,10 @@ def prepare_line(text, tokenizer, encoding):
         if type(item) in [str, unicode]:
             tokens = tokenizer.tokenize(item)
             for t in tokens:
-                data = unicode(t.data, encoding)
+                if not isinstance(t.data, unicode):
+                    data = unicode(t.data, encoding)
+                else:
+                    data = t.data
                 if t.type in ['keyword', 'alt_keyword', 'number',
                               'string', 'comment']:
                     ret.append(html.span(data, class_=t.type))
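The isinstance() guard matters because in Python 2 the two-argument unicode() call decodes byte strings only; handing it an object that is already unicode raises "TypeError: decoding Unicode is not supported". A small Python 2 sketch of the guard in isolation (to_unicode is a hypothetical helper, not part of this codebase):

    def to_unicode(data, encoding='utf-8'):
        # unicode() with an encoding argument only accepts byte strings;
        # objects that are already unicode must be passed through untouched,
        # otherwise Python 2 raises "TypeError: decoding Unicode is not
        # supported".
        if isinstance(data, unicode):
            return data
        return unicode(data, encoding)

    assert to_unicode('caf\xc3\xa9') == u'caf\xe9'  # byte string gets decoded
    assert to_unicode(u'caf\xe9') == u'caf\xe9'     # unicode passes through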

Changed file 3 of 3:

@@ -45,8 +45,12 @@ class TestTokenizer(object):
         assert self.tokens('foo # bar\n') == [Token('foo', type='word'),
                                               Token(' ', type='whitespace'),
                                               Token('# bar\n', type='comment')]
+        assert self.tokens("# foo 'bar\n") == [Token("# foo 'bar\n",
+                                                     type='comment')]
+        assert self.tokens('# foo') == [Token('# foo', type='comment')]
 
     def test_string_simple(self):
+        assert self.tokens('""') == [Token('""', type='string')]
         assert self.tokens('"foo"') == [Token('"foo"', type='string')]
         assert self.tokens('"foo"\'bar\'') == [Token('"foo"', type='string'),
                                                Token("'bar'", type='string')]
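The new asserts compare Token instances with ==, so Token must implement value-based equality on its data and type. A minimal stand-in consistent with that usage, for reading the tests in isolation (the real Token class is defined elsewhere in the repository and may differ):

    class Token(object):
        # Sketch of a value-comparable token; only the behaviour the tests
        # above rely on (construction and ==) is modelled here.
        def __init__(self, data, type=None):
            self.data = data
            self.type = type

        def __eq__(self, other):
            return (isinstance(other, Token) and
                    (self.data, self.type) == (other.data, other.type))

        def __ne__(self, other):
            return not self == other

        def __repr__(self):
            return 'Token(%r, type=%r)' % (self.data, self.type)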