Refs #24121 -- Added __repr__() to FilterExpression, Lexer, Parser, and Token.

Authored by David Smith on 2021-01-29 19:21:26 +00:00; committed by Mariusz Felisiak
parent 0c7e880e13
commit 179ee13eb3
2 changed files with 31 additions and 2 deletions
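
The new reprs make the template engine's internals easier to inspect from a shell or a debugger. As a rough sketch of what they produce (outputs are taken from the test_repr() assertions added below; the Lexer line uses a shorter template string than the test, so its output is derived from the __repr__ format shown below):

    from django.template.base import FilterExpression, Lexer, Parser, Token, TokenType
    from django.template.defaultfilters import register as filter_library

    token = Token(TokenType.BLOCK, 'some text')
    parser = Parser([token], builtins=[filter_library])

    repr(token)                                   # '<Block token: "some text...">'
    repr(parser)                                  # '<Parser tokens=[<Block token: "some text...">]>'
    repr(FilterExpression('news|upper', parser))  # "<FilterExpression 'news|upper'>"
    repr(Lexer('{{ a }}'))                        # '<Lexer template_string="{{ a }}...", verbatim=False>'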

@@ -308,7 +308,7 @@ class Token:
         self.lineno = lineno
         self.position = position
 
-    def __str__(self):
+    def __repr__(self):
         token_name = self.token_type.name.capitalize()
         return ('<%s token: "%s...">' %
                 (token_name, self.contents[:20].replace('\n', '')))
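
Note that Token only gains a __repr__ by renaming its existing __str__; nothing is lost, because object.__str__ falls back to __repr__ when a class defines no __str__ of its own. str(token) therefore keeps returning the same text, while repr(token), and container reprs such as a list of tokens, now show it too:

    token = Token(TokenType.BLOCK, 'some text')
    str(token)   # '<Block token: "some text...">'  (no __str__, so Python falls back to __repr__)
    repr(token)  # '<Block token: "some text...">'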
@@ -334,6 +334,13 @@ class Lexer:
         self.template_string = template_string
         self.verbatim = False
 
+    def __repr__(self):
+        return '<%s template_string="%s...", verbatim=%s>' % (
+            self.__class__.__qualname__,
+            self.template_string[:20].replace('\n', ''),
+            self.verbatim,
+        )
+
     def tokenize(self):
         """
         Return a list of tokens from a given template_string.
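
Lexer.__repr__ slices template_string to its first 20 characters and strips newlines, so the repr stays on one line even for large templates. A hypothetical example (not from the patch; the output follows from the format string above):

    lexer = Lexer('{% if user.is_authenticated %}Hello{% endif %}')
    repr(lexer)  # '<Lexer template_string="{% if user.is_authen...", verbatim=False>'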
@@ -423,6 +430,9 @@ class Parser:
             self.add_library(builtin)
         self.origin = origin
 
+    def __repr__(self):
+        return '<%s tokens=%r>' % (self.__class__.__qualname__, self.tokens)
+
     def parse(self, parse_until=None):
         """
         Iterate through the parser tokens and compiles each one into a node.
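
Parser.__repr__ interpolates self.tokens with %r, so every Token in the list is rendered with the Token.__repr__ added above; that is what produces the nested form asserted in the test below. A hypothetical one-token example:

    parser = Parser([Token(TokenType.TEXT, 'Hello world')])
    repr(parser)  # '<Parser tokens=[<Text token: "Hello world...">]>'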
@@ -723,6 +733,9 @@ class FilterExpression:
     def __str__(self):
         return self.token
 
+    def __repr__(self):
+        return "<%s %r>" % (self.__class__.__qualname__, self.token)
+
 
 class Variable:
     """


@@ -3,7 +3,7 @@ Testing some internals of the template processing. These are *not* examples to b
 """
 from django.template import Library, TemplateSyntaxError
 from django.template.base import (
-    FilterExpression, Parser, Token, TokenType, Variable,
+    FilterExpression, Lexer, Parser, Token, TokenType, Variable,
 )
 from django.template.defaultfilters import register as filter_library
 from django.test import SimpleTestCase
@@ -19,6 +19,22 @@ class ParserTests(SimpleTestCase):
         split = token.split_contents()
         self.assertEqual(split, ["sometag", '_("Page not found")', 'value|yesno:_("yes,no")'])
 
+    def test_repr(self):
+        token = Token(TokenType.BLOCK, 'some text')
+        self.assertEqual(repr(token), '<Block token: "some text...">')
+        parser = Parser([token], builtins=[filter_library])
+        self.assertEqual(
+            repr(parser),
+            '<Parser tokens=[<Block token: "some text...">]>',
+        )
+        filter_expression = FilterExpression('news|upper', parser)
+        self.assertEqual(repr(filter_expression), "<FilterExpression 'news|upper'>")
+        lexer = Lexer('{% for i in 1 %}{{ a }}\n{% endfor %}')
+        self.assertEqual(
+            repr(lexer),
+            '<Lexer template_string="{% for i in 1 %}{{ a...", verbatim=False>',
+        )
+
     def test_filter_parsing(self):
         c = {"article": {"section": "News"}}
         p = Parser("", builtins=[filter_library])
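
Assuming this test module is tests/template_tests/test_parser.py in a Django checkout (its docstring matches), the new assertions can be run on their own with ./runtests.py template_tests.test_parser.ParserTests.test_repr from the tests directory.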