Refs #32990 -- Added tests for DebugLexer/Lexer.tokenize().

Greg Twohig 2021-08-09 08:16:29 +02:00 committed by Mariusz Felisiak
parent 259b28706e
commit c99aaf14ee
1 changed file with 40 additions and 0 deletions


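As context for the diff below, here is a minimal sketch (not part of the commit) of the behaviour these tests pin down, assuming Django is installed; the fields printed mirror the (token_type, contents, lineno, position) tuples compared in the tests:

from django.conf import settings

if not settings.configured:
    # Minimal configuration so the snippet can run outside a Django project.
    settings.configure()

from django.template.base import DebugLexer, Lexer

template_string = 'text\n{% if test %}{{ varvalue }}{% endif %}'

# The plain Lexer reports token_type, contents, and lineno; position stays None.
for token in Lexer(template_string).tokenize():
    print(token.token_type, token.contents, token.lineno, token.position)

# DebugLexer additionally reports each token's (start, end) character offsets.
for token in DebugLexer(template_string).tokenize():
    print(token.token_type, token.contents, token.lineno, token.position)

DebugLexer is the lexer the engine uses when template debugging is enabled, which is why it is the only one that tracks positions.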
@@ -1,8 +1,48 @@
from django.template import Context, Template, Variable, VariableDoesNotExist
from django.template.base import DebugLexer, Lexer, TokenType
from django.test import SimpleTestCase
from django.utils.translation import gettext_lazy


class LexerTestMixin:
    template_string = (
        'text\n'
        '{% if test %}{{ varvalue }}{% endif %}'
        '{#comment {{not a var}} %{not a block}% #}'
    )
    expected_token_tuples = [
        # (token_type, contents, lineno, position)
        (TokenType.TEXT, 'text\n', 1, (0, 5)),
        (TokenType.BLOCK, 'if test', 2, (5, 18)),
        (TokenType.VAR, 'varvalue', 2, (18, 32)),
        (TokenType.BLOCK, 'endif', 2, (32, 43)),
        (TokenType.COMMENT, 'comment {{not a var}} %{not a block}%', 2, (43, 85)),
    ]

    def test_tokenize(self):
        tokens = self.lexer_class(self.template_string).tokenize()
        token_tuples = [(t.token_type, t.contents, t.lineno, t.position) for t in tokens]
        self.assertEqual(token_tuples, self.make_expected())

    def make_expected(self):
        raise NotImplementedError('This method must be implemented by a subclass.')


class LexerTests(LexerTestMixin, SimpleTestCase):
    lexer_class = Lexer

    def make_expected(self):
        # The non-debug lexer does not record position.
        return [t[:-1] + (None,) for t in self.expected_token_tuples]


class DebugLexerTests(LexerTestMixin, SimpleTestCase):
    lexer_class = DebugLexer

    def make_expected(self):
        return self.expected_token_tuples


class TemplateTests(SimpleTestCase):
    def test_lazy_template_string(self):
        template_string = gettext_lazy('lazy string')