Refs #33002 -- Optimized Lexer.tokenize() by skipping computing lineno when not needed.

This commit is contained in:
Chris Jerdonek 2021-08-08 01:59:41 -04:00 committed by Mariusz Felisiak
parent 65ed96fa39
commit 6242c22a2f
1 changed file with 1 addition and 1 deletion

View File

@@ -357,8 +357,8 @@ class Lexer:
for bit in tag_re.split(self.template_string):
if bit:
result.append(self.create_token(bit, None, lineno, in_tag))
lineno += bit.count('\n')
in_tag = not in_tag
lineno += bit.count('\n')
return result
def create_token(self, token_string, position, lineno, in_tag):