Fixed #7027: template tags now correctly break tokens around strings marked for translation.

git-svn-id: http://code.djangoproject.com/svn/django/trunk@8769 bcc190cf-cafb-0310-a4f2-bffc1f526a37
Jacob Kaplan-Moss 2008-08-31 18:28:06 +00:00
parent 79968f9867
commit 86316f0be6
2 changed files with 19 additions and 1 deletion
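
Background for the fix: Token.split_contents() used to hand the tag contents straight to smart_split(), which splits on whitespace and keeps quoted phrases together. The grouping logic added below implies that, at this revision, an argument marked for translation such as _("Page not found") was not kept whole, because the chunk starts with _( rather than a quote. A minimal sketch of the pre-fix behaviour; the tag string is taken from the new regression test, and the commented output shape is an assumption about that era's smart_split:

from django.utils.text import smart_split

contents = 'sometag _("Page not found") value|yesno:_("yes,no")'

# Before this change, split_contents() was just list(smart_split(contents)),
# so the spaces inside _("Page not found") were expected to produce separate
# bits, roughly:
#   ['sometag', '_("Page', 'not', 'found")', 'value|yesno:_("yes,no")']
print(list(smart_split(contents)))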


@@ -197,7 +197,19 @@ class Token(object):
             self.contents[:20].replace('\n', ''))

     def split_contents(self):
-        return list(smart_split(self.contents))
+        split = []
+        bits = iter(smart_split(self.contents))
+        for bit in bits:
+            # Handle translation-marked template pieces
+            if bit.startswith('_("') or bit.startswith("_('"):
+                sentinal = bit[2] + ')'
+                trans_bit = [bit]
+                while not bit.endswith(sentinal):
+                    bit = bits.next()
+                    trans_bit.append(bit)
+                bit = ' '.join(trans_bit)
+            split.append(bit)
+        return split

 class Lexer(object):
     def __init__(self, template_string, origin):
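
Downstream, the change matters to custom tag compilation functions, which call token.split_contents() precisely so that quoted (and now translation-marked) arguments arrive as single bits instead of being split on their internal spaces. A hedged usage sketch: the sometag tag, its argument handling, and SometagNode are invented for illustration; only the split_contents() call and the resulting bit layout come from this commit.

from django import template

register = template.Library()

class SometagNode(template.Node):
    def __init__(self, message, value_expr):
        # message still carries its _("...") wrapper; resolving and
        # translating it is up to the tag author, not this sketch.
        self.message = message
        self.value_expr = value_expr

    def render(self, context):
        return ''  # illustration only

@register.tag(name="sometag")
def do_sometag(parser, token):
    # With the fixed split_contents(), bits is
    # ['sometag', '_("Page not found")', 'value|yesno:_("yes,no")']
    bits = token.split_contents()
    if len(bits) != 3:
        raise template.TemplateSyntaxError("'sometag' takes exactly two arguments")
    return SometagNode(bits[1], bits[2])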


@@ -130,6 +130,12 @@ class Templates(unittest.TestCase):
             test_template_sources('/DIR1/index.HTML', template_dirs,
                                   ['/dir1/index.html'])

+    def test_token_smart_split(self):
+        # Regression test for #7027
+        token = template.Token(template.TOKEN_BLOCK, 'sometag _("Page not found") value|yesno:_("yes,no")')
+        split = token.split_contents()
+        self.assertEqual(split, ["sometag", '_("Page not found")', 'value|yesno:_("yes,no")'])
+
     def test_templates(self):
         template_tests = self.get_template_tests()
         filter_tests = filters.get_filter_tests()
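
The new test exercises exactly the case from the ticket. The same check can be reproduced against this revision outside the test suite; a sketch, noting that on Python 2 the bits come back as unicode strings, which still compare equal to the plain literals below:

from django import template

token = template.Token(template.TOKEN_BLOCK,
                       'sometag _("Page not found") value|yesno:_("yes,no")')
assert token.split_contents() == [
    'sometag', '_("Page not found")', 'value|yesno:_("yes,no")']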