Fixed #26093 -- Allowed escape sequences extraction by gettext on Python 3
Thanks Sylvain Fankhauser for the report and Tim Graham for the review.
parent 7037dc5724
commit 104eddbdf6
@@ -566,6 +566,9 @@ def templatize(src, origin=None):
     comment = []
     lineno_comment_map = {}
     comment_lineno_cache = None
+    # Adding the u prefix allows gettext to recognize the Unicode string
+    # (#26093).
+    raw_prefix = 'u' if six.PY3 else ''
 
     def join_tokens(tokens, trim=False):
         message = ''.join(tokens)
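As an aside (not part of the diff): the easiest way to see what the prefix changes is to format the non-breaking-space string used by the new test the way templatize() does, before and after this commit. A minimal Python 3 sketch:

    # The u prefix is what lets gettext recognize the string as Unicode (#26093);
    # repr() on Python 3 renders the non-breaking space as the \xa0 escape.
    message = 'Non-breaking space\xa0:'
    before = ' gettext(%r) ' % message                    # " gettext('Non-breaking space\xa0:') "
    after = ' gettext({p}{!r}) '.format(message, p='u')   # " gettext(u'Non-breaking space\xa0:') "
    print(before)
    print(after)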
@@ -597,26 +600,34 @@ def templatize(src, origin=None):
                     if endbmatch:
                         if inplural:
                             if message_context:
-                                out.write(' npgettext(%r, %r, %r,count) ' % (
+                                out.write(' npgettext({p}{!r}, {p}{!r}, {p}{!r},count) '.format(
                                     message_context,
                                     join_tokens(singular, trimmed),
-                                    join_tokens(plural, trimmed)))
+                                    join_tokens(plural, trimmed),
+                                    p=raw_prefix,
+                                ))
                             else:
-                                out.write(' ngettext(%r, %r, count) ' % (
+                                out.write(' ngettext({p}{!r}, {p}{!r}, count) '.format(
                                     join_tokens(singular, trimmed),
-                                    join_tokens(plural, trimmed)))
+                                    join_tokens(plural, trimmed),
+                                    p=raw_prefix,
+                                ))
                             for part in singular:
                                 out.write(blankout(part, 'S'))
                             for part in plural:
                                 out.write(blankout(part, 'P'))
                         else:
                             if message_context:
-                                out.write(' pgettext(%r, %r) ' % (
+                                out.write(' pgettext({p}{!r}, {p}{!r}) '.format(
                                     message_context,
-                                    join_tokens(singular, trimmed)))
+                                    join_tokens(singular, trimmed),
+                                    p=raw_prefix,
+                                ))
                             else:
-                                out.write(' gettext(%r) ' % join_tokens(singular,
-                                    trimmed))
+                                out.write(' gettext({p}{!r}) '.format(
+                                    join_tokens(singular, trimmed),
+                                    p=raw_prefix,
+                                ))
                         for part in singular:
                             out.write(blankout(part, 'S'))
                         message_context = None
@@ -685,10 +696,12 @@ def templatize(src, origin=None):
                             message_context = message_context.strip('"')
                         elif message_context[0] == "'":
                             message_context = message_context.strip("'")
-                        out.write(' pgettext(%r, %r) ' % (message_context, g))
+                        out.write(' pgettext({p}{!r}, {p}{!r}) '.format(
+                            message_context, g, p=raw_prefix
+                        ))
                         message_context = None
                     else:
-                        out.write(' gettext(%r) ' % g)
+                        out.write(' gettext({p}{!r}) '.format(g, p=raw_prefix))
                 elif bmatch:
                     for fmatch in constant_re.findall(t.contents):
                         out.write(' _(%s) ' % fmatch)
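To see the end-to-end effect without running makemessages, the generated dummy source can be inspected directly. A rough sketch, assuming a Django checkout of this era is importable and that templatize() still lives in django.utils.translation.trans_real:

    import django
    from django.conf import settings

    # Minimal configuration; enough for templatize(), which only tokenizes
    # the template and never renders it.
    settings.configure()
    django.setup()

    from django.utils.translation import trans_real

    src = '{% trans "Non-breaking space\xa0:" %}'
    print(trans_real.templatize(src))
    # On Python 3 the output is expected to contain something like:
    #   gettext(u'Non-breaking space\xa0:')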
@@ -98,3 +98,5 @@ First `trans`, then `blocktrans` with a plural
 {% plural %}
 Plural for a `trans` and `blocktrans` collision case
 {% endblocktrans %}
+
+{% trans "Non-breaking space :" %}
@@ -204,6 +204,14 @@ class BasicExtractorTests(ExtractorTests):
             po_contents
         )
 
+    def test_special_char_extracted(self):
+        os.chdir(self.test_dir)
+        management.call_command('makemessages', locale=[LOCALE], verbosity=0)
+        self.assertTrue(os.path.exists(self.PO_FILE))
+        with open(self.PO_FILE, 'r') as fp:
+            po_contents = force_text(fp.read())
+        self.assertMsgId("Non-breaking space\xa0:", po_contents)
+
     def test_blocktrans_trimmed(self):
         os.chdir(self.test_dir)
         management.call_command('makemessages', locale=[LOCALE], verbosity=0)