# coding: utf-8
from __future__ import unicode_literals

from django.test import TestCase
from django.utils.encoding import iri_to_uri, force_text
from django.utils.functional import lazy
from django.utils.http import (cookie_date, http_date,
    urlquote, urlquote_plus, urlunquote, urlunquote_plus)
from django.utils import six
from django.utils.text import get_text_list, smart_split
from django.utils.translation import override

lazystr = lazy(force_text, six.text_type)
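# lazystr() builds a lazily-evaluated text object; the smart_split test data
# below uses it to check that lazy strings are also handled (see #20231).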


class TextTests(TestCase):
    """
    Tests for django.utils.text and other text munging utility functions.
    """

    def test_get_text_list(self):
        self.assertEqual(get_text_list(['a', 'b', 'c', 'd']), 'a, b, c or d')
        self.assertEqual(get_text_list(['a', 'b', 'c'], 'and'), 'a, b and c')
        self.assertEqual(get_text_list(['a', 'b'], 'and'), 'a and b')
        self.assertEqual(get_text_list(['a']), 'a')
        self.assertEqual(get_text_list([]), '')
        with override('ar'):
            self.assertEqual(get_text_list(['a', 'b', 'c']), "a، b أو c")

    def test_smart_split(self):
        testdata = [
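            # Each entry is (input string, expected token list); quoted
            # phrases must survive as single tokens.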
            ('This is "a person" test.',
                ['This', 'is', '"a person"', 'test.']),
            ('This is "a person\'s" test.',
                ['This', 'is', '"a person\'s"', 'test.']),
            ('This is "a person\\"s" test.',
                ['This', 'is', '"a person\\"s"', 'test.']),
            ('"a \'one',
                ['"a', "'one"]),
            ('all friends\' tests',
                ['all', 'friends\'', 'tests']),
            ('url search_page words="something else"',
                ['url', 'search_page', 'words="something else"']),
            ("url search_page words='something else'",
                ['url', 'search_page', "words='something else'"]),
            ('url search_page words "something else"',
                ['url', 'search_page', 'words', '"something else"']),
            ('url search_page words-"something else"',
                ['url', 'search_page', 'words-"something else"']),
            ('url search_page words=hello',
                ['url', 'search_page', 'words=hello']),
            ('url search_page words="something else',
                ['url', 'search_page', 'words="something', 'else']),
            ("cut:','|cut:' '",
                ["cut:','|cut:' '"]),
            (lazystr("a b c d"),  # Test for #20231
                ['a', 'b', 'c', 'd']),
        ]
        for test, expected in testdata:
            self.assertEqual(list(smart_split(test)), expected)

    def test_urlquote(self):
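        # UTF-8 percent-encoding: 'é' (\xe9) becomes %C3%A9 and '&' becomes %26
        # unless passed as safe; spaces map to %20 (urlquote) or '+' (urlquote_plus).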
        self.assertEqual(urlquote('Paris & Orl\xe9ans'),
            'Paris%20%26%20Orl%C3%A9ans')
        self.assertEqual(urlquote('Paris & Orl\xe9ans', safe="&"),
            'Paris%20&%20Orl%C3%A9ans')
        self.assertEqual(
            urlunquote('Paris%20%26%20Orl%C3%A9ans'),
            'Paris & Orl\xe9ans')
        self.assertEqual(
            urlunquote('Paris%20&%20Orl%C3%A9ans'),
            'Paris & Orl\xe9ans')
        self.assertEqual(urlquote_plus('Paris & Orl\xe9ans'),
            'Paris+%26+Orl%C3%A9ans')
        self.assertEqual(urlquote_plus('Paris & Orl\xe9ans', safe="&"),
            'Paris+&+Orl%C3%A9ans')
        self.assertEqual(
            urlunquote_plus('Paris+%26+Orl%C3%A9ans'),
            'Paris & Orl\xe9ans')
        self.assertEqual(
            urlunquote_plus('Paris+&+Orl%C3%A9ans'),
            'Paris & Orl\xe9ans')

    def test_cookie_date(self):
        # 1167616461.0 is Mon, 01 Jan 2007 01:54:21 UTC.
        t = 1167616461.0
        self.assertEqual(cookie_date(t), 'Mon, 01-Jan-2007 01:54:21 GMT')

    def test_http_date(self):
        t = 1167616461.0
        self.assertEqual(http_date(t), 'Mon, 01 Jan 2007 01:54:21 GMT')

    def test_iri_to_uri(self):
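        # iri_to_uri() percent-encodes non-ASCII characters while leaving existing
        # percent-escapes and reserved ASCII such as '#' and '/' untouched.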
        self.assertEqual(iri_to_uri('red%09ros\xe9#red'),
            'red%09ros%C3%A9#red')

        self.assertEqual(iri_to_uri('/blog/for/J\xfcrgen M\xfcnster/'),
            '/blog/for/J%C3%BCrgen%20M%C3%BCnster/')

        self.assertEqual(iri_to_uri('locations/%s' % urlquote_plus('Paris & Orl\xe9ans')),
            'locations/Paris+%26+Orl%C3%A9ans')

    def test_iri_to_uri_idempotent(self):
        self.assertEqual(iri_to_uri(iri_to_uri('red%09ros\xe9#red')),
            'red%09ros%C3%A9#red')