import copy
import json
import os
import pickle
import unittest
import uuid

from django.core.exceptions import DisallowedRedirect, SuspiciousOperation
from django.core.serializers.json import DjangoJSONEncoder
from django.core.signals import request_finished
from django.db import close_old_connections
from django.http import (
    BadHeaderError, HttpResponse, HttpResponseNotAllowed,
    HttpResponseNotModified, HttpResponsePermanentRedirect,
    HttpResponseRedirect, JsonResponse, QueryDict, SimpleCookie,
    StreamingHttpResponse, parse_cookie,
)
from django.test import SimpleTestCase
from django.utils.functional import lazystr

class QueryDictTests(SimpleTestCase):
    def test_create_with_no_args(self):
        self.assertEqual(QueryDict(), QueryDict(str('')))

    def test_missing_key(self):
        q = QueryDict()
        with self.assertRaises(KeyError):
            q.__getitem__('foo')

    def test_immutability(self):
        q = QueryDict()
        with self.assertRaises(AttributeError):
            q.__setitem__('something', 'bar')
        with self.assertRaises(AttributeError):
            q.setlist('foo', ['bar'])
        with self.assertRaises(AttributeError):
            q.appendlist('foo', ['bar'])
        with self.assertRaises(AttributeError):
            q.update({'foo': 'bar'})
        with self.assertRaises(AttributeError):
            q.pop('foo')
        with self.assertRaises(AttributeError):
            q.popitem()
        with self.assertRaises(AttributeError):
            q.clear()

    def test_immutable_get_with_default(self):
        q = QueryDict()
        self.assertEqual(q.get('foo', 'default'), 'default')

    def test_immutable_basic_operations(self):
        q = QueryDict()
        self.assertEqual(q.getlist('foo'), [])
        self.assertNotIn('foo', q)
        self.assertEqual(list(q.items()), [])
        self.assertEqual(list(q.lists()), [])
        self.assertEqual(list(q.keys()), [])
        self.assertEqual(list(q.values()), [])
        self.assertEqual(len(q), 0)
        self.assertEqual(q.urlencode(), '')

    def test_single_key_value(self):
        """Test QueryDict with one key/value pair"""

        q = QueryDict(str('foo=bar'))
        self.assertEqual(q['foo'], 'bar')
        with self.assertRaises(KeyError):
            q.__getitem__('bar')
        with self.assertRaises(AttributeError):
            q.__setitem__('something', 'bar')

        self.assertEqual(q.get('foo', 'default'), 'bar')
        self.assertEqual(q.get('bar', 'default'), 'default')
        self.assertEqual(q.getlist('foo'), ['bar'])
        self.assertEqual(q.getlist('bar'), [])

        with self.assertRaises(AttributeError):
            q.setlist('foo', ['bar'])
        with self.assertRaises(AttributeError):
            q.appendlist('foo', ['bar'])

        self.assertIn('foo', q)
        self.assertNotIn('bar', q)

        self.assertEqual(list(q.items()), [('foo', 'bar')])
        self.assertEqual(list(q.lists()), [('foo', ['bar'])])
        self.assertEqual(list(q.keys()), ['foo'])
        self.assertEqual(list(q.values()), ['bar'])
        self.assertEqual(len(q), 1)

        with self.assertRaises(AttributeError):
            q.update({'foo': 'bar'})
        with self.assertRaises(AttributeError):
            q.pop('foo')
        with self.assertRaises(AttributeError):
            q.popitem()
        with self.assertRaises(AttributeError):
            q.clear()
        with self.assertRaises(AttributeError):
            q.setdefault('foo', 'bar')

        self.assertEqual(q.urlencode(), 'foo=bar')

    def test_urlencode(self):
        q = QueryDict(mutable=True)
        q['next'] = '/a&b/'
        self.assertEqual(q.urlencode(), 'next=%2Fa%26b%2F')
        self.assertEqual(q.urlencode(safe='/'), 'next=/a%26b/')
        q = QueryDict(mutable=True)
        q['next'] = '/t\xebst&key/'
        self.assertEqual(q.urlencode(), 'next=%2Ft%C3%ABst%26key%2F')
        self.assertEqual(q.urlencode(safe='/'), 'next=/t%C3%ABst%26key/')
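    # Illustrative sketch (an addition, not part of the original suite): a common
    # reason to copy() an immutable QueryDict such as request.GET is to tweak one
    # parameter and re-encode the rest. Only documented QueryDict behavior is
    # assumed; the method name is new.
    def test_copy_modify_urlencode_sketch(self):
        q = QueryDict('page=2&q=caf%C3%A9')  # immutable, decoded as UTF-8 by default
        params = q.copy()  # the copy is mutable
        params['page'] = '3'  # replaces the whole value list for 'page'
        self.assertEqual(params.getlist('page'), ['3'])
        self.assertEqual(params.getlist('q'), ['café'])
        # Key order isn't asserted here, so accept either encoding.
        self.assertIn(params.urlencode(), ['page=3&q=caf%C3%A9', 'q=caf%C3%A9&page=3'])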
    def test_mutable_copy(self):
        """A copy of a QueryDict is mutable."""
        q = QueryDict().copy()
        with self.assertRaises(KeyError):
            q.__getitem__("foo")
        q['name'] = 'john'
        self.assertEqual(q['name'], 'john')

    def test_mutable_delete(self):
        q = QueryDict(mutable=True)
        q['name'] = 'john'
        del q['name']
        self.assertNotIn('name', q)

    def test_basic_mutable_operations(self):
        q = QueryDict(mutable=True)
        q['name'] = 'john'
        self.assertEqual(q.get('foo', 'default'), 'default')
        self.assertEqual(q.get('name', 'default'), 'john')
        self.assertEqual(q.getlist('name'), ['john'])
        self.assertEqual(q.getlist('foo'), [])

        q.setlist('foo', ['bar', 'baz'])
        self.assertEqual(q.get('foo', 'default'), 'baz')
        self.assertEqual(q.getlist('foo'), ['bar', 'baz'])

        q.appendlist('foo', 'another')
        self.assertEqual(q.getlist('foo'), ['bar', 'baz', 'another'])
        self.assertEqual(q['foo'], 'another')
        self.assertIn('foo', q)

        self.assertListEqual(sorted(q.items()), [('foo', 'another'), ('name', 'john')])
        self.assertListEqual(sorted(q.lists()), [('foo', ['bar', 'baz', 'another']), ('name', ['john'])])
        self.assertListEqual(sorted(q.keys()), ['foo', 'name'])
        self.assertListEqual(sorted(q.values()), ['another', 'john'])

        q.update({'foo': 'hello'})
        self.assertEqual(q['foo'], 'hello')
        self.assertEqual(q.get('foo', 'not available'), 'hello')
        self.assertEqual(q.getlist('foo'), ['bar', 'baz', 'another', 'hello'])
        self.assertEqual(q.pop('foo'), ['bar', 'baz', 'another', 'hello'])
        self.assertEqual(q.pop('foo', 'not there'), 'not there')
        self.assertEqual(q.get('foo', 'not there'), 'not there')
        self.assertEqual(q.setdefault('foo', 'bar'), 'bar')
        self.assertEqual(q['foo'], 'bar')
        self.assertEqual(q.getlist('foo'), ['bar'])
        self.assertIn(q.urlencode(), ['foo=bar&name=john', 'name=john&foo=bar'])

        q.clear()
        self.assertEqual(len(q), 0)

    def test_multiple_keys(self):
        """Test QueryDict with two key/value pairs with same keys."""

        q = QueryDict(str('vote=yes&vote=no'))

        self.assertEqual(q['vote'], 'no')
        with self.assertRaises(AttributeError):
            q.__setitem__('something', 'bar')

        self.assertEqual(q.get('vote', 'default'), 'no')
        self.assertEqual(q.get('foo', 'default'), 'default')
        self.assertEqual(q.getlist('vote'), ['yes', 'no'])
        self.assertEqual(q.getlist('foo'), [])

        with self.assertRaises(AttributeError):
            q.setlist('foo', ['bar', 'baz'])
        with self.assertRaises(AttributeError):
            q.setlist('foo', ['bar', 'baz'])
        with self.assertRaises(AttributeError):
            q.appendlist('foo', ['bar'])

        self.assertIn('vote', q)
        self.assertNotIn('foo', q)
        self.assertEqual(list(q.items()), [('vote', 'no')])
        self.assertEqual(list(q.lists()), [('vote', ['yes', 'no'])])
        self.assertEqual(list(q.keys()), ['vote'])
        self.assertEqual(list(q.values()), ['no'])
        self.assertEqual(len(q), 1)

        with self.assertRaises(AttributeError):
            q.update({'foo': 'bar'})
        with self.assertRaises(AttributeError):
            q.pop('foo')
        with self.assertRaises(AttributeError):
            q.popitem()
        with self.assertRaises(AttributeError):
            q.clear()
        with self.assertRaises(AttributeError):
            q.setdefault('foo', 'bar')
        with self.assertRaises(AttributeError):
            q.__delitem__('vote')

    def test_pickle(self):
        q = QueryDict()
        q1 = pickle.loads(pickle.dumps(q, 2))
        self.assertEqual(q, q1)
        q = QueryDict(str('a=b&c=d'))
        q1 = pickle.loads(pickle.dumps(q, 2))
        self.assertEqual(q, q1)
        q = QueryDict(str('a=b&c=d&a=1'))
        q1 = pickle.loads(pickle.dumps(q, 2))
        self.assertEqual(q, q1)

    def test_update_from_querydict(self):
        """Regression test for #8278: QueryDict.update(QueryDict)"""
        x = QueryDict(str("a=1&a=2"), mutable=True)
        y = QueryDict(str("a=3&a=4"))
        x.update(y)
        self.assertEqual(x.getlist('a'), ['1', '2', '3', '4'])

    def test_non_default_encoding(self):
        """#13572 - QueryDict with a non-default encoding"""
        q = QueryDict(str('cur=%A4'), encoding='iso-8859-15')
        self.assertEqual(q.encoding, 'iso-8859-15')
        self.assertEqual(list(q.items()), [('cur', '€')])
        self.assertEqual(q.urlencode(), 'cur=%A4')
        q = q.copy()
        self.assertEqual(q.encoding, 'iso-8859-15')
        self.assertEqual(list(q.items()), [('cur', '€')])
        self.assertEqual(q.urlencode(), 'cur=%A4')
        self.assertEqual(copy.copy(q).encoding, 'iso-8859-15')
        self.assertEqual(copy.deepcopy(q).encoding, 'iso-8859-15')
    def test_querydict_fromkeys(self):
        self.assertEqual(QueryDict.fromkeys(['key1', 'key2', 'key3']), QueryDict('key1&key2&key3'))

    def test_fromkeys_with_nonempty_value(self):
        self.assertEqual(
            QueryDict.fromkeys(['key1', 'key2', 'key3'], value='val'),
            QueryDict('key1=val&key2=val&key3=val')
        )

    def test_fromkeys_is_immutable_by_default(self):
        # Match behavior of __init__() which is also immutable by default.
        q = QueryDict.fromkeys(['key1', 'key2', 'key3'])
        with self.assertRaisesMessage(AttributeError, 'This QueryDict instance is immutable'):
            q['key4'] = 'nope'

    def test_fromkeys_mutable_override(self):
        q = QueryDict.fromkeys(['key1', 'key2', 'key3'], mutable=True)
        q['key4'] = 'yep'
        self.assertEqual(q, QueryDict('key1&key2&key3&key4=yep'))

    def test_duplicates_in_fromkeys_iterable(self):
        self.assertEqual(QueryDict.fromkeys('xyzzy'), QueryDict('x&y&z&z&y'))

    def test_fromkeys_with_nondefault_encoding(self):
        key_utf16 = b'\xff\xfe\x8e\x02\xdd\x01\x9e\x02'
        value_utf16 = b'\xff\xfe\xdd\x01n\x00l\x00P\x02\x8c\x02'
        q = QueryDict.fromkeys([key_utf16], value=value_utf16, encoding='utf-16')
        expected = QueryDict('', mutable=True)
        expected['ʎǝʞ'] = 'ǝnlɐʌ'
        self.assertEqual(q, expected)

    def test_fromkeys_empty_iterable(self):
        self.assertEqual(QueryDict.fromkeys([]), QueryDict(''))

    def test_fromkeys_noniterable(self):
        with self.assertRaises(TypeError):
            QueryDict.fromkeys(0)

class HttpResponseTests(unittest.TestCase):
    def test_headers_type(self):
        r = HttpResponse()

        # The following tests explicitly test types in addition to values
        # because in Python 2 u'foo' == b'foo'.

        # ASCII unicode or bytes values are converted to native strings.
        r['key'] = 'test'
        self.assertEqual(r['key'], str('test'))
        self.assertIsInstance(r['key'], str)
        r['key'] = 'test'.encode('ascii')
        self.assertEqual(r['key'], str('test'))
        self.assertIsInstance(r['key'], str)
        self.assertIn(b'test', r.serialize_headers())

        # Non-ASCII values are serialized to Latin-1.
        r['key'] = 'café'
        self.assertIn('café'.encode('latin-1'), r.serialize_headers())

        # Other unicode values are MIME-encoded (there's no way to pass them as bytes).
        r['key'] = '†'
        self.assertEqual(r['key'], str('=?utf-8?b?4oCg?='))
        self.assertIsInstance(r['key'], str)
        self.assertIn(b'=?utf-8?b?4oCg?=', r.serialize_headers())

        # The response also converts unicode or bytes keys to strings, but requires
        # them to contain ASCII
        r = HttpResponse()
        del r['Content-Type']
        r['foo'] = 'bar'
        headers = list(r.items())
        self.assertEqual(len(headers), 1)
        self.assertEqual(headers[0], ('foo', 'bar'))
        self.assertIsInstance(headers[0][0], str)

        r = HttpResponse()
        del r['Content-Type']
        r[b'foo'] = 'bar'
        headers = list(r.items())
        self.assertEqual(len(headers), 1)
        self.assertEqual(headers[0], ('foo', 'bar'))
        self.assertIsInstance(headers[0][0], str)

        r = HttpResponse()
        with self.assertRaises(UnicodeError):
            r.__setitem__('føø', 'bar')
        with self.assertRaises(UnicodeError):
            r.__setitem__('føø'.encode('utf-8'), 'bar')

    def test_long_line(self):
        # Bug #20889: long lines trigger newlines to be added to headers
        # (which is not allowed due to bug #10188)
        h = HttpResponse()
        f = 'zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz a\xcc\x88'.encode('latin-1')
        f = f.decode('utf-8')
        h['Content-Disposition'] = 'attachment; filename="%s"' % f
        # This one is triggering http://bugs.python.org/issue20747, that is Python
        # will itself insert a newline in the header
        h['Content-Disposition'] = 'attachment; filename="EdelRot_Blu\u0308te (3)-0.JPG"'

    def test_newlines_in_headers(self):
        # Bug #10188: Do not allow newlines in headers (CR or LF)
        r = HttpResponse()
        with self.assertRaises(BadHeaderError):
            r.__setitem__('test\rstr', 'test')
        with self.assertRaises(BadHeaderError):
            r.__setitem__('test\nstr', 'test')

    def test_dict_behavior(self):
        """
        Test for bug #14020: Make HttpResponse.get work like dict.get
        """
        r = HttpResponse()
        self.assertIsNone(r.get('test'))

    def test_non_string_content(self):
        # Bug 16494: HttpResponse should behave consistently with non-strings
        r = HttpResponse(12345)
        self.assertEqual(r.content, b'12345')

        # test content via property
        r = HttpResponse()
        r.content = 12345
        self.assertEqual(r.content, b'12345')

    def test_iter_content(self):
        r = HttpResponse(['abc', 'def', 'ghi'])
        self.assertEqual(r.content, b'abcdefghi')

        # test iter content via property
        r = HttpResponse()
        r.content = ['idan', 'alex', 'jacob']
        self.assertEqual(r.content, b'idanalexjacob')

        r = HttpResponse()
        r.content = [1, 2, 3]
        self.assertEqual(r.content, b'123')

        # test odd inputs
        r = HttpResponse()
        r.content = ['1', '2', 3, '\u079e']
        # '\xde\x9e' == unichr(1950).encode('utf-8')
        self.assertEqual(r.content, b'123\xde\x9e')

        # .content can safely be accessed multiple times.
        r = HttpResponse(iter(['hello', 'world']))
        self.assertEqual(r.content, r.content)
        self.assertEqual(r.content, b'helloworld')
        # __iter__ can safely be called multiple times (#20187).
        self.assertEqual(b''.join(r), b'helloworld')
        self.assertEqual(b''.join(r), b'helloworld')
        # Accessing .content still works.
        self.assertEqual(r.content, b'helloworld')

        # Accessing .content also works if the response was iterated first.
        r = HttpResponse(iter(['hello', 'world']))
        self.assertEqual(b''.join(r), b'helloworld')
        self.assertEqual(r.content, b'helloworld')

        # Additional content can be written to the response.
        r = HttpResponse(iter(['hello', 'world']))
        self.assertEqual(r.content, b'helloworld')
        r.write('!')
        self.assertEqual(r.content, b'helloworld!')

    def test_iterator_isnt_rewound(self):
        # Regression test for #13222
        r = HttpResponse('abc')
        i = iter(r)
        self.assertEqual(list(i), [b'abc'])
        self.assertEqual(list(i), [])

    def test_lazy_content(self):
        r = HttpResponse(lazystr('helloworld'))
        self.assertEqual(r.content, b'helloworld')

    def test_file_interface(self):
        r = HttpResponse()
        r.write(b"hello")
        self.assertEqual(r.tell(), 5)
        r.write("привет")
        self.assertEqual(r.tell(), 17)

        r = HttpResponse(['abc'])
        r.write('def')
        self.assertEqual(r.tell(), 6)
        self.assertEqual(r.content, b'abcdef')

        # with Content-Encoding header
        r = HttpResponse()
        r['Content-Encoding'] = 'winning'
        r.write(b'abc')
        r.write(b'def')
        self.assertEqual(r.content, b'abcdef')

    def test_stream_interface(self):
        r = HttpResponse('asdf')
        self.assertEqual(r.getvalue(), b'asdf')

        r = HttpResponse()
        self.assertIs(r.writable(), True)
        r.writelines(['foo\n', 'bar\n', 'baz\n'])
        self.assertEqual(r.content, b'foo\nbar\nbaz\n')

    def test_unsafe_redirect(self):
        bad_urls = [
            'data:text/html,<script>window.alert("xss")</script>',
            'mailto:test@example.com',
            'file:///etc/passwd',
        ]
        for url in bad_urls:
            with self.assertRaises(SuspiciousOperation):
                HttpResponseRedirect(url)
            with self.assertRaises(SuspiciousOperation):
                HttpResponsePermanentRedirect(url)
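    # Illustrative sketch (an addition, not part of the original suite): HttpResponse
    # also accepts the documented `status` and `content_type` constructor arguments
    # used elsewhere in this file. Only documented behavior is assumed; the method
    # name is new.
    def test_constructor_kwargs_sketch(self):
        r = HttpResponse('created', status=201, content_type='text/plain')
        self.assertEqual(r.status_code, 201)
        self.assertEqual(r['Content-Type'], 'text/plain')
        self.assertEqual(r.content, b'created')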
class HttpResponseSubclassesTests(SimpleTestCase):
    def test_redirect(self):
        response = HttpResponseRedirect('/redirected/')
        self.assertEqual(response.status_code, 302)
        # Standard HttpResponse init args can be used
        response = HttpResponseRedirect(
            '/redirected/',
            content='The resource has temporarily moved',
            content_type='text/html',
        )
        self.assertContains(response, 'The resource has temporarily moved', status_code=302)
        self.assertEqual(response.url, response['Location'])

    def test_redirect_lazy(self):
        """Make sure HttpResponseRedirect works with lazy strings."""
        r = HttpResponseRedirect(lazystr('/redirected/'))
        self.assertEqual(r.url, '/redirected/')

    def test_redirect_repr(self):
        response = HttpResponseRedirect('/redirected/')
        expected = '<HttpResponseRedirect status_code=302, "text/html; charset=utf-8", url="/redirected/">'
        self.assertEqual(repr(response), expected)

    def test_invalid_redirect_repr(self):
        """
        If HttpResponseRedirect raises DisallowedRedirect, its __repr__()
        should work (in the debug view, for example).
        """
        response = HttpResponseRedirect.__new__(HttpResponseRedirect)
        with self.assertRaisesMessage(DisallowedRedirect, "Unsafe redirect to URL with protocol 'ssh'"):
            HttpResponseRedirect.__init__(response, 'ssh://foo')
        expected = '<HttpResponseRedirect status_code=302, "text/html; charset=utf-8", url="ssh://foo">'
        self.assertEqual(repr(response), expected)

    def test_not_modified(self):
        response = HttpResponseNotModified()
        self.assertEqual(response.status_code, 304)
        # 304 responses should not have content/content-type
        with self.assertRaises(AttributeError):
            response.content = "Hello dear"
        self.assertNotIn('content-type', response)

    def test_not_modified_repr(self):
        response = HttpResponseNotModified()
        self.assertEqual(repr(response), '<HttpResponseNotModified status_code=304>')

    def test_not_allowed(self):
        response = HttpResponseNotAllowed(['GET'])
        self.assertEqual(response.status_code, 405)
        # Standard HttpResponse init args can be used
        response = HttpResponseNotAllowed(['GET'], content='Only the GET method is allowed', content_type='text/html')
        self.assertContains(response, 'Only the GET method is allowed', status_code=405)

    def test_not_allowed_repr(self):
        response = HttpResponseNotAllowed(['GET', 'OPTIONS'], content_type='text/plain')
        expected = '<HttpResponseNotAllowed [GET, OPTIONS] status_code=405, "text/plain">'
        self.assertEqual(repr(response), expected)

    def test_not_allowed_repr_no_content_type(self):
        response = HttpResponseNotAllowed(('GET', 'POST'))
        del response['Content-Type']
        self.assertEqual(repr(response), '<HttpResponseNotAllowed [GET, POST] status_code=405>')
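    # Illustrative sketch (an addition, not part of the original suite): the redirect
    # classes only reject unsafe schemes, so relative, scheme-relative, and plain
    # https targets pass through to the Location header unchanged. Only documented
    # behavior is assumed; the method name is new.
    def test_redirect_allowed_targets_sketch(self):
        for url in ['/relative/', 'https://example.com/', '//example.com/']:
            self.assertEqual(HttpResponseRedirect(url).url, url)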
class JsonResponseTests(SimpleTestCase):
    def test_json_response_non_ascii(self):
        data = {'key': 'łóżko'}
        response = JsonResponse(data)
        self.assertEqual(json.loads(response.content.decode()), data)

    def test_json_response_raises_type_error_with_default_setting(self):
        with self.assertRaisesMessage(
            TypeError,
            'In order to allow non-dict objects to be serialized set the '
            'safe parameter to False'
        ):
            JsonResponse([1, 2, 3])

    def test_json_response_text(self):
        response = JsonResponse('foobar', safe=False)
        self.assertEqual(json.loads(response.content.decode()), 'foobar')

    def test_json_response_list(self):
        response = JsonResponse(['foo', 'bar'], safe=False)
        self.assertEqual(json.loads(response.content.decode()), ['foo', 'bar'])

    def test_json_response_uuid(self):
        u = uuid.uuid4()
        response = JsonResponse(u, safe=False)
        self.assertEqual(json.loads(response.content.decode()), str(u))

    def test_json_response_custom_encoder(self):
        class CustomDjangoJSONEncoder(DjangoJSONEncoder):
            def encode(self, o):
                return json.dumps({'foo': 'bar'})

        response = JsonResponse({}, encoder=CustomDjangoJSONEncoder)
        self.assertEqual(json.loads(response.content.decode()), {'foo': 'bar'})

    def test_json_response_passing_arguments_to_json_dumps(self):
        response = JsonResponse({'foo': 'bar'}, json_dumps_params={'indent': 2})
        # json.dumps(..., indent=2) indents nested items with two spaces.
        self.assertEqual(response.content.decode(), '{\n  "foo": "bar"\n}')
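    # Illustrative sketch (an addition, not part of the original suite): JsonResponse
    # uses DjangoJSONEncoder by default, so values such as dates serialize to their
    # ISO 8601 form. Only documented behavior is assumed; the method name is new.
    def test_json_response_date_sketch(self):
        import datetime
        response = JsonResponse({'date': datetime.date(2005, 5, 5)})
        self.assertEqual(json.loads(response.content.decode()), {'date': '2005-05-05'})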
class StreamingHttpResponseTests(SimpleTestCase):
    def test_streaming_response(self):
        r = StreamingHttpResponse(iter(['hello', 'world']))

        # iterating over the response itself yields bytestring chunks.
        chunks = list(r)
        self.assertEqual(chunks, [b'hello', b'world'])
        for chunk in chunks:
            self.assertIsInstance(chunk, bytes)

        # and the response can only be iterated once.
        self.assertEqual(list(r), [])

        # even when a sequence that can be iterated many times, like a list,
        # is given as content.
        r = StreamingHttpResponse(['abc', 'def'])
        self.assertEqual(list(r), [b'abc', b'def'])
        self.assertEqual(list(r), [])

        # iterating over Unicode strings still yields bytestring chunks.
        r.streaming_content = iter(['hello', 'café'])
        chunks = list(r)
        # '\xc3\xa9' == unichr(233).encode('utf-8')
        self.assertEqual(chunks, [b'hello', b'caf\xc3\xa9'])
        for chunk in chunks:
            self.assertIsInstance(chunk, bytes)

        # streaming responses don't have a `content` attribute.
        self.assertFalse(hasattr(r, 'content'))

        # and you can't accidentally assign to a `content` attribute.
        with self.assertRaises(AttributeError):
            r.content = 'xyz'

        # but they do have a `streaming_content` attribute.
        self.assertTrue(hasattr(r, 'streaming_content'))

        # that exists so we can check if a response is streaming, and wrap or
        # replace the content iterator.
        r.streaming_content = iter(['abc', 'def'])
        r.streaming_content = (chunk.upper() for chunk in r.streaming_content)
        self.assertEqual(list(r), [b'ABC', b'DEF'])

        # coercing a streaming response to bytes doesn't return a complete HTTP
        # message like a regular response does. it only gives us the headers.
        r = StreamingHttpResponse(iter(['hello', 'world']))
        self.assertEqual(bytes(r), b'Content-Type: text/html; charset=utf-8')

        # and this won't consume its content.
        self.assertEqual(list(r), [b'hello', b'world'])

        # additional content cannot be written to the response.
        r = StreamingHttpResponse(iter(['hello', 'world']))
        with self.assertRaises(Exception):
            r.write('!')

        # and we can't tell the current position.
        with self.assertRaises(Exception):
            r.tell()

        r = StreamingHttpResponse(iter(['hello', 'world']))
        self.assertEqual(r.getvalue(), b'helloworld')
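    # Illustrative sketch (an addition, not part of the original suite): code that
    # handles both response types usually checks the documented `streaming` flag
    # rather than the class. Only documented behavior is assumed; the method name
    # is new.
    def test_streaming_flag_sketch(self):
        self.assertIs(StreamingHttpResponse(iter([])).streaming, True)
        self.assertIs(HttpResponse().streaming, False)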
class FileCloseTests(SimpleTestCase):
    def setUp(self):
        # Disable the request_finished signal during this test
        # to avoid interfering with the database connection.
        request_finished.disconnect(close_old_connections)

    def tearDown(self):
        request_finished.connect(close_old_connections)

    def test_response(self):
        filename = os.path.join(os.path.dirname(__file__), 'abc.txt')

        # file isn't closed until we close the response.
        file1 = open(filename)
        r = HttpResponse(file1)
        self.assertTrue(file1.closed)
        r.close()

        # when multiple files are assigned as content, make sure they are all
        # closed with the response.
        file1 = open(filename)
        file2 = open(filename)
        r = HttpResponse(file1)
        r.content = file2
        self.assertTrue(file1.closed)
        self.assertTrue(file2.closed)

    def test_streaming_response(self):
        filename = os.path.join(os.path.dirname(__file__), 'abc.txt')

        # file isn't closed until we close the response.
        file1 = open(filename)
        r = StreamingHttpResponse(file1)
        self.assertFalse(file1.closed)
        r.close()
        self.assertTrue(file1.closed)

        # when multiple files are assigned as content, make sure they are all
        # closed with the response.
        file1 = open(filename)
        file2 = open(filename)
        r = StreamingHttpResponse(file1)
        r.streaming_content = file2
        self.assertFalse(file1.closed)
        self.assertFalse(file2.closed)
        r.close()
        self.assertTrue(file1.closed)
        self.assertTrue(file2.closed)

class CookieTests(unittest.TestCase):
    def test_encode(self):
        """Semicolons and commas are encoded."""
        c = SimpleCookie()
        c['test'] = "An,awkward;value"
        self.assertNotIn(";", c.output().rstrip(';'))  # IE compat
        self.assertNotIn(",", c.output().rstrip(';'))  # Safari compat

    def test_decode(self):
        """Semicolons and commas are decoded."""
        c = SimpleCookie()
        c['test'] = "An,awkward;value"
        c2 = SimpleCookie()
        c2.load(c.output()[12:])
        self.assertEqual(c['test'].value, c2['test'].value)
        c3 = parse_cookie(c.output()[12:])
        self.assertEqual(c['test'].value, c3['test'])

    def test_decode_2(self):
        c = SimpleCookie()
        c['test'] = b"\xf0"
        c2 = SimpleCookie()
        c2.load(c.output()[12:])
        self.assertEqual(c['test'].value, c2['test'].value)
        c3 = parse_cookie(c.output()[12:])
        self.assertEqual(c['test'].value, c3['test'])

    def test_nonstandard_keys(self):
        """
        A single non-standard cookie name doesn't affect all cookies (#13007).
        """
        self.assertIn('good_cookie', parse_cookie('good_cookie=yes;bad:cookie=yes').keys())

    def test_repeated_nonstandard_keys(self):
        """
        A repeated non-standard name doesn't affect all cookies (#15852).
        """
        self.assertIn('good_cookie', parse_cookie('a:=b; a:=c; good_cookie=yes').keys())

    def test_python_cookies(self):
        """
        Test cases copied from Python's Lib/test/test_http_cookies.py
        """
        self.assertEqual(parse_cookie('chips=ahoy; vienna=finger'), {'chips': 'ahoy', 'vienna': 'finger'})
        # Here parse_cookie() differs from Python's cookie parsing in that it
        # treats all semicolons as delimiters, even within quotes.
        self.assertEqual(
            parse_cookie('keebler="E=mc2; L=\\"Loves\\"; fudge=\\012;"'),
            {'keebler': '"E=mc2', 'L': '\\"Loves\\"', 'fudge': '\\012', '': '"'}
        )
        # Illegal cookies that have an '=' char in an unquoted value.
        self.assertEqual(parse_cookie('keebler=E=mc2'), {'keebler': 'E=mc2'})
        # Cookies with ':' character in their name.
        self.assertEqual(parse_cookie('key:term=value:term'), {'key:term': 'value:term'})
        # Cookies with '[' and ']'.
        self.assertEqual(parse_cookie('a=b; c=[; d=r; f=h'), {'a': 'b', 'c': '[', 'd': 'r', 'f': 'h'})

    def test_cookie_edgecases(self):
        # Cookies that RFC6265 allows.
        self.assertEqual(parse_cookie('a=b; Domain=example.com'), {'a': 'b', 'Domain': 'example.com'})
        # parse_cookie() has historically kept only the last cookie with the
        # same name.
        self.assertEqual(parse_cookie('a=b; h=i; a=c'), {'a': 'c', 'h': 'i'})

    def test_invalid_cookies(self):
        """
        Cookie strings that go against RFC6265 but browsers will send if set
        via document.cookie.
        """
        # Chunks without an equals sign appear as unnamed values per
        # https://bugzilla.mozilla.org/show_bug.cgi?id=169091
        self.assertIn('django_language', parse_cookie('abc=def; unnamed; django_language=en').keys())
        # Even a double quote may be an unnamed value.
        self.assertEqual(parse_cookie('a=b; "; c=d'), {'a': 'b', '': '"', 'c': 'd'})
        # Spaces in names and values, and an equals sign in values.
        self.assertEqual(parse_cookie('a b c=d e = f; gh=i'), {'a b c': 'd e = f', 'gh': 'i'})
        # More characters the spec forbids.
        self.assertEqual(parse_cookie('a b,c<>@:/[]?{}=d " =e,f g'), {'a b,c<>@:/[]?{}': 'd " =e,f g'})
        # Unicode characters. The spec only allows ASCII.
        self.assertEqual(parse_cookie('saint=André Bessette'), {'saint': 'André Bessette'})
        # Browsers don't send extra whitespace or semicolons in Cookie headers,
        # but parse_cookie() should parse whitespace the same way
        # document.cookie parses whitespace.
        self.assertEqual(parse_cookie(' = b ; ; = ; c = ; '), {'': 'b', 'c': ''})
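    # Illustrative sketch (an addition, not part of the original suite): parse_cookie()
    # returns a plain dict mapping cookie names to string values, and an empty header
    # yields an empty dict. Only documented behavior is assumed; the method name is new.
    def test_parse_cookie_returns_dict_sketch(self):
        self.assertEqual(parse_cookie(''), {})
        parsed = parse_cookie('a=b; c=d')
        self.assertIsInstance(parsed, dict)
        self.assertEqual(parsed, {'a': 'b', 'c': 'd'})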
    def test_httponly_after_load(self):
        c = SimpleCookie()
        c.load("name=val")
        c['name']['httponly'] = True
        self.assertTrue(c['name']['httponly'])

    def test_load_dict(self):
        c = SimpleCookie()
        c.load({'name': 'val'})
        self.assertEqual(c['name'].value, 'val')

    def test_pickle(self):
        rawdata = 'Customer="WILE_E_COYOTE"; Path=/acme; Version=1'
        expected_output = 'Set-Cookie: %s' % rawdata

        C = SimpleCookie()
        C.load(rawdata)
        self.assertEqual(C.output(), expected_output)

        for proto in range(pickle.HIGHEST_PROTOCOL + 1):
            C1 = pickle.loads(pickle.dumps(C, protocol=proto))
            self.assertEqual(C1.output(), expected_output)