import copy
import json
import os
import pickle
import unittest
import uuid

from django.core.exceptions import DisallowedRedirect
from django.core.serializers.json import DjangoJSONEncoder
from django.core.signals import request_finished
from django.db import close_old_connections
from django.http import (
    BadHeaderError,
    HttpResponse,
    HttpResponseNotAllowed,
    HttpResponseNotModified,
    HttpResponsePermanentRedirect,
    HttpResponseRedirect,
    JsonResponse,
    QueryDict,
    SimpleCookie,
    StreamingHttpResponse,
    parse_cookie,
)
from django.test import SimpleTestCase
from django.utils.functional import lazystr


class QueryDictTests(SimpleTestCase):
    def test_create_with_no_args(self):
        self.assertEqual(QueryDict(), QueryDict(""))

    def test_missing_key(self):
        q = QueryDict()
        with self.assertRaises(KeyError):
            q.__getitem__("foo")

    def test_immutability(self):
        q = QueryDict()
        with self.assertRaises(AttributeError):
            q.__setitem__("something", "bar")
        with self.assertRaises(AttributeError):
            q.setlist("foo", ["bar"])
        with self.assertRaises(AttributeError):
            q.appendlist("foo", ["bar"])
        with self.assertRaises(AttributeError):
            q.update({"foo": "bar"})
        with self.assertRaises(AttributeError):
            q.pop("foo")
        with self.assertRaises(AttributeError):
            q.popitem()
        with self.assertRaises(AttributeError):
            q.clear()

    def test_immutable_get_with_default(self):
        q = QueryDict()
        self.assertEqual(q.get("foo", "default"), "default")

    def test_immutable_basic_operations(self):
        q = QueryDict()
        self.assertEqual(q.getlist("foo"), [])
        self.assertNotIn("foo", q)
        self.assertEqual(list(q), [])
        self.assertEqual(list(q.items()), [])
        self.assertEqual(list(q.lists()), [])
        self.assertEqual(list(q.keys()), [])
        self.assertEqual(list(q.values()), [])
        self.assertEqual(len(q), 0)
        self.assertEqual(q.urlencode(), "")

    def test_single_key_value(self):
        """Test QueryDict with one key/value pair"""

        q = QueryDict("foo=bar")
        self.assertEqual(q["foo"], "bar")
        with self.assertRaises(KeyError):
            q.__getitem__("bar")
        with self.assertRaises(AttributeError):
            q.__setitem__("something", "bar")

        self.assertEqual(q.get("foo", "default"), "bar")
        self.assertEqual(q.get("bar", "default"), "default")
        self.assertEqual(q.getlist("foo"), ["bar"])
        self.assertEqual(q.getlist("bar"), [])

        with self.assertRaises(AttributeError):
            q.setlist("foo", ["bar"])
        with self.assertRaises(AttributeError):
            q.appendlist("foo", ["bar"])

        self.assertIn("foo", q)
        self.assertNotIn("bar", q)

        self.assertEqual(list(q), ["foo"])
        self.assertEqual(list(q.items()), [("foo", "bar")])
        self.assertEqual(list(q.lists()), [("foo", ["bar"])])
        self.assertEqual(list(q.keys()), ["foo"])
        self.assertEqual(list(q.values()), ["bar"])
        self.assertEqual(len(q), 1)

        with self.assertRaises(AttributeError):
            q.update({"foo": "bar"})
        with self.assertRaises(AttributeError):
            q.pop("foo")
        with self.assertRaises(AttributeError):
            q.popitem()
        with self.assertRaises(AttributeError):
            q.clear()
        with self.assertRaises(AttributeError):
            q.setdefault("foo", "bar")

        self.assertEqual(q.urlencode(), "foo=bar")

    def test_urlencode(self):
        q = QueryDict(mutable=True)
        q["next"] = "/a&b/"
        self.assertEqual(q.urlencode(), "next=%2Fa%26b%2F")
        self.assertEqual(q.urlencode(safe="/"), "next=/a%26b/")
        q = QueryDict(mutable=True)
        q["next"] = "/t\xebst&key/"
        self.assertEqual(q.urlencode(), "next=%2Ft%C3%ABst%26key%2F")
        self.assertEqual(q.urlencode(safe="/"), "next=/t%C3%ABst%26key/")

    def test_urlencode_int(self):
        # Normally QueryDict doesn't contain non-string values but lazily
        # written tests may make that mistake.
        q = QueryDict(mutable=True)
        q["a"] = 1
        self.assertEqual(q.urlencode(), "a=1")

    def test_mutable_copy(self):
        """A copy of a QueryDict is mutable."""
        q = QueryDict().copy()
        with self.assertRaises(KeyError):
            q.__getitem__("foo")
        q["name"] = "john"
        self.assertEqual(q["name"], "john")

    def test_mutable_delete(self):
        q = QueryDict(mutable=True)
        q["name"] = "john"
        del q["name"]
        self.assertNotIn("name", q)

    def test_basic_mutable_operations(self):
        q = QueryDict(mutable=True)
        q["name"] = "john"
        self.assertEqual(q.get("foo", "default"), "default")
        self.assertEqual(q.get("name", "default"), "john")
        self.assertEqual(q.getlist("name"), ["john"])
        self.assertEqual(q.getlist("foo"), [])

        q.setlist("foo", ["bar", "baz"])
        self.assertEqual(q.get("foo", "default"), "baz")
        self.assertEqual(q.getlist("foo"), ["bar", "baz"])

        q.appendlist("foo", "another")
        self.assertEqual(q.getlist("foo"), ["bar", "baz", "another"])
        self.assertEqual(q["foo"], "another")
        self.assertIn("foo", q)

        self.assertCountEqual(q, ["foo", "name"])
        self.assertCountEqual(q.items(), [("foo", "another"), ("name", "john")])
        self.assertCountEqual(
            q.lists(), [("foo", ["bar", "baz", "another"]), ("name", ["john"])]
        )
        self.assertCountEqual(q.keys(), ["foo", "name"])
        self.assertCountEqual(q.values(), ["another", "john"])

        q.update({"foo": "hello"})
        self.assertEqual(q["foo"], "hello")
        self.assertEqual(q.get("foo", "not available"), "hello")
        self.assertEqual(q.getlist("foo"), ["bar", "baz", "another", "hello"])
        self.assertEqual(q.pop("foo"), ["bar", "baz", "another", "hello"])
        self.assertEqual(q.pop("foo", "not there"), "not there")
        self.assertEqual(q.get("foo", "not there"), "not there")
        self.assertEqual(q.setdefault("foo", "bar"), "bar")
        self.assertEqual(q["foo"], "bar")
        self.assertEqual(q.getlist("foo"), ["bar"])
        self.assertIn(q.urlencode(), ["foo=bar&name=john", "name=john&foo=bar"])

        q.clear()
        self.assertEqual(len(q), 0)

    def test_multiple_keys(self):
        """Test QueryDict with two key/value pairs with same keys."""

        q = QueryDict("vote=yes&vote=no")

        self.assertEqual(q["vote"], "no")
        with self.assertRaises(AttributeError):
            q.__setitem__("something", "bar")

        self.assertEqual(q.get("vote", "default"), "no")
        self.assertEqual(q.get("foo", "default"), "default")
        self.assertEqual(q.getlist("vote"), ["yes", "no"])
        self.assertEqual(q.getlist("foo"), [])

        with self.assertRaises(AttributeError):
            q.setlist("foo", ["bar", "baz"])
        with self.assertRaises(AttributeError):
            q.setlist("foo", ["bar", "baz"])
        with self.assertRaises(AttributeError):
            q.appendlist("foo", ["bar"])

        self.assertIn("vote", q)
        self.assertNotIn("foo", q)
        self.assertEqual(list(q), ["vote"])
        self.assertEqual(list(q.items()), [("vote", "no")])
        self.assertEqual(list(q.lists()), [("vote", ["yes", "no"])])
        self.assertEqual(list(q.keys()), ["vote"])
        self.assertEqual(list(q.values()), ["no"])
        self.assertEqual(len(q), 1)

        with self.assertRaises(AttributeError):
            q.update({"foo": "bar"})
        with self.assertRaises(AttributeError):
            q.pop("foo")
        with self.assertRaises(AttributeError):
            q.popitem()
        with self.assertRaises(AttributeError):
            q.clear()
        with self.assertRaises(AttributeError):
            q.setdefault("foo", "bar")
        with self.assertRaises(AttributeError):
            q.__delitem__("vote")

    def test_pickle(self):
        q = QueryDict()
        q1 = pickle.loads(pickle.dumps(q, 2))
        self.assertEqual(q, q1)
        q = QueryDict("a=b&c=d")
        q1 = pickle.loads(pickle.dumps(q, 2))
        self.assertEqual(q, q1)
        q = QueryDict("a=b&c=d&a=1")
        q1 = pickle.loads(pickle.dumps(q, 2))
        self.assertEqual(q, q1)

    def test_update_from_querydict(self):
        """Regression test for #8278: QueryDict.update(QueryDict)"""
        x = QueryDict("a=1&a=2", mutable=True)
        y = QueryDict("a=3&a=4")
        x.update(y)
        self.assertEqual(x.getlist("a"), ["1", "2", "3", "4"])

    def test_non_default_encoding(self):
        """#13572 - QueryDict with a non-default encoding"""
        q = QueryDict("cur=%A4", encoding="iso-8859-15")
        self.assertEqual(q.encoding, "iso-8859-15")
        self.assertEqual(list(q.items()), [("cur", "€")])
        self.assertEqual(q.urlencode(), "cur=%A4")
        q = q.copy()
        self.assertEqual(q.encoding, "iso-8859-15")
        self.assertEqual(list(q.items()), [("cur", "€")])
        self.assertEqual(q.urlencode(), "cur=%A4")
        self.assertEqual(copy.copy(q).encoding, "iso-8859-15")
        self.assertEqual(copy.deepcopy(q).encoding, "iso-8859-15")

    def test_querydict_fromkeys(self):
        self.assertEqual(
            QueryDict.fromkeys(["key1", "key2", "key3"]), QueryDict("key1&key2&key3")
        )

    def test_fromkeys_with_nonempty_value(self):
        self.assertEqual(
            QueryDict.fromkeys(["key1", "key2", "key3"], value="val"),
            QueryDict("key1=val&key2=val&key3=val"),
        )

    def test_fromkeys_is_immutable_by_default(self):
        # Match behavior of __init__() which is also immutable by default.
        q = QueryDict.fromkeys(["key1", "key2", "key3"])
        with self.assertRaisesMessage(
            AttributeError, "This QueryDict instance is immutable"
        ):
            q["key4"] = "nope"

    def test_fromkeys_mutable_override(self):
        q = QueryDict.fromkeys(["key1", "key2", "key3"], mutable=True)
        q["key4"] = "yep"
        self.assertEqual(q, QueryDict("key1&key2&key3&key4=yep"))

    def test_duplicates_in_fromkeys_iterable(self):
        self.assertEqual(QueryDict.fromkeys("xyzzy"), QueryDict("x&y&z&z&y"))

    def test_fromkeys_with_nondefault_encoding(self):
        key_utf16 = b"\xff\xfe\x8e\x02\xdd\x01\x9e\x02"
        value_utf16 = b"\xff\xfe\xdd\x01n\x00l\x00P\x02\x8c\x02"
        q = QueryDict.fromkeys([key_utf16], value=value_utf16, encoding="utf-16")
        expected = QueryDict("", mutable=True)
        expected["ʎǝʞ"] = "ǝnlɐʌ"
        self.assertEqual(q, expected)

    def test_fromkeys_empty_iterable(self):
        self.assertEqual(QueryDict.fromkeys([]), QueryDict(""))

    def test_fromkeys_noniterable(self):
        with self.assertRaises(TypeError):
            QueryDict.fromkeys(0)
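
    def test_dict_and_urlencode_roundtrip_sketch(self):
        # Illustrative sketch relying only on documented QueryDict behavior:
        # dict() keeps the last value per key, getlist() keeps every value,
        # and urlencode() preserves them all, so a QueryDict round-trips
        # through its own query string.
        q = QueryDict("a=1&a=2&b=3")
        self.assertEqual(q.dict(), {"a": "2", "b": "3"})
        self.assertEqual(q.getlist("a"), ["1", "2"])
        self.assertEqual(QueryDict(q.urlencode()), q)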


class HttpResponseTests(SimpleTestCase):
    def test_headers_type(self):
        r = HttpResponse()

        # ASCII strings or bytes values are converted to strings.
        r.headers["key"] = "test"
        self.assertEqual(r.headers["key"], "test")
        r.headers["key"] = b"test"
        self.assertEqual(r.headers["key"], "test")
        self.assertIn(b"test", r.serialize_headers())

        # Non-ASCII values are serialized to Latin-1.
        r.headers["key"] = "café"
        self.assertIn("café".encode("latin-1"), r.serialize_headers())

        # Other Unicode values are MIME-encoded (there's no way to pass them as
        # bytes).
        r.headers["key"] = "†"
        self.assertEqual(r.headers["key"], "=?utf-8?b?4oCg?=")
        self.assertIn(b"=?utf-8?b?4oCg?=", r.serialize_headers())

        # The response also converts string or bytes keys to strings, but requires
        # them to contain ASCII
        r = HttpResponse()
        del r.headers["Content-Type"]
        r.headers["foo"] = "bar"
        headers = list(r.headers.items())
        self.assertEqual(len(headers), 1)
        self.assertEqual(headers[0], ("foo", "bar"))

        r = HttpResponse()
        del r.headers["Content-Type"]
        r.headers[b"foo"] = "bar"
        headers = list(r.headers.items())
        self.assertEqual(len(headers), 1)
        self.assertEqual(headers[0], ("foo", "bar"))
        self.assertIsInstance(headers[0][0], str)

        r = HttpResponse()
        with self.assertRaises(UnicodeError):
            r.headers.__setitem__("føø", "bar")
        with self.assertRaises(UnicodeError):
            r.headers.__setitem__("føø".encode(), "bar")

    def test_long_line(self):
        # Bug #20889: long lines trigger newlines to be added to headers
        # (which is not allowed due to bug #10188)
        h = HttpResponse()
        f = b"zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz a\xcc\x88"
        f = f.decode("utf-8")
        h.headers["Content-Disposition"] = 'attachment; filename="%s"' % f
        # This one is triggering https://bugs.python.org/issue20747, that is Python
        # will itself insert a newline in the header
        h.headers[
            "Content-Disposition"
        ] = 'attachment; filename="EdelRot_Blu\u0308te (3)-0.JPG"'

    def test_newlines_in_headers(self):
        # Bug #10188: Do not allow newlines in headers (CR or LF)
        r = HttpResponse()
        with self.assertRaises(BadHeaderError):
            r.headers.__setitem__("test\rstr", "test")
        with self.assertRaises(BadHeaderError):
            r.headers.__setitem__("test\nstr", "test")

    def test_dict_behavior(self):
        """
        Test for bug #14020: Make HttpResponse.get work like dict.get
        """
        r = HttpResponse()
        self.assertIsNone(r.get("test"))

    def test_non_string_content(self):
        # Bug 16494: HttpResponse should behave consistently with non-strings
        r = HttpResponse(12345)
        self.assertEqual(r.content, b"12345")

        # test content via property
        r = HttpResponse()
        r.content = 12345
        self.assertEqual(r.content, b"12345")

    def test_memoryview_content(self):
        r = HttpResponse(memoryview(b"memoryview"))
        self.assertEqual(r.content, b"memoryview")

    def test_iter_content(self):
        r = HttpResponse(["abc", "def", "ghi"])
        self.assertEqual(r.content, b"abcdefghi")

        # test iter content via property
        r = HttpResponse()
        r.content = ["idan", "alex", "jacob"]
        self.assertEqual(r.content, b"idanalexjacob")

        r = HttpResponse()
        r.content = [1, 2, 3]
        self.assertEqual(r.content, b"123")

        # test odd inputs
        r = HttpResponse()
        r.content = ["1", "2", 3, "\u079e"]
        # '\xde\x9e' == unichr(1950).encode()
        self.assertEqual(r.content, b"123\xde\x9e")

        # .content can safely be accessed multiple times.
        r = HttpResponse(iter(["hello", "world"]))
        self.assertEqual(r.content, r.content)
        self.assertEqual(r.content, b"helloworld")
        # __iter__ can safely be called multiple times (#20187).
        self.assertEqual(b"".join(r), b"helloworld")
        self.assertEqual(b"".join(r), b"helloworld")
        # Accessing .content still works.
        self.assertEqual(r.content, b"helloworld")

        # Accessing .content also works if the response was iterated first.
        r = HttpResponse(iter(["hello", "world"]))
        self.assertEqual(b"".join(r), b"helloworld")
        self.assertEqual(r.content, b"helloworld")

        # Additional content can be written to the response.
        r = HttpResponse(iter(["hello", "world"]))
        self.assertEqual(r.content, b"helloworld")
        r.write("!")
        self.assertEqual(r.content, b"helloworld!")

    def test_iterator_isnt_rewound(self):
        # Regression test for #13222
        r = HttpResponse("abc")
        i = iter(r)
        self.assertEqual(list(i), [b"abc"])
        self.assertEqual(list(i), [])

    def test_lazy_content(self):
        r = HttpResponse(lazystr("helloworld"))
        self.assertEqual(r.content, b"helloworld")

    def test_file_interface(self):
        r = HttpResponse()
        r.write(b"hello")
        self.assertEqual(r.tell(), 5)
        r.write("привет")
        self.assertEqual(r.tell(), 17)

        r = HttpResponse(["abc"])
        r.write("def")
        self.assertEqual(r.tell(), 6)
        self.assertEqual(r.content, b"abcdef")

        # with Content-Encoding header
        r = HttpResponse()
        r.headers["Content-Encoding"] = "winning"
        r.write(b"abc")
        r.write(b"def")
        self.assertEqual(r.content, b"abcdef")

    def test_stream_interface(self):
        r = HttpResponse("asdf")
        self.assertEqual(r.getvalue(), b"asdf")

        r = HttpResponse()
        self.assertIs(r.writable(), True)
        r.writelines(["foo\n", "bar\n", "baz\n"])
        self.assertEqual(r.content, b"foo\nbar\nbaz\n")

    def test_unsafe_redirect(self):
        bad_urls = [
            'data:text/html,<script>window.alert("xss")</script>',
            "mailto:test@example.com",
            "file:///etc/passwd",
        ]
        for url in bad_urls:
            with self.assertRaises(DisallowedRedirect):
                HttpResponseRedirect(url)
            with self.assertRaises(DisallowedRedirect):
                HttpResponsePermanentRedirect(url)

    def test_header_deletion(self):
        r = HttpResponse("hello")
        r.headers["X-Foo"] = "foo"
        del r.headers["X-Foo"]
        self.assertNotIn("X-Foo", r.headers)
        # del doesn't raise a KeyError on nonexistent headers.
        del r.headers["X-Foo"]

    def test_instantiate_with_headers(self):
        r = HttpResponse("hello", headers={"X-Foo": "foo"})
        self.assertEqual(r.headers["X-Foo"], "foo")
        self.assertEqual(r.headers["x-foo"], "foo")

    def test_content_type(self):
        r = HttpResponse("hello", content_type="application/json")
        self.assertEqual(r.headers["Content-Type"], "application/json")

    def test_content_type_headers(self):
        r = HttpResponse("hello", headers={"Content-Type": "application/json"})
        self.assertEqual(r.headers["Content-Type"], "application/json")

    def test_content_type_mutually_exclusive(self):
        msg = (
            "'headers' must not contain 'Content-Type' when the "
            "'content_type' parameter is provided."
        )
        with self.assertRaisesMessage(ValueError, msg):
            HttpResponse(
                "hello",
                content_type="application/json",
                headers={"Content-Type": "text/csv"},
            )
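
    def test_charset_from_content_type_sketch(self):
        # Illustrative sketch, assuming the documented behavior that the
        # charset declared in content_type is exposed as response.charset and
        # used to encode str content.
        r = HttpResponse("café", content_type="text/plain; charset=iso-8859-1")
        self.assertEqual(r.charset, "iso-8859-1")
        self.assertEqual(r.content, "café".encode("iso-8859-1"))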


class HttpResponseSubclassesTests(SimpleTestCase):
    def test_redirect(self):
        response = HttpResponseRedirect("/redirected/")
        self.assertEqual(response.status_code, 302)
        # Standard HttpResponse init args can be used
        response = HttpResponseRedirect(
            "/redirected/",
            content="The resource has temporarily moved",
            content_type="text/html",
        )
        self.assertContains(
            response, "The resource has temporarily moved", status_code=302
        )
        self.assertEqual(response.url, response.headers["Location"])

    def test_redirect_lazy(self):
        """Make sure HttpResponseRedirect works with lazy strings."""
        r = HttpResponseRedirect(lazystr("/redirected/"))
        self.assertEqual(r.url, "/redirected/")

    def test_redirect_repr(self):
        response = HttpResponseRedirect("/redirected/")
        expected = (
            '<HttpResponseRedirect status_code=302, "text/html; charset=utf-8", '
            'url="/redirected/">'
        )
        self.assertEqual(repr(response), expected)

    def test_invalid_redirect_repr(self):
        """
        If HttpResponseRedirect raises DisallowedRedirect, its __repr__()
        should work (in the debug view, for example).
        """
        response = HttpResponseRedirect.__new__(HttpResponseRedirect)
        with self.assertRaisesMessage(
            DisallowedRedirect, "Unsafe redirect to URL with protocol 'ssh'"
        ):
            HttpResponseRedirect.__init__(response, "ssh://foo")
        expected = (
            '<HttpResponseRedirect status_code=302, "text/html; charset=utf-8", '
            'url="ssh://foo">'
        )
        self.assertEqual(repr(response), expected)

    def test_not_modified(self):
        response = HttpResponseNotModified()
        self.assertEqual(response.status_code, 304)
        # 304 responses should not have content/content-type
        with self.assertRaises(AttributeError):
            response.content = "Hello dear"
        self.assertNotIn("content-type", response)

    def test_not_modified_repr(self):
        response = HttpResponseNotModified()
        self.assertEqual(repr(response), "<HttpResponseNotModified status_code=304>")

    def test_not_allowed(self):
        response = HttpResponseNotAllowed(["GET"])
        self.assertEqual(response.status_code, 405)
        # Standard HttpResponse init args can be used
        response = HttpResponseNotAllowed(
            ["GET"], content="Only the GET method is allowed", content_type="text/html"
        )
        self.assertContains(response, "Only the GET method is allowed", status_code=405)

    def test_not_allowed_repr(self):
        response = HttpResponseNotAllowed(["GET", "OPTIONS"], content_type="text/plain")
        expected = (
            '<HttpResponseNotAllowed [GET, OPTIONS] status_code=405, "text/plain">'
        )
        self.assertEqual(repr(response), expected)

    def test_not_allowed_repr_no_content_type(self):
        response = HttpResponseNotAllowed(("GET", "POST"))
        del response.headers["Content-Type"]
        self.assertEqual(
            repr(response), "<HttpResponseNotAllowed [GET, POST] status_code=405>"
        )
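
    def test_permanent_redirect_sketch(self):
        # Illustrative sketch: the permanent variant differs only in its 301
        # status code; the Location header and the url attribute behave the
        # same way as for HttpResponseRedirect.
        response = HttpResponsePermanentRedirect("/redirected/?q=1")
        self.assertEqual(response.status_code, 301)
        self.assertEqual(response.url, "/redirected/?q=1")
        self.assertEqual(response.headers["Location"], "/redirected/?q=1")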


class JsonResponseTests(SimpleTestCase):
    def test_json_response_non_ascii(self):
        data = {"key": "łóżko"}
        response = JsonResponse(data)
        self.assertEqual(json.loads(response.content.decode()), data)

    def test_json_response_raises_type_error_with_default_setting(self):
        with self.assertRaisesMessage(
            TypeError,
            "In order to allow non-dict objects to be serialized set the "
            "safe parameter to False",
        ):
            JsonResponse([1, 2, 3])

    def test_json_response_text(self):
        response = JsonResponse("foobar", safe=False)
        self.assertEqual(json.loads(response.content.decode()), "foobar")

    def test_json_response_list(self):
        response = JsonResponse(["foo", "bar"], safe=False)
        self.assertEqual(json.loads(response.content.decode()), ["foo", "bar"])

    def test_json_response_uuid(self):
        u = uuid.uuid4()
        response = JsonResponse(u, safe=False)
        self.assertEqual(json.loads(response.content.decode()), str(u))

    def test_json_response_custom_encoder(self):
        class CustomDjangoJSONEncoder(DjangoJSONEncoder):
            def encode(self, o):
                return json.dumps({"foo": "bar"})

        response = JsonResponse({}, encoder=CustomDjangoJSONEncoder)
        self.assertEqual(json.loads(response.content.decode()), {"foo": "bar"})

    def test_json_response_passing_arguments_to_json_dumps(self):
        response = JsonResponse({"foo": "bar"}, json_dumps_params={"indent": 2})
        # json.dumps() with indent=2 indents the key by two spaces.
        self.assertEqual(response.content.decode(), '{\n  "foo": "bar"\n}')
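
    def test_json_response_default_content_type_sketch(self):
        # Illustrative sketch: JsonResponse documents application/json as its
        # default content type.
        response = JsonResponse({"foo": "bar"})
        self.assertEqual(response.headers["Content-Type"], "application/json")
        self.assertEqual(json.loads(response.content.decode()), {"foo": "bar"})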


class StreamingHttpResponseTests(SimpleTestCase):
    def test_streaming_response(self):
        r = StreamingHttpResponse(iter(["hello", "world"]))

        # iterating over the response itself yields bytestring chunks.
        chunks = list(r)
        self.assertEqual(chunks, [b"hello", b"world"])
        for chunk in chunks:
            self.assertIsInstance(chunk, bytes)

        # and the response can only be iterated once.
        self.assertEqual(list(r), [])

        # even when a sequence that can be iterated many times, like a list,
        # is given as content.
        r = StreamingHttpResponse(["abc", "def"])
        self.assertEqual(list(r), [b"abc", b"def"])
        self.assertEqual(list(r), [])

        # iterating over strings still yields bytestring chunks.
        r.streaming_content = iter(["hello", "café"])
        chunks = list(r)
        # '\xc3\xa9' == unichr(233).encode()
        self.assertEqual(chunks, [b"hello", b"caf\xc3\xa9"])
        for chunk in chunks:
            self.assertIsInstance(chunk, bytes)

        # streaming responses don't have a `content` attribute.
        self.assertFalse(hasattr(r, "content"))

        # and you can't accidentally assign to a `content` attribute.
        with self.assertRaises(AttributeError):
            r.content = "xyz"

        # but they do have a `streaming_content` attribute.
        self.assertTrue(hasattr(r, "streaming_content"))

        # that exists so we can check if a response is streaming, and wrap or
        # replace the content iterator.
        r.streaming_content = iter(["abc", "def"])
        r.streaming_content = (chunk.upper() for chunk in r.streaming_content)
        self.assertEqual(list(r), [b"ABC", b"DEF"])

        # coercing a streaming response to bytes doesn't return a complete HTTP
        # message like a regular response does. it only gives us the headers.
        r = StreamingHttpResponse(iter(["hello", "world"]))
        self.assertEqual(bytes(r), b"Content-Type: text/html; charset=utf-8")

        # and this won't consume its content.
        self.assertEqual(list(r), [b"hello", b"world"])

        # additional content cannot be written to the response.
        r = StreamingHttpResponse(iter(["hello", "world"]))
        with self.assertRaises(Exception):
            r.write("!")

        # and we can't tell the current position.
        with self.assertRaises(Exception):
            r.tell()

        r = StreamingHttpResponse(iter(["hello", "world"]))
        self.assertEqual(r.getvalue(), b"helloworld")

    def test_repr(self):
        r = StreamingHttpResponse(iter(["hello", "café"]))
        self.assertEqual(
            repr(r),
            '<StreamingHttpResponse status_code=200, "text/html; charset=utf-8">',
        )
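
    def test_streaming_flag_sketch(self):
        # Illustrative sketch: the documented streaming attribute is how
        # middleware tells the two response types apart.
        self.assertIs(StreamingHttpResponse(iter([])).streaming, True)
        self.assertIs(HttpResponse().streaming, False)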


class FileCloseTests(SimpleTestCase):
    def setUp(self):
        # Disable the request_finished signal during this test
        # to avoid interfering with the database connection.
        request_finished.disconnect(close_old_connections)

    def tearDown(self):
        request_finished.connect(close_old_connections)

    def test_response(self):
        filename = os.path.join(os.path.dirname(__file__), "abc.txt")

        # A file passed to HttpResponse is read and closed as soon as the
        # content is set; closing the response afterwards is still safe.
        file1 = open(filename)
        r = HttpResponse(file1)
        self.assertTrue(file1.closed)
        r.close()

        # when multiple files are assigned as content, make sure they are all
        # closed with the response.
        file1 = open(filename)
        file2 = open(filename)
        r = HttpResponse(file1)
        r.content = file2
        self.assertTrue(file1.closed)
        self.assertTrue(file2.closed)

    def test_streaming_response(self):
        filename = os.path.join(os.path.dirname(__file__), "abc.txt")

        # file isn't closed until we close the response.
        file1 = open(filename)
        r = StreamingHttpResponse(file1)
        self.assertFalse(file1.closed)
        r.close()
        self.assertTrue(file1.closed)

        # when multiple files are assigned as content, make sure they are all
        # closed with the response.
        file1 = open(filename)
        file2 = open(filename)
        r = StreamingHttpResponse(file1)
        r.streaming_content = file2
        self.assertFalse(file1.closed)
        self.assertFalse(file2.closed)
        r.close()
        self.assertTrue(file1.closed)
        self.assertTrue(file2.closed)


class CookieTests(unittest.TestCase):
    def test_encode(self):
        """Semicolons and commas are encoded."""
        c = SimpleCookie()
        c["test"] = "An,awkward;value"
        self.assertNotIn(";", c.output().rstrip(";"))  # IE compat
        self.assertNotIn(",", c.output().rstrip(";"))  # Safari compat

    def test_decode(self):
        """Semicolons and commas are decoded."""
        c = SimpleCookie()
        c["test"] = "An,awkward;value"
        c2 = SimpleCookie()
        c2.load(c.output()[12:])
        self.assertEqual(c["test"].value, c2["test"].value)
        c3 = parse_cookie(c.output()[12:])
        self.assertEqual(c["test"].value, c3["test"])

    def test_nonstandard_keys(self):
        """
        A single non-standard cookie name doesn't affect all cookies (#13007).
        """
        self.assertIn("good_cookie", parse_cookie("good_cookie=yes;bad:cookie=yes"))

    def test_repeated_nonstandard_keys(self):
        """
        A repeated non-standard name doesn't affect all cookies (#15852).
        """
        self.assertIn("good_cookie", parse_cookie("a:=b; a:=c; good_cookie=yes"))

    def test_python_cookies(self):
        """
        Test cases copied from Python's Lib/test/test_http_cookies.py
        """
        self.assertEqual(
            parse_cookie("chips=ahoy; vienna=finger"),
            {"chips": "ahoy", "vienna": "finger"},
        )
        # Here parse_cookie() differs from Python's cookie parsing in that it
        # treats all semicolons as delimiters, even within quotes.
        self.assertEqual(
            parse_cookie('keebler="E=mc2; L=\\"Loves\\"; fudge=\\012;"'),
            {"keebler": '"E=mc2', "L": '\\"Loves\\"', "fudge": "\\012", "": '"'},
        )
        # Illegal cookies that have an '=' char in an unquoted value.
        self.assertEqual(parse_cookie("keebler=E=mc2"), {"keebler": "E=mc2"})
        # Cookies with ':' character in their name.
        self.assertEqual(
            parse_cookie("key:term=value:term"), {"key:term": "value:term"}
        )
        # Cookies with '[' and ']'.
        self.assertEqual(
            parse_cookie("a=b; c=[; d=r; f=h"), {"a": "b", "c": "[", "d": "r", "f": "h"}
        )

    def test_cookie_edgecases(self):
        # Cookies that RFC6265 allows.
        self.assertEqual(
            parse_cookie("a=b; Domain=example.com"), {"a": "b", "Domain": "example.com"}
        )
        # parse_cookie() has historically kept only the last cookie with the
        # same name.
        self.assertEqual(parse_cookie("a=b; h=i; a=c"), {"a": "c", "h": "i"})

    def test_invalid_cookies(self):
        """
        Cookie strings that go against RFC6265 but browsers will send if set
        via document.cookie.
        """
        # Chunks without an equals sign appear as unnamed values per
        # https://bugzilla.mozilla.org/show_bug.cgi?id=169091
        self.assertIn(
            "django_language", parse_cookie("abc=def; unnamed; django_language=en")
        )
        # Even a double quote may be an unnamed value.
        self.assertEqual(parse_cookie('a=b; "; c=d'), {"a": "b", "": '"', "c": "d"})
        # Spaces in names and values, and an equals sign in values.
        self.assertEqual(
            parse_cookie("a b c=d e = f; gh=i"), {"a b c": "d e = f", "gh": "i"}
        )
        # More characters the spec forbids.
        self.assertEqual(
            parse_cookie('a b,c<>@:/[]?{}=d " =e,f g'),
            {"a b,c<>@:/[]?{}": 'd " =e,f g'},
        )
        # Unicode characters. The spec only allows ASCII.
        self.assertEqual(
            parse_cookie("saint=André Bessette"), {"saint": "André Bessette"}
        )
        # Browsers don't send extra whitespace or semicolons in Cookie headers,
        # but parse_cookie() should parse whitespace the same way
        # document.cookie parses whitespace.
        self.assertEqual(
            parse_cookie(" = b ; ; = ; c = ; "), {"": "b", "c": ""}
        )

    def test_samesite(self):
        c = SimpleCookie("name=value; samesite=lax; httponly")
        self.assertEqual(c["name"]["samesite"], "lax")
        self.assertIn("SameSite=lax", c.output())

    def test_httponly_after_load(self):
        c = SimpleCookie()
        c.load("name=val")
        c["name"]["httponly"] = True
        self.assertTrue(c["name"]["httponly"])

    def test_load_dict(self):
        c = SimpleCookie()
        c.load({"name": "val"})
        self.assertEqual(c["name"].value, "val")

    def test_pickle(self):
        rawdata = 'Customer="WILE_E_COYOTE"; Path=/acme; Version=1'
        expected_output = "Set-Cookie: %s" % rawdata

        C = SimpleCookie()
        C.load(rawdata)
        self.assertEqual(C.output(), expected_output)

        for proto in range(pickle.HIGHEST_PROTOCOL + 1):
            C1 = pickle.loads(pickle.dumps(C, protocol=proto))
            self.assertEqual(C1.output(), expected_output)
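
    def test_output_and_load_roundtrip_sketch(self):
        # Illustrative sketch relying on the Set-Cookie output format already
        # asserted in test_pickle(): a simple morsel renders as one header
        # line, and load() parses that name/value pair back.
        c = SimpleCookie()
        c["key"] = "value"
        self.assertEqual(c.output(), "Set-Cookie: key=value")
        c2 = SimpleCookie()
        c2.load("key=value")
        self.assertEqual(c2["key"].value, "value")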


class HttpResponseHeadersTestCase(SimpleTestCase):
    """Headers can be set and read by treating HttpResponse like a dictionary."""

    def test_headers(self):
        response = HttpResponse()
        response["X-Foo"] = "bar"
        self.assertEqual(response["X-Foo"], "bar")
        self.assertEqual(response.headers["X-Foo"], "bar")
        self.assertIn("X-Foo", response)
        self.assertIs(response.has_header("X-Foo"), True)
        del response["X-Foo"]
        self.assertNotIn("X-Foo", response)
        self.assertNotIn("X-Foo", response.headers)
        # del doesn't raise a KeyError on nonexistent headers.
        del response["X-Foo"]

    def test_headers_as_iterable_of_tuple_pairs(self):
        response = HttpResponse(headers=(("X-Foo", "bar"),))
        self.assertEqual(response["X-Foo"], "bar")

    def test_headers_bytestring(self):
        response = HttpResponse()
        response["X-Foo"] = b"bar"
        self.assertEqual(response["X-Foo"], "bar")
        self.assertEqual(response.headers["X-Foo"], "bar")

    def test_newlines_in_headers(self):
        response = HttpResponse()
        with self.assertRaises(BadHeaderError):
            response["test\rstr"] = "test"
        with self.assertRaises(BadHeaderError):
            response["test\nstr"] = "test"
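
    def test_case_insensitive_lookup_sketch(self):
        # Illustrative sketch: dictionary-style header lookup on the response
        # is documented as case-insensitive.
        response = HttpResponse()
        response["X-Foo"] = "bar"
        self.assertEqual(response["x-foo"], "bar")
        self.assertEqual(response.headers["X-FOO"], "bar")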