# -*- encoding: utf-8 -*-
from __future__ import unicode_literals

from datetime import datetime, timedelta
from io import BytesIO
from itertools import chain
import time
from unittest import skipIf
import warnings

from django.db import connection, connections, DEFAULT_DB_ALIAS
from django.core import signals
from django.core.exceptions import SuspiciousOperation
from django.core.handlers.wsgi import WSGIRequest, LimitedStream
from django.http import HttpRequest, HttpResponse, parse_cookie, build_request_repr, UnreadablePostError
from django.test import SimpleTestCase, TransactionTestCase
from django.test.client import FakePayload
from django.test.utils import override_settings, str_prefix
from django.utils import six
from django.utils.http import cookie_date, urlencode
from django.utils.timezone import utc


class RequestsTests(SimpleTestCase):
    def test_httprequest(self):
        request = HttpRequest()
        self.assertEqual(list(request.GET.keys()), [])
        self.assertEqual(list(request.POST.keys()), [])
        self.assertEqual(list(request.COOKIES.keys()), [])
        self.assertEqual(list(request.META.keys()), [])

    def test_httprequest_repr(self):
        request = HttpRequest()
        request.path = '/somepath/'
        request.GET = {'get-key': 'get-value'}
        request.POST = {'post-key': 'post-value'}
        request.COOKIES = {'post-key': 'post-value'}
        request.META = {'post-key': 'post-value'}
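        # str_prefix() fills the %(_)s placeholders with "u" on Python 2 and
        # "" on Python 3, so the expected repr matches both string types.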
        self.assertEqual(repr(request), str_prefix("<HttpRequest\npath:/somepath/,\nGET:{%(_)s'get-key': %(_)s'get-value'},\nPOST:{%(_)s'post-key': %(_)s'post-value'},\nCOOKIES:{%(_)s'post-key': %(_)s'post-value'},\nMETA:{%(_)s'post-key': %(_)s'post-value'}>"))
        self.assertEqual(build_request_repr(request), repr(request))
        self.assertEqual(build_request_repr(request, path_override='/otherpath/', GET_override={'a': 'b'}, POST_override={'c': 'd'}, COOKIES_override={'e': 'f'}, META_override={'g': 'h'}),
            str_prefix("<HttpRequest\npath:/otherpath/,\nGET:{%(_)s'a': %(_)s'b'},\nPOST:{%(_)s'c': %(_)s'd'},\nCOOKIES:{%(_)s'e': %(_)s'f'},\nMETA:{%(_)s'g': %(_)s'h'}>"))

    def test_wsgirequest(self):
        request = WSGIRequest({'PATH_INFO': 'bogus', 'REQUEST_METHOD': 'bogus', 'wsgi.input': BytesIO(b'')})
        self.assertEqual(list(request.GET.keys()), [])
        self.assertEqual(list(request.POST.keys()), [])
        self.assertEqual(list(request.COOKIES.keys()), [])
        self.assertEqual(set(request.META.keys()), set(['PATH_INFO', 'REQUEST_METHOD', 'SCRIPT_NAME', 'wsgi.input']))
        self.assertEqual(request.META['PATH_INFO'], 'bogus')
        self.assertEqual(request.META['REQUEST_METHOD'], 'bogus')
        self.assertEqual(request.META['SCRIPT_NAME'], '')

    def test_wsgirequest_with_script_name(self):
        """
        Ensure that the request's path is correctly assembled, regardless of
        whether or not the SCRIPT_NAME has a trailing slash.
        Refs #20169.
        """
        # With trailing slash
        request = WSGIRequest({'PATH_INFO': '/somepath/', 'SCRIPT_NAME': '/PREFIX/', 'REQUEST_METHOD': 'get', 'wsgi.input': BytesIO(b'')})
        self.assertEqual(request.path, '/PREFIX/somepath/')
        # Without trailing slash
        request = WSGIRequest({'PATH_INFO': '/somepath/', 'SCRIPT_NAME': '/PREFIX', 'REQUEST_METHOD': 'get', 'wsgi.input': BytesIO(b'')})
        self.assertEqual(request.path, '/PREFIX/somepath/')

    def test_wsgirequest_with_force_script_name(self):
        """
        Ensure that the FORCE_SCRIPT_NAME setting takes precedence over the
        request's SCRIPT_NAME environment parameter.
        Refs #20169.
        """
        with override_settings(FORCE_SCRIPT_NAME='/FORCED_PREFIX/'):
            request = WSGIRequest({'PATH_INFO': '/somepath/', 'SCRIPT_NAME': '/PREFIX/', 'REQUEST_METHOD': 'get', 'wsgi.input': BytesIO(b'')})
            self.assertEqual(request.path, '/FORCED_PREFIX/somepath/')

    def test_wsgirequest_path_with_force_script_name_trailing_slash(self):
        """
        Ensure that the request's path is correctly assembled, regardless of
        whether or not the FORCE_SCRIPT_NAME setting has a trailing slash.
        Refs #20169.
        """
        # With trailing slash
        with override_settings(FORCE_SCRIPT_NAME='/FORCED_PREFIX/'):
            request = WSGIRequest({'PATH_INFO': '/somepath/', 'REQUEST_METHOD': 'get', 'wsgi.input': BytesIO(b'')})
            self.assertEqual(request.path, '/FORCED_PREFIX/somepath/')
        # Without trailing slash
        with override_settings(FORCE_SCRIPT_NAME='/FORCED_PREFIX'):
            request = WSGIRequest({'PATH_INFO': '/somepath/', 'REQUEST_METHOD': 'get', 'wsgi.input': BytesIO(b'')})
            self.assertEqual(request.path, '/FORCED_PREFIX/somepath/')

    def test_wsgirequest_repr(self):
        request = WSGIRequest({'PATH_INFO': '/somepath/', 'REQUEST_METHOD': 'get', 'wsgi.input': BytesIO(b'')})
        request.GET = {'get-key': 'get-value'}
        request.POST = {'post-key': 'post-value'}
        request.COOKIES = {'post-key': 'post-value'}
        request.META = {'post-key': 'post-value'}
        self.assertEqual(repr(request), str_prefix("<WSGIRequest\npath:/somepath/,\nGET:{%(_)s'get-key': %(_)s'get-value'},\nPOST:{%(_)s'post-key': %(_)s'post-value'},\nCOOKIES:{%(_)s'post-key': %(_)s'post-value'},\nMETA:{%(_)s'post-key': %(_)s'post-value'}>"))
        self.assertEqual(build_request_repr(request), repr(request))
        self.assertEqual(build_request_repr(request, path_override='/otherpath/', GET_override={'a': 'b'}, POST_override={'c': 'd'}, COOKIES_override={'e': 'f'}, META_override={'g': 'h'}),
            str_prefix("<WSGIRequest\npath:/otherpath/,\nGET:{%(_)s'a': %(_)s'b'},\nPOST:{%(_)s'c': %(_)s'd'},\nCOOKIES:{%(_)s'e': %(_)s'f'},\nMETA:{%(_)s'g': %(_)s'h'}>"))

    def test_wsgirequest_path_info(self):
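        # Per PEP 3333, the WSGI environ carries "native strings": on Python 3
        # the raw request bytes are decoded as ISO-8859-1 before being placed
        # in the environ, which the helper below simulates.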
        def wsgi_str(path_info):
            path_info = path_info.encode('utf-8')           # Actual URL sent by the browser (bytestring)
            if six.PY3:
                path_info = path_info.decode('iso-8859-1')  # Value in the WSGI environ dict (native string)
            return path_info
        # Regression for #19468
        request = WSGIRequest({'PATH_INFO': wsgi_str("/سلام/"), 'REQUEST_METHOD': 'get', 'wsgi.input': BytesIO(b'')})
        self.assertEqual(request.path, "/سلام/")

    def test_parse_cookie(self):
        self.assertEqual(parse_cookie('invalid@key=true'), {})

    def test_httprequest_location(self):
        request = HttpRequest()
        self.assertEqual(request.build_absolute_uri(location="https://www.example.com/asdf"),
            'https://www.example.com/asdf')

        request.get_host = lambda: 'www.example.com'
        request.path = ''
        self.assertEqual(request.build_absolute_uri(location="/path/with:colons"),
            'http://www.example.com/path/with:colons')

    def test_near_expiration(self):
        "Cookie will expire when a near expiration time is provided"
        response = HttpResponse()
        # There is a timing weakness in this test; the expected result for
        # max-age requires that there be a very slight difference between the
        # evaluated expiration time and the time evaluated in set_cookie(). If
        # this difference doesn't exist, the cookie time will be 1 second
        # larger. To avoid the problem, put in a quick sleep, which guarantees
        # that there will be a time difference.
        expires = datetime.utcnow() + timedelta(seconds=10)
        time.sleep(0.001)
        response.set_cookie('datetime', expires=expires)
        datetime_cookie = response.cookies['datetime']
        self.assertEqual(datetime_cookie['max-age'], 10)

    def test_aware_expiration(self):
        "Cookie accepts an aware datetime as expiration time"
        response = HttpResponse()
        expires = (datetime.utcnow() + timedelta(seconds=10)).replace(tzinfo=utc)
        time.sleep(0.001)
        response.set_cookie('datetime', expires=expires)
        datetime_cookie = response.cookies['datetime']
        self.assertEqual(datetime_cookie['max-age'], 10)

    def test_far_expiration(self):
        "Cookie will expire when a distant expiration time is provided"
        response = HttpResponse()
        response.set_cookie('datetime', expires=datetime(2028, 1, 1, 4, 5, 6))
        datetime_cookie = response.cookies['datetime']
        self.assertEqual(datetime_cookie['expires'], 'Sat, 01-Jan-2028 04:05:06 GMT')

    def test_max_age_expiration(self):
        "Cookie will expire if max_age is provided"
        response = HttpResponse()
        response.set_cookie('max_age', max_age=10)
        max_age_cookie = response.cookies['max_age']
        self.assertEqual(max_age_cookie['max-age'], 10)
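        # cookie_date() renders the timestamp in the cookie "expires" format
        # seen above, e.g. "Sat, 01-Jan-2028 04:05:06 GMT".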
        self.assertEqual(max_age_cookie['expires'], cookie_date(time.time()+10))

    def test_httponly_cookie(self):
        response = HttpResponse()
        response.set_cookie('example', httponly=True)
        example_cookie = response.cookies['example']
        # A compat cookie may be in use -- check that it has worked
        # both as an output string, and using the cookie attributes
        self.assertTrue('; httponly' in str(example_cookie))
        self.assertTrue(example_cookie['httponly'])

    def test_limited_stream(self):
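        # LimitedStream wraps the WSGI input and never returns more than the
        # declared limit, regardless of how much the underlying stream holds.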
        # Read all of a limited stream
        stream = LimitedStream(BytesIO(b'test'), 2)
        self.assertEqual(stream.read(), b'te')
        # Reading again returns nothing.
        self.assertEqual(stream.read(), b'')

        # Read a number of characters greater than the stream has to offer
        stream = LimitedStream(BytesIO(b'test'), 2)
        self.assertEqual(stream.read(5), b'te')
        # Reading again returns nothing.
        self.assertEqual(stream.readline(5), b'')

        # Read sequentially from a stream
        stream = LimitedStream(BytesIO(b'12345678'), 8)
        self.assertEqual(stream.read(5), b'12345')
        self.assertEqual(stream.read(5), b'678')
        # Reading again returns nothing.
        self.assertEqual(stream.readline(5), b'')

        # Read lines from a stream
        stream = LimitedStream(BytesIO(b'1234\n5678\nabcd\nefgh\nijkl'), 24)
        # Read a full line, unconditionally
        self.assertEqual(stream.readline(), b'1234\n')
        # Read a number of characters less than a line
        self.assertEqual(stream.readline(2), b'56')
        # Read the rest of the partial line
        self.assertEqual(stream.readline(), b'78\n')
        # Read a full line, with a character limit greater than the line length
        self.assertEqual(stream.readline(6), b'abcd\n')
        # Read the next line, deliberately terminated at the line end
        self.assertEqual(stream.readline(4), b'efgh')
        # Read the next line... just the line end
        self.assertEqual(stream.readline(), b'\n')
        # Read everything else.
        self.assertEqual(stream.readline(), b'ijkl')

        # Regression for #15018
        # If a stream contains a newline, but the provided length
        # is less than the number of provided characters, the newline
        # doesn't reset the available character count
        stream = LimitedStream(BytesIO(b'1234\nabcdef'), 9)
        self.assertEqual(stream.readline(10), b'1234\n')
        self.assertEqual(stream.readline(3), b'abc')
        # Now expire the available characters
        self.assertEqual(stream.readline(3), b'd')
        # Reading again returns nothing.
        self.assertEqual(stream.readline(2), b'')

        # Same test, but with read, not readline.
        stream = LimitedStream(BytesIO(b'1234\nabcdef'), 9)
        self.assertEqual(stream.read(6), b'1234\na')
        self.assertEqual(stream.read(2), b'bc')
        self.assertEqual(stream.read(2), b'd')
        self.assertEqual(stream.read(2), b'')
        self.assertEqual(stream.read(), b'')

    def test_stream(self):
        payload = FakePayload('name=value')
        request = WSGIRequest({'REQUEST_METHOD': 'POST',
                               'CONTENT_TYPE': 'application/x-www-form-urlencoded',
                               'CONTENT_LENGTH': len(payload),
                               'wsgi.input': payload})
        self.assertEqual(request.read(), b'name=value')

    def test_read_after_value(self):
        """
        Reading from request is allowed after accessing request contents as
        POST or body.
        """
        payload = FakePayload('name=value')
        request = WSGIRequest({'REQUEST_METHOD': 'POST',
                               'CONTENT_TYPE': 'application/x-www-form-urlencoded',
                               'CONTENT_LENGTH': len(payload),
                               'wsgi.input': payload})
        self.assertEqual(request.POST, {'name': ['value']})
        self.assertEqual(request.body, b'name=value')
        self.assertEqual(request.read(), b'name=value')

    def test_value_after_read(self):
        """
        Construction of POST or body is not allowed after reading
        from request.
        """
        payload = FakePayload('name=value')
        request = WSGIRequest({'REQUEST_METHOD': 'POST',
                               'CONTENT_TYPE': 'application/x-www-form-urlencoded',
                               'CONTENT_LENGTH': len(payload),
                               'wsgi.input': payload})
        self.assertEqual(request.read(2), b'na')
        self.assertRaises(Exception, lambda: request.body)
        self.assertEqual(request.POST, {})

    def test_non_ascii_POST(self):
        payload = FakePayload(urlencode({'key': 'España'}))
        request = WSGIRequest({
            'REQUEST_METHOD': 'POST',
            'CONTENT_LENGTH': len(payload),
            'CONTENT_TYPE': 'application/x-www-form-urlencoded',
            'wsgi.input': payload,
        })
        self.assertEqual(request.POST, {'key': ['España']})

    def test_alternate_charset_POST(self):
        """
        Test a POST with a non-UTF-8 payload encoding.
        """
        from django.utils.http import urllib_parse
        payload = FakePayload(urllib_parse.urlencode({'key': 'España'.encode('latin-1')}))
        request = WSGIRequest({
            'REQUEST_METHOD': 'POST',
            'CONTENT_LENGTH': len(payload),
            'CONTENT_TYPE': 'application/x-www-form-urlencoded; charset=iso-8859-1',
            'wsgi.input': payload,
        })
        self.assertEqual(request.POST, {'key': ['España']})

    def test_body_after_POST_multipart_form_data(self):
        """
        Reading body after parsing multipart/form-data is not allowed
        """
        # Because multipart is used for large amounts of data, i.e. file
        # uploads, we don't want the data held in memory twice, and we don't
        # want to silence the error by setting body = '' either.
        payload = FakePayload("\r\n".join([
            '--boundary',
            'Content-Disposition: form-data; name="name"',
            '',
            'value',
            '--boundary--'
            '']))
        request = WSGIRequest({'REQUEST_METHOD': 'POST',
                               'CONTENT_TYPE': 'multipart/form-data; boundary=boundary',
                               'CONTENT_LENGTH': len(payload),
                               'wsgi.input': payload})
        self.assertEqual(request.POST, {'name': ['value']})
        self.assertRaises(Exception, lambda: request.body)

    def test_body_after_POST_multipart_related(self):
        """
        Reading body after parsing multipart that isn't form-data is allowed
        """
        # Ticket #9054
        # There are cases in which the multipart data is related instead of
        # being a binary upload, in which case it should still be accessible
        # via body.
        payload_data = b"\r\n".join([
            b'--boundary',
            b'Content-ID: id; name="name"',
            b'',
            b'value',
            b'--boundary--'
            b''])
        payload = FakePayload(payload_data)
        request = WSGIRequest({'REQUEST_METHOD': 'POST',
                               'CONTENT_TYPE': 'multipart/related; boundary=boundary',
                               'CONTENT_LENGTH': len(payload),
                               'wsgi.input': payload})
        self.assertEqual(request.POST, {})
        self.assertEqual(request.body, payload_data)

    def test_POST_multipart_with_content_length_zero(self):
        """
        Multipart POST requests with Content-Length >= 0 are valid and need to be handled.
        """
        # According to:
        # http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.13
        # every POST request with Content-Length >= 0 is valid, so this test
        # ensures that we handle Content-Length == 0.
        payload = FakePayload("\r\n".join([
            '--boundary',
            'Content-Disposition: form-data; name="name"',
            '',
            'value',
            '--boundary--'
            '']))
        request = WSGIRequest({'REQUEST_METHOD': 'POST',
                               'CONTENT_TYPE': 'multipart/form-data; boundary=boundary',
                               'CONTENT_LENGTH': 0,
                               'wsgi.input': payload})
        self.assertEqual(request.POST, {})

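    # A POST whose content type is not a recognized form encoding leaves
    # request.POST and request.FILES empty; the raw payload stays available
    # through request.body, as the assertions below verify.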
    def test_POST_binary_only(self):
        payload = b'\r\n\x01\x00\x00\x00ab\x00\x00\xcd\xcc,@'
        environ = {'REQUEST_METHOD': 'POST',
                   'CONTENT_TYPE': 'application/octet-stream',
                   'CONTENT_LENGTH': len(payload),
                   'wsgi.input': BytesIO(payload)}
        request = WSGIRequest(environ)
        self.assertEqual(request.POST, {})
        self.assertEqual(request.FILES, {})
        self.assertEqual(request.body, payload)

        # Same test without specifying content-type
        environ.update({'CONTENT_TYPE': '', 'wsgi.input': BytesIO(payload)})
        request = WSGIRequest(environ)
        self.assertEqual(request.POST, {})
        self.assertEqual(request.FILES, {})
        self.assertEqual(request.body, payload)

    def test_read_by_lines(self):
        payload = FakePayload('name=value')
        request = WSGIRequest({'REQUEST_METHOD': 'POST',
                               'CONTENT_TYPE': 'application/x-www-form-urlencoded',
                               'CONTENT_LENGTH': len(payload),
                               'wsgi.input': payload})
        self.assertEqual(list(request), [b'name=value'])

    def test_POST_after_body_read(self):
        """
        POST should be populated even if body is read first
        """
        payload = FakePayload('name=value')
        request = WSGIRequest({'REQUEST_METHOD': 'POST',
                               'CONTENT_TYPE': 'application/x-www-form-urlencoded',
                               'CONTENT_LENGTH': len(payload),
                               'wsgi.input': payload})
        raw_data = request.body
        self.assertEqual(request.POST, {'name': ['value']})

    def test_POST_after_body_read_and_stream_read(self):
        """
        POST should be populated even if body is read first, and then
        the stream is read second.
        """
        payload = FakePayload('name=value')
        request = WSGIRequest({'REQUEST_METHOD': 'POST',
                               'CONTENT_TYPE': 'application/x-www-form-urlencoded',
                               'CONTENT_LENGTH': len(payload),
                               'wsgi.input': payload})
        raw_data = request.body
        self.assertEqual(request.read(1), b'n')
        self.assertEqual(request.POST, {'name': ['value']})

    def test_POST_after_body_read_and_stream_read_multipart(self):
        """
        POST should be populated even if body is read first, and then
        the stream is read second. Using multipart/form-data instead of urlencoded.
        """
        payload = FakePayload("\r\n".join([
            '--boundary',
            'Content-Disposition: form-data; name="name"',
            '',
            'value',
            '--boundary--'
            '']))
        request = WSGIRequest({'REQUEST_METHOD': 'POST',
                               'CONTENT_TYPE': 'multipart/form-data; boundary=boundary',
                               'CONTENT_LENGTH': len(payload),
                               'wsgi.input': payload})
        raw_data = request.body
        # Consume enough data to mess up the parsing:
        self.assertEqual(request.read(13), b'--boundary\r\nC')
        self.assertEqual(request.POST, {'name': ['value']})

    def test_POST_connection_error(self):
        """
        If wsgi.input.read() raises an exception while trying to read() the
        POST, the exception should be identifiable (not a generic IOError).
        """
        class ExplodingBytesIO(BytesIO):
            def read(self, len=0):
                raise IOError("kaboom!")

        payload = b'name=value'
        request = WSGIRequest({'REQUEST_METHOD': 'POST',
                               'CONTENT_TYPE': 'application/x-www-form-urlencoded',
                               'CONTENT_LENGTH': len(payload),
                               'wsgi.input': ExplodingBytesIO(payload)})

        with self.assertRaises(UnreadablePostError):
            request.body

    def test_FILES_connection_error(self):
        """
        If wsgi.input.read() raises an exception while trying to read() the
        FILES, the exception should be identifiable (not a generic IOError).
        """
        class ExplodingBytesIO(BytesIO):
            def read(self, len=0):
                raise IOError("kaboom!")

        payload = b'x'
        request = WSGIRequest({'REQUEST_METHOD': 'POST',
                               'CONTENT_TYPE': 'multipart/form-data; boundary=foo_',
                               'CONTENT_LENGTH': len(payload),
                               'wsgi.input': ExplodingBytesIO(payload)})

        with self.assertRaises(UnreadablePostError):
            request.FILES


class HostValidationTests(SimpleTestCase):
    poisoned_hosts = [
        'example.com@evil.tld',
        'example.com:dr.frankenstein@evil.tld',
        'example.com:dr.frankenstein@evil.tld:80',
        'example.com:80/badpath',
        'example.com: recovermypassword.com',
    ]

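    # Note: in ALLOWED_HOSTS a leading dot (".multitenant.com") matches the
    # domain and any of its subdomains, and matching is case-insensitive;
    # the legit_hosts entries below exercise both behaviors.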
    @override_settings(
        USE_X_FORWARDED_HOST=False,
        ALLOWED_HOSTS=[
            'forward.com', 'example.com', 'internal.com', '12.34.56.78',
            '[2001:19f0:feee::dead:beef:cafe]', 'xn--4ca9at.com',
            '.multitenant.com', 'INSENSITIVE.com',
        ])
    def test_http_get_host(self):
        # Check if X_FORWARDED_HOST is provided.
        request = HttpRequest()
        request.META = {
            'HTTP_X_FORWARDED_HOST': 'forward.com',
            'HTTP_HOST': 'example.com',
            'SERVER_NAME': 'internal.com',
            'SERVER_PORT': 80,
        }
        # X_FORWARDED_HOST is ignored.
        self.assertEqual(request.get_host(), 'example.com')

        # Check if X_FORWARDED_HOST isn't provided.
        request = HttpRequest()
        request.META = {
            'HTTP_HOST': 'example.com',
            'SERVER_NAME': 'internal.com',
            'SERVER_PORT': 80,
        }
        self.assertEqual(request.get_host(), 'example.com')

        # Check if HTTP_HOST isn't provided.
        request = HttpRequest()
        request.META = {
            'SERVER_NAME': 'internal.com',
            'SERVER_PORT': 80,
        }
        self.assertEqual(request.get_host(), 'internal.com')

        # Check if HTTP_HOST isn't provided, and we're on a nonstandard port
        request = HttpRequest()
        request.META = {
            'SERVER_NAME': 'internal.com',
            'SERVER_PORT': 8042,
        }
        self.assertEqual(request.get_host(), 'internal.com:8042')

        legit_hosts = [
            'example.com',
            'example.com:80',
            '12.34.56.78',
            '12.34.56.78:443',
            '[2001:19f0:feee::dead:beef:cafe]',
            '[2001:19f0:feee::dead:beef:cafe]:8080',
            'xn--4ca9at.com',  # Punycode for öäü.com
            'anything.multitenant.com',
            'multitenant.com',
            'insensitive.com',
        ]

        for host in legit_hosts:
            request = HttpRequest()
            request.META = {
                'HTTP_HOST': host,
            }
            request.get_host()

        # Poisoned host headers are rejected as suspicious
        for host in chain(self.poisoned_hosts, ['other.com']):
            with self.assertRaises(SuspiciousOperation):
                request = HttpRequest()
                request.META = {
                    'HTTP_HOST': host,
                }
                request.get_host()

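    # With ALLOWED_HOSTS = ['*'] any well-formed host header is accepted, but
    # malformed (poisoned) headers are still rejected, as the test shows.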
    @override_settings(USE_X_FORWARDED_HOST=True, ALLOWED_HOSTS=['*'])
    def test_http_get_host_with_x_forwarded_host(self):
        # Check if X_FORWARDED_HOST is provided.
        request = HttpRequest()
        request.META = {
            'HTTP_X_FORWARDED_HOST': 'forward.com',
            'HTTP_HOST': 'example.com',
            'SERVER_NAME': 'internal.com',
            'SERVER_PORT': 80,
        }
        # X_FORWARDED_HOST is obeyed.
        self.assertEqual(request.get_host(), 'forward.com')

        # Check if X_FORWARDED_HOST isn't provided.
        request = HttpRequest()
        request.META = {
            'HTTP_HOST': 'example.com',
            'SERVER_NAME': 'internal.com',
            'SERVER_PORT': 80,
        }
        self.assertEqual(request.get_host(), 'example.com')

        # Check if HTTP_HOST isn't provided.
        request = HttpRequest()
        request.META = {
            'SERVER_NAME': 'internal.com',
            'SERVER_PORT': 80,
        }
        self.assertEqual(request.get_host(), 'internal.com')

        # Check if HTTP_HOST isn't provided, and we're on a nonstandard port
        request = HttpRequest()
        request.META = {
            'SERVER_NAME': 'internal.com',
            'SERVER_PORT': 8042,
        }
        self.assertEqual(request.get_host(), 'internal.com:8042')

        # Poisoned host headers are rejected as suspicious
        legit_hosts = [
            'example.com',
            'example.com:80',
            '12.34.56.78',
            '12.34.56.78:443',
            '[2001:19f0:feee::dead:beef:cafe]',
            '[2001:19f0:feee::dead:beef:cafe]:8080',
            'xn--4ca9at.com',  # Punycode for öäü.com
        ]

        for host in legit_hosts:
            request = HttpRequest()
            request.META = {
                'HTTP_HOST': host,
            }
            request.get_host()

        for host in self.poisoned_hosts:
            with self.assertRaises(SuspiciousOperation):
                request = HttpRequest()
                request.META = {
                    'HTTP_HOST': host,
                }
                request.get_host()

    @override_settings(DEBUG=True, ALLOWED_HOSTS=[])
    def test_host_validation_disabled_in_debug_mode(self):
        """If ALLOWED_HOSTS is empty and DEBUG is True, all hosts pass."""
        request = HttpRequest()
        request.META = {
            'HTTP_HOST': 'example.com',
        }
        self.assertEqual(request.get_host(), 'example.com')

        # Invalid hostnames would normally raise a SuspiciousOperation,
        # but we have DEBUG=True, so this check is disabled.
        request = HttpRequest()
        request.META = {
            'HTTP_HOST': "invalid_hostname.com",
        }
        self.assertEqual(request.get_host(), "invalid_hostname.com")

    @override_settings(ALLOWED_HOSTS=[])
    def test_get_host_suggestion_of_allowed_host(self):
        """get_host() makes helpful suggestions if a valid-looking host is not in ALLOWED_HOSTS."""
        msg_invalid_host = "Invalid HTTP_HOST header: %r."
        msg_suggestion = msg_invalid_host + "You may need to add %r to ALLOWED_HOSTS."
        msg_suggestion2 = msg_invalid_host + "The domain name provided is not valid according to RFC 1034/1035"

        for host in [  # Valid-looking hosts
            'example.com',
            '12.34.56.78',
            '[2001:19f0:feee::dead:beef:cafe]',
            'xn--4ca9at.com',  # Punycode for öäü.com
        ]:
            request = HttpRequest()
            request.META = {'HTTP_HOST': host}
            self.assertRaisesMessage(
                SuspiciousOperation,
                msg_suggestion % (host, host),
                request.get_host
            )

        for domain, port in [  # Valid-looking hosts with a port number
            ('example.com', 80),
            ('12.34.56.78', 443),
            ('[2001:19f0:feee::dead:beef:cafe]', 8080),
        ]:
            host = '%s:%s' % (domain, port)
            request = HttpRequest()
            request.META = {'HTTP_HOST': host}
            self.assertRaisesMessage(
                SuspiciousOperation,
                msg_suggestion % (host, domain),
                request.get_host
            )

        for host in self.poisoned_hosts:
            request = HttpRequest()
            request.META = {'HTTP_HOST': host}
            self.assertRaisesMessage(
                SuspiciousOperation,
                msg_invalid_host % host,
                request.get_host
            )

        request = HttpRequest()
        request.META = {'HTTP_HOST': "invalid_hostname.com"}
        self.assertRaisesMessage(
            SuspiciousOperation,
            msg_suggestion2 % "invalid_hostname.com",
            request.get_host
        )


@skipIf(connection.vendor == 'sqlite'
        and connection.settings_dict['TEST_NAME'] in (None, '', ':memory:'),
        "Cannot establish two connections to an in-memory SQLite database.")
class DatabaseConnectionHandlingTests(TransactionTestCase):

    available_apps = []

    def setUp(self):
        # Use a temporary connection to avoid messing with the main one.
        self._old_default_connection = connections['default']
        del connections['default']

    def tearDown(self):
        try:
            connections['default'].close()
        finally:
            connections['default'] = self._old_default_connection

    def test_request_finished_db_state(self):
        # Force closing connection on request end
        connection.settings_dict['CONN_MAX_AGE'] = 0

        # The GET below will not succeed, but it will give a response with a
        # defined ._handler_class. That is needed for sending the
        # request_finished signal.
        response = self.client.get('/')
        # Make sure there is an open connection
        connection.cursor()
        connection.enter_transaction_management()
        signals.request_finished.send(sender=response._handler_class)
        self.assertEqual(len(connection.transaction_state), 0)

    def test_request_finished_failed_connection(self):
        # Force closing connection on request end
        connection.settings_dict['CONN_MAX_AGE'] = 0

        connection.enter_transaction_management()
        connection.set_dirty()
        # Test that the rollback doesn't succeed (a network failure, for
        # example, could cause this).
        def fail_horribly():
            raise Exception("Horrible failure!")
        connection._rollback = fail_horribly
        try:
            with self.assertRaises(Exception):
                signals.request_finished.send(sender=self.__class__)
            # The connection's state wasn't cleaned up
            self.assertEqual(len(connection.transaction_state), 1)
        finally:
            del connection._rollback
        # The connection will be cleaned up on the next request, when the
        # connection works again.
        signals.request_finished.send(sender=self.__class__)
        self.assertEqual(len(connection.transaction_state), 0)