2015-05-09 13:33:26 +08:00
|
|
|
import json
|
2015-01-28 20:35:27 +08:00
|
|
|
import mimetypes
|
2008-06-06 21:39:42 +08:00
|
|
|
import os
|
2015-01-28 20:35:27 +08:00
|
|
|
import sys
|
2011-02-21 18:12:23 +08:00
|
|
|
from copy import copy
|
2017-09-07 01:11:18 +08:00
|
|
|
from functools import partial
|
2017-05-22 18:49:39 +08:00
|
|
|
from http import HTTPStatus
|
2013-07-29 21:50:58 +08:00
|
|
|
from importlib import import_module
|
2012-05-06 01:47:03 +08:00
|
|
|
from io import BytesIO
|
2017-02-07 21:55:44 +08:00
|
|
|
from urllib.parse import unquote_to_bytes, urljoin, urlparse, urlsplit
|
2008-07-02 12:34:05 +08:00
|
|
|
|
2020-02-13 06:15:00 +08:00
|
|
|
from asgiref.sync import sync_to_async
|
|
|
|
|
2007-02-09 21:47:36 +08:00
|
|
|
from django.conf import settings
|
2020-02-13 06:15:00 +08:00
|
|
|
from django.core.handlers.asgi import ASGIRequest
|
2006-08-27 20:24:59 +08:00
|
|
|
from django.core.handlers.base import BaseHandler
|
2017-02-08 01:05:47 +08:00
|
|
|
from django.core.handlers.wsgi import WSGIRequest
|
2018-02-05 18:22:24 +08:00
|
|
|
from django.core.serializers.json import DjangoJSONEncoder
|
2015-01-28 20:35:27 +08:00
|
|
|
from django.core.signals import (
|
|
|
|
got_request_exception, request_finished, request_started,
|
|
|
|
)
|
2013-02-18 18:37:26 +08:00
|
|
|
from django.db import close_old_connections
|
2015-01-28 20:35:27 +08:00
|
|
|
from django.http import HttpRequest, QueryDict, SimpleCookie
|
2006-09-02 17:26:24 +08:00
|
|
|
from django.test import signals
|
2015-01-28 20:35:27 +08:00
|
|
|
from django.test.utils import ContextList
|
2015-12-30 23:51:16 +08:00
|
|
|
from django.urls import resolve
|
2017-02-07 21:55:44 +08:00
|
|
|
from django.utils.encoding import force_bytes
|
2017-09-07 01:11:18 +08:00
|
|
|
from django.utils.functional import SimpleLazyObject
|
Merged Unicode branch into trunk (r4952:5608). This should be fully
backwards compatible for all practical purposes.
Fixed #2391, #2489, #2996, #3322, #3344, #3370, #3406, #3432, #3454, #3492, #3582, #3690, #3878, #3891, #3937, #4039, #4141, #4227, #4286, #4291, #4300, #4452, #4702
git-svn-id: http://code.djangoproject.com/svn/django/trunk@5609 bcc190cf-cafb-0310-a4f2-bffc1f526a37
2007-07-04 20:11:04 +08:00
|
|
|
from django.utils.http import urlencode
|
2007-09-15 03:55:24 +08:00
|
|
|
from django.utils.itercompat import is_iterable
|
2019-10-26 22:42:32 +08:00
|
|
|
from django.utils.regex_helper import _lazy_re_compile
|
2006-08-27 20:24:59 +08:00
|
|
|
|
2020-08-13 19:02:56 +08:00
|
|
|
__all__ = (
    'AsyncClient', 'AsyncRequestFactory', 'Client', 'RedirectCycleError',
    'RequestFactory', 'encode_file', 'encode_multipart',
)


# Fixed (non-random) boundary string so multipart bodies built by the test
# client are deterministic across runs.
BOUNDARY = 'BoUnDaRyStRiNg'
MULTIPART_CONTENT = 'multipart/form-data; boundary=%s' % BOUNDARY
# Captures the charset parameter from a Content-Type header value.
CONTENT_TYPE_RE = _lazy_re_compile(r'.*; charset=([\w\d-]+);?')
# Structured suffix spec: https://tools.ietf.org/html/rfc6838#section-4.2.8
# Matches 'application/json' as well as structured-suffix types such as
# 'application/vnd.api+json'.
JSON_CONTENT_TYPE_RE = _lazy_re_compile(r'^application\/(.+\+)?json')
|
2008-07-02 12:34:05 +08:00
|
|
|
|
2013-08-05 23:07:12 +08:00
|
|
|
|
2014-10-17 21:46:42 +08:00
|
|
|
class RedirectCycleError(Exception):
    """
    Raised when the test client, while following redirects, detects that it
    is going around in a loop.

    The final response seen before the loop was detected is kept on
    ``last_response``, and the accumulated ``redirect_chain`` is copied off
    it for convenient assertions.
    """
    def __init__(self, message, last_response):
        super().__init__(message)
        # Expose both the terminal response and its redirect history.
        self.redirect_chain = last_response.redirect_chain
        self.last_response = last_response
|
|
|
|
|
|
|
|
|
2017-01-19 15:39:46 +08:00
|
|
|
class FakePayload:
    """
    A BytesIO wrapper that behaves like a network request body: it cannot be
    sought, and no more than the declared content length may be read. This
    guarantees that views exercised through the test client can't do anything
    that would fail against a real incoming payload.
    """

    def __init__(self, content=None):
        # Backing buffer plus a running count of bytes still unread.
        self._buffer = BytesIO()
        self._remaining = 0
        # Once reading begins, further writes are rejected.
        self.read_started = False
        if content is not None:
            self.write(content)

    def __len__(self):
        return self._remaining

    def read(self, num_bytes=None):
        """Read up to ``num_bytes`` bytes (all remaining bytes when None)."""
        if not self.read_started:
            # First read: rewind to the beginning and lock out writes.
            self._buffer.seek(0)
            self.read_started = True
        if num_bytes is None:
            num_bytes = self._remaining or 0
        assert self._remaining >= num_bytes, "Cannot read more than the available bytes from the HTTP incoming data."
        data = self._buffer.read(num_bytes)
        self._remaining -= num_bytes
        return data

    def write(self, content):
        """Append ``content`` (coerced to bytes) to the payload."""
        if self.read_started:
            raise ValueError("Unable to write a payload after it's been read")
        content = force_bytes(content)
        self._buffer.write(content)
        self._remaining += len(content)
|
|
|
|
|
2008-07-02 12:34:05 +08:00
|
|
|
|
2012-12-30 22:19:22 +08:00
|
|
|
def closing_iterator_wrapper(iterable, close):
    """
    Yield everything from ``iterable``, then invoke ``close()`` exactly once.

    ``request_finished`` is temporarily disconnected from
    ``close_old_connections`` around the close() call, because close() itself
    fires request_finished and would otherwise tear down the test database
    connections.
    """
    try:
        for item in iterable:
            yield item
    finally:
        request_finished.disconnect(close_old_connections)
        close()  # will fire request_finished
        request_finished.connect(close_old_connections)
|
2012-12-30 22:19:22 +08:00
|
|
|
|
|
|
|
|
2016-04-25 19:56:07 +08:00
|
|
|
def conditional_content_removal(request, response):
    """
    Simulate the behavior of most web servers by removing the content of
    responses for HEAD requests, 1xx, 204, and 304 responses. Ensure
    compliance with RFC 7230, section 3.3.3.
    """
    # A body is dropped either because the status code forbids one, or
    # because the request was a HEAD. Either way the clearing is identical.
    bodyless_status = (
        100 <= response.status_code < 200 or
        response.status_code in (204, 304)
    )
    if bodyless_status or request.method == 'HEAD':
        if response.streaming:
            response.streaming_content = []
        else:
            response.content = b''
    return response
|
|
|
|
|
|
|
|
|
2006-08-27 20:24:59 +08:00
|
|
|
class ClientHandler(BaseHandler):
    """
    An HTTP Handler that can be used for testing purposes. Use the WSGI
    interface to compose requests, but return the raw HttpResponse object with
    the originating WSGIRequest attached to its ``wsgi_request`` attribute.
    """
    def __init__(self, enforce_csrf_checks=True, *args, **kwargs):
        # When False, CsrfViewMiddleware is bypassed for requests made
        # through this handler (see __call__ below).
        self.enforce_csrf_checks = enforce_csrf_checks
        super().__init__(*args, **kwargs)

    def __call__(self, environ):
        # Set up middleware if needed. We couldn't do this earlier, because
        # settings weren't available.
        if self._middleware_chain is None:
            self.load_middleware()

        # Temporarily disconnect close_old_connections so that firing
        # request_started here doesn't close the test database connections.
        request_started.disconnect(close_old_connections)
        request_started.send(sender=self.__class__, environ=environ)
        request_started.connect(close_old_connections)
        request = WSGIRequest(environ)
        # sneaky little hack so that we can easily get round
        # CsrfViewMiddleware. This makes life easier, and is probably
        # required for backwards compatibility with external tests against
        # admin views.
        request._dont_enforce_csrf_checks = not self.enforce_csrf_checks

        # Request goes through middleware.
        response = self.get_response(request)

        # Simulate behaviors of most web servers.
        conditional_content_removal(request, response)

        # Attach the originating request to the response so that it could be
        # later retrieved.
        response.wsgi_request = request

        # Emulate a WSGI server by calling the close method on completion.
        if response.streaming:
            # Deferred: the wrapper calls close() once the stream is consumed.
            response.streaming_content = closing_iterator_wrapper(
                response.streaming_content, response.close)
        else:
            request_finished.disconnect(close_old_connections)
            response.close()  # will fire request_finished
            request_finished.connect(close_old_connections)

        return response
|
|
|
|
|
2013-08-05 23:07:12 +08:00
|
|
|
|
2020-02-13 06:15:00 +08:00
|
|
|
class AsyncClientHandler(BaseHandler):
    """An async version of ClientHandler."""
    def __init__(self, enforce_csrf_checks=True, *args, **kwargs):
        # When False, CsrfViewMiddleware is bypassed for requests made
        # through this handler (see __call__ below).
        self.enforce_csrf_checks = enforce_csrf_checks
        super().__init__(*args, **kwargs)

    async def __call__(self, scope):
        # Set up middleware if needed. We couldn't do this earlier, because
        # settings weren't available.
        if self._middleware_chain is None:
            self.load_middleware(is_async=True)
        # Extract body file from the scope, if provided.
        if '_body_file' in scope:
            body_file = scope.pop('_body_file')
        else:
            body_file = FakePayload('')

        # Temporarily disconnect close_old_connections so that firing
        # request_started here doesn't close the test database connections.
        request_started.disconnect(close_old_connections)
        await sync_to_async(request_started.send, thread_sensitive=False)(sender=self.__class__, scope=scope)
        request_started.connect(close_old_connections)
        request = ASGIRequest(scope, body_file)
        # Sneaky little hack so that we can easily get round
        # CsrfViewMiddleware. This makes life easier, and is probably required
        # for backwards compatibility with external tests against admin views.
        request._dont_enforce_csrf_checks = not self.enforce_csrf_checks
        # Request goes through middleware.
        response = await self.get_response_async(request)
        # Simulate behaviors of most web servers.
        conditional_content_removal(request, response)
        # Attach the originating ASGI request to the response so that it could
        # be later retrieved.
        response.asgi_request = request
        # Emulate a server by calling the close method on completion.
        if response.streaming:
            # Deferred: the wrapper calls close() once the stream is consumed.
            response.streaming_content = await sync_to_async(closing_iterator_wrapper, thread_sensitive=False)(
                response.streaming_content,
                response.close,
            )
        else:
            request_finished.disconnect(close_old_connections)
            # Will fire request_finished.
            await sync_to_async(response.close, thread_sensitive=False)()
            request_finished.connect(close_old_connections)
        return response
|
|
|
|
|
|
|
|
|
2008-08-06 23:32:46 +08:00
|
|
|
def store_rendered_templates(store, signal, sender, template, context, **kwargs):
    """
    Signal receiver that records every rendered template and its context
    into ``store``.

    The context is copied so that it reflects the state at rendering time
    rather than whatever it is mutated into afterwards.
    """
    templates = store.setdefault('templates', [])
    templates.append(template)
    try:
        contexts = store['context']
    except KeyError:
        contexts = store['context'] = ContextList()
    contexts.append(copy(context))
|
2006-08-27 20:24:59 +08:00
|
|
|
|
2013-08-05 23:07:12 +08:00
|
|
|
|
2006-08-27 20:24:59 +08:00
|
|
|
def encode_multipart(boundary, data):
    """
    Encode multipart POST data from a dictionary of form values.

    The key will be used as the form data name; the value will be transmitted
    as content. If the value is a file, the contents of the file will be sent
    as an application/octet-stream; otherwise, str(value) will be sent.
    """
    def to_bytes(s):
        return force_bytes(s, settings.DEFAULT_CHARSET)

    # Not by any means perfect, but good enough for our purposes.
    def is_file(thing):
        return hasattr(thing, "read") and callable(thing.read)

    def field_lines(name, payload):
        # The boundary/header/blank-line/value quartet for one plain value.
        return [to_bytes(part) for part in (
            '--%s' % boundary,
            'Content-Disposition: form-data; name="%s"' % name,
            '',
            payload,
        )]

    lines = []
    # Each bit of the multipart form data could be either a form value or a
    # file, or a *list* of form values and/or files. Remember that HTTP field
    # names can be duplicated!
    for key, value in data.items():
        if value is None:
            raise TypeError(
                "Cannot encode None for key '%s' as POST data. Did you mean "
                "to pass an empty string or omit the value?" % key
            )
        elif is_file(value):
            lines.extend(encode_file(boundary, key, value))
        elif not isinstance(value, str) and is_iterable(value):
            for item in value:
                if is_file(item):
                    lines.extend(encode_file(boundary, key, item))
                else:
                    lines.extend(field_lines(key, item))
        else:
            lines.extend(field_lines(key, value))

    # Closing boundary plus a trailing CRLF.
    lines.append(to_bytes('--%s--' % boundary))
    lines.append(b'')
    return b'\r\n'.join(lines)
|
2006-08-27 20:24:59 +08:00
|
|
|
|
2013-08-05 23:07:12 +08:00
|
|
|
|
2008-07-08 06:06:32 +08:00
|
|
|
def encode_file(boundary, key, file):
    """
    Return the list of byte lines that encode a single uploaded ``file`` as a
    multipart/form-data part named ``key`` delimited by ``boundary``.
    """
    def to_bytes(s):
        return force_bytes(s, settings.DEFAULT_CHARSET)

    # file.name might not be a string. For example, it's an int for
    # tempfile.TemporaryFile().
    if hasattr(file, 'name') and isinstance(file.name, str):
        filename = os.path.basename(file.name)
    else:
        filename = ''

    # Resolution order: an explicit content_type attribute wins, then a
    # guess from the filename, finally a generic binary fallback.
    if hasattr(file, 'content_type'):
        content_type = file.content_type
    else:
        content_type = mimetypes.guess_type(filename)[0] if filename else None
    if content_type is None:
        content_type = 'application/octet-stream'
    filename = filename or key

    disposition = (
        'Content-Disposition: form-data; name="%s"; filename="%s"'
        % (key, filename)
    )
    return [
        to_bytes('--%s' % boundary),
        to_bytes(disposition),
        to_bytes('Content-Type: %s' % content_type),
        b'',
        to_bytes(file.read()),
    ]
|
2008-09-17 19:32:11 +08:00
|
|
|
|
2010-10-13 07:37:47 +08:00
|
|
|
|
2017-01-19 15:39:46 +08:00
|
|
|
class RequestFactory:
    """
    Class that lets you create mock Request objects for use in testing.

    Usage:

    rf = RequestFactory()
    get_request = rf.get('/hello/')
    post_request = rf.post('/submit/', {'foo': 'bar'})

    Once you have a request object you can pass it to any view function,
    just as if that view had been hooked up using a URLconf.
    """
    def __init__(self, *, json_encoder=DjangoJSONEncoder, **defaults):
        # Encoder class used by _encode_json() for dict/list/tuple bodies.
        self.json_encoder = json_encoder
        # Extra WSGI environ entries merged into every request's environ.
        self.defaults = defaults
        self.cookies = SimpleCookie()
        self.errors = BytesIO()

    def _base_environ(self, **request):
        """
        The base environment for a request.
        """
        # This is a minimal valid WSGI environ dictionary, plus:
        # - HTTP_COOKIE: for cookie support,
        # - REMOTE_ADDR: often useful, see #8551.
        # See https://www.python.org/dev/peps/pep-3333/#environ-variables
        return {
            # Cookies are sorted so the header is deterministic across runs.
            'HTTP_COOKIE': '; '.join(sorted(
                '%s=%s' % (morsel.key, morsel.coded_value)
                for morsel in self.cookies.values()
            )),
            'PATH_INFO': '/',
            'REMOTE_ADDR': '127.0.0.1',
            'REQUEST_METHOD': 'GET',
            'SCRIPT_NAME': '',
            'SERVER_NAME': 'testserver',
            'SERVER_PORT': '80',
            'SERVER_PROTOCOL': 'HTTP/1.1',
            'wsgi.version': (1, 0),
            'wsgi.url_scheme': 'http',
            'wsgi.input': FakePayload(b''),
            'wsgi.errors': self.errors,
            'wsgi.multiprocess': True,
            'wsgi.multithread': False,
            'wsgi.run_once': False,
            # Instance-wide defaults first, then per-call overrides.
            **self.defaults,
            **request,
        }

    def request(self, **request):
        "Construct a generic request object."
        return WSGIRequest(self._base_environ(**request))

    def _encode_data(self, data, content_type):
        # Identity comparison is deliberate: only this module's own
        # MULTIPART_CONTENT constant triggers multipart encoding.
        if content_type is MULTIPART_CONTENT:
            return encode_multipart(BOUNDARY, data)
        else:
            # Encode the content so that the byte representation is correct.
            match = CONTENT_TYPE_RE.match(content_type)
            if match:
                charset = match[1]
            else:
                charset = settings.DEFAULT_CHARSET
            return force_bytes(data, encoding=charset)

    def _encode_json(self, data, content_type):
        """
        Return encoded JSON if data is a dict, list, or tuple and content_type
        is application/json.
        """
        should_encode = JSON_CONTENT_TYPE_RE.match(content_type) and isinstance(data, (dict, list, tuple))
        return json.dumps(data, cls=self.json_encoder) if should_encode else data

    def _get_path(self, parsed):
        # Return the WSGI PATH_INFO string for a urlparse() result.
        path = parsed.path
        # If there are parameters, add them
        if parsed.params:
            path += ";" + parsed.params
        path = unquote_to_bytes(path)
        # Replace the behavior where non-ASCII values in the WSGI environ are
        # arbitrarily decoded with ISO-8859-1.
        # Refs comment in `get_bytes_from_wsgi()`.
        return path.decode('iso-8859-1')

    def get(self, path, data=None, secure=False, **extra):
        """Construct a GET request."""
        data = {} if data is None else data
        return self.generic('GET', path, secure=secure, **{
            'QUERY_STRING': urlencode(data, doseq=True),
            **extra,
        })

    def post(self, path, data=None, content_type=MULTIPART_CONTENT,
             secure=False, **extra):
        """Construct a POST request."""
        data = self._encode_json({} if data is None else data, content_type)
        post_data = self._encode_data(data, content_type)

        return self.generic('POST', path, post_data, content_type,
                            secure=secure, **extra)

    def head(self, path, data=None, secure=False, **extra):
        """Construct a HEAD request."""
        data = {} if data is None else data
        return self.generic('HEAD', path, secure=secure, **{
            'QUERY_STRING': urlencode(data, doseq=True),
            **extra,
        })

    def trace(self, path, secure=False, **extra):
        """Construct a TRACE request."""
        return self.generic('TRACE', path, secure=secure, **extra)

    def options(self, path, data='', content_type='application/octet-stream',
                secure=False, **extra):
        "Construct an OPTIONS request."
        return self.generic('OPTIONS', path, data, content_type,
                            secure=secure, **extra)

    def put(self, path, data='', content_type='application/octet-stream',
            secure=False, **extra):
        """Construct a PUT request."""
        data = self._encode_json(data, content_type)
        return self.generic('PUT', path, data, content_type,
                            secure=secure, **extra)

    def patch(self, path, data='', content_type='application/octet-stream',
              secure=False, **extra):
        """Construct a PATCH request."""
        data = self._encode_json(data, content_type)
        return self.generic('PATCH', path, data, content_type,
                            secure=secure, **extra)

    def delete(self, path, data='', content_type='application/octet-stream',
               secure=False, **extra):
        """Construct a DELETE request."""
        data = self._encode_json(data, content_type)
        return self.generic('DELETE', path, data, content_type,
                            secure=secure, **extra)

    def generic(self, method, path, data='',
                content_type='application/octet-stream', secure=False,
                **extra):
        """Construct an arbitrary HTTP request."""
        parsed = urlparse(str(path))  # path can be lazy
        data = force_bytes(data, settings.DEFAULT_CHARSET)
        r = {
            'PATH_INFO': self._get_path(parsed),
            'REQUEST_METHOD': method,
            'SERVER_PORT': '443' if secure else '80',
            'wsgi.url_scheme': 'https' if secure else 'http',
        }
        if data:
            # Only set body-related environ keys when there is a body.
            r.update({
                'CONTENT_LENGTH': str(len(data)),
                'CONTENT_TYPE': content_type,
                'wsgi.input': FakePayload(data),
            })
        r.update(extra)
        # If QUERY_STRING is absent or empty, we want to extract it from the URL.
        if not r.get('QUERY_STRING'):
            # WSGI requires latin-1 encoded strings. See get_path_info().
            query_string = parsed[4].encode().decode('iso-8859-1')
            r['QUERY_STRING'] = query_string
        return self.request(**r)
|
|
|
|
|
2013-08-05 23:07:12 +08:00
|
|
|
|
2020-02-13 06:15:00 +08:00
|
|
|
class AsyncRequestFactory(RequestFactory):
    """
    Class that lets you create mock ASGI-like Request objects for use in
    testing. Usage:

    rf = AsyncRequestFactory()
    get_request = await rf.get('/hello/')
    post_request = await rf.post('/submit/', {'foo': 'bar'})

    Once you have a request object you can pass it to any view function,
    including synchronous ones. The reason we have a separate class here is:
    a) this makes ASGIRequest subclasses, and
    b) AsyncTestClient can subclass it.
    """
    def _base_scope(self, **request):
        """The base scope for a request."""
        # This is a minimal valid ASGI scope, plus:
        # - headers['cookie'] for cookie support,
        # - 'client' often useful, see #8551.
        scope = {
            'asgi': {'version': '3.0'},
            'type': 'http',
            'http_version': '1.1',
            'client': ['127.0.0.1', 0],
            'server': ('testserver', '80'),
            'scheme': 'http',
            'method': 'GET',
            'headers': [],
            # Instance-wide defaults first, then per-call overrides.
            **self.defaults,
            **request,
        }
        # All cookies are folded into a single 'cookie' header, sorted so
        # the value is deterministic across runs.
        scope['headers'].append((
            b'cookie',
            b'; '.join(sorted(
                ('%s=%s' % (morsel.key, morsel.coded_value)).encode('ascii')
                for morsel in self.cookies.values()
            )),
        ))
        return scope

    def request(self, **request):
        """Construct a generic request object."""
        # This is synchronous, which means all methods on this class are.
        # AsyncClient, however, has an async request function, which makes all
        # its methods async.
        if '_body_file' in request:
            body_file = request.pop('_body_file')
        else:
            body_file = FakePayload('')
        return ASGIRequest(self._base_scope(**request), body_file)

    def generic(
        self, method, path, data='', content_type='application/octet-stream',
        secure=False, **extra,
    ):
        """Construct an arbitrary HTTP request."""
        parsed = urlparse(str(path))  # path can be lazy.
        data = force_bytes(data, settings.DEFAULT_CHARSET)
        s = {
            'method': method,
            'path': self._get_path(parsed),
            'server': ('127.0.0.1', '443' if secure else '80'),
            'scheme': 'https' if secure else 'http',
            'headers': [(b'host', b'testserver')],
        }
        if data:
            # Body headers and payload only exist when there is a body.
            s['headers'].extend([
                (b'content-length', str(len(data)).encode('ascii')),
                (b'content-type', content_type.encode('ascii')),
            ])
            s['_body_file'] = FakePayload(data)
        # NOTE(review): 'follow' is lifted out of the header-bound extras and
        # stored on the scope itself — presumably consumed by the async test
        # client rather than sent as a header; confirm against AsyncClient.
        follow = extra.pop('follow', None)
        if follow is not None:
            s['follow'] = follow
        if query_string := extra.pop('QUERY_STRING', None):
            s['query_string'] = query_string
        # Any remaining extras become additional request headers.
        s['headers'] += [
            (key.lower().encode('ascii'), value.encode('latin1'))
            for key, value in extra.items()
        ]
        # If QUERY_STRING is absent or empty, we want to extract it from the
        # URL.
        if not s.get('query_string'):
            s['query_string'] = parsed[4]
        return self.request(**s)
|
|
|
|
|
|
|
|
|
|
|
|
class ClientMixin:
    """
    Mixin with common methods between Client and AsyncClient.
    """
    def store_exc_info(self, **kwargs):
        """Store exceptions when they are generated by a view."""
        self.exc_info = sys.exc_info()

    def check_exception(self, response):
        """
        Look for a signaled exception, clear the current context exception
        data, re-raise the signaled exception, and clear the signaled exception
        from the local cache.
        """
        response.exc_info = self.exc_info
        if self.exc_info:
            _, exc_value, _ = self.exc_info
            # Clear before (possibly) raising, so the exception isn't
            # re-raised by a later request.
            self.exc_info = None
            if self.raise_request_exception:
                raise exc_value

    @property
    def session(self):
        """Return the current session variables."""
        engine = import_module(settings.SESSION_ENGINE)
        cookie = self.cookies.get(settings.SESSION_COOKIE_NAME)
        if cookie:
            return engine.SessionStore(cookie.value)
        # No session cookie yet: create and save a fresh session, and set
        # the cookie so subsequent requests reuse it.
        session = engine.SessionStore()
        session.save()
        self.cookies[settings.SESSION_COOKIE_NAME] = session.session_key
        return session

    def login(self, **credentials):
        """
        Set the Factory to appear as if it has successfully logged into a site.

        Return True if login is possible or False if the provided credentials
        are incorrect.
        """
        from django.contrib.auth import authenticate
        user = authenticate(**credentials)
        if user:
            self._login(user)
            return True
        return False

    def force_login(self, user, backend=None):
        # Log ``user`` in without verifying credentials. When no backend is
        # given, pick the first configured backend that can return users.
        def get_backend():
            from django.contrib.auth import load_backend
            for backend_path in settings.AUTHENTICATION_BACKENDS:
                backend = load_backend(backend_path)
                if hasattr(backend, 'get_user'):
                    return backend_path

        if backend is None:
            backend = get_backend()
        user.backend = backend
        self._login(user, backend)

    def _login(self, user, backend=None):
        from django.contrib.auth import login

        # Create a fake request to store login details.
        request = HttpRequest()
        if self.session:
            request.session = self.session
        else:
            engine = import_module(settings.SESSION_ENGINE)
            request.session = engine.SessionStore()
        login(request, user, backend)
        # Save the session values.
        request.session.save()
        # Set the cookie to represent the session.
        session_cookie = settings.SESSION_COOKIE_NAME
        self.cookies[session_cookie] = request.session.session_key
        cookie_data = {
            'max-age': None,
            'path': '/',
            'domain': settings.SESSION_COOKIE_DOMAIN,
            'secure': settings.SESSION_COOKIE_SECURE or None,
            'expires': None,
        }
        self.cookies[session_cookie].update(cookie_data)

    def logout(self):
        """Log out the user by removing the cookies and session object."""
        from django.contrib.auth import get_user, logout
        request = HttpRequest()
        if self.session:
            request.session = self.session
            request.user = get_user(request)
        else:
            engine = import_module(settings.SESSION_ENGINE)
            request.session = engine.SessionStore()
        logout(request)
        self.cookies = SimpleCookie()

    def _parse_json(self, response, **extra):
        # Lazily decode and cache the response body as JSON on the response
        # object; raise if the response doesn't declare a JSON content type.
        if not hasattr(response, '_json'):
            if not JSON_CONTENT_TYPE_RE.match(response.get('Content-Type')):
                raise ValueError(
                    'Content-Type header is "%s", not "application/json"'
                    % response.get('Content-Type')
                )
            response._json = json.loads(response.content.decode(response.charset), **extra)
        return response._json
|
|
|
|
|
|
|
|
|
|
|
|
class Client(ClientMixin, RequestFactory):
    """
    A class that can act as a client for testing purposes.

    It allows the user to compose GET and POST requests, and
    obtain the response that the server gave to those requests.
    The server Response objects are annotated with the details
    of the contexts and templates that were rendered during the
    process of serving the request.

    Client objects are stateful - they will retain cookie (and
    thus session) details for the lifetime of the Client instance.

    This is not intended as a replacement for Twill/Selenium or
    the like - it is here to allow testing against the
    contexts and templates produced by a view, rather than the
    HTML rendered to the end-user.
    """
    def __init__(self, enforce_csrf_checks=False, raise_request_exception=True, **defaults):
        """
        enforce_csrf_checks: forwarded to ClientHandler; when False (the
            default) CSRF validation is skipped for test requests.
        raise_request_exception: when True, exceptions raised by a view are
            re-raised in the test (see ClientMixin.check_exception).
        **defaults: default environ values, passed to RequestFactory.
        """
        super().__init__(**defaults)
        self.handler = ClientHandler(enforce_csrf_checks)
        self.raise_request_exception = raise_request_exception
        # Set by store_exc_info() when a view raises; consumed and cleared
        # by check_exception().
        self.exc_info = None
        # The extra keyword arguments of the most recent request, recorded
        # by the HTTP method helpers below.
        self.extra = None

    def request(self, **request):
        """
        The master request method. Compose the environment dictionary and pass
        to the handler, return the result of the handler. Assume defaults for
        the query environment, which can be overridden using the arguments to
        the request.

        The returned response is annotated with: client, request, templates,
        context, json (lazy parser), and resolver_match (lazy resolve).
        Re-raises a view exception if raise_request_exception is True.
        """
        environ = self._base_environ(**request)

        # Curry a data dictionary into an instance of the template renderer
        # callback function.
        data = {}
        on_template_render = partial(store_rendered_templates, data)
        signal_uid = "template-render-%s" % id(request)
        signals.template_rendered.connect(on_template_render, dispatch_uid=signal_uid)
        # Capture exceptions created by the handler.
        exception_uid = "request-exception-%s" % id(request)
        got_request_exception.connect(self.store_exc_info, dispatch_uid=exception_uid)
        try:
            response = self.handler(environ)
        finally:
            # Always disconnect, even if the handler raised, so signal
            # receivers don't leak across requests.
            signals.template_rendered.disconnect(dispatch_uid=signal_uid)
            got_request_exception.disconnect(dispatch_uid=exception_uid)
        # Check for signaled exceptions.
        self.check_exception(response)
        # Save the client and request that stimulated the response.
        response.client = self
        response.request = request
        # Add any rendered template detail to the response.
        response.templates = data.get('templates', [])
        response.context = data.get('context')
        response.json = partial(self._parse_json, response)
        # Attach the ResolverMatch instance to the response.
        # Resolution is deferred via SimpleLazyObject so it only runs if a
        # test actually inspects resolver_match.
        urlconf = getattr(response.wsgi_request, 'urlconf', None)
        response.resolver_match = SimpleLazyObject(
            lambda: resolve(request['PATH_INFO'], urlconf=urlconf),
        )
        # Flatten a single context. Not really necessary anymore thanks to the
        # __getattr__ flattening in ContextList, but has some edge case
        # backwards compatibility implications.
        if response.context and len(response.context) == 1:
            response.context = response.context[0]
        # Update persistent cookie data.
        if response.cookies:
            self.cookies.update(response.cookies)
        return response

    def get(self, path, data=None, follow=False, secure=False, **extra):
        """Request a response from the server using GET."""
        self.extra = extra
        response = super().get(path, data=data, secure=secure, **extra)
        if follow:
            # Transparently follow any redirect chain.
            response = self._handle_redirects(response, data=data, **extra)
        return response

    def post(self, path, data=None, content_type=MULTIPART_CONTENT,
             follow=False, secure=False, **extra):
        """Request a response from the server using POST."""
        self.extra = extra
        response = super().post(path, data=data, content_type=content_type, secure=secure, **extra)
        if follow:
            response = self._handle_redirects(response, data=data, content_type=content_type, **extra)
        return response

    def head(self, path, data=None, follow=False, secure=False, **extra):
        """Request a response from the server using HEAD."""
        self.extra = extra
        response = super().head(path, data=data, secure=secure, **extra)
        if follow:
            response = self._handle_redirects(response, data=data, **extra)
        return response

    def options(self, path, data='', content_type='application/octet-stream',
                follow=False, secure=False, **extra):
        """Request a response from the server using OPTIONS."""
        self.extra = extra
        response = super().options(path, data=data, content_type=content_type, secure=secure, **extra)
        if follow:
            response = self._handle_redirects(response, data=data, content_type=content_type, **extra)
        return response

    def put(self, path, data='', content_type='application/octet-stream',
            follow=False, secure=False, **extra):
        """Send a resource to the server using PUT."""
        self.extra = extra
        response = super().put(path, data=data, content_type=content_type, secure=secure, **extra)
        if follow:
            response = self._handle_redirects(response, data=data, content_type=content_type, **extra)
        return response

    def patch(self, path, data='', content_type='application/octet-stream',
              follow=False, secure=False, **extra):
        """Send a resource to the server using PATCH."""
        self.extra = extra
        response = super().patch(path, data=data, content_type=content_type, secure=secure, **extra)
        if follow:
            response = self._handle_redirects(response, data=data, content_type=content_type, **extra)
        return response

    def delete(self, path, data='', content_type='application/octet-stream',
               follow=False, secure=False, **extra):
        """Send a DELETE request to the server."""
        self.extra = extra
        response = super().delete(path, data=data, content_type=content_type, secure=secure, **extra)
        if follow:
            response = self._handle_redirects(response, data=data, content_type=content_type, **extra)
        return response

    def trace(self, path, data='', follow=False, secure=False, **extra):
        """Send a TRACE request to the server."""
        self.extra = extra
        response = super().trace(path, data=data, secure=secure, **extra)
        if follow:
            response = self._handle_redirects(response, data=data, **extra)
        return response

    def _handle_redirects(self, response, data='', content_type='', **extra):
        """
        Follow any redirects by requesting responses from the server using GET.

        Records each hop in response.redirect_chain as (url, status_code)
        pairs and raises RedirectCycleError on a loop or after 20 hops.
        """
        response.redirect_chain = []
        redirect_status_codes = (
            HTTPStatus.MOVED_PERMANENTLY,
            HTTPStatus.FOUND,
            HTTPStatus.SEE_OTHER,
            HTTPStatus.TEMPORARY_REDIRECT,
            HTTPStatus.PERMANENT_REDIRECT,
        )
        while response.status_code in redirect_status_codes:
            response_url = response.url
            redirect_chain = response.redirect_chain
            redirect_chain.append((response_url, response.status_code))

            url = urlsplit(response_url)
            # Carry scheme/host/port from the redirect target into the next
            # request's environ so cross-host redirects are simulated.
            if url.scheme:
                extra['wsgi.url_scheme'] = url.scheme
            if url.hostname:
                extra['SERVER_NAME'] = url.hostname
            if url.port:
                extra['SERVER_PORT'] = str(url.port)

            path = url.path
            # RFC 2616: bare domains without path are treated as the root.
            if not path and url.netloc:
                path = '/'
            # Prepend the request path to handle relative path redirects
            if not path.startswith('/'):
                path = urljoin(response.request['PATH_INFO'], path)

            if response.status_code in (HTTPStatus.TEMPORARY_REDIRECT, HTTPStatus.PERMANENT_REDIRECT):
                # Preserve request method and query string (if needed)
                # post-redirect for 307/308 responses.
                request_method = response.request['REQUEST_METHOD'].lower()
                if request_method not in ('get', 'head'):
                    extra['QUERY_STRING'] = url.query
                request_method = getattr(self, request_method)
            else:
                # Other redirects degrade to GET with the target's query
                # string as the data payload.
                request_method = self.get
                data = QueryDict(url.query)
                content_type = None

            response = request_method(path, data=data, content_type=content_type, follow=False, **extra)
            response.redirect_chain = redirect_chain

            if redirect_chain[-1] in redirect_chain[:-1]:
                # Check that we're not redirecting to somewhere we've already
                # been to, to prevent loops.
                raise RedirectCycleError("Redirect loop detected.", last_response=response)
            if len(redirect_chain) > 20:
                # Such a lengthy chain likely also means a loop, but one with
                # a growing path, changing view, or changing query argument;
                # 20 is the value of "network.http.redirection-limit" from Firefox.
                raise RedirectCycleError("Too many redirects.", last_response=response)

        return response
|
2020-02-13 06:15:00 +08:00
|
|
|
|
|
|
|
|
|
|
|
class AsyncClient(ClientMixin, AsyncRequestFactory):
    """
    An async version of Client that creates ASGIRequests and calls through an
    async request path.

    Does not currently support "follow" on its methods.
    """
    def __init__(self, enforce_csrf_checks=False, raise_request_exception=True, **defaults):
        """
        enforce_csrf_checks: forwarded to AsyncClientHandler.
        raise_request_exception: when True, exceptions raised by a view are
            re-raised in the test (see ClientMixin.check_exception).
        **defaults: default scope values, passed to AsyncRequestFactory.
        """
        super().__init__(**defaults)
        self.handler = AsyncClientHandler(enforce_csrf_checks)
        self.raise_request_exception = raise_request_exception
        # Set by store_exc_info() when a view raises; consumed and cleared
        # by check_exception().
        self.exc_info = None
        # The extra keyword arguments of the most recent request.
        self.extra = None

    async def request(self, **request):
        """
        The master request method. Compose the scope dictionary and pass to the
        handler, return the result of the handler. Assume defaults for the
        query environment, which can be overridden using the arguments to the
        request.

        Raises NotImplementedError if 'follow' is passed, since redirect
        following is not supported on the async path.
        """
        if 'follow' in request:
            raise NotImplementedError(
                'AsyncClient request methods do not accept the follow '
                'parameter.'
            )
        scope = self._base_scope(**request)
        # Curry a data dictionary into an instance of the template renderer
        # callback function.
        data = {}
        on_template_render = partial(store_rendered_templates, data)
        signal_uid = 'template-render-%s' % id(request)
        signals.template_rendered.connect(on_template_render, dispatch_uid=signal_uid)
        # Capture exceptions created by the handler.
        exception_uid = 'request-exception-%s' % id(request)
        got_request_exception.connect(self.store_exc_info, dispatch_uid=exception_uid)
        try:
            response = await self.handler(scope)
        finally:
            # Always disconnect, even if the handler raised, so signal
            # receivers don't leak across requests.
            signals.template_rendered.disconnect(dispatch_uid=signal_uid)
            got_request_exception.disconnect(dispatch_uid=exception_uid)
        # Check for signaled exceptions.
        self.check_exception(response)
        # Save the client and request that stimulated the response.
        response.client = self
        response.request = request
        # Add any rendered template detail to the response.
        response.templates = data.get('templates', [])
        response.context = data.get('context')
        response.json = partial(self._parse_json, response)
        # Attach the ResolverMatch instance to the response.
        # Resolution is deferred via SimpleLazyObject so it only runs if a
        # test actually inspects resolver_match.
        urlconf = getattr(response.asgi_request, 'urlconf', None)
        response.resolver_match = SimpleLazyObject(
            lambda: resolve(request['path'], urlconf=urlconf),
        )
        # Flatten a single context. Not really necessary anymore thanks to the
        # __getattr__ flattening in ContextList, but has some edge case
        # backwards compatibility implications.
        if response.context and len(response.context) == 1:
            response.context = response.context[0]
        # Update persistent cookie data.
        if response.cookies:
            self.cookies.update(response.cookies)
        return response
|