"""
This module contains helper functions for controlling caching. It does so by
managing the "Vary" header of responses. It includes functions to patch the
header of response objects directly and decorators that change functions to do
that header-patching themselves.

For information on the Vary header, see:

    https://tools.ietf.org/html/rfc7231#section-7.1.4

Essentially, the "Vary" HTTP header defines which headers a cache should take
into account when building its cache key. Requests with the same path but
different header content for headers named in "Vary" need to get different
cache keys to prevent delivery of wrong content.

An example: i18n middleware would need to distinguish caches by the
"Accept-Language" header.
"""
import hashlib
import logging
import re
import time

from django.conf import settings
from django.core.cache import caches
from django.http import HttpResponse, HttpResponseNotModified
from django.utils.encoding import force_bytes, force_text, iri_to_uri
from django.utils.http import (
    http_date, parse_etags, parse_http_date_safe, quote_etag,
)
from django.utils.timezone import get_current_timezone_name
from django.utils.translation import get_language

cc_delim_re = re.compile(r'\s*,\s*')

logger = logging.getLogger('django.request')


def patch_cache_control(response, **kwargs):
    """
    Patch the Cache-Control header by adding all keyword arguments to it.
    The transformation is as follows:

    * All keyword parameter names are turned to lowercase, and underscores
      are converted to hyphens.
    * If the value of a parameter is True (exactly True, not just a
      true value), only the parameter name is added to the header.
    * All other parameters are added with their value, after applying
      str() to it.
    """
    def dictitem(s):
        t = s.split('=', 1)
        if len(t) > 1:
            return (t[0].lower(), t[1])
        else:
            return (t[0].lower(), True)

    def dictvalue(t):
        if t[1] is True:
            return t[0]
        else:
            return '%s=%s' % (t[0], t[1])

    if response.get('Cache-Control'):
        cc = cc_delim_re.split(response['Cache-Control'])
        cc = dict(dictitem(el) for el in cc)
    else:
        cc = {}

    # If there's already a max-age header but we're being asked to set a new
    # max-age, use the minimum of the two ages. In practice this happens when
    # a decorator and a piece of middleware both operate on a given view.
    if 'max-age' in cc and 'max_age' in kwargs:
        kwargs['max_age'] = min(int(cc['max-age']), kwargs['max_age'])

    # Allow overriding private caching and vice versa
    if 'private' in cc and 'public' in kwargs:
        del cc['private']
    elif 'public' in cc and 'private' in kwargs:
        del cc['public']

    for (k, v) in kwargs.items():
        cc[k.replace('_', '-')] = v
    cc = ', '.join(dictvalue(el) for el in cc.items())
    response['Cache-Control'] = cc
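
# patch_cache_control() usage sketch (illustrative only; assumes a configured
# Django project providing an HttpResponse named `response`):
#
#     response['Cache-Control'] = 'max-age=3600, private'
#     patch_cache_control(response, max_age=600, public=True)
#     response['Cache-Control']  # 'max-age=600, public' (token order may vary)
#
# The smaller of the two max-age values wins, 'private' is dropped because
# 'public' was requested, and True-valued keywords become bare tokens.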


def get_max_age(response):
    """
    Return the max-age from the response Cache-Control header as an integer,
    or None if it wasn't found or wasn't an integer.
    """
    if not response.has_header('Cache-Control'):
        return
    cc = dict(_to_tuple(el) for el in cc_delim_re.split(response['Cache-Control']))
    if 'max-age' in cc:
        try:
            return int(cc['max-age'])
        except (ValueError, TypeError):
            pass
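
# get_max_age() usage sketch (illustrative only):
#
#     response['Cache-Control'] = 'public, max-age=300'
#     get_max_age(response)        # -> 300
#     get_max_age(HttpResponse())  # no Cache-Control header -> None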


def set_response_etag(response):
    if not response.streaming:
        response['ETag'] = quote_etag(hashlib.md5(response.content).hexdigest())
    return response


def _precondition_failed(request):
    logger.warning(
        'Precondition Failed: %s', request.path,
        extra={
            'status_code': 412,
            'request': request,
        },
    )
    return HttpResponse(status=412)


def _not_modified(request, response=None):
    new_response = HttpResponseNotModified()
    if response:
        # Preserve the headers required by Section 4.1 of RFC 7232, as well as
        # Last-Modified.
        for header in ('Cache-Control', 'Content-Location', 'Date', 'ETag', 'Expires', 'Last-Modified', 'Vary'):
            if header in response:
                new_response[header] = response[header]

        # Preserve cookies as per the cookie specification: "If a proxy server
        # receives a response which contains a Set-cookie header, it should
        # propagate the Set-cookie header to the client, regardless of whether
        # the response was 304 (Not Modified) or 200 (OK)."
        # https://curl.haxx.se/rfc/cookie_spec.html
        new_response.cookies = response.cookies
    return new_response


def get_conditional_response(request, etag=None, last_modified=None, response=None):
    # Only return conditional responses on successful requests.
    if response and not (200 <= response.status_code < 300):
        return response

    # Get HTTP request headers.
    if_match_etags = parse_etags(request.META.get('HTTP_IF_MATCH', ''))
    if_unmodified_since = request.META.get('HTTP_IF_UNMODIFIED_SINCE')
    if if_unmodified_since:
        if_unmodified_since = parse_http_date_safe(if_unmodified_since)
    if_none_match_etags = parse_etags(request.META.get('HTTP_IF_NONE_MATCH', ''))
    if_modified_since = request.META.get('HTTP_IF_MODIFIED_SINCE')
    if if_modified_since:
        if_modified_since = parse_http_date_safe(if_modified_since)

    # Step 1 of section 6 of RFC 7232: Test the If-Match precondition.
    if if_match_etags and not _if_match_passes(etag, if_match_etags):
        return _precondition_failed(request)

    # Step 2: Test the If-Unmodified-Since precondition.
    if (not if_match_etags and if_unmodified_since and
            not _if_unmodified_since_passes(last_modified, if_unmodified_since)):
        return _precondition_failed(request)

    # Step 3: Test the If-None-Match precondition.
    if if_none_match_etags and not _if_none_match_passes(etag, if_none_match_etags):
        if request.method in ('GET', 'HEAD'):
            return _not_modified(request, response)
        else:
            return _precondition_failed(request)

    # Step 4: Test the If-Modified-Since precondition.
    if (not if_none_match_etags and if_modified_since and
            not _if_modified_since_passes(last_modified, if_modified_since)):
        if request.method in ('GET', 'HEAD'):
            return _not_modified(request, response)

    # Step 5: Test the If-Range precondition (not supported).
    # Step 6: Return original response since there isn't a conditional response.
    return response
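
# get_conditional_response() usage sketch (illustrative only; the ETag value
# is hypothetical and already quoted):
#
#     conditional = get_conditional_response(request, etag='"abc123"',
#                                            response=response)
#     # On a GET/HEAD request whose If-None-Match matches, this is a 304
#     # HttpResponseNotModified; on a failed If-Match it is a 412; otherwise
#     # it is the original `response` unchanged.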


def _if_match_passes(target_etag, etags):
    """
    Test the If-Match comparison as defined in section 3.1 of RFC 7232.
    """
    if not target_etag:
        # If there isn't an ETag, then there can't be a match.
        return False
    elif etags == ['*']:
        # The existence of an ETag means that there is "a current
        # representation for the target resource", even if the ETag is weak,
        # so there is a match to '*'.
        return True
    elif target_etag.startswith('W/'):
        # A weak ETag can never strongly match another ETag.
        return False
    else:
        # Since the ETag is strong, this will only return True if there's a
        # strong match.
        return target_etag in etags


def _if_unmodified_since_passes(last_modified, if_unmodified_since):
    """
    Test the If-Unmodified-Since comparison as defined in section 3.4 of
    RFC 7232.
    """
    return last_modified and last_modified <= if_unmodified_since


def _if_none_match_passes(target_etag, etags):
    """
    Test the If-None-Match comparison as defined in section 3.2 of RFC 7232.
    """
    if not target_etag:
        # If there isn't an ETag, then there isn't a match.
        return True
    elif etags == ['*']:
        # The existence of an ETag means that there is "a current
        # representation for the target resource", so there is a match to '*'.
        return False
    else:
        # The comparison should be weak, so look for a match after stripping
        # off any weak indicators.
        target_etag = target_etag.strip('W/')
        etags = (etag.strip('W/') for etag in etags)
        return target_etag not in etags


def _if_modified_since_passes(last_modified, if_modified_since):
    """
    Test the If-Modified-Since comparison as defined in section 3.3 of RFC 7232.
    """
    return not last_modified or last_modified > if_modified_since


def patch_response_headers(response, cache_timeout=None):
    """
    Add HTTP caching headers to the given HttpResponse: Expires and
    Cache-Control.

    Each header is only added if it isn't already set.

    cache_timeout is in seconds. The CACHE_MIDDLEWARE_SECONDS setting is used
    by default.
    """
    if cache_timeout is None:
        cache_timeout = settings.CACHE_MIDDLEWARE_SECONDS
    if cache_timeout < 0:
        cache_timeout = 0  # Can't have max-age negative
    if not response.has_header('Expires'):
        response['Expires'] = http_date(time.time() + cache_timeout)
    patch_cache_control(response, max_age=cache_timeout)
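
# patch_response_headers() usage sketch (illustrative only):
#
#     patch_response_headers(response, cache_timeout=300)
#     # -> adds "Expires" set to now + 300 seconds (if not already present)
#     #    and merges "max-age=300" into Cache-Control.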


def add_never_cache_headers(response):
    """
    Add headers to a response to indicate that a page should never be cached.
    """
    patch_response_headers(response, cache_timeout=-1)
    patch_cache_control(response, no_cache=True, no_store=True, must_revalidate=True)
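
# add_never_cache_headers() usage sketch (illustrative only):
#
#     add_never_cache_headers(response)
#     # -> "Expires" set to the current time and a Cache-Control header of
#     #    "max-age=0, no-cache, no-store, must-revalidate" (order may vary).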


def patch_vary_headers(response, newheaders):
    """
    Add (or update) the "Vary" header in the given HttpResponse object.
    newheaders is a list of header names that should be in "Vary". Existing
    headers in "Vary" aren't removed.
    """
    # Note that we need to keep the original order intact, because cache
    # implementations may rely on the order of the Vary contents in, say,
    # computing an MD5 hash.
    if response.has_header('Vary'):
        vary_headers = cc_delim_re.split(response['Vary'])
    else:
        vary_headers = []
    # Use .lower() here so we treat headers as case-insensitive.
    existing_headers = {header.lower() for header in vary_headers}
    additional_headers = [newheader for newheader in newheaders
                          if newheader.lower() not in existing_headers]
    response['Vary'] = ', '.join(vary_headers + additional_headers)
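
# patch_vary_headers() usage sketch (illustrative only):
#
#     response['Vary'] = 'Cookie'
#     patch_vary_headers(response, ['Accept-Language', 'cookie'])
#     response['Vary']  # -> 'Cookie, Accept-Language'
#
# Existing entries keep their position and spelling; the duplicate 'cookie' is
# skipped because the comparison is case-insensitive.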


def has_vary_header(response, header_query):
    """
    Check to see if the response has a given header name in its Vary header.
    """
    if not response.has_header('Vary'):
        return False
    vary_headers = cc_delim_re.split(response['Vary'])
    existing_headers = {header.lower() for header in vary_headers}
    return header_query.lower() in existing_headers
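
# has_vary_header() usage sketch (illustrative only):
#
#     response['Vary'] = 'Accept-Encoding, Cookie'
#     has_vary_header(response, 'cookie')      # -> True (case-insensitive)
#     has_vary_header(response, 'User-Agent')  # -> False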


def _i18n_cache_key_suffix(request, cache_key):
    """If necessary, add the current locale or time zone to the cache key."""
    if settings.USE_I18N or settings.USE_L10N:
        # first check if LocaleMiddleware or another middleware added
        # LANGUAGE_CODE to request, then fall back to the active language
        # which in turn can also fall back to settings.LANGUAGE_CODE
        cache_key += '.%s' % getattr(request, 'LANGUAGE_CODE', get_language())
    if settings.USE_TZ:
        # The datetime module doesn't restrict the output of tzname().
        # Windows is known to use non-standard, locale-dependent names.
        # User-defined tzinfo classes may return absolutely anything.
        # Hence this paranoid conversion to create a valid cache key.
        tz_name = force_text(get_current_timezone_name(), errors='ignore')
        cache_key += '.%s' % tz_name.encode('ascii', 'ignore').decode('ascii').replace(' ', '_')
    return cache_key


def _generate_cache_key(request, method, headerlist, key_prefix):
    """Return a cache key from the headers given in the header list."""
    ctx = hashlib.md5()
    for header in headerlist:
        value = request.META.get(header)
        if value is not None:
            ctx.update(force_bytes(value))
    url = hashlib.md5(force_bytes(iri_to_uri(request.build_absolute_uri())))
    cache_key = 'views.decorators.cache.cache_page.%s.%s.%s.%s' % (
        key_prefix, method, url.hexdigest(), ctx.hexdigest())
    return _i18n_cache_key_suffix(request, cache_key)


def _generate_cache_header_key(key_prefix, request):
    """Return a cache key for the header cache."""
    url = hashlib.md5(force_bytes(iri_to_uri(request.build_absolute_uri())))
    cache_key = 'views.decorators.cache.cache_header.%s.%s' % (
        key_prefix, url.hexdigest())
    return _i18n_cache_key_suffix(request, cache_key)


def get_cache_key(request, key_prefix=None, method='GET', cache=None):
    """
    Return a cache key based on the request URL and query. It can be used
    in the request phase because it pulls the list of headers to take into
    account from the global URL registry and uses those to build a cache key
    to check against.

    If there isn't a headerlist stored, return None, indicating that the page
    needs to be rebuilt.
    """
    if key_prefix is None:
        key_prefix = settings.CACHE_MIDDLEWARE_KEY_PREFIX
    cache_key = _generate_cache_header_key(key_prefix, request)
    if cache is None:
        cache = caches[settings.CACHE_MIDDLEWARE_ALIAS]
    headerlist = cache.get(cache_key)
    if headerlist is not None:
        return _generate_cache_key(request, method, headerlist, key_prefix)
    else:
        return None


def learn_cache_key(request, response, cache_timeout=None, key_prefix=None, cache=None):
    """
    Learn what headers to take into account for some request URL from the
    response object. Store those headers in a global URL registry so that
    later access to that URL will know what headers to take into account
    without building the response object itself. The headers are named in the
    Vary header of the response, but we want to prevent response generation.

    The list of headers to use for cache key generation is stored in the same
    cache as the pages themselves. If the cache ages some data out of the
    cache, this just means that we have to build the response once to get at
    the Vary header and so at the list of headers to use for the cache key.
    """
    if key_prefix is None:
        key_prefix = settings.CACHE_MIDDLEWARE_KEY_PREFIX
    if cache_timeout is None:
        cache_timeout = settings.CACHE_MIDDLEWARE_SECONDS
    cache_key = _generate_cache_header_key(key_prefix, request)
    if cache is None:
        cache = caches[settings.CACHE_MIDDLEWARE_ALIAS]
    if response.has_header('Vary'):
        is_accept_language_redundant = settings.USE_I18N or settings.USE_L10N
        # If i18n or l10n are used, the generated cache key will be suffixed
        # with the current locale. Adding the raw value of Accept-Language is
        # redundant in that case and would result in storing the same content
        # under multiple keys in the cache. See #18191 for details.
        headerlist = []
        for header in cc_delim_re.split(response['Vary']):
            header = header.upper().replace('-', '_')
            if header == 'ACCEPT_LANGUAGE' and is_accept_language_redundant:
                continue
            headerlist.append('HTTP_' + header)
        headerlist.sort()
        cache.set(cache_key, headerlist, cache_timeout)
        return _generate_cache_key(request, request.method, headerlist, key_prefix)
    else:
        # if there is no Vary header, we still need a cache key
        # for the request.build_absolute_uri()
        cache.set(cache_key, [], cache_timeout)
        return _generate_cache_key(request, request.method, [], key_prefix)
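
# learn_cache_key() / get_cache_key() usage sketch (illustrative only): this
# is roughly how the cache middleware pairs the two helpers. The headers named
# in Vary are recorded while a response is being sent, and later requests use
# that record to rebuild the same key without generating the response:
#
#     page_cache = caches[settings.CACHE_MIDDLEWARE_ALIAS]
#     key = learn_cache_key(request, response, cache_timeout=600)
#     page_cache.set(key, response, 600)
#     ...
#     key = get_cache_key(request)      # None until the URL has been learned
#     cached = page_cache.get(key) if key is not None else None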


def _to_tuple(s):
    t = s.split('=', 1)
    if len(t) == 2:
        return t[0].lower(), t[1]
    return t[0].lower(), True