Fixed #13795 -- Added a site-wide cache prefix and cache versioning. Thanks to bruth for the patch.
git-svn-id: http://code.djangoproject.com/svn/django/trunk@14623 bcc190cf-cafb-0310-a4f2-bffc1f526a37
commit 99d247f4cb (parent 261aee26c1)
@@ -433,6 +433,9 @@ SESSION_FILE_PATH = None # Directory to store ses
 # The cache backend to use. See the docstring in django.core.cache for the
 # possible values.
 CACHE_BACKEND = 'locmem://'
+CACHE_VERSION = 1
+CACHE_KEY_PREFIX = ''
+CACHE_KEY_FUNCTION = None
 CACHE_MIDDLEWARE_KEY_PREFIX = ''
 CACHE_MIDDLEWARE_SECONDS = 600
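For orientation, a minimal sketch of how a project might configure these new settings; the values and the dotted path below are purely illustrative, not part of the commit::

    # settings.py -- illustrative only; the dotted path is hypothetical.
    CACHE_BACKEND = 'locmem://'
    CACHE_KEY_PREFIX = 'mysite'   # site-wide prefix prepended to every cache key
    CACHE_VERSION = 2             # default version folded into every cache key
    CACHE_KEY_FUNCTION = 'myproject.cache_utils.make_key'  # optional custom composer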
@@ -67,18 +67,30 @@ def parse_backend_uri(backend_uri):

     return scheme, host, params

-def get_cache(backend_uri):
+def get_cache(backend_uri, key_prefix=None, version=None, key_func=None):
+    if key_prefix is None:
+        key_prefix = settings.CACHE_KEY_PREFIX
+    if version is None:
+        version = settings.CACHE_VERSION
+    if key_func is None:
+        key_func = settings.CACHE_KEY_FUNCTION
+
+    if key_func is not None and not callable(key_func):
+        key_func_module_path, key_func_name = key_func.rsplit('.', 1)
+        key_func_module = importlib.import_module(key_func_module_path)
+        key_func = getattr(key_func_module, key_func_name)
+
     scheme, host, params = parse_backend_uri(backend_uri)
     if scheme in BACKENDS:
         name = 'django.core.cache.backends.%s' % BACKENDS[scheme]
     else:
         name = scheme
     module = importlib.import_module(name)
-    return module.CacheClass(host, params)
+    return module.CacheClass(host, params, key_prefix=key_prefix, version=version, key_func=key_func)

 cache = get_cache(settings.CACHE_BACKEND)

-# Some caches -- pythont-memcached in particular -- need to do a cleanup at the
+# Some caches -- python-memcached in particular -- need to do a cleanup at the
 # end of a request cycle. If the cache provides a close() method, wire it up
 # here.
 if hasattr(cache, 'close'):
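A brief usage sketch of the extended ``get_cache()`` signature; the prefix, version and key function shown here are illustrative assumptions, only the three-argument key-function signature is required by the change above::

    from django.core.cache import get_cache

    def my_key_func(key, key_prefix, version):
        # hypothetical composer with the required (key, key_prefix, version) signature
        return '%s!%s!%s' % (key_prefix, version, key)

    staging_cache = get_cache('locmem://', key_prefix='staging', version=2,
                              key_func=my_key_func)
    staging_cache.set('greeting', 'hello')
    assert staging_cache.get('greeting') == 'hello'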
@@ -3,6 +3,7 @@
 import warnings

 from django.core.exceptions import ImproperlyConfigured, DjangoRuntimeWarning
+from django.utils.encoding import smart_str

 class InvalidCacheBackendError(ImproperlyConfigured):
     pass
@@ -13,8 +14,17 @@ class CacheKeyWarning(DjangoRuntimeWarning):
 # Memcached does not accept keys longer than this.
 MEMCACHE_MAX_KEY_LENGTH = 250

+def default_key_func(key, key_prefix, version):
+    """Default function to generate keys.
+
+    Constructs the key used by all other methods. By default it prepends
+    the `key_prefix'. CACHE_KEY_FUNCTION can be used to specify an alternate
+    function with custom key making behavior.
+    """
+    return ':'.join([key_prefix, str(version), smart_str(key)])
+
 class BaseCache(object):
-    def __init__(self, params):
+    def __init__(self, params, key_prefix='', version=1, key_func=None):
         timeout = params.get('timeout', 300)
         try:
             timeout = int(timeout)
@@ -34,7 +44,25 @@ class BaseCache(object):
         except (ValueError, TypeError):
             self._cull_frequency = 3

-    def add(self, key, value, timeout=None):
+        self.key_prefix = smart_str(key_prefix)
+        self.version = version
+        self.key_func = key_func or default_key_func
+
+    def make_key(self, key, version=None):
+        """Constructs the key used by all other methods. By default it
+        uses the key_func to generate a key (which, by default,
+        prepends the `key_prefix' and 'version'). A different key
+        function can be provided at the time of cache construction;
+        alternatively, you can subclass the cache backend to provide
+        custom key making behavior.
+        """
+        if version is None:
+            version = self.version
+
+        new_key = self.key_func(key, self.key_prefix, version)
+        return new_key
+
+    def add(self, key, value, timeout=None, version=None):
         """
         Set a value in the cache if the key does not already exist. If
         timeout is given, that timeout will be used for the key; otherwise
@@ -44,27 +72,27 @@ class BaseCache(object):
         """
         raise NotImplementedError

-    def get(self, key, default=None):
+    def get(self, key, default=None, version=None):
         """
         Fetch a given key from the cache. If the key does not exist, return
         default, which itself defaults to None.
         """
         raise NotImplementedError

-    def set(self, key, value, timeout=None):
+    def set(self, key, value, timeout=None, version=None):
         """
         Set a value in the cache. If timeout is given, that timeout will be
         used for the key; otherwise the default cache timeout will be used.
         """
         raise NotImplementedError

-    def delete(self, key):
+    def delete(self, key, version=None):
         """
         Delete a key from the cache, failing silently.
         """
         raise NotImplementedError

-    def get_many(self, keys):
+    def get_many(self, keys, version=None):
         """
         Fetch a bunch of keys from the cache. For certain backends (memcached,
         pgsql) this can be *much* faster when fetching multiple values.
@@ -74,34 +102,35 @@ class BaseCache(object):
         """
         d = {}
         for k in keys:
-            val = self.get(k)
+            val = self.get(k, version=version)
             if val is not None:
                 d[k] = val
         return d

-    def has_key(self, key):
+    def has_key(self, key, version=None):
         """
         Returns True if the key is in the cache and has not expired.
         """
-        return self.get(key) is not None
+        return self.get(key, version=version) is not None

-    def incr(self, key, delta=1):
+    def incr(self, key, delta=1, version=None):
         """
         Add delta to value in the cache. If the key does not exist, raise a
         ValueError exception.
         """
-        if key not in self:
+        value = self.get(key, version=version)
+        if value is None:
             raise ValueError("Key '%s' not found" % key)
-        new_value = self.get(key) + delta
-        self.set(key, new_value)
+        new_value = value + delta
+        self.set(key, new_value, version=version)
         return new_value

-    def decr(self, key, delta=1):
+    def decr(self, key, delta=1, version=None):
         """
         Subtract delta from value in the cache. If the key does not exist, raise
         a ValueError exception.
         """
-        return self.incr(key, -delta)
+        return self.incr(key, -delta, version=version)

     def __contains__(self, key):
         """
@@ -112,7 +141,7 @@ class BaseCache(object):
         # if a subclass overrides it.
         return self.has_key(key)

-    def set_many(self, data, timeout=None):
+    def set_many(self, data, timeout=None, version=None):
         """
         Set a bunch of values in the cache at once from a dict of key/value
         pairs. For certain backends (memcached), this is much more efficient
@@ -122,16 +151,16 @@ class BaseCache(object):
         the default cache timeout will be used.
         """
         for key, value in data.items():
-            self.set(key, value, timeout)
+            self.set(key, value, timeout=timeout, version=version)

-    def delete_many(self, keys):
+    def delete_many(self, keys, version=None):
         """
         Set a bunch of values in the cache at once. For certain backends
         (memcached), this is much more efficient than calling delete() multiple
         times.
         """
         for key in keys:
-            self.delete(key)
+            self.delete(key, version=version)

     def clear(self):
         """Remove *all* values from the cache at once."""
@@ -154,3 +183,23 @@ class BaseCache(object):
                 'errors if used with memcached: %r' % key,
                 CacheKeyWarning)

+    def incr_version(self, key, delta=1, version=None):
+        """Adds delta to the cache version for the supplied key. Returns the
+        new version.
+        """
+        if version is None:
+            version = self.version
+
+        value = self.get(key, version=version)
+        if value is None:
+            raise ValueError("Key '%s' not found" % key)
+
+        self.set(key, value, version=version+delta)
+        self.delete(key, version=version)
+        return version+delta
+
+    def decr_version(self, key, delta=1, version=None):
+        """Subtracts delta from the cache version for the supplied key. Returns
+        the new version.
+        """
+        return self.incr_version(key, -delta, version)
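To make the key composition in ``BaseCache.make_key()`` concrete, here is a small standalone sketch of the same joining rule used by ``default_key_func()`` above (plain Python, independent of Django)::

    def default_key_func(key, key_prefix, version):
        # prefix:version:key, the same joining rule shown in the diff
        return ':'.join([key_prefix, str(version), str(key)])

    assert default_key_func('greeting', 'mysite', 2) == 'mysite:2:greeting'

    # incr_version() then copies the value from 'mysite:2:greeting' to
    # 'mysite:3:greeting' and deletes the old entry, so callers still reading
    # the old version simply stop seeing the key.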
@@ -26,8 +26,8 @@ class Options(object):
         self.proxy = False

 class BaseDatabaseCacheClass(BaseCache):
-    def __init__(self, table, params):
-        BaseCache.__init__(self, params)
+    def __init__(self, table, params, key_prefix='', version=1, key_func=None):
+        BaseCache.__init__(self, params, key_prefix, version, key_func)
         self._table = table

 class CacheEntry(object):
@@ -35,7 +35,8 @@ class BaseDatabaseCacheClass(BaseCache):
         self.cache_model_class = CacheEntry

 class CacheClass(BaseDatabaseCacheClass):
-    def get(self, key, default=None):
+    def get(self, key, default=None, version=None):
+        key = self.make_key(key, version=version)
         self.validate_key(key)
         db = router.db_for_read(self.cache_model_class)
         table = connections[db].ops.quote_name(self._table)
@@ -55,11 +56,13 @@ class CacheClass(BaseDatabaseCacheClass):
             value = connections[db].ops.process_clob(row[1])
             return pickle.loads(base64.decodestring(value))

-    def set(self, key, value, timeout=None):
+    def set(self, key, value, timeout=None, version=None):
+        key = self.make_key(key, version=version)
         self.validate_key(key)
         self._base_set('set', key, value, timeout)

-    def add(self, key, value, timeout=None):
+    def add(self, key, value, timeout=None, version=None):
+        key = self.make_key(key, version=version)
         self.validate_key(key)
         return self._base_set('add', key, value, timeout)
@@ -95,8 +98,10 @@ class CacheClass(BaseDatabaseCacheClass):
         transaction.commit_unless_managed(using=db)
         return True

-    def delete(self, key):
+    def delete(self, key, version=None):
+        key = self.make_key(key, version=version)
         self.validate_key(key)
+
         db = router.db_for_write(self.cache_model_class)
         table = connections[db].ops.quote_name(self._table)
         cursor = connections[db].cursor()
@@ -104,8 +109,10 @@ class CacheClass(BaseDatabaseCacheClass):
         cursor.execute("DELETE FROM %s WHERE cache_key = %%s" % table, [key])
         transaction.commit_unless_managed(using=db)

-    def has_key(self, key):
+    def has_key(self, key, version=None):
+        key = self.make_key(key, version=version)
         self.validate_key(key)
+
         db = router.db_for_read(self.cache_model_class)
         table = connections[db].ops.quote_name(self._table)
         cursor = connections[db].cursor()
@@ -3,34 +3,39 @@
 from django.core.cache.backends.base import BaseCache

 class CacheClass(BaseCache):
-    def __init__(self, *args, **kwargs):
-        pass
+    def __init__(self, host, *args, **kwargs):
+        BaseCache.__init__(self, *args, **kwargs)

-    def add(self, key, *args, **kwargs):
+    def add(self, key, value, timeout=None, version=None):
+        key = self.make_key(key, version=version)
         self.validate_key(key)
         return True

-    def get(self, key, default=None):
+    def get(self, key, default=None, version=None):
+        key = self.make_key(key, version=version)
         self.validate_key(key)
         return default

-    def set(self, key, *args, **kwargs):
+    def set(self, key, value, timeout=None, version=None):
+        key = self.make_key(key, version=version)
         self.validate_key(key)

-    def delete(self, key, *args, **kwargs):
+    def delete(self, key, version=None):
+        key = self.make_key(key, version=version)
         self.validate_key(key)

-    def get_many(self, *args, **kwargs):
+    def get_many(self, keys, version=None):
         return {}

-    def has_key(self, key, *args, **kwargs):
+    def has_key(self, key, version=None):
+        key = self.make_key(key, version=version)
         self.validate_key(key)
         return False

-    def set_many(self, *args, **kwargs):
+    def set_many(self, data, version=None):
         pass

-    def delete_many(self, *args, **kwargs):
+    def delete_many(self, keys, version=None):
         pass

     def clear(self):
@@ -12,22 +12,23 @@ from django.core.cache.backends.base import BaseCache
 from django.utils.hashcompat import md5_constructor

 class CacheClass(BaseCache):
-    def __init__(self, dir, params):
-        BaseCache.__init__(self, params)
+    def __init__(self, dir, params, key_prefix='', version=1, key_func=None):
+        BaseCache.__init__(self, params, key_prefix, version, key_func)
         self._dir = dir
         if not os.path.exists(self._dir):
             self._createdir()

-    def add(self, key, value, timeout=None):
-        self.validate_key(key)
-        if self.has_key(key):
+    def add(self, key, value, timeout=None, version=None):
+        if self.has_key(key, version=version):
             return False

-        self.set(key, value, timeout)
+        self.set(key, value, timeout, version=version)
         return True

-    def get(self, key, default=None):
+    def get(self, key, default=None, version=None):
+        key = self.make_key(key, version=version)
         self.validate_key(key)
+
         fname = self._key_to_file(key)
         try:
             f = open(fname, 'rb')
@@ -44,8 +45,10 @@ class CacheClass(BaseCache):
                 pass
         return default

-    def set(self, key, value, timeout=None):
+    def set(self, key, value, timeout=None, version=None):
+        key = self.make_key(key, version=version)
         self.validate_key(key)
+
         fname = self._key_to_file(key)
         dirname = os.path.dirname(fname)

@@ -68,7 +71,8 @@ class CacheClass(BaseCache):
         except (IOError, OSError):
             pass

-    def delete(self, key):
+    def delete(self, key, version=None):
+        key = self.make_key(key, version=version)
         self.validate_key(key)
         try:
             self._delete(self._key_to_file(key))
@@ -85,7 +89,8 @@ class CacheClass(BaseCache):
         except (IOError, OSError):
             pass

-    def has_key(self, key):
+    def has_key(self, key, version=None):
+        key = self.make_key(key, version=version)
         self.validate_key(key)
         fname = self._key_to_file(key)
         try:
@@ -140,7 +145,7 @@ class CacheClass(BaseCache):
        Thus, a cache key of "foo" gets turnned into a file named
        ``{cache-dir}ac/bd/18db4cc2f85cedef654fccc4a4d8``.
        """
-        path = md5_constructor(key.encode('utf-8')).hexdigest()
+        path = md5_constructor(key).hexdigest()
        path = os.path.join(path[:2], path[2:4], path[4:])
        return os.path.join(self._dir, path)

@@ -10,13 +10,14 @@ from django.core.cache.backends.base import BaseCache
 from django.utils.synch import RWLock

 class CacheClass(BaseCache):
-    def __init__(self, _, params):
-        BaseCache.__init__(self, params)
+    def __init__(self, _, params, key_prefix='', version=1, key_func=None):
+        BaseCache.__init__(self, params, key_prefix, version, key_func)
         self._cache = {}
         self._expire_info = {}
         self._lock = RWLock()

-    def add(self, key, value, timeout=None):
+    def add(self, key, value, timeout=None, version=None):
+        key = self.make_key(key, version=version)
         self.validate_key(key)
         self._lock.writer_enters()
         try:
@@ -31,7 +32,8 @@ class CacheClass(BaseCache):
         finally:
             self._lock.writer_leaves()

-    def get(self, key, default=None):
+    def get(self, key, default=None, version=None):
+        key = self.make_key(key, version=version)
         self.validate_key(key)
         self._lock.reader_enters()
         try:
@@ -64,7 +66,8 @@ class CacheClass(BaseCache):
         self._cache[key] = value
         self._expire_info[key] = time.time() + timeout

-    def set(self, key, value, timeout=None):
+    def set(self, key, value, timeout=None, version=None):
+        key = self.make_key(key, version=version)
         self.validate_key(key)
         self._lock.writer_enters()
         # Python 2.4 doesn't allow combined try-except-finally blocks.
@@ -76,7 +79,8 @@ class CacheClass(BaseCache):
         finally:
             self._lock.writer_leaves()

-    def has_key(self, key):
+    def has_key(self, key, version=None):
+        key = self.make_key(key, version=version)
         self.validate_key(key)
         self._lock.reader_enters()
         try:
@@ -117,7 +121,8 @@ class CacheClass(BaseCache):
         except KeyError:
             pass

-    def delete(self, key):
+    def delete(self, key, version=None):
+        key = self.make_key(key, version=version)
         self.validate_key(key)
         self._lock.writer_enters()
         try:
@@ -3,7 +3,6 @@
 import time

 from django.core.cache.backends.base import BaseCache, InvalidCacheBackendError
-from django.utils.encoding import smart_unicode, smart_str

 try:
     import cmemcache as memcache
@@ -19,8 +18,8 @@ except ImportError:
     raise InvalidCacheBackendError("Memcached cache backend requires either the 'memcache' or 'cmemcache' library")

 class CacheClass(BaseCache):
-    def __init__(self, server, params):
-        BaseCache.__init__(self, params)
+    def __init__(self, server, params, key_prefix='', version=1, key_func=None):
+        BaseCache.__init__(self, params, key_prefix, version, key_func)
         self._cache = memcache.Client(server.split(';'))

     def _get_memcache_timeout(self, timeout):
@@ -39,30 +38,43 @@ class CacheClass(BaseCache):
             timeout += int(time.time())
         return timeout

-    def add(self, key, value, timeout=0):
+    def add(self, key, value, timeout=0, version=None):
+        key = self.make_key(key, version=version)
         if isinstance(value, unicode):
             value = value.encode('utf-8')
-        return self._cache.add(smart_str(key), value, self._get_memcache_timeout(timeout))
+        return self._cache.add(key, value, self._get_memcache_timeout(timeout))

-    def get(self, key, default=None):
-        val = self._cache.get(smart_str(key))
+    def get(self, key, default=None, version=None):
+        key = self.make_key(key, version=version)
+        val = self._cache.get(key)
         if val is None:
             return default
         return val

-    def set(self, key, value, timeout=0):
-        self._cache.set(smart_str(key), value, self._get_memcache_timeout(timeout))
+    def set(self, key, value, timeout=0, version=None):
+        key = self.make_key(key, version=version)
+        self._cache.set(key, value, self._get_memcache_timeout(timeout))

-    def delete(self, key):
-        self._cache.delete(smart_str(key))
+    def delete(self, key, version=None):
+        key = self.make_key(key, version=version)
+        self._cache.delete(key)

-    def get_many(self, keys):
-        return self._cache.get_multi(map(smart_str,keys))
+    def get_many(self, keys, version=None):
+        new_keys = map(lambda x: self.make_key(x, version=version), keys)
+        ret = self._cache.get_multi(new_keys)
+        if ret:
+            _ = {}
+            m = dict(zip(new_keys, keys))
+            for k, v in ret.items():
+                _[m[k]] = v
+            ret = _
+        return ret

     def close(self, **kwargs):
         self._cache.disconnect_all()

-    def incr(self, key, delta=1):
+    def incr(self, key, delta=1, version=None):
+        key = self.make_key(key, version=version)
         try:
             val = self._cache.incr(key, delta)

@@ -76,7 +88,8 @@ class CacheClass(BaseCache):

         return val

-    def decr(self, key, delta=1):
+    def decr(self, key, delta=1, version=None):
+        key = self.make_key(key, version=version)
         try:
             val = self._cache.decr(key, delta)

@@ -89,16 +102,18 @@ class CacheClass(BaseCache):
             raise ValueError("Key '%s' not found" % key)
         return val

-    def set_many(self, data, timeout=0):
+    def set_many(self, data, timeout=0, version=None):
         safe_data = {}
         for key, value in data.items():
+            key = self.make_key(key, version=version)
             if isinstance(value, unicode):
                 value = value.encode('utf-8')
-            safe_data[smart_str(key)] = value
+            safe_data[key] = value
         self._cache.set_multi(safe_data, self._get_memcache_timeout(timeout))

-    def delete_many(self, keys):
-        self._cache.delete_multi(map(smart_str, keys))
+    def delete_many(self, keys, version=None):
+        l = lambda x: self.make_key(x, version=version)
+        self._cache.delete_multi(map(l, keys))

     def clear(self):
         self._cache.flush_all()
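One note on the memcached ``get_many()`` change above: the backend queries memcached with the composed (prefixed and versioned) keys, then maps the results back to the keys the caller passed in. A standalone sketch of that remapping, with no memcached required and purely illustrative key strings::

    def remap(requested_keys, composed_keys, fetched):
        # `fetched` is keyed by composed keys; return a dict keyed by the
        # caller's original keys, mirroring the m = dict(zip(...)) step above.
        back = dict(zip(composed_keys, requested_keys))
        return dict((back[k], v) for k, v in fetched.items())

    composed = ['mysite:1:a', 'mysite:1:b']
    assert remap(['a', 'b'], composed, {'mysite:1:a': 1}) == {'a': 1}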
@@ -136,6 +136,25 @@ Default: ``'locmem://'``

 The cache backend to use. See :doc:`/topics/cache`.

+.. setting:: CACHE_KEY_FUNCTION
+
+CACHE_KEY_FUNCTION
+------------------
+
+Default: ``None``
+
+A string containing a dotted path to a function that defines how to
+compose a prefix, version and key into a final cache key. The default
+implementation is equivalent to the function::
+
+    def make_key(key, key_prefix, version):
+        return ':'.join([key_prefix, str(version), smart_str(key)])
+
+You may use any key function you want, as long as it has the same
+argument signature.
+
+See the :ref:`cache documentation <cache_key_transformation>` for more information.
+
 .. setting:: CACHE_MIDDLEWARE_ANONYMOUS_ONLY

 CACHE_MIDDLEWARE_ANONYMOUS_ONLY
@@ -172,6 +191,30 @@ Default: ``600``
 The default number of seconds to cache a page when the caching middleware or
 ``cache_page()`` decorator is used.

+.. setting:: CACHE_KEY_PREFIX
+
+CACHE_KEY_PREFIX
+----------------
+
+Default: ``''`` (Empty string)
+
+A string that will be automatically included (prepended by default) in
+all cache keys used by the Django server.
+
+See the :ref:`cache documentation <cache_key_prefixing>` for more information.
+
+.. setting:: CACHE_VERSION
+
+CACHE_VERSION
+-------------
+
+Default: ``1``
+
+The default version number for cache keys generated by the Django server.
+
+See the :ref:`cache documentation <cache_versioning>` for more information.
+
+
 .. setting:: CSRF_COOKIE_DOMAIN

 CSRF_COOKIE_DOMAIN
@@ -155,6 +155,9 @@ requests. These include:
   :meth:`~django.test.client.Client.assertNumQueries` -- making it
   easier to test the database activity associated with a view.

+* :ref:`Versioning <cache_versioning>`, :ref:`site-wide prefixing
+  <cache_key_prefixing>` and :ref:`transformation
+  <cache_key_transformation>` have been added to the cache API.

 .. _backwards-incompatible-changes-1.3:

@@ -643,6 +643,101 @@ nonexistent cache key.::
 However, if the backend doesn't natively provide an increment/decrement
 operation, it will be implemented using a two-step retrieve/update.

+.. _cache_key_prefixing:
+
+Cache key prefixing
+-------------------
+
+.. versionadded:: 1.3
+
+If you are sharing a cache instance between servers, or between your
+production and development environments, it's possible for data cached
+by one server to be used by another server. If the format of cached
+data is different between servers, this can lead to some very hard to
+diagnose problems.
+
+To prevent this, Django provides the ability to prefix all cache keys
+used by a server. When a particular cache key is saved or retrieved,
+Django will automatically prefix the cache key with the value of the
+:setting:`CACHE_KEY_PREFIX` setting.
+
+By ensuring each Django instance has a different
+:setting:`CACHE_KEY_PREFIX`, you can ensure that there will be no
+collisions in cache values.
+
+.. _cache_versioning:
+
+Cache versioning
+----------------
+
+.. versionadded:: 1.3
+
+When you change running code that uses cached values, you may need to
+purge any existing cached values. The easiest way to do this is to
+flush the entire cache, but this can lead to the loss of cache values
+that are still valid and useful.
+
+Django provides a better way to target individual cache values.
+Django's cache framework has a system-wide version identifier,
+specified using the :setting:`CACHE_VERSION` setting. The value of
+this setting is automatically combined with the cache prefix and the
+user-provided cache key to obtain the final cache key.
+
+By default, any key request will automatically include the site
+default cache key version. However, the primitive cache functions all
+include a ``version`` argument, so you can specify a particular cache
+key version to set or get. For example::
+
+    # Set version 2 of a cache key
+    >>> cache.set('my_key', 'hello world!', version=2)
+    # Get the default version (assuming version=1)
+    >>> cache.get('my_key')
+    None
+    # Get version 2 of the same key
+    >>> cache.get('my_key', version=2)
+    'hello world!'
+
+The version of a specific key can be incremented and decremented using
+the :func:`incr_version()` and :func:`decr_version()` methods. This
+enables specific keys to be bumped to a new version, leaving other
+keys unaffected. Continuing our previous example::
+
+    # Increment the version of 'my_key'
+    >>> cache.incr_version('my_key')
+    # The default version still isn't available
+    >>> cache.get('my_key')
+    None
+    # Version 2 isn't available, either
+    >>> cache.get('my_key', version=2)
+    None
+    # But version 3 *is* available
+    >>> cache.get('my_key', version=3)
+    'hello world!'
+
+.. _cache_key_transformation:
+
+Cache key transformation
+------------------------
+
+.. versionadded:: 1.3
+
+As described in the previous two sections, the cache key provided by a
+user is not used verbatim -- it is combined with the cache prefix and
+key version to provide a final cache key. By default, the three parts
+are joined using colons to produce a final string::
+
+    def make_key(key, key_prefix, version):
+        return ':'.join([key_prefix, str(version), smart_str(key)])
+
+If you want to combine the parts in different ways, or apply other
+processing to the final key (e.g., taking a hash digest of the key
+parts), you can provide a custom key function.
+
+The setting :setting:`CACHE_KEY_FUNCTION` specifies a dotted path to
+a function matching the prototype of :func:`make_key()` above. If
+provided, this custom key function will be used instead of the default
+key combining function.
+
 Cache key warnings
 ------------------

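Following on from the key transformation section above, a sketch of one possible custom key function that hashes the user-supplied part of the key; the module path is a hypothetical example, only the three-argument signature is fixed by the API::

    # e.g. myproject/cache_utils.py (hypothetical location)
    from django.utils.hashcompat import md5_constructor

    def hashed_key(key, key_prefix, version):
        # keep the prefix and version readable, hash only the raw key
        return '%s:%s:%s' % (key_prefix, version,
                             md5_constructor(str(key)).hexdigest())

    # and in settings:
    # CACHE_KEY_FUNCTION = 'myproject.cache_utils.hashed_key'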
@@ -145,11 +145,21 @@ class DummyCacheTests(unittest.TestCase):
         "clear does nothing for the dummy cache backend"
         self.cache.clear()

+    def test_incr_version(self):
+        "Dummy cache versions can't be incremented"
+        self.cache.set('answer', 42)
+        self.assertRaises(ValueError, self.cache.incr_version, 'answer')
+        self.assertRaises(ValueError, self.cache.incr_version, 'does_not_exist')
+
+    def test_decr_version(self):
+        "Dummy cache versions can't be decremented"
+        self.cache.set('answer', 42)
+        self.assertRaises(ValueError, self.cache.decr_version, 'answer')
+        self.assertRaises(ValueError, self.cache.decr_version, 'does_not_exist')
+
 class BaseCacheTests(object):
     # A common set of tests to apply to all cache backends
     def tearDown(self):
         self.cache.clear()

     def test_simple(self):
         # Simple cache set/get works
@@ -163,6 +173,18 @@ class BaseCacheTests(object):
         self.assertEqual(result, False)
         self.assertEqual(self.cache.get("addkey1"), "value")

+    def test_prefix(self):
+        # Test for same cache key conflicts between shared backend
+        self.cache.set('somekey', 'value')
+
+        # should not be set in the prefixed cache
+        self.assertFalse(self.prefix_cache.has_key('somekey'))
+
+        self.prefix_cache.set('somekey', 'value2')
+
+        self.assertEqual(self.cache.get('somekey'), 'value')
+        self.assertEqual(self.prefix_cache.get('somekey'), 'value2')
+
     def test_non_existent(self):
         # Non-existent cache keys return as None/default
         # get with non-existent keys
@@ -376,6 +398,13 @@ class BaseCacheTests(object):
        with more liberal key rules. Refs #6447.

        """
+        # mimic custom ``make_key`` method being defined since the default will
+        # never show the below warnings
+        def func(key, *args):
+            return key
+
+        old_func = self.cache.key_func
+        self.cache.key_func = func
         # On Python 2.6+ we could use the catch_warnings context
         # manager to test this warning nicely. Since we can't do that
         # yet, the cleanest option is to temporarily ask for
@@ -390,6 +419,285 @@ class BaseCacheTests(object):
             self.assertRaises(CacheKeyWarning, self.cache.set, 'a' * 251, 'value')
         finally:
             restore_warnings_state(_warnings_state)
+            self.cache.key_func = old_func
+
+    def test_cache_versioning_get_set(self):
+        # set, using default version = 1
+        self.cache.set('answer1', 42)
+        self.assertEqual(self.cache.get('answer1'), 42)
+        self.assertEqual(self.cache.get('answer1', version=1), 42)
+        self.assertEqual(self.cache.get('answer1', version=2), None)
+
+        self.assertEqual(self.v2_cache.get('answer1'), None)
+        # print '---'
+        # print 'c1',self.cache._cache
+        # print 'v2',self.v2_cache._cache
+        self.assertEqual(self.v2_cache.get('answer1', version=1), 42)
+        self.assertEqual(self.v2_cache.get('answer1', version=2), None)
+
+        # set, default version = 1, but manually override version = 2
+        self.cache.set('answer2', 42, version=2)
+        self.assertEqual(self.cache.get('answer2'), None)
+        self.assertEqual(self.cache.get('answer2', version=1), None)
+        self.assertEqual(self.cache.get('answer2', version=2), 42)
+
+        self.assertEqual(self.v2_cache.get('answer2'), 42)
+        self.assertEqual(self.v2_cache.get('answer2', version=1), None)
+        self.assertEqual(self.v2_cache.get('answer2', version=2), 42)
+
+        # v2 set, using default version = 2
+        self.v2_cache.set('answer3', 42)
+        self.assertEqual(self.cache.get('answer3'), None)
+        self.assertEqual(self.cache.get('answer3', version=1), None)
+        self.assertEqual(self.cache.get('answer3', version=2), 42)
+
+        self.assertEqual(self.v2_cache.get('answer3'), 42)
+        self.assertEqual(self.v2_cache.get('answer3', version=1), None)
+        self.assertEqual(self.v2_cache.get('answer3', version=2), 42)
+
+        # v2 set, default version = 2, but manually override version = 1
+        self.v2_cache.set('answer4', 42, version=1)
+        self.assertEqual(self.cache.get('answer4'), 42)
+        self.assertEqual(self.cache.get('answer4', version=1), 42)
+        self.assertEqual(self.cache.get('answer4', version=2), None)
+
+        self.assertEqual(self.v2_cache.get('answer4'), None)
+        self.assertEqual(self.v2_cache.get('answer4', version=1), 42)
+        self.assertEqual(self.v2_cache.get('answer4', version=2), None)
+
+    def test_cache_versioning_add(self):
+
+        # add, default version = 1, but manually override version = 2
+        self.cache.add('answer1', 42, version=2)
+        self.assertEqual(self.cache.get('answer1', version=1), None)
+        self.assertEqual(self.cache.get('answer1', version=2), 42)
+
+        self.cache.add('answer1', 37, version=2)
+        self.assertEqual(self.cache.get('answer1', version=1), None)
+        self.assertEqual(self.cache.get('answer1', version=2), 42)
+
+        self.cache.add('answer1', 37, version=1)
+        self.assertEqual(self.cache.get('answer1', version=1), 37)
+        self.assertEqual(self.cache.get('answer1', version=2), 42)
+
+        # v2 add, using default version = 2
+        self.v2_cache.add('answer2', 42)
+        self.assertEqual(self.cache.get('answer2', version=1), None)
+        self.assertEqual(self.cache.get('answer2', version=2), 42)
+
+        self.v2_cache.add('answer2', 37)
+        self.assertEqual(self.cache.get('answer2', version=1), None)
+        self.assertEqual(self.cache.get('answer2', version=2), 42)
+
+        self.v2_cache.add('answer2', 37, version=1)
+        self.assertEqual(self.cache.get('answer2', version=1), 37)
+        self.assertEqual(self.cache.get('answer2', version=2), 42)
+
+        # v2 add, default version = 2, but manually override version = 1
+        self.v2_cache.add('answer3', 42, version=1)
+        self.assertEqual(self.cache.get('answer3', version=1), 42)
+        self.assertEqual(self.cache.get('answer3', version=2), None)
+
+        self.v2_cache.add('answer3', 37, version=1)
+        self.assertEqual(self.cache.get('answer3', version=1), 42)
+        self.assertEqual(self.cache.get('answer3', version=2), None)
+
+        self.v2_cache.add('answer3', 37)
+        self.assertEqual(self.cache.get('answer3', version=1), 42)
+        self.assertEqual(self.cache.get('answer3', version=2), 37)
+
+    def test_cache_versioning_has_key(self):
+        self.cache.set('answer1', 42)
+
+        # has_key
+        self.assertTrue(self.cache.has_key('answer1'))
+        self.assertTrue(self.cache.has_key('answer1', version=1))
+        self.assertFalse(self.cache.has_key('answer1', version=2))
+
+        self.assertFalse(self.v2_cache.has_key('answer1'))
+        self.assertTrue(self.v2_cache.has_key('answer1', version=1))
+        self.assertFalse(self.v2_cache.has_key('answer1', version=2))
+
+    def test_cache_versioning_delete(self):
+        self.cache.set('answer1', 37, version=1)
+        self.cache.set('answer1', 42, version=2)
+        self.cache.delete('answer1')
+        self.assertEqual(self.cache.get('answer1', version=1), None)
+        self.assertEqual(self.cache.get('answer1', version=2), 42)
+
+        self.cache.set('answer2', 37, version=1)
+        self.cache.set('answer2', 42, version=2)
+        self.cache.delete('answer2', version=2)
+        self.assertEqual(self.cache.get('answer2', version=1), 37)
+        self.assertEqual(self.cache.get('answer2', version=2), None)
+
+        self.cache.set('answer3', 37, version=1)
+        self.cache.set('answer3', 42, version=2)
+        self.v2_cache.delete('answer3')
+        self.assertEqual(self.cache.get('answer3', version=1), 37)
+        self.assertEqual(self.cache.get('answer3', version=2), None)
+
+        self.cache.set('answer4', 37, version=1)
+        self.cache.set('answer4', 42, version=2)
+        self.v2_cache.delete('answer4', version=1)
+        self.assertEqual(self.cache.get('answer4', version=1), None)
+        self.assertEqual(self.cache.get('answer4', version=2), 42)
+
+    def test_cache_versioning_incr_decr(self):
+        self.cache.set('answer1', 37, version=1)
+        self.cache.set('answer1', 42, version=2)
+        self.cache.incr('answer1')
+        self.assertEqual(self.cache.get('answer1', version=1), 38)
+        self.assertEqual(self.cache.get('answer1', version=2), 42)
+        self.cache.decr('answer1')
+        self.assertEqual(self.cache.get('answer1', version=1), 37)
+        self.assertEqual(self.cache.get('answer1', version=2), 42)
+
+        self.cache.set('answer2', 37, version=1)
+        self.cache.set('answer2', 42, version=2)
+        self.cache.incr('answer2', version=2)
+        self.assertEqual(self.cache.get('answer2', version=1), 37)
+        self.assertEqual(self.cache.get('answer2', version=2), 43)
+        self.cache.decr('answer2', version=2)
+        self.assertEqual(self.cache.get('answer2', version=1), 37)
+        self.assertEqual(self.cache.get('answer2', version=2), 42)
+
+        self.cache.set('answer3', 37, version=1)
+        self.cache.set('answer3', 42, version=2)
+        self.v2_cache.incr('answer3')
+        self.assertEqual(self.cache.get('answer3', version=1), 37)
+        self.assertEqual(self.cache.get('answer3', version=2), 43)
+        self.v2_cache.decr('answer3')
+        self.assertEqual(self.cache.get('answer3', version=1), 37)
+        self.assertEqual(self.cache.get('answer3', version=2), 42)
+
+        self.cache.set('answer4', 37, version=1)
+        self.cache.set('answer4', 42, version=2)
+        self.v2_cache.incr('answer4', version=1)
+        self.assertEqual(self.cache.get('answer4', version=1), 38)
+        self.assertEqual(self.cache.get('answer4', version=2), 42)
+        self.v2_cache.decr('answer4', version=1)
+        self.assertEqual(self.cache.get('answer4', version=1), 37)
+        self.assertEqual(self.cache.get('answer4', version=2), 42)
+
+    def test_cache_versioning_get_set_many(self):
+        # set, using default version = 1
+        self.cache.set_many({'ford1': 37, 'arthur1': 42})
+        self.assertEqual(self.cache.get_many(['ford1','arthur1']),
+                         {'ford1': 37, 'arthur1': 42})
+        self.assertEqual(self.cache.get_many(['ford1','arthur1'], version=1),
+                         {'ford1': 37, 'arthur1': 42})
+        self.assertEqual(self.cache.get_many(['ford1','arthur1'], version=2), {})
+
+        self.assertEqual(self.v2_cache.get_many(['ford1','arthur1']), {})
+        self.assertEqual(self.v2_cache.get_many(['ford1','arthur1'], version=1),
+                         {'ford1': 37, 'arthur1': 42})
+        self.assertEqual(self.v2_cache.get_many(['ford1','arthur1'], version=2), {})
+
+        # set, default version = 1, but manually override version = 2
+        self.cache.set_many({'ford2': 37, 'arthur2': 42}, version=2)
+        self.assertEqual(self.cache.get_many(['ford2','arthur2']), {})
+        self.assertEqual(self.cache.get_many(['ford2','arthur2'], version=1), {})
+        self.assertEqual(self.cache.get_many(['ford2','arthur2'], version=2),
+                         {'ford2': 37, 'arthur2': 42})
+
+        self.assertEqual(self.v2_cache.get_many(['ford2','arthur2']),
+                         {'ford2': 37, 'arthur2': 42})
+        self.assertEqual(self.v2_cache.get_many(['ford2','arthur2'], version=1), {})
+        self.assertEqual(self.v2_cache.get_many(['ford2','arthur2'], version=2),
+                         {'ford2': 37, 'arthur2': 42})
+
+        # v2 set, using default version = 2
+        self.v2_cache.set_many({'ford3': 37, 'arthur3': 42})
+        self.assertEqual(self.cache.get_many(['ford3','arthur3']), {})
+        self.assertEqual(self.cache.get_many(['ford3','arthur3'], version=1), {})
+        self.assertEqual(self.cache.get_many(['ford3','arthur3'], version=2),
+                         {'ford3': 37, 'arthur3': 42})
+
+        self.assertEqual(self.v2_cache.get_many(['ford3','arthur3']),
+                         {'ford3': 37, 'arthur3': 42})
+        self.assertEqual(self.v2_cache.get_many(['ford3','arthur3'], version=1), {})
+        self.assertEqual(self.v2_cache.get_many(['ford3','arthur3'], version=2),
+                         {'ford3': 37, 'arthur3': 42})
+
+        # v2 set, default version = 2, but manually override version = 1
+        self.v2_cache.set_many({'ford4': 37, 'arthur4': 42}, version=1)
+        self.assertEqual(self.cache.get_many(['ford4','arthur4']),
+                         {'ford4': 37, 'arthur4': 42})
+        self.assertEqual(self.cache.get_many(['ford4','arthur4'], version=1),
+                         {'ford4': 37, 'arthur4': 42})
+        self.assertEqual(self.cache.get_many(['ford4','arthur4'], version=2), {})
+
+        self.assertEqual(self.v2_cache.get_many(['ford4','arthur4']), {})
+        self.assertEqual(self.v2_cache.get_many(['ford4','arthur4'], version=1),
+                         {'ford4': 37, 'arthur4': 42})
+        self.assertEqual(self.v2_cache.get_many(['ford4','arthur4'], version=2), {})
+
+    def test_incr_version(self):
+        self.cache.set('answer', 42, version=2)
+        self.assertEqual(self.cache.get('answer'), None)
+        self.assertEqual(self.cache.get('answer', version=1), None)
+        self.assertEqual(self.cache.get('answer', version=2), 42)
+        self.assertEqual(self.cache.get('answer', version=3), None)
+
+        self.assertEqual(self.cache.incr_version('answer', version=2), 3)
+        self.assertEqual(self.cache.get('answer'), None)
+        self.assertEqual(self.cache.get('answer', version=1), None)
+        self.assertEqual(self.cache.get('answer', version=2), None)
+        self.assertEqual(self.cache.get('answer', version=3), 42)
+
+        self.v2_cache.set('answer2', 42)
+        self.assertEqual(self.v2_cache.get('answer2'), 42)
+        self.assertEqual(self.v2_cache.get('answer2', version=1), None)
+        self.assertEqual(self.v2_cache.get('answer2', version=2), 42)
+        self.assertEqual(self.v2_cache.get('answer2', version=3), None)
+
+        self.assertEqual(self.v2_cache.incr_version('answer2'), 3)
+        self.assertEqual(self.v2_cache.get('answer2'), None)
+        self.assertEqual(self.v2_cache.get('answer2', version=1), None)
+        self.assertEqual(self.v2_cache.get('answer2', version=2), None)
+        self.assertEqual(self.v2_cache.get('answer2', version=3), 42)
+
+        self.assertRaises(ValueError, self.cache.incr_version, 'does_not_exist')
+
+    def test_decr_version(self):
+        self.cache.set('answer', 42, version=2)
+        self.assertEqual(self.cache.get('answer'), None)
+        self.assertEqual(self.cache.get('answer', version=1), None)
+        self.assertEqual(self.cache.get('answer', version=2), 42)
+
+        self.assertEqual(self.cache.decr_version('answer', version=2), 1)
+        self.assertEqual(self.cache.get('answer'), 42)
+        self.assertEqual(self.cache.get('answer', version=1), 42)
+        self.assertEqual(self.cache.get('answer', version=2), None)
+
+        self.v2_cache.set('answer2', 42)
+        self.assertEqual(self.v2_cache.get('answer2'), 42)
+        self.assertEqual(self.v2_cache.get('answer2', version=1), None)
+        self.assertEqual(self.v2_cache.get('answer2', version=2), 42)
+
+        self.assertEqual(self.v2_cache.decr_version('answer2'), 1)
+        self.assertEqual(self.v2_cache.get('answer2'), None)
+        self.assertEqual(self.v2_cache.get('answer2', version=1), 42)
+        self.assertEqual(self.v2_cache.get('answer2', version=2), None)
+
+        self.assertRaises(ValueError, self.cache.decr_version, 'does_not_exist', version=2)
+
+    def test_custom_key_func(self):
+        # Two caches with different key functions aren't visible to each other
+        self.cache.set('answer1', 42)
+        self.assertEqual(self.cache.get('answer1'), 42)
+        self.assertEqual(self.custom_key_cache.get('answer1'), None)
+        self.assertEqual(self.custom_key_cache2.get('answer1'), None)
+
+        self.custom_key_cache.set('answer2', 42)
+        self.assertEqual(self.cache.get('answer2'), None)
+        self.assertEqual(self.custom_key_cache.get('answer2'), 42)
+        self.assertEqual(self.custom_key_cache2.get('answer2'), 42)
+
+def custom_key_func(key, key_prefix, version):
+    "A customized cache key function"
+    return 'CUSTOM-' + '-'.join([key_prefix, str(version), key])
+
 class DBCacheTests(unittest.TestCase, BaseCacheTests):
     def setUp(self):
@@ -397,6 +705,10 @@ class DBCacheTests(unittest.TestCase, BaseCacheTests):
         self._table_name = 'test cache table'
         management.call_command('createcachetable', self._table_name, verbosity=0, interactive=False)
         self.cache = get_cache('db://%s?max_entries=30' % self._table_name)
+        self.prefix_cache = get_cache('db://%s' % self._table_name, key_prefix='cacheprefix')
+        self.v2_cache = get_cache('db://%s' % self._table_name, version=2)
+        self.custom_key_cache = get_cache('db://%s' % self._table_name, key_func=custom_key_func)
+        self.custom_key_cache2 = get_cache('db://%s' % self._table_name, key_func='regressiontests.cache.tests.custom_key_func')

     def tearDown(self):
         from django.db import connection
@@ -413,6 +725,24 @@ class DBCacheTests(unittest.TestCase, BaseCacheTests):
 class LocMemCacheTests(unittest.TestCase, BaseCacheTests):
     def setUp(self):
         self.cache = get_cache('locmem://?max_entries=30')
+        self.prefix_cache = get_cache('locmem://', key_prefix='cacheprefix')
+        self.v2_cache = get_cache('locmem://', version=2)
+        self.custom_key_cache = get_cache('locmem://?max_entries=30', key_func=custom_key_func)
+        self.custom_key_cache2 = get_cache('locmem://?max_entries=30', key_func='regressiontests.cache.tests.custom_key_func')
+
+        # LocMem requires a hack to make the other caches
+        # share a data store with the 'normal' cache.
+        self.prefix_cache._cache = self.cache._cache
+        self.prefix_cache._expire_info = self.cache._expire_info
+
+        self.v2_cache._cache = self.cache._cache
+        self.v2_cache._expire_info = self.cache._expire_info
+
+        self.custom_key_cache._cache = self.cache._cache
+        self.custom_key_cache._expire_info = self.cache._expire_info
+
+        self.custom_key_cache2._cache = self.cache._cache
+        self.custom_key_cache2._expire_info = self.cache._expire_info

     def test_cull(self):
         self.perform_cull_test(50, 29)
@@ -428,6 +758,13 @@ class LocMemCacheTests(unittest.TestCase, BaseCacheTests):
 class MemcachedCacheTests(unittest.TestCase, BaseCacheTests):
     def setUp(self):
         self.cache = get_cache(settings.CACHE_BACKEND)
+        self.prefix_cache = get_cache(settings.CACHE_BACKEND, key_prefix='cacheprefix')
+        self.v2_cache = get_cache(settings.CACHE_BACKEND, version=2)
+        self.custom_key_cache = get_cache(settings.CACHE_BACKEND, key_func=custom_key_func)
+        self.custom_key_cache2 = get_cache(settings.CACHE_BACKEND, key_func='regressiontests.cache.tests.custom_key_func')
+
+    def tearDown(self):
+        self.cache.clear()

     def test_invalid_keys(self):
         """
@@ -443,6 +780,7 @@ class MemcachedCacheTests(unittest.TestCase, BaseCacheTests):
         self.assertRaises(Exception, self.cache.set, 'key with spaces', 'value')
         # memcached limits key length to 250
         self.assertRaises(Exception, self.cache.set, 'a' * 251, 'value')
+
 MemcachedCacheTests = unittest.skipUnless(settings.CACHE_BACKEND.startswith('memcached://'), "memcached not available")(MemcachedCacheTests)

 class FileBasedCacheTests(unittest.TestCase, BaseCacheTests):
@@ -452,11 +790,19 @@ class FileBasedCacheTests(unittest.TestCase, BaseCacheTests):
     def setUp(self):
         self.dirname = tempfile.mkdtemp()
         self.cache = get_cache('file://%s?max_entries=30' % self.dirname)
+        self.prefix_cache = get_cache('file://%s' % self.dirname, key_prefix='cacheprefix')
+        self.v2_cache = get_cache('file://%s' % self.dirname, version=2)
+        self.custom_key_cache = get_cache('file://%s' % self.dirname, key_func=custom_key_func)
+        self.custom_key_cache2 = get_cache('file://%s' % self.dirname, key_func='regressiontests.cache.tests.custom_key_func')

     def tearDown(self):
         self.cache.clear()

     def test_hashing(self):
         """Test that keys are hashed into subdirectories correctly"""
         self.cache.set("foo", "bar")
-        keyhash = md5_constructor("foo").hexdigest()
+        key = self.cache.make_key("foo")
+        keyhash = md5_constructor(key).hexdigest()
         keypath = os.path.join(self.dirname, keyhash[:2], keyhash[2:4], keyhash[4:])
         self.assert_(os.path.exists(keypath))
@@ -465,7 +811,8 @@ class FileBasedCacheTests(unittest.TestCase, BaseCacheTests):
        Make sure that the created subdirectories are correctly removed when empty.
        """
        self.cache.set("foo", "bar")
-        keyhash = md5_constructor("foo").hexdigest()
+        key = self.cache.make_key("foo")
+        keyhash = md5_constructor(key).hexdigest()
        keypath = os.path.join(self.dirname, keyhash[:2], keyhash[2:4], keyhash[4:])
        self.assert_(os.path.exists(keypath))

@@ -475,7 +822,7 @@ class FileBasedCacheTests(unittest.TestCase, BaseCacheTests):
         self.assert_(not os.path.exists(os.path.dirname(os.path.dirname(keypath))))

     def test_cull(self):
-        self.perform_cull_test(50, 28)
+        self.perform_cull_test(50, 29)

 class CustomCacheKeyValidationTests(unittest.TestCase):
     """
@@ -498,16 +845,16 @@ class CacheUtils(unittest.TestCase):

     def setUp(self):
         self.path = '/cache/test/'
-        self.old_settings_key_prefix = settings.CACHE_MIDDLEWARE_KEY_PREFIX
-        self.old_middleware_seconds = settings.CACHE_MIDDLEWARE_SECONDS
+        self.old_cache_middleware_key_prefix = settings.CACHE_MIDDLEWARE_KEY_PREFIX
+        self.old_cache_middleware_seconds = settings.CACHE_MIDDLEWARE_SECONDS
         self.orig_use_i18n = settings.USE_I18N
         settings.CACHE_MIDDLEWARE_KEY_PREFIX = 'settingsprefix'
         settings.CACHE_MIDDLEWARE_SECONDS = 1
         settings.USE_I18N = False

     def tearDown(self):
-        settings.CACHE_MIDDLEWARE_KEY_PREFIX = self.old_settings_key_prefix
-        settings.CACHE_MIDDLEWARE_SECONDS = self.old_middleware_seconds
+        settings.CACHE_MIDDLEWARE_KEY_PREFIX = self.old_cache_middleware_key_prefix
+        settings.CACHE_MIDDLEWARE_SECONDS = self.old_cache_middleware_seconds
         settings.USE_I18N = self.orig_use_i18n

     def _get_request(self, path, method='GET'):
@@ -561,6 +908,16 @@ class CacheUtils(unittest.TestCase):
         learn_cache_key(request, response)
         self.assertEqual(get_cache_key(request), 'views.decorators.cache.cache_page.settingsprefix.HEAD.a8c87a3d8c44853d7f79474f7ffe4ad5.d41d8cd98f00b204e9800998ecf8427e')

+class PrefixedCacheUtils(CacheUtils):
+    def setUp(self):
+        super(PrefixedCacheUtils, self).setUp()
+        self.old_cache_key_prefix = settings.CACHE_KEY_PREFIX
+        settings.CACHE_KEY_PREFIX = 'cacheprefix'
+
+    def tearDown(self):
+        super(PrefixedCacheUtils, self).tearDown()
+        settings.CACHE_KEY_PREFIX = self.old_cache_key_prefix
+
 class CacheHEADTest(unittest.TestCase):

     def setUp(self):
@@ -714,5 +1071,15 @@ class CacheI18nTest(unittest.TestCase):
         get_cache_data = FetchFromCacheMiddleware().process_request(request)
         self.assertEqual(get_cache_data.content, es_message)

+class PrefixedCacheI18nTest(CacheI18nTest):
+    def setUp(self):
+        super(PrefixedCacheI18nTest, self).setUp()
+        self.old_cache_key_prefix = settings.CACHE_KEY_PREFIX
+        settings.CACHE_KEY_PREFIX = 'cacheprefix'
+
+    def tearDown(self):
+        super(PrefixedCacheI18nTest, self).tearDown()
+        settings.CACHE_KEY_PREFIX = self.old_cache_key_prefix
+
 if __name__ == '__main__':
     unittest.main()