Fixed #11675 -- Added support for the PyLibMC cache library. In order to support this, and clean up some other 1.3 caching additions, this patch also includes some changes to the way caches are defined. This means you can now have multiple caches, in the same way you have multiple databases. A huge thanks to Jacob Burch for the work on the PyLibMC backend, and to Jannis for his work on the cache definition changes.
git-svn-id: http://code.djangoproject.com/svn/django/trunk@15005 bcc190cf-cafb-0310-a4f2-bffc1f526a37
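A rough sketch of the new definition format this introduces (the second alias and all values are illustrative, not taken from the commit itself):

    # settings.py -- hypothetical project configuration
    CACHES = {
        'default': {
            'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
            'LOCATION': '127.0.0.1:11211',
        },
        'filesystem': {
            'BACKEND': 'django.core.cache.backends.filebased.FileBasedCache',
            'LOCATION': '/var/tmp/django_cache',
        },
    }

    # Any named connection can then be retrieved by its alias:
    from django.core.cache import cache, get_cache
    file_cache = get_cache('filesystem')   # 'cache' remains bound to the 'default' alias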
@ -431,14 +431,15 @@ SESSION_FILE_PATH = None # Directory to store ses
|
|||
# CACHE #
|
||||
#########
|
||||
|
||||
# New format
|
||||
CACHES = {
|
||||
}
|
||||
# The cache backend to use. See the docstring in django.core.cache for the
|
||||
# possible values.
|
||||
CACHE_BACKEND = 'locmem://'
|
||||
CACHE_VERSION = 1
|
||||
CACHE_KEY_PREFIX = ''
|
||||
CACHE_KEY_FUNCTION = None
|
||||
CACHE_MIDDLEWARE_KEY_PREFIX = ''
|
||||
CACHE_MIDDLEWARE_SECONDS = 600
|
||||
CACHE_MIDDLEWARE_ALIAS = 'default'
|
||||
|
||||
####################
|
||||
# COMMENTS #
|
||||
|
|
|
@ -1,5 +1,7 @@
|
|||
import os
|
||||
from django.conf import settings
|
||||
from django.core.cache import get_cache
|
||||
from django.core.cache.backends.db import BaseDatabaseCache
|
||||
from django.core.exceptions import ImproperlyConfigured
|
||||
from django.core.management import call_command
|
||||
from django.db.backends.sqlite3.creation import DatabaseCreation
|
||||
|
@ -28,11 +30,12 @@ class SpatiaLiteCreation(DatabaseCreation):
|
|||
self.load_spatialite_sql()
|
||||
call_command('syncdb', verbosity=verbosity, interactive=False, database=self.connection.alias)
|
||||
|
||||
if settings.CACHE_BACKEND.startswith('db://'):
|
||||
from django.core.cache import parse_backend_uri
|
||||
_, cache_name, _ = parse_backend_uri(settings.CACHE_BACKEND)
|
||||
call_command('createcachetable', cache_name)
|
||||
|
||||
for cache_alias in settings.CACHES:
|
||||
cache = get_cache(cache_alias)
|
||||
if isinstance(cache, BaseDatabaseCache):
|
||||
from django.db import router
|
||||
if router.allow_syncdb(self.connection.alias, cache.cache_model_class):
|
||||
call_command('createcachetable', cache._table, database=self.connection.alias)
|
||||
# Get a cursor (even though we don't need one yet). This has
|
||||
# the side effect of initializing the test database.
|
||||
cursor = self.connection.cursor()
|
||||
|
|
|
@ -12,8 +12,13 @@ get_cache() function made available here. get_cache() takes a backend URI
|
|||
(e.g. "memcached://127.0.0.1:11211/") and returns an instance of a backend
|
||||
cache class.
|
||||
|
||||
See docs/cache.txt for information on the public API.
|
||||
See docs/topics/cache.txt for information on the public API.
|
||||
"""
|
||||
from django.conf import settings
|
||||
from django.core import signals
|
||||
from django.core.cache.backends.base import (
|
||||
InvalidCacheBackendError, CacheKeyWarning, BaseCache)
|
||||
from django.utils import importlib
|
||||
|
||||
try:
|
||||
# The mod_python version is more efficient, so try importing it first.
|
||||
|
@ -27,10 +32,9 @@ except ImportError:
|
|||
# PendingDeprecationWarning
|
||||
from cgi import parse_qsl
|
||||
|
||||
from django.conf import settings
|
||||
from django.core import signals
|
||||
from django.core.cache.backends.base import InvalidCacheBackendError, CacheKeyWarning
|
||||
from django.utils import importlib
|
||||
__all__ = [
|
||||
'get_cache', 'cache', 'DEFAULT_CACHE_ALIAS'
|
||||
]
|
||||
|
||||
# Name for use in settings file --> name of module in "backends" directory.
|
||||
# Any backend scheme that is not in this dictionary is treated as a Python
|
||||
|
@ -43,6 +47,8 @@ BACKENDS = {
|
|||
'dummy': 'dummy',
|
||||
}
|
||||
|
||||
DEFAULT_CACHE_ALIAS = 'default'
|
||||
|
||||
def parse_backend_uri(backend_uri):
|
||||
"""
|
||||
Converts the "backend_uri" into a cache scheme ('db', 'memcached', etc), a
|
||||
|
@ -67,32 +73,102 @@ def parse_backend_uri(backend_uri):
|
|||
|
||||
return scheme, host, params
|
||||
|
||||
def get_cache(backend_uri, key_prefix=None, version=None, key_func=None):
|
||||
if key_prefix is None:
|
||||
key_prefix = settings.CACHE_KEY_PREFIX
|
||||
if version is None:
|
||||
version = settings.CACHE_VERSION
|
||||
if key_func is None:
|
||||
key_func = settings.CACHE_KEY_FUNCTION
|
||||
if not settings.CACHES:
|
||||
import warnings
|
||||
warnings.warn(
|
||||
"settings.CACHE_* is deprecated; use settings.CACHES instead.",
|
||||
PendingDeprecationWarning
|
||||
)
|
||||
# Mapping for new-style cache backend api
|
||||
backend_classes = {
|
||||
'memcached': 'memcached.CacheClass',
|
||||
'locmem': 'locmem.LocMemCache',
|
||||
'file': 'filebased.FileBasedCache',
|
||||
'db': 'db.DatabaseCache',
|
||||
'dummy': 'dummy.DummyCache',
|
||||
}
|
||||
engine, host, params = parse_backend_uri(settings.CACHE_BACKEND)
|
||||
if engine in backend_classes:
|
||||
engine = 'django.core.cache.backends.%s' % backend_classes[engine]
|
||||
defaults = {
|
||||
'BACKEND': engine,
|
||||
'LOCATION': host,
|
||||
}
|
||||
defaults.update(params)
|
||||
settings.CACHES[DEFAULT_CACHE_ALIAS] = defaults
|
||||
|
||||
if key_func is not None and not callable(key_func):
|
||||
key_func_module_path, key_func_name = key_func.rsplit('.', 1)
|
||||
key_func_module = importlib.import_module(key_func_module_path)
|
||||
key_func = getattr(key_func_module, key_func_name)
|
||||
if DEFAULT_CACHE_ALIAS not in settings.CACHES:
|
||||
raise ImproperlyConfigured("You must define a '%s' cache" % DEFAULT_CACHE_ALIAS)
|
||||
|
||||
scheme, host, params = parse_backend_uri(backend_uri)
|
||||
if scheme in BACKENDS:
|
||||
name = 'django.core.cache.backends.%s' % BACKENDS[scheme]
|
||||
def parse_backend_conf(backend, **kwargs):
|
||||
"""
|
||||
Helper function to parse the backend configuration
|
||||
that doesn't use the URI notation.
|
||||
"""
|
||||
# Try to get the CACHES entry for the given backend name first
|
||||
conf = settings.CACHES.get(backend, None)
|
||||
if conf is not None:
|
||||
args = conf.copy()
|
||||
backend = args.pop('BACKEND')
|
||||
location = args.pop('LOCATION', '')
|
||||
return backend, location, args
|
||||
else:
|
||||
name = scheme
|
||||
module = importlib.import_module(name)
|
||||
return module.CacheClass(host, params, key_prefix=key_prefix, version=version, key_func=key_func)
|
||||
# Trying to import the given backend, in case it's a dotted path
|
||||
mod_path, cls_name = backend.rsplit('.', 1)
|
||||
try:
|
||||
mod = importlib.import_module(mod_path)
|
||||
backend_cls = getattr(mod, cls_name)
|
||||
except (AttributeError, ImportError):
|
||||
raise InvalidCacheBackendError("Could not find backend '%s'" % backend)
|
||||
location = kwargs.pop('LOCATION', '')
|
||||
return backend, location, kwargs
|
||||
raise InvalidCacheBackendError(
|
||||
"Couldn't find a cache backend named '%s'" % backend)
|
||||
|
||||
cache = get_cache(settings.CACHE_BACKEND)
|
||||
def get_cache(backend, **kwargs):
|
||||
"""
|
||||
Function to load a cache backend dynamically. This is flexible by design
|
||||
to allow different use cases:
|
||||
|
||||
To load a backend with the old URI-based notation::
|
||||
|
||||
cache = get_cache('locmem://')
|
||||
|
||||
To load a backend that is pre-defined in the settings::
|
||||
|
||||
cache = get_cache('default')
|
||||
|
||||
To load a backend with its dotted import path,
|
||||
including arbitrary options::
|
||||
|
||||
cache = get_cache('django.core.cache.backends.memcached.MemcachedCache', **{
|
||||
'LOCATION': '127.0.0.1:11211', 'TIMEOUT': 30,
|
||||
})
|
||||
|
||||
"""
|
||||
try:
|
||||
if '://' in backend:
|
||||
# for backwards compatibility
|
||||
backend, location, params = parse_backend_uri(backend)
|
||||
if backend in BACKENDS:
|
||||
backend = 'django.core.cache.backends.%s' % BACKENDS[backend]
|
||||
params.update(kwargs)
|
||||
mod = importlib.import_module(backend)
|
||||
backend_cls = mod.CacheClass
|
||||
else:
|
||||
backend, location, params = parse_backend_conf(backend, **kwargs)
|
||||
mod_path, cls_name = backend.rsplit('.', 1)
|
||||
mod = importlib.import_module(mod_path)
|
||||
backend_cls = getattr(mod, cls_name)
|
||||
except (AttributeError, ImportError), e:
|
||||
raise InvalidCacheBackendError(
|
||||
"Could not find backend '%s': %s" % (backend, e))
|
||||
return backend_cls(location, params)
|
||||
|
||||
cache = get_cache(DEFAULT_CACHE_ALIAS)
|
||||
|
||||
# Some caches -- python-memcached in particular -- need to do a cleanup at the
|
||||
# end of a request cycle. If the cache provides a close() method, wire it up
|
||||
# here.
|
||||
if hasattr(cache, 'close'):
|
||||
signals.request_finished.connect(cache.close)
|
||||
|
||||
|
|
|
@ -2,8 +2,10 @@
|
|||
|
||||
import warnings
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.exceptions import ImproperlyConfigured, DjangoRuntimeWarning
|
||||
from django.utils.encoding import smart_str
|
||||
from django.utils.importlib import import_module
|
||||
|
||||
class InvalidCacheBackendError(ImproperlyConfigured):
|
||||
pass
|
||||
|
@ -15,38 +17,55 @@ class CacheKeyWarning(DjangoRuntimeWarning):
|
|||
MEMCACHE_MAX_KEY_LENGTH = 250
|
||||
|
||||
def default_key_func(key, key_prefix, version):
|
||||
"""Default function to generate keys.
|
||||
"""
|
||||
Default function to generate keys.
|
||||
|
||||
Constructs the key used by all other methods. By default it prepends
|
||||
the `key_prefix'. CACHE_KEY_FUNCTION can be used to specify an alternate
|
||||
the `key_prefix'. KEY_FUNCTION can be used to specify an alternate
|
||||
function with custom key making behavior.
|
||||
"""
|
||||
return ':'.join([key_prefix, str(version), smart_str(key)])
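# Illustrative only (not part of this patch): with key_prefix='site1' and
# version=2, default_key_func('user:42', 'site1', 2) returns 'site1:2:user:42'.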
|
||||
|
||||
def get_key_func(key_func):
|
||||
"""
|
||||
Function to decide which key function to use.
|
||||
|
||||
Defaults to ``default_key_func``.
|
||||
"""
|
||||
if key_func is not None:
|
||||
if callable(key_func):
|
||||
return key_func
|
||||
else:
|
||||
key_func_module_path, key_func_name = key_func.rsplit('.', 1)
|
||||
key_func_module = import_module(key_func_module_path)
|
||||
return getattr(key_func_module, key_func_name)
|
||||
return default_key_func
|
||||
|
||||
class BaseCache(object):
|
||||
def __init__(self, params, key_prefix='', version=1, key_func=None):
|
||||
timeout = params.get('timeout', 300)
|
||||
def __init__(self, params):
|
||||
timeout = params.get('timeout', params.get('TIMEOUT', 300))
|
||||
try:
|
||||
timeout = int(timeout)
|
||||
except (ValueError, TypeError):
|
||||
timeout = 300
|
||||
self.default_timeout = timeout
|
||||
|
||||
max_entries = params.get('max_entries', 300)
|
||||
options = params.get('OPTIONS', {})
|
||||
max_entries = params.get('max_entries', options.get('MAX_ENTRIES', 300))
|
||||
try:
|
||||
self._max_entries = int(max_entries)
|
||||
except (ValueError, TypeError):
|
||||
self._max_entries = 300
|
||||
|
||||
cull_frequency = params.get('cull_frequency', 3)
|
||||
cull_frequency = params.get('cull_frequency', options.get('CULL_FREQUENCY', 3))
|
||||
try:
|
||||
self._cull_frequency = int(cull_frequency)
|
||||
except (ValueError, TypeError):
|
||||
self._cull_frequency = 3
|
||||
|
||||
self.key_prefix = smart_str(key_prefix)
|
||||
self.version = version
|
||||
self.key_func = key_func or default_key_func
|
||||
self.key_prefix = smart_str(params.get('KEY_PREFIX', ''))
|
||||
self.version = params.get('VERSION', 1)
|
||||
self.key_func = get_key_func(params.get('KEY_FUNCTION', None))
|
||||
|
||||
def make_key(self, key, version=None):
|
||||
"""Constructs the key used by all other methods. By default it
|
||||
|
|
|
@ -25,16 +25,16 @@ class Options(object):
|
|||
self.managed = True
|
||||
self.proxy = False
|
||||
|
||||
class BaseDatabaseCacheClass(BaseCache):
|
||||
def __init__(self, table, params, key_prefix='', version=1, key_func=None):
|
||||
BaseCache.__init__(self, params, key_prefix, version, key_func)
|
||||
class BaseDatabaseCache(BaseCache):
|
||||
def __init__(self, table, params):
|
||||
BaseCache.__init__(self, params)
|
||||
self._table = table
|
||||
|
||||
class CacheEntry(object):
|
||||
_meta = Options(table)
|
||||
self.cache_model_class = CacheEntry
|
||||
|
||||
class CacheClass(BaseDatabaseCacheClass):
|
||||
class DatabaseCache(BaseDatabaseCache):
|
||||
def get(self, key, default=None, version=None):
|
||||
key = self.make_key(key, version=version)
|
||||
self.validate_key(key)
|
||||
|
@ -140,3 +140,7 @@ class CacheClass(BaseDatabaseCacheClass):
|
|||
table = connections[db].ops.quote_name(self._table)
|
||||
cursor = connections[db].cursor()
|
||||
cursor.execute('DELETE FROM %s' % table)
|
||||
|
||||
# For backwards compatibility
|
||||
class CacheClass(DatabaseCache):
|
||||
pass
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
|
||||
from django.core.cache.backends.base import BaseCache
|
||||
|
||||
class CacheClass(BaseCache):
|
||||
class DummyCache(BaseCache):
|
||||
def __init__(self, host, *args, **kwargs):
|
||||
BaseCache.__init__(self, *args, **kwargs)
|
||||
|
||||
|
@ -40,3 +40,7 @@ class CacheClass(BaseCache):
|
|||
|
||||
def clear(self):
|
||||
pass
|
||||
|
||||
# For backwards compatibility
|
||||
class CacheClass(DummyCache):
|
||||
pass
|
||||
|
|
|
@ -11,9 +11,9 @@ except ImportError:
|
|||
from django.core.cache.backends.base import BaseCache
|
||||
from django.utils.hashcompat import md5_constructor
|
||||
|
||||
class CacheClass(BaseCache):
|
||||
def __init__(self, dir, params, key_prefix='', version=1, key_func=None):
|
||||
BaseCache.__init__(self, params, key_prefix, version, key_func)
|
||||
class FileBasedCache(BaseCache):
|
||||
def __init__(self, dir, params):
|
||||
BaseCache.__init__(self, params)
|
||||
self._dir = dir
|
||||
if not os.path.exists(self._dir):
|
||||
self._createdir()
|
||||
|
@ -161,3 +161,7 @@ class CacheClass(BaseCache):
|
|||
shutil.rmtree(self._dir)
|
||||
except (IOError, OSError):
|
||||
pass
|
||||
|
||||
# For backwards compatibility
|
||||
class CacheClass(FileBasedCache):
|
||||
pass
|
||||
|
|
|
@ -9,12 +9,19 @@ except ImportError:
|
|||
from django.core.cache.backends.base import BaseCache
|
||||
from django.utils.synch import RWLock
|
||||
|
||||
class CacheClass(BaseCache):
|
||||
def __init__(self, _, params, key_prefix='', version=1, key_func=None):
|
||||
BaseCache.__init__(self, params, key_prefix, version, key_func)
|
||||
self._cache = {}
|
||||
self._expire_info = {}
|
||||
self._lock = RWLock()
|
||||
# Global in-memory store of cache data. Keyed by name, to provide
|
||||
# multiple named local memory caches.
|
||||
_caches = {}
|
||||
_expire_info = {}
|
||||
_locks = {}
|
||||
|
||||
class LocMemCache(BaseCache):
|
||||
def __init__(self, name, params):
|
||||
BaseCache.__init__(self, params)
|
||||
global _caches, _expire_info, _locks
|
||||
self._cache = _caches.setdefault(name, {})
|
||||
self._expire_info = _expire_info.setdefault(name, {})
|
||||
self._lock = _locks.setdefault(name, RWLock())
|
||||
|
||||
def add(self, key, value, timeout=None, version=None):
|
||||
key = self.make_key(key, version=version)
|
||||
|
@ -133,3 +140,7 @@ class CacheClass(BaseCache):
|
|||
def clear(self):
|
||||
self._cache.clear()
|
||||
self._expire_info.clear()
|
||||
|
||||
# For backwards compatibility
|
||||
class CacheClass(LocMemCache):
|
||||
pass
|
||||
|
|
|
@ -1,26 +1,34 @@
|
|||
"Memcached cache backend"
|
||||
|
||||
import time
|
||||
from threading import local
|
||||
|
||||
from django.core.cache.backends.base import BaseCache, InvalidCacheBackendError
|
||||
from django.utils import importlib
|
||||
|
||||
try:
|
||||
import cmemcache as memcache
|
||||
import warnings
|
||||
warnings.warn(
|
||||
"Support for the 'cmemcache' library has been deprecated. Please use python-memcached instead.",
|
||||
DeprecationWarning
|
||||
)
|
||||
except ImportError:
|
||||
try:
|
||||
import memcache
|
||||
except:
|
||||
raise InvalidCacheBackendError("Memcached cache backend requires either the 'memcache' or 'cmemcache' library")
|
||||
class BaseMemcachedCache(BaseCache):
|
||||
def __init__(self, server, params, library, value_not_found_exception):
|
||||
super(BaseMemcachedCache, self).__init__(params)
|
||||
if isinstance(server, basestring):
|
||||
self._servers = server.split(';')
|
||||
else:
|
||||
self._servers = server
|
||||
|
||||
class CacheClass(BaseCache):
|
||||
def __init__(self, server, params, key_prefix='', version=1, key_func=None):
|
||||
BaseCache.__init__(self, params, key_prefix, version, key_func)
|
||||
self._cache = memcache.Client(server.split(';'))
|
||||
# The exception type to catch from the underlying library for a key
|
||||
# that was not found. This is a ValueError for python-memcache,
|
||||
# pylibmc.NotFound for pylibmc, and cmemcache will return None without
|
||||
# raising an exception.
|
||||
self.LibraryValueNotFoundException = value_not_found_exception
|
||||
|
||||
self._lib = library
|
||||
self._options = params.get('OPTIONS', None)
|
||||
|
||||
@property
|
||||
def _cache(self):
|
||||
"""
|
||||
Implements transparent thread-safe access to a memcached client.
|
||||
"""
|
||||
return self._lib.Client(self._servers)
|
||||
|
||||
def _get_memcache_timeout(self, timeout):
|
||||
"""
|
||||
|
@ -79,13 +87,13 @@ class CacheClass(BaseCache):
|
|||
val = self._cache.incr(key, delta)
|
||||
|
||||
# python-memcache responds to incr on non-existent keys by
|
||||
# raising a ValueError. Cmemcache returns None. In both
|
||||
# cases, we should raise a ValueError though.
|
||||
except ValueError:
|
||||
# raising a ValueError, pylibmc by raising a pylibmc.NotFound
|
||||
# and Cmemcache returns None. In all cases,
|
||||
# we should raise a ValueError though.
|
||||
except self.LibraryValueNotFoundException:
|
||||
val = None
|
||||
if val is None:
|
||||
raise ValueError("Key '%s' not found" % key)
|
||||
|
||||
return val
|
||||
|
||||
def decr(self, key, delta=1, version=None):
|
||||
|
@ -93,10 +101,11 @@ class CacheClass(BaseCache):
|
|||
try:
|
||||
val = self._cache.decr(key, delta)
|
||||
|
||||
# python-memcache responds to decr on non-existent keys by
|
||||
# raising a ValueError. Cmemcache returns None. In both
|
||||
# cases, we should raise a ValueError though.
|
||||
except ValueError:
|
||||
# python-memcache responds to incr on non-existent keys by
|
||||
# raising a ValueError, pylibmc by raising a pylibmc.NotFound
|
||||
# and Cmemcache returns None. In all cases,
|
||||
# we should raise a ValueError though.
|
||||
except self.LibraryValueNotFoundException:
|
||||
val = None
|
||||
if val is None:
|
||||
raise ValueError("Key '%s' not found" % key)
|
||||
|
@ -117,3 +126,59 @@ class CacheClass(BaseCache):
|
|||
|
||||
def clear(self):
|
||||
self._cache.flush_all()
|
||||
|
||||
# For backwards compatibility -- the default cache class tries a
|
||||
# cascading lookup of cmemcache, then memcache.
|
||||
class CacheClass(BaseMemcachedCache):
|
||||
def __init__(self, server, params):
|
||||
try:
|
||||
import cmemcache as memcache
|
||||
import warnings
|
||||
warnings.warn(
|
||||
"Support for the 'cmemcache' library has been deprecated. Please use python-memcached or pyblimc instead.",
|
||||
DeprecationWarning
|
||||
)
|
||||
except ImportError:
|
||||
try:
|
||||
import memcache
|
||||
except:
|
||||
raise InvalidCacheBackendError(
|
||||
"Memcached cache backend requires either the 'memcache' or 'cmemcache' library"
|
||||
)
|
||||
super(CacheClass, self).__init__(server, params,
|
||||
library=memcache,
|
||||
value_not_found_exception=ValueError)
|
||||
|
||||
class MemcachedCache(BaseMemcachedCache):
|
||||
"An implementation of a cache binding using python-memcached"
|
||||
def __init__(self, server, params):
|
||||
import memcache
|
||||
super(MemcachedCache, self).__init__(server, params,
|
||||
library=memcache,
|
||||
value_not_found_exception=ValueError)
|
||||
|
||||
class PyLibMCCache(BaseMemcachedCache):
|
||||
"An implementation of a cache binding using pylibmc"
|
||||
def __init__(self, server, params):
|
||||
import pylibmc
|
||||
self._local = local()
|
||||
super(PyLibMCCache, self).__init__(server, params,
|
||||
library=pylibmc,
|
||||
value_not_found_exception=pylibmc.NotFound)
|
||||
|
||||
@property
|
||||
def _cache(self):
|
||||
# PylibMC uses cache options as the 'behaviors' attribute.
|
||||
# It also needs to use threadlocals, because some versions of
|
||||
# PylibMC don't play well with the GIL.
|
||||
client = getattr(self._local, 'client', None)
|
||||
if client:
|
||||
return client
|
||||
|
||||
client = self._lib.Client(self._servers)
|
||||
if self._options:
|
||||
client.behaviors = self._options
|
||||
|
||||
self._local.client = client
|
||||
|
||||
return client
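# Illustrative settings for this backend (not part of this module): any
# OPTIONS dict given in the CACHES entry is assigned verbatim to the
# client's pylibmc 'behaviors', e.g.
#
#     CACHES = {
#         'default': {
#             'BACKEND': 'django.core.cache.backends.memcached.PyLibMCCache',
#             'LOCATION': '127.0.0.1:11211',
#             'OPTIONS': {'tcp_nodelay': True},  # hypothetical behavior flag
#         }
#     }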
|
||||
|
|
|
@ -359,12 +359,14 @@ class BaseDatabaseCreation(object):
|
|||
# (unless you really ask to be flooded)
|
||||
call_command('syncdb', verbosity=max(verbosity - 1, 0), interactive=False, database=self.connection.alias)
|
||||
|
||||
if settings.CACHE_BACKEND.startswith('db://'):
|
||||
from django.core.cache import parse_backend_uri, cache
|
||||
from django.db import router
|
||||
if router.allow_syncdb(self.connection.alias, cache.cache_model_class):
|
||||
_, cache_name, _ = parse_backend_uri(settings.CACHE_BACKEND)
|
||||
call_command('createcachetable', cache_name, database=self.connection.alias)
|
||||
from django.core.cache import get_cache
|
||||
from django.core.cache.backends.db import BaseDatabaseCache
|
||||
for cache_alias in settings.CACHES:
|
||||
cache = get_cache(cache_alias)
|
||||
if isinstance(cache, BaseDatabaseCache):
|
||||
from django.db import router
|
||||
if router.allow_syncdb(self.connection.alias, cache.cache_model_class):
|
||||
call_command('createcachetable', cache._table, database=self.connection.alias)
|
||||
|
||||
# Get a cursor (even though we don't need one yet). This has
|
||||
# the side effect of initializing the test database.
|
||||
|
|
|
@ -49,7 +49,7 @@ More details about how the caching works:
|
|||
"""
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.cache import cache
|
||||
from django.core.cache import get_cache, DEFAULT_CACHE_ALIAS
|
||||
from django.utils.cache import get_cache_key, learn_cache_key, patch_response_headers, get_max_age
|
||||
|
||||
class UpdateCacheMiddleware(object):
|
||||
|
@ -65,6 +65,7 @@ class UpdateCacheMiddleware(object):
|
|||
self.cache_timeout = settings.CACHE_MIDDLEWARE_SECONDS
|
||||
self.key_prefix = settings.CACHE_MIDDLEWARE_KEY_PREFIX
|
||||
self.cache_anonymous_only = getattr(settings, 'CACHE_MIDDLEWARE_ANONYMOUS_ONLY', False)
|
||||
self.cache = get_cache(settings.CACHE_MIDDLEWARE_ALIAS)
|
||||
|
||||
def process_response(self, request, response):
|
||||
"""Sets the cache, if needed."""
|
||||
|
@ -85,7 +86,7 @@ class UpdateCacheMiddleware(object):
|
|||
patch_response_headers(response, timeout)
|
||||
if timeout:
|
||||
cache_key = learn_cache_key(request, response, timeout, self.key_prefix)
|
||||
cache.set(cache_key, response, timeout)
|
||||
self.cache.set(cache_key, response, timeout)
|
||||
return response
|
||||
|
||||
class FetchFromCacheMiddleware(object):
|
||||
|
@ -100,6 +101,7 @@ class FetchFromCacheMiddleware(object):
|
|||
self.cache_timeout = settings.CACHE_MIDDLEWARE_SECONDS
|
||||
self.key_prefix = settings.CACHE_MIDDLEWARE_KEY_PREFIX
|
||||
self.cache_anonymous_only = getattr(settings, 'CACHE_MIDDLEWARE_ANONYMOUS_ONLY', False)
|
||||
self.cache = get_cache(settings.CACHE_MIDDLEWARE_ALIAS)
|
||||
|
||||
def process_request(self, request):
|
||||
"""
|
||||
|
@ -124,12 +126,12 @@ class FetchFromCacheMiddleware(object):
|
|||
request._cache_update_cache = True
|
||||
return None # No cache information available, need to rebuild.
|
||||
|
||||
response = cache.get(cache_key, None)
|
||||
response = self.cache.get(cache_key, None)
|
||||
|
||||
# if it wasn't found and we are looking for a HEAD, try looking just for that
|
||||
if response is None and request.method == 'HEAD':
|
||||
cache_key = get_cache_key(request, self.key_prefix, 'HEAD')
|
||||
response = cache.get(cache_key, None)
|
||||
response = self.cache.get(cache_key, None)
|
||||
|
||||
if response is None:
|
||||
request._cache_update_cache = True
|
||||
|
@ -146,14 +148,33 @@ class CacheMiddleware(UpdateCacheMiddleware, FetchFromCacheMiddleware):
|
|||
Also used as the hook point for the cache decorator, which is generated
|
||||
using the decorator-from-middleware utility.
|
||||
"""
|
||||
def __init__(self, cache_timeout=None, key_prefix=None, cache_anonymous_only=None):
|
||||
def __init__(self, cache_timeout=None, cache_anonymous_only=None, **kwargs):
|
||||
self.cache_timeout = cache_timeout
|
||||
if cache_timeout is None:
|
||||
self.cache_timeout = settings.CACHE_MIDDLEWARE_SECONDS
|
||||
self.key_prefix = key_prefix
|
||||
if key_prefix is None:
|
||||
self.key_prefix = settings.CACHE_MIDDLEWARE_KEY_PREFIX
|
||||
|
||||
# We need to differentiate between "provided, but using default value",
|
||||
# and "not provided". If the value is provided using a default, then
|
||||
# we fall back to system defaults. If it is not provided at all,
|
||||
# we need to use middleware defaults.
|
||||
try:
|
||||
cache_alias = kwargs['cache_alias']
|
||||
if cache_alias is None:
|
||||
cache_alias = DEFAULT_CACHE_ALIAS
|
||||
except KeyError:
|
||||
cache_alias = settings.CACHE_MIDDLEWARE_ALIAS
|
||||
|
||||
cache_kwargs = {}
|
||||
try:
|
||||
key_prefix = kwargs['key_prefix']
|
||||
if key_prefix is not None:
|
||||
cache_kwargs['KEY_PREFIX'] = key_prefix
|
||||
except KeyError:
|
||||
cache_kwargs['KEY_PREFIX'] = settings.CACHE_MIDDLEWARE_KEY_PREFIX
|
||||
|
||||
if cache_anonymous_only is None:
|
||||
self.cache_anonymous_only = getattr(settings, 'CACHE_MIDDLEWARE_ANONYMOUS_ONLY', False)
|
||||
else:
|
||||
self.cache_anonymous_only = cache_anonymous_only
|
||||
|
||||
self.cache = get_cache(cache_alias, **cache_kwargs)
|
||||
|
|
|
@ -40,23 +40,24 @@ def cache_page(*args, **kwargs):
|
|||
|
||||
# We also add some asserts to give better error messages in case people are
|
||||
# using other ways to call cache_page that no longer work.
|
||||
cache_alias = kwargs.pop('cache', None)
|
||||
key_prefix = kwargs.pop('key_prefix', None)
|
||||
assert not kwargs, "The only keyword argument accepted is key_prefix"
|
||||
assert not kwargs, "The only keyword arguments are cache and key_prefix"
|
||||
if len(args) > 1:
|
||||
assert len(args) == 2, "cache_page accepts at most 2 arguments"
|
||||
if callable(args[0]):
|
||||
return decorator_from_middleware_with_args(CacheMiddleware)(cache_timeout=args[1], key_prefix=key_prefix)(args[0])
|
||||
return decorator_from_middleware_with_args(CacheMiddleware)(cache_timeout=args[1], cache_alias=cache_alias, key_prefix=key_prefix)(args[0])
|
||||
elif callable(args[1]):
|
||||
return decorator_from_middleware_with_args(CacheMiddleware)(cache_timeout=args[0], key_prefix=key_prefix)(args[1])
|
||||
return decorator_from_middleware_with_args(CacheMiddleware)(cache_timeout=args[0], cache_alias=cache_alias, key_prefix=key_prefix)(args[1])
|
||||
else:
|
||||
assert False, "cache_page must be passed a view function if called with two arguments"
|
||||
elif len(args) == 1:
|
||||
if callable(args[0]):
|
||||
return decorator_from_middleware_with_args(CacheMiddleware)(key_prefix=key_prefix)(args[0])
|
||||
return decorator_from_middleware_with_args(CacheMiddleware)(cache_alias=cache_alias, key_prefix=key_prefix)(args[0])
|
||||
else:
|
||||
return decorator_from_middleware_with_args(CacheMiddleware)(cache_timeout=args[0], key_prefix=key_prefix)
|
||||
return decorator_from_middleware_with_args(CacheMiddleware)(cache_timeout=args[0], cache_alias=cache_alias, key_prefix=key_prefix)
|
||||
else:
|
||||
return decorator_from_middleware_with_args(CacheMiddleware)(key_prefix=key_prefix)
|
||||
return decorator_from_middleware_with_args(CacheMiddleware)(cache_alias=cache_alias, key_prefix=key_prefix)
|
||||
|
||||
|
||||
def cache_control(**kwargs):
|
||||
|
|
|
@ -940,7 +940,7 @@ dependencies:
|
|||
* gettext_ (:ref:`gettext_on_windows`)
|
||||
|
||||
If you want to test the memcached cache backend, you will also need to define
|
||||
a :setting:`CACHE_BACKEND` setting that points at your memcached instance.
|
||||
a :setting:`CACHES` setting that points at your memcached instance.
|
||||
|
||||
Each of these dependencies is optional. If you're missing any of them, the
|
||||
associated tests will be skipped.
|
||||
|
|
|
@ -127,21 +127,63 @@ Default: Not defined
|
|||
The site-specific user profile model used by this site. See
|
||||
:ref:`auth-profiles`.
|
||||
|
||||
.. setting:: CACHE_BACKEND
|
||||
.. setting:: CACHES
|
||||
|
||||
CACHE_BACKEND
|
||||
-------------
|
||||
CACHES
|
||||
------
|
||||
|
||||
Default: ``'locmem://'``
|
||||
.. versionadded:: 1.3
|
||||
|
||||
The cache backend to use. See :doc:`/topics/cache`.
|
||||
Default::
|
||||
|
||||
.. setting:: CACHE_KEY_FUNCTION
|
||||
{
|
||||
'default': {
|
||||
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
|
||||
}
|
||||
}
|
||||
|
||||
CACHE_KEY_FUNCTION
|
||||
------------------
|
||||
A dictionary containing the settings for all caches to be used with
|
||||
Django. It is a nested dictionary whose contents map cache aliases
|
||||
to a dictionary containing the options for an individual cache.
|
||||
|
||||
Default: ``None``
|
||||
The :setting:`CACHES` setting must configure a ``default`` cache;
|
||||
any number of additional caches may also be specified. If you
|
||||
are using a cache backend other than the local memory cache, or
|
||||
you need to define multiple caches, other options will be required.
|
||||
The following cache options are available.
|
||||
|
||||
.. setting:: CACHES-BACKEND
|
||||
|
||||
BACKEND
|
||||
~~~~~~~
|
||||
|
||||
Default: ``''`` (Empty string)
|
||||
|
||||
The cache backend to use. The built-in cache backends are:
|
||||
|
||||
* ``'django.core.cache.backends.db.DatabaseCache'``
|
||||
* ``'django.core.cache.backends.dummy.DummyCache'``
|
||||
* ``'django.core.cache.backends.filebased.FileBasedCache'``
|
||||
* ``'django.core.cache.backends.locmem.LocMemCache'``
|
||||
* ``'django.core.cache.backends.memcached.MemcachedCache'``
|
||||
* ``'django.core.cache.backends.memcached.PyLibMCCache'``
|
||||
|
||||
You can use a cache backend that doesn't ship with Django by setting
|
||||
:setting:`BACKEND <CACHES-BACKEND>` to a fully-qualified path of a cache
|
||||
backend class (i.e. ``mypackage.backends.whatever.WhateverCache``).
|
||||
Writing a whole new cache backend from scratch is left as an exercise
|
||||
to the reader; see the other backends for examples.
|
||||
|
||||
.. note::
|
||||
Prior to Django 1.3, you could use a URI based version of the backend
|
||||
name to reference the built-in cache backends (e.g., you could use
|
||||
``'db://tablename'`` to refer to the database backend). This format has
|
||||
been deprecated, and will be removed in Django 1.5.
|
||||
|
||||
.. setting:: CACHES-KEY_FUNCTION
|
||||
|
||||
KEY_FUNCTION
|
||||
~~~~~~~~~~~~
|
||||
|
||||
A string containing a dotted path to a function that defines how to
|
||||
compose a prefix, version and key into a final cache key. The default
|
||||
|
@ -155,10 +197,10 @@ argument signature.
|
|||
|
||||
See the :ref:`cache documentation <cache_key_transformation>` for more information.
|
||||
|
||||
.. setting:: CACHE_KEY_PREFIX
|
||||
.. setting:: CACHES-KEY_PREFIX
|
||||
|
||||
CACHE_KEY_PREFIX
|
||||
----------------
|
||||
KEY_PREFIX
|
||||
~~~~~~~~~~
|
||||
|
||||
Default: ``''`` (Empty string)
|
||||
|
||||
|
@ -167,6 +209,67 @@ all cache keys used by the Django server.
|
|||
|
||||
See the :ref:`cache documentation <cache_key_prefixing>` for more information.
|
||||
|
||||
.. setting:: CACHES-LOCATION
|
||||
|
||||
LOCATION
|
||||
~~~~~~~~
|
||||
|
||||
Default: ``''`` (Empty string)
|
||||
|
||||
The location of the cache to use. This might be the directory for a
|
||||
file system cache, a host and port for a memcache server, or simply an
|
||||
identifying name for a local memory cache. e.g.::
|
||||
|
||||
CACHES = {
|
||||
'default': {
|
||||
'BACKEND': 'django.core.cache.backends.filebased.FileBasedCache',
|
||||
'LOCATION': '/var/tmp/django_cache',
|
||||
}
|
||||
}
|
||||
|
||||
.. setting:: CACHES-OPTIONS
|
||||
|
||||
OPTIONS
|
||||
~~~~~~~
|
||||
|
||||
Default: None
|
||||
|
||||
Extra parameters to pass to the cache backend. Available parameters
|
||||
vary depending on your cache backend.
|
||||
|
||||
Some information on available parameters can be found in the
|
||||
:doc:`Cache Backends </topics/cache>` documentation. For more information,
|
||||
consult your backend module's own documentation.
|
||||
|
||||
.. setting:: CACHES-TIMEOUT
|
||||
|
||||
TIMEOUT
|
||||
~~~~~~~
|
||||
|
||||
Default: 300
|
||||
|
||||
The number of seconds before a cache entry is considered stale.
|
||||
|
||||
.. setting:: CACHES-VERSION
|
||||
|
||||
VERSION
|
||||
~~~~~~~
|
||||
|
||||
Default: ``1``
|
||||
|
||||
The default version number for cache keys generated by the Django server.
|
||||
|
||||
See the :ref:`cache documentation <cache_versioning>` for more information.
|
||||
|
||||
.. setting:: CACHE_MIDDLEWARE_ALIAS
|
||||
|
||||
CACHE_MIDDLEWARE_ALIAS
|
||||
----------------------
|
||||
|
||||
Default: ``default``
|
||||
|
||||
The cache connection to use for the cache middleware.
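For example, to make the cache middleware store pages in a cache named
``'pages'`` (assuming such an alias is defined in :setting:`CACHES`)::

    CACHE_MIDDLEWARE_ALIAS = 'pages'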
|
||||
|
||||
.. setting:: CACHE_MIDDLEWARE_ANONYMOUS_ONLY
|
||||
|
||||
CACHE_MIDDLEWARE_ANONYMOUS_ONLY
|
||||
|
@ -206,18 +309,6 @@ The default number of seconds to cache a page when the caching middleware or
|
|||
|
||||
See :doc:`/topics/cache`.
|
||||
|
||||
.. setting:: CACHE_VERSION
|
||||
|
||||
CACHE_VERSION
|
||||
-------------
|
||||
|
||||
Default: ``1``
|
||||
|
||||
The default version number for cache keys generated by the Django server.
|
||||
|
||||
See the :ref:`cache documentation <cache_versioning>` for more information.
|
||||
|
||||
|
||||
.. setting:: CSRF_COOKIE_DOMAIN
|
||||
|
||||
CSRF_COOKIE_DOMAIN
|
||||
|
@ -293,7 +384,7 @@ SQLite. This can be configured using the following::
|
|||
For other database backends, or more complex SQLite configurations, other options
|
||||
will be required. The following inner options are available.
|
||||
|
||||
.. setting:: ENGINE
|
||||
.. setting:: DATABASE-ENGINE
|
||||
|
||||
ENGINE
|
||||
~~~~~~
|
||||
|
@ -1896,6 +1987,15 @@ See :tfilter:`allowed date format strings <date>`. See also ``DATE_FORMAT``,
|
|||
Deprecated settings
|
||||
===================
|
||||
|
||||
.. setting:: CACHE_BACKEND
|
||||
|
||||
CACHE_BACKEND
|
||||
-------------
|
||||
|
||||
.. deprecated:: 1.3
|
||||
This setting has been replaced by :setting:`BACKEND <CACHES-BACKEND>` in
|
||||
:setting:`CACHES`.
|
||||
|
||||
.. setting:: DATABASE_ENGINE
|
||||
|
||||
DATABASE_ENGINE
|
||||
|
|
|
@ -154,6 +154,29 @@ it is needed, later in the response process.
|
|||
For more details, see the :ref:`documentation </ref/template-response>`
|
||||
on the :class:`~django.template.TemplateResponse` class.
|
||||
|
||||
Caching changes
|
||||
~~~~~~~~~~~~~~~
|
||||
|
||||
Django 1.3 sees the introduction of several improvements to
Django's caching infrastructure.
|
||||
|
||||
Firstly, Django now supports multiple named caches. In the same way
|
||||
that Django 1.2 introduced support for multiple database connections,
|
||||
Django 1.3 allows you to use the new :setting:`CACHES` setting to
|
||||
define multiple named cache connections.
|
||||
|
||||
Secondly, :ref:`Versioning <cache_versioning>`, :ref:`site-wide
prefixing <cache_key_prefixing>` and :ref:`transformation
<cache_key_transformation>` have been added to the cache API.
|
||||
|
||||
Lastly, support for pylibmc_ has been added to the memcached cache
|
||||
backend.
|
||||
|
||||
For more details, see the :doc:`documentation on
caching in Django </topics/cache>`.
|
||||
|
||||
.. _pylibmc: http://sendapatch.se/projects/pylibmc/
|
||||
|
||||
Everything else
|
||||
~~~~~~~~~~~~~~~
|
||||
|
||||
|
@ -176,10 +199,6 @@ requests. These include:
|
|||
:meth:`~django.test.client.Client.assertNumQueries` -- making it
|
||||
easier to test the database activity associated with a view.
|
||||
|
||||
* :ref:`Versioning <cache_versioning>`, :ref:`site-wide prefixing
|
||||
<cache_key_prefixing>` and :ref:`transformation
|
||||
<cache_key_transformation>` has been added to the cache API.
|
||||
|
||||
* Support for lookups spanning relations in admin's ``list_filter``.
|
||||
|
||||
* Support for _HTTPOnly cookies.
|
||||
|
|
|
@ -47,9 +47,16 @@ where your cached data should live -- whether in a database, on the filesystem
|
|||
or directly in memory. This is an important decision that affects your cache's
|
||||
performance; yes, some cache types are faster than others.
|
||||
|
||||
Your cache preference goes in the :setting:`CACHE_BACKEND` setting in your
|
||||
Your cache preference goes in the :setting:`CACHES` setting in your
|
||||
settings file. Here's an explanation of all available values for
|
||||
:setting:`CACHE_BACKEND`.
|
||||
:setting:`CACHES`.
|
||||
|
||||
.. versionchanged:: 1.3
|
||||
The settings used to configure caching changed in Django 1.3. In
|
||||
Django 1.2 and earlier, you used a single string-based
|
||||
:setting:`CACHE_BACKEND` setting to configure caches. This has
|
||||
been replaced with the new dictionary-based :setting:`CACHES`
|
||||
setting.
|
||||
|
||||
Memcached
|
||||
---------
|
||||
|
@ -66,9 +73,12 @@ fast interface for adding, retrieving and deleting arbitrary data in the cache.
|
|||
All data is stored directly in memory, so there's no overhead of database or
|
||||
filesystem usage.
|
||||
|
||||
After installing Memcached itself, you'll need to install
|
||||
``python-memcached``, which provides Python bindings to Memcached.
|
||||
This is available at ftp://ftp.tummy.com/pub/python-memcached/
|
||||
After installing Memcached itself, you'll need to install a memcached
|
||||
binding. There are several Python memcached bindings available; the
|
||||
two most common are `python-memcached`_ and `pylibmc`_.
|
||||
|
||||
.. _`python-memcached`: ftp://ftp.tummy.com/pub/python-memcached/
|
||||
.. _`pylibmc`: http://sendapatch.se/projects/pylibmc/
|
||||
|
||||
.. versionchanged:: 1.2
|
||||
In Django 1.0 and 1.1, you could also use ``cmemcache`` as a binding.
|
||||
|
@ -76,31 +86,64 @@ This is available at ftp://ftp.tummy.com/pub/python-memcached/
|
|||
a lack of maintenance on the ``cmemcache`` library itself. Support for
|
||||
``cmemcache`` will be removed completely in Django 1.4.
|
||||
|
||||
To use Memcached with Django, set :setting:`CACHE_BACKEND` to
|
||||
``memcached://ip:port/``, where ``ip`` is the IP address of the Memcached
|
||||
daemon and ``port`` is the port on which Memcached is running.
|
||||
.. versionchanged:: 1.3
|
||||
Support for ``pylibmc`` was added.
|
||||
|
||||
In this example, Memcached is running on localhost (127.0.0.1) port 11211::
|
||||
To use Memcached with Django:
|
||||
|
||||
CACHE_BACKEND = 'memcached://127.0.0.1:11211/'
|
||||
* Set :setting:`BACKEND <CACHES-BACKEND>` to
|
||||
``django.core.cache.backends.memcached.MemcachedCache`` or
|
||||
``django.core.cache.backends.memcached.PyLibMCCache`` (depending
|
||||
on your chosen memcached binding)
|
||||
|
||||
* Set :setting:`LOCATION <CACHES-LOCATION>` to ``ip:port`` values,
|
||||
where ``ip`` is the IP address of the Memcached daemon and
|
||||
``port`` is the port on which Memcached is running.
|
||||
|
||||
In this example, Memcached is running on localhost (127.0.0.1) port 11211, using
|
||||
the ``python-memcached`` binding::
|
||||
|
||||
CACHES = {
|
||||
'default': {
|
||||
'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
|
||||
'LOCATION': '127.0.0.1:11211',
|
||||
}
|
||||
}
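An equivalent configuration using the ``pylibmc`` binding simply swaps the
backend class (a sketch; it assumes the ``pylibmc`` library is installed)::

    CACHES = {
        'default': {
            'BACKEND': 'django.core.cache.backends.memcached.PyLibMCCache',
            'LOCATION': '127.0.0.1:11211',
        }
    }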
|
||||
|
||||
One excellent feature of Memcached is its ability to share cache over multiple
|
||||
servers. This means you can run Memcached daemons on multiple machines, and the
|
||||
program will treat the group of machines as a *single* cache, without the need
|
||||
to duplicate cache values on each machine. To take advantage of this feature,
|
||||
include all server addresses in :setting:`CACHE_BACKEND`, separated by
|
||||
semicolons.
|
||||
include all server addresses in :setting:`LOCATION <CACHES-LOCATION>`, either
separated by semicolons or as a list.
|
||||
|
||||
In this example, the cache is shared over Memcached instances running on IP
|
||||
address 172.19.26.240 and 172.19.26.242, both on port 11211::
|
||||
|
||||
CACHE_BACKEND = 'memcached://172.19.26.240:11211;172.19.26.242:11211/'
|
||||
CACHES = {
|
||||
'default': {
|
||||
'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
|
||||
'LOCATION': [
|
||||
'172.19.26.240:11211',
|
||||
'172.19.26.242:11211',
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
In the following example, the cache is shared over Memcached instances running
|
||||
on the IP addresses 172.19.26.240 (port 11211), 172.19.26.242 (port 11212), and
|
||||
172.19.26.244 (port 11213)::
|
||||
|
||||
CACHE_BACKEND = 'memcached://172.19.26.240:11211;172.19.26.242:11212;172.19.26.244:11213/'
|
||||
CACHES = {
|
||||
'default': {
|
||||
'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
|
||||
'LOCATION': [
|
||||
'172.19.26.240:11211',
|
||||
'172.19.26.242:11212',
|
||||
'172.19.26.244:11213',
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
A final point about Memcached is that memory-based caching has one
|
||||
disadvantage: Because the cached data is stored in memory, the data will be
|
||||
|
@ -125,12 +168,19 @@ not already being used in your database.) This command creates a single table
|
|||
in your database that is in the proper format that Django's database-cache
|
||||
system expects.
|
||||
|
||||
Once you've created that database table, set your :setting:`CACHE_BACKEND`
|
||||
setting to ``"db://tablename"``, where ``tablename`` is the name of the
|
||||
database table. In this example, the cache table's name is
|
||||
``my_cache_table``::
|
||||
Once you've created that database table, set your
|
||||
:setting:`BACKEND <CACHES-BACKEND>` setting to
|
||||
``"django.core.cache.backends.db.DatabaseCache"``, and
|
||||
:setting:`LOCATION <CACHES-LOCATION>` to ``tablename`` -- the name of the
|
||||
database table. In this example, the cache table's name is ``my_cache_table``::
|
||||
|
||||
CACHES = {
|
||||
'default': {
|
||||
'BACKEND': 'django.core.cache.backends.db.DatabaseCache',
|
||||
'LOCATION': 'my_cache_table',
|
||||
}
|
||||
}
|
||||
|
||||
CACHE_BACKEND = 'db://my_cache_table'
|
||||
|
||||
The database caching backend uses the same database as specified in your
|
||||
settings file. You can't use a different database backend for your cache table.
|
||||
|
@ -183,18 +233,28 @@ model.
|
|||
Filesystem caching
|
||||
------------------
|
||||
|
||||
To store cached items on a filesystem, use the ``"file://"`` cache type for
|
||||
:setting:`CACHE_BACKEND`. For example, to store cached data in
|
||||
To store cached items on a filesystem, use
|
||||
``"django.core.cache.backends.filebased.FileBasedCache"`` for
|
||||
:setting:`BACKEND <CACHES-BACKEND>`. For example, to store cached data in
|
||||
``/var/tmp/django_cache``, use this setting::
|
||||
|
||||
CACHE_BACKEND = 'file:///var/tmp/django_cache'
|
||||
CACHES = {
|
||||
'default': {
|
||||
'BACKEND': 'django.core.cache.backends.filebased.FileBasedCache',
|
||||
'LOCATION': '/var/tmp/django_cache',
|
||||
}
|
||||
}
|
||||
|
||||
Note that there are three forward slashes toward the beginning of that example.
|
||||
The first two are for ``file://``, and the third is the first character of the
|
||||
directory path, ``/var/tmp/django_cache``. If you're on Windows, put the
|
||||
drive letter after the ``file://``, like this::
|
||||
|
||||
file://c:/foo/bar
|
||||
If you're on Windows, put the drive letter at the beginning of the path,
|
||||
like this::
|
||||
|
||||
CACHES = {
|
||||
'default': {
|
||||
'BACKEND': 'django.core.cache.backends.filebased.FileBasedCache',
|
||||
'LOCATION': 'c:/foo/bar',
|
||||
}
|
||||
}
|
||||
|
||||
The directory path should be absolute -- that is, it should start at the root
|
||||
of your filesystem. It doesn't matter whether you put a slash at the end of the
|
||||
|
@ -215,10 +275,22 @@ Local-memory caching
|
|||
|
||||
If you want the speed advantages of in-memory caching but don't have the
|
||||
capability of running Memcached, consider the local-memory cache backend. This
|
||||
cache is multi-process and thread-safe. To use it, set :setting:`CACHE_BACKEND`
|
||||
to ``"locmem://"``. For example::
|
||||
cache is multi-process and thread-safe. To use it, set
|
||||
:setting:`BACKEND <CACHES-BACKEND>` to
|
||||
``"django.core.cache.backends.locmem.LocMemCache"``. For example::
|
||||
|
||||
CACHE_BACKEND = 'locmem://'
|
||||
CACHES = {
|
||||
'default': {
|
||||
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
|
||||
'LOCATION': 'unique-snowflake'
|
||||
}
|
||||
}
|
||||
|
||||
The cache :setting:`LOCATION <CACHES-LOCATION>` is used to identify individual
|
||||
memory stores. If you only have one locmem cache, you can omit the
|
||||
:setting:`LOCATION <CACHES-LOCATION>`; however, if you have more than one local
|
||||
memory cache, you will need to assign a name to at least one of them in
|
||||
order to keep them separate.
|
||||
|
||||
Note that each process will have its own private cache instance, which means no
|
||||
cross-process caching is possible. This obviously also means the local memory
|
||||
|
@ -234,9 +306,13 @@ just implements the cache interface without doing anything.
|
|||
This is useful if you have a production site that uses heavy-duty caching in
|
||||
various places but a development/test environment where you don't want to cache
|
||||
and don't want to have to change your code to special-case the latter. To
|
||||
activate dummy caching, set :setting:`CACHE_BACKEND` like so::
|
||||
activate dummy caching, set :setting:`BACKEND <CACHES-BACKEND>` like so::
|
||||
|
||||
CACHE_BACKEND = 'dummy://'
|
||||
CACHES = {
|
||||
'default': {
|
||||
'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
|
||||
}
|
||||
}
|
||||
|
||||
Using a custom cache backend
|
||||
----------------------------
|
||||
|
@ -245,10 +321,14 @@ Using a custom cache backend
|
|||
|
||||
While Django includes support for a number of cache backends out-of-the-box,
|
||||
sometimes you might want to use a customized cache backend. To use an external
|
||||
cache backend with Django, use a Python import path as the scheme portion (the
|
||||
part before the initial colon) of the :setting:`CACHE_BACKEND` URI, like so::
|
||||
cache backend with Django, use the Python import path as the
|
||||
:setting:`BACKEND <CACHES-BACKEND>` of the :setting:`CACHES` setting, like so::
|
||||
|
||||
CACHE_BACKEND = 'path.to.backend://'
|
||||
CACHES = {
|
||||
'default': {
|
||||
'BACKEND': 'path.to.backend',
|
||||
}
|
||||
}
|
||||
|
||||
If you're building your own backend, you can use the standard cache backends
|
||||
as reference implementations. You'll find the code in the
|
||||
|
@ -258,35 +338,77 @@ Note: Without a really compelling reason, such as a host that doesn't support
|
|||
them, you should stick to the cache backends included with Django. They've
|
||||
been well-tested and are easy to use.
|
||||
|
||||
CACHE_BACKEND arguments
|
||||
-----------------------
|
||||
Cache arguments
|
||||
---------------
|
||||
|
||||
Each cache backend may take arguments. They're given in query-string style on
|
||||
the :setting:`CACHE_BACKEND` setting. Valid arguments are as follows:
|
||||
In addition to defining the engine and name of each cache
|
||||
backend, each cache backend can be given additional arguments to
|
||||
control caching behavior. These arguments are provided as additional
|
||||
keys in the :setting:`CACHES` setting. Valid arguments are as follows:
|
||||
|
||||
* ``timeout``: The default timeout, in seconds, to use for the cache.
|
||||
This argument defaults to 300 seconds (5 minutes).
|
||||
* :setting:`TIMEOUT <CACHES-TIMEOUT>`: The default timeout, in
|
||||
seconds, to use for the cache. This argument defaults to 300
|
||||
seconds (5 minutes).
|
||||
|
||||
* ``max_entries``: For the ``locmem``, ``filesystem`` and ``database``
|
||||
backends, the maximum number of entries allowed in the cache before old
|
||||
values are deleted. This argument defaults to 300.
|
||||
* :setting:`OPTIONS <CACHES-OPTIONS>`: Any options that should be
|
||||
passed to the cache backend. The list of options understood by each
backend varies with the backend in use.
|
||||
|
||||
* ``cull_frequency``: The fraction of entries that are culled when
|
||||
``max_entries`` is reached. The actual ratio is ``1/cull_frequency``, so
|
||||
set ``cull_frequency=2`` to cull half of the entries when ``max_entries``
|
||||
is reached.
|
||||
Cache backends that implement their own culling strategy (i.e.,
|
||||
the ``locmem``, ``filesystem`` and ``database`` backends) will
|
||||
honor the following options:
|
||||
|
||||
A value of ``0`` for ``cull_frequency`` means that the entire cache will
|
||||
be dumped when ``max_entries`` is reached. This makes culling *much*
|
||||
faster at the expense of more cache misses.
|
||||
* ``MAX_ENTRIES``: the maximum number of entries allowed in
|
||||
the cache before old values are deleted. This argument
|
||||
defaults to ``300``.
|
||||
|
||||
In this example, ``timeout`` is set to ``60``::
|
||||
* ``CULL_FREQUENCY``: The fraction of entries that are culled
|
||||
when ``MAX_ENTRIES`` is reached. The actual ratio is
|
||||
``1/CULL_FREQUENCY``, so set ``CULL_FREQUENCY`` to ``2`` to
|
||||
cull half of the entries when ``MAX_ENTRIES`` is reached.
|
||||
|
||||
CACHE_BACKEND = "memcached://127.0.0.1:11211/?timeout=60"
|
||||
A value of ``0`` for ``CULL_FREQUENCY`` means that the
|
||||
entire cache will be dumped when ``MAX_ENTRIES`` is reached.
|
||||
This makes culling *much* faster at the expense of more
|
||||
cache misses.
|
||||
|
||||
In this example, ``timeout`` is ``30`` and ``max_entries`` is ``400``::
|
||||
Cache backends backed by a third-party library will pass their
|
||||
options directly to the underlying cache library. As a result,
|
||||
the list of valid options depends on the library in use.
|
||||
|
||||
CACHE_BACKEND = "locmem://?timeout=30&max_entries=400"
|
||||
* :setting:`KEY_PREFIX <CACHES-KEY_PREFIX>`: A string that will be
|
||||
automatically included (prepended by default) to all cache keys
|
||||
used by the Django server.
|
||||
|
||||
See the :ref:`cache documentation <cache_key_prefixing>` for
|
||||
more information.
|
||||
|
||||
* :setting:`VERSION <CACHES-VERSION>`: The default version number
|
||||
for cache keys generated by the Django server.
|
||||
|
||||
See the :ref:`cache documentation <cache_versioning>` for more
|
||||
information.
|
||||
|
||||
* :setting:`KEY_FUNCTION <CACHES-KEY_FUNCTION>`
|
||||
A string containing a dotted path to a function that defines how
|
||||
to compose a prefix, version and key into a final cache key.
|
||||
|
||||
See the :ref:`cache documentation <cache_key_transformation>`
|
||||
for more information.
|
||||
|
||||
In this example, a filesystem backend is being configured with a timeout
|
||||
of 60 seconds, and a maximum capacity of 1000 items::
|
||||
|
||||
CACHES = {
|
||||
'default': {
|
||||
'BACKEND': 'django.core.cache.backends.filebased.FileBasedCache',
'LOCATION': '/var/tmp/django_cache',
|
||||
'TIMEOUT': 60,
|
||||
'OPTIONS': {
|
||||
'MAX_ENTRIES': 1000
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Invalid arguments are silently ignored, as are invalid values of known
|
||||
arguments.
|
||||
|
@ -318,6 +440,7 @@ entire site. You'll need to add
|
|||
|
||||
Then, add the following required settings to your Django settings file:
|
||||
|
||||
* :setting:`CACHE_MIDDLEWARE_ALIAS` -- The cache alias to use for storage.
|
||||
* :setting:`CACHE_MIDDLEWARE_SECONDS` -- The number of seconds each page should
|
||||
be cached.
|
||||
* :setting:`CACHE_MIDDLEWARE_KEY_PREFIX` -- If the cache is shared across
|
||||
|
@ -408,7 +531,17 @@ then requests to ``/foo/1/`` and ``/foo/23/`` will be cached separately, as
|
|||
you may expect. But once a particular URL (e.g., ``/foo/23/``) has been
|
||||
requested, subsequent requests to that URL will use the cache.
|
||||
|
||||
``cache_page`` can also take an optional keyword argument, ``key_prefix``,
|
||||
``cache_page`` can also take an optional keyword argument, ``cache``,
|
||||
which directs the decorator to use a specific cache alias when caching view
|
||||
results. By default, the ``default`` alias will be used, but you can specify
|
||||
any cache alias you want::
|
||||
|
||||
@cache_page(60 * 15, cache="special_cache")
|
||||
def my_view(request):
|
||||
...
|
||||
|
||||
You can also override the cache prefix on a per-view basis. ``cache_page``
|
||||
takes an optional keyword argument, ``key_prefix``,
|
||||
which works in the same way as the :setting:`CACHE_MIDDLEWARE_KEY_PREFIX`
|
||||
setting for the middleware. It can be used like this::
|
||||
|
||||
|
@ -416,6 +549,10 @@ setting for the middleware. It can be used like this::
|
|||
def my_view(request):
|
||||
...
|
||||
|
||||
The two settings can also be combined. If you specify a ``cache`` *and*
|
||||
a ``key_prefix``, you will get all the settings of the requested cache
|
||||
alias, but with the ``key_prefix`` overridden.
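For example (a sketch; ``special_cache`` is assumed to be defined in
:setting:`CACHES`)::

    @cache_page(60 * 15, cache="special_cache", key_prefix="site1")
    def my_view(request):
        ...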
|
||||
|
||||
Specifying per-view cache in the URLconf
|
||||
----------------------------------------
|
||||
|
||||
|
@ -535,7 +672,8 @@ can be pickled; refer to the Python documentation for more information about
|
|||
pickling.)
|
||||
|
||||
The cache module, ``django.core.cache``, has a ``cache`` object that's
|
||||
automatically created from the :setting:`CACHE_BACKEND` setting::
|
||||
automatically created from the ``'default'`` entry in the :setting:`CACHES`
|
||||
setting::
|
||||
|
||||
>>> from django.core.cache import cache
|
||||
|
||||
|
@ -546,8 +684,9 @@ The basic interface is ``set(key, value, timeout)`` and ``get(key)``::
|
|||
'hello, world!'
|
||||
|
||||
The ``timeout`` argument is optional and defaults to the ``timeout``
|
||||
argument in the :setting:`CACHE_BACKEND` setting (explained above). It's the
|
||||
number of seconds the value should be stored in the cache.
|
||||
argument of the ``'default'`` backend in the :setting:`CACHES` setting
|
||||
(explained above). It's the number of seconds the value should be stored
|
||||
in the cache.
|
||||
|
||||
If the object doesn't exist in the cache, ``cache.get()`` returns ``None``::
|
||||
|
||||
|
@ -665,10 +804,10 @@ diagnose problems.
|
|||
To prevent this, Django provides the ability to prefix all cache keys
|
||||
used by a server. When a particular cache key is saved or retrieved,
|
||||
Django will automatically prefix the cache key with the value of the
|
||||
:setting:`CACHE_KEY_PREFIX` setting.
|
||||
:setting:`KEY_PREFIX <CACHES-KEY_PREFIX>` cache setting.
|
||||
|
||||
By ensuring each Django instance has a different
|
||||
:setting:`CACHE_KEY_PREFIX`, you can ensure that there will be no
|
||||
:setting:`KEY_PREFIX <CACHES-KEY_PREFIX>`, you can ensure that there will be no
|
||||
collisions in cache values.
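For example, two sites sharing a single memcached instance might each use a
distinct prefix (a sketch; the other values are illustrative)::

    CACHES = {
        'default': {
            'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
            'LOCATION': '127.0.0.1:11211',
            'KEY_PREFIX': 'site1',
        }
    }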
|
||||
|
||||
.. _cache_versioning:
|
||||
|
@ -685,9 +824,9 @@ that are still valid and useful.
|
|||
|
||||
Django provides a better way to target individual cache values.
|
||||
Django's cache framework has a system-wide version identifier,
|
||||
specified using the :setting:`CACHE_VERSION` setting. The value of
|
||||
this setting is automatically combined with the cache prefix and the
|
||||
user-provided cache key to obtain the final cache key.
|
||||
specified using the :setting:`VERSION <CACHES-VERSION>` cache setting.
|
||||
The value of this setting is automatically combined with the cache
|
||||
prefix and the user-provided cache key to obtain the final cache key.
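A quick sketch of how the per-call ``version`` argument interacts with the
default version (the key and values shown are hypothetical)::

    >>> cache.set('greeting', 'hello', version=2)
    >>> cache.get('greeting')              # looked up under the default version: not found
    >>> cache.get('greeting', version=2)
    'hello'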

By default, any key request will automatically include the site
default cache key version. However, the primitive cache functions all

@@ -739,10 +878,10 @@ If you want to combine the parts in different ways, or apply other
processing to the final key (e.g., taking a hash digest of the key
parts), you can provide a custom key function.

The setting :setting:`CACHE_KEY_FUNCTION` specifies a dotted-path to
a function matching the prototype of :func:`make_key()` above. If
provided, this custom key function will be used instead of the default
key combining function.
The :setting:`KEY_FUNCTION <CACHES-KEY_FUNCTION>` cache setting
specifies a dotted-path to a function matching the prototype of
:func:`make_key()` above. If provided, this custom key function will
be used instead of the default key combining function.
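
As an illustration, a custom key function matching that prototype might look
like this, assuming the standard ``(key, key_prefix, version)`` signature (the
``mysite.utils`` module path is hypothetical)::

    def make_key(key, key_prefix, version):
        """Join the parts with dots instead of the default separator."""
        return '.'.join([key_prefix, str(version), key])

It would then be referenced as ``'KEY_FUNCTION': 'mysite.utils.make_key'`` in
the relevant :setting:`CACHES` entry.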

Cache key warnings
------------------

@@ -773,15 +912,15 @@ built-in backends, you can subclass it, override just the ``validate_key``
method, and follow the instructions for `using a custom cache backend`_. For
instance, to do this for the ``locmem`` backend, put this code in a module::

    from django.core.cache.backends.locmem import CacheClass as LocMemCacheClass
    from django.core.cache.backends.locmem import LocMemCache

    class CacheClass(LocMemCacheClass):
    class CustomLocMemCache(LocMemCache):
        def validate_key(self, key):
            """Custom validation, raising exceptions or warnings as needed."""
            # ...

...and use the dotted Python path to this module as the scheme portion of your
:setting:`CACHE_BACKEND`.
...and use the dotted Python path to this class in the
:setting:`BACKEND <CACHES-BACKEND>` portion of your :setting:`CACHES` setting.
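
A minimal sketch of pointing an alias at that class (``myproject.backends`` is
a hypothetical module path)::

    CACHES = {
        'default': {
            'BACKEND': 'myproject.backends.CustomLocMemCache',
        }
    }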

Upstream caches
===============

@@ -1,9 +1,9 @@
from django.core.cache.backends.locmem import CacheClass as LocMemCacheClass
from django.core.cache.backends.locmem import LocMemCache

class LiberalKeyValidationMixin(object):
    def validate_key(self, key):
        pass

class CacheClass(LiberalKeyValidationMixin, LocMemCacheClass):
class CacheClass(LiberalKeyValidationMixin, LocMemCache):
    pass

@@ -11,7 +11,7 @@ import warnings

from django.conf import settings
from django.core import management
from django.core.cache import get_cache
from django.core.cache import get_cache, DEFAULT_CACHE_ALIAS
from django.core.cache.backends.base import InvalidCacheBackendError, CacheKeyWarning
from django.http import HttpResponse, HttpRequest
from django.middleware.cache import FetchFromCacheMiddleware, UpdateCacheMiddleware

@@ -33,7 +33,7 @@ class DummyCacheTests(unittest.TestCase):
    # The Dummy cache backend doesn't really behave like a test backend,
    # so it has different test requirements.
    def setUp(self):
        self.cache = get_cache('dummy://')
        self.cache = get_cache('django.core.cache.backends.dummy.DummyCache')

    def test_simple(self):
        "Dummy cache backend ignores cache set calls"

@@ -429,9 +429,6 @@ class BaseCacheTests(object):
        self.assertEqual(self.cache.get('answer1', version=2), None)

        self.assertEqual(self.v2_cache.get('answer1'), None)
        # print '---'
        # print 'c1',self.cache._cache
        # print 'v2',self.v2_cache._cache
        self.assertEqual(self.v2_cache.get('answer1', version=1), 42)
        self.assertEqual(self.v2_cache.get('answer1', version=2), None)

@@ -704,11 +701,11 @@ class DBCacheTests(unittest.TestCase, BaseCacheTests):
        # Spaces are used in the table name to ensure quoting/escaping is working
        self._table_name = 'test cache table'
        management.call_command('createcachetable', self._table_name, verbosity=0, interactive=False)
        self.cache = get_cache('db://%s?max_entries=30' % self._table_name)
        self.prefix_cache = get_cache('db://%s' % self._table_name, key_prefix='cacheprefix')
        self.v2_cache = get_cache('db://%s' % self._table_name, version=2)
        self.custom_key_cache = get_cache('db://%s' % self._table_name, key_func=custom_key_func)
        self.custom_key_cache2 = get_cache('db://%s' % self._table_name, key_func='regressiontests.cache.tests.custom_key_func')
        self.cache = get_cache('django.core.cache.backends.db.DatabaseCache', LOCATION=self._table_name, OPTIONS={'MAX_ENTRIES': 30})
        self.prefix_cache = get_cache('django.core.cache.backends.db.DatabaseCache', LOCATION=self._table_name, KEY_PREFIX='cacheprefix')
        self.v2_cache = get_cache('django.core.cache.backends.db.DatabaseCache', LOCATION=self._table_name, VERSION=2)
        self.custom_key_cache = get_cache('django.core.cache.backends.db.DatabaseCache', LOCATION=self._table_name, KEY_FUNCTION=custom_key_func)
        self.custom_key_cache2 = get_cache('django.core.cache.backends.db.DatabaseCache', LOCATION=self._table_name, KEY_FUNCTION='regressiontests.cache.tests.custom_key_func')

    def tearDown(self):
        from django.db import connection

@@ -719,16 +716,20 @@ class DBCacheTests(unittest.TestCase, BaseCacheTests):
        self.perform_cull_test(50, 29)

    def test_zero_cull(self):
        self.cache = get_cache('django.core.cache.backends.db.DatabaseCache', LOCATION=self._table_name, OPTIONS={'MAX_ENTRIES': 30, 'CULL_FREQUENCY': 0})
        self.perform_cull_test(50, 18)

    def test_old_initialization(self):
        self.cache = get_cache('db://%s?max_entries=30&cull_frequency=0' % self._table_name)
        self.perform_cull_test(50, 18)

class LocMemCacheTests(unittest.TestCase, BaseCacheTests):
    def setUp(self):
        self.cache = get_cache('locmem://?max_entries=30')
        self.prefix_cache = get_cache('locmem://', key_prefix='cacheprefix')
        self.v2_cache = get_cache('locmem://', version=2)
        self.custom_key_cache = get_cache('locmem://?max_entries=30', key_func=custom_key_func)
        self.custom_key_cache2 = get_cache('locmem://?max_entries=30', key_func='regressiontests.cache.tests.custom_key_func')
        self.cache = get_cache('django.core.cache.backends.locmem.LocMemCache', OPTIONS={'MAX_ENTRIES': 30})
        self.prefix_cache = get_cache('django.core.cache.backends.locmem.LocMemCache', KEY_PREFIX='cacheprefix')
        self.v2_cache = get_cache('django.core.cache.backends.locmem.LocMemCache', VERSION=2)
        self.custom_key_cache = get_cache('django.core.cache.backends.locmem.LocMemCache', OPTIONS={'MAX_ENTRIES': 30}, KEY_FUNCTION=custom_key_func)
        self.custom_key_cache2 = get_cache('django.core.cache.backends.locmem.LocMemCache', OPTIONS={'MAX_ENTRIES': 30}, KEY_FUNCTION='regressiontests.cache.tests.custom_key_func')

        # LocMem requires a hack to make the other caches
        # share a data store with the 'normal' cache.

@@ -744,24 +745,32 @@ class LocMemCacheTests(unittest.TestCase, BaseCacheTests):
        self.custom_key_cache2._cache = self.cache._cache
        self.custom_key_cache2._expire_info = self.cache._expire_info

    def tearDown(self):
        self.cache.clear()

    def test_cull(self):
        self.perform_cull_test(50, 29)

    def test_zero_cull(self):
        self.cache = get_cache('django.core.cache.backends.locmem.LocMemCache', OPTIONS={'MAX_ENTRIES': 30, 'CULL_FREQUENCY': 0})
        self.perform_cull_test(50, 19)

    def test_old_initialization(self):
        self.cache = get_cache('locmem://?max_entries=30&cull_frequency=0')
        self.perform_cull_test(50, 19)

# memcached backend isn't guaranteed to be available.
# To check the memcached backend, the test settings file will
# need to contain a CACHE_BACKEND setting that points at
# need to contain a cache backend setting that points at
# your memcache server.
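# A minimal sketch of such a test-settings entry (the host/port value is
# illustrative, not required):
#
#     CACHES = {
#         'default': {
#             'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
#             'LOCATION': '127.0.0.1:11211',
#         }
#     }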
class MemcachedCacheTests(unittest.TestCase, BaseCacheTests):
    def setUp(self):
        self.cache = get_cache(settings.CACHE_BACKEND)
        self.prefix_cache = get_cache(settings.CACHE_BACKEND, key_prefix='cacheprefix')
        self.v2_cache = get_cache(settings.CACHE_BACKEND, version=2)
        self.custom_key_cache = get_cache(settings.CACHE_BACKEND, key_func=custom_key_func)
        self.custom_key_cache2 = get_cache(settings.CACHE_BACKEND, key_func='regressiontests.cache.tests.custom_key_func')
        name = settings.CACHES[DEFAULT_CACHE_ALIAS]['LOCATION']
        self.cache = get_cache('django.core.cache.backends.memcached.MemcachedCache', LOCATION=name)
        self.prefix_cache = get_cache('django.core.cache.backends.memcached.MemcachedCache', LOCATION=name, KEY_PREFIX='cacheprefix')
        self.v2_cache = get_cache('django.core.cache.backends.memcached.MemcachedCache', LOCATION=name, VERSION=2)
        self.custom_key_cache = get_cache('django.core.cache.backends.memcached.MemcachedCache', LOCATION=name, KEY_FUNCTION=custom_key_func)
        self.custom_key_cache2 = get_cache('django.core.cache.backends.memcached.MemcachedCache', LOCATION=name, KEY_FUNCTION='regressiontests.cache.tests.custom_key_func')

    def tearDown(self):
        self.cache.clear()

@@ -781,7 +790,7 @@ class MemcachedCacheTests(unittest.TestCase, BaseCacheTests):
        # memcached limits key length to 250
        self.assertRaises(Exception, self.cache.set, 'a' * 251, 'value')

MemcachedCacheTests = unittest.skipUnless(settings.CACHE_BACKEND.startswith('memcached://'), "memcached not available")(MemcachedCacheTests)
MemcachedCacheTests = unittest.skipUnless(settings.CACHES[DEFAULT_CACHE_ALIAS]['BACKEND'].startswith('django.core.cache.backends.memcached.'), "memcached not available")(MemcachedCacheTests)

class FileBasedCacheTests(unittest.TestCase, BaseCacheTests):
    """

@@ -789,11 +798,11 @@ class FileBasedCacheTests(unittest.TestCase, BaseCacheTests):
    """
    def setUp(self):
        self.dirname = tempfile.mkdtemp()
        self.cache = get_cache('file://%s?max_entries=30' % self.dirname)
        self.prefix_cache = get_cache('file://%s' % self.dirname, key_prefix='cacheprefix')
        self.v2_cache = get_cache('file://%s' % self.dirname, version=2)
        self.custom_key_cache = get_cache('file://%s' % self.dirname, key_func=custom_key_func)
        self.custom_key_cache2 = get_cache('file://%s' % self.dirname, key_func='regressiontests.cache.tests.custom_key_func')
        self.cache = get_cache('django.core.cache.backends.filebased.FileBasedCache', LOCATION=self.dirname, OPTIONS={'MAX_ENTRIES': 30})
        self.prefix_cache = get_cache('django.core.cache.backends.filebased.FileBasedCache', LOCATION=self.dirname, KEY_PREFIX='cacheprefix')
        self.v2_cache = get_cache('django.core.cache.backends.filebased.FileBasedCache', LOCATION=self.dirname, VERSION=2)
        self.custom_key_cache = get_cache('django.core.cache.backends.filebased.FileBasedCache', LOCATION=self.dirname, KEY_FUNCTION=custom_key_func)
        self.custom_key_cache2 = get_cache('django.core.cache.backends.filebased.FileBasedCache', LOCATION=self.dirname, KEY_FUNCTION='regressiontests.cache.tests.custom_key_func')

    def tearDown(self):
        self.cache.clear()

@@ -824,6 +833,10 @@ class FileBasedCacheTests(unittest.TestCase, BaseCacheTests):
    def test_cull(self):
        self.perform_cull_test(50, 29)

    def test_old_initialization(self):
        self.cache = get_cache('file://%s?max_entries=30' % self.dirname)
        self.perform_cull_test(50, 29)

class CustomCacheKeyValidationTests(unittest.TestCase):
    """
    Tests for the ability to mixin a custom ``validate_key`` method to

@@ -911,28 +924,35 @@ class CacheUtils(unittest.TestCase):
class PrefixedCacheUtils(CacheUtils):
    def setUp(self):
        super(PrefixedCacheUtils, self).setUp()
        self.old_cache_key_prefix = settings.CACHE_KEY_PREFIX
        settings.CACHE_KEY_PREFIX = 'cacheprefix'
        self.old_cache_key_prefix = settings.CACHES['default'].get('KEY_PREFIX', None)
        settings.CACHES['default']['KEY_PREFIX'] = 'cacheprefix'

    def tearDown(self):
        super(PrefixedCacheUtils, self).tearDown()
        settings.CACHE_KEY_PREFIX = self.old_cache_key_prefix
        if self.old_cache_key_prefix is None:
            del settings.CACHES['default']['KEY_PREFIX']
        else:
            settings.CACHES['default']['KEY_PREFIX'] = self.old_cache_key_prefix

class CacheHEADTest(unittest.TestCase):

    def setUp(self):
        self.orig_cache_middleware_seconds = settings.CACHE_MIDDLEWARE_SECONDS
        self.orig_cache_middleware_key_prefix = settings.CACHE_MIDDLEWARE_KEY_PREFIX
        self.orig_cache_backend = settings.CACHE_BACKEND
        self.orig_caches = settings.CACHES
        settings.CACHE_MIDDLEWARE_SECONDS = 60
        settings.CACHE_MIDDLEWARE_KEY_PREFIX = 'test'
        settings.CACHE_BACKEND = 'locmem:///'
        settings.CACHES = {
            'default': {
                'BACKEND': 'django.core.cache.backends.locmem.LocMemCache'
            }
        }
        self.path = '/cache/test/'

    def tearDown(self):
        settings.CACHE_MIDDLEWARE_SECONDS = self.orig_cache_middleware_seconds
        settings.CACHE_MIDDLEWARE_KEY_PREFIX = self.orig_cache_middleware_key_prefix
        settings.CACHE_BACKEND = self.orig_cache_backend
        settings.CACHES = self.orig_caches

    def _get_request(self, method):
        request = HttpRequest()

@@ -981,7 +1001,7 @@ class CacheI18nTest(unittest.TestCase):
    def setUp(self):
        self.orig_cache_middleware_seconds = settings.CACHE_MIDDLEWARE_SECONDS
        self.orig_cache_middleware_key_prefix = settings.CACHE_MIDDLEWARE_KEY_PREFIX
        self.orig_cache_backend = settings.CACHE_BACKEND
        self.orig_caches = settings.CACHES
        self.orig_use_i18n = settings.USE_I18N
        self.orig_languages = settings.LANGUAGES
        settings.LANGUAGES = (

@@ -994,7 +1014,7 @@ class CacheI18nTest(unittest.TestCase):
    def tearDown(self):
        settings.CACHE_MIDDLEWARE_SECONDS = self.orig_cache_middleware_seconds
        settings.CACHE_MIDDLEWARE_KEY_PREFIX = self.orig_cache_middleware_key_prefix
        settings.CACHE_BACKEND = self.orig_cache_backend
        settings.CACHES = self.orig_caches
        settings.USE_I18N = self.orig_use_i18n
        settings.LANGUAGES = self.orig_languages
        translation.deactivate()

@@ -1046,8 +1066,12 @@ class CacheI18nTest(unittest.TestCase):
            return UpdateCacheMiddleware().process_response(request, response)

        settings.CACHE_MIDDLEWARE_SECONDS = 60
        settings.CACHE_MIDDLEWARE_KEY_PREFIX="test"
        settings.CACHE_BACKEND='locmem:///'
        settings.CACHE_MIDDLEWARE_KEY_PREFIX = "test"
        settings.CACHES = {
            'default': {
                'BACKEND': 'django.core.cache.backends.locmem.LocMemCache'
            }
        }
        settings.USE_ETAGS = True
        settings.USE_I18N = True
        en_message ="Hello world!"

@@ -1083,12 +1107,15 @@ class CacheI18nTest(unittest.TestCase):
class PrefixedCacheI18nTest(CacheI18nTest):
    def setUp(self):
        super(PrefixedCacheI18nTest, self).setUp()
        self.old_cache_key_prefix = settings.CACHE_KEY_PREFIX
        settings.CACHE_KEY_PREFIX = 'cacheprefix'
        self.old_cache_key_prefix = settings.CACHES['default'].get('KEY_PREFIX', None)
        settings.CACHES['default']['KEY_PREFIX'] = 'cacheprefix'

    def tearDown(self):
        super(PrefixedCacheI18nTest, self).tearDown()
        settings.CACHE_KEY_PREFIX = self.old_cache_key_prefix
        if self.old_cache_key_prefix is None:
            del settings.CACHES['default']['KEY_PREFIX']
        else:
            settings.CACHES['default']['KEY_PREFIX'] = self.old_cache_key_prefix

if __name__ == '__main__':
    unittest.main()

@@ -1453,8 +1453,6 @@ class Templates(unittest.TestCase):
            'url-asvar03': ('{% load url from future %}{% url "no_such_view" as url %}{{ url }}', {}, ''),

            ### CACHE TAG ######################################################
            'cache01': ('{% load cache %}{% cache -1 test %}cache01{% endcache %}', {}, 'cache01'),
            'cache02': ('{% load cache %}{% cache -1 test %}cache02{% endcache %}', {}, 'cache02'),
            'cache03': ('{% load cache %}{% cache 2 test %}cache03{% endcache %}', {}, 'cache03'),
            'cache04': ('{% load cache %}{% cache 2 test %}cache04{% endcache %}', {}, 'cache03'),
            'cache05': ('{% load cache %}{% cache 2 test foo %}cache05{% endcache %}', {'foo': 1}, 'cache05'),

@@ -1463,8 +1461,6 @@ class Templates(unittest.TestCase):

            # Allow first argument to be a variable.
            'cache08': ('{% load cache %}{% cache time test foo %}cache08{% endcache %}', {'foo': 2, 'time': 2}, 'cache06'),
            'cache09': ('{% load cache %}{% cache time test foo %}cache09{% endcache %}', {'foo': 3, 'time': -1}, 'cache09'),
            'cache10': ('{% load cache %}{% cache time test foo %}cache10{% endcache %}', {'foo': 3, 'time': -1}, 'cache10'),

            # Raise exception if we don't have at least 2 args, first one integer.
            'cache11': ('{% load cache %}{% cache %}{% endcache %}', {}, template.TemplateSyntaxError),