Fixed #392 -- Fixed bug in memcache setup if arguments are given. Thanks, adrian@exoweb.net
git-svn-id: http://code.djangoproject.com/svn/django/trunk@598 bcc190cf-cafb-0310-a4f2-bffc1f526a37
parent 29c50bc0d4
commit 582e6a1010
@@ -2,8 +2,8 @@
Caching framework.

This module defines a set of cache backends that all conform to a simple API.
In a nutshell, a cache is a set of values -- which can be any object that
may be pickled -- identified by string keys. For the complete API, see
the abstract Cache object, below.

Client code should not access a cache backend directly; instead
@@ -12,49 +12,49 @@ settings.CACHE_BACKEND and use that to create and load a cache object.

The CACHE_BACKEND setting is a quasi-URI; examples are:

    memcached://127.0.0.1:11211/    A memcached backend; the server is running
                                    on localhost port 11211.

    pgsql://tablename/              A pgsql backend (the pgsql backend uses
                                    the same database/username as the rest of
                                    the CMS, so only a table name is needed.)

    file:///var/tmp/django.cache/   A file-based cache at /var/tmp/django.cache

    simple:///                      A simple single-process memory cache; you
                                    probably don't want to use this except for
                                    testing. Note that this cache backend is
                                    NOT threadsafe!

All caches may take arguments; these are given in query-string style. Valid
arguments are:

    timeout
        Default timeout, in seconds, to use for the cache. Defaults
        to 5 minutes (300 seconds).

    max_entries
        For the simple, file, and database backends, the maximum number of
        entries allowed in the cache before it is cleaned. Defaults to
        300.

    cull_percentage
        The percentage of entries that are culled when max_entries is reached.
        The actual percentage is 1/cull_percentage, so set cull_percentage=3 to
        cull 1/3 of the entries when max_entries is reached.

        A value of 0 for cull_percentage means that the entire cache will be
        dumped when max_entries is reached. This makes culling *much* faster
        at the expense of more cache misses.

For example:

    memcached://127.0.0.1:11211/?timeout=60
    pgsql://tablename/?timeout=120&max_entries=500&cull_percentage=4

Invalid arguments are silently ignored, as are invalid values of known
arguments.

So far, only the memcached and simple backends have been implemented; backends
using postgres and file-system storage are planned.
"""
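For orientation, a usage sketch of the API this docstring describes; the import path is an assumption (only get_cache itself appears in this diff), and the keys and values are invented:

    # Hedged sketch: build a cache from a backend quasi-URI as documented above.
    from django.core.cache import get_cache   # assumed import path, not shown in this diff

    cache = get_cache('simple:///?timeout=60&max_entries=400')
    cache.set('greeting', 'hello', 30)          # explicit 30-second timeout
    print(cache.get('greeting'))                # 'hello'
    print(cache.get('missing', 'fallback'))     # returns the supplied default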
@@ -79,14 +79,14 @@ class _Cache:
        except (ValueError, TypeError):
            timeout = 300
        self.default_timeout = timeout

    def get(self, key, default=None):
        '''
        Fetch a given key from the cache. If the key does not exist, return
        default, which itself defaults to None.
        '''
        raise NotImplementedError

    def set(self, key, value, timeout=None):
        '''
        Set a value in the cache. If timeout is given, that timeout will be
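The abstract methods above define the contract every backend must honour. As a purely illustrative example (the class name and the dict storage are invented, not part of this commit), a minimal conforming backend could look like:

    class _DictCache(_Cache):
        """Hypothetical backend: keeps values in a plain dict and ignores timeouts."""

        def __init__(self, host, params):
            _Cache.__init__(self, params)
            self._data = {}

        def get(self, key, default=None):
            return self._data.get(key, default)

        def set(self, key, value, timeout=None):
            self._data[key] = value

        def delete(self, key):
            self._data.pop(key, None)

        def has_key(self, key):
            return key in self._data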
@@ -104,7 +104,7 @@ class _Cache:
        '''
        Fetch a bunch of keys from the cache. For certain backends (memcached,
        pgsql) this can be *much* faster when fetching multiple values.

        Returns a dict mapping each key in keys to its value. If the given
        key is missing, it will be missing from the response dict.
        '''
@@ -114,7 +114,7 @@ class _Cache:
            if val is not None:
                d[k] = val
        return d

    def has_key(self, key):
        '''
        Returns True if the key is in the cache and has not expired.
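A short usage sketch of the bulk API shown above (keys and values are illustrative):

    cache.set('a', 1)
    cache.set('b', 2)
    print(cache.get_many(['a', 'b', 'c']))   # {'a': 1, 'b': 2} -- missing keys are omitted
    print(cache.has_key('a'))                # True while 'a' has not expired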
@@ -132,24 +132,24 @@ except ImportError:
else:
    class _MemcachedCache(_Cache):
        """Memcached cache backend."""

        def __init__(self, server, params):
            _Cache.__init__(self, params)
            self._cache = memcache.Client([server])

        def get(self, key, default=None):
            val = self._cache.get(key)
            if val is None:
                return default
            else:
                return val

        def set(self, key, value, timeout=0):
            self._cache.set(key, value, timeout)

        def delete(self, key):
            self._cache.delete(key)

        def get_many(self, keys):
            return self._cache.get_multi(keys)

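The memcached backend is a thin wrapper around memcache.Client from the python-memcached library. A direct-construction sketch (normally get_cache() does this from the URI; the server address and page keys below are invented):

    backend = _MemcachedCache('127.0.0.1:11211', {'timeout': '60'})
    backend.set('page:/home/', '<html>...</html>')        # timeout defaults to 0, which memcached treats as no expiry
    backend.set('page:/about/', '<html>...</html>', 120)  # explicit two-minute expiry
    print(backend.get('page:/home/', ''))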
@@ -158,27 +158,27 @@ else:
##################################

import time

class _SimpleCache(_Cache):
    """Simple single-process in-memory cache"""

    def __init__(self, host, params):
        _Cache.__init__(self, params)
        self._cache = {}
        self._expire_info = {}

        max_entries = params.get('max_entries', 300)
        try:
            self._max_entries = int(max_entries)
        except (ValueError, TypeError):
            self._max_entries = 300

        cull_frequency = params.get('cull_frequency', 3)
        try:
            self._cull_frequency = int(cull_frequency)
        except (ValueError, TypeError):
            self._cull_frequency = 3

    def get(self, key, default=None):
        now = time.time()
        exp = self._expire_info.get(key, now)
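Note that the constructor reads a cull_frequency parameter, which the module docstring above documents as cull_percentage. A direct-construction sketch for the simple backend (values are illustrative, and arrive as strings when parsed from the URI):

    params = {'timeout': '30', 'max_entries': '100', 'cull_frequency': '2'}
    simple = _SimpleCache('', params)    # host is unused by this backend
    simple.set('answer', 42)
    print(simple.get('answer'))          # 42
    print(simple.get('unknown', 0))      # 0 -- missing (or expired) keys fall back to the default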
@@ -188,7 +188,7 @@ class _SimpleCache(_Cache):
            return default
        else:
            return self._cache.get(key, default)

    def set(self, key, value, timeout=None):
        if len(self._cache) >= self._max_entries:
            self._cull()
@@ -196,7 +196,7 @@ class _SimpleCache(_Cache):
            timeout = self.default_timeout
        self._cache[key] = value
        self._expire_info[key] = time.time() + timeout

    def delete(self, key):
        try:
            del self._cache[key]
@@ -206,7 +206,7 @@ class _SimpleCache(_Cache):
            del self._expire_info[key]
        except KeyError:
            pass

    def has_key(self, key):
        return self._cache.has_key(key)

@@ -219,7 +219,7 @@ class _SimpleCache(_Cache):
        for k in doomed:
            self.delete(k)

##########################################
# Read settings and load a cache backend #
##########################################

@@ -238,12 +238,12 @@ def get_cache(backend_uri):
        raise InvalidCacheBackendError("Backend URI must start with scheme://")
    if scheme not in _BACKENDS.keys():
        raise InvalidCacheBackendError("%r is not a valid cache backend" % scheme)

    host = rest[2:]
    qpos = rest.find('?')
    if qpos != -1:
        params = dict(parse_qsl(rest[qpos+1:]))
-       host = rest[:qpos]
+       host = rest[2:qpos]
    else:
        params = {}
    if host.endswith('/'):
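This last hunk is the actual fix for #392: when the backend URI carries query-string arguments, host was taken as rest[:qpos], which still includes the leading '//' of the quasi-URI; slicing from index 2 strips it, matching the no-arguments branch. A standalone sketch of the parsing (a hypothetical helper that mirrors the slicing above; how scheme and rest are derived is not shown in this hunk, and urllib.parse stands in here for the original's parse_qsl):

    from urllib.parse import parse_qsl

    def split_backend_uri(backend_uri):
        # Mirror the variables in get_cache(): 'rest' keeps the leading '//'.
        scheme, rest = backend_uri.split(':', 1)
        host = rest[2:]
        params = {}
        qpos = rest.find('?')
        if qpos != -1:
            params = dict(parse_qsl(rest[qpos + 1:]))
            host = rest[2:qpos]   # the fix; rest[:qpos] left '//' on the front of host
        return scheme, host, params

    print(split_backend_uri('memcached://127.0.0.1:11211/?timeout=60'))
    # ('memcached', '127.0.0.1:11211/', {'timeout': '60'})
    # With the old rest[:qpos], host came back as '//127.0.0.1:11211/', so the
    # memcached client was handed a bogus server address whenever arguments were given.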