Fixed #12671 -- Added set_many(), delete_many(), and clear() methods to the cache backend interface. Thanks to Jeff Balogh for the report and patch.
git-svn-id: http://code.djangoproject.com/svn/django/trunk@12306 bcc190cf-cafb-0310-a4f2-bffc1f526a37
parent c6ee1f6f24
commit 8e8d4b5888
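For quick reference, a usage sketch of the API surface this commit adds, mirroring the doctest examples in the documentation hunk below (it assumes a configured cache backend; on the dummy backend these calls are no-ops):

    >>> from django.core.cache import cache
    >>> cache.set_many({'a': 1, 'b': 2, 'c': 3})
    >>> cache.get_many(['a', 'b', 'c'])
    {'a': 1, 'b': 2, 'c': 3}
    >>> cache.delete_many(['a', 'b'])
    >>> cache.clear()    # removes *everything* from the cache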
AUTHORS
@@ -56,6 +56,7 @@ answer newbie questions, and generally made Django that much better:
     Mike Axiak <axiak@mit.edu>
     Niran Babalola <niran@niran.org>
     Morten Bagai <m@bagai.com>
+    Jeff Balogh <jbalogh@mozilla.com>
     Mikaël Barbero <mikael.barbero nospam at nospam free.fr>
     Randy Barlow <randy@electronsweatshop.com>
     Scott Barr <scott@divisionbyzero.com.au>
django/core/cache/backends/base.py
@@ -91,3 +91,28 @@ class BaseCache(object):
         # so that it always has the same functionality as has_key(), even
         # if a subclass overrides it.
         return self.has_key(key)
+
+    def set_many(self, data, timeout=None):
+        """
+        Set a bunch of values in the cache at once from a dict of key/value
+        pairs. For certain backends (memcached), this is much more efficient
+        than calling set() multiple times.
+
+        If timeout is given, that timeout will be used for the key; otherwise
+        the default cache timeout will be used.
+        """
+        for key, value in data.items():
+            self.set(key, value, timeout)
+
+    def delete_many(self, keys):
+        """
+        Delete a bunch of values from the cache at once. For certain backends
+        (memcached), this is much more efficient than calling delete() multiple
+        times.
+        """
+        for key in keys:
+            self.delete(key)
+
+    def clear(self):
+        """Remove *all* values from the cache at once."""
+        raise NotImplementedError
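The defaults above mean a backend only has to override clear(); set_many() and delete_many() fall back to per-key loops. A minimal, hypothetical subclass (the DictCache name and its dict storage are illustrative only, not part of this commit) might look like:

    from django.core.cache.backends.base import BaseCache

    class DictCache(BaseCache):
        "Toy backend: values live in a plain dict; timeouts are ignored."
        def __init__(self, host, params):
            BaseCache.__init__(self, params)
            self._store = {}

        def get(self, key, default=None):
            return self._store.get(key, default)

        def set(self, key, value, timeout=None):
            self._store[key] = value

        def delete(self, key):
            self._store.pop(key, None)

        def clear(self):
            # BaseCache.clear() raises NotImplementedError, so a real backend
            # must override it; set_many()/delete_many() are inherited loops.
            self._store.clear()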
django/core/cache/backends/db.py
@@ -84,7 +84,7 @@ class CacheClass(BaseCache):
 
     def _cull(self, cursor, now):
         if self._cull_frequency == 0:
-            cursor.execute("DELETE FROM %s" % self._table)
+            self.clear()
         else:
             cursor.execute("DELETE FROM %s WHERE expires < %%s" % self._table, [str(now)])
             cursor.execute("SELECT COUNT(*) FROM %s" % self._table)
@@ -92,3 +92,7 @@ class CacheClass(BaseCache):
         if num > self._max_entries:
             cursor.execute("SELECT cache_key FROM %s ORDER BY cache_key LIMIT 1 OFFSET %%s" % self._table, [num / self._cull_frequency])
             cursor.execute("DELETE FROM %s WHERE cache_key < %%s" % self._table, [cursor.fetchone()[0]])
+
+    def clear(self):
+        cursor = connection.cursor()
+        cursor.execute('DELETE FROM %s' % self._table)
django/core/cache/backends/dummy.py
@@ -23,3 +23,12 @@ class CacheClass(BaseCache):
 
     def has_key(self, *args, **kwargs):
         return False
+
+    def set_many(self, *args, **kwargs):
+        pass
+
+    def delete_many(self, *args, **kwargs):
+        pass
+
+    def clear(self):
+        pass
django/core/cache/backends/filebased.py
@@ -2,6 +2,7 @@
 
 import os
 import time
+import shutil
 try:
     import cPickle as pickle
 except ImportError:
@@ -150,3 +151,9 @@ class CacheClass(BaseCache):
             count += len(files)
         return count
     _num_entries = property(_get_num_entries)
+
+    def clear(self):
+        try:
+            shutil.rmtree(self._dir)
+        except (IOError, OSError):
+            pass
django/core/cache/backends/locmem.py
@@ -110,8 +110,7 @@ class CacheClass(BaseCache):
 
     def _cull(self):
         if self._cull_frequency == 0:
-            self._cache.clear()
-            self._expire_info.clear()
+            self.clear()
         else:
             doomed = [k for (i, k) in enumerate(self._cache) if i % self._cull_frequency == 0]
             for k in doomed:
@@ -133,3 +132,7 @@ class CacheClass(BaseCache):
             self._delete(key)
         finally:
             self._lock.writer_leaves()
+
+    def clear(self):
+        self._cache.clear()
+        self._expire_info.clear()
django/core/cache/backends/memcached.py
@@ -71,3 +71,17 @@ class CacheClass(BaseCache):
         if val is None:
             raise ValueError("Key '%s' not found" % key)
         return val
+
+    def set_many(self, data, timeout=0):
+        safe_data = {}
+        for key, value in data.items():
+            if isinstance(value, unicode):
+                value = value.encode('utf-8')
+            safe_data[smart_str(key)] = value
+        self._cache.set_multi(safe_data, timeout or self.default_timeout)
+
+    def delete_many(self, keys):
+        self._cache.delete_multi(map(smart_str, keys))
+
+    def clear(self):
+        self._cache.flush_all()
docs/topics/cache.txt
@@ -62,7 +62,7 @@ It's used by sites such as Facebook and Wikipedia to reduce database access and
 dramatically increase site performance.
 
 Memcached is available for free at http://danga.com/memcached/ . It runs as a
-daemon and is allotted a specified amount of RAM. All it does is provide an
+daemon and is allotted a specified amount of RAM. All it does is provide a
 fast interface for adding, retrieving and deleting arbitrary data in the cache.
 All data is stored directly in memory, so there's no overhead of database or
 filesystem usage.
@@ -522,11 +522,37 @@ actually exist in the cache (and haven't expired)::
     >>> cache.get_many(['a', 'b', 'c'])
     {'a': 1, 'b': 2, 'c': 3}
 
-Finally, you can delete keys explicitly with ``delete()``. This is an easy way
-of clearing the cache for a particular object::
+.. versionadded:: 1.2
+
+To set multiple values more efficiently, use ``set_many()`` to pass a dictionary
+of key-value pairs::
+
+    >>> cache.set_many({'a': 1, 'b': 2, 'c': 3})
+    >>> cache.get_many(['a', 'b', 'c'])
+    {'a': 1, 'b': 2, 'c': 3}
+
+Like ``cache.set()``, ``set_many()`` takes an optional ``timeout`` parameter.
+
+You can delete keys explicitly with ``delete()``. This is an easy way of
+clearing the cache for a particular object::
 
     >>> cache.delete('a')
 
+.. versionadded:: 1.2
+
+If you want to clear a bunch of keys at once, ``delete_many()`` can take a list
+of keys to be cleared::
+
+    >>> cache.delete_many(['a', 'b', 'c'])
+
+.. versionadded:: 1.2
+
+Finally, if you want to delete all the keys in the cache, use
+``cache.clear()``. Be careful with this; ``clear()`` will remove *everything*
+from the cache, not just the keys set by your application. ::
+
+    >>> cache.clear()
+
 .. versionadded:: 1.1
 
 You can also increment or decrement a key that already exists using the
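The new documentation notes that ``set_many()`` accepts an optional ``timeout`` but does not show it in use; a small illustrative example (the 30-second value is arbitrary):

    >>> cache.set_many({'a': 1, 'b': 2}, 30)    # both keys expire after 30 seconds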
tests/regressiontests/cache/tests.py
@@ -129,9 +129,24 @@ class DummyCacheTests(unittest.TestCase):
             self.cache.set(key, value)
             self.assertEqual(self.cache.get(key), None)
 
+    def test_set_many(self):
+        "set_many does nothing for the dummy cache backend"
+        self.cache.set_many({'a': 1, 'b': 2})
+
+    def test_delete_many(self):
+        "delete_many does nothing for the dummy cache backend"
+        self.cache.delete_many(['a', 'b'])
+
+    def test_clear(self):
+        "clear does nothing for the dummy cache backend"
+        self.cache.clear()
+
 
 class BaseCacheTests(object):
     # A common set of tests to apply to all cache backends
+    def tearDown(self):
+        self.cache.clear()
+
     def test_simple(self):
         # Simple cache set/get works
         self.cache.set("key", "value")
@@ -278,6 +293,37 @@ class BaseCacheTests(object):
             self.cache.set(key, value)
             self.assertEqual(self.cache.get(key), value)
 
+    def test_set_many(self):
+        # Multiple keys can be set using set_many
+        self.cache.set_many({"key1": "spam", "key2": "eggs"})
+        self.assertEqual(self.cache.get("key1"), "spam")
+        self.assertEqual(self.cache.get("key2"), "eggs")
+
+    def test_set_many_expiration(self):
+        # set_many takes a second ``timeout`` parameter
+        self.cache.set_many({"key1": "spam", "key2": "eggs"}, 1)
+        time.sleep(2)
+        self.assertEqual(self.cache.get("key1"), None)
+        self.assertEqual(self.cache.get("key2"), None)
+
+    def test_delete_many(self):
+        # Multiple keys can be deleted using delete_many
+        self.cache.set("key1", "spam")
+        self.cache.set("key2", "eggs")
+        self.cache.set("key3", "ham")
+        self.cache.delete_many(["key1", "key2"])
+        self.assertEqual(self.cache.get("key1"), None)
+        self.assertEqual(self.cache.get("key2"), None)
+        self.assertEqual(self.cache.get("key3"), "ham")
+
+    def test_clear(self):
+        # The cache can be emptied using clear
+        self.cache.set("key1", "spam")
+        self.cache.set("key2", "eggs")
+        self.cache.clear()
+        self.assertEqual(self.cache.get("key1"), None)
+        self.assertEqual(self.cache.get("key2"), None)
+
 class DBCacheTests(unittest.TestCase, BaseCacheTests):
     def setUp(self):
         management.call_command('createcachetable', 'test_cache_table', verbosity=0, interactive=False)
@@ -286,7 +332,7 @@ class DBCacheTests(unittest.TestCase, BaseCacheTests):
     def tearDown(self):
         from django.db import connection
         cursor = connection.cursor()
-        cursor.execute('DROP TABLE test_cache_table');
+        cursor.execute('DROP TABLE test_cache_table')
 
 class LocMemCacheTests(unittest.TestCase, BaseCacheTests):
     def setUp(self):
@@ -309,9 +355,6 @@ class FileBasedCacheTests(unittest.TestCase, BaseCacheTests):
         self.dirname = tempfile.mkdtemp()
         self.cache = get_cache('file://%s' % self.dirname)
 
-    def tearDown(self):
-        shutil.rmtree(self.dirname)
-
     def test_hashing(self):
         """Test that keys are hashed into subdirectories correctly"""
         self.cache.set("foo", "bar")