From ffc37e2343a93cf6d44247e20cd263b41f931716 Mon Sep 17 00:00:00 2001 From: Curtis Maloney Date: Sat, 19 Oct 2013 09:49:24 +1100 Subject: [PATCH] Fixed #21012 -- New API to access cache backends. Thanks Curtis Malony and Florian Apolloner. Squashed commit of the following: commit 3380495e93f5e81b80a251b03ddb0a80b17685f5 Author: Aymeric Augustin Date: Sat Nov 23 14:18:07 2013 +0100 Looked up the template_fragments cache at runtime. commit 905a74f52b24a198f802520ff06290a94dedc687 Author: Aymeric Augustin Date: Sat Nov 23 14:19:48 2013 +0100 Removed all uses of create_cache. Refactored the cache tests significantly. Made it safe to override the CACHES setting. commit 35e289fe9285feffed3c60657af9279a6a2cfccc Author: Aymeric Augustin Date: Sat Nov 23 12:23:57 2013 +0100 Removed create_cache function. commit 8e274f747a1f1c0c0e6c37873e29067f7fa022e8 Author: Aymeric Augustin Date: Sat Nov 23 12:04:52 2013 +0100 Updated docs to describe a simplified cache backend API. commit ee7eb0f73e6d4699edcf5d357dce715224525cf6 Author: Curtis Maloney Date: Sat Oct 19 09:49:24 2013 +1100 Fixed #21012 -- Thread-local caches, like databases. --- django/contrib/sessions/backends/cache.py | 4 +- django/contrib/sessions/backends/cached_db.py | 4 +- django/contrib/sessions/tests.py | 11 +- django/contrib/staticfiles/storage.py | 4 +- django/core/cache/__init__.py | 76 +- django/core/cache/backends/memcached.py | 14 +- .../management/commands/createcachetable.py | 4 +- django/middleware/cache.py | 50 +- django/templatetags/cache.py | 19 +- django/test/signals.py | 8 + django/utils/cache.py | 6 +- docs/internals/deprecation.txt | 5 +- docs/releases/1.7.txt | 18 + docs/topics/cache.txt | 52 +- tests/cache/tests.py | 1058 +++++++++-------- tests/template_tests/tests.py | 2 +- 16 files changed, 729 insertions(+), 606 deletions(-) diff --git a/django/contrib/sessions/backends/cache.py b/django/contrib/sessions/backends/cache.py index 596042fcb32..b1058b32943 100644 --- a/django/contrib/sessions/backends/cache.py +++ b/django/contrib/sessions/backends/cache.py @@ -1,6 +1,6 @@ from django.conf import settings from django.contrib.sessions.backends.base import SessionBase, CreateError -from django.core.cache import get_cache +from django.core.cache import caches from django.utils.six.moves import xrange KEY_PREFIX = "django.contrib.sessions.cache" @@ -11,7 +11,7 @@ class SessionStore(SessionBase): A cache-based session store. 
""" def __init__(self, session_key=None): - self._cache = get_cache(settings.SESSION_CACHE_ALIAS) + self._cache = caches[settings.SESSION_CACHE_ALIAS] super(SessionStore, self).__init__(session_key) @property diff --git a/django/contrib/sessions/backends/cached_db.py b/django/contrib/sessions/backends/cached_db.py index 15b7172f859..f5c14b0e1e0 100644 --- a/django/contrib/sessions/backends/cached_db.py +++ b/django/contrib/sessions/backends/cached_db.py @@ -6,7 +6,7 @@ import logging from django.conf import settings from django.contrib.sessions.backends.db import SessionStore as DBStore -from django.core.cache import get_cache +from django.core.cache import caches from django.core.exceptions import SuspiciousOperation from django.utils import timezone from django.utils.encoding import force_text @@ -20,7 +20,7 @@ class SessionStore(DBStore): """ def __init__(self, session_key=None): - self._cache = get_cache(settings.SESSION_CACHE_ALIAS) + self._cache = caches[settings.SESSION_CACHE_ALIAS] super(SessionStore, self).__init__(session_key) @property diff --git a/django/contrib/sessions/tests.py b/django/contrib/sessions/tests.py index 862bdfb69bc..af89f0048e9 100644 --- a/django/contrib/sessions/tests.py +++ b/django/contrib/sessions/tests.py @@ -15,7 +15,7 @@ from django.contrib.sessions.backends.file import SessionStore as FileSession from django.contrib.sessions.backends.signed_cookies import SessionStore as CookieSession from django.contrib.sessions.models import Session from django.contrib.sessions.middleware import SessionMiddleware -from django.core.cache import get_cache +from django.core.cache import caches from django.core.cache.backends.base import InvalidCacheBackendError from django.core import management from django.core.exceptions import ImproperlyConfigured @@ -140,7 +140,7 @@ class SessionTestsMixin(object): self.assertTrue(self.session.modified) def test_save(self): - if (hasattr(self.session, '_cache') and'DummyCache' in + if (hasattr(self.session, '_cache') and 'DummyCache' in settings.CACHES[settings.SESSION_CACHE_ALIAS]['BACKEND']): raise unittest.SkipTest("Session saving tests require a real cache backend") self.session.save() @@ -481,7 +481,7 @@ class CacheSessionTests(SessionTestsMixin, unittest.TestCase): def test_default_cache(self): self.session.save() - self.assertNotEqual(get_cache('default').get(self.session.cache_key), None) + self.assertNotEqual(caches['default'].get(self.session.cache_key), None) @override_settings(CACHES={ 'default': { @@ -489,6 +489,7 @@ class CacheSessionTests(SessionTestsMixin, unittest.TestCase): }, 'sessions': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', + 'LOCATION': 'session', }, }, SESSION_CACHE_ALIAS='sessions') def test_non_default_cache(self): @@ -496,8 +497,8 @@ class CacheSessionTests(SessionTestsMixin, unittest.TestCase): self.session = self.backend() self.session.save() - self.assertEqual(get_cache('default').get(self.session.cache_key), None) - self.assertNotEqual(get_cache('sessions').get(self.session.cache_key), None) + self.assertEqual(caches['default'].get(self.session.cache_key), None) + self.assertNotEqual(caches['sessions'].get(self.session.cache_key), None) class SessionMiddlewareTests(unittest.TestCase): diff --git a/django/contrib/staticfiles/storage.py b/django/contrib/staticfiles/storage.py index a527379febf..5190a299e4f 100644 --- a/django/contrib/staticfiles/storage.py +++ b/django/contrib/staticfiles/storage.py @@ -7,7 +7,7 @@ import posixpath import re from django.conf import settings -from 
django.core.cache import (get_cache, InvalidCacheBackendError, +from django.core.cache import (caches, InvalidCacheBackendError, cache as default_cache) from django.core.exceptions import ImproperlyConfigured from django.core.files.base import ContentFile @@ -56,7 +56,7 @@ class CachedFilesMixin(object): def __init__(self, *args, **kwargs): super(CachedFilesMixin, self).__init__(*args, **kwargs) try: - self.cache = get_cache('staticfiles') + self.cache = caches['staticfiles'] except InvalidCacheBackendError: # Use the default backend self.cache = default_cache diff --git a/django/core/cache/__init__.py b/django/core/cache/__init__.py index 747a12a0234..6829b747725 100644 --- a/django/core/cache/__init__.py +++ b/django/core/cache/__init__.py @@ -6,14 +6,15 @@ In a nutshell, a cache is a set of values -- which can be any object that may be pickled -- identified by string keys. For the complete API, see the abstract BaseCache class in django.core.cache.backends.base. -Client code should not access a cache backend directly; instead it should -either use the "cache" variable made available here, or it should use the -get_cache() function made available here. get_cache() takes a CACHES alias or a -backend path and config parameters, and returns an instance of a backend cache -class. +Client code should use the `cache` variable defined here to access the default +cache backend and look up non-default cache backends in the `caches` dict-like +object. See docs/topics/cache.txt for information on the public API. """ +from threading import local +import warnings + from django.conf import settings from django.core import signals from django.core.cache.backends.base import ( @@ -35,14 +36,14 @@ if DEFAULT_CACHE_ALIAS not in settings.CACHES: def get_cache(backend, **kwargs): """ - Function to load a cache backend dynamically. This is flexible by design + Function to create a cache backend dynamically. This is flexible by design to allow different use cases: To load a backend that is pre-defined in the settings:: cache = get_cache('default') - To load a backend with its dotted import path, + To create a backend with its dotted import path, including arbitrary options:: cache = get_cache('django.core.cache.backends.memcached.MemcachedCache', **{ @@ -50,6 +51,12 @@ def get_cache(backend, **kwargs): }) """ + warnings.warn("'get_cache' is deprecated in favor of 'caches'.", + PendingDeprecationWarning, stacklevel=2) + return _create_cache(backend, **kwargs) + + +def _create_cache(backend, **kwargs): try: # Try to get the CACHES entry for the given backend name first try: @@ -79,4 +86,57 @@ def get_cache(backend, **kwargs): signals.request_finished.connect(cache.close) return cache -cache = get_cache(DEFAULT_CACHE_ALIAS) + +class CacheHandler(object): + """ + A Cache Handler to manage access to Cache instances. + + Ensures only one instance of each alias exists per thread. + """ + def __init__(self): + self._caches = local() + + def __getitem__(self, alias): + try: + return getattr(self._caches, alias) + except AttributeError: + pass + + if alias not in settings.CACHES: + raise InvalidCacheBackendError( + "Could not find config for '%s' in settings.CACHES" % alias + ) + + cache = _create_cache(alias) + setattr(self._caches, alias, cache) + + return cache + +caches = CacheHandler() + +class DefaultCacheProxy(object): + """ + Proxy access to the default Cache object's attributes. + + This allows the legacy `cache` object to be thread-safe using the new + ``caches`` API. 
+ """ + def __getattr__(self, name): + return getattr(caches[DEFAULT_CACHE_ALIAS], name) + + def __setattr__(self, name, value): + return setattr(caches[DEFAULT_CACHE_ALIAS], name, value) + + def __delattr__(self, name): + return delattr(caches[DEFAULT_CACHE_ALIAS], name) + + def __contains__(self, key): + return key in caches[DEFAULT_CACHE_ALIAS] + + def __eq__(self, other): + return caches[DEFAULT_CACHE_ALIAS] == other + + def __ne__(self, other): + return caches[DEFAULT_CACHE_ALIAS] != other + +cache = DefaultCacheProxy() diff --git a/django/core/cache/backends/memcached.py b/django/core/cache/backends/memcached.py index c49c20e59b1..216ceb04e1f 100644 --- a/django/core/cache/backends/memcached.py +++ b/django/core/cache/backends/memcached.py @@ -2,13 +2,13 @@ import time import pickle -from threading import local from django.core.cache.backends.base import BaseCache, DEFAULT_TIMEOUT from django.utils import six from django.utils.deprecation import RenameMethodsBase from django.utils.encoding import force_str +from django.utils.functional import cached_property class BaseMemcachedCacheMethods(RenameMethodsBase): @@ -177,24 +177,14 @@ class PyLibMCCache(BaseMemcachedCache): "An implementation of a cache binding using pylibmc" def __init__(self, server, params): import pylibmc - self._local = local() super(PyLibMCCache, self).__init__(server, params, library=pylibmc, value_not_found_exception=pylibmc.NotFound) - @property + @cached_property def _cache(self): - # PylibMC uses cache options as the 'behaviors' attribute. - # It also needs to use threadlocals, because some versions of - # PylibMC don't play well with the GIL. - client = getattr(self._local, 'client', None) - if client: - return client - client = self._lib.Client(self._servers) if self._options: client.behaviors = self._options - self._local.client = client - return client diff --git a/django/core/management/commands/createcachetable.py b/django/core/management/commands/createcachetable.py index be6da1dbf06..a71ff945a90 100644 --- a/django/core/management/commands/createcachetable.py +++ b/django/core/management/commands/createcachetable.py @@ -1,7 +1,7 @@ from optparse import make_option from django.conf import settings -from django.core.cache import get_cache +from django.core.cache import caches from django.core.cache.backends.db import BaseDatabaseCache from django.core.management.base import BaseCommand, CommandError from django.db import connections, router, transaction, models, DEFAULT_DB_ALIAS @@ -30,7 +30,7 @@ class Command(BaseCommand): self.create_table(db, tablename) else: for cache_alias in settings.CACHES: - cache = get_cache(cache_alias) + cache = caches[cache_alias] if isinstance(cache, BaseDatabaseCache): self.create_table(db, cache._table) diff --git a/django/middleware/cache.py b/django/middleware/cache.py index 361e46f5e5a..a99d428ad96 100644 --- a/django/middleware/cache.py +++ b/django/middleware/cache.py @@ -46,7 +46,7 @@ More details about how the caching works: import warnings from django.conf import settings -from django.core.cache import get_cache, DEFAULT_CACHE_ALIAS +from django.core.cache import caches, DEFAULT_CACHE_ALIAS from django.utils.cache import get_cache_key, learn_cache_key, patch_response_headers, get_max_age @@ -64,7 +64,7 @@ class UpdateCacheMiddleware(object): self.key_prefix = settings.CACHE_MIDDLEWARE_KEY_PREFIX self.cache_anonymous_only = getattr(settings, 'CACHE_MIDDLEWARE_ANONYMOUS_ONLY', False) self.cache_alias = settings.CACHE_MIDDLEWARE_ALIAS - self.cache = 
get_cache(self.cache_alias) + self.cache = caches[self.cache_alias] def _session_accessed(self, request): try: @@ -122,10 +122,9 @@ class FetchFromCacheMiddleware(object): MIDDLEWARE_CLASSES so that it'll get called last during the request phase. """ def __init__(self): - self.cache_timeout = settings.CACHE_MIDDLEWARE_SECONDS self.key_prefix = settings.CACHE_MIDDLEWARE_KEY_PREFIX self.cache_alias = settings.CACHE_MIDDLEWARE_ALIAS - self.cache = get_cache(self.cache_alias) + self.cache = caches[self.cache_alias] def process_request(self, request): """ @@ -169,39 +168,32 @@ class CacheMiddleware(UpdateCacheMiddleware, FetchFromCacheMiddleware): # we fall back to system defaults. If it is not provided at all, # we need to use middleware defaults. - cache_kwargs = {} + try: + key_prefix = kwargs['key_prefix'] + if key_prefix is None: + key_prefix = '' + except KeyError: + key_prefix = settings.CACHE_MIDDLEWARE_KEY_PREFIX + self.key_prefix = key_prefix try: - self.key_prefix = kwargs['key_prefix'] - if self.key_prefix is not None: - cache_kwargs['KEY_PREFIX'] = self.key_prefix - else: - self.key_prefix = '' + cache_alias = kwargs['cache_alias'] + if cache_alias is None: + cache_alias = DEFAULT_CACHE_ALIAS except KeyError: - self.key_prefix = settings.CACHE_MIDDLEWARE_KEY_PREFIX - cache_kwargs['KEY_PREFIX'] = self.key_prefix + cache_alias = settings.CACHE_MIDDLEWARE_ALIAS + self.cache_alias = cache_alias - try: - self.cache_alias = kwargs['cache_alias'] - if self.cache_alias is None: - self.cache_alias = DEFAULT_CACHE_ALIAS - if cache_timeout is not None: - cache_kwargs['TIMEOUT'] = cache_timeout - except KeyError: - self.cache_alias = settings.CACHE_MIDDLEWARE_ALIAS - if cache_timeout is None: - cache_kwargs['TIMEOUT'] = settings.CACHE_MIDDLEWARE_SECONDS - else: - cache_kwargs['TIMEOUT'] = cache_timeout + if cache_timeout is None: + cache_timeout = settings.CACHE_MIDDLEWARE_SECONDS + self.cache_timeout = cache_timeout if cache_anonymous_only is None: - self.cache_anonymous_only = getattr(settings, 'CACHE_MIDDLEWARE_ANONYMOUS_ONLY', False) - else: - self.cache_anonymous_only = cache_anonymous_only + cache_anonymous_only = getattr(settings, 'CACHE_MIDDLEWARE_ANONYMOUS_ONLY', False) + self.cache_anonymous_only = cache_anonymous_only if self.cache_anonymous_only: msg = "CACHE_MIDDLEWARE_ANONYMOUS_ONLY has been deprecated and will be removed in Django 1.8." 
warnings.warn(msg, DeprecationWarning, stacklevel=1) - self.cache = get_cache(self.cache_alias, **cache_kwargs) - self.cache_timeout = self.cache.default_timeout + self.cache = caches[self.cache_alias] diff --git a/django/templatetags/cache.py b/django/templatetags/cache.py index 1ef97d7cf0c..bdea9b59ae0 100644 --- a/django/templatetags/cache.py +++ b/django/templatetags/cache.py @@ -1,16 +1,11 @@ from __future__ import unicode_literals -from django.core.cache import get_cache, InvalidCacheBackendError +from django.core.cache import cache, caches, InvalidCacheBackendError from django.core.cache.utils import make_template_fragment_key from django.template import Library, Node, TemplateSyntaxError, VariableDoesNotExist register = Library() -try: - default_cache = get_cache('template_fragments') -except InvalidCacheBackendError: - from django.core.cache import cache as default_cache - class CacheNode(Node): def __init__(self, nodelist, expire_time_var, fragment_name, vary_on, cache_name): @@ -35,17 +30,21 @@ class CacheNode(Node): except VariableDoesNotExist: raise TemplateSyntaxError('"cache" tag got an unknown variable: %r' % self.cache_name.var) try: - cache = get_cache(cache_name) + fragment_cache = caches[cache_name] except InvalidCacheBackendError: raise TemplateSyntaxError('Invalid cache name specified for cache tag: %r' % cache_name) else: - cache = default_cache + try: + fragment_cache = caches['template_fragments'] + except InvalidCacheBackendError: + fragment_cache = caches['default'] + vary_on = [var.resolve(context) for var in self.vary_on] cache_key = make_template_fragment_key(self.fragment_name, vary_on) - value = cache.get(cache_key) + value = fragment_cache.get(cache_key) if value is None: value = self.nodelist.render(context) - cache.set(cache_key, value, expire_time) + fragment_cache.set(cache_key, value, expire_time) return value diff --git a/django/test/signals.py b/django/test/signals.py index de8802b381c..4af408ea9d5 100644 --- a/django/test/signals.py +++ b/django/test/signals.py @@ -1,5 +1,6 @@ import os import time +import threading import warnings from django.conf import settings @@ -19,6 +20,13 @@ setting_changed = Signal(providing_args=["setting", "value", "enter"]) COMPLEX_OVERRIDE_SETTINGS = set(['DATABASES']) +@receiver(setting_changed) +def clear_cache_handlers(**kwargs): + if kwargs['setting'] == 'CACHES': + from django.core.cache import caches + caches._caches = threading.local() + + @receiver(setting_changed) def update_connections_time_zone(**kwargs): if kwargs['setting'] == 'TIME_ZONE': diff --git a/django/utils/cache.py b/django/utils/cache.py index 7e38f7b021d..d89408a7a21 100644 --- a/django/utils/cache.py +++ b/django/utils/cache.py @@ -23,7 +23,7 @@ import re import time from django.conf import settings -from django.core.cache import get_cache +from django.core.cache import caches from django.utils.encoding import iri_to_uri, force_bytes, force_text from django.utils.http import http_date from django.utils.timezone import get_current_timezone_name @@ -219,7 +219,7 @@ def get_cache_key(request, key_prefix=None, method='GET', cache=None): key_prefix = settings.CACHE_MIDDLEWARE_KEY_PREFIX cache_key = _generate_cache_header_key(key_prefix, request) if cache is None: - cache = get_cache(settings.CACHE_MIDDLEWARE_ALIAS) + cache = caches[settings.CACHE_MIDDLEWARE_ALIAS] headerlist = cache.get(cache_key, None) if headerlist is not None: return _generate_cache_key(request, method, headerlist, key_prefix) @@ -246,7 +246,7 @@ def learn_cache_key(request, 
response, cache_timeout=None, key_prefix=None, cach cache_timeout = settings.CACHE_MIDDLEWARE_SECONDS cache_key = _generate_cache_header_key(key_prefix, request) if cache is None: - cache = get_cache(settings.CACHE_MIDDLEWARE_ALIAS) + cache = caches[settings.CACHE_MIDDLEWARE_ALIAS] if response.has_header('Vary'): is_accept_language_redundant = settings.USE_I18N or settings.USE_L10N # If i18n or l10n are used, the generated cache key will be suffixed diff --git a/docs/internals/deprecation.txt b/docs/internals/deprecation.txt index 4080957c774..4c44c705016 100644 --- a/docs/internals/deprecation.txt +++ b/docs/internals/deprecation.txt @@ -114,7 +114,7 @@ these changes. no longer appears to be actively maintained & does not work on Python 3. You are advised to install `Pillow`_, which should be used instead. -.. _`Pillow`: https://pypi.python.org/pypi/Pillow + .. _`Pillow`: https://pypi.python.org/pypi/Pillow * The following private APIs will be removed: @@ -215,6 +215,9 @@ these changes. * The internal ``django.utils.functional.memoize`` will be removed. +* ``django.core.cache.get_cache`` will be removed. Add suitable entries + to :setting:`CACHES` and use :data:`django.core.cache.caches` instead. + 2.0 --- diff --git a/docs/releases/1.7.txt b/docs/releases/1.7.txt index 749a843a5ad..8272e246c69 100644 --- a/docs/releases/1.7.txt +++ b/docs/releases/1.7.txt @@ -269,6 +269,18 @@ Minor features allowing the ``published`` element to be included in the feed (which relies on ``pubdate``). +Cache +^^^^^ + +* Access to caches configured in :setting:`CACHES` is now available via + :data:`django.core.cache.caches`. This dict-like object provides a different + instance per thread. It supersedes :func:`django.core.cache.get_cache` which + is now deprecated. + +* If you instanciate cache backends directly, be aware that they aren't + thread-safe any more, as :data:`django.core.cache.caches` now yields + differend instances per thread. + Email ^^^^^ @@ -643,6 +655,12 @@ Miscellaneous Features deprecated in 1.7 ========================== +``django.core.cache.get_cache`` +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +:func:`django.core.cache.get_cache` has been supplanted by +:data:`django.core.cache.caches`. + ``django.utils.dictconfig``/``django.utils.importlib`` ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/docs/topics/cache.txt b/docs/topics/cache.txt index abf4946cb5f..409bd428273 100644 --- a/docs/topics/cache.txt +++ b/docs/topics/cache.txt @@ -703,22 +703,50 @@ pickling.) Accessing the cache ------------------- +.. data:: django.core.cache.caches + + .. versionadded:: 1.7 + + You can access the caches configured in the :setting:`CACHES` setting + through a dict-like object: ``django.core.cache.caches``. Repeated + requests for the same alias in the same thread will return the same + object. + + >>> from django.core.cache import caches + >>> cache1 = caches['myalias'] + >>> cache2 = caches['myalias'] + >>> cache1 is cache2 + True + + If the named key does not exist, ``InvalidCacheBackendError`` will be + raised. + + To provide thread-safety, a different instance of the cache backend will + be returned for each thread. + +.. data:: django.core.cache.cache + + As a shortcut, the default cache is available as + ``django.core.cache.cache``:: + + >>> from django.core.cache import cache + + This object is equivalent to ``caches['default']``. + .. 
function:: django.core.cache.get_cache(backend, **kwargs) -The cache module, ``django.core.cache``, has a ``cache`` object that's -automatically created from the ``'default'`` entry in the :setting:`CACHES` -setting:: + .. deprecated:: 1.7 + This function has been deprecated in favour of + :data:`~django.core.cache.caches`. - >>> from django.core.cache import cache - -If you have multiple caches defined in :setting:`CACHES`, then you can use -:func:`django.core.cache.get_cache` to retrieve a cache object for any key:: - - >>> from django.core.cache import get_cache - >>> cache = get_cache('alternate') - -If the named key does not exist, ``InvalidCacheBackendError`` will be raised. + Before Django 1.7 this function was the canonical way to obtain a cache + instance. It could also be used to create a new cache instance with a + different configuration. + >>> from django.core.cache import get_cache + >>> get_cache('default') + >>> get_cache('django.core.cache.backends.memcached.MemcachedCache', LOCATION='127.0.0.2') + >>> get_cache('default', TIMEOUT=300) Basic usage ----------- diff --git a/tests/cache/tests.py b/tests/cache/tests.py index 85e65168afc..4ebcd078def 100644 --- a/tests/cache/tests.py +++ b/tests/cache/tests.py @@ -8,17 +8,17 @@ import os import pickle import random import re +import shutil import string import tempfile +import threading import time import unittest import warnings from django.conf import settings from django.core import management -from django.core.cache import get_cache -from django.core.cache.backends.base import (CacheKeyWarning, - InvalidCacheBackendError) +from django.core.cache import cache, caches, CacheKeyWarning, InvalidCacheBackendError from django.db import connection, router, transaction from django.core.cache.utils import make_template_fragment_key from django.http import HttpResponse, StreamingHttpResponse @@ -27,7 +27,8 @@ from django.middleware.cache import (FetchFromCacheMiddleware, from django.template import Template from django.template.response import TemplateResponse from django.test import TestCase, TransactionTestCase, RequestFactory -from django.test.utils import override_settings, IgnoreDeprecationWarningsMixin +from django.test.utils import (override_settings, IgnoreDeprecationWarningsMixin, + IgnorePendingDeprecationWarningsMixin) from django.utils import six from django.utils import timezone from django.utils import translation @@ -49,72 +50,73 @@ class C: return 24 -class DummyCacheTests(unittest.TestCase): +@override_settings(CACHES={ + 'default': { + 'BACKEND': 'django.core.cache.backends.dummy.DummyCache', + } +}) +class DummyCacheTests(TestCase): # The Dummy cache backend doesn't really behave like a test backend, - # so it has different test requirements. - backend_name = 'django.core.cache.backends.dummy.DummyCache' - - def setUp(self): - self.cache = get_cache(self.backend_name) + # so it has its own test case. 
def test_simple(self): "Dummy cache backend ignores cache set calls" - self.cache.set("key", "value") - self.assertEqual(self.cache.get("key"), None) + cache.set("key", "value") + self.assertEqual(cache.get("key"), None) def test_add(self): "Add doesn't do anything in dummy cache backend" - self.cache.add("addkey1", "value") - result = self.cache.add("addkey1", "newvalue") + cache.add("addkey1", "value") + result = cache.add("addkey1", "newvalue") self.assertEqual(result, True) - self.assertEqual(self.cache.get("addkey1"), None) + self.assertEqual(cache.get("addkey1"), None) def test_non_existent(self): "Non-existent keys aren't found in the dummy cache backend" - self.assertEqual(self.cache.get("does_not_exist"), None) - self.assertEqual(self.cache.get("does_not_exist", "bang!"), "bang!") + self.assertEqual(cache.get("does_not_exist"), None) + self.assertEqual(cache.get("does_not_exist", "bang!"), "bang!") def test_get_many(self): "get_many returns nothing for the dummy cache backend" - self.cache.set('a', 'a') - self.cache.set('b', 'b') - self.cache.set('c', 'c') - self.cache.set('d', 'd') - self.assertEqual(self.cache.get_many(['a', 'c', 'd']), {}) - self.assertEqual(self.cache.get_many(['a', 'b', 'e']), {}) + cache.set('a', 'a') + cache.set('b', 'b') + cache.set('c', 'c') + cache.set('d', 'd') + self.assertEqual(cache.get_many(['a', 'c', 'd']), {}) + self.assertEqual(cache.get_many(['a', 'b', 'e']), {}) def test_delete(self): "Cache deletion is transparently ignored on the dummy cache backend" - self.cache.set("key1", "spam") - self.cache.set("key2", "eggs") - self.assertEqual(self.cache.get("key1"), None) - self.cache.delete("key1") - self.assertEqual(self.cache.get("key1"), None) - self.assertEqual(self.cache.get("key2"), None) + cache.set("key1", "spam") + cache.set("key2", "eggs") + self.assertEqual(cache.get("key1"), None) + cache.delete("key1") + self.assertEqual(cache.get("key1"), None) + self.assertEqual(cache.get("key2"), None) def test_has_key(self): "The has_key method doesn't ever return True for the dummy cache backend" - self.cache.set("hello1", "goodbye1") - self.assertEqual(self.cache.has_key("hello1"), False) - self.assertEqual(self.cache.has_key("goodbye1"), False) + cache.set("hello1", "goodbye1") + self.assertEqual(cache.has_key("hello1"), False) + self.assertEqual(cache.has_key("goodbye1"), False) def test_in(self): "The in operator doesn't ever return True for the dummy cache backend" - self.cache.set("hello2", "goodbye2") - self.assertEqual("hello2" in self.cache, False) - self.assertEqual("goodbye2" in self.cache, False) + cache.set("hello2", "goodbye2") + self.assertEqual("hello2" in cache, False) + self.assertEqual("goodbye2" in cache, False) def test_incr(self): "Dummy cache values can't be incremented" - self.cache.set('answer', 42) - self.assertRaises(ValueError, self.cache.incr, 'answer') - self.assertRaises(ValueError, self.cache.incr, 'does_not_exist') + cache.set('answer', 42) + self.assertRaises(ValueError, cache.incr, 'answer') + self.assertRaises(ValueError, cache.incr, 'does_not_exist') def test_decr(self): "Dummy cache values can't be decremented" - self.cache.set('answer', 42) - self.assertRaises(ValueError, self.cache.decr, 'answer') - self.assertRaises(ValueError, self.cache.decr, 'does_not_exist') + cache.set('answer', 42) + self.assertRaises(ValueError, cache.decr, 'answer') + self.assertRaises(ValueError, cache.decr, 'does_not_exist') def test_data_types(self): "All data types are ignored equally by the dummy cache" @@ -127,21 +129,21 @@ 
class DummyCacheTests(unittest.TestCase): 'function': f, 'class': C, } - self.cache.set("stuff", stuff) - self.assertEqual(self.cache.get("stuff"), None) + cache.set("stuff", stuff) + self.assertEqual(cache.get("stuff"), None) def test_expiration(self): "Expiration has no effect on the dummy cache" - self.cache.set('expire1', 'very quickly', 1) - self.cache.set('expire2', 'very quickly', 1) - self.cache.set('expire3', 'very quickly', 1) + cache.set('expire1', 'very quickly', 1) + cache.set('expire2', 'very quickly', 1) + cache.set('expire3', 'very quickly', 1) time.sleep(2) - self.assertEqual(self.cache.get("expire1"), None) + self.assertEqual(cache.get("expire1"), None) - self.cache.add("expire2", "newvalue") - self.assertEqual(self.cache.get("expire2"), None) - self.assertEqual(self.cache.has_key("expire3"), False) + cache.add("expire2", "newvalue") + self.assertEqual(cache.get("expire2"), None) + self.assertEqual(cache.has_key("expire3"), False) def test_unicode(self): "Unicode values are ignored by the dummy cache" @@ -152,33 +154,57 @@ class DummyCacheTests(unittest.TestCase): 'ascii2': {'x': 1} } for (key, value) in stuff.items(): - self.cache.set(key, value) - self.assertEqual(self.cache.get(key), None) + cache.set(key, value) + self.assertEqual(cache.get(key), None) def test_set_many(self): "set_many does nothing for the dummy cache backend" - self.cache.set_many({'a': 1, 'b': 2}) - self.cache.set_many({'a': 1, 'b': 2}, timeout=2, version='1') + cache.set_many({'a': 1, 'b': 2}) + cache.set_many({'a': 1, 'b': 2}, timeout=2, version='1') def test_delete_many(self): "delete_many does nothing for the dummy cache backend" - self.cache.delete_many(['a', 'b']) + cache.delete_many(['a', 'b']) def test_clear(self): "clear does nothing for the dummy cache backend" - self.cache.clear() + cache.clear() def test_incr_version(self): "Dummy cache versions can't be incremented" - self.cache.set('answer', 42) - self.assertRaises(ValueError, self.cache.incr_version, 'answer') - self.assertRaises(ValueError, self.cache.incr_version, 'does_not_exist') + cache.set('answer', 42) + self.assertRaises(ValueError, cache.incr_version, 'answer') + self.assertRaises(ValueError, cache.incr_version, 'does_not_exist') def test_decr_version(self): "Dummy cache versions can't be decremented" - self.cache.set('answer', 42) - self.assertRaises(ValueError, self.cache.decr_version, 'answer') - self.assertRaises(ValueError, self.cache.decr_version, 'does_not_exist') + cache.set('answer', 42) + self.assertRaises(ValueError, cache.decr_version, 'answer') + self.assertRaises(ValueError, cache.decr_version, 'does_not_exist') + + +def custom_key_func(key, key_prefix, version): + "A customized cache key function" + return 'CUSTOM-' + '-'.join([key_prefix, str(version), key]) + + +_caches_setting_base = { + 'default': {}, + 'prefix': {'KEY_PREFIX': 'cacheprefix'}, + 'v2': {'VERSION': 2}, + 'custom_key': {'KEY_FUNCTION': custom_key_func}, + 'custom_key2': {'KEY_FUNCTION': 'cache.tests.custom_key_func'}, + 'cull': {'OPTIONS': {'MAX_ENTRIES': 30}}, + 'zero_cull': {'OPTIONS': {'CULL_FREQUENCY': 0, 'MAX_ENTRIES': 30}}, + 'other': {'LOCATION': 'other'}, +} + + +def caches_setting_for_tests(**params): + setting = dict((k, v.copy()) for k, v in _caches_setting_base.items()) + for cache_params in setting.values(): + cache_params.update(params) + return setting class BaseCacheTests(object): @@ -187,89 +213,92 @@ class BaseCacheTests(object): def setUp(self): self.factory = RequestFactory() + def tearDown(self): + cache.clear() + def 
test_simple(self): # Simple cache set/get works - self.cache.set("key", "value") - self.assertEqual(self.cache.get("key"), "value") + cache.set("key", "value") + self.assertEqual(cache.get("key"), "value") def test_add(self): # A key can be added to a cache - self.cache.add("addkey1", "value") - result = self.cache.add("addkey1", "newvalue") + cache.add("addkey1", "value") + result = cache.add("addkey1", "newvalue") self.assertEqual(result, False) - self.assertEqual(self.cache.get("addkey1"), "value") + self.assertEqual(cache.get("addkey1"), "value") def test_prefix(self): # Test for same cache key conflicts between shared backend - self.cache.set('somekey', 'value') + cache.set('somekey', 'value') # should not be set in the prefixed cache - self.assertFalse(self.prefix_cache.has_key('somekey')) + self.assertFalse(caches['prefix'].has_key('somekey')) - self.prefix_cache.set('somekey', 'value2') + caches['prefix'].set('somekey', 'value2') - self.assertEqual(self.cache.get('somekey'), 'value') - self.assertEqual(self.prefix_cache.get('somekey'), 'value2') + self.assertEqual(cache.get('somekey'), 'value') + self.assertEqual(caches['prefix'].get('somekey'), 'value2') def test_non_existent(self): # Non-existent cache keys return as None/default # get with non-existent keys - self.assertEqual(self.cache.get("does_not_exist"), None) - self.assertEqual(self.cache.get("does_not_exist", "bang!"), "bang!") + self.assertEqual(cache.get("does_not_exist"), None) + self.assertEqual(cache.get("does_not_exist", "bang!"), "bang!") def test_get_many(self): # Multiple cache keys can be returned using get_many - self.cache.set('a', 'a') - self.cache.set('b', 'b') - self.cache.set('c', 'c') - self.cache.set('d', 'd') - self.assertEqual(self.cache.get_many(['a', 'c', 'd']), {'a': 'a', 'c': 'c', 'd': 'd'}) - self.assertEqual(self.cache.get_many(['a', 'b', 'e']), {'a': 'a', 'b': 'b'}) + cache.set('a', 'a') + cache.set('b', 'b') + cache.set('c', 'c') + cache.set('d', 'd') + self.assertEqual(cache.get_many(['a', 'c', 'd']), {'a': 'a', 'c': 'c', 'd': 'd'}) + self.assertEqual(cache.get_many(['a', 'b', 'e']), {'a': 'a', 'b': 'b'}) def test_delete(self): # Cache keys can be deleted - self.cache.set("key1", "spam") - self.cache.set("key2", "eggs") - self.assertEqual(self.cache.get("key1"), "spam") - self.cache.delete("key1") - self.assertEqual(self.cache.get("key1"), None) - self.assertEqual(self.cache.get("key2"), "eggs") + cache.set("key1", "spam") + cache.set("key2", "eggs") + self.assertEqual(cache.get("key1"), "spam") + cache.delete("key1") + self.assertEqual(cache.get("key1"), None) + self.assertEqual(cache.get("key2"), "eggs") def test_has_key(self): # The cache can be inspected for cache keys - self.cache.set("hello1", "goodbye1") - self.assertEqual(self.cache.has_key("hello1"), True) - self.assertEqual(self.cache.has_key("goodbye1"), False) + cache.set("hello1", "goodbye1") + self.assertEqual(cache.has_key("hello1"), True) + self.assertEqual(cache.has_key("goodbye1"), False) def test_in(self): # The in operator can be used to inspect cache contents - self.cache.set("hello2", "goodbye2") - self.assertEqual("hello2" in self.cache, True) - self.assertEqual("goodbye2" in self.cache, False) + cache.set("hello2", "goodbye2") + self.assertEqual("hello2" in cache, True) + self.assertEqual("goodbye2" in cache, False) def test_incr(self): # Cache values can be incremented - self.cache.set('answer', 41) - self.assertEqual(self.cache.incr('answer'), 42) - self.assertEqual(self.cache.get('answer'), 42) - 
self.assertEqual(self.cache.incr('answer', 10), 52) - self.assertEqual(self.cache.get('answer'), 52) - self.assertEqual(self.cache.incr('answer', -10), 42) - self.assertRaises(ValueError, self.cache.incr, 'does_not_exist') + cache.set('answer', 41) + self.assertEqual(cache.incr('answer'), 42) + self.assertEqual(cache.get('answer'), 42) + self.assertEqual(cache.incr('answer', 10), 52) + self.assertEqual(cache.get('answer'), 52) + self.assertEqual(cache.incr('answer', -10), 42) + self.assertRaises(ValueError, cache.incr, 'does_not_exist') def test_decr(self): # Cache values can be decremented - self.cache.set('answer', 43) - self.assertEqual(self.cache.decr('answer'), 42) - self.assertEqual(self.cache.get('answer'), 42) - self.assertEqual(self.cache.decr('answer', 10), 32) - self.assertEqual(self.cache.get('answer'), 32) - self.assertEqual(self.cache.decr('answer', -10), 42) - self.assertRaises(ValueError, self.cache.decr, 'does_not_exist') + cache.set('answer', 43) + self.assertEqual(cache.decr('answer'), 42) + self.assertEqual(cache.get('answer'), 42) + self.assertEqual(cache.decr('answer', 10), 32) + self.assertEqual(cache.get('answer'), 32) + self.assertEqual(cache.decr('answer', -10), 42) + self.assertRaises(ValueError, cache.decr, 'does_not_exist') def test_close(self): - self.assertTrue(hasattr(self.cache, 'close')) - self.cache.close() + self.assertTrue(hasattr(cache, 'close')) + cache.close() def test_data_types(self): # Many different data types can be cached @@ -282,8 +311,8 @@ class BaseCacheTests(object): 'function': f, 'class': C, } - self.cache.set("stuff", stuff) - self.assertEqual(self.cache.get("stuff"), stuff) + cache.set("stuff", stuff) + self.assertEqual(cache.get("stuff"), stuff) def test_cache_read_for_model_instance(self): # Don't want fields with callable as default to be called on cache read @@ -292,8 +321,8 @@ class BaseCacheTests(object): my_poll = Poll.objects.create(question="Well?") self.assertEqual(Poll.objects.count(), 1) pub_date = my_poll.pub_date - self.cache.set('question', my_poll) - cached_poll = self.cache.get('question') + cache.set('question', my_poll) + cached_poll = cache.get('question') self.assertEqual(cached_poll.pub_date, pub_date) # We only want the default expensive calculation run once self.assertEqual(expensive_calculation.num_runs, 1) @@ -307,7 +336,7 @@ class BaseCacheTests(object): defer_qs = Poll.objects.all().defer('question') self.assertEqual(defer_qs.count(), 1) self.assertEqual(expensive_calculation.num_runs, 1) - self.cache.set('deferred_queryset', defer_qs) + cache.set('deferred_queryset', defer_qs) # cache set should not re-evaluate default functions self.assertEqual(expensive_calculation.num_runs, 1) @@ -319,25 +348,25 @@ class BaseCacheTests(object): self.assertEqual(expensive_calculation.num_runs, 1) defer_qs = Poll.objects.all().defer('question') self.assertEqual(defer_qs.count(), 1) - self.cache.set('deferred_queryset', defer_qs) + cache.set('deferred_queryset', defer_qs) self.assertEqual(expensive_calculation.num_runs, 1) runs_before_cache_read = expensive_calculation.num_runs - self.cache.get('deferred_queryset') + cache.get('deferred_queryset') # We only want the default expensive calculation run on creation and set self.assertEqual(expensive_calculation.num_runs, runs_before_cache_read) def test_expiration(self): # Cache values can be set to expire - self.cache.set('expire1', 'very quickly', 1) - self.cache.set('expire2', 'very quickly', 1) - self.cache.set('expire3', 'very quickly', 1) + cache.set('expire1', 'very 
quickly', 1) + cache.set('expire2', 'very quickly', 1) + cache.set('expire3', 'very quickly', 1) time.sleep(2) - self.assertEqual(self.cache.get("expire1"), None) + self.assertEqual(cache.get("expire1"), None) - self.cache.add("expire2", "newvalue") - self.assertEqual(self.cache.get("expire2"), "newvalue") - self.assertEqual(self.cache.has_key("expire3"), False) + cache.add("expire2", "newvalue") + self.assertEqual(cache.get("expire2"), "newvalue") + self.assertEqual(cache.has_key("expire3"), False) def test_unicode(self): # Unicode values can be cached @@ -349,21 +378,21 @@ class BaseCacheTests(object): } # Test `set` for (key, value) in stuff.items(): - self.cache.set(key, value) - self.assertEqual(self.cache.get(key), value) + cache.set(key, value) + self.assertEqual(cache.get(key), value) # Test `add` for (key, value) in stuff.items(): - self.cache.delete(key) - self.cache.add(key, value) - self.assertEqual(self.cache.get(key), value) + cache.delete(key) + cache.add(key, value) + self.assertEqual(cache.get(key), value) # Test `set_many` for (key, value) in stuff.items(): - self.cache.delete(key) - self.cache.set_many(stuff) + cache.delete(key) + cache.set_many(stuff) for (key, value) in stuff.items(): - self.assertEqual(self.cache.get(key), value) + self.assertEqual(cache.get(key), value) def test_binary_string(self): # Binary strings should be cacheable @@ -372,53 +401,53 @@ class BaseCacheTests(object): compressed_value = compress(value.encode()) # Test set - self.cache.set('binary1', compressed_value) - compressed_result = self.cache.get('binary1') + cache.set('binary1', compressed_value) + compressed_result = cache.get('binary1') self.assertEqual(compressed_value, compressed_result) self.assertEqual(value, decompress(compressed_result).decode()) # Test add - self.cache.add('binary1-add', compressed_value) - compressed_result = self.cache.get('binary1-add') + cache.add('binary1-add', compressed_value) + compressed_result = cache.get('binary1-add') self.assertEqual(compressed_value, compressed_result) self.assertEqual(value, decompress(compressed_result).decode()) # Test set_many - self.cache.set_many({'binary1-set_many': compressed_value}) - compressed_result = self.cache.get('binary1-set_many') + cache.set_many({'binary1-set_many': compressed_value}) + compressed_result = cache.get('binary1-set_many') self.assertEqual(compressed_value, compressed_result) self.assertEqual(value, decompress(compressed_result).decode()) def test_set_many(self): # Multiple keys can be set using set_many - self.cache.set_many({"key1": "spam", "key2": "eggs"}) - self.assertEqual(self.cache.get("key1"), "spam") - self.assertEqual(self.cache.get("key2"), "eggs") + cache.set_many({"key1": "spam", "key2": "eggs"}) + self.assertEqual(cache.get("key1"), "spam") + self.assertEqual(cache.get("key2"), "eggs") def test_set_many_expiration(self): # set_many takes a second ``timeout`` parameter - self.cache.set_many({"key1": "spam", "key2": "eggs"}, 1) + cache.set_many({"key1": "spam", "key2": "eggs"}, 1) time.sleep(2) - self.assertEqual(self.cache.get("key1"), None) - self.assertEqual(self.cache.get("key2"), None) + self.assertEqual(cache.get("key1"), None) + self.assertEqual(cache.get("key2"), None) def test_delete_many(self): # Multiple keys can be deleted using delete_many - self.cache.set("key1", "spam") - self.cache.set("key2", "eggs") - self.cache.set("key3", "ham") - self.cache.delete_many(["key1", "key2"]) - self.assertEqual(self.cache.get("key1"), None) - self.assertEqual(self.cache.get("key2"), None) - 
self.assertEqual(self.cache.get("key3"), "ham") + cache.set("key1", "spam") + cache.set("key2", "eggs") + cache.set("key3", "ham") + cache.delete_many(["key1", "key2"]) + self.assertEqual(cache.get("key1"), None) + self.assertEqual(cache.get("key2"), None) + self.assertEqual(cache.get("key3"), "ham") def test_clear(self): # The cache can be emptied using clear - self.cache.set("key1", "spam") - self.cache.set("key2", "eggs") - self.cache.clear() - self.assertEqual(self.cache.get("key1"), None) - self.assertEqual(self.cache.get("key2"), None) + cache.set("key1", "spam") + cache.set("key2", "eggs") + cache.clear() + self.assertEqual(cache.get("key1"), None) + self.assertEqual(cache.get("key2"), None) def test_long_timeout(self): ''' @@ -426,63 +455,67 @@ class BaseCacheTests(object): it is an absolute expiration timestamp instead of a relative offset. Test that we honour this convention. Refs #12399. ''' - self.cache.set('key1', 'eggs', 60 * 60 * 24 * 30 + 1) # 30 days + 1 second - self.assertEqual(self.cache.get('key1'), 'eggs') + cache.set('key1', 'eggs', 60 * 60 * 24 * 30 + 1) # 30 days + 1 second + self.assertEqual(cache.get('key1'), 'eggs') - self.cache.add('key2', 'ham', 60 * 60 * 24 * 30 + 1) - self.assertEqual(self.cache.get('key2'), 'ham') + cache.add('key2', 'ham', 60 * 60 * 24 * 30 + 1) + self.assertEqual(cache.get('key2'), 'ham') - self.cache.set_many({'key3': 'sausage', 'key4': 'lobster bisque'}, 60 * 60 * 24 * 30 + 1) - self.assertEqual(self.cache.get('key3'), 'sausage') - self.assertEqual(self.cache.get('key4'), 'lobster bisque') + cache.set_many({'key3': 'sausage', 'key4': 'lobster bisque'}, 60 * 60 * 24 * 30 + 1) + self.assertEqual(cache.get('key3'), 'sausage') + self.assertEqual(cache.get('key4'), 'lobster bisque') def test_forever_timeout(self): ''' Passing in None into timeout results in a value that is cached forever ''' - self.cache.set('key1', 'eggs', None) - self.assertEqual(self.cache.get('key1'), 'eggs') + cache.set('key1', 'eggs', None) + self.assertEqual(cache.get('key1'), 'eggs') - self.cache.add('key2', 'ham', None) - self.assertEqual(self.cache.get('key2'), 'ham') + cache.add('key2', 'ham', None) + self.assertEqual(cache.get('key2'), 'ham') - self.cache.set_many({'key3': 'sausage', 'key4': 'lobster bisque'}, None) - self.assertEqual(self.cache.get('key3'), 'sausage') - self.assertEqual(self.cache.get('key4'), 'lobster bisque') + cache.set_many({'key3': 'sausage', 'key4': 'lobster bisque'}, None) + self.assertEqual(cache.get('key3'), 'sausage') + self.assertEqual(cache.get('key4'), 'lobster bisque') def test_zero_timeout(self): ''' Passing in None into timeout results in a value that is cached forever ''' - self.cache.set('key1', 'eggs', 0) - self.assertEqual(self.cache.get('key1'), None) + cache.set('key1', 'eggs', 0) + self.assertEqual(cache.get('key1'), None) - self.cache.add('key2', 'ham', 0) - self.assertEqual(self.cache.get('key2'), None) + cache.add('key2', 'ham', 0) + self.assertEqual(cache.get('key2'), None) - self.cache.set_many({'key3': 'sausage', 'key4': 'lobster bisque'}, 0) - self.assertEqual(self.cache.get('key3'), None) - self.assertEqual(self.cache.get('key4'), None) + cache.set_many({'key3': 'sausage', 'key4': 'lobster bisque'}, 0) + self.assertEqual(cache.get('key3'), None) + self.assertEqual(cache.get('key4'), None) def test_float_timeout(self): # Make sure a timeout given as a float doesn't crash anything. 
- self.cache.set("key1", "spam", 100.2) - self.assertEqual(self.cache.get("key1"), "spam") + cache.set("key1", "spam", 100.2) + self.assertEqual(cache.get("key1"), "spam") - def perform_cull_test(self, initial_count, final_count): - """This is implemented as a utility method, because only some of the backends - implement culling. The culling algorithm also varies slightly, so the final - number of entries will vary between backends""" - # Create initial cache key entries. This will overflow the cache, causing a cull + def _perform_cull_test(self, cull_cache, initial_count, final_count): + # Create initial cache key entries. This will overflow the cache, + # causing a cull. for i in range(1, initial_count): - self.cache.set('cull%d' % i, 'value', 1000) + cull_cache.set('cull%d' % i, 'value', 1000) count = 0 # Count how many keys are left in the cache. for i in range(1, initial_count): - if self.cache.has_key('cull%d' % i): + if cull_cache.has_key('cull%d' % i): count = count + 1 self.assertEqual(count, final_count) + def test_cull(self): + self._perform_cull_test(caches['cull'], 50, 29) + + def test_zero_cull(self): + self._perform_cull_test(caches['zero_cull'], 50, 19) + def test_invalid_keys(self): """ All the builtin backends (except memcached, see below) should warn on @@ -496,302 +529,302 @@ class BaseCacheTests(object): def func(key, *args): return key - old_func = self.cache.key_func - self.cache.key_func = func + old_func = cache.key_func + cache.key_func = func try: with warnings.catch_warnings(record=True) as w: warnings.simplefilter("always") # memcached does not allow whitespace or control characters in keys - self.cache.set('key with spaces', 'value') + cache.set('key with spaces', 'value') self.assertEqual(len(w), 2) self.assertIsInstance(w[0].message, CacheKeyWarning) with warnings.catch_warnings(record=True) as w: warnings.simplefilter("always") # memcached limits key length to 250 - self.cache.set('a' * 251, 'value') + cache.set('a' * 251, 'value') self.assertEqual(len(w), 1) self.assertIsInstance(w[0].message, CacheKeyWarning) finally: - self.cache.key_func = old_func + cache.key_func = old_func def test_cache_versioning_get_set(self): # set, using default version = 1 - self.cache.set('answer1', 42) - self.assertEqual(self.cache.get('answer1'), 42) - self.assertEqual(self.cache.get('answer1', version=1), 42) - self.assertEqual(self.cache.get('answer1', version=2), None) + cache.set('answer1', 42) + self.assertEqual(cache.get('answer1'), 42) + self.assertEqual(cache.get('answer1', version=1), 42) + self.assertEqual(cache.get('answer1', version=2), None) - self.assertEqual(self.v2_cache.get('answer1'), None) - self.assertEqual(self.v2_cache.get('answer1', version=1), 42) - self.assertEqual(self.v2_cache.get('answer1', version=2), None) + self.assertEqual(caches['v2'].get('answer1'), None) + self.assertEqual(caches['v2'].get('answer1', version=1), 42) + self.assertEqual(caches['v2'].get('answer1', version=2), None) # set, default version = 1, but manually override version = 2 - self.cache.set('answer2', 42, version=2) - self.assertEqual(self.cache.get('answer2'), None) - self.assertEqual(self.cache.get('answer2', version=1), None) - self.assertEqual(self.cache.get('answer2', version=2), 42) + cache.set('answer2', 42, version=2) + self.assertEqual(cache.get('answer2'), None) + self.assertEqual(cache.get('answer2', version=1), None) + self.assertEqual(cache.get('answer2', version=2), 42) - self.assertEqual(self.v2_cache.get('answer2'), 42) - 
-        self.assertEqual(self.v2_cache.get('answer2', version=1), None)
-        self.assertEqual(self.v2_cache.get('answer2', version=2), 42)
+        self.assertEqual(caches['v2'].get('answer2'), 42)
+        self.assertEqual(caches['v2'].get('answer2', version=1), None)
+        self.assertEqual(caches['v2'].get('answer2', version=2), 42)
 
         # v2 set, using default version = 2
-        self.v2_cache.set('answer3', 42)
-        self.assertEqual(self.cache.get('answer3'), None)
-        self.assertEqual(self.cache.get('answer3', version=1), None)
-        self.assertEqual(self.cache.get('answer3', version=2), 42)
+        caches['v2'].set('answer3', 42)
+        self.assertEqual(cache.get('answer3'), None)
+        self.assertEqual(cache.get('answer3', version=1), None)
+        self.assertEqual(cache.get('answer3', version=2), 42)
 
-        self.assertEqual(self.v2_cache.get('answer3'), 42)
-        self.assertEqual(self.v2_cache.get('answer3', version=1), None)
-        self.assertEqual(self.v2_cache.get('answer3', version=2), 42)
+        self.assertEqual(caches['v2'].get('answer3'), 42)
+        self.assertEqual(caches['v2'].get('answer3', version=1), None)
+        self.assertEqual(caches['v2'].get('answer3', version=2), 42)
 
         # v2 set, default version = 2, but manually override version = 1
-        self.v2_cache.set('answer4', 42, version=1)
-        self.assertEqual(self.cache.get('answer4'), 42)
-        self.assertEqual(self.cache.get('answer4', version=1), 42)
-        self.assertEqual(self.cache.get('answer4', version=2), None)
+        caches['v2'].set('answer4', 42, version=1)
+        self.assertEqual(cache.get('answer4'), 42)
+        self.assertEqual(cache.get('answer4', version=1), 42)
+        self.assertEqual(cache.get('answer4', version=2), None)
 
-        self.assertEqual(self.v2_cache.get('answer4'), None)
-        self.assertEqual(self.v2_cache.get('answer4', version=1), 42)
-        self.assertEqual(self.v2_cache.get('answer4', version=2), None)
+        self.assertEqual(caches['v2'].get('answer4'), None)
+        self.assertEqual(caches['v2'].get('answer4', version=1), 42)
+        self.assertEqual(caches['v2'].get('answer4', version=2), None)
 
     def test_cache_versioning_add(self):
         # add, default version = 1, but manually override version = 2
-        self.cache.add('answer1', 42, version=2)
-        self.assertEqual(self.cache.get('answer1', version=1), None)
-        self.assertEqual(self.cache.get('answer1', version=2), 42)
+        cache.add('answer1', 42, version=2)
+        self.assertEqual(cache.get('answer1', version=1), None)
+        self.assertEqual(cache.get('answer1', version=2), 42)
 
-        self.cache.add('answer1', 37, version=2)
-        self.assertEqual(self.cache.get('answer1', version=1), None)
-        self.assertEqual(self.cache.get('answer1', version=2), 42)
+        cache.add('answer1', 37, version=2)
+        self.assertEqual(cache.get('answer1', version=1), None)
+        self.assertEqual(cache.get('answer1', version=2), 42)
 
-        self.cache.add('answer1', 37, version=1)
-        self.assertEqual(self.cache.get('answer1', version=1), 37)
-        self.assertEqual(self.cache.get('answer1', version=2), 42)
+        cache.add('answer1', 37, version=1)
+        self.assertEqual(cache.get('answer1', version=1), 37)
+        self.assertEqual(cache.get('answer1', version=2), 42)
 
         # v2 add, using default version = 2
-        self.v2_cache.add('answer2', 42)
-        self.assertEqual(self.cache.get('answer2', version=1), None)
-        self.assertEqual(self.cache.get('answer2', version=2), 42)
+        caches['v2'].add('answer2', 42)
+        self.assertEqual(cache.get('answer2', version=1), None)
+        self.assertEqual(cache.get('answer2', version=2), 42)
 
-        self.v2_cache.add('answer2', 37)
-        self.assertEqual(self.cache.get('answer2', version=1), None)
-        self.assertEqual(self.cache.get('answer2', version=2), 42)
+        caches['v2'].add('answer2', 37)
+        self.assertEqual(cache.get('answer2', version=1), None)
+        self.assertEqual(cache.get('answer2', version=2), 42)
 
-        self.v2_cache.add('answer2', 37, version=1)
-        self.assertEqual(self.cache.get('answer2', version=1), 37)
-        self.assertEqual(self.cache.get('answer2', version=2), 42)
+        caches['v2'].add('answer2', 37, version=1)
+        self.assertEqual(cache.get('answer2', version=1), 37)
+        self.assertEqual(cache.get('answer2', version=2), 42)
 
         # v2 add, default version = 2, but manually override version = 1
-        self.v2_cache.add('answer3', 42, version=1)
-        self.assertEqual(self.cache.get('answer3', version=1), 42)
-        self.assertEqual(self.cache.get('answer3', version=2), None)
+        caches['v2'].add('answer3', 42, version=1)
+        self.assertEqual(cache.get('answer3', version=1), 42)
+        self.assertEqual(cache.get('answer3', version=2), None)
 
-        self.v2_cache.add('answer3', 37, version=1)
-        self.assertEqual(self.cache.get('answer3', version=1), 42)
-        self.assertEqual(self.cache.get('answer3', version=2), None)
+        caches['v2'].add('answer3', 37, version=1)
+        self.assertEqual(cache.get('answer3', version=1), 42)
+        self.assertEqual(cache.get('answer3', version=2), None)
 
-        self.v2_cache.add('answer3', 37)
-        self.assertEqual(self.cache.get('answer3', version=1), 42)
-        self.assertEqual(self.cache.get('answer3', version=2), 37)
+        caches['v2'].add('answer3', 37)
+        self.assertEqual(cache.get('answer3', version=1), 42)
+        self.assertEqual(cache.get('answer3', version=2), 37)
 
     def test_cache_versioning_has_key(self):
-        self.cache.set('answer1', 42)
+        cache.set('answer1', 42)
 
         # has_key
-        self.assertTrue(self.cache.has_key('answer1'))
-        self.assertTrue(self.cache.has_key('answer1', version=1))
-        self.assertFalse(self.cache.has_key('answer1', version=2))
+        self.assertTrue(cache.has_key('answer1'))
+        self.assertTrue(cache.has_key('answer1', version=1))
+        self.assertFalse(cache.has_key('answer1', version=2))
 
-        self.assertFalse(self.v2_cache.has_key('answer1'))
-        self.assertTrue(self.v2_cache.has_key('answer1', version=1))
-        self.assertFalse(self.v2_cache.has_key('answer1', version=2))
+        self.assertFalse(caches['v2'].has_key('answer1'))
+        self.assertTrue(caches['v2'].has_key('answer1', version=1))
+        self.assertFalse(caches['v2'].has_key('answer1', version=2))
 
     def test_cache_versioning_delete(self):
-        self.cache.set('answer1', 37, version=1)
-        self.cache.set('answer1', 42, version=2)
-        self.cache.delete('answer1')
-        self.assertEqual(self.cache.get('answer1', version=1), None)
-        self.assertEqual(self.cache.get('answer1', version=2), 42)
+        cache.set('answer1', 37, version=1)
+        cache.set('answer1', 42, version=2)
+        cache.delete('answer1')
+        self.assertEqual(cache.get('answer1', version=1), None)
+        self.assertEqual(cache.get('answer1', version=2), 42)
 
-        self.cache.set('answer2', 37, version=1)
-        self.cache.set('answer2', 42, version=2)
-        self.cache.delete('answer2', version=2)
-        self.assertEqual(self.cache.get('answer2', version=1), 37)
-        self.assertEqual(self.cache.get('answer2', version=2), None)
+        cache.set('answer2', 37, version=1)
+        cache.set('answer2', 42, version=2)
+        cache.delete('answer2', version=2)
+        self.assertEqual(cache.get('answer2', version=1), 37)
+        self.assertEqual(cache.get('answer2', version=2), None)
 
-        self.cache.set('answer3', 37, version=1)
-        self.cache.set('answer3', 42, version=2)
-        self.v2_cache.delete('answer3')
-        self.assertEqual(self.cache.get('answer3', version=1), 37)
-        self.assertEqual(self.cache.get('answer3', version=2), None)
+        cache.set('answer3', 37, version=1)
+        cache.set('answer3', 42, version=2)
+        caches['v2'].delete('answer3')
+        self.assertEqual(cache.get('answer3', version=1), 37)
+        self.assertEqual(cache.get('answer3', version=2), None)
 
-        self.cache.set('answer4', 37, version=1)
-        self.cache.set('answer4', 42, version=2)
-        self.v2_cache.delete('answer4', version=1)
-        self.assertEqual(self.cache.get('answer4', version=1), None)
-        self.assertEqual(self.cache.get('answer4', version=2), 42)
+        cache.set('answer4', 37, version=1)
+        cache.set('answer4', 42, version=2)
+        caches['v2'].delete('answer4', version=1)
+        self.assertEqual(cache.get('answer4', version=1), None)
+        self.assertEqual(cache.get('answer4', version=2), 42)
 
     def test_cache_versioning_incr_decr(self):
-        self.cache.set('answer1', 37, version=1)
-        self.cache.set('answer1', 42, version=2)
-        self.cache.incr('answer1')
-        self.assertEqual(self.cache.get('answer1', version=1), 38)
-        self.assertEqual(self.cache.get('answer1', version=2), 42)
-        self.cache.decr('answer1')
-        self.assertEqual(self.cache.get('answer1', version=1), 37)
-        self.assertEqual(self.cache.get('answer1', version=2), 42)
+        cache.set('answer1', 37, version=1)
+        cache.set('answer1', 42, version=2)
+        cache.incr('answer1')
+        self.assertEqual(cache.get('answer1', version=1), 38)
+        self.assertEqual(cache.get('answer1', version=2), 42)
+        cache.decr('answer1')
+        self.assertEqual(cache.get('answer1', version=1), 37)
+        self.assertEqual(cache.get('answer1', version=2), 42)
 
-        self.cache.set('answer2', 37, version=1)
-        self.cache.set('answer2', 42, version=2)
-        self.cache.incr('answer2', version=2)
-        self.assertEqual(self.cache.get('answer2', version=1), 37)
-        self.assertEqual(self.cache.get('answer2', version=2), 43)
-        self.cache.decr('answer2', version=2)
-        self.assertEqual(self.cache.get('answer2', version=1), 37)
-        self.assertEqual(self.cache.get('answer2', version=2), 42)
+        cache.set('answer2', 37, version=1)
+        cache.set('answer2', 42, version=2)
+        cache.incr('answer2', version=2)
+        self.assertEqual(cache.get('answer2', version=1), 37)
+        self.assertEqual(cache.get('answer2', version=2), 43)
+        cache.decr('answer2', version=2)
+        self.assertEqual(cache.get('answer2', version=1), 37)
+        self.assertEqual(cache.get('answer2', version=2), 42)
 
-        self.cache.set('answer3', 37, version=1)
-        self.cache.set('answer3', 42, version=2)
-        self.v2_cache.incr('answer3')
-        self.assertEqual(self.cache.get('answer3', version=1), 37)
-        self.assertEqual(self.cache.get('answer3', version=2), 43)
-        self.v2_cache.decr('answer3')
-        self.assertEqual(self.cache.get('answer3', version=1), 37)
-        self.assertEqual(self.cache.get('answer3', version=2), 42)
+        cache.set('answer3', 37, version=1)
+        cache.set('answer3', 42, version=2)
+        caches['v2'].incr('answer3')
+        self.assertEqual(cache.get('answer3', version=1), 37)
+        self.assertEqual(cache.get('answer3', version=2), 43)
+        caches['v2'].decr('answer3')
+        self.assertEqual(cache.get('answer3', version=1), 37)
+        self.assertEqual(cache.get('answer3', version=2), 42)
 
-        self.cache.set('answer4', 37, version=1)
-        self.cache.set('answer4', 42, version=2)
-        self.v2_cache.incr('answer4', version=1)
-        self.assertEqual(self.cache.get('answer4', version=1), 38)
-        self.assertEqual(self.cache.get('answer4', version=2), 42)
-        self.v2_cache.decr('answer4', version=1)
-        self.assertEqual(self.cache.get('answer4', version=1), 37)
-        self.assertEqual(self.cache.get('answer4', version=2), 42)
+        cache.set('answer4', 37, version=1)
+        cache.set('answer4', 42, version=2)
+        caches['v2'].incr('answer4', version=1)
+        self.assertEqual(cache.get('answer4', version=1), 38)
+        self.assertEqual(cache.get('answer4', version=2), 42)
+        caches['v2'].decr('answer4', version=1)
+        self.assertEqual(cache.get('answer4', version=1), 37)
+        self.assertEqual(cache.get('answer4', version=2), 42)
 
     def test_cache_versioning_get_set_many(self):
         # set, using default version = 1
-        self.cache.set_many({'ford1': 37, 'arthur1': 42})
-        self.assertEqual(self.cache.get_many(['ford1', 'arthur1']),
+        cache.set_many({'ford1': 37, 'arthur1': 42})
+        self.assertEqual(cache.get_many(['ford1', 'arthur1']),
                          {'ford1': 37, 'arthur1': 42})
-        self.assertEqual(self.cache.get_many(['ford1', 'arthur1'], version=1),
+        self.assertEqual(cache.get_many(['ford1', 'arthur1'], version=1),
                          {'ford1': 37, 'arthur1': 42})
-        self.assertEqual(self.cache.get_many(['ford1', 'arthur1'], version=2), {})
+        self.assertEqual(cache.get_many(['ford1', 'arthur1'], version=2), {})
 
-        self.assertEqual(self.v2_cache.get_many(['ford1', 'arthur1']), {})
-        self.assertEqual(self.v2_cache.get_many(['ford1', 'arthur1'], version=1),
+        self.assertEqual(caches['v2'].get_many(['ford1', 'arthur1']), {})
+        self.assertEqual(caches['v2'].get_many(['ford1', 'arthur1'], version=1),
                          {'ford1': 37, 'arthur1': 42})
-        self.assertEqual(self.v2_cache.get_many(['ford1', 'arthur1'], version=2), {})
+        self.assertEqual(caches['v2'].get_many(['ford1', 'arthur1'], version=2), {})
 
         # set, default version = 1, but manually override version = 2
-        self.cache.set_many({'ford2': 37, 'arthur2': 42}, version=2)
-        self.assertEqual(self.cache.get_many(['ford2', 'arthur2']), {})
-        self.assertEqual(self.cache.get_many(['ford2', 'arthur2'], version=1), {})
-        self.assertEqual(self.cache.get_many(['ford2', 'arthur2'], version=2),
+        cache.set_many({'ford2': 37, 'arthur2': 42}, version=2)
+        self.assertEqual(cache.get_many(['ford2', 'arthur2']), {})
+        self.assertEqual(cache.get_many(['ford2', 'arthur2'], version=1), {})
+        self.assertEqual(cache.get_many(['ford2', 'arthur2'], version=2),
                          {'ford2': 37, 'arthur2': 42})
 
-        self.assertEqual(self.v2_cache.get_many(['ford2', 'arthur2']),
+        self.assertEqual(caches['v2'].get_many(['ford2', 'arthur2']),
                          {'ford2': 37, 'arthur2': 42})
-        self.assertEqual(self.v2_cache.get_many(['ford2', 'arthur2'], version=1), {})
-        self.assertEqual(self.v2_cache.get_many(['ford2', 'arthur2'], version=2),
+        self.assertEqual(caches['v2'].get_many(['ford2', 'arthur2'], version=1), {})
+        self.assertEqual(caches['v2'].get_many(['ford2', 'arthur2'], version=2),
                          {'ford2': 37, 'arthur2': 42})
 
         # v2 set, using default version = 2
-        self.v2_cache.set_many({'ford3': 37, 'arthur3': 42})
-        self.assertEqual(self.cache.get_many(['ford3', 'arthur3']), {})
-        self.assertEqual(self.cache.get_many(['ford3', 'arthur3'], version=1), {})
-        self.assertEqual(self.cache.get_many(['ford3', 'arthur3'], version=2),
+        caches['v2'].set_many({'ford3': 37, 'arthur3': 42})
+        self.assertEqual(cache.get_many(['ford3', 'arthur3']), {})
+        self.assertEqual(cache.get_many(['ford3', 'arthur3'], version=1), {})
+        self.assertEqual(cache.get_many(['ford3', 'arthur3'], version=2),
                          {'ford3': 37, 'arthur3': 42})
 
-        self.assertEqual(self.v2_cache.get_many(['ford3', 'arthur3']),
+        self.assertEqual(caches['v2'].get_many(['ford3', 'arthur3']),
                          {'ford3': 37, 'arthur3': 42})
-        self.assertEqual(self.v2_cache.get_many(['ford3', 'arthur3'], version=1), {})
-        self.assertEqual(self.v2_cache.get_many(['ford3', 'arthur3'], version=2),
+        self.assertEqual(caches['v2'].get_many(['ford3', 'arthur3'], version=1), {})
+        self.assertEqual(caches['v2'].get_many(['ford3', 'arthur3'], version=2),
                          {'ford3': 37, 'arthur3': 42})
 
         # v2 set, default version = 2, but manually override version = 1
-        self.v2_cache.set_many({'ford4': 37, 'arthur4': 42}, version=1)
-        self.assertEqual(self.cache.get_many(['ford4', 'arthur4']),
+        caches['v2'].set_many({'ford4': 37, 'arthur4': 42}, version=1)
+        self.assertEqual(cache.get_many(['ford4', 'arthur4']),
                          {'ford4': 37, 'arthur4': 42})
-        self.assertEqual(self.cache.get_many(['ford4', 'arthur4'], version=1),
+        self.assertEqual(cache.get_many(['ford4', 'arthur4'], version=1),
                          {'ford4': 37, 'arthur4': 42})
-        self.assertEqual(self.cache.get_many(['ford4', 'arthur4'], version=2), {})
+        self.assertEqual(cache.get_many(['ford4', 'arthur4'], version=2), {})
 
-        self.assertEqual(self.v2_cache.get_many(['ford4', 'arthur4']), {})
-        self.assertEqual(self.v2_cache.get_many(['ford4', 'arthur4'], version=1),
+        self.assertEqual(caches['v2'].get_many(['ford4', 'arthur4']), {})
+        self.assertEqual(caches['v2'].get_many(['ford4', 'arthur4'], version=1),
                          {'ford4': 37, 'arthur4': 42})
-        self.assertEqual(self.v2_cache.get_many(['ford4', 'arthur4'], version=2), {})
+        self.assertEqual(caches['v2'].get_many(['ford4', 'arthur4'], version=2), {})
 
     def test_incr_version(self):
-        self.cache.set('answer', 42, version=2)
-        self.assertEqual(self.cache.get('answer'), None)
-        self.assertEqual(self.cache.get('answer', version=1), None)
-        self.assertEqual(self.cache.get('answer', version=2), 42)
-        self.assertEqual(self.cache.get('answer', version=3), None)
+        cache.set('answer', 42, version=2)
+        self.assertEqual(cache.get('answer'), None)
+        self.assertEqual(cache.get('answer', version=1), None)
+        self.assertEqual(cache.get('answer', version=2), 42)
+        self.assertEqual(cache.get('answer', version=3), None)
 
-        self.assertEqual(self.cache.incr_version('answer', version=2), 3)
-        self.assertEqual(self.cache.get('answer'), None)
-        self.assertEqual(self.cache.get('answer', version=1), None)
-        self.assertEqual(self.cache.get('answer', version=2), None)
-        self.assertEqual(self.cache.get('answer', version=3), 42)
+        self.assertEqual(cache.incr_version('answer', version=2), 3)
+        self.assertEqual(cache.get('answer'), None)
+        self.assertEqual(cache.get('answer', version=1), None)
+        self.assertEqual(cache.get('answer', version=2), None)
+        self.assertEqual(cache.get('answer', version=3), 42)
 
-        self.v2_cache.set('answer2', 42)
-        self.assertEqual(self.v2_cache.get('answer2'), 42)
-        self.assertEqual(self.v2_cache.get('answer2', version=1), None)
-        self.assertEqual(self.v2_cache.get('answer2', version=2), 42)
-        self.assertEqual(self.v2_cache.get('answer2', version=3), None)
+        caches['v2'].set('answer2', 42)
+        self.assertEqual(caches['v2'].get('answer2'), 42)
+        self.assertEqual(caches['v2'].get('answer2', version=1), None)
+        self.assertEqual(caches['v2'].get('answer2', version=2), 42)
+        self.assertEqual(caches['v2'].get('answer2', version=3), None)
 
-        self.assertEqual(self.v2_cache.incr_version('answer2'), 3)
-        self.assertEqual(self.v2_cache.get('answer2'), None)
-        self.assertEqual(self.v2_cache.get('answer2', version=1), None)
-        self.assertEqual(self.v2_cache.get('answer2', version=2), None)
-        self.assertEqual(self.v2_cache.get('answer2', version=3), 42)
+        self.assertEqual(caches['v2'].incr_version('answer2'), 3)
+        self.assertEqual(caches['v2'].get('answer2'), None)
+        self.assertEqual(caches['v2'].get('answer2', version=1), None)
+        self.assertEqual(caches['v2'].get('answer2', version=2), None)
+        self.assertEqual(caches['v2'].get('answer2', version=3), 42)
 
-        self.assertRaises(ValueError, self.cache.incr_version, 'does_not_exist')
+        self.assertRaises(ValueError, cache.incr_version, 'does_not_exist')
 
     def test_decr_version(self):
-        self.cache.set('answer', 42, version=2)
-        self.assertEqual(self.cache.get('answer'), None)
-        self.assertEqual(self.cache.get('answer', version=1), None)
-        self.assertEqual(self.cache.get('answer', version=2), 42)
+        cache.set('answer', 42, version=2)
+        self.assertEqual(cache.get('answer'), None)
+        self.assertEqual(cache.get('answer', version=1), None)
+        self.assertEqual(cache.get('answer', version=2), 42)
 
-        self.assertEqual(self.cache.decr_version('answer', version=2), 1)
-        self.assertEqual(self.cache.get('answer'), 42)
-        self.assertEqual(self.cache.get('answer', version=1), 42)
-        self.assertEqual(self.cache.get('answer', version=2), None)
+        self.assertEqual(cache.decr_version('answer', version=2), 1)
+        self.assertEqual(cache.get('answer'), 42)
+        self.assertEqual(cache.get('answer', version=1), 42)
+        self.assertEqual(cache.get('answer', version=2), None)
 
-        self.v2_cache.set('answer2', 42)
-        self.assertEqual(self.v2_cache.get('answer2'), 42)
-        self.assertEqual(self.v2_cache.get('answer2', version=1), None)
-        self.assertEqual(self.v2_cache.get('answer2', version=2), 42)
+        caches['v2'].set('answer2', 42)
+        self.assertEqual(caches['v2'].get('answer2'), 42)
+        self.assertEqual(caches['v2'].get('answer2', version=1), None)
+        self.assertEqual(caches['v2'].get('answer2', version=2), 42)
 
-        self.assertEqual(self.v2_cache.decr_version('answer2'), 1)
-        self.assertEqual(self.v2_cache.get('answer2'), None)
-        self.assertEqual(self.v2_cache.get('answer2', version=1), 42)
-        self.assertEqual(self.v2_cache.get('answer2', version=2), None)
+        self.assertEqual(caches['v2'].decr_version('answer2'), 1)
+        self.assertEqual(caches['v2'].get('answer2'), None)
+        self.assertEqual(caches['v2'].get('answer2', version=1), 42)
+        self.assertEqual(caches['v2'].get('answer2', version=2), None)
 
-        self.assertRaises(ValueError, self.cache.decr_version, 'does_not_exist', version=2)
+        self.assertRaises(ValueError, cache.decr_version, 'does_not_exist', version=2)
 
     def test_custom_key_func(self):
         # Two caches with different key functions aren't visible to each other
-        self.cache.set('answer1', 42)
-        self.assertEqual(self.cache.get('answer1'), 42)
-        self.assertEqual(self.custom_key_cache.get('answer1'), None)
-        self.assertEqual(self.custom_key_cache2.get('answer1'), None)
+        cache.set('answer1', 42)
+        self.assertEqual(cache.get('answer1'), 42)
+        self.assertEqual(caches['custom_key'].get('answer1'), None)
+        self.assertEqual(caches['custom_key2'].get('answer1'), None)
 
-        self.custom_key_cache.set('answer2', 42)
-        self.assertEqual(self.cache.get('answer2'), None)
-        self.assertEqual(self.custom_key_cache.get('answer2'), 42)
-        self.assertEqual(self.custom_key_cache2.get('answer2'), 42)
+        caches['custom_key'].set('answer2', 42)
+        self.assertEqual(cache.get('answer2'), None)
+        self.assertEqual(caches['custom_key'].get('answer2'), 42)
+        self.assertEqual(caches['custom_key2'].get('answer2'), 42)
 
     def test_cache_write_unpickable_object(self):
         update_middleware = UpdateCacheMiddleware()
-        update_middleware.cache = self.cache
+        update_middleware.cache = cache
 
         fetch_middleware = FetchFromCacheMiddleware()
-        fetch_middleware.cache = self.cache
+        fetch_middleware.cache = cache
 
         request = self.factory.get('/cache/test')
         request._cache_update_cache = True
@@ -817,46 +850,36 @@ class BaseCacheTests(object):
         self.assertEqual(get_cache_data.cookies, response.cookies)
 
 
-def custom_key_func(key, key_prefix, version):
-    "A customized cache key function"
-    return 'CUSTOM-' + '-'.join([key_prefix, str(version), key])
-
-
-@override_settings(
-    CACHES={
-        'default': {
-            'BACKEND': 'django.core.cache.backends.db.DatabaseCache',
-            'LOCATION': 'test cache table',
-        },
-    },
-)
+@override_settings(CACHES=caches_setting_for_tests(
+    BACKEND='django.core.cache.backends.db.DatabaseCache',
+    # Spaces are used in the table name to ensure quoting/escaping is working
+    LOCATION='test cache table'
+))
 class DBCacheTests(BaseCacheTests, TransactionTestCase):
 
     available_apps = ['cache']
-    backend_name = 'django.core.cache.backends.db.DatabaseCache'
 
     def setUp(self):
-        self.factory = RequestFactory()
-        # Spaces are used in the table name to ensure quoting/escaping is working
-        self._table_name = 'test cache table'
-        management.call_command('createcachetable', verbosity=0, interactive=False)
-        self.cache = get_cache(self.backend_name, LOCATION=self._table_name, OPTIONS={'MAX_ENTRIES': 30})
-        self.prefix_cache = get_cache(self.backend_name, LOCATION=self._table_name, KEY_PREFIX='cacheprefix')
-        self.v2_cache = get_cache(self.backend_name, LOCATION=self._table_name, VERSION=2)
-        self.custom_key_cache = get_cache(self.backend_name, LOCATION=self._table_name, KEY_FUNCTION=custom_key_func)
-        self.custom_key_cache2 = get_cache(self.backend_name, LOCATION=self._table_name, KEY_FUNCTION='cache.tests.custom_key_func')
+        # The super call needs to happen first for the settings override.
+        super(DBCacheTests, self).setUp()
+        self.create_table()
 
     def tearDown(self):
-        cursor = connection.cursor()
-        cursor.execute('DROP TABLE %s' % connection.ops.quote_name(self._table_name))
-        connection.commit()
+        # The super call needs to happen first because it uses the database.
+        super(DBCacheTests, self).tearDown()
+        self.drop_table()
 
-    def test_cull(self):
-        self.perform_cull_test(50, 29)
+    def create_table(self):
+        management.call_command('createcachetable', verbosity=0, interactive=False)
+
+    def drop_table(self):
+        cursor = connection.cursor()
+        table_name = connection.ops.quote_name('test cache table')
+        cursor.execute('DROP TABLE %s' % table_name)
+        cursor.close()
 
     def test_zero_cull(self):
-        self.cache = get_cache(self.backend_name, LOCATION=self._table_name, OPTIONS={'MAX_ENTRIES': 30, 'CULL_FREQUENCY': 0})
-        self.perform_cull_test(50, 18)
+        self._perform_cull_test(caches['zero_cull'], 50, 18)
 
     def test_second_call_doesnt_crash(self):
         stdout = six.StringIO()
@@ -865,30 +888,30 @@ class DBCacheTests(BaseCacheTests, TransactionTestCase):
             stdout=stdout
         )
         self.assertEqual(stdout.getvalue(),
-            "Cache table '%s' already exists.\n" % self._table_name)
+            "Cache table 'test cache table' already exists.\n" * len(settings.CACHES))
 
     def test_createcachetable_with_table_argument(self):
         """
         Delete and recreate cache table with legacy behavior (explicitly
        specifying the table name).
""" - self.tearDown() + self.drop_table() stdout = six.StringIO() management.call_command( 'createcachetable', - self._table_name, + 'test cache table', verbosity=2, stdout=stdout ) self.assertEqual(stdout.getvalue(), - "Cache table '%s' created.\n" % self._table_name) + "Cache table 'test cache table' created.\n") def test_clear_commits_transaction(self): # Ensure the database transaction is committed (#19896) - self.cache.set("key1", "spam") - self.cache.clear() + cache.set("key1", "spam") + cache.clear() transaction.rollback() - self.assertEqual(self.cache.get("key1"), None) + self.assertEqual(cache.get("key1"), None) @override_settings(USE_TZ=True) @@ -945,86 +968,59 @@ class CreateCacheTableForDBCacheTests(TestCase): router.routers = old_routers -class LocMemCacheTests(unittest.TestCase, BaseCacheTests): - backend_name = 'django.core.cache.backends.locmem.LocMemCache' +@override_settings(CACHES=caches_setting_for_tests( + BACKEND='django.core.cache.backends.locmem.LocMemCache', +)) +class LocMemCacheTests(BaseCacheTests, TestCase): def setUp(self): - self.factory = RequestFactory() - self.cache = get_cache(self.backend_name, OPTIONS={'MAX_ENTRIES': 30}) - self.prefix_cache = get_cache(self.backend_name, KEY_PREFIX='cacheprefix') - self.v2_cache = get_cache(self.backend_name, VERSION=2) - self.custom_key_cache = get_cache(self.backend_name, OPTIONS={'MAX_ENTRIES': 30}, KEY_FUNCTION=custom_key_func) - self.custom_key_cache2 = get_cache(self.backend_name, OPTIONS={'MAX_ENTRIES': 30}, KEY_FUNCTION='cache.tests.custom_key_func') + super(LocMemCacheTests, self).setUp() # LocMem requires a hack to make the other caches # share a data store with the 'normal' cache. - self.prefix_cache._cache = self.cache._cache - self.prefix_cache._expire_info = self.cache._expire_info + caches['prefix']._cache = cache._cache + caches['prefix']._expire_info = cache._expire_info - self.v2_cache._cache = self.cache._cache - self.v2_cache._expire_info = self.cache._expire_info + caches['v2']._cache = cache._cache + caches['v2']._expire_info = cache._expire_info - self.custom_key_cache._cache = self.cache._cache - self.custom_key_cache._expire_info = self.cache._expire_info + caches['custom_key']._cache = cache._cache + caches['custom_key']._expire_info = cache._expire_info - self.custom_key_cache2._cache = self.cache._cache - self.custom_key_cache2._expire_info = self.cache._expire_info - - def tearDown(self): - self.cache.clear() - - def test_cull(self): - self.perform_cull_test(50, 29) - - def test_zero_cull(self): - self.cache = get_cache(self.backend_name, OPTIONS={'MAX_ENTRIES': 30, 'CULL_FREQUENCY': 0}) - self.perform_cull_test(50, 19) + caches['custom_key2']._cache = cache._cache + caches['custom_key2']._expire_info = cache._expire_info def test_multiple_caches(self): "Check that multiple locmem caches are isolated" - mirror_cache = get_cache(self.backend_name) - other_cache = get_cache(self.backend_name, LOCATION='other') - - self.cache.set('value1', 42) - self.assertEqual(mirror_cache.get('value1'), 42) - self.assertEqual(other_cache.get('value1'), None) + cache.set('value', 42) + self.assertEqual(caches['default'].get('value'), 42) + self.assertEqual(caches['other'].get('value'), None) def test_incr_decr_timeout(self): """incr/decr does not modify expiry time (matches memcached behavior)""" key = 'value' - _key = self.cache.make_key(key) - self.cache.set(key, 1, timeout=self.cache.default_timeout * 10) - expire = self.cache._expire_info[_key] - self.cache.incr(key) - self.assertEqual(expire, 
self.cache._expire_info[_key]) - self.cache.decr(key) - self.assertEqual(expire, self.cache._expire_info[_key]) + _key = cache.make_key(key) + cache.set(key, 1, timeout=cache.default_timeout * 10) + expire = cache._expire_info[_key] + cache.incr(key) + self.assertEqual(expire, cache._expire_info[_key]) + cache.decr(key) + self.assertEqual(expire, cache._expire_info[_key]) # memcached backend isn't guaranteed to be available. # To check the memcached backend, the test settings file will # need to contain at least one cache backend setting that points at # your memcache server. -@unittest.skipUnless( - any(cache['BACKEND'].startswith('django.core.cache.backends.memcached.') - for cache in settings.CACHES.values()), - "memcached not available") -class MemcachedCacheTests(unittest.TestCase, BaseCacheTests): +memcached_params = None +for _cache_params in settings.CACHES.values(): + if _cache_params['BACKEND'].startswith('django.core.cache.backends.memcached.'): + memcached_params = _cache_params - def setUp(self): - self.factory = RequestFactory() - for cache_key, cache in settings.CACHES.items(): - if cache['BACKEND'].startswith('django.core.cache.backends.memcached.'): - break - random_prefix = ''.join(random.choice(string.ascii_letters) for x in range(10)) - self.cache = get_cache(cache_key) - self.prefix_cache = get_cache(cache_key, KEY_PREFIX=random_prefix) - self.v2_cache = get_cache(cache_key, VERSION=2) - self.custom_key_cache = get_cache(cache_key, KEY_FUNCTION=custom_key_func) - self.custom_key_cache2 = get_cache(cache_key, KEY_FUNCTION='cache.tests.custom_key_func') - def tearDown(self): - self.cache.clear() +@unittest.skipIf(memcached_params is None, "memcached not available") +@override_settings(CACHES=caches_setting_for_tests(**memcached_params)) +class MemcachedCacheTests(BaseCacheTests, TestCase): def test_invalid_keys(self): """ @@ -1037,9 +1033,9 @@ class MemcachedCacheTests(unittest.TestCase, BaseCacheTests): """ # memcached does not allow whitespace or control characters in keys - self.assertRaises(Exception, self.cache.set, 'key with spaces', 'value') + self.assertRaises(Exception, cache.set, 'key with spaces', 'value') # memcached limits key length to 250 - self.assertRaises(Exception, self.cache.set, 'a' * 251, 'value') + self.assertRaises(Exception, cache.set, 'a' * 251, 'value') # Explicitly display a skipped test if no configured cache uses MemcachedCache @unittest.skipUnless( @@ -1050,58 +1046,62 @@ class MemcachedCacheTests(unittest.TestCase, BaseCacheTests): # Regression test for #19810 for cache_key, cache in settings.CACHES.items(): if cache['BACKEND'] == 'django.core.cache.backends.memcached.MemcachedCache': - self.assertEqual(get_cache(cache_key)._cache.pickleProtocol, + self.assertEqual(caches[cache_key]._cache.pickleProtocol, pickle.HIGHEST_PROTOCOL) + def test_cull(self): + # culling isn't implemented, memcached deals with it. + pass -class FileBasedCacheTests(unittest.TestCase, BaseCacheTests): + def test_zero_cull(self): + # culling isn't implemented, memcached deals with it. + pass + + +@override_settings(CACHES=caches_setting_for_tests( + BACKEND='django.core.cache.backends.filebased.FileBasedCache', +)) +class FileBasedCacheTests(BaseCacheTests, TestCase): """ Specific test cases for the file-based cache. 
""" - backend_name = 'django.core.cache.backends.filebased.FileBasedCache' def setUp(self): - self.factory = RequestFactory() + super(FileBasedCacheTests, self).setUp() self.dirname = tempfile.mkdtemp() - self.cache = get_cache(self.backend_name, LOCATION=self.dirname, OPTIONS={'MAX_ENTRIES': 30}) - self.prefix_cache = get_cache(self.backend_name, LOCATION=self.dirname, KEY_PREFIX='cacheprefix') - self.v2_cache = get_cache(self.backend_name, LOCATION=self.dirname, VERSION=2) - self.custom_key_cache = get_cache(self.backend_name, LOCATION=self.dirname, KEY_FUNCTION=custom_key_func) - self.custom_key_cache2 = get_cache(self.backend_name, LOCATION=self.dirname, KEY_FUNCTION='cache.tests.custom_key_func') + for cache_params in settings.CACHES.values(): + cache_params.update({'LOCATION': self.dirname}) def tearDown(self): - self.cache.clear() - os.rmdir(self.dirname) - - def test_cull(self): - self.perform_cull_test(50, 29) + shutil.rmtree(self.dirname) + super(FileBasedCacheTests, self).tearDown() def test_ignores_non_cache_files(self): fname = os.path.join(self.dirname, 'not-a-cache-file') with open(fname, 'w'): os.utime(fname, None) - self.cache.clear() + cache.clear() self.assertTrue(os.path.exists(fname), 'Expected cache.clear to ignore non cache files') os.remove(fname) def test_clear_does_not_remove_cache_dir(self): - self.cache.clear() + cache.clear() self.assertTrue(os.path.exists(self.dirname), 'Expected cache.clear to keep the cache dir') def test_creates_cache_dir_if_nonexistent(self): os.rmdir(self.dirname) - self.cache.set('foo', 'bar') + cache.set('foo', 'bar') os.path.exists(self.dirname) - def test_zero_cull(self): - # Regression test for #15806 - self.cache = get_cache(self.backend_name, LOCATION=self.dirname, OPTIONS={'MAX_ENTRIES': 30, 'CULL_FREQUENCY': 0}) - self.perform_cull_test(50, 19) - -class CustomCacheKeyValidationTests(unittest.TestCase): +@override_settings(CACHES={ + 'default': { + 'BACKEND': 'cache.liberal_backend.CacheClass', + }, +}) +class CustomCacheKeyValidationTests(TestCase): """ Tests for the ability to mixin a custom ``validate_key`` method to a custom cache backend that otherwise inherits from a builtin @@ -1109,8 +1109,6 @@ class CustomCacheKeyValidationTests(unittest.TestCase): """ def test_custom_key_validation(self): - cache = get_cache('cache.liberal_backend.CacheClass') - # this key is both longer than 250 characters, and has spaces key = 'some key with spaces' * 15 val = 'a value' @@ -1118,19 +1116,25 @@ class CustomCacheKeyValidationTests(unittest.TestCase): self.assertEqual(cache.get(key), val) -class GetCacheTests(unittest.TestCase): +class GetCacheTests(IgnorePendingDeprecationWarningsMixin, TestCase): def test_simple(self): - from django.core.cache import cache - self.assertIsInstance(cache, get_cache('default').__class__) + from django.core.cache import caches, DEFAULT_CACHE_ALIAS, get_cache + self.assertIsInstance( + caches[DEFAULT_CACHE_ALIAS], + get_cache('default').__class__ + ) cache = get_cache( - 'django.core.cache.backends.dummy.DummyCache', **{'TIMEOUT': 120}) + 'django.core.cache.backends.dummy.DummyCache', + **{'TIMEOUT': 120} + ) self.assertEqual(cache.default_timeout, 120) self.assertRaises(InvalidCacheBackendError, get_cache, 'does_not_exist') def test_close(self): + from django.core.cache import get_cache from django.core import signals cache = get_cache('cache.closeable_cache.CacheClass') self.assertFalse(cache.closed) @@ -1153,11 +1157,8 @@ class CacheUtils(TestCase): def setUp(self): self.path = '/cache/test/' - self.cache 
= get_cache('default') self.factory = RequestFactory() - def tearDown(self): - self.cache.clear() def test_patch_vary_headers(self): headers = ( @@ -1261,11 +1262,10 @@ class CacheHEADTest(TestCase): def setUp(self): self.path = '/cache/test/' - self.cache = get_cache('default') self.factory = RequestFactory() def tearDown(self): - self.cache.clear() + cache.clear() def _set_cache(self, request, msg): response = HttpResponse() @@ -1314,11 +1314,10 @@ class CacheI18nTest(TestCase): def setUp(self): self.path = '/cache/test/' - self.cache = get_cache('default') self.factory = RequestFactory() def tearDown(self): - self.cache.clear() + cache.clear() @override_settings(USE_I18N=True, USE_L10N=False, USE_TZ=False) def test_cache_key_i18n_translation(self): @@ -1581,8 +1580,8 @@ class CacheMiddlewareTest(IgnoreDeprecationWarningsMixin, TestCase): def setUp(self): super(CacheMiddlewareTest, self).setUp() self.factory = RequestFactory() - self.default_cache = get_cache('default') - self.other_cache = get_cache('other') + self.default_cache = caches['default'] + self.other_cache = caches['other'] def tearDown(self): self.default_cache.clear() @@ -1608,7 +1607,7 @@ class CacheMiddlewareTest(IgnoreDeprecationWarningsMixin, TestCase): # First, test with "defaults": as_view_decorator = CacheMiddleware(cache_alias=None, key_prefix=None) - self.assertEqual(as_view_decorator.cache_timeout, 300) # Timeout value for 'default' cache, i.e. 300 + self.assertEqual(as_view_decorator.cache_timeout, 30) # Timeout value for 'default' cache, i.e. 30 self.assertEqual(as_view_decorator.key_prefix, '') self.assertEqual(as_view_decorator.cache_alias, 'default') # Value of DEFAULT_CACHE_ALIAS from django.core.cache self.assertEqual(as_view_decorator.cache_anonymous_only, False) @@ -1755,7 +1754,7 @@ class CacheMiddlewareTest(IgnoreDeprecationWarningsMixin, TestCase): time.sleep(2) # ... the default cache will still hit - get_cache('default') + caches['default'] response = default_view(request, '11') self.assertEqual(response.content, b'Hello World 1') @@ -1801,11 +1800,10 @@ class TestWithTemplateResponse(TestCase): """ def setUp(self): self.path = '/cache/test/' - self.cache = get_cache('default') self.factory = RequestFactory() def tearDown(self): - self.cache.clear() + cache.clear() def test_patch_vary_headers(self): headers = ( @@ -1904,3 +1902,29 @@ class TestMakeTemplateFragmentKey(TestCase): key = make_template_fragment_key('spam', ['abc:def%']) self.assertEqual(key, 'template.cache.spam.f27688177baec990cdf3fbd9d9c3f469') + +class CacheHandlerTest(TestCase): + def test_same_instance(self): + """ + Attempting to retrieve the same alias should yield the same instance. + """ + cache1 = caches['default'] + cache2 = caches['default'] + + self.assertTrue(cache1 is cache2) + + def test_per_thread(self): + """ + Requesting the same alias from separate threads should yield separate + instances. + """ + c = [] + def runner(): + c.append(caches['default']) + + for x in range(2): + t = threading.Thread(target=runner) + t.start() + t.join() + + self.assertFalse(c[0] is c[1]) diff --git a/tests/template_tests/tests.py b/tests/template_tests/tests.py index bad15e2834f..f54fc6fcba8 100644 --- a/tests/template_tests/tests.py +++ b/tests/template_tests/tests.py @@ -514,7 +514,7 @@ class TemplateRegressionTests(TestCase): o2 = t2.render(ctx) self.assertEqual(o1, 'foo') - self.assertNotEqual(o1, o2) + self.assertEqual(o2, 'bar') def test_cache_missing_backend(self): """