# -*- coding: utf-8 -*-

# Unit tests for cache framework
# Uses whatever cache backend is set in the test settings file.
from __future__ import absolute_import

import hashlib
import os
import re
import StringIO
import tempfile
import time
import warnings

from django.conf import settings
from django.core import management
from django.core.cache import get_cache, DEFAULT_CACHE_ALIAS
from django.core.cache.backends.base import (CacheKeyWarning,
    InvalidCacheBackendError)
from django.db import router
from django.http import HttpResponse, HttpRequest, QueryDict
from django.middleware.cache import (FetchFromCacheMiddleware,
    UpdateCacheMiddleware, CacheMiddleware)
from django.template import Template
from django.template.response import TemplateResponse
from django.test import TestCase, TransactionTestCase, RequestFactory
from django.test.utils import (get_warnings_state, restore_warnings_state,
    override_settings)
from django.utils import timezone, translation, unittest
from django.utils.cache import (patch_vary_headers, get_cache_key,
    learn_cache_key, patch_cache_control, patch_response_headers)
from django.utils.encoding import force_unicode
from django.views.decorators.cache import cache_page

from .models import Poll, expensive_calculation


# functions/classes for complex data type tests
def f():
    return 42


class C:
    def m(n):
        return 24


class DummyCacheTests(unittest.TestCase):
    # The Dummy cache backend doesn't really behave like a test backend,
    # so it has different test requirements.
    backend_name = 'django.core.cache.backends.dummy.DummyCache'

    def setUp(self):
        self.cache = get_cache(self.backend_name)

    def test_simple(self):
        "Dummy cache backend ignores cache set calls"
        self.cache.set("key", "value")
        self.assertEqual(self.cache.get("key"), None)

    def test_add(self):
        "Add doesn't do anything in dummy cache backend"
        self.cache.add("addkey1", "value")
        result = self.cache.add("addkey1", "newvalue")
        self.assertEqual(result, True)
        self.assertEqual(self.cache.get("addkey1"), None)

    def test_non_existent(self):
        "Non-existent keys aren't found in the dummy cache backend"
        self.assertEqual(self.cache.get("does_not_exist"), None)
        self.assertEqual(self.cache.get("does_not_exist", "bang!"), "bang!")

    def test_get_many(self):
        "get_many returns nothing for the dummy cache backend"
        self.cache.set('a', 'a')
        self.cache.set('b', 'b')
        self.cache.set('c', 'c')
        self.cache.set('d', 'd')
        self.assertEqual(self.cache.get_many(['a', 'c', 'd']), {})
        self.assertEqual(self.cache.get_many(['a', 'b', 'e']), {})

    def test_delete(self):
        "Cache deletion is transparently ignored on the dummy cache backend"
        self.cache.set("key1", "spam")
        self.cache.set("key2", "eggs")
        self.assertEqual(self.cache.get("key1"), None)
        self.cache.delete("key1")
        self.assertEqual(self.cache.get("key1"), None)
        self.assertEqual(self.cache.get("key2"), None)

    def test_has_key(self):
        "The has_key method doesn't ever return True for the dummy cache backend"
        self.cache.set("hello1", "goodbye1")
        self.assertEqual(self.cache.has_key("hello1"), False)
        self.assertEqual(self.cache.has_key("goodbye1"), False)

    def test_in(self):
        "The in operator doesn't ever return True for the dummy cache backend"
        self.cache.set("hello2", "goodbye2")
        self.assertEqual("hello2" in self.cache, False)
        self.assertEqual("goodbye2" in self.cache, False)

    def test_incr(self):
        "Dummy cache values can't be incremented"
        self.cache.set('answer', 42)
        self.assertRaises(ValueError, self.cache.incr, 'answer')
        self.assertRaises(ValueError, self.cache.incr, 'does_not_exist')

    def test_decr(self):
        "Dummy cache values can't be decremented"
        self.cache.set('answer', 42)
        self.assertRaises(ValueError, self.cache.decr, 'answer')
        self.assertRaises(ValueError, self.cache.decr, 'does_not_exist')

    def test_data_types(self):
        "All data types are ignored equally by the dummy cache"
        stuff = {
            'string' : 'this is a string',
            'int' : 42,
            'list' : [1, 2, 3, 4],
            'tuple' : (1, 2, 3, 4),
            'dict' : {'A': 1, 'B' : 2},
            'function' : f,
            'class' : C,
        }
        self.cache.set("stuff", stuff)
        self.assertEqual(self.cache.get("stuff"), None)

    def test_expiration(self):
        "Expiration has no effect on the dummy cache"
        self.cache.set('expire1', 'very quickly', 1)
        self.cache.set('expire2', 'very quickly', 1)
        self.cache.set('expire3', 'very quickly', 1)

        time.sleep(2)
        self.assertEqual(self.cache.get("expire1"), None)

        self.cache.add("expire2", "newvalue")
        self.assertEqual(self.cache.get("expire2"), None)
        self.assertEqual(self.cache.has_key("expire3"), False)

    def test_unicode(self):
        "Unicode values are ignored by the dummy cache"
        stuff = {
            u'ascii': u'ascii_value',
            u'unicode_ascii': u'Iñtërnâtiônàlizætiøn1',
            u'Iñtërnâtiônàlizætiøn': u'Iñtërnâtiônàlizætiøn2',
            u'ascii2': {u'x' : 1 }
        }
        for (key, value) in stuff.items():
            self.cache.set(key, value)
            self.assertEqual(self.cache.get(key), None)

    def test_set_many(self):
        "set_many does nothing for the dummy cache backend"
        self.cache.set_many({'a': 1, 'b': 2})
        self.cache.set_many({'a': 1, 'b': 2}, timeout=2, version='1')

    def test_delete_many(self):
        "delete_many does nothing for the dummy cache backend"
        self.cache.delete_many(['a', 'b'])

    def test_clear(self):
        "clear does nothing for the dummy cache backend"
        self.cache.clear()

    def test_incr_version(self):
        "Dummy cache versions can't be incremented"
        self.cache.set('answer', 42)
        self.assertRaises(ValueError, self.cache.incr_version, 'answer')
        self.assertRaises(ValueError, self.cache.incr_version, 'does_not_exist')

    def test_decr_version(self):
        "Dummy cache versions can't be decremented"
        self.cache.set('answer', 42)
        self.assertRaises(ValueError, self.cache.decr_version, 'answer')
        self.assertRaises(ValueError, self.cache.decr_version, 'does_not_exist')


class BaseCacheTests(object):
    # A common set of tests to apply to all cache backends

    def _get_request_cache(self, path):
        request = HttpRequest()
        request.META = {
            'SERVER_NAME': 'testserver',
            'SERVER_PORT': 80,
        }
        request.path = request.path_info = path
        request._cache_update_cache = True
        request.method = 'GET'
        return request

    def test_simple(self):
        # Simple cache set/get works
        self.cache.set("key", "value")
        self.assertEqual(self.cache.get("key"), "value")

    def test_add(self):
        # A key can be added to a cache
        self.cache.add("addkey1", "value")
        result = self.cache.add("addkey1", "newvalue")
        self.assertEqual(result, False)
        self.assertEqual(self.cache.get("addkey1"), "value")

    def test_prefix(self):
        # Test for same cache key conflicts between shared backend
        self.cache.set('somekey', 'value')

        # should not be set in the prefixed cache
        self.assertFalse(self.prefix_cache.has_key('somekey'))

        self.prefix_cache.set('somekey', 'value2')

        self.assertEqual(self.cache.get('somekey'), 'value')
        self.assertEqual(self.prefix_cache.get('somekey'), 'value2')

    def test_non_existent(self):
        # Non-existent cache keys return as None/default
        # get with non-existent keys
        self.assertEqual(self.cache.get("does_not_exist"), None)
        self.assertEqual(self.cache.get("does_not_exist", "bang!"), "bang!")

    def test_get_many(self):
        # Multiple cache keys can be returned using get_many
        self.cache.set('a', 'a')
        self.cache.set('b', 'b')
        self.cache.set('c', 'c')
        self.cache.set('d', 'd')
        self.assertEqual(self.cache.get_many(['a', 'c', 'd']), {'a' : 'a', 'c' : 'c', 'd' : 'd'})
        self.assertEqual(self.cache.get_many(['a', 'b', 'e']), {'a' : 'a', 'b' : 'b'})

    def test_delete(self):
        # Cache keys can be deleted
        self.cache.set("key1", "spam")
        self.cache.set("key2", "eggs")
        self.assertEqual(self.cache.get("key1"), "spam")
        self.cache.delete("key1")
        self.assertEqual(self.cache.get("key1"), None)
        self.assertEqual(self.cache.get("key2"), "eggs")

    def test_has_key(self):
        # The cache can be inspected for cache keys
        self.cache.set("hello1", "goodbye1")
        self.assertEqual(self.cache.has_key("hello1"), True)
        self.assertEqual(self.cache.has_key("goodbye1"), False)

    def test_in(self):
        # The in operator can be used to inspect cache contents
        self.cache.set("hello2", "goodbye2")
        self.assertEqual("hello2" in self.cache, True)
        self.assertEqual("goodbye2" in self.cache, False)

    def test_incr(self):
        # Cache values can be incremented
        self.cache.set('answer', 41)
        self.assertEqual(self.cache.incr('answer'), 42)
        self.assertEqual(self.cache.get('answer'), 42)
        self.assertEqual(self.cache.incr('answer', 10), 52)
        self.assertEqual(self.cache.get('answer'), 52)
        self.assertRaises(ValueError, self.cache.incr, 'does_not_exist')

    def test_decr(self):
        # Cache values can be decremented
        self.cache.set('answer', 43)
        self.assertEqual(self.cache.decr('answer'), 42)
        self.assertEqual(self.cache.get('answer'), 42)
        self.assertEqual(self.cache.decr('answer', 10), 32)
        self.assertEqual(self.cache.get('answer'), 32)
        self.assertRaises(ValueError, self.cache.decr, 'does_not_exist')

    def test_data_types(self):
        # Many different data types can be cached
        stuff = {
            'string' : 'this is a string',
            'int' : 42,
            'list' : [1, 2, 3, 4],
            'tuple' : (1, 2, 3, 4),
            'dict' : {'A': 1, 'B' : 2},
            'function' : f,
            'class' : C,
        }
        self.cache.set("stuff", stuff)
        self.assertEqual(self.cache.get("stuff"), stuff)

    def test_cache_read_for_model_instance(self):
        # Don't want fields with callable as default to be called on cache read
        expensive_calculation.num_runs = 0
        Poll.objects.all().delete()
        my_poll = Poll.objects.create(question="Well?")
        self.assertEqual(Poll.objects.count(), 1)
        pub_date = my_poll.pub_date
        self.cache.set('question', my_poll)
        cached_poll = self.cache.get('question')
        self.assertEqual(cached_poll.pub_date, pub_date)
        # We only want the default expensive calculation run once
        self.assertEqual(expensive_calculation.num_runs, 1)

    def test_cache_write_for_model_instance_with_deferred(self):
        # Don't want fields with callable as default to be called on cache write
        expensive_calculation.num_runs = 0
        Poll.objects.all().delete()
        my_poll = Poll.objects.create(question="What?")
        self.assertEqual(expensive_calculation.num_runs, 1)
        defer_qs = Poll.objects.all().defer('question')
        self.assertEqual(defer_qs.count(), 1)
        self.assertEqual(expensive_calculation.num_runs, 1)
        self.cache.set('deferred_queryset', defer_qs)
        # cache set should not re-evaluate default functions
        self.assertEqual(expensive_calculation.num_runs, 1)

    def test_cache_read_for_model_instance_with_deferred(self):
        # Don't want fields with callable as default to be called on cache read
        expensive_calculation.num_runs = 0
        Poll.objects.all().delete()
        my_poll = Poll.objects.create(question="What?")
        self.assertEqual(expensive_calculation.num_runs, 1)
        defer_qs = Poll.objects.all().defer('question')
        self.assertEqual(defer_qs.count(), 1)
        self.cache.set('deferred_queryset', defer_qs)
        self.assertEqual(expensive_calculation.num_runs, 1)
        runs_before_cache_read = expensive_calculation.num_runs
        cached_polls = self.cache.get('deferred_queryset')
        # We only want the default expensive calculation run on creation and set
        self.assertEqual(expensive_calculation.num_runs, runs_before_cache_read)

    def test_expiration(self):
        # Cache values can be set to expire
        self.cache.set('expire1', 'very quickly', 1)
        self.cache.set('expire2', 'very quickly', 1)
        self.cache.set('expire3', 'very quickly', 1)

        time.sleep(2)
        self.assertEqual(self.cache.get("expire1"), None)

        self.cache.add("expire2", "newvalue")
        self.assertEqual(self.cache.get("expire2"), "newvalue")
        self.assertEqual(self.cache.has_key("expire3"), False)

    def test_unicode(self):
        # Unicode values can be cached
        stuff = {
            u'ascii': u'ascii_value',
            u'unicode_ascii': u'Iñtërnâtiônàlizætiøn1',
            u'Iñtërnâtiônàlizætiøn': u'Iñtërnâtiônàlizætiøn2',
            u'ascii2': {u'x' : 1 }
        }
        # Test `set`
        for (key, value) in stuff.items():
            self.cache.set(key, value)
            self.assertEqual(self.cache.get(key), value)

        # Test `add`
        for (key, value) in stuff.items():
            self.cache.delete(key)
            self.cache.add(key, value)
            self.assertEqual(self.cache.get(key), value)

        # Test `set_many`
        for (key, value) in stuff.items():
            self.cache.delete(key)
        self.cache.set_many(stuff)
        for (key, value) in stuff.items():
            self.assertEqual(self.cache.get(key), value)

    def test_binary_string(self):
        # Binary strings should be cacheable
        from zlib import compress, decompress
        value = 'value_to_be_compressed'
        compressed_value = compress(value)

        # Test set
        self.cache.set('binary1', compressed_value)
        compressed_result = self.cache.get('binary1')
        self.assertEqual(compressed_value, compressed_result)
        self.assertEqual(value, decompress(compressed_result))

        # Test add
        self.cache.add('binary1-add', compressed_value)
        compressed_result = self.cache.get('binary1-add')
        self.assertEqual(compressed_value, compressed_result)
        self.assertEqual(value, decompress(compressed_result))

        # Test set_many
        self.cache.set_many({'binary1-set_many': compressed_value})
        compressed_result = self.cache.get('binary1-set_many')
        self.assertEqual(compressed_value, compressed_result)
        self.assertEqual(value, decompress(compressed_result))

    def test_set_many(self):
        # Multiple keys can be set using set_many
        self.cache.set_many({"key1": "spam", "key2": "eggs"})
        self.assertEqual(self.cache.get("key1"), "spam")
        self.assertEqual(self.cache.get("key2"), "eggs")

    def test_set_many_expiration(self):
        # set_many takes a second ``timeout`` parameter
        self.cache.set_many({"key1": "spam", "key2": "eggs"}, 1)
        time.sleep(2)
        self.assertEqual(self.cache.get("key1"), None)
        self.assertEqual(self.cache.get("key2"), None)

    def test_delete_many(self):
        # Multiple keys can be deleted using delete_many
        self.cache.set("key1", "spam")
        self.cache.set("key2", "eggs")
        self.cache.set("key3", "ham")
        self.cache.delete_many(["key1", "key2"])
        self.assertEqual(self.cache.get("key1"), None)
        self.assertEqual(self.cache.get("key2"), None)
        self.assertEqual(self.cache.get("key3"), "ham")

    def test_clear(self):
        # The cache can be emptied using clear
        self.cache.set("key1", "spam")
        self.cache.set("key2", "eggs")
        self.cache.clear()
        self.assertEqual(self.cache.get("key1"), None)
        self.assertEqual(self.cache.get("key2"), None)

    def test_long_timeout(self):
        '''
        Using a timeout greater than 30 days makes memcached think
        it is an absolute expiration timestamp instead of a relative
        offset. Test that we honour this convention. Refs #12399.
        '''
        self.cache.set('key1', 'eggs', 60*60*24*30 + 1) #30 days + 1 second
        self.assertEqual(self.cache.get('key1'), 'eggs')

        self.cache.add('key2', 'ham', 60*60*24*30 + 1)
        self.assertEqual(self.cache.get('key2'), 'ham')

        self.cache.set_many({'key3': 'sausage', 'key4': 'lobster bisque'}, 60*60*24*30 + 1)
        self.assertEqual(self.cache.get('key3'), 'sausage')
        self.assertEqual(self.cache.get('key4'), 'lobster bisque')

    def test_float_timeout(self):
        # Make sure a timeout given as a float doesn't crash anything.
        self.cache.set("key1", "spam", 100.2)
        self.assertEqual(self.cache.get("key1"), "spam")

    def perform_cull_test(self, initial_count, final_count):
        """This is implemented as a utility method, because only some of the backends
        implement culling. The culling algorithm also varies slightly, so the final
        number of entries will vary between backends"""
        # Create initial cache key entries. This will overflow the cache, causing a cull
        for i in range(1, initial_count):
            self.cache.set('cull%d' % i, 'value', 1000)
        count = 0
        # Count how many keys are left in the cache.
        for i in range(1, initial_count):
            if self.cache.has_key('cull%d' % i):
                count = count + 1
        self.assertEqual(count, final_count)

    def test_invalid_keys(self):
        """
        All the builtin backends (except memcached, see below) should warn on
        keys that would be refused by memcached. This encourages portable
        caching code without making it too difficult to use production backends
        with more liberal key rules. Refs #6447.

        """
        # mimic custom ``make_key`` method being defined since the default will
        # never show the below warnings
        def func(key, *args):
            return key

        old_func = self.cache.key_func
        self.cache.key_func = func

        try:
            with warnings.catch_warnings(record=True) as w:
                warnings.simplefilter("always")
                # memcached does not allow whitespace or control characters in keys
                self.cache.set('key with spaces', 'value')
                self.assertEqual(len(w), 2)
                self.assertTrue(isinstance(w[0].message, CacheKeyWarning))
            with warnings.catch_warnings(record=True) as w:
                warnings.simplefilter("always")
                # memcached limits key length to 250
                self.cache.set('a' * 251, 'value')
                self.assertEqual(len(w), 1)
                self.assertTrue(isinstance(w[0].message, CacheKeyWarning))
        finally:
            self.cache.key_func = old_func

    def test_cache_versioning_get_set(self):
        # set, using default version = 1
        self.cache.set('answer1', 42)
        self.assertEqual(self.cache.get('answer1'), 42)
        self.assertEqual(self.cache.get('answer1', version=1), 42)
        self.assertEqual(self.cache.get('answer1', version=2), None)

        self.assertEqual(self.v2_cache.get('answer1'), None)
        self.assertEqual(self.v2_cache.get('answer1', version=1), 42)
        self.assertEqual(self.v2_cache.get('answer1', version=2), None)

        # set, default version = 1, but manually override version = 2
        self.cache.set('answer2', 42, version=2)
        self.assertEqual(self.cache.get('answer2'), None)
        self.assertEqual(self.cache.get('answer2', version=1), None)
        self.assertEqual(self.cache.get('answer2', version=2), 42)

        self.assertEqual(self.v2_cache.get('answer2'), 42)
        self.assertEqual(self.v2_cache.get('answer2', version=1), None)
        self.assertEqual(self.v2_cache.get('answer2', version=2), 42)

        # v2 set, using default version = 2
        self.v2_cache.set('answer3', 42)
        self.assertEqual(self.cache.get('answer3'), None)
        self.assertEqual(self.cache.get('answer3', version=1), None)
        self.assertEqual(self.cache.get('answer3', version=2), 42)

        self.assertEqual(self.v2_cache.get('answer3'), 42)
        self.assertEqual(self.v2_cache.get('answer3', version=1), None)
        self.assertEqual(self.v2_cache.get('answer3', version=2), 42)

        # v2 set, default version = 2, but manually override version = 1
        self.v2_cache.set('answer4', 42, version=1)
        self.assertEqual(self.cache.get('answer4'), 42)
        self.assertEqual(self.cache.get('answer4', version=1), 42)
        self.assertEqual(self.cache.get('answer4', version=2), None)

        self.assertEqual(self.v2_cache.get('answer4'), None)
        self.assertEqual(self.v2_cache.get('answer4', version=1), 42)
        self.assertEqual(self.v2_cache.get('answer4', version=2), None)

    def test_cache_versioning_add(self):

        # add, default version = 1, but manually override version = 2
        self.cache.add('answer1', 42, version=2)
        self.assertEqual(self.cache.get('answer1', version=1), None)
        self.assertEqual(self.cache.get('answer1', version=2), 42)

        self.cache.add('answer1', 37, version=2)
        self.assertEqual(self.cache.get('answer1', version=1), None)
        self.assertEqual(self.cache.get('answer1', version=2), 42)

        self.cache.add('answer1', 37, version=1)
        self.assertEqual(self.cache.get('answer1', version=1), 37)
        self.assertEqual(self.cache.get('answer1', version=2), 42)

        # v2 add, using default version = 2
        self.v2_cache.add('answer2', 42)
        self.assertEqual(self.cache.get('answer2', version=1), None)
        self.assertEqual(self.cache.get('answer2', version=2), 42)

        self.v2_cache.add('answer2', 37)
        self.assertEqual(self.cache.get('answer2', version=1), None)
        self.assertEqual(self.cache.get('answer2', version=2), 42)

        self.v2_cache.add('answer2', 37, version=1)
        self.assertEqual(self.cache.get('answer2', version=1), 37)
        self.assertEqual(self.cache.get('answer2', version=2), 42)

        # v2 add, default version = 2, but manually override version = 1
        self.v2_cache.add('answer3', 42, version=1)
        self.assertEqual(self.cache.get('answer3', version=1), 42)
        self.assertEqual(self.cache.get('answer3', version=2), None)

        self.v2_cache.add('answer3', 37, version=1)
        self.assertEqual(self.cache.get('answer3', version=1), 42)
        self.assertEqual(self.cache.get('answer3', version=2), None)

        self.v2_cache.add('answer3', 37)
        self.assertEqual(self.cache.get('answer3', version=1), 42)
        self.assertEqual(self.cache.get('answer3', version=2), 37)

    def test_cache_versioning_has_key(self):
        self.cache.set('answer1', 42)

        # has_key
        self.assertTrue(self.cache.has_key('answer1'))
        self.assertTrue(self.cache.has_key('answer1', version=1))
        self.assertFalse(self.cache.has_key('answer1', version=2))

        self.assertFalse(self.v2_cache.has_key('answer1'))
        self.assertTrue(self.v2_cache.has_key('answer1', version=1))
        self.assertFalse(self.v2_cache.has_key('answer1', version=2))

    def test_cache_versioning_delete(self):
        self.cache.set('answer1', 37, version=1)
        self.cache.set('answer1', 42, version=2)
        self.cache.delete('answer1')
        self.assertEqual(self.cache.get('answer1', version=1), None)
        self.assertEqual(self.cache.get('answer1', version=2), 42)

        self.cache.set('answer2', 37, version=1)
        self.cache.set('answer2', 42, version=2)
        self.cache.delete('answer2', version=2)
        self.assertEqual(self.cache.get('answer2', version=1), 37)
        self.assertEqual(self.cache.get('answer2', version=2), None)

        self.cache.set('answer3', 37, version=1)
        self.cache.set('answer3', 42, version=2)
        self.v2_cache.delete('answer3')
        self.assertEqual(self.cache.get('answer3', version=1), 37)
        self.assertEqual(self.cache.get('answer3', version=2), None)

        self.cache.set('answer4', 37, version=1)
        self.cache.set('answer4', 42, version=2)
        self.v2_cache.delete('answer4', version=1)
        self.assertEqual(self.cache.get('answer4', version=1), None)
        self.assertEqual(self.cache.get('answer4', version=2), 42)

    def test_cache_versioning_incr_decr(self):
        self.cache.set('answer1', 37, version=1)
        self.cache.set('answer1', 42, version=2)
        self.cache.incr('answer1')
        self.assertEqual(self.cache.get('answer1', version=1), 38)
        self.assertEqual(self.cache.get('answer1', version=2), 42)
        self.cache.decr('answer1')
        self.assertEqual(self.cache.get('answer1', version=1), 37)
        self.assertEqual(self.cache.get('answer1', version=2), 42)

        self.cache.set('answer2', 37, version=1)
        self.cache.set('answer2', 42, version=2)
        self.cache.incr('answer2', version=2)
        self.assertEqual(self.cache.get('answer2', version=1), 37)
        self.assertEqual(self.cache.get('answer2', version=2), 43)
        self.cache.decr('answer2', version=2)
        self.assertEqual(self.cache.get('answer2', version=1), 37)
        self.assertEqual(self.cache.get('answer2', version=2), 42)

        self.cache.set('answer3', 37, version=1)
        self.cache.set('answer3', 42, version=2)
        self.v2_cache.incr('answer3')
        self.assertEqual(self.cache.get('answer3', version=1), 37)
        self.assertEqual(self.cache.get('answer3', version=2), 43)
        self.v2_cache.decr('answer3')
        self.assertEqual(self.cache.get('answer3', version=1), 37)
        self.assertEqual(self.cache.get('answer3', version=2), 42)

        self.cache.set('answer4', 37, version=1)
        self.cache.set('answer4', 42, version=2)
        self.v2_cache.incr('answer4', version=1)
        self.assertEqual(self.cache.get('answer4', version=1), 38)
        self.assertEqual(self.cache.get('answer4', version=2), 42)
        self.v2_cache.decr('answer4', version=1)
        self.assertEqual(self.cache.get('answer4', version=1), 37)
        self.assertEqual(self.cache.get('answer4', version=2), 42)

    def test_cache_versioning_get_set_many(self):
        # set, using default version = 1
        self.cache.set_many({'ford1': 37, 'arthur1': 42})
        self.assertEqual(self.cache.get_many(['ford1','arthur1']),
                         {'ford1': 37, 'arthur1': 42})
        self.assertEqual(self.cache.get_many(['ford1','arthur1'], version=1),
                         {'ford1': 37, 'arthur1': 42})
        self.assertEqual(self.cache.get_many(['ford1','arthur1'], version=2), {})

        self.assertEqual(self.v2_cache.get_many(['ford1','arthur1']), {})
        self.assertEqual(self.v2_cache.get_many(['ford1','arthur1'], version=1),
                         {'ford1': 37, 'arthur1': 42})
        self.assertEqual(self.v2_cache.get_many(['ford1','arthur1'], version=2), {})

        # set, default version = 1, but manually override version = 2
        self.cache.set_many({'ford2': 37, 'arthur2': 42}, version=2)
        self.assertEqual(self.cache.get_many(['ford2','arthur2']), {})
        self.assertEqual(self.cache.get_many(['ford2','arthur2'], version=1), {})
        self.assertEqual(self.cache.get_many(['ford2','arthur2'], version=2),
                         {'ford2': 37, 'arthur2': 42})

        self.assertEqual(self.v2_cache.get_many(['ford2','arthur2']),
                         {'ford2': 37, 'arthur2': 42})
        self.assertEqual(self.v2_cache.get_many(['ford2','arthur2'], version=1), {})
        self.assertEqual(self.v2_cache.get_many(['ford2','arthur2'], version=2),
                         {'ford2': 37, 'arthur2': 42})

        # v2 set, using default version = 2
        self.v2_cache.set_many({'ford3': 37, 'arthur3': 42})
        self.assertEqual(self.cache.get_many(['ford3','arthur3']), {})
        self.assertEqual(self.cache.get_many(['ford3','arthur3'], version=1), {})
        self.assertEqual(self.cache.get_many(['ford3','arthur3'], version=2),
                         {'ford3': 37, 'arthur3': 42})

        self.assertEqual(self.v2_cache.get_many(['ford3','arthur3']),
                         {'ford3': 37, 'arthur3': 42})
        self.assertEqual(self.v2_cache.get_many(['ford3','arthur3'], version=1), {})
        self.assertEqual(self.v2_cache.get_many(['ford3','arthur3'], version=2),
                         {'ford3': 37, 'arthur3': 42})

        # v2 set, default version = 2, but manually override version = 1
        self.v2_cache.set_many({'ford4': 37, 'arthur4': 42}, version=1)
        self.assertEqual(self.cache.get_many(['ford4','arthur4']),
                         {'ford4': 37, 'arthur4': 42})
        self.assertEqual(self.cache.get_many(['ford4','arthur4'], version=1),
                         {'ford4': 37, 'arthur4': 42})
        self.assertEqual(self.cache.get_many(['ford4','arthur4'], version=2), {})

        self.assertEqual(self.v2_cache.get_many(['ford4','arthur4']), {})
        self.assertEqual(self.v2_cache.get_many(['ford4','arthur4'], version=1),
                         {'ford4': 37, 'arthur4': 42})
        self.assertEqual(self.v2_cache.get_many(['ford4','arthur4'], version=2), {})

    def test_incr_version(self):
        self.cache.set('answer', 42, version=2)
        self.assertEqual(self.cache.get('answer'), None)
        self.assertEqual(self.cache.get('answer', version=1), None)
        self.assertEqual(self.cache.get('answer', version=2), 42)
        self.assertEqual(self.cache.get('answer', version=3), None)

        self.assertEqual(self.cache.incr_version('answer', version=2), 3)
        self.assertEqual(self.cache.get('answer'), None)
        self.assertEqual(self.cache.get('answer', version=1), None)
        self.assertEqual(self.cache.get('answer', version=2), None)
        self.assertEqual(self.cache.get('answer', version=3), 42)

        self.v2_cache.set('answer2', 42)
        self.assertEqual(self.v2_cache.get('answer2'), 42)
        self.assertEqual(self.v2_cache.get('answer2', version=1), None)
        self.assertEqual(self.v2_cache.get('answer2', version=2), 42)
        self.assertEqual(self.v2_cache.get('answer2', version=3), None)

        self.assertEqual(self.v2_cache.incr_version('answer2'), 3)
        self.assertEqual(self.v2_cache.get('answer2'), None)
        self.assertEqual(self.v2_cache.get('answer2', version=1), None)
        self.assertEqual(self.v2_cache.get('answer2', version=2), None)
        self.assertEqual(self.v2_cache.get('answer2', version=3), 42)

        self.assertRaises(ValueError, self.cache.incr_version, 'does_not_exist')

    def test_decr_version(self):
        self.cache.set('answer', 42, version=2)
        self.assertEqual(self.cache.get('answer'), None)
        self.assertEqual(self.cache.get('answer', version=1), None)
        self.assertEqual(self.cache.get('answer', version=2), 42)

        self.assertEqual(self.cache.decr_version('answer', version=2), 1)
        self.assertEqual(self.cache.get('answer'), 42)
        self.assertEqual(self.cache.get('answer', version=1), 42)
        self.assertEqual(self.cache.get('answer', version=2), None)

        self.v2_cache.set('answer2', 42)
        self.assertEqual(self.v2_cache.get('answer2'), 42)
        self.assertEqual(self.v2_cache.get('answer2', version=1), None)
        self.assertEqual(self.v2_cache.get('answer2', version=2), 42)

        self.assertEqual(self.v2_cache.decr_version('answer2'), 1)
        self.assertEqual(self.v2_cache.get('answer2'), None)
        self.assertEqual(self.v2_cache.get('answer2', version=1), 42)
        self.assertEqual(self.v2_cache.get('answer2', version=2), None)

        self.assertRaises(ValueError, self.cache.decr_version, 'does_not_exist', version=2)

    def test_custom_key_func(self):
        # Two caches with different key functions aren't visible to each other
        self.cache.set('answer1', 42)
        self.assertEqual(self.cache.get('answer1'), 42)
        self.assertEqual(self.custom_key_cache.get('answer1'), None)
        self.assertEqual(self.custom_key_cache2.get('answer1'), None)

        self.custom_key_cache.set('answer2', 42)
        self.assertEqual(self.cache.get('answer2'), None)
        self.assertEqual(self.custom_key_cache.get('answer2'), 42)
        self.assertEqual(self.custom_key_cache2.get('answer2'), 42)

    def test_cache_write_unpickable_object(self):
        update_middleware = UpdateCacheMiddleware()
        update_middleware.cache = self.cache

        fetch_middleware = FetchFromCacheMiddleware()
        fetch_middleware.cache = self.cache

        request = self._get_request_cache('/cache/test')
        get_cache_data = FetchFromCacheMiddleware().process_request(request)
        self.assertEqual(get_cache_data, None)

        response = HttpResponse()
        content = 'Testing cookie serialization.'
        response.content = content
        response.set_cookie('foo', 'bar')

        update_middleware.process_response(request, response)

        get_cache_data = fetch_middleware.process_request(request)
        self.assertNotEqual(get_cache_data, None)
        self.assertEqual(get_cache_data.content, content)
        self.assertEqual(get_cache_data.cookies, response.cookies)

        update_middleware.process_response(request, get_cache_data)
        get_cache_data = fetch_middleware.process_request(request)
        self.assertNotEqual(get_cache_data, None)
        self.assertEqual(get_cache_data.content, content)
        self.assertEqual(get_cache_data.cookies, response.cookies)


def custom_key_func(key, key_prefix, version):
    "A customized cache key function"
    return 'CUSTOM-' + '-'.join([key_prefix, str(version), key])
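# For example, with the default empty key prefix and version 1, the key
# 'answer2' is transformed into 'CUSTOM--1-answer2'.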


class DBCacheTests(BaseCacheTests, TransactionTestCase):
    backend_name = 'django.core.cache.backends.db.DatabaseCache'

    def setUp(self):
        # Spaces are used in the table name to ensure quoting/escaping is working
        self._table_name = 'test cache table'
        management.call_command('createcachetable', self._table_name, verbosity=0, interactive=False)
        self.cache = get_cache(self.backend_name, LOCATION=self._table_name, OPTIONS={'MAX_ENTRIES': 30})
        self.prefix_cache = get_cache(self.backend_name, LOCATION=self._table_name, KEY_PREFIX='cacheprefix')
        self.v2_cache = get_cache(self.backend_name, LOCATION=self._table_name, VERSION=2)
        self.custom_key_cache = get_cache(self.backend_name, LOCATION=self._table_name, KEY_FUNCTION=custom_key_func)
        self.custom_key_cache2 = get_cache(self.backend_name, LOCATION=self._table_name, KEY_FUNCTION='regressiontests.cache.tests.custom_key_func')

    def tearDown(self):
        from django.db import connection
        cursor = connection.cursor()
        cursor.execute('DROP TABLE %s' % connection.ops.quote_name(self._table_name))
        connection.commit()

    def test_cull(self):
        self.perform_cull_test(50, 29)

    def test_zero_cull(self):
        self.cache = get_cache(self.backend_name, LOCATION=self._table_name, OPTIONS={'MAX_ENTRIES': 30, 'CULL_FREQUENCY': 0})
        self.perform_cull_test(50, 18)

    def test_old_initialization(self):
        self.cache = get_cache('db://%s?max_entries=30&cull_frequency=0' % self._table_name)
        self.perform_cull_test(50, 18)

    def test_second_call_doesnt_crash(self):
        err = StringIO.StringIO()
        management.call_command('createcachetable', self._table_name, verbosity=0, interactive=False, stderr=err)
        self.assertTrue("Cache table 'test cache table' could not be created" in err.getvalue())


@override_settings(USE_TZ=True)
class DBCacheWithTimeZoneTests(DBCacheTests):
    pass


class DBCacheRouter(object):
    """A router that puts the cache table on the 'other' database."""

    def db_for_read(self, model, **hints):
        if model._meta.app_label == 'django_cache':
            return 'other'

    def db_for_write(self, model, **hints):
        if model._meta.app_label == 'django_cache':
            return 'other'

    def allow_syncdb(self, db, model):
        if model._meta.app_label == 'django_cache':
            return db == 'other'


class CreateCacheTableForDBCacheTests(TestCase):
    multi_db = True

    def test_createcachetable_observes_database_router(self):
        old_routers = router.routers
        try:
            router.routers = [DBCacheRouter()]
            # cache table should not be created on 'default'
            with self.assertNumQueries(0, using='default'):
                management.call_command('createcachetable', 'cache_table',
                    database='default',
                    verbosity=0, interactive=False)
            # cache table should be created on 'other'
            # one query is used to create the table and another one the index
            with self.assertNumQueries(2, using='other'):
                management.call_command('createcachetable', 'cache_table',
                    database='other',
                    verbosity=0, interactive=False)
        finally:
            router.routers = old_routers


class LocMemCacheTests(unittest.TestCase, BaseCacheTests):
    backend_name = 'django.core.cache.backends.locmem.LocMemCache'

    def setUp(self):
        self.cache = get_cache(self.backend_name, OPTIONS={'MAX_ENTRIES': 30})
        self.prefix_cache = get_cache(self.backend_name, KEY_PREFIX='cacheprefix')
        self.v2_cache = get_cache(self.backend_name, VERSION=2)
        self.custom_key_cache = get_cache(self.backend_name, OPTIONS={'MAX_ENTRIES': 30}, KEY_FUNCTION=custom_key_func)
        self.custom_key_cache2 = get_cache(self.backend_name, OPTIONS={'MAX_ENTRIES': 30}, KEY_FUNCTION='regressiontests.cache.tests.custom_key_func')

        # LocMem requires a hack to make the other caches
        # share a data store with the 'normal' cache.
        self.prefix_cache._cache = self.cache._cache
        self.prefix_cache._expire_info = self.cache._expire_info

        self.v2_cache._cache = self.cache._cache
        self.v2_cache._expire_info = self.cache._expire_info

        self.custom_key_cache._cache = self.cache._cache
        self.custom_key_cache._expire_info = self.cache._expire_info

        self.custom_key_cache2._cache = self.cache._cache
        self.custom_key_cache2._expire_info = self.cache._expire_info

    def tearDown(self):
        self.cache.clear()

    def test_cull(self):
        self.perform_cull_test(50, 29)

    def test_zero_cull(self):
        self.cache = get_cache(self.backend_name, OPTIONS={'MAX_ENTRIES': 30, 'CULL_FREQUENCY': 0})
        self.perform_cull_test(50, 19)

    def test_old_initialization(self):
        self.cache = get_cache('locmem://?max_entries=30&cull_frequency=0')
        self.perform_cull_test(50, 19)

    def test_multiple_caches(self):
        "Check that multiple locmem caches are isolated"
        mirror_cache = get_cache(self.backend_name)
        other_cache = get_cache(self.backend_name, LOCATION='other')

        self.cache.set('value1', 42)
        self.assertEqual(mirror_cache.get('value1'), 42)
        self.assertEqual(other_cache.get('value1'), None)

    def test_incr_decr_timeout(self):
        """incr/decr does not modify expiry time (matches memcached behavior)"""
        key = 'value'
        _key = self.cache.make_key(key)
        self.cache.set(key, 1, timeout=self.cache.default_timeout*10)
        expire = self.cache._expire_info[_key]
        self.cache.incr(key)
        self.assertEqual(expire, self.cache._expire_info[_key])
        self.cache.decr(key)
        self.assertEqual(expire, self.cache._expire_info[_key])


# memcached backend isn't guaranteed to be available.
# To check the memcached backend, the test settings file will
# need to contain a cache backend setting that points at
# your memcache server.
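# A minimal sketch of such a setting (the LOCATION value is only an example;
# point it at your own memcached instance):
#
#   CACHES = {
#       'default': {
#           'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
#           'LOCATION': '127.0.0.1:11211',
#       }
#   }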
|
2012-03-30 17:08:29 +08:00
|
|
|
@unittest.skipUnless(
|
|
|
|
settings.CACHES[DEFAULT_CACHE_ALIAS]['BACKEND'].startswith('django.core.cache.backends.memcached.'),
|
|
|
|
"memcached not available")
|
2010-11-02 13:55:08 +08:00
|
|
|
class MemcachedCacheTests(unittest.TestCase, BaseCacheTests):
|
2011-04-03 06:13:21 +08:00
|
|
|
backend_name = 'django.core.cache.backends.memcached.MemcachedCache'
|
|
|
|
|
2010-11-02 13:55:08 +08:00
|
|
|
def setUp(self):
|
2010-12-21 23:19:19 +08:00
|
|
|
name = settings.CACHES[DEFAULT_CACHE_ALIAS]['LOCATION']
|
2011-04-03 06:13:21 +08:00
|
|
|
self.cache = get_cache(self.backend_name, LOCATION=name)
|
|
|
|
self.prefix_cache = get_cache(self.backend_name, LOCATION=name, KEY_PREFIX='cacheprefix')
|
|
|
|
self.v2_cache = get_cache(self.backend_name, LOCATION=name, VERSION=2)
|
|
|
|
self.custom_key_cache = get_cache(self.backend_name, LOCATION=name, KEY_FUNCTION=custom_key_func)
|
|
|
|
self.custom_key_cache2 = get_cache(self.backend_name, LOCATION=name, KEY_FUNCTION='regressiontests.cache.tests.custom_key_func')
|
2010-11-19 23:39:35 +08:00
|
|
|
|
|
|
|
def tearDown(self):
|
|
|
|
self.cache.clear()
|
2010-11-02 13:55:08 +08:00
|
|
|
|
|
|
|
def test_invalid_keys(self):
|
|
|
|
"""
|
|
|
|
On memcached, we don't introduce a duplicate key validation
|
|
|
|
step (for speed reasons), we just let the memcached API
|
|
|
|
library raise its own exception on bad keys. Refs #6447.
|
|
|
|
|
|
|
|
In order to be memcached-API-library agnostic, we only assert
|
|
|
|
that a generic exception of some kind is raised.
|
2010-09-13 02:45:26 +08:00
|
|
|
|
2010-11-02 13:55:08 +08:00
|
|
|
"""
|
|
|
|
# memcached does not allow whitespace or control characters in keys
|
|
|
|
self.assertRaises(Exception, self.cache.set, 'key with spaces', 'value')
|
|
|
|
# memcached limits key length to 250
|
|
|
|
self.assertRaises(Exception, self.cache.set, 'a' * 251, 'value')
|
2010-11-19 23:39:35 +08:00
|
|
|
|
2011-10-27 04:55:36 +08:00
|
|
|
|
2009-03-11 21:27:03 +08:00
|
|
|
class FileBasedCacheTests(unittest.TestCase, BaseCacheTests):
|
2007-12-05 02:03:56 +08:00
|
|
|
"""
|
|
|
|
Specific test cases for the file-based cache.
|
|
|
|
"""
|
2011-04-03 06:13:21 +08:00
|
|
|
backend_name = 'django.core.cache.backends.filebased.FileBasedCache'
|
|
|
|
|
2007-12-05 02:03:56 +08:00
|
|
|
def setUp(self):
|
2008-10-11 05:41:12 +08:00
|
|
|
self.dirname = tempfile.mkdtemp()
|
2011-04-03 06:13:21 +08:00
|
|
|
self.cache = get_cache(self.backend_name, LOCATION=self.dirname, OPTIONS={'MAX_ENTRIES': 30})
|
|
|
|
self.prefix_cache = get_cache(self.backend_name, LOCATION=self.dirname, KEY_PREFIX='cacheprefix')
|
|
|
|
self.v2_cache = get_cache(self.backend_name, LOCATION=self.dirname, VERSION=2)
|
|
|
|
self.custom_key_cache = get_cache(self.backend_name, LOCATION=self.dirname, KEY_FUNCTION=custom_key_func)
|
|
|
|
self.custom_key_cache2 = get_cache(self.backend_name, LOCATION=self.dirname, KEY_FUNCTION='regressiontests.cache.tests.custom_key_func')
|
2010-11-19 23:39:35 +08:00
|
|
|
|
|
|
|
def tearDown(self):
|
|
|
|
self.cache.clear()
|
2008-08-02 13:56:57 +08:00
|
|
|
|
2007-12-05 02:03:56 +08:00
|
|
|
def test_hashing(self):
|
|
|
|
"""Test that keys are hashed into subdirectories correctly"""
|
|
|
|
self.cache.set("foo", "bar")
|
2010-11-19 23:39:35 +08:00
|
|
|
key = self.cache.make_key("foo")
|
2011-03-28 10:11:19 +08:00
|
|
|
keyhash = hashlib.md5(key).hexdigest()
|
2007-12-05 02:03:56 +08:00
|
|
|
keypath = os.path.join(self.dirname, keyhash[:2], keyhash[2:4], keyhash[4:])
|
2011-03-03 23:04:39 +08:00
|
|
|
self.assertTrue(os.path.exists(keypath))
|
2008-08-02 13:56:57 +08:00
|
|
|
|
2007-12-05 02:03:56 +08:00
|
|
|
def test_subdirectory_removal(self):
|
|
|
|
"""
|
|
|
|
Make sure that the created subdirectories are correctly removed when empty.
|
|
|
|
"""
|
|
|
|
self.cache.set("foo", "bar")
|
2010-11-19 23:39:35 +08:00
|
|
|
key = self.cache.make_key("foo")
|
2011-03-28 10:11:19 +08:00
|
|
|
keyhash = hashlib.md5(key).hexdigest()
|
2007-12-05 02:03:56 +08:00
|
|
|
keypath = os.path.join(self.dirname, keyhash[:2], keyhash[2:4], keyhash[4:])
|
2011-03-03 23:04:39 +08:00
|
|
|
self.assertTrue(os.path.exists(keypath))
|
2007-12-05 02:03:56 +08:00
|
|
|
|
|
|
|
self.cache.delete("foo")
|
2011-03-03 23:04:39 +08:00
|
|
|
self.assertTrue(not os.path.exists(keypath))
|
|
|
|
self.assertTrue(not os.path.exists(os.path.dirname(keypath)))
|
|
|
|
self.assertTrue(not os.path.exists(os.path.dirname(os.path.dirname(keypath))))
|
2007-11-19 11:12:19 +08:00
|
|
|
|
2010-08-31 08:44:46 +08:00
|
|
|
def test_cull(self):
|
2010-11-19 23:39:35 +08:00
|
|
|
self.perform_cull_test(50, 29)
|
2010-08-31 08:44:46 +08:00
|
|
|
|
2010-12-21 23:19:19 +08:00
|
|
|
def test_old_initialization(self):
|
|
|
|
self.cache = get_cache('file://%s?max_entries=30' % self.dirname)
|
|
|
|
self.perform_cull_test(50, 29)
|
|
|
|
|
2011-10-27 04:55:36 +08:00
|
|
|
|
2010-09-13 02:45:26 +08:00
|
|
|
class CustomCacheKeyValidationTests(unittest.TestCase):
|
|
|
|
"""
|
|
|
|
Tests for the ability to mixin a custom ``validate_key`` method to
|
|
|
|
a custom cache backend that otherwise inherits from a builtin
|
|
|
|
backend, and override the default key validation. Refs #6447.
|
|
|
|
|
|
|
|
"""
|
|
|
|
def test_custom_key_validation(self):
|
|
|
|
cache = get_cache('regressiontests.cache.liberal_backend://')
|
|
|
|
|
|
|
|
# this key is both longer than 250 characters, and has spaces
|
|
|
|
key = 'some key with spaces' * 15
|
|
|
|
val = 'a value'
|
|
|
|
cache.set(key, val)
|
|
|
|
self.assertEqual(cache.get(key), val)
|
|
|
|
|
2011-07-05 17:10:58 +08:00
|
|
|
|
|
|
|
class GetCacheTests(unittest.TestCase):
|
|
|
|
|
|
|
|
def test_simple(self):
|
|
|
|
cache = get_cache('locmem://')
|
|
|
|
from django.core.cache.backends.locmem import LocMemCache
|
|
|
|
self.assertTrue(isinstance(cache, LocMemCache))
|
|
|
|
|
|
|
|
from django.core.cache import cache
|
|
|
|
self.assertTrue(isinstance(cache, get_cache('default').__class__))
|
|
|
|
|
|
|
|
cache = get_cache(
|
|
|
|
'django.core.cache.backends.dummy.DummyCache', **{'TIMEOUT': 120})
|
|
|
|
self.assertEqual(cache.default_timeout, 120)
|
|
|
|
|
|
|
|
self.assertRaises(InvalidCacheBackendError, get_cache, 'does_not_exist')
|
|
|
|
|
2012-02-10 02:58:25 +08:00
|
|
|
def test_close(self):
|
|
|
|
from django.core import signals
|
|
|
|
cache = get_cache('regressiontests.cache.closeable_cache.CacheClass')
|
|
|
|
self.assertFalse(cache.closed)
|
|
|
|
signals.request_finished.send(self.__class__)
|
|
|
|
self.assertTrue(cache.closed)
|
|
|
|
|
2011-10-27 04:55:36 +08:00
|
|
|
|
2012-03-30 17:08:29 +08:00
|
|
|
@override_settings(
|
|
|
|
CACHE_MIDDLEWARE_KEY_PREFIX='settingsprefix',
|
|
|
|
CACHE_MIDDLEWARE_SECONDS=1,
|
|
|
|
CACHES={
|
|
|
|
'default': {
|
|
|
|
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
|
|
|
|
},
|
|
|
|
},
|
|
|
|
USE_I18N=False,
|
|
|
|
)
|
2011-10-27 06:47:04 +08:00
|
|
|
class CacheUtils(TestCase):
|
2007-11-19 11:12:19 +08:00
|
|
|
"""TestCase for django.utils.cache functions."""
|
2008-08-02 13:56:57 +08:00
|
|
|
|
2009-04-02 02:19:32 +08:00
|
|
|
def setUp(self):
|
|
|
|
self.path = '/cache/test/'
|
2011-10-27 06:47:04 +08:00
|
|
|
self.cache = get_cache('default')
|
|
|
|
|
|
|
|
def tearDown(self):
|
|
|
|
self.cache.clear()
|
2009-04-02 02:19:32 +08:00
|
|
|
|
2010-10-29 09:31:15 +08:00
|
|
|
def _get_request(self, path, method='GET'):
|
2009-04-02 02:19:32 +08:00
|
|
|
request = HttpRequest()
|
|
|
|
request.META = {
|
|
|
|
'SERVER_NAME': 'testserver',
|
|
|
|
'SERVER_PORT': 80,
|
|
|
|
}
|
2010-10-29 09:31:15 +08:00
|
|
|
request.method = method
|
2009-04-02 02:19:32 +08:00
|
|
|
request.path = request.path_info = "/cache/%s" % path
|
|
|
|
return request
|
|
|
|
|
2007-11-19 11:12:19 +08:00
|
|
|
def test_patch_vary_headers(self):
|
2008-08-02 13:56:57 +08:00
|
|
|
headers = (
|
2007-11-19 11:12:19 +08:00
|
|
|
# Initial vary, new headers, resulting vary.
|
|
|
|
(None, ('Accept-Encoding',), 'Accept-Encoding'),
|
|
|
|
('Accept-Encoding', ('accept-encoding',), 'Accept-Encoding'),
|
|
|
|
('Accept-Encoding', ('ACCEPT-ENCODING',), 'Accept-Encoding'),
|
|
|
|
('Cookie', ('Accept-Encoding',), 'Cookie, Accept-Encoding'),
|
|
|
|
('Cookie, Accept-Encoding', ('Accept-Encoding',), 'Cookie, Accept-Encoding'),
|
|
|
|
('Cookie, Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'),
|
|
|
|
(None, ('Accept-Encoding', 'COOKIE'), 'Accept-Encoding, COOKIE'),
|
|
|
|
('Cookie, Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'),
|
|
|
|
('Cookie , Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'),
|
|
|
|
)
|
|
|
|
for initial_vary, newheaders, resulting_vary in headers:
|
|
|
|
response = HttpResponse()
|
|
|
|
if initial_vary is not None:
|
|
|
|
response['Vary'] = initial_vary
|
|
|
|
patch_vary_headers(response, newheaders)
|
|
|
|
self.assertEqual(response['Vary'], resulting_vary)
|
|
|
|
|
2009-04-02 02:19:32 +08:00
|
|
|
    def test_get_cache_key(self):
        request = self._get_request(self.path)
        response = HttpResponse()
        key_prefix = 'localprefix'
        # Expect None if no headers have been set yet.
        self.assertEqual(get_cache_key(request), None)
        # Set headers to an empty list.
        learn_cache_key(request, response)
        self.assertEqual(get_cache_key(request), 'views.decorators.cache.cache_page.settingsprefix.GET.a8c87a3d8c44853d7f79474f7ffe4ad5.d41d8cd98f00b204e9800998ecf8427e')
        # Verify that a specified key_prefix is taken into account.
        learn_cache_key(request, response, key_prefix=key_prefix)
        self.assertEqual(get_cache_key(request, key_prefix=key_prefix), 'views.decorators.cache.cache_page.localprefix.GET.a8c87a3d8c44853d7f79474f7ffe4ad5.d41d8cd98f00b204e9800998ecf8427e')

    def test_get_cache_key_with_query(self):
        request = self._get_request(self.path + '?test=1')
        response = HttpResponse()
        # Expect None if no headers have been set yet.
        self.assertEqual(get_cache_key(request), None)
        # Set headers to an empty list.
        learn_cache_key(request, response)
        # Verify that the querystring is taken into account.
        self.assertEqual(get_cache_key(request), 'views.decorators.cache.cache_page.settingsprefix.GET.bd889c5a59603af44333ed21504db3cd.d41d8cd98f00b204e9800998ecf8427e')

    def test_learn_cache_key(self):
        request = self._get_request(self.path, 'HEAD')
        response = HttpResponse()
        response['Vary'] = 'Pony'
        # Make sure that the Vary header is added to the key hash
        learn_cache_key(request, response)
        self.assertEqual(get_cache_key(request), 'views.decorators.cache.cache_page.settingsprefix.GET.a8c87a3d8c44853d7f79474f7ffe4ad5.d41d8cd98f00b204e9800998ecf8427e')

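    # patch_cache_control() merges the given keyword arguments into any existing
    # Cache-Control header; the table below checks in particular that the
    # "private" and "public" directives are treated as mutually exclusive.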
    def test_patch_cache_control(self):
        tests = (
            # Initial Cache-Control, kwargs to patch_cache_control, expected Cache-Control parts
            (None, {'private' : True}, set(['private'])),

            # Test whether private/public attributes are mutually exclusive
            ('private', {'private' : True}, set(['private'])),
            ('private', {'public' : True}, set(['public'])),
            ('public', {'public' : True}, set(['public'])),
            ('public', {'private' : True}, set(['private'])),
            ('must-revalidate,max-age=60,private', {'public' : True}, set(['must-revalidate', 'max-age=60', 'public'])),
            ('must-revalidate,max-age=60,public', {'private' : True}, set(['must-revalidate', 'max-age=60', 'private'])),
            ('must-revalidate,max-age=60', {'public' : True}, set(['must-revalidate', 'max-age=60', 'public'])),
        )

        cc_delim_re = re.compile(r'\s*,\s*')

        for initial_cc, newheaders, expected_cc in tests:
            response = HttpResponse()
            if initial_cc is not None:
                response['Cache-Control'] = initial_cc
            patch_cache_control(response, **newheaders)
            parts = set(cc_delim_re.split(response['Cache-Control']))
            self.assertEqual(parts, expected_cc)


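# Run the CacheUtils tests again with a KEY_PREFIX configured on the default
# cache backend, so the same assertions are exercised with prefixed keys.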
@override_settings(
    CACHES={
        'default': {
            'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
            'KEY_PREFIX': 'cacheprefix',
        },
    },
)
class PrefixedCacheUtils(CacheUtils):
    pass


@override_settings(
    CACHE_MIDDLEWARE_SECONDS=60,
    CACHE_MIDDLEWARE_KEY_PREFIX='test',
    CACHES={
        'default': {
            'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
        },
    },
)
class CacheHEADTest(TestCase):

    def setUp(self):
        self.path = '/cache/test/'
        self.cache = get_cache('default')

    def tearDown(self):
        self.cache.clear()

    def _get_request(self, method):
        request = HttpRequest()
        request.META = {
            'SERVER_NAME': 'testserver',
            'SERVER_PORT': 80,
        }
        request.method = method
        request.path = request.path_info = self.path
        return request

    def _get_request_cache(self, method):
        request = self._get_request(method)
        request._cache_update_cache = True
        return request

    def _set_cache(self, request, msg):
        response = HttpResponse()
        response.content = msg
        return UpdateCacheMiddleware().process_response(request, response)

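    # The cache middleware treats HEAD like GET: a response stored for either
    # method should be served again for a subsequent HEAD request.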
    def test_head_caches_correctly(self):
        test_content = 'test content'

        request = self._get_request_cache('HEAD')
        self._set_cache(request, test_content)

        request = self._get_request('HEAD')
        get_cache_data = FetchFromCacheMiddleware().process_request(request)
        self.assertNotEqual(get_cache_data, None)
        self.assertEqual(test_content, get_cache_data.content)

    def test_head_with_cached_get(self):
        test_content = 'test content'

        request = self._get_request_cache('GET')
        self._set_cache(request, test_content)

        request = self._get_request('HEAD')
        get_cache_data = FetchFromCacheMiddleware().process_request(request)
        self.assertNotEqual(get_cache_data, None)
        self.assertEqual(test_content, get_cache_data.content)


@override_settings(
    CACHE_MIDDLEWARE_KEY_PREFIX='settingsprefix',
    CACHES={
        'default': {
            'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
        },
    },
    LANGUAGES=(
        ('en', 'English'),
        ('es', 'Spanish'),
    ),
)
class CacheI18nTest(TestCase):

    def setUp(self):
        self.path = '/cache/test/'
        self.cache = get_cache('default')

    def tearDown(self):
        self.cache.clear()

    def _get_request(self, method='GET'):
        request = HttpRequest()
        request.META = {
            'SERVER_NAME': 'testserver',
            'SERVER_PORT': 80,
        }
        request.method = method
        request.path = request.path_info = self.path
        return request

    def _get_request_cache(self, query_string=None):
        request = HttpRequest()
        request.META = {
            'SERVER_NAME': 'testserver',
            'SERVER_PORT': 80,
        }
        if query_string:
            request.META['QUERY_STRING'] = query_string
            request.GET = QueryDict(query_string)
        request.path = request.path_info = self.path
        request._cache_update_cache = True
        request.method = 'GET'
        request.session = {}
        return request

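    # When USE_I18N or USE_L10N is enabled, learn_cache_key()/get_cache_key()
    # include the active language in the key; with USE_TZ the current time zone
    # name is included as well, so different locales get separate cache entries.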
    @override_settings(USE_I18N=True, USE_L10N=False, USE_TZ=False)
    def test_cache_key_i18n_translation(self):
        request = self._get_request()
        lang = translation.get_language()
        response = HttpResponse()
        key = learn_cache_key(request, response)
        self.assertIn(lang, key, "Cache keys should include the language name when translation is active")
        key2 = get_cache_key(request)
        self.assertEqual(key, key2)

    @override_settings(USE_I18N=False, USE_L10N=True, USE_TZ=False)
    def test_cache_key_i18n_formatting(self):
        request = self._get_request()
        lang = translation.get_language()
        response = HttpResponse()
        key = learn_cache_key(request, response)
        self.assertIn(lang, key, "Cache keys should include the language name when formatting is active")
        key2 = get_cache_key(request)
        self.assertEqual(key, key2)

    @override_settings(USE_I18N=False, USE_L10N=False, USE_TZ=True)
    def test_cache_key_i18n_timezone(self):
        request = self._get_request()
        # This is tightly coupled to the implementation,
        # but it's the most straightforward way to test the key.
        tz = force_unicode(timezone.get_current_timezone_name(), errors='ignore')
        tz = tz.encode('ascii', 'ignore').replace(' ', '_')
        response = HttpResponse()
        key = learn_cache_key(request, response)
        self.assertIn(tz, key, "Cache keys should include the time zone name when time zones are active")
        key2 = get_cache_key(request)
        self.assertEqual(key, key2)

    @override_settings(USE_I18N=False, USE_L10N=False)
    def test_cache_key_no_i18n(self):
        request = self._get_request()
        lang = translation.get_language()
        tz = force_unicode(timezone.get_current_timezone_name(), errors='ignore')
        tz = tz.encode('ascii', 'ignore').replace(' ', '_')
        response = HttpResponse()
        key = learn_cache_key(request, response)
        self.assertNotIn(lang, key, "Cache keys shouldn't include the language name when i18n isn't active")
        self.assertNotIn(tz, key, "Cache keys shouldn't include the time zone name when i18n isn't active")

    @override_settings(USE_I18N=False, USE_L10N=False, USE_TZ=True)
    def test_cache_key_with_non_ascii_tzname(self):
        # Regression test for #17476
        class CustomTzName(timezone.UTC):
            name = ''
            def tzname(self, dt):
                return self.name

        request = self._get_request()
        response = HttpResponse()
        with timezone.override(CustomTzName()):
            CustomTzName.name = 'Hora estándar de Argentina' # UTF-8 string
            sanitized_name = 'Hora_estndar_de_Argentina'
            self.assertIn(sanitized_name, learn_cache_key(request, response),
                          "Cache keys should include the time zone name when time zones are active")

            CustomTzName.name = u'Hora estándar de Argentina' # unicode
            sanitized_name = 'Hora_estndar_de_Argentina'
            self.assertIn(sanitized_name, learn_cache_key(request, response),
                          "Cache keys should include the time zone name when time zones are active")

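    # End-to-end check of UpdateCacheMiddleware/FetchFromCacheMiddleware: the
    # query string, the USE_ETAGS setting and the active language must all be
    # reflected in what the cache stores and returns.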
    @override_settings(
        CACHE_MIDDLEWARE_KEY_PREFIX="test",
        CACHE_MIDDLEWARE_SECONDS=60,
        USE_ETAGS=True,
        USE_I18N=True,
    )
    def test_middleware(self):
        def set_cache(request, lang, msg):
            translation.activate(lang)
            response = HttpResponse()
            response.content = msg
            return UpdateCacheMiddleware().process_response(request, response)

        # cache with a non-empty request.GET
        request = self._get_request_cache(query_string='foo=bar&other=true')
        get_cache_data = FetchFromCacheMiddleware().process_request(request)
        # first access, cache must return None
        self.assertEqual(get_cache_data, None)
        response = HttpResponse()
        content = 'Check for cache with QUERY_STRING'
        response.content = content
        UpdateCacheMiddleware().process_response(request, response)
        get_cache_data = FetchFromCacheMiddleware().process_request(request)
        # cache must return content
        self.assertNotEqual(get_cache_data, None)
        self.assertEqual(get_cache_data.content, content)
        # different QUERY_STRING, cache must be empty
        request = self._get_request_cache(query_string='foo=bar&somethingelse=true')
        get_cache_data = FetchFromCacheMiddleware().process_request(request)
        self.assertEqual(get_cache_data, None)

        # i18n tests
        en_message = "Hello world!"
        es_message = "Hola mundo!"

        request = self._get_request_cache()
        set_cache(request, 'en', en_message)
        get_cache_data = FetchFromCacheMiddleware().process_request(request)
        # Check that we can recover the cached content
        self.assertNotEqual(get_cache_data, None)
        self.assertEqual(get_cache_data.content, en_message)
        # Check that we use etags
        self.assertTrue(get_cache_data.has_header('ETag'))
        # Check that we can disable etags
        with self.settings(USE_ETAGS=False):
            request._cache_update_cache = True
            set_cache(request, 'en', en_message)
            get_cache_data = FetchFromCacheMiddleware().process_request(request)
            self.assertFalse(get_cache_data.has_header('ETag'))
        # change the session language and set content
        request = self._get_request_cache()
        set_cache(request, 'es', es_message)
        # change the language again
        translation.activate('en')
        # retrieve the content from cache
        get_cache_data = FetchFromCacheMiddleware().process_request(request)
        self.assertEqual(get_cache_data.content, en_message)
        # change the language again
        translation.activate('es')
        get_cache_data = FetchFromCacheMiddleware().process_request(request)
        self.assertEqual(get_cache_data.content, es_message)
        # reset the language
        translation.deactivate()


@override_settings(
    CACHES={
        'default': {
            'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
            'KEY_PREFIX': 'cacheprefix',
        },
    },
)
class PrefixedCacheI18nTest(CacheI18nTest):
    pass


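# Simple view used as the caching target by the middleware and decorator tests
# below, e.g. cache_page(hello_world_view)(request, '1').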
def hello_world_view(request, value):
    return HttpResponse('Hello World %s' % value)


@override_settings(
    CACHE_MIDDLEWARE_ALIAS='other',
    CACHE_MIDDLEWARE_KEY_PREFIX='middlewareprefix',
    CACHE_MIDDLEWARE_SECONDS=30,
    CACHE_MIDDLEWARE_ANONYMOUS_ONLY=False,
    CACHES={
        'default': {
            'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
        },
        'other': {
            'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
            'LOCATION': 'other',
            'TIMEOUT': '1',
        },
    },
)
class CacheMiddlewareTest(TestCase):

    # The following tests will need to be modified in Django 1.6 to stop using
    # deprecated ways of calling the cache_page decorator that will be removed
    # in that version.

    def setUp(self):
        self.factory = RequestFactory()
        self.default_cache = get_cache('default')
        self.other_cache = get_cache('other')
        self.save_warnings_state()
        warnings.filterwarnings('ignore', category=DeprecationWarning,
                                module='django.views.decorators.cache')

    def tearDown(self):
        self.restore_warnings_state()
        self.default_cache.clear()
        self.other_cache.clear()

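    # When instantiated with no arguments (middleware usage), CacheMiddleware
    # reads the CACHE_MIDDLEWARE_* settings; when cache_page passes arguments
    # (decorator usage), those explicit values take precedence.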
    def test_constructor(self):
        """
        Ensure the constructor correctly distinguishes between usage of
        CacheMiddleware as middleware and usage of CacheMiddleware as a view
        decorator, and sets attributes appropriately.
        """
        # If no arguments are passed in construction, it's being used as middleware.
        middleware = CacheMiddleware()

        # Now test object attributes against values defined in setUp above
        self.assertEqual(middleware.cache_timeout, 30)
        self.assertEqual(middleware.key_prefix, 'middlewareprefix')
        self.assertEqual(middleware.cache_alias, 'other')
        self.assertEqual(middleware.cache_anonymous_only, False)

        # If arguments are being passed in construction, it's being used as a decorator.
        # First, test with "defaults":
        as_view_decorator = CacheMiddleware(cache_alias=None, key_prefix=None)

        self.assertEqual(as_view_decorator.cache_timeout, 300) # Timeout value for 'default' cache, i.e. 300
        self.assertEqual(as_view_decorator.key_prefix, '')
        self.assertEqual(as_view_decorator.cache_alias, 'default') # Value of DEFAULT_CACHE_ALIAS from django.core.cache
        self.assertEqual(as_view_decorator.cache_anonymous_only, False)

        # Next, test with custom values:
        as_view_decorator_with_custom = CacheMiddleware(cache_anonymous_only=True, cache_timeout=60, cache_alias='other', key_prefix='foo')

        self.assertEqual(as_view_decorator_with_custom.cache_timeout, 60)
        self.assertEqual(as_view_decorator_with_custom.key_prefix, 'foo')
        self.assertEqual(as_view_decorator_with_custom.cache_alias, 'other')
        self.assertEqual(as_view_decorator_with_custom.cache_anonymous_only, True)

    def test_middleware(self):
        middleware = CacheMiddleware()
        prefix_middleware = CacheMiddleware(key_prefix='prefix1')
        timeout_middleware = CacheMiddleware(cache_timeout=1)

        request = self.factory.get('/view/')

        # Put the request through the request middleware
        result = middleware.process_request(request)
        self.assertEqual(result, None)

        response = hello_world_view(request, '1')

        # Now put the response through the response middleware
        response = middleware.process_response(request, response)

        # Repeating the request should result in a cache hit
        result = middleware.process_request(request)
        self.assertNotEqual(result, None)
        self.assertEqual(result.content, 'Hello World 1')

        # The same request through a different middleware won't hit
        result = prefix_middleware.process_request(request)
        self.assertEqual(result, None)

        # The same request with a timeout _will_ hit
        result = timeout_middleware.process_request(request)
        self.assertNotEqual(result, None)
        self.assertEqual(result.content, 'Hello World 1')

    @override_settings(CACHE_MIDDLEWARE_ANONYMOUS_ONLY=True)
    def test_cache_middleware_anonymous_only_wont_cause_session_access(self):
        """
        The cache middleware shouldn't cause a session access due to
        CACHE_MIDDLEWARE_ANONYMOUS_ONLY if nothing else has accessed the
        session. Refs #13283.
        """
        from django.contrib.sessions.middleware import SessionMiddleware
        from django.contrib.auth.middleware import AuthenticationMiddleware

        middleware = CacheMiddleware()
        session_middleware = SessionMiddleware()
        auth_middleware = AuthenticationMiddleware()

        request = self.factory.get('/view_anon/')

        # Put the request through the request middleware
        session_middleware.process_request(request)
        auth_middleware.process_request(request)
        result = middleware.process_request(request)
        self.assertEqual(result, None)

        response = hello_world_view(request, '1')

        # Now put the response through the response middleware
        session_middleware.process_response(request, response)
        response = middleware.process_response(request, response)

        self.assertEqual(request.session.accessed, False)

    @override_settings(CACHE_MIDDLEWARE_ANONYMOUS_ONLY=True)
    def test_cache_middleware_anonymous_only_with_cache_page(self):
        """CACHE_MIDDLEWARE_ANONYMOUS_ONLY should still be effective when used
        with the cache_page decorator: the response to a request from an
        authenticated user should not be cached."""

        request = self.factory.get('/view_anon/')

        class MockAuthenticatedUser(object):
            def is_authenticated(self):
                return True

        class MockAccessedSession(object):
            accessed = True

        request.user = MockAuthenticatedUser()
        request.session = MockAccessedSession()

        response = cache_page(hello_world_view)(request, '1')

        self.assertFalse("Cache-Control" in response)

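    # cache_page() accepts an optional timeout as well as `cache` (an alias from
    # the CACHES setting) and `key_prefix` keyword arguments; the combinations
    # below check that each choice selects a distinct cache entry.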
    def test_view_decorator(self):
        # decorate the same view with different cache decorators
        default_view = cache_page(hello_world_view)
        default_with_prefix_view = cache_page(key_prefix='prefix1')(hello_world_view)

        explicit_default_view = cache_page(cache='default')(hello_world_view)
        explicit_default_with_prefix_view = cache_page(cache='default', key_prefix='prefix1')(hello_world_view)

        other_view = cache_page(cache='other')(hello_world_view)
        other_with_prefix_view = cache_page(cache='other', key_prefix='prefix2')(hello_world_view)
        other_with_timeout_view = cache_page(3, cache='other', key_prefix='prefix3')(hello_world_view)

        request = self.factory.get('/view/')

        # Request the view once
        response = default_view(request, '1')
        self.assertEqual(response.content, 'Hello World 1')

        # Request again -- hit the cache
        response = default_view(request, '2')
        self.assertEqual(response.content, 'Hello World 1')

        # Requesting the same view with the explicit cache should yield the same result
        response = explicit_default_view(request, '3')
        self.assertEqual(response.content, 'Hello World 1')

        # Requesting with a prefix will hit a different cache key
        response = explicit_default_with_prefix_view(request, '4')
        self.assertEqual(response.content, 'Hello World 4')

        # Hitting the same view again gives a cache hit
        response = explicit_default_with_prefix_view(request, '5')
        self.assertEqual(response.content, 'Hello World 4')

        # And going back to the implicit cache will hit the same cache
        response = default_with_prefix_view(request, '6')
        self.assertEqual(response.content, 'Hello World 4')

        # Requesting from an alternate cache won't hit cache
        response = other_view(request, '7')
        self.assertEqual(response.content, 'Hello World 7')

        # But a repeated hit will hit cache
        response = other_view(request, '8')
        self.assertEqual(response.content, 'Hello World 7')

        # And prefixing the alternate cache yields yet another cache entry
        response = other_with_prefix_view(request, '9')
        self.assertEqual(response.content, 'Hello World 9')

        # Request from the alternate cache with a new prefix and a custom timeout
        response = other_with_timeout_view(request, '10')
        self.assertEqual(response.content, 'Hello World 10')

        # But if we wait a couple of seconds...
        time.sleep(2)

        # ... the default cache will still hit
        cache = get_cache('default')
        response = default_view(request, '11')
        self.assertEqual(response.content, 'Hello World 1')

        # ... the default cache with a prefix will still hit
        response = default_with_prefix_view(request, '12')
        self.assertEqual(response.content, 'Hello World 4')

        # ... the explicit default cache will still hit
        response = explicit_default_view(request, '13')
        self.assertEqual(response.content, 'Hello World 1')

        # ... the explicit default cache with a prefix will still hit
        response = explicit_default_with_prefix_view(request, '14')
        self.assertEqual(response.content, 'Hello World 4')

        # ... but a rapidly expiring cache won't hit
        response = other_view(request, '15')
        self.assertEqual(response.content, 'Hello World 15')

        # ... even if it has a prefix
        response = other_with_prefix_view(request, '16')
        self.assertEqual(response.content, 'Hello World 16')

        # ... but a view with a custom timeout will still hit
        response = other_with_timeout_view(request, '17')
        self.assertEqual(response.content, 'Hello World 10')

        # And if we wait a few more seconds
        time.sleep(2)

        # ... the custom timeout cache will miss
        response = other_with_timeout_view(request, '18')
        self.assertEqual(response.content, 'Hello World 18')


@override_settings(
    CACHE_MIDDLEWARE_KEY_PREFIX='settingsprefix',
    CACHE_MIDDLEWARE_SECONDS=1,
    CACHES={
        'default': {
            'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
        },
    },
    USE_I18N=False,
)
class TestWithTemplateResponse(TestCase):
    """
    Tests various headers with TemplateResponse.

    Most of these are probably redundant since they manipulate the same
    object anyway, but the ETag header is special because it relies on the
    content being complete (which is not necessarily always the case with a
    TemplateResponse).
    """
    def setUp(self):
        self.path = '/cache/test/'
        self.cache = get_cache('default')

    def tearDown(self):
        self.cache.clear()

    def _get_request(self, path, method='GET'):
        request = HttpRequest()
        request.META = {
            'SERVER_NAME': 'testserver',
            'SERVER_PORT': 80,
        }
        request.method = method
        request.path = request.path_info = "/cache/%s" % path
        return request

    def test_patch_vary_headers(self):
        headers = (
            # Initial vary, new headers, resulting vary.
            (None, ('Accept-Encoding',), 'Accept-Encoding'),
            ('Accept-Encoding', ('accept-encoding',), 'Accept-Encoding'),
            ('Accept-Encoding', ('ACCEPT-ENCODING',), 'Accept-Encoding'),
            ('Cookie', ('Accept-Encoding',), 'Cookie, Accept-Encoding'),
            ('Cookie, Accept-Encoding', ('Accept-Encoding',), 'Cookie, Accept-Encoding'),
            ('Cookie, Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'),
            (None, ('Accept-Encoding', 'COOKIE'), 'Accept-Encoding, COOKIE'),
            ('Cookie, Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'),
            ('Cookie , Accept-Encoding', ('Accept-Encoding', 'cookie'), 'Cookie, Accept-Encoding'),
        )
        for initial_vary, newheaders, resulting_vary in headers:
            response = TemplateResponse(HttpResponse(), Template("This is a test"))
            if initial_vary is not None:
                response['Vary'] = initial_vary
            patch_vary_headers(response, newheaders)
            self.assertEqual(response['Vary'], resulting_vary)

    def test_get_cache_key(self):
        request = self._get_request(self.path)
        response = TemplateResponse(HttpResponse(), Template("This is a test"))
        key_prefix = 'localprefix'
        # Expect None if no headers have been set yet.
        self.assertEqual(get_cache_key(request), None)
        # Set headers to an empty list.
        learn_cache_key(request, response)
        self.assertEqual(get_cache_key(request), 'views.decorators.cache.cache_page.settingsprefix.GET.a8c87a3d8c44853d7f79474f7ffe4ad5.d41d8cd98f00b204e9800998ecf8427e')
        # Verify that a specified key_prefix is taken into account.
        learn_cache_key(request, response, key_prefix=key_prefix)
        self.assertEqual(get_cache_key(request, key_prefix=key_prefix), 'views.decorators.cache.cache_page.localprefix.GET.a8c87a3d8c44853d7f79474f7ffe4ad5.d41d8cd98f00b204e9800998ecf8427e')

    def test_get_cache_key_with_query(self):
        request = self._get_request(self.path + '?test=1')
        response = TemplateResponse(HttpResponse(), Template("This is a test"))
        # Expect None if no headers have been set yet.
        self.assertEqual(get_cache_key(request), None)
        # Set headers to an empty list.
        learn_cache_key(request, response)
        # Verify that the querystring is taken into account.
        self.assertEqual(get_cache_key(request), 'views.decorators.cache.cache_page.settingsprefix.GET.bd889c5a59603af44333ed21504db3cd.d41d8cd98f00b204e9800998ecf8427e')

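    # With USE_ETAGS enabled, the ETag can only be computed once the content
    # exists, so for a TemplateResponse the header appears only after render()
    # has been called; the two tests below check both settings.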
    @override_settings(USE_ETAGS=False)
    def test_without_etag(self):
        response = TemplateResponse(HttpResponse(), Template("This is a test"))
        self.assertFalse(response.has_header('ETag'))
        patch_response_headers(response)
        self.assertFalse(response.has_header('ETag'))
        response = response.render()
        self.assertFalse(response.has_header('ETag'))

    @override_settings(USE_ETAGS=True)
    def test_with_etag(self):
        response = TemplateResponse(HttpResponse(), Template("This is a test"))
        self.assertFalse(response.has_header('ETag'))
        patch_response_headers(response)
        self.assertFalse(response.has_header('ETag'))
        response = response.render()
        self.assertTrue(response.has_header('ETag'))


class TestEtagWithAdmin(TestCase):
    # See https://code.djangoproject.com/ticket/16003
    urls = "regressiontests.admin_views.urls"

    def test_admin(self):
        with self.settings(USE_ETAGS=False):
            response = self.client.get('/test_admin/admin/')
            self.assertEqual(response.status_code, 200)
            self.assertFalse(response.has_header('ETag'))

        with self.settings(USE_ETAGS=True):
            response = self.client.get('/test_admin/admin/')
            self.assertEqual(response.status_code, 200)
            self.assertTrue(response.has_header('ETag'))