2006-02-24 14:07:01 +08:00
|
|
|
"File-based cache backend"
|
2013-08-26 22:34:02 +08:00
|
|
|
import errno
|
|
|
|
import glob
|
2011-03-28 09:40:43 +08:00
|
|
|
import hashlib
|
2013-08-26 22:34:02 +08:00
|
|
|
import io
|
2008-08-02 13:56:57 +08:00
|
|
|
import os
|
2013-08-26 22:34:02 +08:00
|
|
|
import random
|
|
|
|
import tempfile
|
2011-03-28 09:40:43 +08:00
|
|
|
import time
|
2013-08-26 22:34:02 +08:00
|
|
|
import zlib
|
2015-01-28 20:35:27 +08:00
|
|
|
|
|
|
|
from django.core.cache.backends.base import DEFAULT_TIMEOUT, BaseCache
|
2013-08-26 22:34:02 +08:00
|
|
|
from django.core.files.move import file_move_safe
|
|
|
|
from django.utils.encoding import force_bytes
|
2015-01-28 20:35:27 +08:00
|
|
|
|
2006-02-24 14:07:01 +08:00
|
|
|
try:
|
2012-07-20 22:16:57 +08:00
|
|
|
from django.utils.six.moves import cPickle as pickle
|
2006-02-24 14:07:01 +08:00
|
|
|
except ImportError:
|
|
|
|
import pickle
|
2008-08-02 13:56:57 +08:00
|
|
|
|
2013-05-18 18:54:59 +08:00
|
|
|
|
2010-12-21 23:19:19 +08:00
|
|
|
class FileBasedCache(BaseCache):
    """
    A cache backend that stores each cache entry as a separate file on disk.

    Each entry lives at ``<dir>/<md5(key)><cache_suffix>``. The file contains
    a pickled expiry timestamp (or ``None`` for "never expires") followed by
    the zlib-compressed pickle of the cached value. Writes are published
    atomically via a temp file + rename, so readers never observe a
    half-written entry.
    """
    # Appended to every cache filename so that unrelated files in the cache
    # directory are never mistaken for (or culled/deleted as) cache entries.
    cache_suffix = '.djcache'

    def __init__(self, dir, params):
        """
        Arguments:
            dir: path of the directory that holds the cache files; created
                immediately if it does not exist.
            params: standard cache options (TIMEOUT, MAX_ENTRIES,
                CULL_FREQUENCY, ...) interpreted by BaseCache.
        """
        super(FileBasedCache, self).__init__(params)
        self._dir = os.path.abspath(dir)
        self._createdir()

    def add(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
        """
        Store ``value`` only if ``key`` is absent or expired.

        Returns True if the value was stored, False if the key already held
        a live entry. Note: the has_key/set pair is not atomic across
        processes (same as the original behavior).
        """
        if self.has_key(key, version):
            return False
        self.set(key, value, timeout, version)
        return True

    def get(self, key, default=None, version=None):
        """
        Return the cached value for ``key``, or ``default`` on a miss
        (no file, or the entry has expired).
        """
        fname = self._key_to_file(key, version)
        try:
            with io.open(fname, 'rb') as f:
                if not self._is_expired(f):
                    # Everything after the expiry header is the compressed
                    # pickle of the value.
                    return pickle.loads(zlib.decompress(f.read()))
        except IOError as e:
            # ENOENT is an expected miss: the file may be removed (e.g. by
            # another process, or by _is_expired in a racing reader) at any
            # time. Any other I/O error (permissions, disk failure) is a
            # real problem and must not be silently turned into a miss.
            if e.errno != errno.ENOENT:
                raise
        return default

    def set(self, key, value, timeout=DEFAULT_TIMEOUT, version=None):
        """
        Store ``value`` under ``key``, atomically replacing any existing
        entry. ``timeout`` follows the standard backend semantics
        (DEFAULT_TIMEOUT -> configured default, None -> never expires).
        """
        self._createdir()  # Cache dir can be deleted at any time.
        fname = self._key_to_file(key, version)
        self._cull()  # make some room if necessary
        fd, tmp_path = tempfile.mkstemp(dir=self._dir)
        renamed = False
        try:
            with io.open(fd, 'wb') as f:
                # Write the expiry header first, uncompressed, so that
                # _is_expired() can read it without decompressing the value.
                expiry = self.get_backend_timeout(timeout)
                f.write(pickle.dumps(expiry, pickle.HIGHEST_PROTOCOL))
                f.write(zlib.compress(pickle.dumps(value, pickle.HIGHEST_PROTOCOL)))
            # Atomic publish: concurrent readers see either the complete old
            # file or the complete new one, never a partial write.
            file_move_safe(tmp_path, fname, allow_overwrite=True)
            renamed = True
        finally:
            # If anything above failed, don't leave the orphaned temp file
            # behind in the cache directory.
            if not renamed:
                os.remove(tmp_path)

    def delete(self, key, version=None):
        """Remove the entry for ``key``; a missing entry is not an error."""
        self._delete(self._key_to_file(key, version))

    def _delete(self, fname):
        """
        Remove the cache file ``fname`` if it exists and lies inside the
        cache directory (a safety guard against deleting arbitrary paths).
        """
        if not fname.startswith(self._dir) or not os.path.exists(fname):
            return
        try:
            os.remove(fname)
        except OSError as e:
            # ENOENT can happen if the cache file is removed (by another
            # process) after the os.path.exists check.
            if e.errno != errno.ENOENT:
                raise

    def has_key(self, key, version=None):
        """
        Return True if ``key`` holds a non-expired entry.

        NOTE(review): there is a small race between the exists() check and
        the open(); a file deleted in between raises IOError here (this
        preserves the original behavior).
        """
        fname = self._key_to_file(key, version)
        if os.path.exists(fname):
            with io.open(fname, 'rb') as f:
                return not self._is_expired(f)
        return False

    def _cull(self):
        """
        Removes random cache entries if max_entries is reached at a ratio
        of num_entries / cull_frequency. A value of 0 for CULL_FREQUENCY means
        that the entire cache will be purged.
        """
        filelist = self._list_cache_files()
        num_entries = len(filelist)
        if num_entries < self._max_entries:
            return  # return early if no culling is required
        if self._cull_frequency == 0:
            return self.clear()  # Clear the cache when CULL_FREQUENCY = 0
        # Delete a random selection of entries
        filelist = random.sample(filelist,
                                 int(num_entries / self._cull_frequency))
        for fname in filelist:
            self._delete(fname)

    def _createdir(self):
        """
        Create the cache directory (mode 0o700) if it does not exist.
        Safe against the directory being created concurrently (EEXIST).
        """
        if not os.path.exists(self._dir):
            try:
                os.makedirs(self._dir, 0o700)
            except OSError as e:
                # EEXIST means another process created it first; that's fine.
                if e.errno != errno.EEXIST:
                    raise EnvironmentError(
                        "Cache directory '%s' does not exist "
                        "and could not be created" % self._dir)

    def _key_to_file(self, key, version=None):
        """
        Convert a key into a cache file path. Basically this is the
        root cache path joined with the md5sum of the key and a suffix.
        """
        key = self.make_key(key, version=version)
        self.validate_key(key)
        return os.path.join(self._dir, ''.join(
            [hashlib.md5(force_bytes(key)).hexdigest(), self.cache_suffix]))

    def clear(self):
        """
        Remove all the cache files.
        """
        if not os.path.exists(self._dir):
            return
        for fname in self._list_cache_files():
            self._delete(fname)

    def _is_expired(self, f):
        """
        Takes an open cache file and determines if it has expired,
        deletes the file if it is has passed its expiry time.
        """
        # The expiry header was written uncompressed as the first pickle
        # frame, so pickle.load() stops exactly at the value payload.
        exp = pickle.load(f)
        if exp is not None and exp < time.time():
            f.close()  # On Windows a file has to be closed before deleting
            self._delete(f.name)
            return True
        return False

    def _list_cache_files(self):
        """
        Get a list of paths to all the cache files. These are all the files
        in the root cache dir that end on the cache_suffix.
        """
        if not os.path.exists(self._dir):
            return []
        # glob.glob1 lists (non-recursively) entries of _dir matching the
        # pattern; only files carrying cache_suffix are treated as entries.
        filelist = [os.path.join(self._dir, fname) for fname
                    in glob.glob1(self._dir, '*%s' % self.cache_suffix)]
        return filelist
|