import hashlib
import json
import os
import posixpath
import re
from collections import OrderedDict
from urllib.parse import unquote, urldefrag, urlsplit, urlunsplit

from django.conf import settings
from django.contrib.staticfiles.utils import check_settings, matches_patterns
from django.core.cache import (
    InvalidCacheBackendError, cache as default_cache, caches,
)
from django.core.exceptions import ImproperlyConfigured
from django.core.files.base import ContentFile
from django.core.files.storage import FileSystemStorage, get_storage_class
from django.utils.encoding import force_bytes
from django.utils.functional import LazyObject


class StaticFilesStorage(FileSystemStorage):
    """
    Standard file system storage for static files.

    The defaults for ``location`` and ``base_url`` are
    ``STATIC_ROOT`` and ``STATIC_URL``.
    """
    def __init__(self, location=None, base_url=None, *args, **kwargs):
        if location is None:
            location = settings.STATIC_ROOT
        if base_url is None:
            base_url = settings.STATIC_URL
        check_settings(base_url)
        super().__init__(location, base_url, *args, **kwargs)
        # FileSystemStorage falls back to MEDIA_ROOT when location
        # is empty, so we restore the empty value.
        if not location:
            self.base_location = None
            self.location = None

    def path(self, name):
        if not self.location:
            raise ImproperlyConfigured("You're using the staticfiles app "
                                       "without having set the STATIC_ROOT "
                                       "setting to a filesystem path.")
        return super().path(name)
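
    # Illustrative defaults (hypothetical values, not part of the original
    # module): with project settings such as
    #   STATIC_ROOT = '/var/www/example.com/static/'
    #   STATIC_URL = '/static/'
    # this backend stores collected files under STATIC_ROOT and builds URLs
    # like '/static/css/base.css'.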


class HashedFilesMixin:
    default_template = """url("%s")"""
    max_post_process_passes = 5
    patterns = (
        ("*.css", (
            r"""(url\(['"]{0,1}\s*(.*?)["']{0,1}\))""",
            (r"""(@import\s*["']\s*(.*?)["'])""", """@import url("%s")"""),
        )),
    )
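    # Illustrative matches (examples are hypothetical, not from the original
    # source): the second capture group of each regex is the referenced URL,
    # e.g.
    #   url("../img/logo.png")  -> group 2 is '../img/logo.png'
    #   @import "print.css"     -> group 2 is 'print.css'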

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self._patterns = OrderedDict()
        self.hashed_files = {}
        for extension, patterns in self.patterns:
            for pattern in patterns:
                if isinstance(pattern, (tuple, list)):
                    pattern, template = pattern
                else:
                    template = self.default_template
                compiled = re.compile(pattern, re.IGNORECASE)
                self._patterns.setdefault(extension, []).append((compiled, template))

    def file_hash(self, name, content=None):
        """
        Return a hash of the file with the given name and optional content.
        """
        if content is None:
            return None
        md5 = hashlib.md5()
        for chunk in content.chunks():
            md5.update(chunk)
        return md5.hexdigest()[:12]

    def hashed_name(self, name, content=None, filename=None):
        # `filename` is the name of file to hash if `content` isn't given.
        # `name` is the base name to construct the new hashed filename from.
        parsed_name = urlsplit(unquote(name))
        clean_name = parsed_name.path.strip()
        if filename:
            filename = urlsplit(unquote(filename)).path.strip()
        filename = filename or clean_name
        opened = False
        if content is None:
            if not self.exists(filename):
                raise ValueError("The file '%s' could not be found with %r." % (filename, self))
            try:
                content = self.open(filename)
            except IOError:
                # Handle directory paths and fragments
                return name
            opened = True
        try:
            file_hash = self.file_hash(clean_name, content)
        finally:
            if opened:
                content.close()
        path, filename = os.path.split(clean_name)
        root, ext = os.path.splitext(filename)
        if file_hash is not None:
            file_hash = ".%s" % file_hash
        hashed_name = os.path.join(path, "%s%s%s" %
                                   (root, file_hash, ext))
        unparsed_name = list(parsed_name)
        unparsed_name[2] = hashed_name
        # Special casing for a @font-face hack, like url(myfont.eot?#iefix")
        # http://www.fontspring.com/blog/the-new-bulletproof-font-face-syntax
        if '?#' in name and not unparsed_name[3]:
            unparsed_name[2] += '?'
        return urlunsplit(unparsed_name)
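
    # Example (hypothetical hash): hashed_name('css/base.css') returns
    # something like 'css/base.5e0040571e1a.css', and the '?#' query hack is
    # preserved, e.g. 'fonts/myfont.eot?#iefix' ->
    # 'fonts/myfont.3b9e5f4c1d2a.eot?#iefix'.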

    def _url(self, hashed_name_func, name, force=False, hashed_files=None):
        """
        Return the non-hashed URL in DEBUG mode.
        """
        if settings.DEBUG and not force:
            hashed_name, fragment = name, ''
        else:
            clean_name, fragment = urldefrag(name)
            if urlsplit(clean_name).path.endswith('/'):  # don't hash paths
                hashed_name = name
            else:
                args = (clean_name,)
                if hashed_files is not None:
                    args += (hashed_files,)
                hashed_name = hashed_name_func(*args)

        final_url = super().url(hashed_name)

        # Special casing for a @font-face hack, like url(myfont.eot?#iefix")
        # http://www.fontspring.com/blog/the-new-bulletproof-font-face-syntax
        query_fragment = '?#' in name  # [sic!]
        if fragment or query_fragment:
            urlparts = list(urlsplit(final_url))
            if fragment and not urlparts[4]:
                urlparts[4] = fragment
            if query_fragment and not urlparts[3]:
                urlparts[2] += '?'
            final_url = urlunsplit(urlparts)

        return unquote(final_url)

    def url(self, name, force=False):
        """
        Return the non-hashed URL in DEBUG mode.
        """
        return self._url(self.stored_name, name, force)

    def url_converter(self, name, hashed_files, template=None):
        """
        Return the custom URL converter for the given file name.
        """
        if template is None:
            template = self.default_template

        def converter(matchobj):
            """
            Convert the matched URL to a normalized and hashed URL.

            This requires figuring out which files the matched URL resolves
            to and calling the url() method of the storage.
            """
            matched, url = matchobj.groups()

            # Ignore absolute/protocol-relative and data-uri URLs.
            if re.match(r'^[a-z]+:', url):
                return matched

            # Ignore absolute URLs that don't point to a static file (dynamic
            # CSS / JS?). Note that STATIC_URL cannot be empty.
            if url.startswith('/') and not url.startswith(settings.STATIC_URL):
                return matched

            # Strip off the fragment so a path-like fragment won't interfere.
            url_path, fragment = urldefrag(url)

            if url_path.startswith('/'):
                # Otherwise the condition above would have returned prematurely.
                assert url_path.startswith(settings.STATIC_URL)
                target_name = url_path[len(settings.STATIC_URL):]
            else:
                # We're using the posixpath module to mix paths and URLs conveniently.
                source_name = name if os.sep == '/' else name.replace(os.sep, '/')
                target_name = posixpath.join(posixpath.dirname(source_name), url_path)

            # Determine the hashed name of the target file with the storage backend.
            hashed_url = self._url(
                self._stored_name, unquote(target_name),
                force=True, hashed_files=hashed_files,
            )

            transformed_url = '/'.join(url_path.split('/')[:-1] + hashed_url.split('/')[-1:])

            # Restore the fragment that was stripped off earlier.
            if fragment:
                transformed_url += ('?#' if '?#' in url else '#') + fragment

            # Return the hashed version to the file
            return template % unquote(transformed_url)

        return converter
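
    # Sketch of the converter's effect (file names and hash are hypothetical):
    # inside 'css/base.css', a reference such as
    #   url("../img/logo.png#outline")
    # would be rewritten to
    #   url("../img/logo.a1b2c3d4e5f6.png#outline")
    # i.e. only the final path segment is replaced by its hashed counterpart
    # and the fragment is restored afterwards.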

    def post_process(self, paths, dry_run=False, **options):
        """
        Post process the given OrderedDict of files (called from collectstatic).

        Processing is actually two separate operations:

        1. renaming files to include a hash of their content for cache-busting,
           and copying those files to the target storage.
        2. adjusting files which contain references to other files so they
           refer to the cache-busting filenames.

        If either of these is performed on a file, then that file is considered
        post-processed.
        """
        # don't even dare to process the files if we're in dry run mode
        if dry_run:
            return

        # where to store the new paths
        hashed_files = OrderedDict()

        # build a list of adjustable files
        adjustable_paths = [
            path for path in paths
            if matches_patterns(path, self._patterns)
        ]
        # Do a single pass first. Post-process all files once, then repeat for
        # adjustable files.
        for name, hashed_name, processed, _ in self._post_process(paths, adjustable_paths, hashed_files):
            yield name, hashed_name, processed

        paths = {path: paths[path] for path in adjustable_paths}

        for i in range(self.max_post_process_passes):
            substitutions = False
            for name, hashed_name, processed, subst in self._post_process(paths, adjustable_paths, hashed_files):
                yield name, hashed_name, processed
                substitutions = substitutions or subst

            if not substitutions:
                break

        if substitutions:
            yield 'All', None, RuntimeError('Max post-process passes exceeded.')

        # Store the processed paths
        self.hashed_files.update(hashed_files)
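
    # For illustration (hypothetical names): collectstatic consumes the yielded
    # triples, e.g.
    #   ('css/base.css', 'css/base.5e0040571e1a.css', True)
    # and treats a third element that is an exception instance (such as the
    # RuntimeError above) as an error to report.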

    def _post_process(self, paths, adjustable_paths, hashed_files):
        # Sort the files by directory level
        def path_level(name):
            return len(name.split(os.sep))

        for name in sorted(paths, key=path_level, reverse=True):
            substitutions = True
            # use the original, local file, not the copied-but-unprocessed
            # file, which might be somewhere far away, like S3
            storage, path = paths[name]
            with storage.open(path) as original_file:
                cleaned_name = self.clean_name(name)
                hash_key = self.hash_key(cleaned_name)

                # generate the hash with the original content, even for
                # adjustable files.
                if hash_key not in hashed_files:
                    hashed_name = self.hashed_name(name, original_file)
                else:
                    hashed_name = hashed_files[hash_key]

                # then get the original's file content..
                if hasattr(original_file, 'seek'):
                    original_file.seek(0)

                hashed_file_exists = self.exists(hashed_name)
                processed = False

                # ..to apply each replacement pattern to the content
                if name in adjustable_paths:
                    old_hashed_name = hashed_name
                    content = original_file.read().decode(settings.FILE_CHARSET)
                    for extension, patterns in self._patterns.items():
                        if matches_patterns(path, (extension,)):
                            for pattern, template in patterns:
                                converter = self.url_converter(name, hashed_files, template)
                                try:
                                    content = pattern.sub(converter, content)
                                except ValueError as exc:
                                    yield name, None, exc, False
                    if hashed_file_exists:
                        self.delete(hashed_name)
                    # then save the processed result
                    content_file = ContentFile(force_bytes(content))
                    # Save intermediate file for reference
                    saved_name = self._save(hashed_name, content_file)
                    hashed_name = self.hashed_name(name, content_file)

                    if self.exists(hashed_name):
                        self.delete(hashed_name)

                    saved_name = self._save(hashed_name, content_file)
                    hashed_name = self.clean_name(saved_name)
                    # If the file hash stayed the same, this file didn't change
                    if old_hashed_name == hashed_name:
                        substitutions = False
                    processed = True

                if not processed:
                    # or handle the case in which neither processing nor
                    # a change to the original file happened
                    if not hashed_file_exists:
                        processed = True
                        saved_name = self._save(hashed_name, original_file)
                        hashed_name = self.clean_name(saved_name)

                # and then set the cache accordingly
                hashed_files[hash_key] = hashed_name

                yield name, hashed_name, processed, substitutions

    def clean_name(self, name):
        return name.replace('\\', '/')

    def hash_key(self, name):
        return name

    def _stored_name(self, name, hashed_files):
        # Normalize the path to avoid multiple names for the same file like
        # ../foo/bar.css and ../foo/../foo/bar.css which normalize to the same
        # path.
        name = posixpath.normpath(name)
        cleaned_name = self.clean_name(name)
        hash_key = self.hash_key(cleaned_name)
        cache_name = hashed_files.get(hash_key)
        if cache_name is None:
            cache_name = self.clean_name(self.hashed_name(name))
        return cache_name

    def stored_name(self, name):
        cleaned_name = self.clean_name(name)
        hash_key = self.hash_key(cleaned_name)
        cache_name = self.hashed_files.get(hash_key)
        if cache_name:
            return cache_name
        # No cached name found, recalculate it from the files.
        intermediate_name = name
        for i in range(self.max_post_process_passes + 1):
            cache_name = self.clean_name(
                self.hashed_name(name, content=None, filename=intermediate_name)
            )
            if intermediate_name == cache_name:
                # Store the hashed name if there was a miss.
                self.hashed_files[hash_key] = cache_name
                return cache_name
            else:
                # Move on to the next intermediate file.
                intermediate_name = cache_name
        # If the cache name can't be determined after the max number of passes,
        # the intermediate files on disk may be corrupt; avoid an infinite loop.
        raise ValueError("The name '%s' could not be hashed with %r." % (name, self))


class ManifestFilesMixin(HashedFilesMixin):
    manifest_version = '1.0'  # the manifest format standard
    manifest_name = 'staticfiles.json'
    manifest_strict = True

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.hashed_files = self.load_manifest()

    def read_manifest(self):
        try:
            with self.open(self.manifest_name) as manifest:
                return manifest.read().decode()
        except IOError:
            return None

    def load_manifest(self):
        content = self.read_manifest()
        if content is None:
            return OrderedDict()
        try:
            stored = json.loads(content, object_pairs_hook=OrderedDict)
        except ValueError:
            pass
        else:
            version = stored.get('version')
            if version == '1.0':
                return stored.get('paths', OrderedDict())
        raise ValueError("Couldn't load manifest '%s' (version %s)" %
                         (self.manifest_name, self.manifest_version))

    def post_process(self, *args, **kwargs):
        self.hashed_files = OrderedDict()
        yield from super().post_process(*args, **kwargs)
        self.save_manifest()

    def save_manifest(self):
        payload = {'paths': self.hashed_files, 'version': self.manifest_version}
        if self.exists(self.manifest_name):
            self.delete(self.manifest_name)
        contents = json.dumps(payload).encode()
        self._save(self.manifest_name, ContentFile(contents))
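
    # Illustrative manifest contents (paths and hashes are made up):
    #   {
    #       "version": "1.0",
    #       "paths": {
    #           "css/base.css": "css/base.5e0040571e1a.css"
    #       }
    #   }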

    def stored_name(self, name):
        parsed_name = urlsplit(unquote(name))
        clean_name = parsed_name.path.strip()
        hash_key = self.hash_key(clean_name)
        cache_name = self.hashed_files.get(hash_key)
        if cache_name is None:
            if self.manifest_strict:
                raise ValueError("Missing staticfiles manifest entry for '%s'" % clean_name)
            cache_name = self.clean_name(self.hashed_name(name))
        unparsed_name = list(parsed_name)
        unparsed_name[2] = cache_name
        # Special casing for a @font-face hack, like url(myfont.eot?#iefix")
        # http://www.fontspring.com/blog/the-new-bulletproof-font-face-syntax
        if '?#' in name and not unparsed_name[3]:
            unparsed_name[2] += '?'
        return urlunsplit(unparsed_name)


class _MappingCache:
    """
    A small dict-like wrapper for a given cache backend instance.
    """
    def __init__(self, cache):
        self.cache = cache

    def __setitem__(self, key, value):
        self.cache.set(key, value)

    def __getitem__(self, key):
        value = self.cache.get(key)
        if value is None:
            raise KeyError("Couldn't find a file name '%s'" % key)
        return value

    def clear(self):
        self.cache.clear()

    def update(self, data):
        self.cache.set_many(data)

    def get(self, key, default=None):
        try:
            return self[key]
        except KeyError:
            return default


class CachedFilesMixin(HashedFilesMixin):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        try:
            self.hashed_files = _MappingCache(caches['staticfiles'])
        except InvalidCacheBackendError:
            # Use the default backend
            self.hashed_files = _MappingCache(default_cache)

    def hash_key(self, name):
        key = hashlib.md5(force_bytes(self.clean_name(name))).hexdigest()
        return 'staticfiles:%s' % key


class CachedStaticFilesStorage(CachedFilesMixin, StaticFilesStorage):
    """
    A static file system storage backend which also saves
    hashed copies of the files it saves.
    """
    pass


class ManifestStaticFilesStorage(ManifestFilesMixin, StaticFilesStorage):
    """
    A static file system storage backend which also saves
    hashed copies of the files it saves.
    """
    pass
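

# Example configuration (hypothetical project settings, not part of this
# module): hashed filenames are typically enabled in production with
#   STATICFILES_STORAGE = 'django.contrib.staticfiles.storage.ManifestStaticFilesStorage'
# after which collectstatic writes staticfiles.json alongside the hashed copies.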


class ConfiguredStorage(LazyObject):
    def _setup(self):
        self._wrapped = get_storage_class(settings.STATICFILES_STORAGE)()


staticfiles_storage = ConfiguredStorage()
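
# Usage sketch (illustrative, not part of the original module): application
# code resolves static URLs through this lazy instance, e.g.
#   from django.contrib.staticfiles.storage import staticfiles_storage
#   staticfiles_storage.url('css/base.css')
# which is also what the {% static %} template tag calls under the hood.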