Fixed #24452 -- Fixed HashedFilesMixin correctness with nested paths.

David Sanders 2017-01-11 07:21:29 -07:00 committed by Tim Graham
parent 7156a6c9c4
commit 53bffe8d03
6 changed files with 342 additions and 50 deletions

View File

@@ -18,6 +18,7 @@ from django.core.files.storage import FileSystemStorage, get_storage_class
 from django.utils.encoding import force_bytes, force_text
 from django.utils.functional import LazyObject
 from django.utils.six import iteritems
+from django.utils.six.moves import range
 from django.utils.six.moves.urllib.parse import (
     unquote, urldefrag, urlsplit, urlunsplit,
 )
@@ -54,6 +55,7 @@ class StaticFilesStorage(FileSystemStorage):
 class HashedFilesMixin(object):
     default_template = """url("%s")"""
+    max_post_process_passes = 5
     patterns = (
         ("*.css", (
             r"""(url\(['"]{0,1}\s*(.*?)["']{0,1}\))""",
@@ -85,16 +87,20 @@ class HashedFilesMixin(object):
             md5.update(chunk)
         return md5.hexdigest()[:12]
 
-    def hashed_name(self, name, content=None):
+    def hashed_name(self, name, content=None, filename=None):
+        # `filename` is the name of file to hash if `content` isn't given.
+        # `name` is the base name to construct the new hashed filename from.
         parsed_name = urlsplit(unquote(name))
         clean_name = parsed_name.path.strip()
+        if filename:
+            filename = urlsplit(unquote(filename)).path.strip()
+        filename = filename or clean_name
         opened = False
         if content is None:
-            if not self.exists(clean_name):
-                raise ValueError("The file '%s' could not be found with %r." %
-                                 (clean_name, self))
+            if not self.exists(filename):
+                raise ValueError("The file '%s' could not be found with %r." % (filename, self))
             try:
-                content = self.open(clean_name)
+                content = self.open(filename)
             except IOError:
                 # Handle directory paths and fragments
                 return name
@@ -118,9 +124,9 @@ class HashedFilesMixin(object):
             unparsed_name[2] += '?'
         return urlunsplit(unparsed_name)
 
-    def url(self, name, force=False):
+    def _url(self, hashed_name_func, name, force=False, hashed_files=None):
         """
-        Return the real URL in DEBUG mode.
+        Return the non-hashed URL in DEBUG mode.
         """
         if settings.DEBUG and not force:
             hashed_name, fragment = name, ''
@@ -129,7 +135,10 @@ class HashedFilesMixin(object):
             if urlsplit(clean_name).path.endswith('/'):  # don't hash paths
                 hashed_name = name
             else:
-                hashed_name = self.stored_name(clean_name)
+                args = (clean_name,)
+                if hashed_files is not None:
+                    args += (hashed_files,)
+                hashed_name = hashed_name_func(*args)
 
         final_url = super(HashedFilesMixin, self).url(hashed_name)
@@ -146,7 +155,13 @@ class HashedFilesMixin(object):
         return unquote(final_url)
 
-    def url_converter(self, name, template=None):
+    def url(self, name, force=False):
+        """
+        Return the non-hashed URL in DEBUG mode.
+        """
+        return self._url(self.stored_name, name, force)
+
+    def url_converter(self, name, hashed_files, template=None):
         """
         Return the custom URL converter for the given file name.
         """
@@ -184,7 +199,10 @@ class HashedFilesMixin(object):
                 target_name = posixpath.join(posixpath.dirname(source_name), url_path)
 
             # Determine the hashed name of the target file with the storage backend.
-            hashed_url = self.url(unquote(target_name), force=True)
+            hashed_url = self._url(
+                self._stored_name, unquote(target_name),
+                force=True, hashed_files=hashed_files,
+            )
 
             transformed_url = '/'.join(url_path.split('/')[:-1] + hashed_url.split('/')[-1:])
@@ -223,21 +241,48 @@ class HashedFilesMixin(object):
             path for path in paths
             if matches_patterns(path, self._patterns.keys())
         ]
-        # then sort the files by the directory level
+        # Do a single pass first. Post-process all files once, then repeat for
+        # adjustable files.
+        for name, hashed_name, processed, _ in self._post_process(paths, adjustable_paths, hashed_files):
+            yield name, hashed_name, processed
+
+        paths = {path: paths[path] for path in adjustable_paths}
+
+        for i in range(self.max_post_process_passes):
+            substitutions = False
+            for name, hashed_name, processed, subst in self._post_process(paths, adjustable_paths, hashed_files):
+                yield name, hashed_name, processed
+                substitutions = substitutions or subst
+
+            if not substitutions:
+                break
+
+        if substitutions:
+            yield 'All', None, RuntimeError('Max post-process passes exceeded.')
+
+        # Store the processed paths
+        self.hashed_files.update(hashed_files)
+
+    def _post_process(self, paths, adjustable_paths, hashed_files):
+        # Sort the files by directory level
         def path_level(name):
             return len(name.split(os.sep))
 
         for name in sorted(paths.keys(), key=path_level, reverse=True):
+            substitutions = True
             # use the original, local file, not the copied-but-unprocessed
             # file, which might be somewhere far away, like S3
             storage, path = paths[name]
             with storage.open(path) as original_file:
+                cleaned_name = self.clean_name(name)
+                hash_key = self.hash_key(cleaned_name)
+
                 # generate the hash with the original content, even for
                 # adjustable files.
-                hashed_name = self.hashed_name(name, original_file)
+                if hash_key not in hashed_files:
+                    hashed_name = self.hashed_name(name, original_file)
+                else:
+                    hashed_name = hashed_files[hash_key]
 
                 # then get the original's file content..
                 if hasattr(original_file, 'seek'):
@@ -248,23 +293,35 @@ class HashedFilesMixin(object):
                 # ..to apply each replacement pattern to the content
                 if name in adjustable_paths:
+                    old_hashed_name = hashed_name
                     content = original_file.read().decode(settings.FILE_CHARSET)
                     for extension, patterns in iteritems(self._patterns):
                         if matches_patterns(path, (extension,)):
                             for pattern, template in patterns:
-                                converter = self.url_converter(name, template)
+                                converter = self.url_converter(name, hashed_files, template)
                                 try:
                                     content = pattern.sub(converter, content)
                                 except ValueError as exc:
-                                    yield name, None, exc
+                                    yield name, None, exc, False
                     if hashed_file_exists:
                         self.delete(hashed_name)
                     # then save the processed result
                     content_file = ContentFile(force_bytes(content))
+                    # Save intermediate file for reference
+                    saved_name = self._save(hashed_name, content_file)
+                    hashed_name = self.hashed_name(name, content_file)
+
+                    if self.exists(hashed_name):
+                        self.delete(hashed_name)
+
                     saved_name = self._save(hashed_name, content_file)
                     hashed_name = force_text(self.clean_name(saved_name))
+                    # If the file hash stayed the same, this file didn't change
+                    if old_hashed_name == hashed_name:
+                        substitutions = False
                     processed = True
-                else:
+
+                if not processed:
                     # or handle the case in which neither processing nor
                     # a change to the original file happened
                     if not hashed_file_exists:
@@ -273,11 +330,9 @@ class HashedFilesMixin(object):
                         hashed_name = force_text(self.clean_name(saved_name))
 
                 # and then set the cache accordingly
-                hashed_files[self.hash_key(name)] = hashed_name
-                yield name, hashed_name, processed
+                hashed_files[hash_key] = hashed_name
 
-        # Finally store the processed paths
-        self.hashed_files.update(hashed_files)
+                yield name, hashed_name, processed, substitutions
 
     def clean_name(self, name):
         return name.replace('\\', '/')
@@ -285,20 +340,46 @@ class HashedFilesMixin(object):
     def hash_key(self, name):
         return name
 
-    def stored_name(self, name):
-        hash_key = self.hash_key(name)
-        cache_name = self.hashed_files.get(hash_key)
+    def _stored_name(self, name, hashed_files):
+        # Normalize the path to avoid multiple names for the same file like
+        # ../foo/bar.css and ../foo/../foo/bar.css which normalize to the same
+        # path.
+        name = posixpath.normpath(name)
+        cleaned_name = self.clean_name(name)
+        hash_key = self.hash_key(cleaned_name)
+        cache_name = hashed_files.get(hash_key)
         if cache_name is None:
             cache_name = self.clean_name(self.hashed_name(name))
-            # store the hashed name if there was a miss, e.g.
-            # when the files are still processed
-            self.hashed_files[hash_key] = cache_name
         return cache_name
+
+    def stored_name(self, name):
+        cleaned_name = self.clean_name(name)
+        hash_key = self.hash_key(cleaned_name)
+        cache_name = self.hashed_files.get(hash_key)
+        if cache_name:
+            return cache_name
+        # No cached name found, recalculate it from the files.
+        intermediate_name = name
+        for i in range(self.max_post_process_passes + 1):
+            cache_name = self.clean_name(
+                self.hashed_name(name, content=None, filename=intermediate_name)
+            )
+            if intermediate_name == cache_name:
+                # Store the hashed name if there was a miss.
+                self.hashed_files[hash_key] = cache_name
+                return cache_name
+            else:
+                # Move on to the next intermediate file.
+                intermediate_name = cache_name
+        # If the cache name can't be determined after the max number of passes,
+        # the intermediate files on disk may be corrupt; avoid an infinite loop.
+        raise ValueError("The name '%s' could not be hashed with %r." % (name, self))
 
 
 class ManifestFilesMixin(HashedFilesMixin):
     manifest_version = '1.0'  # the manifest format standard
     manifest_name = 'staticfiles.json'
+    manifest_strict = True
 
     def __init__(self, *args, **kwargs):
         super(ManifestFilesMixin, self).__init__(*args, **kwargs)
@@ -341,6 +422,23 @@ class ManifestFilesMixin(HashedFilesMixin):
         contents = json.dumps(payload).encode('utf-8')
         self._save(self.manifest_name, ContentFile(contents))
 
+    def stored_name(self, name):
+        parsed_name = urlsplit(unquote(name))
+        clean_name = parsed_name.path.strip()
+        hash_key = self.hash_key(clean_name)
+        cache_name = self.hashed_files.get(hash_key)
+        if cache_name is None:
+            if self.manifest_strict:
+                raise ValueError("Missing staticfiles manifest entry for '%s'" % clean_name)
+            cache_name = self.clean_name(self.hashed_name(name))
+        unparsed_name = list(parsed_name)
+        unparsed_name[2] = cache_name
+        # Special casing for a @font-face hack, like url(myfont.eot?#iefix")
+        # http://www.fontspring.com/blog/the-new-bulletproof-font-face-syntax
+        if '?#' in name and not unparsed_name[3]:
+            unparsed_name[2] += '?'
+        return urlunsplit(unparsed_name)
+
 
 class _MappingCache(object):
     """

View File

@@ -293,6 +293,24 @@ content:
 
     @import url("../admin/css/base.27e20196a850.css");
 
+.. attribute:: storage.ManifestStaticFilesStorage.max_post_process_passes
+
+    Since static files might reference other static files that need to have
+    their paths replaced, multiple passes of replacing paths may be needed
+    until the file hashes converge. To prevent an infinite loop due to hashes
+    not converging (for example, if ``'foo.css'`` references ``'bar.css'``
+    which references ``'foo.css'``) there is a maximum number of passes before
+    post-processing is abandoned. In cases with a large number of references,
+    a higher number of passes might be needed. Increase the maximum number of
+    passes by subclassing ``ManifestStaticFilesStorage`` and setting the
+    ``max_post_process_passes`` attribute. It defaults to 5.
+
+    .. versionchanged:: 1.11
+
+        Previous versions didn't make multiple passes to ensure file hashes
+        converged, so file hashes often weren't correct. The
+        ``max_post_process_passes`` attribute was added.
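
A minimal sketch of the subclass described above; the module path and class
name here are hypothetical, not part of this commit::

    # myproject/storage.py (hypothetical module)
    from django.contrib.staticfiles.storage import ManifestStaticFilesStorage

    class DeepReferenceStaticFilesStorage(ManifestStaticFilesStorage):
        # Allow more rewrite passes for long chains of nested references.
        max_post_process_passes = 10

It would then be activated by pointing the ``STATICFILES_STORAGE`` setting at
``'myproject.storage.DeepReferenceStaticFilesStorage'``.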
 To enable the ``ManifestStaticFilesStorage`` you have to make sure the
 following requirements are met:
@@ -315,6 +333,18 @@ hashed names for all processed files in a file called ``staticfiles.json``.
 This happens once when you run the :djadmin:`collectstatic` management
 command.
 
+.. attribute:: storage.ManifestStaticFilesStorage.manifest_strict
+
+    If a file isn't found in the ``staticfiles.json`` manifest at runtime, a
+    ``ValueError`` is raised. This behavior can be disabled by subclassing
+    ``ManifestStaticFilesStorage`` and setting the ``manifest_strict``
+    attribute to ``False`` -- nonexistent paths will remain unchanged.
+
+    .. versionchanged:: 1.11
+
+        The ``manifest_strict`` attribute was added. In older versions, the
+        behavior is the same as ``manifest_strict=False``.
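
A minimal sketch of the opt-out described above; the module path and class
name are again hypothetical::

    # myproject/storage.py (hypothetical module)
    from django.contrib.staticfiles.storage import ManifestStaticFilesStorage

    class LenientManifestStaticFilesStorage(ManifestStaticFilesStorage):
        # Leave unknown paths unchanged instead of raising ValueError when a
        # file is missing from staticfiles.json.
        manifest_strict = False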
 Due to the requirement of running :djadmin:`collectstatic`, this storage
 typically shouldn't be used when running tests as ``collectstatic`` isn't run
 as part of the normal test setup. During testing, ensure that the
@@ -350,6 +380,16 @@ If you want to override certain options of the cache backend the storage uses,
 simply specify a custom entry in the :setting:`CACHES` setting named
 ``'staticfiles'``. It falls back to using the ``'default'`` cache backend.
 
+.. warning::
+
+    ``CachedStaticFilesStorage`` isn't recommended -- in almost all cases
+    ``ManifestStaticFilesStorage`` is a better choice. There are several
+    performance penalties when using ``CachedStaticFilesStorage`` since a
+    cache miss requires hashing files at runtime. Remote file storages
+    require several round-trips to hash a file on a cache miss, as several
+    file accesses are required to ensure that the file hash is correct in
+    the case of nested file paths.
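
As a usage sketch of the recommendation above, switching to the
manifest-backed storage is a one-line settings change (assuming the project
currently uses the default storage or ``CachedStaticFilesStorage``)::

    # settings.py
    STATICFILES_STORAGE = 'django.contrib.staticfiles.storage.ManifestStaticFilesStorage'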
 Finders Module
 ==============

View File

@@ -481,6 +481,21 @@ Backwards incompatible changes in 1.11
 updated. Check your project if you subclass these widgets or extend the
 templates.
 
+:mod:`django.contrib.staticfiles`
+---------------------------------
+
+* ``collectstatic`` may now fail during post-processing when using a hashed
+  static files storage if a reference loop exists (e.g. ``'foo.css'``
+  references ``'bar.css'`` which itself references ``'foo.css'``) or if the
+  chain of files referencing other files is too deep to resolve in several
+  passes. In the latter case, increase the number of passes using
+  :attr:`.ManifestStaticFilesStorage.max_post_process_passes`.
+
+* When using ``ManifestStaticFilesStorage``, static files not found in the
+  manifest at runtime now raise a ``ValueError`` instead of returning an
+  unchanged path. You can revert to the old behavior by setting
+  :attr:`.ManifestStaticFilesStorage.manifest_strict` to ``False``.
+
 Database backend API
 --------------------

View File

@@ -0,0 +1 @@
+@import url("foo.css")

View File

@@ -0,0 +1 @@
+@import url("bar.css")

View File

@@ -28,9 +28,21 @@ def hashed_file_path(test, path):
 class TestHashedFiles(object):
     hashed_file_path = hashed_file_path
 
+    def setUp(self):
+        self._max_post_process_passes = storage.staticfiles_storage.max_post_process_passes
+        super(TestHashedFiles, self).setUp()
+
     def tearDown(self):
         # Clear hashed files to avoid side effects among tests.
         storage.staticfiles_storage.hashed_files.clear()
+        storage.staticfiles_storage.max_post_process_passes = self._max_post_process_passes
+
+    def assertPostCondition(self):
+        """
+        Assert post conditions for a test are met. Must be manually called at
+        the end of each test.
+        """
+        pass
 
     def test_template_tag_return(self):
         """
@@ -39,17 +51,19 @@ class TestHashedFiles(object):
         self.assertStaticRaises(ValueError, "does/not/exist.png", "/static/does/not/exist.png")
         self.assertStaticRenders("test/file.txt", "/static/test/file.dad0999e4f8f.txt")
         self.assertStaticRenders("test/file.txt", "/static/test/file.dad0999e4f8f.txt", asvar=True)
-        self.assertStaticRenders("cached/styles.css", "/static/cached/styles.bb84a0240107.css")
+        self.assertStaticRenders("cached/styles.css", "/static/cached/styles.5e0040571e1a.css")
         self.assertStaticRenders("path/", "/static/path/")
         self.assertStaticRenders("path/?query", "/static/path/?query")
+        self.assertPostCondition()
 
     def test_template_tag_simple_content(self):
         relpath = self.hashed_file_path("cached/styles.css")
-        self.assertEqual(relpath, "cached/styles.bb84a0240107.css")
+        self.assertEqual(relpath, "cached/styles.5e0040571e1a.css")
         with storage.staticfiles_storage.open(relpath) as relfile:
             content = relfile.read()
             self.assertNotIn(b"cached/other.css", content)
             self.assertIn(b"other.d41d8cd98f00.css", content)
+        self.assertPostCondition()
 
     def test_path_ignored_completely(self):
         relpath = self.hashed_file_path("cached/css/ignored.css")
@@ -62,28 +76,29 @@ class TestHashedFiles(object):
             self.assertIn(b'data:foobar', content)
             self.assertIn(b'chrome:foobar', content)
             self.assertIn(b'//foobar', content)
+        self.assertPostCondition()
 
     def test_path_with_querystring(self):
         relpath = self.hashed_file_path("cached/styles.css?spam=eggs")
-        self.assertEqual(relpath, "cached/styles.bb84a0240107.css?spam=eggs")
-        with storage.staticfiles_storage.open(
-                "cached/styles.bb84a0240107.css") as relfile:
+        self.assertEqual(relpath, "cached/styles.5e0040571e1a.css?spam=eggs")
+        with storage.staticfiles_storage.open("cached/styles.5e0040571e1a.css") as relfile:
             content = relfile.read()
             self.assertNotIn(b"cached/other.css", content)
             self.assertIn(b"other.d41d8cd98f00.css", content)
+        self.assertPostCondition()
 
     def test_path_with_fragment(self):
         relpath = self.hashed_file_path("cached/styles.css#eggs")
-        self.assertEqual(relpath, "cached/styles.bb84a0240107.css#eggs")
-        with storage.staticfiles_storage.open(
-                "cached/styles.bb84a0240107.css") as relfile:
+        self.assertEqual(relpath, "cached/styles.5e0040571e1a.css#eggs")
+        with storage.staticfiles_storage.open("cached/styles.5e0040571e1a.css") as relfile:
             content = relfile.read()
             self.assertNotIn(b"cached/other.css", content)
             self.assertIn(b"other.d41d8cd98f00.css", content)
+        self.assertPostCondition()
 
     def test_path_with_querystring_and_fragment(self):
         relpath = self.hashed_file_path("cached/css/fragments.css")
-        self.assertEqual(relpath, "cached/css/fragments.59dc2b188043.css")
+        self.assertEqual(relpath, "cached/css/fragments.c4e6753b52d3.css")
         with storage.staticfiles_storage.open(relpath) as relfile:
             content = relfile.read()
             self.assertIn(b'fonts/font.a4b0478549d0.eot?#iefix', content)
@@ -91,60 +106,79 @@ class TestHashedFiles(object):
             self.assertIn(b'fonts/font.b8d603e42714.svg#path/to/../../fonts/font.svg', content)
             self.assertIn(b'data:font/woff;charset=utf-8;base64,d09GRgABAAAAADJoAA0AAAAAR2QAAQAAAAAAAAAAAAA', content)
             self.assertIn(b'#default#VML', content)
+        self.assertPostCondition()
 
     def test_template_tag_absolute(self):
         relpath = self.hashed_file_path("cached/absolute.css")
-        self.assertEqual(relpath, "cached/absolute.df312c6326e1.css")
+        self.assertEqual(relpath, "cached/absolute.eb04def9f9a4.css")
         with storage.staticfiles_storage.open(relpath) as relfile:
             content = relfile.read()
             self.assertNotIn(b"/static/cached/styles.css", content)
-            self.assertIn(b"/static/cached/styles.bb84a0240107.css", content)
+            self.assertIn(b"/static/cached/styles.5e0040571e1a.css", content)
             self.assertNotIn(b"/static/styles_root.css", content)
             self.assertIn(b"/static/styles_root.401f2509a628.css", content)
             self.assertIn(b'/static/cached/img/relative.acae32e4532b.png', content)
+        self.assertPostCondition()
 
     def test_template_tag_absolute_root(self):
         """
        Like test_template_tag_absolute, but for a file in STATIC_ROOT (#26249).
         """
         relpath = self.hashed_file_path("absolute_root.css")
-        self.assertEqual(relpath, "absolute_root.f864a4d7f083.css")
+        self.assertEqual(relpath, "absolute_root.f821df1b64f7.css")
         with storage.staticfiles_storage.open(relpath) as relfile:
             content = relfile.read()
             self.assertNotIn(b"/static/styles_root.css", content)
             self.assertIn(b"/static/styles_root.401f2509a628.css", content)
+        self.assertPostCondition()
 
     def test_template_tag_relative(self):
         relpath = self.hashed_file_path("cached/relative.css")
-        self.assertEqual(relpath, "cached/relative.b0375bd89156.css")
+        self.assertEqual(relpath, "cached/relative.c3e9e1ea6f2e.css")
         with storage.staticfiles_storage.open(relpath) as relfile:
             content = relfile.read()
             self.assertNotIn(b"../cached/styles.css", content)
             self.assertNotIn(b'@import "styles.css"', content)
             self.assertNotIn(b'url(img/relative.png)', content)
             self.assertIn(b'url("img/relative.acae32e4532b.png")', content)
-            self.assertIn(b"../cached/styles.bb84a0240107.css", content)
+            self.assertIn(b"../cached/styles.5e0040571e1a.css", content)
+        self.assertPostCondition()
 
     def test_import_replacement(self):
         "See #18050"
         relpath = self.hashed_file_path("cached/import.css")
-        self.assertEqual(relpath, "cached/import.2b1d40b0bbd4.css")
+        self.assertEqual(relpath, "cached/import.f53576679e5a.css")
         with storage.staticfiles_storage.open(relpath) as relfile:
-            self.assertIn(b"""import url("styles.bb84a0240107.css")""", relfile.read())
+            self.assertIn(b"""import url("styles.5e0040571e1a.css")""", relfile.read())
+        self.assertPostCondition()
 
     def test_template_tag_deep_relative(self):
         relpath = self.hashed_file_path("cached/css/window.css")
-        self.assertEqual(relpath, "cached/css/window.3906afbb5a17.css")
+        self.assertEqual(relpath, "cached/css/window.5d5c10836967.css")
         with storage.staticfiles_storage.open(relpath) as relfile:
             content = relfile.read()
             self.assertNotIn(b'url(img/window.png)', content)
             self.assertIn(b'url("img/window.acae32e4532b.png")', content)
+        self.assertPostCondition()
 
     def test_template_tag_url(self):
         relpath = self.hashed_file_path("cached/url.css")
         self.assertEqual(relpath, "cached/url.902310b73412.css")
         with storage.staticfiles_storage.open(relpath) as relfile:
             self.assertIn(b"https://", relfile.read())
+        self.assertPostCondition()
+
+    @override_settings(
+        STATICFILES_DIRS=[os.path.join(TEST_ROOT, 'project', 'loop')],
+        STATICFILES_FINDERS=['django.contrib.staticfiles.finders.FileSystemFinder'],
+    )
+    def test_import_loop(self):
+        finders.get_finder.cache_clear()
+        err = six.StringIO()
+        with self.assertRaisesMessage(RuntimeError, 'Max post-process passes exceeded'):
+            call_command('collectstatic', interactive=False, verbosity=0, stderr=err)
+        self.assertEqual("Post-processing 'All' failed!\n\n", err.getvalue())
+        self.assertPostCondition()
 
     def test_post_processing(self):
         """
@@ -173,14 +207,16 @@ class TestHashedFiles(object):
         self.assertIn(os.path.join('cached', 'css', 'window.css'), stats['post_processed'])
         self.assertIn(os.path.join('cached', 'css', 'img', 'window.png'), stats['unmodified'])
         self.assertIn(os.path.join('test', 'nonascii.css'), stats['post_processed'])
+        self.assertPostCondition()
 
     def test_css_import_case_insensitive(self):
         relpath = self.hashed_file_path("cached/styles_insensitive.css")
-        self.assertEqual(relpath, "cached/styles_insensitive.c609562b6d3c.css")
+        self.assertEqual(relpath, "cached/styles_insensitive.3fa427592a53.css")
         with storage.staticfiles_storage.open(relpath) as relfile:
             content = relfile.read()
             self.assertNotIn(b"cached/other.css", content)
             self.assertIn(b"other.d41d8cd98f00.css", content)
+        self.assertPostCondition()
 
     @override_settings(
         STATICFILES_DIRS=[os.path.join(TEST_ROOT, 'project', 'faulty')],
@@ -195,6 +231,7 @@ class TestHashedFiles(object):
         with self.assertRaises(Exception):
             call_command('collectstatic', interactive=False, verbosity=0, stderr=err)
         self.assertEqual("Post-processing 'faulty.css' failed!\n\n", err.getvalue())
+        self.assertPostCondition()
 
 
 @override_settings(
@@ -206,7 +243,7 @@ class TestCollectionCachedStorage(TestHashedFiles, CollectionTestCase):
     """
     def test_cache_invalidation(self):
         name = "cached/styles.css"
-        hashed_name = "cached/styles.bb84a0240107.css"
+        hashed_name = "cached/styles.5e0040571e1a.css"
         # check if the cache is filled correctly as expected
         cache_key = storage.staticfiles_storage.hash_key(name)
         cached_name = storage.staticfiles_storage.hashed_files.get(cache_key)
@@ -219,6 +256,17 @@ class TestCollectionCachedStorage(TestHashedFiles, CollectionTestCase):
         cached_name = storage.staticfiles_storage.hashed_files.get(cache_key)
         self.assertEqual(cached_name, hashed_name)
 
+        # Check files that had to be hashed multiple times since their content
+        # includes other files that were hashed.
+        name = 'cached/relative.css'
+        hashed_name = 'cached/relative.c3e9e1ea6f2e.css'
+        cache_key = storage.staticfiles_storage.hash_key(name)
+        cached_name = storage.staticfiles_storage.hashed_files.get(cache_key)
+        self.assertIsNone(cached_name)
+        self.assertEqual(self.hashed_file_path(name), hashed_name)
+        cached_name = storage.staticfiles_storage.hashed_files.get(cache_key)
+        self.assertEqual(cached_name, hashed_name)
+
     def test_cache_key_memcache_validation(self):
         """
         Handle cache key creation correctly, see #17861.
@@ -236,6 +284,22 @@ class TestCollectionCachedStorage(TestHashedFiles, CollectionTestCase):
         cache_validator.validate_key(cache_key)
         self.assertEqual(cache_key, 'staticfiles:821ea71ef36f95b3922a77f7364670e7')
 
+    def test_corrupt_intermediate_files(self):
+        configured_storage = storage.staticfiles_storage
+        # Clear cache to force rehashing of the files
+        configured_storage.hashed_files.clear()
+        # Simulate a corrupt chain of intermediate files by ensuring they don't
+        # resolve before the max post-process count, which would normally be
+        # high enough.
+        configured_storage.max_post_process_passes = 1
+        # File without intermediates that can be rehashed without a problem.
+        self.hashed_file_path('cached/css/img/window.png')
+        # File with too many intermediates to rehash with the low max
+        # post-process passes.
+        err_msg = "The name 'cached/styles.css' could not be hashed with %r." % (configured_storage._wrapped,)
+        with self.assertRaisesMessage(ValueError, err_msg):
+            self.hashed_file_path('cached/styles.css')
+
 
 @override_settings(
     STATICFILES_STORAGE='staticfiles_tests.storage.ExtraPatternsCachedStaticFilesStorage',
@@ -258,15 +322,15 @@ class TestExtraPatternsCachedStorage(CollectionTestCase):
         """
         # CSS files shouldn't be touched by JS patterns.
         relpath = self.cached_file_path("cached/import.css")
-        self.assertEqual(relpath, "cached/import.2b1d40b0bbd4.css")
+        self.assertEqual(relpath, "cached/import.f53576679e5a.css")
         with storage.staticfiles_storage.open(relpath) as relfile:
-            self.assertIn(b'import url("styles.bb84a0240107.css")', relfile.read())
+            self.assertIn(b'import url("styles.5e0040571e1a.css")', relfile.read())
 
         # Confirm JS patterns have been applied to JS files.
         relpath = self.cached_file_path("cached/test.js")
-        self.assertEqual(relpath, "cached/test.62789ffcd280.js")
+        self.assertEqual(relpath, "cached/test.388d7a790d46.js")
         with storage.staticfiles_storage.open(relpath) as relfile:
-            self.assertIn(b'JS_URL("import.2b1d40b0bbd4.css")', relfile.read())
+            self.assertIn(b'JS_URL("import.f53576679e5a.css")', relfile.read())
 
 
 @override_settings(
@@ -289,6 +353,7 @@ class TestCollectionManifestStorage(TestHashedFiles, CollectionTestCase):
             STATICFILES_DIRS=settings.STATICFILES_DIRS + [temp_dir])
         self.patched_settings.enable()
         self.addCleanup(shutil.rmtree, six.text_type(temp_dir))
+        self._manifest_strict = storage.staticfiles_storage.manifest_strict
 
     def tearDown(self):
         self.patched_settings.disable()
@@ -296,8 +361,17 @@ class TestCollectionManifestStorage(TestHashedFiles, CollectionTestCase):
         if os.path.exists(self._clear_filename):
             os.unlink(self._clear_filename)
 
+        storage.staticfiles_storage.manifest_strict = self._manifest_strict
         super(TestCollectionManifestStorage, self).tearDown()
 
+    def assertPostCondition(self):
+        hashed_files = storage.staticfiles_storage.hashed_files
+        # The in-memory version of the manifest matches the one on disk
+        # since a properly created manifest should cover all filenames.
+        if hashed_files:
+            manifest = storage.staticfiles_storage.load_manifest()
+            self.assertEqual(hashed_files, manifest)
+
     def test_manifest_exists(self):
         filename = storage.staticfiles_storage.manifest_name
         path = storage.staticfiles_storage.path(filename)
@@ -317,7 +391,7 @@ class TestCollectionManifestStorage(TestHashedFiles, CollectionTestCase):
         self.assertEqual(hashed_files, manifest)
 
     def test_clear_empties_manifest(self):
-        cleared_file_name = os.path.join('test', 'cleared.txt')
+        cleared_file_name = storage.staticfiles_storage.clean_name(os.path.join('test', 'cleared.txt'))
         # collect the additional file
         self.run_collectstatic()
@@ -342,6 +416,27 @@ class TestCollectionManifestStorage(TestHashedFiles, CollectionTestCase):
         manifest_content = storage.staticfiles_storage.load_manifest()
         self.assertNotIn(cleared_file_name, manifest_content)
 
+    def test_missing_entry(self):
+        missing_file_name = 'cached/missing.css'
+        configured_storage = storage.staticfiles_storage
+        self.assertNotIn(missing_file_name, configured_storage.hashed_files)
+
+        # File name not found in manifest
+        with self.assertRaisesMessage(ValueError, "Missing staticfiles manifest entry for '%s'" % missing_file_name):
+            self.hashed_file_path(missing_file_name)
+
+        configured_storage.manifest_strict = False
+        # File doesn't exist on disk
+        err_msg = "The file '%s' could not be found with %r." % (missing_file_name, configured_storage._wrapped)
+        with self.assertRaisesMessage(ValueError, err_msg):
+            self.hashed_file_path(missing_file_name)
+
+        content = six.StringIO()
+        content.write('Found')
+        configured_storage.save(missing_file_name, content)
+        # File exists on disk
+        self.hashed_file_path(missing_file_name)
+
 
 @override_settings(
     STATICFILES_STORAGE='staticfiles_tests.storage.SimpleCachedStaticFilesStorage',
@@ -446,3 +541,45 @@ class TestStaticFilePermissions(CollectionTestCase):
             dir_mode = os.stat(test_dir)[0] & 0o777
             self.assertEqual(file_mode, 0o640)
             self.assertEqual(dir_mode, 0o740)
+
+
+@override_settings(
+    STATICFILES_STORAGE='django.contrib.staticfiles.storage.CachedStaticFilesStorage',
+)
+class TestCollectionHashedFilesCache(CollectionTestCase):
+    """
+    Files referenced from CSS use the correct final hashed name regardless of
+    the order in which the files are post-processed.
+    """
+    hashed_file_path = hashed_file_path
+
+    def setUp(self):
+        self.testimage_path = os.path.join(
+            TEST_ROOT, 'project', 'documents', 'cached', 'css', 'img', 'window.png'
+        )
+        with open(self.testimage_path, 'r+b') as f:
+            self._orig_image_content = f.read()
+        super(TestCollectionHashedFilesCache, self).setUp()
+
+    def tearDown(self):
+        with open(self.testimage_path, 'w+b') as f:
+            f.write(self._orig_image_content)
+        super(TestCollectionHashedFilesCache, self).tearDown()
+
+    def test_file_change_after_collectstatic(self):
+        finders.get_finder.cache_clear()
+        err = six.StringIO()
+        call_command('collectstatic', interactive=False, verbosity=0, stderr=err)
+        with open(self.testimage_path, 'w+b') as f:
+            f.write(b"new content of png file to change it's hash")
+
+        # Change modification time of self.testimage_path to make sure it gets
+        # collected again.
+        mtime = os.path.getmtime(self.testimage_path)
+        atime = os.path.getatime(self.testimage_path)
+        os.utime(self.testimage_path, (mtime + 1, atime + 1))
+
+        call_command('collectstatic', interactive=False, verbosity=0, stderr=err)
+        relpath = self.hashed_file_path('cached/css/window.css')
+        with storage.staticfiles_storage.open(relpath) as relfile:
+            self.assertIn(b'window.a836fe39729e.png', relfile.read())