Minor bug fixes in the staticfiles app, following upstream development in django-staticfiles.

- Create the files to be ignored during the tests dynamically (.hidden and backup~)
- Refactored the post_process method of the CachedFilesMixin storage mixin to be less time-consuming (see the sketch below).
- Refactored the handling of URL fragments in the post_process machinery.
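For context, a minimal sketch of the new post-processing contract (names are illustrative, not the verbatim implementation): post_process is now a generator that yields one (original_path, processed_path, processed) tuple per file instead of returning a flat list, which lets collectstatic count unmodified and post-processed files separately.

    # assuming found_files maps prefixed paths to open file objects,
    # as built by collectstatic in the diff below:
    processor = storage.post_process(found_files, dry_run=False)
    for original_path, processed_path, processed in processor:
        if processed:
            print "Post-processed '%s' as '%s'" % (original_path, processed_path)
        else:
            print "Skipped post-processing '%s'" % original_path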

git-svn-id: http://code.djangoproject.com/svn/django/trunk@17519 bcc190cf-cafb-0310-a4f2-bffc1f526a37
Jannis Leidel 2012-02-13 10:51:17 +00:00
parent 2df1847c9b
commit 4f1ac8f5f1
5 changed files with 189 additions and 87 deletions

View File

@@ -7,13 +7,14 @@ from optparse import make_option
from django.core.files.storage import FileSystemStorage
from django.core.management.base import CommandError, NoArgsCommand
from django.utils.encoding import smart_str, smart_unicode
from django.utils.datastructures import SortedDict
from django.contrib.staticfiles import finders, storage
class Command(NoArgsCommand):
"""
Command that copies or symlinks media files from different
Command that copies or symlinks static files from different
locations to the settings.STATIC_ROOT.
"""
option_list = NoArgsCommand.option_list + (
@@ -50,6 +51,7 @@ class Command(NoArgsCommand):
self.copied_files = []
self.symlinked_files = []
self.unmodified_files = []
self.post_processed_files = []
self.storage = storage.staticfiles_storage
try:
self.storage.path('')
@@ -61,18 +63,27 @@ class Command(NoArgsCommand):
if hasattr(os, 'stat_float_times'):
os.stat_float_times(False)
def handle_noargs(self, **options):
def set_options(self, **options):
"""
Set instance variables based on an options dict
"""
self.interactive = options['interactive']
self.verbosity = int(options.get('verbosity', 1))
self.symlink = options['link']
self.clear = options['clear']
self.dry_run = options['dry_run']
ignore_patterns = options['ignore_patterns']
if options['use_default_ignore_patterns']:
ignore_patterns += ['CVS', '.*', '*~']
self.ignore_patterns = list(set(ignore_patterns))
self.interactive = options['interactive']
self.symlink = options['link']
self.verbosity = int(options.get('verbosity', 1))
self.post_process = options['post_process']
def collect(self):
"""
Perform the bulk of the work of collectstatic.
Split off from handle_noargs() to facilitate testing.
"""
if self.symlink:
if sys.platform == 'win32':
raise CommandError("Symlinking is not supported by this "
@@ -80,6 +91,46 @@ class Command(NoArgsCommand):
if not self.local:
raise CommandError("Can't symlink to a remote destination.")
if self.clear:
self.clear_dir('')
if self.symlink:
handler = self.link_file
else:
handler = self.copy_file
found_files = SortedDict()
for finder in finders.get_finders():
for path, storage in finder.list(self.ignore_patterns):
# Prefix the relative path if the source storage contains it
if getattr(storage, 'prefix', None):
prefixed_path = os.path.join(storage.prefix, path)
else:
prefixed_path = path
found_files[prefixed_path] = storage.open(path)
handler(path, prefixed_path, storage)
# Here we check if the storage backend has a post_process
# method and pass it the list of modified files.
if self.post_process and hasattr(self.storage, 'post_process'):
processor = self.storage.post_process(found_files,
dry_run=self.dry_run)
for original_path, processed_path, processed in processor:
if processed:
self.log(u"Post-processed '%s' as '%s" %
(original_path, processed_path), level=1)
self.post_processed_files.append(original_path)
else:
self.log(u"Skipped post-processing '%s'" % original_path)
return {
'modified': self.copied_files + self.symlinked_files,
'unmodified': self.unmodified_files,
'post_processed': self.post_processed_files,
}
def handle_noargs(self, **options):
self.set_options(**options)
# Warn before doing anything more.
if (isinstance(self.storage, FileSystemStorage) and
self.storage.location):
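Any storage backend can opt in to the post-processing hook simply by defining a post_process method; duck typing via hasattr() is all that is checked. A minimal, hypothetical backend that satisfies the generator protocol without rewriting anything:

    from django.contrib.staticfiles.storage import StaticFilesStorage

    class NoopPostProcessingStorage(StaticFilesStorage):
        # hypothetical example, not part of this commit
        def post_process(self, paths, dry_run=False, **options):
            if dry_run:
                return
            for name in paths:
                # (original path, processed path, whether it was processed)
                yield name, name, False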
@@ -107,49 +158,25 @@ Type 'yes' to continue, or 'no' to cancel: """
if confirm != 'yes':
raise CommandError("Collecting static files cancelled.")
if self.clear:
self.clear_dir('')
handler = {
True: self.link_file,
False: self.copy_file,
}[self.symlink]
found_files = []
for finder in finders.get_finders():
for path, storage in finder.list(self.ignore_patterns):
# Prefix the relative path if the source storage contains it
if getattr(storage, 'prefix', None):
prefixed_path = os.path.join(storage.prefix, path)
else:
prefixed_path = path
found_files.append(prefixed_path)
handler(path, prefixed_path, storage)
# Here we check if the storage backend has a post_process
# method and pass it the list of modified files.
if self.post_process and hasattr(self.storage, 'post_process'):
post_processed = self.storage.post_process(found_files, **options)
for path in post_processed:
self.log(u"Post-processed '%s'" % path, level=1)
else:
post_processed = []
modified_files = self.copied_files + self.symlinked_files
actual_count = len(modified_files)
unmodified_count = len(self.unmodified_files)
collected = self.collect()
modified_count = len(collected['modified'])
unmodified_count = len(collected['unmodified'])
post_processed_count = len(collected['post_processed'])
if self.verbosity >= 1:
template = ("\n%(actual_count)s %(identifier)s %(action)s"
"%(destination)s%(unmodified)s.\n")
template = ("\n%(modified_count)s %(identifier)s %(action)s"
"%(destination)s%(unmodified)s%(post_processed)s.\n")
summary = template % {
'actual_count': actual_count,
'identifier': 'static file' + (actual_count > 1 and 's' or ''),
'modified_count': modified_count,
'identifier': 'static file' + (modified_count != 1 and 's' or ''),
'action': self.symlink and 'symlinked' or 'copied',
'destination': (destination_path and " to '%s'"
% destination_path or ''),
'unmodified': (self.unmodified_files and ', %s unmodified'
'unmodified': (collected['unmodified'] and ', %s unmodified'
% unmodified_count or ''),
'post_processed': (collected['post_processed'] and
', %s post-processed'
% post_processed_count or ''),
}
self.stdout.write(smart_str(summary))
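With the separate counters, the summary line now reads, for example (illustrative values):

    1 static file copied to '/tmp/static', 5 unmodified, 2 post-processed.

Note that the pluralization check also changed from '> 1' to '!= 1', so a count of zero now renders correctly as "0 static files" rather than "0 static file".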
@@ -180,21 +207,20 @@ Type 'yes' to continue, or 'no' to cancel: """
self.clear_dir(os.path.join(path, d))
def delete_file(self, path, prefixed_path, source_storage):
# Whether we are in symlink mode
# Checks if the target file should be deleted if it already exists
if self.storage.exists(prefixed_path):
try:
# When was the target file modified last time?
target_last_modified = \
self.storage.modified_time(prefixed_path)
except (OSError, NotImplementedError):
except (OSError, NotImplementedError, AttributeError):
# The storage doesn't support ``modified_time`` or the lookup failed
pass
else:
try:
# When was the source file modified last time?
source_last_modified = source_storage.modified_time(path)
except (OSError, NotImplementedError):
except (OSError, NotImplementedError, AttributeError):
pass
else:
# The full path of the target file
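A condensed sketch of the freshness check above, assuming both storages expose modified_time(); the helper name is hypothetical, and the newly caught AttributeError now also counts as "can't tell":

    def target_is_up_to_date(target_storage, source_storage, prefixed_path, path):
        try:
            target_mtime = target_storage.modified_time(prefixed_path)
            source_mtime = source_storage.modified_time(path)
        except (OSError, NotImplementedError, AttributeError):
            return False  # mtimes unavailable; copy the file again to be safe
        return target_mtime >= source_mtime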

View File

@@ -4,7 +4,7 @@ import os
import posixpath
import re
from urllib import unquote
from urlparse import urlsplit, urlunsplit
from urlparse import urlsplit, urlunsplit, urldefrag
from django.conf import settings
from django.core.cache import (get_cache, InvalidCacheBackendError,
@@ -12,10 +12,10 @@ from django.core.cache import (get_cache, InvalidCacheBackendError,
from django.core.exceptions import ImproperlyConfigured
from django.core.files.base import ContentFile
from django.core.files.storage import FileSystemStorage, get_storage_class
from django.utils.datastructures import SortedDict
from django.utils.encoding import force_unicode, smart_str
from django.utils.functional import LazyObject
from django.utils.importlib import import_module
from django.utils.datastructures import SortedDict
from django.contrib.staticfiles.utils import check_settings, matches_patterns
@@ -75,7 +75,7 @@ class CachedFilesMixin(object):
try:
content = self.open(clean_name)
except IOError:
# Handle directory paths
# Handle directory paths and fragments
return name
path, filename = os.path.split(clean_name)
root, ext = os.path.splitext(filename)
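The IOError fallthrough means names that cannot be opened are returned unhashed; a hypothetical illustration (the hash below is the one used in the test fixtures):

    # a bare directory path can't be opened, so it passes through unchanged:
    storage.hashed_name('cached/')            # -> u'cached/'
    # regular files get a content hash spliced in before the extension:
    storage.hashed_name('cached/styles.css')  # -> u'cached/styles.93b1147e8552.css'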
@@ -102,16 +102,31 @@ class CachedFilesMixin(object):
Returns the real URL in DEBUG mode.
"""
if settings.DEBUG and not force:
hashed_name = name
hashed_name, fragment = name, ''
else:
clean_name, fragment = urldefrag(name)
cache_key = self.cache_key(name)
hashed_name = self.cache.get(cache_key)
if hashed_name is None:
hashed_name = self.hashed_name(name).replace('\\', '/')
hashed_name = self.hashed_name(clean_name).replace('\\', '/')
# set the cache if there was a miss
# (e.g. if cache server goes down)
self.cache.set(cache_key, hashed_name)
return unquote(super(CachedFilesMixin, self).url(hashed_name))
final_url = super(CachedFilesMixin, self).url(hashed_name)
# Special casing for a @font-face hack, like url(myfont.eot?#iefix)
# http://www.fontspring.com/blog/the-new-bulletproof-font-face-syntax
query_fragment = '?#' in name # [sic!]
if fragment or query_fragment:
urlparts = list(urlsplit(final_url))
if fragment and not urlparts[4]:
urlparts[4] = fragment
if query_fragment and not urlparts[3]:
urlparts[2] += '?'
final_url = urlunsplit(urlparts)
return unquote(final_url)
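To make the new fragment handling concrete, a sketch assuming hashed copies of the files exist (the font hash is made up; the CSS one comes from the tests below):

    from urlparse import urldefrag

    urldefrag('cached/absolute.css#eggs')
    # -> ('cached/absolute.css', 'eggs'); url() hashes the clean name and
    # re-attaches the fragment: '/static/cached/absolute.cc80cb5e2eb1.css#eggs'

    # the '?#' in url(myfont.eot?#iefix) survives as well, keeping the IE
    # @font-face hack intact: '/static/myfont.0f1e2d3c4b5a.eot?#iefix'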
def url_converter(self, name):
"""
@@ -124,8 +139,9 @@ class CachedFilesMixin(object):
of the storage.
"""
matched, url = matchobj.groups()
# Completely ignore http(s) prefixed URLs
if url.startswith(('#', 'http', 'https', 'data:')):
# Completely ignore http(s) prefixed URLs,
# fragments and data-uri URLs
if url.startswith(('#', 'http:', 'https:', 'data:')):
return matched
name_parts = name.split(os.sep)
# Using posix normpath here to remove duplicates
@@ -146,6 +162,7 @@ class CachedFilesMixin(object):
start, end = 1, sub_level - 1
joined_result = '/'.join(name_parts[:-start] + url_parts[end:])
hashed_url = self.url(unquote(joined_result), force=True)
# Return the hashed and normalized URL to the file
return 'url("%s")' % unquote(hashed_url)
return converter
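In CSS terms, the converter rewrites relative and static-prefixed references while leaving fragments, data URIs and http(s) URLs alone; for example (hashes taken from the test fixtures below):

    # rewritten:
    #   url(img/relative.png)  -> url("/static/cached/img/relative.acae32e4532b.png")
    #   @import "styles.css"   -> "/static/cached/styles.93b1147e8552.css"
    # left untouched:
    #   url(#fragment), url(data:image/png;base64,...), url(http://example.com/x.png)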
@@ -153,50 +170,72 @@ class CachedFilesMixin(object):
def post_process(self, paths, dry_run=False, **options):
"""
Post process the given list of files (called from collectstatic).
Processing is actually two separate operations:
1. renaming files to include a hash of their content for cache-busting,
and copying those files to the target storage.
2. adjusting files which contain references to other files so they
refer to the cache-busting filenames.
If either of these is performed on a file, then that file is considered
post-processed.
"""
processed_files = []
# don't even dare to process the files if we're in dry run mode
if dry_run:
return processed_files
return
# delete cache of all handled paths
self.cache.delete_many([self.cache_key(path) for path in paths])
# only try processing the files we have patterns for
# build a list of adjustable files
matches = lambda path: matches_patterns(path, self._patterns.keys())
processing_paths = [path for path in paths if matches(path)]
adjustable_paths = [path for path in paths if matches(path)]
# then sort the files by the directory level
path_level = lambda name: len(name.split(os.sep))
for name in sorted(paths, key=path_level, reverse=True):
for name in sorted(paths.keys(), key=path_level, reverse=True):
# first get a hashed name for the given file
hashed_name = self.hashed_name(name)
# use the original, local file, not the copied-but-unprocessed
# file, which might be somewhere far away, like S3
with paths[name] as original_file:
with self.open(name) as original_file:
# then get the original's file content
content = original_file.read()
# generate the hash with the original content, even for
# adjustable files.
hashed_name = self.hashed_name(name, original_file)
# to apply each replacement pattern on the content
if name in processing_paths:
# then get the original's file content..
if hasattr(original_file, 'seek'):
original_file.seek(0)
hashed_file_exists = self.exists(hashed_name)
processed = False
# ..to apply each replacement pattern to the content
if name in adjustable_paths:
content = original_file.read()
converter = self.url_converter(name)
for patterns in self._patterns.values():
for pattern in patterns:
content = pattern.sub(converter, content)
# then save the processed result
if self.exists(hashed_name):
self.delete(hashed_name)
content_file = ContentFile(smart_str(content))
saved_name = self._save(hashed_name, content_file)
hashed_name = force_unicode(saved_name.replace('\\', '/'))
processed_files.append(hashed_name)
if hashed_file_exists:
self.delete(hashed_name)
# then save the processed result
content_file = ContentFile(smart_str(content))
saved_name = self._save(hashed_name, content_file)
hashed_name = force_unicode(saved_name.replace('\\', '/'))
processed = True
else:
# or handle the case in which neither processing nor
# a change to the original file happened
if not hashed_file_exists:
processed = True
saved_name = self._save(hashed_name, original_file)
hashed_name = force_unicode(saved_name.replace('\\', '/'))
# and then set the cache accordingly
self.cache.set(self.cache_key(name), hashed_name)
return processed_files
yield name, hashed_name, processed
class CachedStaticFilesStorage(CachedFilesMixin, StaticFilesStorage):
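For reference, a rough sketch of how the cache-busting name is derived; CachedFilesMixin uses an MD5 content hash truncated to 12 hex digits (this sketch glosses over chunked reading and storage access):

    import hashlib
    import os

    def sketch_hashed_name(name, content):
        # e.g. 'cached/styles.css' -> 'cached/styles.93b1147e8552.css'
        root, ext = os.path.splitext(name)
        file_hash = hashlib.md5(content).hexdigest()[:12]
        return u"%s.%s%s" % (root, file_hash, ext)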

View File

@@ -1 +0,0 @@
This file should be ignored.

View File

@@ -1 +0,0 @@
This file should be ignored.

View File

@@ -39,6 +39,7 @@ TEST_SETTINGS = {
'django.contrib.staticfiles.finders.DefaultStorageFinder',
),
}
from django.contrib.staticfiles.management.commands.collectstatic import Command as CollectstaticCommand
class BaseStaticFilesTestCase(object):
@@ -52,13 +53,26 @@ class BaseStaticFilesTestCase(object):
default_storage._wrapped = empty
storage.staticfiles_storage._wrapped = empty
testfiles_path = os.path.join(TEST_ROOT, 'apps', 'test', 'static', 'test')
# To make sure SVN doesn't hang itself with the non-ASCII characters
# during checkout, we actually create one file dynamically.
_nonascii_filepath = os.path.join(
TEST_ROOT, 'apps', 'test', 'static', 'test', u'fi\u015fier.txt')
with codecs.open(_nonascii_filepath, 'w', 'utf-8') as f:
self._nonascii_filepath = os.path.join(testfiles_path, u'fi\u015fier.txt')
with codecs.open(self._nonascii_filepath, 'w', 'utf-8') as f:
f.write(u"fi\u015fier in the app dir")
self.addCleanup(os.unlink, _nonascii_filepath)
# And also create the stupid hidden file to thwart the setup.py's
# package data handling.
self._hidden_filepath = os.path.join(testfiles_path, '.hidden')
with codecs.open(self._hidden_filepath, 'w', 'utf-8') as f:
f.write("should be ignored")
self._backup_filepath = os.path.join(
TEST_ROOT, 'project', 'documents', 'test', 'backup~')
with codecs.open(self._backup_filepath, 'w', 'utf-8') as f:
f.write("should be ignored")
def tearDown(self):
os.unlink(self._nonascii_filepath)
os.unlink(self._hidden_filepath)
os.unlink(self._backup_filepath)
def assertFileContains(self, filepath, text):
self.assertIn(text, self._get_file(smart_unicode(filepath)),
@@ -93,7 +107,7 @@ class BaseCollectionTestCase(BaseStaticFilesTestCase):
Tests shared by all file finding features (collectstatic,
findstatic, and static serve view).
This relies on the asserts defined in UtilityAssertsTestCase, but
This relies on the asserts defined in BaseStaticFilesTestCase, but
is separated because some test cases need those asserts without
all these tests.
"""
@@ -300,7 +314,7 @@ class TestCollectionCachedStorage(BaseCollectionTestCase,
"does/not/exist.png",
"/static/does/not/exist.png")
self.assertStaticRenders("test/file.txt",
"/static/test/file.dad0999e4f8f.txt")
"/static/test/file.ea5bccaf16d5.txt")
self.assertStaticRenders("cached/styles.css",
"/static/cached/styles.93b1147e8552.css")
@@ -362,12 +376,12 @@ class TestCollectionCachedStorage(BaseCollectionTestCase,
self.assertEqual(relpath, "cached/relative.2217ea7273c2.css")
with storage.staticfiles_storage.open(relpath) as relfile:
content = relfile.read()
self.assertIn("/static/cached/styles.93b1147e8552.css", content)
self.assertNotIn("../cached/styles.css", content)
self.assertNotIn('@import "styles.css"', content)
self.assertNotIn('url(img/relative.png)', content)
self.assertIn('url("/static/cached/img/relative.acae32e4532b.png")', content)
self.assertIn("/static/cached/styles.93b1147e8552.css", content)
self.assertNotIn("url(img/relative.png)", content)
self.assertIn("/static/cached/img/relative.acae32e4532b.png", content)
self.assertIn("/static/cached/absolute.cc80cb5e2eb1.css#eggs", content)
def test_template_tag_deep_relative(self):
relpath = self.cached_file_path("cached/css/window.css")
@@ -398,13 +412,38 @@ class TestCollectionCachedStorage(BaseCollectionTestCase,
cached_name = storage.staticfiles_storage.cache.get(cache_key)
self.assertEqual(cached_name, hashed_name)
def test_post_processing(self):
"""Test that post_processing behaves correctly.
Files that are alterable should always be post-processed; files that
aren't should be skipped.
collectstatic has already been called once in setUp() for this testcase,
so this test verifies the behavior of a second run.
"""
collectstatic_args = {
'interactive': False,
'verbosity': '0',
'link': False,
'clear': False,
'dry_run': False,
'post_process': True,
'use_default_ignore_patterns': True,
'ignore_patterns': ['*.ignoreme'],
}
collectstatic_cmd = CollectstaticCommand()
collectstatic_cmd.set_options(**collectstatic_args)
stats = collectstatic_cmd.collect()
self.assertTrue(u'cached/css/window.css' in stats['post_processed'])
self.assertTrue(u'cached/css/img/window.png' in stats['unmodified'])
# we set DEBUG to False here since the template tag wouldn't work otherwise
TestCollectionCachedStorage = override_settings(**dict(TEST_SETTINGS,
STATICFILES_STORAGE='django.contrib.staticfiles.storage.CachedStaticFilesStorage',
DEBUG=False,
))(TestCollectionCachedStorage)
if sys.platform != 'win32':
class TestCollectionLinks(CollectionTestCase, TestDefaults):