Fixed E127 pep8 warnings.
parent d599b590eb
commit 6685713869
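For context, pep8/flake8 code E127 means "continuation line over-indented for visual indent": a wrapped line is pushed past the column of the opening bracket it should line up with. A minimal, hypothetical Python illustration of the kind of rewrite applied throughout this commit follows; the helper and its arguments are made up for the example and do not appear in the diff.

def describe(city, country):
    # Toy helper, only here to demonstrate the lint rule.
    return "%s, %s" % (city, country)

# Flagged as E127: the second argument is indented past the column of the
# first argument that follows the opening parenthesis.
label = describe("Reykjavik",
                       "Iceland")

# The fixes in this commit take one of two shapes: join the call onto a
# single line, or break after the opening bracket and close it on its own line.
label = describe("Reykjavik", "Iceland")
label = describe(
    "Reykjavik",
    "Iceland",
)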
@@ -179,9 +179,9 @@ class RelatedFieldListFilter(FieldListFilter):
 self.title = self.lookup_title

 def has_output(self):
-if (isinstance(self.field, models.related.RelatedObject)
-and self.field.field.null or hasattr(self.field, 'rel')
-and self.field.null):
+if (isinstance(self.field, models.related.RelatedObject) and
+self.field.field.null or hasattr(self.field, 'rel') and
+self.field.null):
 extra = 1
 else:
 extra = 0
@@ -206,9 +206,9 @@ class RelatedFieldListFilter(FieldListFilter):
 }, [self.lookup_kwarg_isnull]),
 'display': val,
 }
-if (isinstance(self.field, models.related.RelatedObject)
-and self.field.field.null or hasattr(self.field, 'rel')
-and self.field.null):
+if (isinstance(self.field, models.related.RelatedObject) and
+self.field.field.null or hasattr(self.field, 'rel') and
+self.field.null):
 yield {
 'selected': bool(self.lookup_val_isnull),
 'query_string': cl.get_query_string({
@@ -32,8 +32,7 @@ def submit_row(context):
 save_as = context['save_as']
 ctx = {
 'opts': opts,
-'show_delete_link': (not is_popup and context['has_delete_permission']
-and change and context.get('show_delete', True)),
+'show_delete_link': not is_popup and context['has_delete_permission'] and change and context.get('show_delete', True),
 'show_save_as_new': not is_popup and change and save_as,
 'show_save_and_add_another': context['has_add_permission'] and not is_popup and (not save_as or context['add']),
 'show_save_and_continue': not is_popup and context['has_change_permission'],
@@ -153,10 +153,13 @@ class ForeignKeyRawIdWidget(forms.TextInput):
 extra = []
 if rel_to in self.admin_site._registry:
 # The related object is registered with the same AdminSite
-related_url = reverse('admin:%s_%s_changelist' %
-(rel_to._meta.app_label,
-rel_to._meta.model_name),
-current_app=self.admin_site.name)
+related_url = reverse(
+'admin:%s_%s_changelist' % (
+rel_to._meta.app_label,
+rel_to._meta.model_name,
+),
+current_app=self.admin_site.name,
+)

 params = self.url_parameters()
 if params:
@@ -167,10 +170,10 @@ class ForeignKeyRawIdWidget(forms.TextInput):
 attrs['class'] = 'vForeignKeyRawIdAdminField' # The JavaScript code looks for this hook.
 # TODO: "lookup_id_" is hard-coded here. This should instead use
 # the correct API to determine the ID dynamically.
-extra.append('<a href="%s%s" class="related-lookup" id="lookup_id_%s" onclick="return showRelatedObjectLookupPopup(this);"> '
-% (related_url, url, name))
-extra.append('<img src="%s" width="16" height="16" alt="%s" /></a>'
-% (static('admin/img/selector-search.gif'), _('Lookup')))
+extra.append('<a href="%s%s" class="related-lookup" id="lookup_id_%s" onclick="return showRelatedObjectLookupPopup(this);"> ' %
+(related_url, url, name))
+extra.append('<img src="%s" width="16" height="16" alt="%s" /></a>' %
+(static('admin/img/selector-search.gif'), _('Lookup')))
 output = [super(ForeignKeyRawIdWidget, self).render(name, value, attrs)] + extra
 if value:
 output.append(self.label_for_value(value))
@@ -69,9 +69,11 @@ class UserManagerTestCase(TestCase):
 self.assertEqual(returned, 'email\ with_whitespace@d.com')

 def test_empty_username(self):
-self.assertRaisesMessage(ValueError,
+self.assertRaisesMessage(
+ValueError,
 'The given username must be set',
-User.objects.create_user, username='')
+User.objects.create_user, username=''
+)


 class AbstractUserTestCase(TestCase):
@@ -48,9 +48,11 @@ if lib_names:

 # No GEOS library could be found.
 if lib_path is None:
-raise ImportError('Could not find the GEOS library (tried "%s"). '
+raise ImportError(
+'Could not find the GEOS library (tried "%s"). '
 'Try setting GEOS_LIBRARY_PATH in your settings.' %
-'", "'.join(lib_names))
+'", "'.join(lib_names)
+)

 # Getting the GEOS C library. The C interface (CDLL) is used for
 # both *NIX and Windows.
@@ -164,8 +164,7 @@ class BaseTests(object):
 response = self.client.post(add_url, data, follow=True)
 self.assertRedirects(response, show_url)
 self.assertTrue('messages' in response.context)
-messages = [Message(self.levels[level], msg) for msg in
-data['messages']]
+messages = [Message(self.levels[level], msg) for msg in data['messages']]
 self.assertEqual(list(response.context['messages']), messages)
 for msg in data['messages']:
 self.assertContains(response, msg)
@@ -209,8 +208,7 @@ class BaseTests(object):
 show_url = reverse('django.contrib.messages.tests.urls.show')
 messages = []
 for level in ('debug', 'info', 'success', 'warning', 'error'):
-messages.extend([Message(self.levels[level], msg) for msg in
-data['messages']])
+messages.extend([Message(self.levels[level], msg) for msg in data['messages']])
 add_url = reverse('django.contrib.messages.tests.urls.add',
 args=(level,))
 self.client.post(add_url, data)
@@ -68,8 +68,8 @@ def file_move_safe(old_file_name, new_file_name, chunk_size=1024 * 64, allow_ove
 # first open the old file, so that it won't go away
 with open(old_file_name, 'rb') as old_file:
 # now open the new file, not forgetting allow_overwrite
-fd = os.open(new_file_name, os.O_WRONLY | os.O_CREAT | getattr(os, 'O_BINARY', 0) |
-(os.O_EXCL if not allow_overwrite else 0))
+fd = os.open(new_file_name, (os.O_WRONLY | os.O_CREAT | getattr(os, 'O_BINARY', 0) |
+(os.O_EXCL if not allow_overwrite else 0)))
 try:
 locks.lock(fd, locks.LOCK_EX)
 current_chunk = None
@@ -118,8 +118,7 @@ def get_commands():
 for app_name in apps:
 try:
 path = find_management_module(app_name)
-_commands.update(dict((name, app_name)
-for name in find_commands(path)))
+_commands.update(dict((name, app_name) for name in find_commands(path)))
 except ImportError:
 pass # No management module - ignore this app

@@ -131,8 +131,7 @@ def get_ns_resolver(ns_pattern, resolver):
 # Build a namespaced resolver for the given parent urlconf pattern.
 # This makes it possible to have captured parameters in the parent
 # urlconf pattern.
-ns_resolver = RegexURLResolver(ns_pattern,
-resolver.url_patterns)
+ns_resolver = RegexURLResolver(ns_pattern, resolver.url_patterns)
 return RegexURLResolver(r'^/', [ns_resolver])

@@ -390,9 +390,11 @@ WHEN (new.%(col_name)s IS NULL)
 sequence_name = self._get_sequence_name(sequence_info['table'])
 table_name = self.quote_name(sequence_info['table'])
 column_name = self.quote_name(sequence_info['column'] or 'id')
-query = _get_sequence_reset_sql() % {'sequence': sequence_name,
+query = _get_sequence_reset_sql() % {
+'sequence': sequence_name,
 'table': table_name,
-'column': column_name}
+'column': column_name,
+}
 sql.append(query)
 return sql

@@ -880,12 +882,10 @@ class FormatStylePlaceholderCursor(object):
 def fetchmany(self, size=None):
 if size is None:
 size = self.arraysize
-return tuple(_rowfactory(r, self.cursor)
-for r in self.cursor.fetchmany(size))
+return tuple(_rowfactory(r, self.cursor) for r in self.cursor.fetchmany(size))

 def fetchall(self):
-return tuple(_rowfactory(r, self.cursor)
-for r in self.cursor.fetchall())
+return tuple(_rowfactory(r, self.cursor) for r in self.cursor.fetchall())

 def var(self, *args):
 return VariableWrapper(self.cursor.var(*args))
@@ -160,9 +160,11 @@ class ModelBase(type):
 new_class.add_to_class(obj_name, obj)

 # All the fields of any type declared on this model
-new_fields = new_class._meta.local_fields + \
-new_class._meta.local_many_to_many + \
+new_fields = (
+new_class._meta.local_fields +
+new_class._meta.local_many_to_many +
 new_class._meta.virtual_fields
+)
 field_names = set(f.name for f in new_fields)

 # Basic setup for proxy models.
@@ -216,10 +218,11 @@ class ModelBase(type):
 # moment).
 for field in parent_fields:
 if field.name in field_names:
-raise FieldError('Local field %r in class %r clashes '
+raise FieldError(
+'Local field %r in class %r clashes '
 'with field of similar name from '
-'base class %r' %
-(field.name, name, base.__name__))
+'base class %r' % (field.name, name, base.__name__)
+)
 if not base._meta.abstract:
 # Concrete classes...
 base = base._meta.concrete_model
@@ -253,10 +256,11 @@ class ModelBase(type):
 # class
 for field in base._meta.virtual_fields:
 if base._meta.abstract and field.name in field_names:
-raise FieldError('Local field %r in class %r clashes '
+raise FieldError(
+'Local field %r in class %r clashes '
 'with field of similar name from '
-'abstract base class %r' %
-(field.name, name, base.__name__))
+'abstract base class %r' % (field.name, name, base.__name__)
+)
 new_class.add_to_class(field.name, copy.deepcopy(field))

 if abstract:
@@ -227,12 +227,21 @@ class SingleRelatedObjectDescriptor(six.with_metaclass(RenameRelatedObjectDescri
 # If null=True, we can assign null here, but otherwise the value needs
 # to be an instance of the related class.
 if value is None and self.related.field.null is False:
-raise ValueError('Cannot assign None: "%s.%s" does not allow null values.' %
-(instance._meta.object_name, self.related.get_accessor_name()))
+raise ValueError(
+'Cannot assign None: "%s.%s" does not allow null values.' % (
+instance._meta.object_name,
+self.related.get_accessor_name(),
+)
+)
 elif value is not None and not isinstance(value, self.related.model):
-raise ValueError('Cannot assign "%r": "%s.%s" must be a "%s" instance.' %
-(value, instance._meta.object_name,
-self.related.get_accessor_name(), self.related.opts.object_name))
+raise ValueError(
+'Cannot assign "%r": "%s.%s" must be a "%s" instance.' % (
+value,
+instance._meta.object_name,
+self.related.get_accessor_name(),
+self.related.opts.object_name,
+)
+)
 elif value is not None:
 if instance._state.db is None:
 instance._state.db = router.db_for_write(instance.__class__, instance=value)
@@ -244,8 +253,10 @@ class SingleRelatedObjectDescriptor(six.with_metaclass(RenameRelatedObjectDescri

 related_pk = tuple(getattr(instance, field.attname) for field in self.related.field.foreign_related_fields)
 if None in related_pk:
-raise ValueError('Cannot assign "%r": "%s" instance isn\'t saved in the database.' %
-(value, instance._meta.object_name))
+raise ValueError(
+'Cannot assign "%r": "%s" instance isn\'t saved in the database.' %
+(value, instance._meta.object_name)
+)

 # Set the value of the related field to the value of the related object's related field
 for index, field in enumerate(self.related.field.local_related_fields):
@@ -355,12 +366,19 @@ class ReverseSingleRelatedObjectDescriptor(six.with_metaclass(RenameRelatedObjec
 # If null=True, we can assign null here, but otherwise the value needs
 # to be an instance of the related class.
 if value is None and self.field.null is False:
-raise ValueError('Cannot assign None: "%s.%s" does not allow null values.' %
-(instance._meta.object_name, self.field.name))
+raise ValueError(
+'Cannot assign None: "%s.%s" does not allow null values.' %
+(instance._meta.object_name, self.field.name)
+)
 elif value is not None and not isinstance(value, self.field.rel.to):
-raise ValueError('Cannot assign "%r": "%s.%s" must be a "%s" instance.' %
-(value, instance._meta.object_name,
-self.field.name, self.field.rel.to._meta.object_name))
+raise ValueError(
+'Cannot assign "%r": "%s.%s" must be a "%s" instance.' % (
+value,
+instance._meta.object_name,
+self.field.name,
+self.field.rel.to._meta.object_name,
+)
+)
 elif value is not None:
 if instance._state.db is None:
 instance._state.db = router.db_for_write(instance.__class__, instance=value)
@@ -706,7 +724,10 @@ def create_many_related_manager(superclass, rel):
 # from the method lookup table, as we do with add and remove.
 if not self.through._meta.auto_created:
 opts = self.through._meta
-raise AttributeError("Cannot use create() on a ManyToManyField which specifies an intermediary model. Use %s.%s's Manager instead." % (opts.app_label, opts.object_name))
+raise AttributeError(
+"Cannot use create() on a ManyToManyField which specifies an intermediary model. Use %s.%s's Manager instead." %
+(opts.app_label, opts.object_name)
+)
 db = router.db_for_write(self.instance.__class__, instance=self.instance)
 new_obj = super(ManyRelatedManager, self.db_manager(db)).create(**kwargs)
 self.add(new_obj)
@@ -736,16 +757,23 @@ def create_many_related_manager(superclass, rel):
 for obj in objs:
 if isinstance(obj, self.model):
 if not router.allow_relation(obj, self.instance):
-raise ValueError('Cannot add "%r": instance is on database "%s", value is on database "%s"' %
-(obj, self.instance._state.db, obj._state.db))
+raise ValueError(
+'Cannot add "%r": instance is on database "%s", value is on database "%s"' %
+(obj, self.instance._state.db, obj._state.db)
+)
 fk_val = self.through._meta.get_field(
 target_field_name).get_foreign_related_value(obj)[0]
 if fk_val is None:
-raise ValueError('Cannot add "%r": the value for field "%s" is None' %
-(obj, target_field_name))
+raise ValueError(
+'Cannot add "%r": the value for field "%s" is None' %
+(obj, target_field_name)
+)
 new_ids.add(fk_val)
 elif isinstance(obj, Model):
-raise TypeError("'%s' instance expected, got %r" % (self.model._meta.object_name, obj))
+raise TypeError(
+"'%s' instance expected, got %r" %
+(self.model._meta.object_name, obj)
+)
 else:
 new_ids.add(obj)
 db = router.db_for_write(self.through, instance=self.instance)
@@ -276,16 +276,15 @@ class BaseAppCache(object):
 try:
 model_list = self._get_models_cache[cache_key]
 if self.available_apps is not None and only_installed:
-model_list = [m for m in model_list
-if m._meta.app_label in self.available_apps]
+model_list = [m for m in model_list if m._meta.app_label in self.available_apps]
 return model_list
 except KeyError:
 pass
 self._populate()
 if app_mod:
 if app_mod in self.app_store:
-app_list = [self.app_models.get(self._label_for(app_mod),
-ModelDict())]
+app_list = [self.app_models.get(self._label_for(app_mod), ModelDict())]
 else:
 app_list = []
 else:
@@ -304,8 +303,7 @@ class BaseAppCache(object):
 )
 self._get_models_cache[cache_key] = model_list
 if self.available_apps is not None and only_installed:
-model_list = [m for m in model_list
-if m._meta.app_label in self.available_apps]
+model_list = [m for m in model_list if m._meta.app_label in self.available_apps]
 return model_list

 def get_model(self, app_label, model_name,
@@ -150,9 +150,9 @@ class QuerySet(object):
 """
 if not isinstance(k, (slice,) + six.integer_types):
 raise TypeError
-assert ((not isinstance(k, slice) and (k >= 0))
-or (isinstance(k, slice) and (k.start is None or k.start >= 0)
-and (k.stop is None or k.stop >= 0))), \
+assert ((not isinstance(k, slice) and (k >= 0)) or
+(isinstance(k, slice) and (k.start is None or k.start >= 0) and
+(k.stop is None or k.stop >= 0))), \
 "Negative indexing is not supported."

 if self._result_cache is not None:
@@ -240,8 +240,7 @@ class ModelFormMetaclass(DeclarativeFieldsMetaclass):
 def __new__(mcs, name, bases, attrs):
 formfield_callback = attrs.pop('formfield_callback', None)

-new_class = (super(ModelFormMetaclass, mcs)
-.__new__(mcs, name, bases, attrs))
+new_class = super(ModelFormMetaclass, mcs).__new__(mcs, name, bases, attrs)

 if bases == (BaseModelForm,):
 return new_class
@@ -259,9 +259,8 @@ class MultiPartParser(object):
 file_obj = handler.file_complete(counters[i])
 if file_obj:
 # If it returns a file object, then set the files dict.
-self._files.appendlist(force_text(old_field_name,
-self._encoding,
-errors='replace'),
+self._files.appendlist(
+force_text(old_field_name, self._encoding, errors='replace'),
 file_obj)
 break

@@ -92,8 +92,7 @@ class VariableDoesNotExist(Exception):
 self.params = params

 def __str__(self):
-return self.msg % tuple(force_text(p, errors='replace')
-for p in self.params)
+return self.msg % tuple(force_text(p, errors='replace') for p in self.params)


 class InvalidTemplateLibrary(Exception):
@@ -1055,8 +1054,7 @@ class TagHelperNode(Node):
 resolved_args = [var.resolve(context) for var in self.args]
 if self.takes_context:
 resolved_args = [context] + resolved_args
-resolved_kwargs = dict((k, v.resolve(context))
-for k, v in self.kwargs.items())
+resolved_kwargs = dict((k, v.resolve(context)) for k, v in self.kwargs.items())
 return resolved_args, resolved_kwargs

@@ -80,8 +80,7 @@ def format_html(format_string, *args, **kwargs):
 of str.format or % interpolation to build up small HTML fragments.
 """
 args_safe = map(conditional_escape, args)
-kwargs_safe = dict((k, conditional_escape(v)) for (k, v) in
-six.iteritems(kwargs))
+kwargs_safe = dict((k, conditional_escape(v)) for (k, v) in six.iteritems(kwargs))
 return mark_safe(format_string.format(*args_safe, **kwargs_safe))

@@ -17,8 +17,7 @@ def _resolve_name(name, package, level):
 try:
 dot = package.rindex('.', 0, dot)
 except ValueError:
-raise ValueError("attempted relative import beyond top-level "
-"package")
+raise ValueError("attempted relative import beyond top-level package")
 return "%s.%s" % (package[:dot], name)

@@ -4,7 +4,7 @@ install-script = scripts/rpm-install.sh

 [flake8]
 exclude=.git,./django/utils/dictconfig.py,./django/contrib/comments/*,./django/utils/unittest.py,./django/utils/lru_cache.py,./tests/comment_tests/*,./django/test/_doctest.py,./django/utils/six.py,./django/conf/app_template/*
-ignore=E127,E128,E501,W601
+ignore=E128,E501,W601

 [metadata]
 license-file = LICENSE
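With E127 removed from the ignore list above, flake8 now enforces that check across the tree (apart from the excluded paths), while E128 ("continuation line under-indented for visual indent") remains ignored. A hypothetical snippet for contrast, not taken from the diff: the first call would now be reported, the second would still pass under the new configuration.

def join_words(first, second):
    # Toy helper, only here to contrast the two continuation-line codes.
    return first + " " + second

# E127 (enforced after this commit): continuation indented past the visual
# indent column established by the opening parenthesis.
greeting = join_words("hello",
                            "world")

# E128 (still ignored by this configuration): continuation indented short of
# the visual indent column.
greeting = join_words("hello",
    "world")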
@@ -243,9 +243,13 @@ class ListFiltersTests(TestCase):
 self.assertEqual(force_text(filterspec.title), 'date registered')
 choice = select_by(filterspec.choices(changelist), "display", "Today")
 self.assertEqual(choice['selected'], True)
-self.assertEqual(choice['query_string'], '?date_registered__gte=%s'
-'&date_registered__lt=%s'
-% (self.today, self.tomorrow))
+self.assertEqual(
+choice['query_string'],
+'?date_registered__gte=%s&date_registered__lt=%s' % (
+self.today,
+self.tomorrow,
+)
+)

 request = self.request_factory.get('/', {'date_registered__gte': self.today.replace(day=1),
 'date_registered__lt': self.next_month})
@@ -264,9 +268,13 @@ class ListFiltersTests(TestCase):
 self.assertEqual(force_text(filterspec.title), 'date registered')
 choice = select_by(filterspec.choices(changelist), "display", "This month")
 self.assertEqual(choice['selected'], True)
-self.assertEqual(choice['query_string'], '?date_registered__gte=%s'
-'&date_registered__lt=%s'
-% (self.today.replace(day=1), self.next_month))
+self.assertEqual(
+choice['query_string'],
+'?date_registered__gte=%s&date_registered__lt=%s' % (
+self.today.replace(day=1),
+self.next_month,
+)
+)

 request = self.request_factory.get('/', {'date_registered__gte': self.today.replace(month=1, day=1),
 'date_registered__lt': self.next_year})
@@ -285,12 +293,18 @@ class ListFiltersTests(TestCase):
 self.assertEqual(force_text(filterspec.title), 'date registered')
 choice = select_by(filterspec.choices(changelist), "display", "This year")
 self.assertEqual(choice['selected'], True)
-self.assertEqual(choice['query_string'], '?date_registered__gte=%s'
-'&date_registered__lt=%s'
-% (self.today.replace(month=1, day=1), self.next_year))
+self.assertEqual(
+choice['query_string'],
+'?date_registered__gte=%s&date_registered__lt=%s' % (
+self.today.replace(month=1, day=1),
+self.next_year,
+)
+)

-request = self.request_factory.get('/', {'date_registered__gte': str(self.one_week_ago),
-'date_registered__lt': str(self.tomorrow)})
+request = self.request_factory.get('/', {
+'date_registered__gte': str(self.one_week_ago),
+'date_registered__lt': str(self.tomorrow),
+})
 changelist = self.get_changelist(request, Book, modeladmin)

 # Make sure the correct queryset is returned
@@ -302,9 +316,13 @@ class ListFiltersTests(TestCase):
 self.assertEqual(force_text(filterspec.title), 'date registered')
 choice = select_by(filterspec.choices(changelist), "display", "Past 7 days")
 self.assertEqual(choice['selected'], True)
-self.assertEqual(choice['query_string'], '?date_registered__gte=%s'
-'&date_registered__lt=%s'
-% (str(self.one_week_ago), str(self.tomorrow)))
+self.assertEqual(
+choice['query_string'],
+'?date_registered__gte=%s&date_registered__lt=%s' % (
+str(self.one_week_ago),
+str(self.tomorrow),
+)
+)

 @override_settings(USE_TZ=True)
 def test_datefieldlistfilter_with_time_zone_support(self):
@@ -509,8 +509,7 @@ class DefaultFiltersTests(TestCase):
 '</li>\n\t</ul>\n\t</li>\n\t<li>item 2</li>')

 self.assertEqual(
-unordered_list(['item 1', ['item 1.1', ['item 1.1.1',
-['item 1.1.1.1']]]]),
+unordered_list(['item 1', ['item 1.1', ['item 1.1.1', ['item 1.1.1.1']]]]),
 '\t<li>item 1\n\t<ul>\n\t\t<li>item 1.1\n\t\t<ul>\n\t\t\t<li>'
 'item 1.1.1\n\t\t\t<ul>\n\t\t\t\t<li>item 1.1.1.1</li>\n\t\t\t'
 '</ul>\n\t\t\t</li>\n\t\t</ul>\n\t\t</li>\n\t</ul>\n\t</li>')
@@ -531,8 +530,7 @@ class DefaultFiltersTests(TestCase):

 a = ULItem('a')
 b = ULItem('b')
-self.assertEqual(unordered_list([a, b]),
-'\t<li>ulitem-a</li>\n\t<li>ulitem-b</li>')
+self.assertEqual(unordered_list([a, b]), '\t<li>ulitem-a</li>\n\t<li>ulitem-b</li>')

 # Old format for unordered lists should still work
 self.assertEqual(unordered_list(['item 1', []]), '\t<li>item 1</li>')
@@ -635,12 +633,10 @@ class DefaultFiltersTests(TestCase):
 self.assertEqual(filesizeformat(1024 * 1024 * 1024), '1.0\xa0GB')
 self.assertEqual(filesizeformat(1024 * 1024 * 1024 * 1024), '1.0\xa0TB')
 self.assertEqual(filesizeformat(1024 * 1024 * 1024 * 1024 * 1024), '1.0\xa0PB')
-self.assertEqual(filesizeformat(1024 * 1024 * 1024 * 1024 * 1024 * 2000),
-'2000.0\xa0PB')
+self.assertEqual(filesizeformat(1024 * 1024 * 1024 * 1024 * 1024 * 2000), '2000.0\xa0PB')
 self.assertEqual(filesizeformat(complex(1, -1)), '0\xa0bytes')
 self.assertEqual(filesizeformat(""), '0\xa0bytes')
-self.assertEqual(filesizeformat("\N{GREEK SMALL LETTER ALPHA}"),
-'0\xa0bytes')
+self.assertEqual(filesizeformat("\N{GREEK SMALL LETTER ALPHA}"), '0\xa0bytes')

 def test_pluralize(self):
 self.assertEqual(pluralize(1), '')
@@ -701,11 +697,8 @@ class DefaultFiltersI18NTests(TransRealMixin, TestCase):
 self.assertEqual(filesizeformat(1024 * 1024 * 1024 - 1), '1024,0\xa0MB')
 self.assertEqual(filesizeformat(1024 * 1024 * 1024), '1,0\xa0GB')
 self.assertEqual(filesizeformat(1024 * 1024 * 1024 * 1024), '1,0\xa0TB')
-self.assertEqual(filesizeformat(1024 * 1024 * 1024 * 1024 * 1024),
-'1,0\xa0PB')
-self.assertEqual(filesizeformat(1024 * 1024 * 1024 * 1024 * 1024 * 2000),
-'2000,0\xa0PB')
+self.assertEqual(filesizeformat(1024 * 1024 * 1024 * 1024 * 1024), '1,0\xa0PB')
+self.assertEqual(filesizeformat(1024 * 1024 * 1024 * 1024 * 1024 * 2000), '2000,0\xa0PB')
 self.assertEqual(filesizeformat(complex(1, -1)), '0\xa0Bytes')
 self.assertEqual(filesizeformat(""), '0\xa0Bytes')
-self.assertEqual(filesizeformat("\N{GREEK SMALL LETTER ALPHA}"),
-'0\xa0Bytes')
+self.assertEqual(filesizeformat("\N{GREEK SMALL LETTER ALPHA}"), '0\xa0Bytes')
@@ -859,21 +859,24 @@ class SMTPBackendTests(BaseEmailBackendTests, SimpleTestCase):
 def get_mailbox_content(self):
 return self.server.get_sink()

-@override_settings(EMAIL_HOST_USER="not empty username",
+@override_settings(
+EMAIL_HOST_USER="not empty username",
 EMAIL_HOST_PASSWORD="not empty password")
 def test_email_authentication_use_settings(self):
 backend = smtp.EmailBackend()
 self.assertEqual(backend.username, 'not empty username')
 self.assertEqual(backend.password, 'not empty password')

-@override_settings(EMAIL_HOST_USER="not empty username",
+@override_settings(
+EMAIL_HOST_USER="not empty username",
 EMAIL_HOST_PASSWORD="not empty password")
 def test_email_authentication_override_settings(self):
 backend = smtp.EmailBackend(username='username', password='password')
 self.assertEqual(backend.username, 'username')
 self.assertEqual(backend.password, 'password')

-@override_settings(EMAIL_HOST_USER="not empty username",
+@override_settings(
+EMAIL_HOST_USER="not empty username",
 EMAIL_HOST_PASSWORD="not empty password")
 def test_email_disabled_authentication(self):
 backend = smtp.EmailBackend(username='', password='')
@@ -412,13 +412,11 @@ class ModelInheritanceTest(TestCase):
 # when more than one model has a concrete->abstract->concrete
 # inheritance hierarchy.
 self.assertEqual(
-len([field for field in BusStation._meta.local_fields
-if field.primary_key]),
+len([field for field in BusStation._meta.local_fields if field.primary_key]),
 1
 )
 self.assertEqual(
-len([field for field in TrainStation._meta.local_fields
-if field.primary_key]),
+len([field for field in TrainStation._meta.local_fields if field.primary_key]),
 1
 )
 self.assertIs(BusStation._meta.pk.model, BusStation)
@@ -793,8 +793,7 @@ class NullableTest(TestCase):
 for e in qs]

 qs2 = Employee.objects.select_related('boss')
-co_serfs2 = [list(e.boss.serfs.all()) if e.boss is not None else []
-for e in qs2]
+co_serfs2 = [list(e.boss.serfs.all()) if e.boss is not None else [] for e in qs2]

 self.assertEqual(co_serfs, co_serfs2)

@@ -806,8 +805,7 @@ class NullableTest(TestCase):
 for e in qs]

 qs2 = Employee.objects.all()
-co_serfs2 = [list(e.boss.serfs.all()) if e.boss is not None else []
-for e in qs2]
+co_serfs2 = [list(e.boss.serfs.all()) if e.boss is not None else [] for e in qs2]

 self.assertEqual(co_serfs, co_serfs2)

@@ -32,11 +32,15 @@ class RawQueryTests(TestCase):

 for field in model._meta.fields:
 # Check that all values on the model are equal
-self.assertEqual(getattr(item, field.attname),
-getattr(orig_item, field.attname))
+self.assertEqual(
+getattr(item, field.attname),
+getattr(orig_item, field.attname)
+)
 # This includes checking that they are the same type
-self.assertEqual(type(getattr(item, field.attname)),
-type(getattr(orig_item, field.attname)))
+self.assertEqual(
+type(getattr(item, field.attname)),
+type(getattr(orig_item, field.attname))
+)

 def assertNoAnnotations(self, results):
 """
@@ -783,8 +783,7 @@ class TestMiscFinder(TestCase):
 class TestTemplateTag(StaticFilesTestCase):

 def test_template_tag(self):
-self.assertStaticRenders("does/not/exist.png",
-"/static/does/not/exist.png")
+self.assertStaticRenders("does/not/exist.png", "/static/does/not/exist.png")
 self.assertStaticRenders("testfile.txt", "/static/testfile.txt")

@@ -859,14 +859,16 @@ class TemplateTests(TestCase):
 """
 Test the {% timezone %} templatetag.
 """
-tpl = Template("{% load tz %}"
+tpl = Template(
+"{% load tz %}"
 "{{ dt }}|"
 "{% timezone tz1 %}"
 "{{ dt }}|"
 "{% timezone tz2 %}"
 "{{ dt }}"
 "{% endtimezone %}"
-"{% endtimezone %}")
+"{% endtimezone %}"
+)
 ctx = Context({'dt': datetime.datetime(2011, 9, 1, 10, 20, 30, tzinfo=UTC),
 'tz1': ICT, 'tz2': None})
 self.assertEqual(tpl.render(ctx), "2011-09-01T13:20:30+03:00|2011-09-01T17:20:30+07:00|2011-09-01T13:20:30+03:00")
@@ -32,12 +32,9 @@ urlpatterns = patterns('',
 url(r'^price/\$(\d+)/$', empty_view, name="price"),
 url(r'^price/[$](\d+)/$', empty_view, name="price2"),
 url(r'^price/[\$](\d+)/$', empty_view, name="price3"),
-url(r'^product/(?P<product>\w+)\+\(\$(?P<price>\d+(\.\d+)?)\)/$',
-empty_view, name="product"),
-url(r'^headlines/(?P<year>\d+)\.(?P<month>\d+)\.(?P<day>\d+)/$', empty_view,
-name="headlines"),
-url(r'^windows_path/(?P<drive_name>[A-Z]):\\(?P<path>.+)/$', empty_view,
-name="windows"),
+url(r'^product/(?P<product>\w+)\+\(\$(?P<price>\d+(\.\d+)?)\)/$', empty_view, name="product"),
+url(r'^headlines/(?P<year>\d+)\.(?P<month>\d+)\.(?P<day>\d+)/$', empty_view, name="headlines"),
+url(r'^windows_path/(?P<drive_name>[A-Z]):\\(?P<path>.+)/$', empty_view, name="windows"),
 url(r'^special_chars/(?P<chars>.+)/$', empty_view, name="special"),
 url(r'^(?P<name>.+)/\d+/$', empty_view, name="mixed"),
 url(r'^repeats/a{1,2}/$', empty_view, name="repeats"),
@@ -46,8 +43,7 @@ urlpatterns = patterns('',
 url(r'^(?i)CaseInsensitive/(\w+)', empty_view, name="insensitive"),
 url(r'^test/1/?', empty_view, name="test"),
 url(r'^(?i)test/2/?$', empty_view, name="test2"),
-url(r'^outer/(?P<outer>\d+)/',
-include('urlpatterns_reverse.included_urls')),
+url(r'^outer/(?P<outer>\d+)/', include('urlpatterns_reverse.included_urls')),
 url('', include('urlpatterns_reverse.extra_urls')),

 # This is non-reversible, but we shouldn't blow up when parsing it.
@@ -179,14 +179,17 @@ class MergeDictTests(IgnorePendingDeprecationWarningsMixin, SimpleTestCase):

 self.assertTrue('value1' in six.itervalues(mm))

-self.assertEqual(sorted(six.iteritems(mm), key=lambda k: k[0]),
-[('key1', 'value1'), ('key2', 'value3'),
-('key4', 'value6')])
+self.assertEqual(
+sorted(six.iteritems(mm), key=lambda k: k[0]),
+[('key1', 'value1'), ('key2', 'value3'), ('key4', 'value6')]
+)

-self.assertEqual([(k, mm.getlist(k)) for k in sorted(mm)],
+self.assertEqual(
+[(k, mm.getlist(k)) for k in sorted(mm)],
 [('key1', ['value1']),
 ('key2', ['value2', 'value3']),
-('key4', ['value5', 'value6'])])
+('key4', ['value5', 'value6'])]
+)

 def test_bool_casting(self):
 empty = MergeDict({}, {}, {})
@@ -212,12 +215,15 @@ class MultiValueDictTests(SimpleTestCase):
 self.assertEqual(d['name'], 'Simon')
 self.assertEqual(d.get('name'), 'Simon')
 self.assertEqual(d.getlist('name'), ['Adrian', 'Simon'])
-self.assertEqual(sorted(list(six.iteritems(d))),
-[('name', 'Simon'), ('position', 'Developer')])
+self.assertEqual(
+sorted(list(six.iteritems(d))),
+[('name', 'Simon'), ('position', 'Developer')]
+)

-self.assertEqual(sorted(list(six.iterlists(d))),
-[('name', ['Adrian', 'Simon']),
-('position', ['Developer'])])
+self.assertEqual(
+sorted(list(six.iterlists(d))),
+[('name', ['Adrian', 'Simon']), ('position', ['Developer'])]
+)

 six.assertRaisesRegex(self, MultiValueDictKeyError, 'lastname',
 d.__getitem__, 'lastname')
@@ -298,5 +304,7 @@ class DictWrapperTests(SimpleTestCase):
 def test_dictwrapper(self):
 f = lambda x: "*%s" % x
 d = DictWrapper({'a': 'a'}, f, 'xx_')
-self.assertEqual("Normal: %(a)s. Modified: %(xx_a)s" % d,
-'Normal: a. Modified: *a')
+self.assertEqual(
+"Normal: %(a)s. Modified: %(xx_a)s" % d,
+'Normal: a. Modified: *a'
+)
@@ -16,54 +16,44 @@ class TermColorTests(unittest.TestCase):

 def test_fg(self):
 self.assertEqual(parse_color_setting('error=green'),
-dict(PALETTES[NOCOLOR_PALETTE],
-ERROR={'fg': 'green'}))
+dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green'}))

 def test_fg_bg(self):
 self.assertEqual(parse_color_setting('error=green/blue'),
-dict(PALETTES[NOCOLOR_PALETTE],
-ERROR={'fg': 'green', 'bg': 'blue'}))
+dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green', 'bg': 'blue'}))

 def test_fg_opts(self):
 self.assertEqual(parse_color_setting('error=green,blink'),
-dict(PALETTES[NOCOLOR_PALETTE],
-ERROR={'fg': 'green', 'opts': ('blink',)}))
+dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green', 'opts': ('blink',)}))
 self.assertEqual(parse_color_setting('error=green,bold,blink'),
-dict(PALETTES[NOCOLOR_PALETTE],
-ERROR={'fg': 'green', 'opts': ('blink', 'bold')}))
+dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green', 'opts': ('blink', 'bold')}))

 def test_fg_bg_opts(self):
 self.assertEqual(parse_color_setting('error=green/blue,blink'),
-dict(PALETTES[NOCOLOR_PALETTE],
-ERROR={'fg': 'green', 'bg': 'blue', 'opts': ('blink',)}))
+dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green', 'bg': 'blue', 'opts': ('blink',)}))
 self.assertEqual(parse_color_setting('error=green/blue,bold,blink'),
-dict(PALETTES[NOCOLOR_PALETTE],
-ERROR={'fg': 'green', 'bg': 'blue', 'opts': ('blink', 'bold')}))
+dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green', 'bg': 'blue', 'opts': ('blink', 'bold')}))

 def test_override_palette(self):
 self.assertEqual(parse_color_setting('light;error=green'),
-dict(PALETTES[LIGHT_PALETTE],
-ERROR={'fg': 'green'}))
+dict(PALETTES[LIGHT_PALETTE], ERROR={'fg': 'green'}))

 def test_override_nocolor(self):
 self.assertEqual(parse_color_setting('nocolor;error=green'),
-dict(PALETTES[NOCOLOR_PALETTE],
-ERROR={'fg': 'green'}))
+dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green'}))

 def test_reverse_override(self):
 self.assertEqual(parse_color_setting('error=green;light'), PALETTES[LIGHT_PALETTE])

 def test_multiple_roles(self):
 self.assertEqual(parse_color_setting('error=green;sql_field=blue'),
-dict(PALETTES[NOCOLOR_PALETTE],
-ERROR={'fg': 'green'},
-SQL_FIELD={'fg': 'blue'}))
+dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green'}, SQL_FIELD={'fg': 'blue'}))

 def test_override_with_multiple_roles(self):
 self.assertEqual(parse_color_setting('light;error=green;sql_field=blue'),
-dict(PALETTES[LIGHT_PALETTE],
-ERROR={'fg': 'green'},
-SQL_FIELD={'fg': 'blue'}))
+dict(PALETTES[LIGHT_PALETTE], ERROR={'fg': 'green'}, SQL_FIELD={'fg': 'blue'}))

 def test_empty_definition(self):
 self.assertEqual(parse_color_setting(';'), None)
@@ -72,14 +62,13 @@ class TermColorTests(unittest.TestCase):

 def test_empty_options(self):
 self.assertEqual(parse_color_setting('error=green,'),
-dict(PALETTES[NOCOLOR_PALETTE],
-ERROR={'fg': 'green'}))
+dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green'}))
 self.assertEqual(parse_color_setting('error=green,,,'),
-dict(PALETTES[NOCOLOR_PALETTE],
-ERROR={'fg': 'green'}))
+dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green'}))
 self.assertEqual(parse_color_setting('error=green,,blink,,'),
-dict(PALETTES[NOCOLOR_PALETTE],
-ERROR={'fg': 'green', 'opts': ('blink',)}))
+dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green', 'opts': ('blink',)}))

 def test_bad_palette(self):
 self.assertEqual(parse_color_setting('unknown'), None)
@@ -88,67 +77,59 @@ class TermColorTests(unittest.TestCase):
 self.assertEqual(parse_color_setting('unknown='), None)
 self.assertEqual(parse_color_setting('unknown=green'), None)
 self.assertEqual(parse_color_setting('unknown=green;sql_field=blue'),
-dict(PALETTES[NOCOLOR_PALETTE],
-SQL_FIELD={'fg': 'blue'}))
+dict(PALETTES[NOCOLOR_PALETTE], SQL_FIELD={'fg': 'blue'}))

 def test_bad_color(self):
 self.assertEqual(parse_color_setting('error='), None)
 self.assertEqual(parse_color_setting('error=;sql_field=blue'),
-dict(PALETTES[NOCOLOR_PALETTE],
-SQL_FIELD={'fg': 'blue'}))
+dict(PALETTES[NOCOLOR_PALETTE], SQL_FIELD={'fg': 'blue'}))
 self.assertEqual(parse_color_setting('error=unknown'), None)
 self.assertEqual(parse_color_setting('error=unknown;sql_field=blue'),
-dict(PALETTES[NOCOLOR_PALETTE],
-SQL_FIELD={'fg': 'blue'}))
+dict(PALETTES[NOCOLOR_PALETTE], SQL_FIELD={'fg': 'blue'}))
 self.assertEqual(parse_color_setting('error=green/unknown'),
-dict(PALETTES[NOCOLOR_PALETTE],
-ERROR={'fg': 'green'}))
+dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green'}))
 self.assertEqual(parse_color_setting('error=green/blue/something'),
-dict(PALETTES[NOCOLOR_PALETTE],
-ERROR={'fg': 'green', 'bg': 'blue'}))
+dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green', 'bg': 'blue'}))
 self.assertEqual(parse_color_setting('error=green/blue/something,blink'),
-dict(PALETTES[NOCOLOR_PALETTE],
-ERROR={'fg': 'green', 'bg': 'blue', 'opts': ('blink',)}))
+dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green', 'bg': 'blue', 'opts': ('blink',)}))

 def test_bad_option(self):
 self.assertEqual(parse_color_setting('error=green,unknown'),
-dict(PALETTES[NOCOLOR_PALETTE],
-ERROR={'fg': 'green'}))
+dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green'}))
 self.assertEqual(parse_color_setting('error=green,unknown,blink'),
-dict(PALETTES[NOCOLOR_PALETTE],
-ERROR={'fg': 'green', 'opts': ('blink',)}))
+dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green', 'opts': ('blink',)}))

 def test_role_case(self):
 self.assertEqual(parse_color_setting('ERROR=green'),
-dict(PALETTES[NOCOLOR_PALETTE],
-ERROR={'fg': 'green'}))
+dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green'}))
 self.assertEqual(parse_color_setting('eRrOr=green'),
-dict(PALETTES[NOCOLOR_PALETTE],
-ERROR={'fg': 'green'}))
+dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green'}))

 def test_color_case(self):
 self.assertEqual(parse_color_setting('error=GREEN'),
-dict(PALETTES[NOCOLOR_PALETTE],
-ERROR={'fg': 'green'}))
+dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green'}))
 self.assertEqual(parse_color_setting('error=GREEN/BLUE'),
-dict(PALETTES[NOCOLOR_PALETTE],
-ERROR={'fg': 'green', 'bg': 'blue'}))
+dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green', 'bg': 'blue'}))

 self.assertEqual(parse_color_setting('error=gReEn'),
-dict(PALETTES[NOCOLOR_PALETTE],
-ERROR={'fg': 'green'}))
+dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green'}))
 self.assertEqual(parse_color_setting('error=gReEn/bLuE'),
-dict(PALETTES[NOCOLOR_PALETTE],
-ERROR={'fg': 'green', 'bg': 'blue'}))
+dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green', 'bg': 'blue'}))

 def test_opts_case(self):
 self.assertEqual(parse_color_setting('error=green,BLINK'),
-dict(PALETTES[NOCOLOR_PALETTE],
-ERROR={'fg': 'green', 'opts': ('blink',)}))
+dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green', 'opts': ('blink',)}))

 self.assertEqual(parse_color_setting('error=green,bLiNk'),
-dict(PALETTES[NOCOLOR_PALETTE],
-ERROR={'fg': 'green', 'opts': ('blink',)}))
+dict(PALETTES[NOCOLOR_PALETTE], ERROR={'fg': 'green', 'opts': ('blink',)}))

 def test_colorize_empty_text(self):
 self.assertEqual(colorize(text=None), '\x1b[m\x1b[0m')
@@ -75,8 +75,7 @@ class StaticTests(SimpleTestCase):
 response_content = b''.join(response)
 with open(path.join(media_dir, file_name), 'rb') as fp:
 self.assertEqual(fp.read(), response_content)
-self.assertEqual(len(response_content),
-int(response['Content-Length']))
+self.assertEqual(len(response_content), int(response['Content-Length']))

 def test_invalid_if_modified_since2(self):
 """Handle even more bogus If-Modified-Since values gracefully
@@ -91,8 +90,7 @@ class StaticTests(SimpleTestCase):
 response_content = b''.join(response)
 with open(path.join(media_dir, file_name), 'rb') as fp:
 self.assertEqual(fp.read(), response_content)
-self.assertEqual(len(response_content),
-int(response['Content-Length']))
+self.assertEqual(len(response_content), int(response['Content-Length']))

 def test_404(self):
 response = self.client.get('/views/%s/non_existing_resource' % self.prefix)