Fixed #21288 -- Fixed E126 pep8 warnings

parent a3690168cb
commit b289fcf1bf
@@ -217,8 +217,8 @@ class RelatedFieldListFilter(FieldListFilter):
}

FieldListFilter.register(lambda f: (
bool(f.rel) if hasattr(f, 'rel') else
isinstance(f, models.related.RelatedObject)), RelatedFieldListFilter)
bool(f.rel) if hasattr(f, 'rel') else
isinstance(f, models.related.RelatedObject)), RelatedFieldListFilter)

class BooleanFieldListFilter(FieldListFilter):

@@ -241,7 +241,7 @@ class BooleanFieldListFilter(FieldListFilter):
yield {
'selected': self.lookup_val == lookup and not self.lookup_val2,
'query_string': cl.get_query_string({
self.lookup_kwarg: lookup,
self.lookup_kwarg: lookup,
}, [self.lookup_kwarg2]),
'display': title,
}

@@ -249,7 +249,7 @@ class BooleanFieldListFilter(FieldListFilter):
yield {
'selected': self.lookup_val2 == 'True',
'query_string': cl.get_query_string({
self.lookup_kwarg2: 'True',
self.lookup_kwarg2: 'True',
}, [self.lookup_kwarg]),
'display': _('Unknown'),
}

@@ -278,7 +278,7 @@ class ChoicesFieldListFilter(FieldListFilter):
yield {
'selected': smart_text(lookup) == self.lookup_val,
'query_string': cl.get_query_string({
self.lookup_kwarg: lookup}),
self.lookup_kwarg: lookup}),
'display': title,
}

@@ -340,7 +340,7 @@ class DateFieldListFilter(FieldListFilter):
yield {
'selected': self.date_params == param_dict,
'query_string': cl.get_query_string(
param_dict, [self.field_generic]),
param_dict, [self.field_generic]),
'display': title,
}

@@ -150,13 +150,12 @@ class BaseModelAdmin(six.with_metaclass(RenameBaseModelAdminMethods)):
# rendered output. formfield can be None if it came from a
# OneToOneField with parent_link=True or a M2M intermediary.
if formfield and db_field.name not in self.raw_id_fields:
related_modeladmin = self.admin_site._registry.get(
db_field.rel.to)
related_modeladmin = self.admin_site._registry.get(db_field.rel.to)
can_add_related = bool(related_modeladmin and
related_modeladmin.has_add_permission(request))
related_modeladmin.has_add_permission(request))
formfield.widget = widgets.RelatedFieldWidgetWrapper(
formfield.widget, db_field.rel, self.admin_site,
can_add_related=can_add_related)
formfield.widget, db_field.rel, self.admin_site,
can_add_related=can_add_related)

return formfield

@@ -1196,11 +1195,11 @@ class ModelAdmin(BaseModelAdmin):
opts = self.model._meta

self.message_user(request, _(
'The %(name)s "%(obj)s" was deleted successfully.') % {
self.message_user(request,
_('The %(name)s "%(obj)s" was deleted successfully.') % {
'name': force_text(opts.verbose_name),
'obj': force_text(obj_display)
}, messages.SUCCESS)
}, messages.SUCCESS)

if self.has_change_permission(request, None):
post_url = reverse('admin:%s_%s_changelist' %

@@ -1313,9 +1312,9 @@ class ModelAdmin(BaseModelAdmin):
raise Http404(_('%(name)s object with primary key %(key)r does not exist.') % {'name': force_text(opts.verbose_name), 'key': escape(object_id)})

if request.method == 'POST' and "_saveasnew" in request.POST:
return self.add_view(request, form_url=reverse('admin:%s_%s_add' %
(opts.app_label, opts.model_name),
current_app=self.admin_site.name))
return self.add_view(request, form_url=reverse('admin:%s_%s_add' % (
opts.app_label, opts.model_name),
current_app=self.admin_site.name))

ModelForm = self.get_form(request, obj)
if request.method == 'POST':

@@ -1544,7 +1543,7 @@ class ModelAdmin(BaseModelAdmin):
if obj is None:
raise Http404(
_('%(name)s object with primary key %(key)r does not exist.') %
{'name': force_text(opts.verbose_name), 'key': escape(object_id)}
{'name': force_text(opts.verbose_name), 'key': escape(object_id)}
)

using = router.db_for_write(self.model)

@@ -164,8 +164,7 @@ def result_headers(cl):
"url_primary": cl.get_query_string({ORDER_VAR: '.'.join(o_list_primary)}),
"url_remove": cl.get_query_string({ORDER_VAR: '.'.join(o_list_remove)}),
"url_toggle": cl.get_query_string({ORDER_VAR: '.'.join(o_list_toggle)}),
"class_attrib": format_html(' class="{0}"', ' '.join(th_classes))
if th_classes else '',
"class_attrib": format_html(' class="{0}"', ' '.join(th_classes)) if th_classes else '',
}

def _boolean_icon(field_val):

@@ -246,8 +245,7 @@ def items_for_result(cl, result, form):
link_or_text = format_html(
'<a href="{0}"{1}>{2}</a>',
url,
format_html(' onclick="opener.dismissRelatedLookupPopup(window, &#39;{0}&#39;); return false;"', result_id)
if cl.is_popup else '',
format_html(' onclick="opener.dismissRelatedLookupPopup(window, &#39;{0}&#39;); return false;"', result_id) if cl.is_popup else '',
result_repr)

yield format_html('<{0}{1}>{2}</{3}>',

@@ -261,7 +259,7 @@ def items_for_result(cl, result, form):
# can provide fields on a per request basis
if (form and field_name in form.fields and not (
field_name == cl.model._meta.pk.name and
form[cl.model._meta.pk.name].is_hidden)):
form[cl.model._meta.pk.name].is_hidden)):
bf = form[field_name]
result_repr = mark_safe(force_text(bf.errors) + force_text(bf))
yield format_html('<td{0}>{1}</td>', row_class, result_repr)

@@ -33,8 +33,7 @@ def submit_row(context):
'show_delete_link': (not is_popup and context['has_delete_permission']
and change and context.get('show_delete', True)),
'show_save_as_new': not is_popup and change and save_as,
'show_save_and_add_another': context['has_add_permission'] and
not is_popup and (not save_as or context['add']),
'show_save_and_add_another': context['has_add_permission'] and not is_popup and (not save_as or context['add']),
'show_save_and_continue': not is_popup and context['has_change_permission'],
'is_popup': is_popup,
'show_save': True,

@@ -34,7 +34,7 @@ class Command(BaseCommand):
try:
u = UserModel._default_manager.using(options.get('database')).get(**{
UserModel.USERNAME_FIELD: username
UserModel.USERNAME_FIELD: username
})
except UserModel.DoesNotExist:
raise CommandError("user '%s' does not exist" % username)

@@ -427,7 +427,7 @@ class PermissionDeniedBackendTest(TestCase):
self.assertEqual(authenticate(username='test', password='test'), None)

@override_settings(AUTHENTICATION_BACKENDS=tuple(
settings.AUTHENTICATION_BACKENDS) + (backend, ))
settings.AUTHENTICATION_BACKENDS) + (backend, ))
def test_authenticates(self):
self.assertEqual(authenticate(username='test', password='test'), self.user1)

@@ -177,9 +177,10 @@ class PasswordResetTest(AuthViewsTestCase):
# HTTP_HOST header isn't poisoned. This is done as a check when get_host()
# is invoked, but we check here as a practical consequence.
with patch_logger('django.security.DisallowedHost', 'error') as logger_calls:
response = self.client.post('/password_reset/',
{'email': 'staffmember@example.com'},
HTTP_HOST='www.example:dr.frankenstein@evil.tld'
response = self.client.post(
'/password_reset/',
{'email': 'staffmember@example.com'},
HTTP_HOST='www.example:dr.frankenstein@evil.tld'
)
self.assertEqual(response.status_code, 400)
self.assertEqual(len(mail.outbox), 0)

@@ -190,9 +191,10 @@ class PasswordResetTest(AuthViewsTestCase):
def test_poisoned_http_host_admin_site(self):
"Poisoned HTTP_HOST headers can't be used for reset emails on admin views"
with patch_logger('django.security.DisallowedHost', 'error') as logger_calls:
response = self.client.post('/admin_password_reset/',
{'email': 'staffmember@example.com'},
HTTP_HOST='www.example:dr.frankenstein@evil.tld'
response = self.client.post(
'/admin_password_reset/',
{'email': 'staffmember@example.com'},
HTTP_HOST='www.example:dr.frankenstein@evil.tld'
)
self.assertEqual(response.status_code, 400)
self.assertEqual(len(mail.outbox), 0)

@@ -356,7 +358,7 @@ class ChangePasswordTest(AuthViewsTestCase):
'password': password,
})
self.assertFormError(response, AuthenticationForm.error_messages['invalid_login'] % {
'username': User._meta.get_field('username').verbose_name
'username': User._meta.get_field('username').verbose_name
})

def logout(self):

@@ -490,8 +492,8 @@ class LoginTest(AuthViewsTestCase):
'good_url': urlquote(good_url),
}
response = self.client.post(safe_url, {
'username': 'testclient',
'password': password,
'username': 'testclient',
'password': password,
})
self.assertEqual(response.status_code, 302)
self.assertTrue(good_url in response.url,

@@ -240,12 +240,10 @@ class GenericRelation(ForeignObject):
"""
return self.rel.to._base_manager.db_manager(using).filter(**{
"%s__pk" % self.content_type_field_name:
ContentType.objects.db_manager(using).get_for_model(
self.model, for_concrete_model=self.for_concrete_model).pk,
"%s__in" % self.object_id_field_name:
[obj.pk for obj in objs]
})
"%s__pk" % self.content_type_field_name: ContentType.objects.db_manager(using).get_for_model(
self.model, for_concrete_model=self.for_concrete_model).pk,
"%s__in" % self.object_id_field_name: [obj.pk for obj in objs]
})

class ReverseGenericRelatedObjectsDescriptor(object):

@@ -352,8 +350,7 @@ def create_generic_related_manager(superclass):
db = self._db or router.db_for_read(self.model, instance=instances[0])
query = {
'%s__pk' % self.content_type_field_name: self.content_type.id,
'%s__in' % self.object_id_field_name:
set(obj._get_pk_val() for obj in instances)
'%s__in' % self.object_id_field_name: set(obj._get_pk_val() for obj in instances)
}
qs = super(GenericRelatedObjectManager, self).get_queryset().using(db).filter(**query)
# We (possibly) need to convert object IDs to the type of the

@ -27,24 +27,24 @@ class FlatpageTemplateTagTests(TestCase):
|
|||
def test_get_flatpages_tag(self):
|
||||
"The flatpage template tag retrives unregistered prefixed flatpages by default"
|
||||
out = Template(
|
||||
"{% load flatpages %}"
|
||||
"{% get_flatpages as flatpages %}"
|
||||
"{% for page in flatpages %}"
|
||||
"{{ page.title }},"
|
||||
"{% endfor %}"
|
||||
"{% load flatpages %}"
|
||||
"{% get_flatpages as flatpages %}"
|
||||
"{% for page in flatpages %}"
|
||||
"{{ page.title }},"
|
||||
"{% endfor %}"
|
||||
).render(Context())
|
||||
self.assertEqual(out, "A Flatpage,A Nested Flatpage,")
|
||||
|
||||
def test_get_flatpages_tag_for_anon_user(self):
|
||||
"The flatpage template tag retrives unregistered flatpages for an anonymous user"
|
||||
out = Template(
|
||||
"{% load flatpages %}"
|
||||
"{% get_flatpages for anonuser as flatpages %}"
|
||||
"{% for page in flatpages %}"
|
||||
"{{ page.title }},"
|
||||
"{% endfor %}"
|
||||
"{% load flatpages %}"
|
||||
"{% get_flatpages for anonuser as flatpages %}"
|
||||
"{% for page in flatpages %}"
|
||||
"{{ page.title }},"
|
||||
"{% endfor %}"
|
||||
).render(Context({
|
||||
'anonuser': AnonymousUser()
|
||||
'anonuser': AnonymousUser()
|
||||
}))
|
||||
self.assertEqual(out, "A Flatpage,A Nested Flatpage,")
|
||||
|
||||
|
@ -53,37 +53,37 @@ class FlatpageTemplateTagTests(TestCase):
|
|||
"The flatpage template tag retrives all flatpages for an authenticated user"
|
||||
me = User.objects.create_user('testuser', 'test@example.com', 's3krit')
|
||||
out = Template(
|
||||
"{% load flatpages %}"
|
||||
"{% get_flatpages for me as flatpages %}"
|
||||
"{% for page in flatpages %}"
|
||||
"{{ page.title }},"
|
||||
"{% endfor %}"
|
||||
"{% load flatpages %}"
|
||||
"{% get_flatpages for me as flatpages %}"
|
||||
"{% for page in flatpages %}"
|
||||
"{{ page.title }},"
|
||||
"{% endfor %}"
|
||||
).render(Context({
|
||||
'me': me
|
||||
'me': me
|
||||
}))
|
||||
self.assertEqual(out, "A Flatpage,A Nested Flatpage,Sekrit Nested Flatpage,Sekrit Flatpage,")
|
||||
|
||||
def test_get_flatpages_with_prefix(self):
|
||||
"The flatpage template tag retrives unregistered prefixed flatpages by default"
|
||||
out = Template(
|
||||
"{% load flatpages %}"
|
||||
"{% get_flatpages '/location/' as location_flatpages %}"
|
||||
"{% for page in location_flatpages %}"
|
||||
"{{ page.title }},"
|
||||
"{% endfor %}"
|
||||
"{% load flatpages %}"
|
||||
"{% get_flatpages '/location/' as location_flatpages %}"
|
||||
"{% for page in location_flatpages %}"
|
||||
"{{ page.title }},"
|
||||
"{% endfor %}"
|
||||
).render(Context())
|
||||
self.assertEqual(out, "A Nested Flatpage,")
|
||||
|
||||
def test_get_flatpages_with_prefix_for_anon_user(self):
|
||||
"The flatpage template tag retrives unregistered prefixed flatpages for an anonymous user"
|
||||
out = Template(
|
||||
"{% load flatpages %}"
|
||||
"{% get_flatpages '/location/' for anonuser as location_flatpages %}"
|
||||
"{% for page in location_flatpages %}"
|
||||
"{{ page.title }},"
|
||||
"{% endfor %}"
|
||||
"{% load flatpages %}"
|
||||
"{% get_flatpages '/location/' for anonuser as location_flatpages %}"
|
||||
"{% for page in location_flatpages %}"
|
||||
"{{ page.title }},"
|
||||
"{% endfor %}"
|
||||
).render(Context({
|
||||
'anonuser': AnonymousUser()
|
||||
'anonuser': AnonymousUser()
|
||||
}))
|
||||
self.assertEqual(out, "A Nested Flatpage,")
|
||||
|
||||
|
@ -92,26 +92,26 @@ class FlatpageTemplateTagTests(TestCase):
|
|||
"The flatpage template tag retrive prefixed flatpages for an authenticated user"
|
||||
me = User.objects.create_user('testuser', 'test@example.com', 's3krit')
|
||||
out = Template(
|
||||
"{% load flatpages %}"
|
||||
"{% get_flatpages '/location/' for me as location_flatpages %}"
|
||||
"{% for page in location_flatpages %}"
|
||||
"{{ page.title }},"
|
||||
"{% endfor %}"
|
||||
"{% load flatpages %}"
|
||||
"{% get_flatpages '/location/' for me as location_flatpages %}"
|
||||
"{% for page in location_flatpages %}"
|
||||
"{{ page.title }},"
|
||||
"{% endfor %}"
|
||||
).render(Context({
|
||||
'me': me
|
||||
'me': me
|
||||
}))
|
||||
self.assertEqual(out, "A Nested Flatpage,Sekrit Nested Flatpage,")
|
||||
|
||||
def test_get_flatpages_with_variable_prefix(self):
|
||||
"The prefix for the flatpage template tag can be a template variable"
|
||||
out = Template(
|
||||
"{% load flatpages %}"
|
||||
"{% get_flatpages location_prefix as location_flatpages %}"
|
||||
"{% for page in location_flatpages %}"
|
||||
"{{ page.title }},"
|
||||
"{% endfor %}"
|
||||
"{% load flatpages %}"
|
||||
"{% get_flatpages location_prefix as location_flatpages %}"
|
||||
"{% for page in location_flatpages %}"
|
||||
"{{ page.title }},"
|
||||
"{% endfor %}"
|
||||
).render(Context({
|
||||
'location_prefix': '/location/'
|
||||
'location_prefix': '/location/'
|
||||
}))
|
||||
self.assertEqual(out, "A Nested Flatpage,")
|
||||
|
||||
|
|
|
@ -70,8 +70,8 @@ class BaseStorage(object):
|
|||
|
||||
if wizard_files and not self.file_storage:
|
||||
raise NoFileStorageConfigured(
|
||||
"You need to define 'file_storage' in your "
|
||||
"wizard view in order to handle file uploads.")
|
||||
"You need to define 'file_storage' in your "
|
||||
"wizard view in order to handle file uploads.")
|
||||
|
||||
files = {}
|
||||
for field, field_dict in six.iteritems(wizard_files):
|
||||
|
@ -84,8 +84,8 @@ class BaseStorage(object):
|
|||
def set_step_files(self, step, files):
|
||||
if files and not self.file_storage:
|
||||
raise NoFileStorageConfigured(
|
||||
"You need to define 'file_storage' in your "
|
||||
"wizard view in order to handle file uploads.")
|
||||
"You need to define 'file_storage' in your "
|
||||
"wizard view in order to handle file uploads.")
|
||||
|
||||
if step not in self.data[self.step_files_key]:
|
||||
self.data[self.step_files_key][step] = {}
|
||||
|
|
|
@ -184,8 +184,8 @@ class WizardView(TemplateView):
|
|||
if (isinstance(field, forms.FileField) and
|
||||
not hasattr(cls, 'file_storage')):
|
||||
raise NoFileStorageConfigured(
|
||||
"You need to define 'file_storage' in your "
|
||||
"wizard view in order to handle file uploads.")
|
||||
"You need to define 'file_storage' in your "
|
||||
"wizard view in order to handle file uploads.")
|
||||
|
||||
# build the kwargs for the wizardview instances
|
||||
kwargs['form_list'] = computed_form_list
|
||||
|
|
|
@ -10,15 +10,15 @@ from django.contrib.gis.measure import Area, Distance
|
|||
|
||||
|
||||
ALL_TERMS = set([
|
||||
'bbcontains', 'bboverlaps', 'contained', 'contains',
|
||||
'contains_properly', 'coveredby', 'covers', 'crosses', 'disjoint',
|
||||
'distance_gt', 'distance_gte', 'distance_lt', 'distance_lte',
|
||||
'dwithin', 'equals', 'exact',
|
||||
'intersects', 'overlaps', 'relate', 'same_as', 'touches', 'within',
|
||||
'left', 'right', 'overlaps_left', 'overlaps_right',
|
||||
'overlaps_above', 'overlaps_below',
|
||||
'strictly_above', 'strictly_below'
|
||||
])
|
||||
'bbcontains', 'bboverlaps', 'contained', 'contains',
|
||||
'contains_properly', 'coveredby', 'covers', 'crosses', 'disjoint',
|
||||
'distance_gt', 'distance_gte', 'distance_lt', 'distance_lte',
|
||||
'dwithin', 'equals', 'exact',
|
||||
'intersects', 'overlaps', 'relate', 'same_as', 'touches', 'within',
|
||||
'left', 'right', 'overlaps_left', 'overlaps_right',
|
||||
'overlaps_above', 'overlaps_below',
|
||||
'strictly_above', 'strictly_below'
|
||||
])
|
||||
ALL_TERMS.update(sql.constants.QUERY_TERMS)
|
||||
|
||||
class GeoQuery(sql.Query):
|
||||
|
|
|
@ -46,7 +46,7 @@ class GeoWhereNode(WhereNode):
|
|||
if isinstance(lvalue, GeoConstraint):
|
||||
data, params = lvalue.process(lookup_type, params_or_value, connection)
|
||||
spatial_sql, spatial_params = connection.ops.spatial_lookup_sql(
|
||||
data, lookup_type, params_or_value, lvalue.field, qn)
|
||||
data, lookup_type, params_or_value, lvalue.field, qn)
|
||||
return spatial_sql, spatial_params + params
|
||||
else:
|
||||
return super(GeoWhereNode, self).make_atom(child, qn, connection)
|
||||
|
|
|
@ -155,7 +155,7 @@ class OGRGeometry(GDALBase):
|
|||
"Constructs a Polygon from a bounding box (4-tuple)."
|
||||
x0, y0, x1, y1 = bbox
|
||||
return OGRGeometry('POLYGON((%s %s, %s %s, %s %s, %s %s, %s %s))' % (
|
||||
x0, y0, x0, y1, x1, y1, x1, y0, x0, y0))
|
||||
x0, y0, x0, y1, x1, y1, x1, y0, x0, y0))
|
||||
|
||||
### Geometry set-like operations ###
|
||||
# g = g1 | g2
|
||||
|
|
|
@ -133,11 +133,12 @@ class GEOSMutationTest(unittest.TestCase):
|
|||
|
||||
# _set_list
|
||||
pg._set_list(2, (((1,2),(10,0),(12,9),(-1,15),(1,2)),
|
||||
((4,2),(5,2),(5,3),(4,2))))
|
||||
self.assertEqual(pg.coords,
|
||||
(((1.0,2.0),(10.0,0.0),(12.0,9.0),(-1.0,15.0),(1.0,2.0)),
|
||||
((4.0,2.0),(5.0,2.0),(5.0,3.0),(4.0,2.0))),
|
||||
'Polygon _set_list')
|
||||
((4,2),(5,2),(5,3),(4,2))))
|
||||
self.assertEqual(
|
||||
pg.coords,
|
||||
(((1.0,2.0),(10.0,0.0),(12.0,9.0),(-1.0,15.0),(1.0,2.0)),
|
||||
((4.0,2.0),(5.0,2.0),(5.0,3.0),(4.0,2.0))),
|
||||
'Polygon _set_list')
|
||||
|
||||
lsa = Polygon(*pg.coords)
|
||||
for f in geos_function_tests:
|
||||
|
|
|
@ -354,11 +354,11 @@ class BaseTests(object):
|
|||
'success'])
|
||||
|
||||
@override_settings_tags(MESSAGE_TAGS={
|
||||
constants.INFO: 'info',
|
||||
constants.DEBUG: '',
|
||||
constants.WARNING: '',
|
||||
constants.ERROR: 'bad',
|
||||
29: 'custom',
|
||||
constants.INFO: 'info',
|
||||
constants.DEBUG: '',
|
||||
constants.WARNING: '',
|
||||
constants.ERROR: 'bad',
|
||||
29: 'custom',
|
||||
}
|
||||
)
|
||||
def test_custom_tags(self):
|
||||
|
|
|
@ -13,7 +13,7 @@ from .models import Redirect
|
|||
@override_settings(
|
||||
APPEND_SLASH=False,
|
||||
MIDDLEWARE_CLASSES=list(settings.MIDDLEWARE_CLASSES) +
|
||||
['django.contrib.redirects.middleware.RedirectFallbackMiddleware'],
|
||||
['django.contrib.redirects.middleware.RedirectFallbackMiddleware'],
|
||||
SITE_ID=1,
|
||||
)
|
||||
class RedirectTests(TestCase):
|
||||
|
@ -72,7 +72,7 @@ class OverriddenRedirectFallbackMiddleware(RedirectFallbackMiddleware):
|
|||
|
||||
@override_settings(
|
||||
MIDDLEWARE_CLASSES=list(settings.MIDDLEWARE_CLASSES) +
|
||||
['django.contrib.redirects.tests.OverriddenRedirectFallbackMiddleware'],
|
||||
['django.contrib.redirects.tests.OverriddenRedirectFallbackMiddleware'],
|
||||
SITE_ID=1,
|
||||
)
|
||||
class OverriddenRedirectMiddlewareTests(TestCase):
|
||||
|
|
|
@ -31,7 +31,7 @@ def index(request, sitemaps,
|
|||
site = site()
|
||||
protocol = req_protocol if site.protocol is None else site.protocol
|
||||
sitemap_url = urlresolvers.reverse(
|
||||
sitemap_url_name, kwargs={'section': section})
|
||||
sitemap_url_name, kwargs={'section': section})
|
||||
absolute_url = '%s://%s%s' % (protocol, req_site.domain, sitemap_url)
|
||||
sites.append(absolute_url)
|
||||
for page in range(2, site.paginator.num_pages + 1):
|
||||
|
|
|
@ -165,9 +165,9 @@ class BaseHandler(object):
|
|||
response = callback(request, **param_dict)
|
||||
except:
|
||||
signals.got_request_exception.send(
|
||||
sender=self.__class__, request=request)
|
||||
sender=self.__class__, request=request)
|
||||
response = self.handle_uncaught_exception(request,
|
||||
resolver, sys.exc_info())
|
||||
resolver, sys.exc_info())
|
||||
|
||||
except SuspiciousOperation as e:
|
||||
# The request logger receives events for any problematic request
|
||||
|
@ -181,9 +181,9 @@ class BaseHandler(object):
|
|||
response = callback(request, **param_dict)
|
||||
except:
|
||||
signals.got_request_exception.send(
|
||||
sender=self.__class__, request=request)
|
||||
sender=self.__class__, request=request)
|
||||
response = self.handle_uncaught_exception(request,
|
||||
resolver, sys.exc_info())
|
||||
resolver, sys.exc_info())
|
||||
|
||||
except SystemExit:
|
||||
# Allow sys.exit() to actually exit. See tickets #1023 and #4701
|
||||
|
|
|
@ -78,7 +78,7 @@ class Command(BaseCommand):
|
|||
except DatabaseError as e:
|
||||
raise CommandError(
|
||||
"Cache table '%s' could not be created.\nThe error was: %s." %
|
||||
(tablename, force_text(e)))
|
||||
(tablename, force_text(e)))
|
||||
for statement in index_output:
|
||||
curs.execute(statement)
|
||||
if self.verbosity > 1:
|
||||
|
|
|
@ -46,8 +46,8 @@ class Command(BaseCommand):
|
|||
|
||||
if not len(fixture_labels):
|
||||
raise CommandError(
|
||||
"No database fixture specified. Please provide the path "
|
||||
"of at least one fixture in the command line.")
|
||||
"No database fixture specified. Please provide the path "
|
||||
"of at least one fixture in the command line.")
|
||||
|
||||
self.verbosity = int(options.get('verbosity'))
|
||||
|
||||
|
@ -140,10 +140,10 @@ class Command(BaseCommand):
|
|||
obj.save(using=self.using)
|
||||
except (DatabaseError, IntegrityError) as e:
|
||||
e.args = ("Could not load %(app_label)s.%(object_name)s(pk=%(pk)s): %(error_msg)s" % {
|
||||
'app_label': obj.object._meta.app_label,
|
||||
'object_name': obj.object._meta.object_name,
|
||||
'pk': obj.object.pk,
|
||||
'error_msg': force_text(e)
|
||||
'app_label': obj.object._meta.app_label,
|
||||
'object_name': obj.object._meta.object_name,
|
||||
'pk': obj.object.pk,
|
||||
'error_msg': force_text(e)
|
||||
},)
|
||||
raise
|
||||
|
||||
|
@ -176,8 +176,8 @@ class Command(BaseCommand):
|
|||
# Check kept for backwards-compatibility; it doesn't look very useful.
|
||||
if '.' in os.path.basename(fixture_name):
|
||||
raise CommandError(
|
||||
"Problem installing fixture '%s': %s is not a known "
|
||||
"serialization format." % tuple(fixture_name.rsplit('.')))
|
||||
"Problem installing fixture '%s': %s is not a known "
|
||||
"serialization format." % tuple(fixture_name.rsplit('.')))
|
||||
|
||||
if self.verbosity >= 2:
|
||||
self.stdout.write("Loading '%s' fixtures..." % fixture_name)
|
||||
|
@ -210,8 +210,8 @@ class Command(BaseCommand):
|
|||
# duplicates are only allowed in different directories.
|
||||
if len(fixture_files_in_dir) > 1:
|
||||
raise CommandError(
|
||||
"Multiple fixtures named '%s' in %s. Aborting." %
|
||||
(fixture_name, humanize(fixture_dir)))
|
||||
"Multiple fixtures named '%s' in %s. Aborting." %
|
||||
(fixture_name, humanize(fixture_dir)))
|
||||
fixture_files.extend(fixture_files_in_dir)
|
||||
|
||||
if fixture_name != 'initial_data' and not fixture_files:
|
||||
|
|
|
@ -225,7 +225,7 @@ class Command(NoArgsCommand):
|
|||
|
||||
if (locale is None and not process_all) or self.domain is None:
|
||||
raise CommandError("Type '%s help %s' for usage information." % (
|
||||
os.path.basename(sys.argv[0]), sys.argv[1]))
|
||||
os.path.basename(sys.argv[0]), sys.argv[1]))
|
||||
|
||||
if self.verbosity > 1:
|
||||
self.stdout.write('examining files with the extensions: %s\n'
|
||||
|
|
|
@ -177,7 +177,7 @@ class Deserializer(base.Deserializer):
|
|||
data = {}
|
||||
if node.hasAttribute('pk'):
|
||||
data[Model._meta.pk.attname] = Model._meta.pk.to_python(
|
||||
node.getAttribute('pk'))
|
||||
node.getAttribute('pk'))
|
||||
|
||||
# Also start building a dict of m2m data (this is saved as
|
||||
# {m2m_accessor_attribute : [list_of_related_objects]})
|
||||
|
@ -272,15 +272,15 @@ class Deserializer(base.Deserializer):
|
|||
if not model_identifier:
|
||||
raise base.DeserializationError(
|
||||
"<%s> node is missing the required '%s' attribute"
|
||||
% (node.nodeName, attr))
|
||||
% (node.nodeName, attr))
|
||||
try:
|
||||
Model = models.get_model(*model_identifier.split("."))
|
||||
except TypeError:
|
||||
Model = None
|
||||
if Model is None:
|
||||
raise base.DeserializationError(
|
||||
"<%s> node has invalid model identifier: '%s'" %
|
||||
(node.nodeName, model_identifier))
|
||||
"<%s> node has invalid model identifier: '%s'"
|
||||
% (node.nodeName, model_identifier))
|
||||
return Model
|
||||
|
||||
|
||||
|
|
|
@ -107,16 +107,20 @@ class ModelBase(type):
|
|||
|
||||
new_class.add_to_class('_meta', Options(meta, **kwargs))
|
||||
if not abstract:
|
||||
new_class.add_to_class('DoesNotExist', subclass_exception(str('DoesNotExist'),
|
||||
tuple(x.DoesNotExist
|
||||
for x in parents if hasattr(x, '_meta') and not x._meta.abstract)
|
||||
or (ObjectDoesNotExist,),
|
||||
module, attached_to=new_class))
|
||||
new_class.add_to_class('MultipleObjectsReturned', subclass_exception(str('MultipleObjectsReturned'),
|
||||
tuple(x.MultipleObjectsReturned
|
||||
for x in parents if hasattr(x, '_meta') and not x._meta.abstract)
|
||||
or (MultipleObjectsReturned,),
|
||||
module, attached_to=new_class))
|
||||
new_class.add_to_class(
|
||||
'DoesNotExist',
|
||||
subclass_exception(
|
||||
str('DoesNotExist'),
|
||||
tuple(x.DoesNotExist for x in parents if hasattr(x, '_meta') and not x._meta.abstract) or (ObjectDoesNotExist,),
|
||||
module,
|
||||
attached_to=new_class))
|
||||
new_class.add_to_class(
|
||||
'MultipleObjectsReturned',
|
||||
subclass_exception(
|
||||
str('MultipleObjectsReturned'),
|
||||
tuple(x.MultipleObjectsReturned for x in parents if hasattr(x, '_meta') and not x._meta.abstract) or (MultipleObjectsReturned,),
|
||||
module,
|
||||
attached_to=new_class))
|
||||
if base_meta and not base_meta.abstract:
|
||||
# Non-abstract child classes inherit some attributes from their
|
||||
# non-abstract parent (unless an ABC comes before it in the
|
||||
|
@ -565,9 +569,9 @@ class Model(six.with_metaclass(ModelBase)):
|
|||
field_names.add(field.attname)
|
||||
deferred_fields = [
|
||||
f.attname for f in self._meta.fields
|
||||
if f.attname not in self.__dict__
|
||||
and isinstance(self.__class__.__dict__[f.attname],
|
||||
DeferredAttribute)]
|
||||
if (f.attname not in self.__dict__ and
|
||||
isinstance(self.__class__.__dict__[f.attname], DeferredAttribute))
|
||||
]
|
||||
|
||||
loaded_fields = field_names.difference(deferred_fields)
|
||||
if loaded_fields:
|
||||
|
|
|
@ -694,7 +694,7 @@ class QuerySet(object):
|
|||
def _filter_or_exclude(self, negate, *args, **kwargs):
|
||||
if args or kwargs:
|
||||
assert self.query.can_filter(), \
|
||||
"Cannot filter a query once a slice has been taken."
|
||||
"Cannot filter a query once a slice has been taken."
|
||||
|
||||
clone = self._clone()
|
||||
if negate:
|
||||
|
|
|
@ -1681,7 +1681,7 @@ class Query(object):
|
|||
count = self.aggregates_module.Count('*', is_summary=True)
|
||||
else:
|
||||
assert len(self.select) == 1, \
|
||||
"Cannot add count col with multiple cols in 'select': %r" % self.select
|
||||
"Cannot add count col with multiple cols in 'select': %r" % self.select
|
||||
count = self.aggregates_module.Count(self.select[0].col)
|
||||
else:
|
||||
opts = self.get_meta()
|
||||
|
@ -1693,7 +1693,7 @@ class Query(object):
|
|||
# Because of SQL portability issues, multi-column, distinct
|
||||
# counts need a sub-query -- see get_count() for details.
|
||||
assert len(self.select) == 1, \
|
||||
"Cannot add count col with multiple cols in 'select'."
|
||||
"Cannot add count col with multiple cols in 'select'."
|
||||
|
||||
count = self.aggregates_module.Count(self.select[0].col, distinct=True)
|
||||
# Distinct handling is done in Count(), so don't do it at this
|
||||
|
|
|
@ -262,7 +262,7 @@ class DateTimeQuery(DateQuery):
|
|||
|
||||
def _check_field(self, field):
|
||||
assert isinstance(field, DateTimeField), \
|
||||
"%r isn't a DateTimeField." % field.name
|
||||
"%r isn't a DateTimeField." % field.name
|
||||
|
||||
def _get_select(self, col, lookup_type):
|
||||
if self.tzinfo is None:
|
||||
|
|
|
@ -17,8 +17,8 @@ import warnings
|
|||
from functools import wraps
|
||||
|
||||
from django.db import (
|
||||
connections, DEFAULT_DB_ALIAS,
|
||||
DatabaseError, ProgrammingError)
|
||||
connections, DEFAULT_DB_ALIAS,
|
||||
DatabaseError, ProgrammingError)
|
||||
from django.utils.decorators import available_attrs
|
||||
|
||||
|
||||
|
|
|
@ -687,7 +687,7 @@ class BaseModelFormSet(BaseFormSet):
|
|||
else:
|
||||
return ugettext("Please correct the duplicate data for %(field)s, "
|
||||
"which must be unique.") % {
|
||||
"field": get_text_list(unique_check, six.text_type(_("and"))),
|
||||
"field": get_text_list(unique_check, six.text_type(_("and"))),
|
||||
}
|
||||
|
||||
def get_date_error_message(self, date_check):
|
||||
|
@ -1042,8 +1042,8 @@ class ModelChoiceIterator(object):
|
|||
yield self.choice(obj)
|
||||
|
||||
def __len__(self):
|
||||
return len(self.queryset) +\
|
||||
(1 if self.field.empty_label is not None else 0)
|
||||
return (len(self.queryset) +
|
||||
(1 if self.field.empty_label is not None else 0))
|
||||
|
||||
def choice(self, obj):
|
||||
return (self.field.prepare_value(obj), self.field.label_from_instance(obj))
|
||||
|
|
|
@ -62,9 +62,9 @@ class Media(object):
|
|||
# We need to sort the keys, and iterate over the sorted list.
|
||||
media = sorted(self._css.keys())
|
||||
return chain(*[
|
||||
[format_html('<link href="{0}" type="text/css" media="{1}" rel="stylesheet" />', self.absolute_path(path), medium)
|
||||
for path in self._css[medium]]
|
||||
for medium in media])
|
||||
[format_html('<link href="{0}" type="text/css" media="{1}" rel="stylesheet" />', self.absolute_path(path), medium)
|
||||
for path in self._css[medium]]
|
||||
for medium in media])
|
||||
|
||||
def absolute_path(self, path, prefix=None):
|
||||
if path.startswith(('http://', 'https://', '/')):
|
||||
|
|
|
@ -59,8 +59,8 @@ def fix_IE_for_attach(request, response):
|
|||
pass
|
||||
if response.has_header('Cache-Control'):
|
||||
cache_control_values = [value.strip() for value in
|
||||
response['Cache-Control'].split(',')
|
||||
if value.strip().lower() not in offending_headers]
|
||||
response['Cache-Control'].split(',')
|
||||
if value.strip().lower() not in offending_headers]
|
||||
|
||||
if not len(cache_control_values):
|
||||
del response['Cache-Control']
|
||||
|
|
|
@ -38,7 +38,7 @@ class LocaleMiddleware(object):
|
|||
def process_response(self, request, response):
|
||||
language = translation.get_language()
|
||||
language_from_path = translation.get_language_from_path(
|
||||
request.path_info, supported=self._supported_languages
|
||||
request.path_info, supported=self._supported_languages
|
||||
)
|
||||
if (response.status_code == 404 and not language_from_path
|
||||
and self.is_language_prefix_patterns_used()):
|
||||
|
|
|
@ -279,7 +279,7 @@ def setup_databases(verbosity, interactive, **kwargs):
|
|||
connection = connections[alias]
|
||||
if test_db_name is None:
|
||||
test_db_name = connection.creation.create_test_db(
|
||||
verbosity, autoclobber=not interactive)
|
||||
verbosity, autoclobber=not interactive)
|
||||
destroy = True
|
||||
else:
|
||||
connection.settings_dict['NAME'] = test_db_name
|
||||
|
|
|
@ -740,7 +740,7 @@ class TransactionTestCase(SimpleTestCase):
|
|||
conn = connections[db_name]
|
||||
if conn.features.supports_sequence_reset:
|
||||
sql_list = conn.ops.sequence_reset_by_name_sql(
|
||||
no_style(), conn.introspection.sequence_list())
|
||||
no_style(), conn.introspection.sequence_list())
|
||||
if sql_list:
|
||||
with transaction.commit_on_success_unless_managed(using=db_name):
|
||||
cursor = conn.cursor()
|
||||
|
|
|
@ -97,8 +97,8 @@ def format_html_join(sep, format_string, args_generator):
|
|||
|
||||
"""
|
||||
return mark_safe(conditional_escape(sep).join(
|
||||
format_html(format_string, *tuple(args))
|
||||
for args in args_generator))
|
||||
format_html(format_string, *tuple(args))
|
||||
for args in args_generator))
|
||||
|
||||
|
||||
def linebreaks(value, autoescape=False):
|
||||
|
|
|
@ -14,8 +14,8 @@ from django.utils.encoding import force_str, force_text
|
|||
from django.utils.functional import allow_lazy
|
||||
from django.utils import six
|
||||
from django.utils.six.moves.urllib.parse import (
|
||||
quote, quote_plus, unquote, unquote_plus, urlparse,
|
||||
urlencode as original_urlencode)
|
||||
quote, quote_plus, unquote, unquote_plus, urlparse,
|
||||
urlencode as original_urlencode)
|
||||
|
||||
ETAG_MATCH = re.compile(r'(?:W/)?"((?:\\.|[^"])*)"')
|
||||
|
||||
|
|
|
@ -143,14 +143,14 @@ class JsLexer(Lexer):
|
|||
]
|
||||
|
||||
states = {
|
||||
'div': # slash will mean division
|
||||
both_before + [
|
||||
Tok("punct", literals("/= /"), next='reg'),
|
||||
] + both_after,
|
||||
# slash will mean division
|
||||
'div': both_before + [
|
||||
Tok("punct", literals("/= /"), next='reg'),
|
||||
] + both_after,
|
||||
|
||||
'reg': # slash will mean regex
|
||||
both_before + [
|
||||
Tok("regex",
|
||||
# slash will mean regex
|
||||
'reg': both_before + [
|
||||
Tok("regex",
|
||||
r"""
|
||||
/ # opening slash
|
||||
# First character is..
|
||||
|
@ -174,7 +174,7 @@ class JsLexer(Lexer):
|
|||
/ # closing slash
|
||||
[a-zA-Z0-9]* # trailing flags
|
||||
""", next='div'),
|
||||
] + both_after,
|
||||
] + both_after,
|
||||
}
|
||||
|
||||
def __init__(self):
|
||||
|
|
|
@ -136,7 +136,7 @@ class LocalTimezone(ReferenceLocalTimezone):
|
|||
except (OverflowError, ValueError) as exc:
|
||||
exc_type = type(exc)
|
||||
exc_value = exc_type(
|
||||
"Unsupported value: %r. You should install pytz." % dt)
|
||||
"Unsupported value: %r. You should install pytz." % dt)
|
||||
exc_value.__cause__ = exc
|
||||
six.reraise(exc_type, exc_value, sys.exc_info()[2])
|
||||
|
||||
|
|
|
@ -366,7 +366,7 @@ class BaseDateListView(MultipleObjectMixin, DateMixin, View):
|
|||
is_empty = len(qs) == 0 if paginate_by is None else not qs.exists()
|
||||
if is_empty:
|
||||
raise Http404(_("No %(verbose_name_plural)s available") % {
|
||||
'verbose_name_plural': force_text(qs.model._meta.verbose_name_plural)
|
||||
'verbose_name_plural': force_text(qs.model._meta.verbose_name_plural)
|
||||
})
|
||||
|
||||
return qs
|
||||
|
|
|
@ -65,8 +65,8 @@ class MultipleObjectMixin(ContextMixin):
|
|||
return (paginator, page, page.object_list, page.has_other_pages())
|
||||
except InvalidPage as e:
|
||||
raise Http404(_('Invalid page (%(page_number)s): %(message)s') % {
|
||||
'page_number': page_number,
|
||||
'message': str(e)
|
||||
'page_number': page_number,
|
||||
'message': str(e)
|
||||
})
|
||||
|
||||
def get_paginate_by(self, queryset):
|
||||
|
|
|
@ -6,8 +6,8 @@ import os
|
|||
testing = False
|
||||
|
||||
DONT_TOUCH = (
|
||||
'./index.txt',
|
||||
)
|
||||
'./index.txt',
|
||||
)
|
||||
|
||||
def target_name(fn):
|
||||
if fn.endswith('.txt'):
|
||||
|
|
|
@@ -4,7 +4,7 @@ install-script = scripts/rpm-install.sh

[flake8]
exclude=./django/utils/dictconfig.py,./django/contrib/comments/*,./django/utils/unittest.py,./tests/comment_tests/*,./django/test/_doctest.py
ignore=E124,E125,E126,E127,E128,E225,E226,E241,E251,E302,E501,E203,E221,E227,E231,E261,E301,F401,F403,W601
ignore=E124,E125,E127,E128,E225,E226,E241,E251,E302,E501,E203,E221,E227,E231,E261,E301,F401,F403,W601

[metadata]
license-file = LICENSE

@ -169,10 +169,9 @@ class Sketch(models.Model):
|
|||
class Fabric(models.Model):
|
||||
NG_CHOICES = (
|
||||
('Textured', (
|
||||
('x', 'Horizontal'),
|
||||
('y', 'Vertical'),
|
||||
)
|
||||
),
|
||||
('x', 'Horizontal'),
|
||||
('y', 'Vertical'),
|
||||
)),
|
||||
('plain', 'Smooth'),
|
||||
)
|
||||
surface = models.CharField(max_length=20, choices=NG_CHOICES)
|
||||
|
@ -390,8 +389,8 @@ class Post(models.Model):
|
|||
title = models.CharField(max_length=100, help_text="Some help text for the title (with unicode ŠĐĆŽćžšđ)")
|
||||
content = models.TextField(help_text="Some help text for the content (with unicode ŠĐĆŽćžšđ)")
|
||||
posted = models.DateField(
|
||||
default=datetime.date.today,
|
||||
help_text="Some help text for the date (with unicode ŠĐĆŽćžšđ)"
|
||||
default=datetime.date.today,
|
||||
help_text="Some help text for the date (with unicode ŠĐĆŽćžšđ)"
|
||||
)
|
||||
public = models.NullBooleanField()
|
||||
|
||||
|
|
|
@ -425,12 +425,10 @@ class AdminViewBasicTest(AdminViewBasicTestCase):
|
|||
test=lambda obj, value: obj.chap.book.name == value),
|
||||
'chap__book__promo__id__exact': dict(
|
||||
values=[p.id for p in Promo.objects.all()],
|
||||
test=lambda obj, value:
|
||||
obj.chap.book.promo_set.filter(id=value).exists()),
|
||||
test=lambda obj, value: obj.chap.book.promo_set.filter(id=value).exists()),
|
||||
'chap__book__promo__name': dict(
|
||||
values=[p.name for p in Promo.objects.all()],
|
||||
test=lambda obj, value:
|
||||
obj.chap.book.promo_set.filter(name=value).exists()),
|
||||
test=lambda obj, value: obj.chap.book.promo_set.filter(name=value).exists()),
|
||||
}
|
||||
for filter_path, params in filters.items():
|
||||
for value in params['values']:
|
||||
|
@ -1253,9 +1251,9 @@ class AdminViewPermissionsTest(TestCase):
|
|||
response = self.client.get('/test_admin/admin/admin_views/customarticle/%d/delete/' % article_pk)
|
||||
self.assertTemplateUsed(response, 'custom_admin/delete_confirmation.html')
|
||||
response = self.client.post('/test_admin/admin/admin_views/customarticle/', data={
|
||||
'index': 0,
|
||||
'action': ['delete_selected'],
|
||||
'_selected_action': ['1'],
|
||||
'index': 0,
|
||||
'action': ['delete_selected'],
|
||||
'_selected_action': ['1'],
|
||||
})
|
||||
self.assertTemplateUsed(response, 'custom_admin/delete_selected_confirmation.html')
|
||||
response = self.client.get('/test_admin/admin/admin_views/customarticle/%d/history/' % article_pk)
|
||||
|
|
|
@ -54,8 +54,12 @@ class AdminFormfieldForDBFieldTests(TestCase):
|
|||
# Check that we got a field of the right type
|
||||
self.assertTrue(
|
||||
isinstance(widget, widgetclass),
|
||||
"Wrong widget for %s.%s: expected %s, got %s" %
|
||||
(model.__class__.__name__, fieldname, widgetclass, type(widget))
|
||||
"Wrong widget for %s.%s: expected %s, got %s" % (
|
||||
model.__class__.__name__,
|
||||
fieldname,
|
||||
widgetclass,
|
||||
type(widget),
|
||||
)
|
||||
)
|
||||
|
||||
# Return the formfield so that other tests can continue
|
||||
|
|
|
@ -605,8 +605,7 @@ class ModelTest(TestCase):
|
|||
)
|
||||
|
||||
dicts = Article.objects.filter(
|
||||
pub_date__year=2008).extra(
|
||||
select={'dashed-value': '1'}
|
||||
pub_date__year=2008).extra(select={'dashed-value': '1'}
|
||||
).values('headline', 'dashed-value')
|
||||
self.assertEqual([sorted(d.items()) for d in dicts],
|
||||
[[('dashed-value', 1), ('headline', 'Article 11')], [('dashed-value', 1), ('headline', 'Article 12')]])
|
||||
|
@ -629,8 +628,7 @@ class ModelTest(TestCase):
|
|||
)
|
||||
|
||||
articles = Article.objects.filter(
|
||||
pub_date__year=2008).extra(
|
||||
select={'dashed-value': '1', 'undashedvalue': '2'})
|
||||
pub_date__year=2008).extra(select={'dashed-value': '1', 'undashedvalue': '2'})
|
||||
self.assertEqual(articles[0].undashedvalue, 2)
|
||||
|
||||
def test_create_relation_with_ugettext_lazy(self):
|
||||
|
|
|
@ -1138,14 +1138,14 @@ class GetCacheTests(unittest.TestCase):
|
|||
|
||||
|
||||
@override_settings(
|
||||
CACHE_MIDDLEWARE_KEY_PREFIX='settingsprefix',
|
||||
CACHE_MIDDLEWARE_SECONDS=1,
|
||||
CACHES={
|
||||
'default': {
|
||||
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
|
||||
},
|
||||
CACHE_MIDDLEWARE_KEY_PREFIX='settingsprefix',
|
||||
CACHE_MIDDLEWARE_SECONDS=1,
|
||||
CACHES={
|
||||
'default': {
|
||||
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
|
||||
},
|
||||
USE_I18N=False,
|
||||
},
|
||||
USE_I18N=False,
|
||||
)
|
||||
class CacheUtils(TestCase):
|
||||
"""TestCase for django.utils.cache functions."""
|
||||
|
@ -1245,25 +1245,25 @@ class CacheUtils(TestCase):
|
|||
|
||||
|
||||
@override_settings(
|
||||
CACHES={
|
||||
'default': {
|
||||
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
|
||||
'KEY_PREFIX': 'cacheprefix',
|
||||
},
|
||||
CACHES={
|
||||
'default': {
|
||||
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
|
||||
'KEY_PREFIX': 'cacheprefix',
|
||||
},
|
||||
},
|
||||
)
|
||||
class PrefixedCacheUtils(CacheUtils):
|
||||
pass
|
||||
|
||||
|
||||
@override_settings(
|
||||
CACHE_MIDDLEWARE_SECONDS=60,
|
||||
CACHE_MIDDLEWARE_KEY_PREFIX='test',
|
||||
CACHES={
|
||||
'default': {
|
||||
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
|
||||
},
|
||||
CACHE_MIDDLEWARE_SECONDS=60,
|
||||
CACHE_MIDDLEWARE_KEY_PREFIX='test',
|
||||
CACHES={
|
||||
'default': {
|
||||
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
|
||||
},
|
||||
},
|
||||
)
|
||||
class CacheHEADTest(TestCase):
|
||||
|
||||
|
@ -1318,16 +1318,16 @@ class CacheHEADTest(TestCase):
|
|||
|
||||
|
||||
@override_settings(
|
||||
CACHE_MIDDLEWARE_KEY_PREFIX='settingsprefix',
|
||||
CACHES={
|
||||
'default': {
|
||||
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
|
||||
},
|
||||
CACHE_MIDDLEWARE_KEY_PREFIX='settingsprefix',
|
||||
CACHES={
|
||||
'default': {
|
||||
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
|
||||
},
|
||||
LANGUAGES=(
|
||||
('en', 'English'),
|
||||
('es', 'Spanish'),
|
||||
),
|
||||
},
|
||||
LANGUAGES=(
|
||||
('en', 'English'),
|
||||
('es', 'Spanish'),
|
||||
),
|
||||
)
|
||||
class CacheI18nTest(TestCase):
|
||||
|
||||
|
@ -1496,10 +1496,10 @@ class CacheI18nTest(TestCase):
|
|||
"Cache keys should include the time zone name when time zones are active")
|
||||
|
||||
@override_settings(
|
||||
CACHE_MIDDLEWARE_KEY_PREFIX="test",
|
||||
CACHE_MIDDLEWARE_SECONDS=60,
|
||||
USE_ETAGS=True,
|
||||
USE_I18N=True,
|
||||
CACHE_MIDDLEWARE_KEY_PREFIX="test",
|
||||
CACHE_MIDDLEWARE_SECONDS=60,
|
||||
USE_ETAGS=True,
|
||||
USE_I18N=True,
|
||||
)
|
||||
def test_middleware(self):
|
||||
def set_cache(request, lang, msg):
|
||||
|
@ -1560,9 +1560,9 @@ class CacheI18nTest(TestCase):
|
|||
translation.deactivate()
|
||||
|
||||
@override_settings(
|
||||
CACHE_MIDDLEWARE_KEY_PREFIX="test",
|
||||
CACHE_MIDDLEWARE_SECONDS=60,
|
||||
USE_ETAGS=True,
|
||||
CACHE_MIDDLEWARE_KEY_PREFIX="test",
|
||||
CACHE_MIDDLEWARE_SECONDS=60,
|
||||
USE_ETAGS=True,
|
||||
)
|
||||
def test_middleware_doesnt_cache_streaming_response(self):
|
||||
request = self._get_request()
|
||||
|
@ -1581,12 +1581,12 @@ class CacheI18nTest(TestCase):
|
|||
self.assertIsNone(get_cache_data)
|
||||
|
||||
@override_settings(
|
||||
CACHES={
|
||||
'default': {
|
||||
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
|
||||
'KEY_PREFIX': 'cacheprefix'
|
||||
},
|
||||
CACHES={
|
||||
'default': {
|
||||
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
|
||||
'KEY_PREFIX': 'cacheprefix'
|
||||
},
|
||||
},
|
||||
)
|
||||
class PrefixedCacheI18nTest(CacheI18nTest):
|
||||
pass
|
||||
|
@ -1597,20 +1597,20 @@ def hello_world_view(request, value):
|
|||
|
||||
|
||||
@override_settings(
|
||||
CACHE_MIDDLEWARE_ALIAS='other',
|
||||
CACHE_MIDDLEWARE_KEY_PREFIX='middlewareprefix',
|
||||
CACHE_MIDDLEWARE_SECONDS=30,
|
||||
CACHE_MIDDLEWARE_ANONYMOUS_ONLY=False,
|
||||
CACHES={
|
||||
'default': {
|
||||
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
|
||||
},
|
||||
'other': {
|
||||
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
|
||||
'LOCATION': 'other',
|
||||
'TIMEOUT': '1',
|
||||
},
|
||||
CACHE_MIDDLEWARE_ALIAS='other',
|
||||
CACHE_MIDDLEWARE_KEY_PREFIX='middlewareprefix',
|
||||
CACHE_MIDDLEWARE_SECONDS=30,
|
||||
CACHE_MIDDLEWARE_ANONYMOUS_ONLY=False,
|
||||
CACHES={
|
||||
'default': {
|
||||
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
|
||||
},
|
||||
'other': {
|
||||
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
|
||||
'LOCATION': 'other',
|
||||
'TIMEOUT': '1',
|
||||
},
|
||||
},
|
||||
)
|
||||
class CacheMiddlewareTest(IgnoreDeprecationWarningsMixin, TestCase):
|
||||
|
||||
|
@ -1816,14 +1816,14 @@ class CacheMiddlewareTest(IgnoreDeprecationWarningsMixin, TestCase):
|
|||
self.assertEqual(response.content, b'Hello World 16')
|
||||
|
||||
@override_settings(
|
||||
CACHE_MIDDLEWARE_KEY_PREFIX='settingsprefix',
|
||||
CACHE_MIDDLEWARE_SECONDS=1,
|
||||
CACHES={
|
||||
'default': {
|
||||
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
|
||||
},
|
||||
CACHE_MIDDLEWARE_KEY_PREFIX='settingsprefix',
|
||||
CACHE_MIDDLEWARE_SECONDS=1,
|
||||
CACHES={
|
||||
'default': {
|
||||
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
|
||||
},
|
||||
USE_I18N=False,
|
||||
},
|
||||
USE_I18N=False,
|
||||
)
|
||||
class TestWithTemplateResponse(TestCase):
|
||||
"""
|
||||
|
|
|
@ -51,8 +51,7 @@ class DistinctOnTests(TestCase):
|
|||
['<Staff: p1>', '<Staff: p1>', '<Staff: p2>', '<Staff: p3>'],
|
||||
),
|
||||
(
|
||||
Celebrity.objects.filter(fan__in=[self.fan1, self.fan2, self.fan3]).
|
||||
distinct('name').order_by('name'),
|
||||
Celebrity.objects.filter(fan__in=[self.fan1, self.fan2, self.fan3]).distinct('name').order_by('name'),
|
||||
['<Celebrity: c1>', '<Celebrity: c2>'],
|
||||
),
|
||||
# Does combining querysets work?
|
||||
|
|
|
@ -13,9 +13,9 @@ class ExtraRegressTests(TestCase):
|
|||
|
||||
def setUp(self):
|
||||
self.u = User.objects.create_user(
|
||||
username="fred",
|
||||
password="secret",
|
||||
email="fred@example.com"
|
||||
username="fred",
|
||||
password="secret",
|
||||
email="fred@example.com"
|
||||
)
|
||||
|
||||
def test_regression_7314_7372(self):
|
||||
|
@ -41,9 +41,9 @@ class ExtraRegressTests(TestCase):
|
|||
|
||||
# Queryset to match most recent revision:
|
||||
qs = RevisionableModel.objects.extra(
|
||||
where=["%(table)s.id IN (SELECT MAX(rev.id) FROM %(table)s rev GROUP BY rev.base_id)" % {
|
||||
'table': RevisionableModel._meta.db_table,
|
||||
}]
|
||||
where=["%(table)s.id IN (SELECT MAX(rev.id) FROM %(table)s rev GROUP BY rev.base_id)" % {
|
||||
'table': RevisionableModel._meta.db_table,
|
||||
}]
|
||||
)
|
||||
|
||||
self.assertQuerysetEqual(qs,
|
||||
|
@ -74,8 +74,8 @@ class ExtraRegressTests(TestCase):
|
|||
# select portions. Applies when portions are updated or otherwise
|
||||
# moved around.
|
||||
qs = User.objects.extra(
|
||||
select=OrderedDict((("alpha", "%s"), ("beta", "2"), ("gamma", "%s"))),
|
||||
select_params=(1, 3)
|
||||
select=OrderedDict((("alpha", "%s"), ("beta", "2"), ("gamma", "%s"))),
|
||||
select_params=(1, 3)
|
||||
)
|
||||
qs = qs.extra(select={"beta": 4})
|
||||
qs = qs.extra(select={"alpha": "%s"}, select_params=[5])
|
||||
|
@ -129,11 +129,11 @@ class ExtraRegressTests(TestCase):
|
|||
should still be present because of the extra() call.
|
||||
"""
|
||||
self.assertQuerysetEqual(
|
||||
Order.objects.extra(where=["username=%s"],
|
||||
params=["fred"],
|
||||
tables=["auth_user"]
|
||||
).order_by('created_by'),
|
||||
[]
|
||||
Order.objects.extra(where=["username=%s"],
|
||||
params=["fred"],
|
||||
tables=["auth_user"]
|
||||
).order_by('created_by'),
|
||||
[]
|
||||
)
|
||||
|
||||
def test_regression_8819(self):
|
||||
|
@ -294,7 +294,7 @@ class ExtraRegressTests(TestCase):
|
|||
|
||||
self.assertQuerysetEqual(
|
||||
TestObject.objects.filter(
|
||||
pk__in=TestObject.objects.extra(select={'extra': 1}).values('pk')
|
||||
pk__in=TestObject.objects.extra(select={'extra': 1}).values('pk')
|
||||
),
|
||||
['<TestObject: TestObject: first,second,third>']
|
||||
)
|
||||
|
@ -312,8 +312,7 @@ class ExtraRegressTests(TestCase):
|
|||
)
|
||||
|
||||
self.assertQuerysetEqual(
|
||||
TestObject.objects.filter(pk=obj.pk) |
|
||||
TestObject.objects.extra(where=["id > %s"], params=[obj.pk]),
|
||||
TestObject.objects.filter(pk=obj.pk) | TestObject.objects.extra(where=["id > %s"], params=[obj.pk]),
|
||||
['<TestObject: TestObject: first,second,third>']
|
||||
)
|
||||
|
||||
|
|
|
@ -377,8 +377,7 @@ class FileUploadTests(TestCase):
|
|||
vars = {'boundary': 'oUrBoUnDaRyStRiNg'}
|
||||
post_data = [
|
||||
'--%(boundary)s',
|
||||
'Content-Disposition: form-data; name="file_field"; '
|
||||
'filename="MiXeD_cAsE.txt"',
|
||||
'Content-Disposition: form-data; name="file_field"; filename="MiXeD_cAsE.txt"',
|
||||
'Content-Type: application/octet-stream',
|
||||
'',
|
||||
'file contents\n'
|
||||
|
|
|
@ -1134,14 +1134,14 @@ class FieldsTests(SimpleTestCase):
|
|||
f.choices = [p for p in f.choices if p[0].endswith('.py')]
|
||||
f.choices.sort()
|
||||
expected = [
|
||||
('/django/forms/__init__.py', '__init__.py'),
|
||||
('/django/forms/fields.py', 'fields.py'),
|
||||
('/django/forms/forms.py', 'forms.py'),
|
||||
('/django/forms/formsets.py', 'formsets.py'),
|
||||
('/django/forms/models.py', 'models.py'),
|
||||
('/django/forms/util.py', 'util.py'),
|
||||
('/django/forms/utils.py', 'utils.py'),
|
||||
('/django/forms/widgets.py', 'widgets.py')
|
||||
('/django/forms/__init__.py', '__init__.py'),
|
||||
('/django/forms/fields.py', 'fields.py'),
|
||||
('/django/forms/forms.py', 'forms.py'),
|
||||
('/django/forms/formsets.py', 'formsets.py'),
|
||||
('/django/forms/models.py', 'models.py'),
|
||||
('/django/forms/util.py', 'util.py'),
|
||||
('/django/forms/utils.py', 'utils.py'),
|
||||
('/django/forms/widgets.py', 'widgets.py')
|
||||
]
|
||||
for exp, got in zip(expected, fix_os_paths(f.choices)):
|
||||
self.assertEqual(exp[1], got[1])
|
||||
|
@ -1155,14 +1155,14 @@ class FieldsTests(SimpleTestCase):
|
|||
f = FilePathField(path=path, match='^.*?\.py$')
|
||||
f.choices.sort()
|
||||
expected = [
|
||||
('/django/forms/__init__.py', '__init__.py'),
|
||||
('/django/forms/fields.py', 'fields.py'),
|
||||
('/django/forms/forms.py', 'forms.py'),
|
||||
('/django/forms/formsets.py', 'formsets.py'),
|
||||
('/django/forms/models.py', 'models.py'),
|
||||
('/django/forms/util.py', 'util.py'),
|
||||
('/django/forms/utils.py', 'utils.py'),
|
||||
('/django/forms/widgets.py', 'widgets.py')
|
||||
('/django/forms/__init__.py', '__init__.py'),
|
||||
('/django/forms/fields.py', 'fields.py'),
|
||||
('/django/forms/forms.py', 'forms.py'),
|
||||
('/django/forms/formsets.py', 'formsets.py'),
|
||||
('/django/forms/models.py', 'models.py'),
|
||||
('/django/forms/util.py', 'util.py'),
|
||||
('/django/forms/utils.py', 'utils.py'),
|
||||
('/django/forms/widgets.py', 'widgets.py')
|
||||
]
|
||||
for exp, got in zip(expected, fix_os_paths(f.choices)):
|
||||
self.assertEqual(exp[1], got[1])
|
||||
|
@ -1174,16 +1174,16 @@ class FieldsTests(SimpleTestCase):
|
|||
f = FilePathField(path=path, recursive=True, match='^.*?\.py$')
|
||||
f.choices.sort()
|
||||
expected = [
|
||||
('/django/forms/__init__.py', '__init__.py'),
|
||||
('/django/forms/extras/__init__.py', 'extras/__init__.py'),
|
||||
('/django/forms/extras/widgets.py', 'extras/widgets.py'),
|
||||
('/django/forms/fields.py', 'fields.py'),
|
||||
('/django/forms/forms.py', 'forms.py'),
|
||||
('/django/forms/formsets.py', 'formsets.py'),
|
||||
('/django/forms/models.py', 'models.py'),
|
||||
('/django/forms/util.py', 'util.py'),
|
||||
('/django/forms/utils.py', 'utils.py'),
|
||||
('/django/forms/widgets.py', 'widgets.py')
|
||||
('/django/forms/__init__.py', '__init__.py'),
|
||||
('/django/forms/extras/__init__.py', 'extras/__init__.py'),
|
||||
('/django/forms/extras/widgets.py', 'extras/widgets.py'),
|
||||
('/django/forms/fields.py', 'fields.py'),
|
||||
('/django/forms/forms.py', 'forms.py'),
|
||||
('/django/forms/formsets.py', 'formsets.py'),
|
||||
('/django/forms/models.py', 'models.py'),
|
||||
('/django/forms/util.py', 'util.py'),
|
||||
('/django/forms/utils.py', 'utils.py'),
|
||||
('/django/forms/widgets.py', 'widgets.py')
|
||||
]
|
||||
for exp, got in zip(expected, fix_os_paths(f.choices)):
|
||||
self.assertEqual(exp[1], got[1])
|
||||
|
|
|
@ -1182,9 +1182,9 @@ class ClearableFileInputTests(TestCase):
|
|||
widget = ClearableFileInput()
|
||||
widget.is_required = False
|
||||
self.assertEqual(widget.value_from_datadict(
|
||||
data={'myfile-clear': True},
|
||||
files={},
|
||||
name='myfile'), False)
|
||||
data={'myfile-clear': True},
|
||||
files={},
|
||||
name='myfile'), False)
|
||||
|
||||
def test_clear_input_checked_returns_false_only_if_not_required(self):
|
||||
"""
|
||||
|
@ -1196,6 +1196,6 @@ class ClearableFileInputTests(TestCase):
|
|||
widget.is_required = True
|
||||
f = SimpleUploadedFile('something.txt', b'content')
|
||||
self.assertEqual(widget.value_from_datadict(
|
||||
data={'myfile-clear': True},
|
||||
files={'myfile': f},
|
||||
name='myfile'), f)
|
||||
data={'myfile-clear': True},
|
||||
files={'myfile': f},
|
||||
name='myfile'), f)
|
||||
|
|
|
@ -124,10 +124,10 @@ class ModelFormCallableModelDefault(TestCase):
|
|||
obj2 = ChoiceOptionModel.objects.create(id=2, name='option 2')
|
||||
obj3 = ChoiceOptionModel.objects.create(id=3, name='option 3')
|
||||
self.assertHTMLEqual(ChoiceFieldForm(initial={
|
||||
'choice': obj2,
|
||||
'choice_int': obj2,
|
||||
'multi_choice': [obj2,obj3],
|
||||
'multi_choice_int': ChoiceOptionModel.objects.exclude(name="default"),
|
||||
'choice': obj2,
|
||||
'choice_int': obj2,
|
||||
'multi_choice': [obj2,obj3],
|
||||
'multi_choice_int': ChoiceOptionModel.objects.exclude(name="default"),
|
||||
}).as_p(), """<p><label for="id_choice">Choice:</label> <select name="choice" id="id_choice">
|
||||
<option value="1">ChoiceOption 1</option>
|
||||
<option value="2" selected="selected">ChoiceOption 2</option>
|
||||
|
|
|
@@ -98,23 +98,23 @@ class GenericRelationsTests(TestCase):
)

self.assertQuerysetEqual(TaggedItem.objects.all(), [
('clearish', Mineral, quartz.pk),
('fatty', Animal, platypus.pk),
('fatty', Vegetable, bacon.pk),
('hairy', Animal, lion.pk),
('salty', Vegetable, bacon.pk),
('shiny', Animal, platypus.pk),
('yellow', Animal, lion.pk)
],
comp_func
)
lion.delete()
self.assertQuerysetEqual(TaggedItem.objects.all(), [
('clearish', Mineral, quartz.pk),
('fatty', Animal, platypus.pk),
('fatty', Vegetable, bacon.pk),
('salty', Vegetable, bacon.pk),
('shiny', Animal, platypus.pk)
],
comp_func
)
@@ -124,11 +124,11 @@ class GenericRelationsTests(TestCase):
quartz_pk = quartz.pk
quartz.delete()
self.assertQuerysetEqual(TaggedItem.objects.all(), [
('clearish', Mineral, quartz_pk),
('fatty', Animal, platypus.pk),
('fatty', Vegetable, bacon.pk),
('salty', Vegetable, bacon.pk),
('shiny', Animal, platypus.pk)
],
comp_func
)
@@ -138,10 +138,10 @@ class GenericRelationsTests(TestCase):
tag.delete()
self.assertQuerysetEqual(bacon.tags.all(), ["<TaggedItem: salty>"])
self.assertQuerysetEqual(TaggedItem.objects.all(), [
('clearish', Mineral, quartz_pk),
('fatty', Animal, platypus.pk),
('salty', Vegetable, bacon.pk),
('shiny', Animal, platypus.pk)
],
comp_func
)
@@ -600,8 +600,8 @@ class FormattingTests(TransRealMixin, TestCase):
# Russian locale (with E as month)
with translation.override('ru', deactivate=True):
self.assertHTMLEqual(
'<select name="mydate_day" id="id_mydate_day">\n<option value="1">1</option>\n<option value="2">2</option>\n<option value="3">3</option>\n<option value="4">4</option>\n<option value="5">5</option>\n<option value="6">6</option>\n<option value="7">7</option>\n<option value="8">8</option>\n<option value="9">9</option>\n<option value="10">10</option>\n<option value="11">11</option>\n<option value="12">12</option>\n<option value="13">13</option>\n<option value="14">14</option>\n<option value="15">15</option>\n<option value="16">16</option>\n<option value="17">17</option>\n<option value="18">18</option>\n<option value="19">19</option>\n<option value="20">20</option>\n<option value="21">21</option>\n<option value="22">22</option>\n<option value="23">23</option>\n<option value="24">24</option>\n<option value="25">25</option>\n<option value="26">26</option>\n<option value="27">27</option>\n<option value="28">28</option>\n<option value="29">29</option>\n<option value="30">30</option>\n<option value="31" selected="selected">31</option>\n</select>\n<select name="mydate_month" id="id_mydate_month">\n<option value="1">\u042f\u043d\u0432\u0430\u0440\u044c</option>\n<option value="2">\u0424\u0435\u0432\u0440\u0430\u043b\u044c</option>\n<option value="3">\u041c\u0430\u0440\u0442</option>\n<option value="4">\u0410\u043f\u0440\u0435\u043b\u044c</option>\n<option value="5">\u041c\u0430\u0439</option>\n<option value="6">\u0418\u044e\u043d\u044c</option>\n<option value="7">\u0418\u044e\u043b\u044c</option>\n<option value="8">\u0410\u0432\u0433\u0443\u0441\u0442</option>\n<option value="9">\u0421\u0435\u043d\u0442\u044f\u0431\u0440\u044c</option>\n<option value="10">\u041e\u043a\u0442\u044f\u0431\u0440\u044c</option>\n<option value="11">\u041d\u043e\u044f\u0431\u0440\u044c</option>\n<option value="12" selected="selected">\u0414\u0435\u043a\u0430\u0431\u0440\u044c</option>\n</select>\n<select name="mydate_year" id="id_mydate_year">\n<option value="2009" selected="selected">2009</option>\n<option value="2010">2010</option>\n<option value="2011">2011</option>\n<option value="2012">2012</option>\n<option value="2013">2013</option>\n<option value="2014">2014</option>\n<option value="2015">2015</option>\n<option value="2016">2016</option>\n<option value="2017">2017</option>\n<option value="2018">2018</option>\n</select>',
SelectDateWidget(years=range(2009, 2019)).render('mydate', datetime.date(2009, 12, 31))
)

# English locale
@@ -169,8 +169,8 @@ class AdminEmailHandlerTest(TestCase):
self.assertTrue(admin_email_handler.connection().fail_silently)

@override_settings(
ADMINS=(('whatever admin', 'admin@example.com'),),
EMAIL_SUBJECT_PREFIX='-SuperAwesomeSubject-'
)
def test_accepts_args(self):
"""
@@ -199,9 +199,9 @@ class AdminEmailHandlerTest(TestCase):
admin_email_handler.filters = orig_filters

@override_settings(
ADMINS=(('whatever admin', 'admin@example.com'),),
EMAIL_SUBJECT_PREFIX='-SuperAwesomeSubject-',
INTERNAL_IPS=('127.0.0.1',),
)
def test_accepts_args_and_request(self):
"""
@@ -234,9 +234,9 @@ class AdminEmailHandlerTest(TestCase):
admin_email_handler.filters = orig_filters

@override_settings(
ADMINS=(('admin', 'admin@example.com'),),
EMAIL_SUBJECT_PREFIX='',
DEBUG=False,
)
def test_subject_accepts_newlines(self):
"""
@@ -257,9 +257,9 @@ class AdminEmailHandlerTest(TestCase):
self.assertEqual(mail.outbox[0].subject, expected_subject)

@override_settings(
ADMINS=(('admin', 'admin@example.com'),),
EMAIL_SUBJECT_PREFIX='',
DEBUG=False,
)
def test_truncate_subject(self):
"""
@@ -279,8 +279,8 @@ class AdminEmailHandlerTest(TestCase):
self.assertEqual(mail.outbox[0].subject, expected_subject)

@override_settings(
ADMINS=(('admin', 'admin@example.com'),),
DEBUG=False,
)
def test_uses_custom_email_backend(self):
"""
@@ -324,8 +324,8 @@ class MailTests(HeadersCheckMixin, SimpleTestCase):

connection = mail.get_connection('mail.custombackend.EmailBackend')
send_mass_mail([
('Subject1', 'Content1', 'from1@example.com', ['to1@example.com']),
('Subject2', 'Content2', 'from2@example.com', ['to2@example.com']),
], connection=connection)
self.assertEqual(mail.outbox, [])
self.assertEqual(len(connection.test_outbox), 2)
@@ -55,23 +55,23 @@ class ManagersRegressionTests(TestCase):
# Since Child6 inherits from Child4, the corresponding rows from f1 and
# f2 also appear here. This is the expected result.
self.assertQuerysetEqual(Child4._default_manager.order_by('data'), [
"<Child4: d1>",
"<Child4: d2>",
"<Child4: f1>",
"<Child4: f2>"
]
)
self.assertQuerysetEqual(Child4.manager1.all(), [
"<Child4: d1>",
"<Child4: f1>"
],
ordered=False
)
self.assertQuerysetEqual(Child5._default_manager.all(), ["<Child5: fred>"])
self.assertQuerysetEqual(Child6._default_manager.all(), ["<Child6: f1>"])
self.assertQuerysetEqual(Child7._default_manager.order_by('name'), [
"<Child7: barney>",
"<Child7: fred>"
]
)
@@ -213,9 +213,9 @@ class ManyToOneTests(TestCase):
self.assertQuerysetEqual(
Article.objects.filter(reporter__in=[self.r.id,self.r2.id]).distinct(),
[
"<Article: John's second story>",
"<Article: Paul's story>",
"<Article: This is a test>",
])
self.assertQuerysetEqual(
Article.objects.filter(reporter__in=[self.r,self.r2]).distinct(),
@@ -229,8 +229,8 @@ class ManyToOneTests(TestCase):
# then converted into a query
self.assertQuerysetEqual(
Article.objects.filter(
reporter__in=Reporter.objects.filter(first_name='John').values('pk').query
).distinct(),
[
"<Article: John's second story>",
"<Article: This is a test>",
@@ -374,7 +374,7 @@ class MiddlewareTests(BaseMiddlewareExceptionTest):
self._add_middleware(middleware)
self._add_middleware(pre_middleware)
self.assert_exceptions_handled('/middleware_exceptions/null_view/', [
"The view middleware_exceptions.views.null_view didn't return an HttpResponse object.",
],
ValueError())
@@ -391,7 +391,7 @@ class MiddlewareTests(BaseMiddlewareExceptionTest):
self._add_middleware(middleware)
self._add_middleware(pre_middleware)
self.assert_exceptions_handled('/middleware_exceptions/null_view/', [
"The view middleware_exceptions.views.null_view didn't return an HttpResponse object."
],
ValueError())
@@ -685,8 +685,8 @@ class BadMiddlewareTests(BaseMiddlewareExceptionTest):
self._add_middleware(bad_middleware)
self._add_middleware(pre_middleware)
self.assert_exceptions_handled('/middleware_exceptions/null_view/', [
"The view middleware_exceptions.views.null_view didn't return an HttpResponse object.",
'Test Response Exception'
])

# Check that the right middleware methods have been invoked
@@ -702,7 +702,7 @@ class BadMiddlewareTests(BaseMiddlewareExceptionTest):
self._add_middleware(bad_middleware)
self._add_middleware(pre_middleware)
self.assert_exceptions_handled('/middleware_exceptions/null_view/', [
"The view middleware_exceptions.views.null_view didn't return an HttpResponse object."
],
ValueError())
@@ -31,13 +31,13 @@ class Bar(models.Model):
class Whiz(models.Model):
CHOICES = (
('Group 1', (
(1, 'First'),
(2, 'Second'),
)
),
('Group 2', (
(3, 'Third'),
(4, 'Fourth'),
)
),
(0, 'Other'),
@@ -218,9 +218,9 @@ if Image:
height_field='mugshot_height',
width_field='mugshot_width')
headshot_height = models.PositiveSmallIntegerField(
blank=True, null=True)
headshot_width = models.PositiveSmallIntegerField(
blank=True, null=True)
headshot = TestImageField(blank=True, null=True,
storage=temp_storage, upload_to='tests',
height_field='headshot_height',
@@ -679,12 +679,12 @@ class UniqueTest(TestCase):
isbn = '12345'
DerivedBook.objects.create(title=title, author=self.writer, isbn=isbn)
form = DerivedBookForm({
'title': 'Other',
'author': self.writer.pk,
'isbn': '9876',
'suffix1': '0',
'suffix2': '0'
})
self.assertFalse(form.is_valid())
self.assertEqual(len(form.errors), 1)
self.assertEqual(form.errors['__all__'],
@@ -953,12 +953,12 @@ class OldFormForXTests(TestCase):
self.assertHTMLEqual(six.text_type(f), '''<tr><th>Name:</th><td><input type="text" name="name" value="Mike Royko" maxlength="50" /><br /><span class="helptext">Use both first and last names.</span></td></tr>''')

art = Article(
headline='Test article',
slug='test-article',
pub_date=datetime.date(1988, 1, 4),
writer=w,
article='Hello.'
)
art.save()
art_id_1 = art.id
self.assertEqual(art_id_1 is not None, True)
@@ -984,11 +984,11 @@ class OldFormForXTests(TestCase):
<option value="3">Live</option>
</select></li>''' % (w_woodward.pk, w_royko.pk, c1.pk, c2.pk, c3.pk))
f = TestArticleForm({
'headline': 'Test headline',
'slug': 'test-headline',
'pub_date': '1984-02-06',
'writer': six.text_type(w_royko.pk),
'article': 'Hello.'
}, instance=art)
self.assertEqual(f.errors, {})
self.assertEqual(f.is_valid(), True)
@@ -999,9 +999,9 @@ class OldFormForXTests(TestCase):
# You can create a form over a subset of the available fields
# by specifying a 'fields' argument to form_for_instance.
f = PartialArticleFormWithSlug({
'headline': 'New headline',
'slug': 'new-headline',
'pub_date': '1988-01-04'
}, auto_id=False, instance=art)
self.assertHTMLEqual(f.as_ul(), '''<li>Headline: <input type="text" name="headline" value="New headline" maxlength="50" /></li>
<li>Slug: <input type="text" name="slug" value="new-headline" maxlength="50" /></li>
@@ -1040,11 +1040,11 @@ class OldFormForXTests(TestCase):

# Initial values can be provided for model forms
f = TestArticleForm(
auto_id=False,
initial={
'headline': 'Your headline here',
'categories': [str(c1.id), str(c2.id)]
})
self.assertHTMLEqual(f.as_ul(), '''<li>Headline: <input type="text" name="headline" value="Your headline here" maxlength="50" /></li>
<li>Slug: <input type="text" name="slug" maxlength="50" /></li>
<li>Pub date: <input type="text" name="pub_date" /></li>
@@ -1067,12 +1067,12 @@ class OldFormForXTests(TestCase):
</select></li>''' % (w_woodward.pk, w_royko.pk, c1.pk, c2.pk, c3.pk))

f = TestArticleForm({
'headline': 'New headline',
'slug': 'new-headline',
'pub_date': '1988-01-04',
'writer': six.text_type(w_royko.pk),
'article': 'Hello.',
'categories': [six.text_type(c1.id), six.text_type(c2.id)]
}, instance=new_art)
new_art = f.save()
self.assertEqual(new_art.id == art_id_1, True)
@@ -1388,8 +1388,8 @@ class OldFormForXTests(TestCase):
# Upload a file and ensure it all works as expected.

f = TextFileForm(
data={'description': 'Assistance'},
files={'file': SimpleUploadedFile('test1.txt', b'hello world')})
self.assertEqual(f.is_valid(), True)
self.assertEqual(type(f.cleaned_data['file']), SimpleUploadedFile)
instance = f.save()
@@ -1397,8 +1397,8 @@ class OldFormForXTests(TestCase):

instance.file.delete()
f = TextFileForm(
data={'description': 'Assistance'},
files={'file': SimpleUploadedFile('test1.txt', b'hello world')})
self.assertEqual(f.is_valid(), True)
self.assertEqual(type(f.cleaned_data['file']), SimpleUploadedFile)
instance = f.save()
@@ -1406,16 +1406,16 @@ class OldFormForXTests(TestCase):

# Check if the max_length attribute has been inherited from the model.
f = TextFileForm(
data={'description': 'Assistance'},
files={'file': SimpleUploadedFile('test-maxlength.txt', b'hello world')})
self.assertEqual(f.is_valid(), False)

# Edit an instance that already has the file defined in the model. This will not
# save the file again, but leave it exactly as it is.

f = TextFileForm(
data={'description': 'Assistance'},
instance=instance)
self.assertEqual(f.is_valid(), True)
self.assertEqual(f.cleaned_data['file'].name, 'tests/test1.txt')
instance = f.save()
@@ -1427,8 +1427,8 @@ class OldFormForXTests(TestCase):
# Override the file by uploading a new one.

f = TextFileForm(
data={'description': 'Assistance'},
files={'file': SimpleUploadedFile('test2.txt', b'hello world')}, instance=instance)
self.assertEqual(f.is_valid(), True)
instance = f.save()
self.assertEqual(instance.file.name, 'tests/test2.txt')
@@ -1436,8 +1436,8 @@ class OldFormForXTests(TestCase):
# Delete the current file since this is not done by Django.
instance.file.delete()
f = TextFileForm(
data={'description': 'Assistance'},
files={'file': SimpleUploadedFile('test2.txt', b'hello world')})
self.assertEqual(f.is_valid(), True)
instance = f.save()
self.assertEqual(instance.file.name, 'tests/test2.txt')
@@ -1455,8 +1455,8 @@ class OldFormForXTests(TestCase):
self.assertEqual(instance.file.name, '')

f = TextFileForm(
data={'description': 'Assistance'},
files={'file': SimpleUploadedFile('test3.txt', b'hello world')}, instance=instance)
self.assertEqual(f.is_valid(), True)
instance = f.save()
self.assertEqual(instance.file.name, 'tests/test3.txt')
@@ -1464,8 +1464,8 @@ class OldFormForXTests(TestCase):
# Instance can be edited w/out re-uploading the file and existing file should be preserved.

f = TextFileForm(
data={'description': 'New Description'},
instance=instance)
f.fields['file'].required = False
self.assertEqual(f.is_valid(), True)
instance = f.save()
@@ -1477,8 +1477,8 @@ class OldFormForXTests(TestCase):
instance.delete()

f = TextFileForm(
data={'description': 'Assistance'},
files={'file': SimpleUploadedFile('test3.txt', b'hello world')})
self.assertEqual(f.is_valid(), True)
instance = f.save()
self.assertEqual(instance.file.name, 'tests/test3.txt')
@@ -1511,8 +1511,8 @@ class OldFormForXTests(TestCase):
image_data2 = fp.read()

f = ImageFileForm(
data={'description': 'An image'},
files={'image': SimpleUploadedFile('test.png', image_data)})
self.assertEqual(f.is_valid(), True)
self.assertEqual(type(f.cleaned_data['image']), SimpleUploadedFile)
instance = f.save()
@@ -1524,8 +1524,8 @@ class OldFormForXTests(TestCase):
# because the dimension fields are not null=True.
instance.image.delete(save=False)
f = ImageFileForm(
data={'description': 'An image'},
files={'image': SimpleUploadedFile('test.png', image_data)})
self.assertEqual(f.is_valid(), True)
self.assertEqual(type(f.cleaned_data['image']), SimpleUploadedFile)
instance = f.save()
@@ -1550,8 +1550,8 @@ class OldFormForXTests(TestCase):
# Override the file by uploading a new one.

f = ImageFileForm(
data={'description': 'Changed it'},
files={'image': SimpleUploadedFile('test2.png', image_data2)}, instance=instance)
self.assertEqual(f.is_valid(), True)
instance = f.save()
self.assertEqual(instance.image.name, 'tests/test2.png')
@@ -1564,8 +1564,8 @@ class OldFormForXTests(TestCase):
instance.delete()

f = ImageFileForm(
data={'description': 'Changed it'},
files={'image': SimpleUploadedFile('test2.png', image_data2)})
self.assertEqual(f.is_valid(), True)
instance = f.save()
self.assertEqual(instance.image.name, 'tests/test2.png')
@@ -1593,8 +1593,8 @@ class OldFormForXTests(TestCase):
self.assertEqual(instance.height, None)

f = OptionalImageFileForm(
data={'description': 'And a final one'},
files={'image': SimpleUploadedFile('test3.png', image_data)}, instance=instance)
self.assertEqual(f.is_valid(), True)
instance = f.save()
self.assertEqual(instance.image.name, 'tests/test3.png')
@@ -1603,8 +1603,8 @@ class OldFormForXTests(TestCase):

# Editing the instance without re-uploading the image should not affect the image or its width/height properties
f = OptionalImageFileForm(
data={'description': 'New Description'},
instance=instance)
self.assertEqual(f.is_valid(), True)
instance = f.save()
self.assertEqual(instance.description, 'New Description')
@@ -1617,8 +1617,8 @@ class OldFormForXTests(TestCase):
instance.delete()

f = OptionalImageFileForm(
data={'description': 'And a final one'},
files={'image': SimpleUploadedFile('test4.png', image_data2)}
)
self.assertEqual(f.is_valid(), True)
instance = f.save()
@@ -1628,8 +1628,8 @@ class OldFormForXTests(TestCase):
instance.delete()
# Test callable upload_to behavior that's dependent on the value of another field in the model
f = ImageFileForm(
data={'description': 'And a final one', 'path': 'foo'},
files={'image': SimpleUploadedFile('test4.png', image_data)})
self.assertEqual(f.is_valid(), True)
instance = f.save()
self.assertEqual(instance.image.name, 'foo/test4.png')
@@ -369,25 +369,25 @@ class FormfieldShouldDeleteFormTests(TestCase):
DeleteFormset = modelformset_factory(User, form=CustomDeleteUserForm, formset=BaseCustomDeleteModelFormSet)

data = {
'form-TOTAL_FORMS': '4',
'form-INITIAL_FORMS': '0',
'form-MAX_NUM_FORMS': '4',
'form-0-username': 'John',
'form-0-serial': '1',
'form-1-username': 'Paul',
'form-1-serial': '2',
'form-2-username': 'George',
'form-2-serial': '3',
'form-3-username': 'Ringo',
'form-3-serial': '5',
}

delete_all_ids = {
'form-0-DELETE': '1',
'form-1-DELETE': '1',
'form-2-DELETE': '1',
'form-3-DELETE': '1',
}

def test_init_database(self):
""" Add test data to database via formset """
@@ -123,8 +123,8 @@ class DerivedM(BaseM):
derived_name = models.CharField(max_length=100)

def __str__(self):
return "PK = %d, base_name = %s, derived_name = %s" \
% (self.customPK, self.base_name, self.derived_name)
return "PK = %d, base_name = %s, derived_name = %s" % (
self.customPK, self.base_name, self.derived_name)

class AuditBase(models.Model):
planned_date = models.DateField()
@@ -385,8 +385,8 @@ class ModelInheritanceTest(TestCase):
# abstract models in the inheritance chain, for consistency with
# verbose_name.
self.assertEqual(
InternalCertificationAudit._meta.verbose_name_plural,
'Audits'
)

def test_inherited_nullable_exclude(self):
@@ -121,16 +121,16 @@ class ModelTests(TestCase):

# Regression test for #18969
self.assertQuerysetEqual(
Party.objects.filter(when__year=1), [
datetime.date(1, 3, 3),
],
attrgetter("when")
)
self.assertQuerysetEqual(
Party.objects.filter(when__year='1'), [
datetime.date(1, 3, 3),
],
attrgetter("when")
)

if (3,) <= sys.version_info < (3, 3) and connection.vendor == 'mysql':
@@ -28,11 +28,11 @@ class NullQueriesTests(TestCase):

# Excluding the previous result returns everything.
self.assertQuerysetEqual(
Choice.objects.exclude(choice=None).order_by('id'),
[
'<Choice: Choice: Because. in poll Q: Why? >',
'<Choice: Choice: Why Not? in poll Q: Why? >'
]
)

# Valid query, but fails because foo isn't a keyword
@@ -248,11 +248,11 @@ class ModelPaginationTests(TestCase):
p = paginator.page(1)
self.assertEqual("<Page 1 of 2>", six.text_type(p))
self.assertQuerysetEqual(p.object_list, [
"<Article: Article 1>",
"<Article: Article 2>",
"<Article: Article 3>",
"<Article: Article 4>",
"<Article: Article 5>"
],
ordered=False
)
@@ -269,10 +269,10 @@ class ModelPaginationTests(TestCase):
p = paginator.page(2)
self.assertEqual("<Page 2 of 2>", six.text_type(p))
self.assertQuerysetEqual(p.object_list, [
"<Article: Article 6>",
"<Article: Article 7>",
"<Article: Article 8>",
"<Article: Article 9>"
],
ordered=False
)
@@ -302,8 +302,8 @@ class ModelPaginationTests(TestCase):
# Make sure slicing the Page object with numbers and slice objects work.
self.assertEqual(p[0], Article.objects.get(headline='Article 1'))
self.assertQuerysetEqual(p[slice(2)], [
"<Article: Article 1>",
"<Article: Article 2>",
]
)
# After __getitem__ is called, object_list is a list
@@ -540,7 +540,7 @@ class Queries1Tests(BaseQuerysetTest):
['<Author: a1>']
)
self.assertQuerysetEqual(
Author.objects.filter(Q(extra__note=self.n1)|Q(item__note=self.n3)).filter(id=self.a1.id),
['<Author: a1>']
)
@@ -298,12 +298,12 @@ class RequestsTests(SimpleTestCase):
# we don't want the data held in memory twice, and we don't want to
# silence the error by setting body = '' either.
payload = FakePayload("\r\n".join([
'--boundary',
'Content-Disposition: form-data; name="name"',
'',
'value',
'--boundary--'
'']))
request = WSGIRequest({'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': 'multipart/form-data; boundary=boundary',
'CONTENT_LENGTH': len(payload),
@@ -320,12 +320,12 @@ class RequestsTests(SimpleTestCase):
# being a binary upload, in which case it should still be accessible
# via body.
payload_data = b"\r\n".join([
b'--boundary',
b'Content-ID: id; name="name"',
b'',
b'value',
b'--boundary--'
b''])
payload = FakePayload(payload_data)
request = WSGIRequest({'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': 'multipart/related; boundary=boundary',
@@ -343,12 +343,12 @@ class RequestsTests(SimpleTestCase):
# Every request.POST with Content-Length >= 0 is a valid request,
# this test ensures that we handle Content-Length == 0.
payload = FakePayload("\r\n".join([
'--boundary',
'Content-Disposition: form-data; name="name"',
'',
'value',
'--boundary--'
'']))
request = WSGIRequest({'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': 'multipart/form-data; boundary=boundary',
'CONTENT_LENGTH': 0,
@@ -413,12 +413,12 @@ class RequestsTests(SimpleTestCase):
the stream is read second. Using multipart/form-data instead of urlencoded.
"""
payload = FakePayload("\r\n".join([
'--boundary',
'Content-Disposition: form-data; name="name"',
'',
'value',
'--boundary--'
'']))
request = WSGIRequest({'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': 'multipart/form-data; boundary=boundary',
'CONTENT_LENGTH': len(payload),
@@ -19,8 +19,8 @@ class ReverseSingleRelatedTests(TestCase):

# Only one source is available via all() due to the custom default manager.
self.assertQuerysetEqual(
Source.objects.all(),
["<Source: Source object>"]
)

self.assertEqual(public_item.source, public_source)
@@ -96,8 +96,8 @@ class SelectRelatedRegressTests(TestCase):
Item.objects.create(name="item2")

self.assertQuerysetEqual(
Item.objects.select_related("child").order_by("name"),
["<Item: item1>", "<Item: item2>"]
)

def test_regression_12851(self):
@@ -556,10 +556,8 @@ def naturalKeyTest(format, self):
self.assertEqual(books[1].object.pk, None)


for format in [
f for f in serializers.get_serializer_formats()
if not isinstance(serializers.get_serializer(f), serializers.BadSerializer)
]:
for format in [f for f in serializers.get_serializer_formats()
if not isinstance(serializers.get_serializer(f), serializers.BadSerializer)]:
setattr(SerializerTests, 'test_' + format + '_serializer', curry(serializerTest, format))
setattr(SerializerTests, 'test_' + format + '_natural_key_serializer', curry(naturalKeySerializerTest, format))
setattr(SerializerTests, 'test_' + format + '_serializer_fields', curry(fieldsTest, format))
@@ -31,8 +31,8 @@ class TestSigner(TestCase):
signer = signing.Signer('predictable-secret', salt='extra-salt')
self.assertEqual(
signer.signature('hello'),
signing.base64_hmac('extra-salt' + 'signer',
'hello', 'predictable-secret').decode()
)
self.assertNotEqual(
signing.Signer('predictable-secret', salt='one').signature('hello'),
@@ -111,7 +111,7 @@ class CachedLoader(unittest.TestCase):
self.old_TEMPLATE_LOADERS = settings.TEMPLATE_LOADERS
settings.TEMPLATE_LOADERS = (
('django.template.loaders.cached.Loader', (
'django.template.loaders.filesystem.Loader',
)
),
)
@@ -150,8 +150,8 @@ class SimpleTemplateResponseTest(TestCase):
# Create a template response. The context is
# known to be unpickleable (e.g., a function).
response = SimpleTemplateResponse('first/test.html', {
'value': 123,
'fn': datetime.now,
})
self.assertRaises(ContentNotRenderedError,
pickle.dumps, response)
@@ -178,8 +178,8 @@ class SimpleTemplateResponseTest(TestCase):

def test_repickling(self):
response = SimpleTemplateResponse('first/test.html', {
'value': 123,
'fn': datetime.now,
})
self.assertRaises(ContentNotRenderedError,
pickle.dumps, response)
@@ -191,8 +191,8 @@ class SimpleTemplateResponseTest(TestCase):

def test_pickling_cookie(self):
response = SimpleTemplateResponse('first/test.html', {
'value': 123,
'fn': datetime.now,
})

response.cookies['key'] = 'value'
@@ -284,8 +284,8 @@ class TemplateResponseTest(TestCase):

def test_repickling(self):
response = SimpleTemplateResponse('first/test.html', {
'value': 123,
'fn': datetime.now,
})
self.assertRaises(ContentNotRenderedError,
pickle.dumps, response)
@@ -1217,10 +1217,10 @@ class UploadedFileEncodingTest(TestCase):
self.assertEqual(b'Content-Type: text/plain',
encode_file('IGNORE', 'IGNORE', DummyFile("file.txt"))[2])
self.assertIn(encode_file('IGNORE', 'IGNORE', DummyFile("file.zip"))[2], (
b'Content-Type: application/x-compress',
b'Content-Type: application/x-zip',
b'Content-Type: application/x-zip-compressed',
b'Content-Type: application/zip',))
self.assertEqual(b'Content-Type: application/octet-stream',
encode_file('IGNORE', 'IGNORE', DummyFile("file.unknown"))[2])
@@ -208,51 +208,57 @@ class LegacyDatabaseTests(TestCase):
morning_min_dt = datetime.datetime(2011, 9, 1, 3, 20, 40)
afternoon_min_dt = datetime.datetime(2011, 9, 1, 13, 20, 30)
self.assertQuerysetEqual(
Session.objects.annotate(dt=Min('events__dt')).order_by('dt'),
[morning_min_dt, afternoon_min_dt],
transform=lambda d: d.dt)
self.assertQuerysetEqual(
Session.objects.annotate(dt=Min('events__dt')).filter(dt__lt=afternoon_min_dt),
[morning_min_dt],
transform=lambda d: d.dt)
self.assertQuerysetEqual(
Session.objects.annotate(dt=Min('events__dt')).filter(dt__gte=afternoon_min_dt),
[afternoon_min_dt],
transform=lambda d: d.dt)

def test_query_datetimes(self):
Event.objects.create(dt=datetime.datetime(2011, 1, 1, 1, 30, 0))
Event.objects.create(dt=datetime.datetime(2011, 1, 1, 4, 30, 0))
self.assertQuerysetEqual(
Event.objects.datetimes('dt', 'year'),
[datetime.datetime(2011, 1, 1, 0, 0, 0)],
transform=lambda d: d)
self.assertQuerysetEqual(
Event.objects.datetimes('dt', 'month'),
[datetime.datetime(2011, 1, 1, 0, 0, 0)],
transform=lambda d: d)
self.assertQuerysetEqual(
Event.objects.datetimes('dt', 'day'),
[datetime.datetime(2011, 1, 1, 0, 0, 0)],
transform=lambda d: d)
self.assertQuerysetEqual(
Event.objects.datetimes('dt', 'hour'),
[datetime.datetime(2011, 1, 1, 1, 0, 0),
datetime.datetime(2011, 1, 1, 4, 0, 0)],
transform=lambda d: d)
self.assertQuerysetEqual(
Event.objects.datetimes('dt', 'minute'),
[datetime.datetime(2011, 1, 1, 1, 30, 0),
datetime.datetime(2011, 1, 1, 4, 30, 0)],
transform=lambda d: d)
self.assertQuerysetEqual(
Event.objects.datetimes('dt', 'second'),
[datetime.datetime(2011, 1, 1, 1, 30, 0),
datetime.datetime(2011, 1, 1, 4, 30, 0)],
transform=lambda d: d)

def test_raw_sql(self):
# Regression test for #17755
dt = datetime.datetime(2011, 9, 1, 13, 20, 30)
event = Event.objects.create(dt=dt)
self.assertQuerysetEqual(
Event.objects.raw('SELECT * FROM timezones_event WHERE dt = %s', [dt]),
[event],
transform=lambda d: d)

def test_filter_date_field_with_aware_datetime(self):
# Regression test for #17742
@@ -456,82 +462,94 @@ class NewDatabaseTests(TestCase):
morning_min_dt = datetime.datetime(2011, 9, 1, 3, 20, 40, tzinfo=EAT)
afternoon_min_dt = datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT)
self.assertQuerysetEqual(
Session.objects.annotate(dt=Min('events__dt')).order_by('dt'),
[morning_min_dt, afternoon_min_dt],
transform=lambda d: d.dt)
self.assertQuerysetEqual(
Session.objects.annotate(dt=Min('events__dt')).filter(dt__lt=afternoon_min_dt),
[morning_min_dt],
transform=lambda d: d.dt)
self.assertQuerysetEqual(
Session.objects.annotate(dt=Min('events__dt')).filter(dt__gte=afternoon_min_dt),
[afternoon_min_dt],
transform=lambda d: d.dt)

@skipUnlessDBFeature('has_zoneinfo_database')
def test_query_datetimes(self):
Event.objects.create(dt=datetime.datetime(2011, 1, 1, 1, 30, 0, tzinfo=EAT))
Event.objects.create(dt=datetime.datetime(2011, 1, 1, 4, 30, 0, tzinfo=EAT))
self.assertQuerysetEqual(
Event.objects.datetimes('dt', 'year'),
[datetime.datetime(2011, 1, 1, 0, 0, 0, tzinfo=EAT)],
transform=lambda d: d)
self.assertQuerysetEqual(
Event.objects.datetimes('dt', 'month'),
[datetime.datetime(2011, 1, 1, 0, 0, 0, tzinfo=EAT)],
transform=lambda d: d)
self.assertQuerysetEqual(
Event.objects.datetimes('dt', 'day'),
[datetime.datetime(2011, 1, 1, 0, 0, 0, tzinfo=EAT)],
transform=lambda d: d)
self.assertQuerysetEqual(
Event.objects.datetimes('dt', 'hour'),
[datetime.datetime(2011, 1, 1, 1, 0, 0, tzinfo=EAT),
datetime.datetime(2011, 1, 1, 4, 0, 0, tzinfo=EAT)],
transform=lambda d: d)
self.assertQuerysetEqual(
Event.objects.datetimes('dt', 'minute'),
[datetime.datetime(2011, 1, 1, 1, 30, 0, tzinfo=EAT),
datetime.datetime(2011, 1, 1, 4, 30, 0, tzinfo=EAT)],
transform=lambda d: d)
self.assertQuerysetEqual(
Event.objects.datetimes('dt', 'second'),
[datetime.datetime(2011, 1, 1, 1, 30, 0, tzinfo=EAT),
datetime.datetime(2011, 1, 1, 4, 30, 0, tzinfo=EAT)],
transform=lambda d: d)

@skipUnlessDBFeature('has_zoneinfo_database')
def test_query_datetimes_in_other_timezone(self):
Event.objects.create(dt=datetime.datetime(2011, 1, 1, 1, 30, 0, tzinfo=EAT))
Event.objects.create(dt=datetime.datetime(2011, 1, 1, 4, 30, 0, tzinfo=EAT))
with timezone.override(UTC):
self.assertQuerysetEqual(
Event.objects.datetimes('dt', 'year'),
[datetime.datetime(2010, 1, 1, 0, 0, 0, tzinfo=UTC),
datetime.datetime(2011, 1, 1, 0, 0, 0, tzinfo=UTC)],
transform=lambda d: d)
self.assertQuerysetEqual(
Event.objects.datetimes('dt', 'month'),
[datetime.datetime(2010, 12, 1, 0, 0, 0, tzinfo=UTC),
datetime.datetime(2011, 1, 1, 0, 0, 0, tzinfo=UTC)],
transform=lambda d: d)
self.assertQuerysetEqual(
Event.objects.datetimes('dt', 'day'),
[datetime.datetime(2010, 12, 31, 0, 0, 0, tzinfo=UTC),
datetime.datetime(2011, 1, 1, 0, 0, 0, tzinfo=UTC)],
transform=lambda d: d)
self.assertQuerysetEqual(
Event.objects.datetimes('dt', 'hour'),
[datetime.datetime(2010, 12, 31, 22, 0, 0, tzinfo=UTC),
datetime.datetime(2011, 1, 1, 1, 0, 0, tzinfo=UTC)],
transform=lambda d: d)
self.assertQuerysetEqual(
Event.objects.datetimes('dt', 'minute'),
[datetime.datetime(2010, 12, 31, 22, 30, 0, tzinfo=UTC),
datetime.datetime(2011, 1, 1, 1, 30, 0, tzinfo=UTC)],
transform=lambda d: d)
self.assertQuerysetEqual(
Event.objects.datetimes('dt', 'second'),
[datetime.datetime(2010, 12, 31, 22, 30, 0, tzinfo=UTC),
datetime.datetime(2011, 1, 1, 1, 30, 0, tzinfo=UTC)],
transform=lambda d: d)

def test_raw_sql(self):
# Regression test for #17755
dt = datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT)
event = Event.objects.create(dt=dt)
self.assertQuerysetEqual(
Event.objects.raw('SELECT * FROM timezones_event WHERE dt = %s', [dt]),
[event],
transform=lambda d: d)

@requires_tz_support
def test_filter_date_field_with_aware_datetime(self):
@@ -203,7 +203,7 @@ class AtomicTests(TransactionTestCase):
with self.assertRaises(DatabaseError):
with transaction.atomic(savepoint=False):
connection.cursor().execute(
"SELECT no_such_col FROM transactions_reporter")
# prevent atomic from rolling back since we're recovering manually
self.assertTrue(transaction.get_rollback())
transaction.set_rollback(False)
@@ -61,14 +61,14 @@ class TzinfoTests(IgnorePendingDeprecationWarningsMixin, unittest.TestCase):
# US/Eastern -- we force its representation to "EST"
tz = LocalTimezone(dt + datetime.timedelta(days=1))
self.assertEqual(
repr(datetime.datetime.fromtimestamp(ts - 3600, tz)),
'datetime.datetime(2010, 11, 7, 0, 0, tzinfo=EST)')
self.assertEqual(
repr(datetime.datetime.fromtimestamp(ts, tz)),
'datetime.datetime(2010, 11, 7, 1, 0, tzinfo=EST)')
self.assertEqual(
repr(datetime.datetime.fromtimestamp(ts + 3600, tz)),
'datetime.datetime(2010, 11, 7, 1, 0, tzinfo=EST)')

def test_copy(self):
now = datetime.datetime.now()