Fixed #21288 -- Fixed E126 pep8 warnings

parent a3690168cb
commit b289fcf1bf
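E126 is the pep8/pycodestyle warning for "continuation line over-indented for hanging indent". As a rough illustration (a made-up snippet, not code from this commit; build_filter, lookup_kwarg and lookup_value are placeholder names), the warning fires when a hanging indent is pushed deeper than pep8 expects, and the fix is to pull the continuation lines back to a plain four-space indent:

# Over-indented hanging indent: pep8 reports E126 on the argument lines.
result = build_filter(
            lookup_kwarg,
            lookup_value)

# Re-indented with a four-space hanging indent, which pep8 accepts.
result = build_filter(
    lookup_kwarg,
    lookup_value)

The hunks below apply this kind of re-indentation across the code base; a setup.cfg hunk further down also drops E126 from the flake8 ignore list so the check stays enforced.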
@@ -217,8 +217,8 @@ class RelatedFieldListFilter(FieldListFilter):
            }

FieldListFilter.register(lambda f: (
    bool(f.rel) if hasattr(f, 'rel') else
    isinstance(f, models.related.RelatedObject)), RelatedFieldListFilter)


class BooleanFieldListFilter(FieldListFilter):

@@ -241,7 +241,7 @@ class BooleanFieldListFilter(FieldListFilter):
            yield {
                'selected': self.lookup_val == lookup and not self.lookup_val2,
                'query_string': cl.get_query_string({
                    self.lookup_kwarg: lookup,
                }, [self.lookup_kwarg2]),
                'display': title,
            }

@@ -249,7 +249,7 @@ class BooleanFieldListFilter(FieldListFilter):
            yield {
                'selected': self.lookup_val2 == 'True',
                'query_string': cl.get_query_string({
                    self.lookup_kwarg2: 'True',
                }, [self.lookup_kwarg]),
                'display': _('Unknown'),
            }

@@ -278,7 +278,7 @@ class ChoicesFieldListFilter(FieldListFilter):
            yield {
                'selected': smart_text(lookup) == self.lookup_val,
                'query_string': cl.get_query_string({
                    self.lookup_kwarg: lookup}),
                'display': title,
            }

@@ -340,7 +340,7 @@ class DateFieldListFilter(FieldListFilter):
            yield {
                'selected': self.date_params == param_dict,
                'query_string': cl.get_query_string(
                    param_dict, [self.field_generic]),
                'display': title,
            }

@@ -150,13 +150,12 @@ class BaseModelAdmin(six.with_metaclass(RenameBaseModelAdminMethods)):
        # rendered output. formfield can be None if it came from a
        # OneToOneField with parent_link=True or a M2M intermediary.
        if formfield and db_field.name not in self.raw_id_fields:
-           related_modeladmin = self.admin_site._registry.get(
-               db_field.rel.to)
+           related_modeladmin = self.admin_site._registry.get(db_field.rel.to)
            can_add_related = bool(related_modeladmin and
                related_modeladmin.has_add_permission(request))
            formfield.widget = widgets.RelatedFieldWidgetWrapper(
                formfield.widget, db_field.rel, self.admin_site,
                can_add_related=can_add_related)

        return formfield

@@ -1196,11 +1195,11 @@ class ModelAdmin(BaseModelAdmin):

        opts = self.model._meta

-       self.message_user(request, _(
-           'The %(name)s "%(obj)s" was deleted successfully.') % {
+       self.message_user(request,
+           _('The %(name)s "%(obj)s" was deleted successfully.') % {
                'name': force_text(opts.verbose_name),
                'obj': force_text(obj_display)
            }, messages.SUCCESS)

        if self.has_change_permission(request, None):
            post_url = reverse('admin:%s_%s_changelist' %

@@ -1313,9 +1312,9 @@ class ModelAdmin(BaseModelAdmin):
            raise Http404(_('%(name)s object with primary key %(key)r does not exist.') % {'name': force_text(opts.verbose_name), 'key': escape(object_id)})

        if request.method == 'POST' and "_saveasnew" in request.POST:
-           return self.add_view(request, form_url=reverse('admin:%s_%s_add' %
-               (opts.app_label, opts.model_name),
+           return self.add_view(request, form_url=reverse('admin:%s_%s_add' % (
+               opts.app_label, opts.model_name),
                current_app=self.admin_site.name))

        ModelForm = self.get_form(request, obj)
        if request.method == 'POST':

@@ -1544,7 +1543,7 @@ class ModelAdmin(BaseModelAdmin):
        if obj is None:
            raise Http404(
                _('%(name)s object with primary key %(key)r does not exist.') %
                {'name': force_text(opts.verbose_name), 'key': escape(object_id)}
            )

        using = router.db_for_write(self.model)
@@ -164,8 +164,7 @@ def result_headers(cl):
            "url_primary": cl.get_query_string({ORDER_VAR: '.'.join(o_list_primary)}),
            "url_remove": cl.get_query_string({ORDER_VAR: '.'.join(o_list_remove)}),
            "url_toggle": cl.get_query_string({ORDER_VAR: '.'.join(o_list_toggle)}),
-           "class_attrib": format_html(' class="{0}"', ' '.join(th_classes))
-               if th_classes else '',
+           "class_attrib": format_html(' class="{0}"', ' '.join(th_classes)) if th_classes else '',
        }

def _boolean_icon(field_val):

@@ -246,8 +245,7 @@ def items_for_result(cl, result, form):
            link_or_text = format_html(
                '<a href="{0}"{1}>{2}</a>',
                url,
-               format_html(' onclick="opener.dismissRelatedLookupPopup(window, '{0}'); return false;"', result_id)
-                   if cl.is_popup else '',
+               format_html(' onclick="opener.dismissRelatedLookupPopup(window, '{0}'); return false;"', result_id) if cl.is_popup else '',
                result_repr)

        yield format_html('<{0}{1}>{2}</{3}>',

@@ -261,7 +259,7 @@ def items_for_result(cl, result, form):
        # can provide fields on a per request basis
        if (form and field_name in form.fields and not (
                field_name == cl.model._meta.pk.name and
                form[cl.model._meta.pk.name].is_hidden)):
            bf = form[field_name]
            result_repr = mark_safe(force_text(bf.errors) + force_text(bf))
        yield format_html('<td{0}>{1}</td>', row_class, result_repr)
@@ -33,8 +33,7 @@ def submit_row(context):
        'show_delete_link': (not is_popup and context['has_delete_permission']
            and change and context.get('show_delete', True)),
        'show_save_as_new': not is_popup and change and save_as,
-       'show_save_and_add_another': context['has_add_permission'] and
-           not is_popup and (not save_as or context['add']),
+       'show_save_and_add_another': context['has_add_permission'] and not is_popup and (not save_as or context['add']),
        'show_save_and_continue': not is_popup and context['has_change_permission'],
        'is_popup': is_popup,
        'show_save': True,
@@ -34,7 +34,7 @@ class Command(BaseCommand):

        try:
            u = UserModel._default_manager.using(options.get('database')).get(**{
                UserModel.USERNAME_FIELD: username
            })
        except UserModel.DoesNotExist:
            raise CommandError("user '%s' does not exist" % username)
@@ -427,7 +427,7 @@ class PermissionDeniedBackendTest(TestCase):
        self.assertEqual(authenticate(username='test', password='test'), None)

    @override_settings(AUTHENTICATION_BACKENDS=tuple(
        settings.AUTHENTICATION_BACKENDS) + (backend, ))
    def test_authenticates(self):
        self.assertEqual(authenticate(username='test', password='test'), self.user1)

@@ -177,9 +177,10 @@ class PasswordResetTest(AuthViewsTestCase):
        # HTTP_HOST header isn't poisoned. This is done as a check when get_host()
        # is invoked, but we check here as a practical consequence.
        with patch_logger('django.security.DisallowedHost', 'error') as logger_calls:
-           response = self.client.post('/password_reset/',
+           response = self.client.post(
+               '/password_reset/',
                {'email': 'staffmember@example.com'},
                HTTP_HOST='www.example:dr.frankenstein@evil.tld'
            )
            self.assertEqual(response.status_code, 400)
            self.assertEqual(len(mail.outbox), 0)

@@ -190,9 +191,10 @@ class PasswordResetTest(AuthViewsTestCase):
    def test_poisoned_http_host_admin_site(self):
        "Poisoned HTTP_HOST headers can't be used for reset emails on admin views"
        with patch_logger('django.security.DisallowedHost', 'error') as logger_calls:
-           response = self.client.post('/admin_password_reset/',
+           response = self.client.post(
+               '/admin_password_reset/',
                {'email': 'staffmember@example.com'},
                HTTP_HOST='www.example:dr.frankenstein@evil.tld'
            )
            self.assertEqual(response.status_code, 400)
            self.assertEqual(len(mail.outbox), 0)

@@ -356,7 +358,7 @@ class ChangePasswordTest(AuthViewsTestCase):
            'password': password,
        })
        self.assertFormError(response, AuthenticationForm.error_messages['invalid_login'] % {
            'username': User._meta.get_field('username').verbose_name
        })

    def logout(self):

@@ -490,8 +492,8 @@ class LoginTest(AuthViewsTestCase):
                'good_url': urlquote(good_url),
            }
            response = self.client.post(safe_url, {
                'username': 'testclient',
                'password': password,
            })
            self.assertEqual(response.status_code, 302)
            self.assertTrue(good_url in response.url,
@@ -240,12 +240,10 @@ class GenericRelation(ForeignObject):

        """
        return self.rel.to._base_manager.db_manager(using).filter(**{
-           "%s__pk" % self.content_type_field_name:
-               ContentType.objects.db_manager(using).get_for_model(
-                   self.model, for_concrete_model=self.for_concrete_model).pk,
-           "%s__in" % self.object_id_field_name:
-               [obj.pk for obj in objs]
+           "%s__pk" % self.content_type_field_name: ContentType.objects.db_manager(using).get_for_model(
+               self.model, for_concrete_model=self.for_concrete_model).pk,
+           "%s__in" % self.object_id_field_name: [obj.pk for obj in objs]
        })


class ReverseGenericRelatedObjectsDescriptor(object):

@@ -352,8 +350,7 @@ def create_generic_related_manager(superclass):
            db = self._db or router.db_for_read(self.model, instance=instances[0])
            query = {
                '%s__pk' % self.content_type_field_name: self.content_type.id,
-               '%s__in' % self.object_id_field_name:
-                   set(obj._get_pk_val() for obj in instances)
+               '%s__in' % self.object_id_field_name: set(obj._get_pk_val() for obj in instances)
            }
            qs = super(GenericRelatedObjectManager, self).get_queryset().using(db).filter(**query)
            # We (possibly) need to convert object IDs to the type of the
@@ -27,24 +27,24 @@ class FlatpageTemplateTagTests(TestCase):
    def test_get_flatpages_tag(self):
        "The flatpage template tag retrives unregistered prefixed flatpages by default"
        out = Template(
            "{% load flatpages %}"
            "{% get_flatpages as flatpages %}"
            "{% for page in flatpages %}"
            "{{ page.title }},"
            "{% endfor %}"
        ).render(Context())
        self.assertEqual(out, "A Flatpage,A Nested Flatpage,")

    def test_get_flatpages_tag_for_anon_user(self):
        "The flatpage template tag retrives unregistered flatpages for an anonymous user"
        out = Template(
            "{% load flatpages %}"
            "{% get_flatpages for anonuser as flatpages %}"
            "{% for page in flatpages %}"
            "{{ page.title }},"
            "{% endfor %}"
        ).render(Context({
            'anonuser': AnonymousUser()
        }))
        self.assertEqual(out, "A Flatpage,A Nested Flatpage,")

@@ -53,37 +53,37 @@ class FlatpageTemplateTagTests(TestCase):
        "The flatpage template tag retrives all flatpages for an authenticated user"
        me = User.objects.create_user('testuser', 'test@example.com', 's3krit')
        out = Template(
            "{% load flatpages %}"
            "{% get_flatpages for me as flatpages %}"
            "{% for page in flatpages %}"
            "{{ page.title }},"
            "{% endfor %}"
        ).render(Context({
            'me': me
        }))
        self.assertEqual(out, "A Flatpage,A Nested Flatpage,Sekrit Nested Flatpage,Sekrit Flatpage,")

    def test_get_flatpages_with_prefix(self):
        "The flatpage template tag retrives unregistered prefixed flatpages by default"
        out = Template(
            "{% load flatpages %}"
            "{% get_flatpages '/location/' as location_flatpages %}"
            "{% for page in location_flatpages %}"
            "{{ page.title }},"
            "{% endfor %}"
        ).render(Context())
        self.assertEqual(out, "A Nested Flatpage,")

    def test_get_flatpages_with_prefix_for_anon_user(self):
        "The flatpage template tag retrives unregistered prefixed flatpages for an anonymous user"
        out = Template(
            "{% load flatpages %}"
            "{% get_flatpages '/location/' for anonuser as location_flatpages %}"
            "{% for page in location_flatpages %}"
            "{{ page.title }},"
            "{% endfor %}"
        ).render(Context({
            'anonuser': AnonymousUser()
        }))
        self.assertEqual(out, "A Nested Flatpage,")

@@ -92,26 +92,26 @@ class FlatpageTemplateTagTests(TestCase):
        "The flatpage template tag retrive prefixed flatpages for an authenticated user"
        me = User.objects.create_user('testuser', 'test@example.com', 's3krit')
        out = Template(
            "{% load flatpages %}"
            "{% get_flatpages '/location/' for me as location_flatpages %}"
            "{% for page in location_flatpages %}"
            "{{ page.title }},"
            "{% endfor %}"
        ).render(Context({
            'me': me
        }))
        self.assertEqual(out, "A Nested Flatpage,Sekrit Nested Flatpage,")

    def test_get_flatpages_with_variable_prefix(self):
        "The prefix for the flatpage template tag can be a template variable"
        out = Template(
            "{% load flatpages %}"
            "{% get_flatpages location_prefix as location_flatpages %}"
            "{% for page in location_flatpages %}"
            "{{ page.title }},"
            "{% endfor %}"
        ).render(Context({
            'location_prefix': '/location/'
        }))
        self.assertEqual(out, "A Nested Flatpage,")

@@ -70,8 +70,8 @@ class BaseStorage(object):

        if wizard_files and not self.file_storage:
            raise NoFileStorageConfigured(
                "You need to define 'file_storage' in your "
                "wizard view in order to handle file uploads.")

        files = {}
        for field, field_dict in six.iteritems(wizard_files):

@@ -84,8 +84,8 @@ class BaseStorage(object):
    def set_step_files(self, step, files):
        if files and not self.file_storage:
            raise NoFileStorageConfigured(
                "You need to define 'file_storage' in your "
                "wizard view in order to handle file uploads.")

        if step not in self.data[self.step_files_key]:
            self.data[self.step_files_key][step] = {}

@@ -184,8 +184,8 @@ class WizardView(TemplateView):
                if (isinstance(field, forms.FileField) and
                        not hasattr(cls, 'file_storage')):
                    raise NoFileStorageConfigured(
                        "You need to define 'file_storage' in your "
                        "wizard view in order to handle file uploads.")

        # build the kwargs for the wizardview instances
        kwargs['form_list'] = computed_form_list
@@ -10,15 +10,15 @@ from django.contrib.gis.measure import Area, Distance


ALL_TERMS = set([
    'bbcontains', 'bboverlaps', 'contained', 'contains',
    'contains_properly', 'coveredby', 'covers', 'crosses', 'disjoint',
    'distance_gt', 'distance_gte', 'distance_lt', 'distance_lte',
    'dwithin', 'equals', 'exact',
    'intersects', 'overlaps', 'relate', 'same_as', 'touches', 'within',
    'left', 'right', 'overlaps_left', 'overlaps_right',
    'overlaps_above', 'overlaps_below',
    'strictly_above', 'strictly_below'
])
ALL_TERMS.update(sql.constants.QUERY_TERMS)

class GeoQuery(sql.Query):

@@ -46,7 +46,7 @@ class GeoWhereNode(WhereNode):
        if isinstance(lvalue, GeoConstraint):
            data, params = lvalue.process(lookup_type, params_or_value, connection)
            spatial_sql, spatial_params = connection.ops.spatial_lookup_sql(
                data, lookup_type, params_or_value, lvalue.field, qn)
            return spatial_sql, spatial_params + params
        else:
            return super(GeoWhereNode, self).make_atom(child, qn, connection)

@@ -155,7 +155,7 @@ class OGRGeometry(GDALBase):
        "Constructs a Polygon from a bounding box (4-tuple)."
        x0, y0, x1, y1 = bbox
        return OGRGeometry('POLYGON((%s %s, %s %s, %s %s, %s %s, %s %s))' % (
            x0, y0, x0, y1, x1, y1, x1, y0, x0, y0))

    ### Geometry set-like operations ###
    # g = g1 | g2
@@ -133,11 +133,12 @@ class GEOSMutationTest(unittest.TestCase):

        # _set_list
        pg._set_list(2, (((1,2),(10,0),(12,9),(-1,15),(1,2)),
            ((4,2),(5,2),(5,3),(4,2))))
-       self.assertEqual(pg.coords,
-           (((1.0,2.0),(10.0,0.0),(12.0,9.0),(-1.0,15.0),(1.0,2.0)),
-           ((4.0,2.0),(5.0,2.0),(5.0,3.0),(4.0,2.0))),
-           'Polygon _set_list')
+       self.assertEqual(
+           pg.coords,
+           (((1.0,2.0),(10.0,0.0),(12.0,9.0),(-1.0,15.0),(1.0,2.0)),
+           ((4.0,2.0),(5.0,2.0),(5.0,3.0),(4.0,2.0))),
+           'Polygon _set_list')

        lsa = Polygon(*pg.coords)
        for f in geos_function_tests:
@@ -354,11 +354,11 @@ class BaseTests(object):
            'success'])

    @override_settings_tags(MESSAGE_TAGS={
        constants.INFO: 'info',
        constants.DEBUG: '',
        constants.WARNING: '',
        constants.ERROR: 'bad',
        29: 'custom',
    }
    )
    def test_custom_tags(self):
@@ -13,7 +13,7 @@ from .models import Redirect
@override_settings(
    APPEND_SLASH=False,
    MIDDLEWARE_CLASSES=list(settings.MIDDLEWARE_CLASSES) +
        ['django.contrib.redirects.middleware.RedirectFallbackMiddleware'],
    SITE_ID=1,
)
class RedirectTests(TestCase):

@@ -72,7 +72,7 @@ class OverriddenRedirectFallbackMiddleware(RedirectFallbackMiddleware):

@override_settings(
    MIDDLEWARE_CLASSES=list(settings.MIDDLEWARE_CLASSES) +
        ['django.contrib.redirects.tests.OverriddenRedirectFallbackMiddleware'],
    SITE_ID=1,
)
class OverriddenRedirectMiddlewareTests(TestCase):

@@ -31,7 +31,7 @@ def index(request, sitemaps,
            site = site()
        protocol = req_protocol if site.protocol is None else site.protocol
        sitemap_url = urlresolvers.reverse(
            sitemap_url_name, kwargs={'section': section})
        absolute_url = '%s://%s%s' % (protocol, req_site.domain, sitemap_url)
        sites.append(absolute_url)
        for page in range(2, site.paginator.num_pages + 1):
@@ -165,9 +165,9 @@ class BaseHandler(object):
                    response = callback(request, **param_dict)
                except:
                    signals.got_request_exception.send(
                        sender=self.__class__, request=request)
                    response = self.handle_uncaught_exception(request,
                        resolver, sys.exc_info())

        except SuspiciousOperation as e:
            # The request logger receives events for any problematic request

@@ -181,9 +181,9 @@ class BaseHandler(object):
                    response = callback(request, **param_dict)
                except:
                    signals.got_request_exception.send(
                        sender=self.__class__, request=request)
                    response = self.handle_uncaught_exception(request,
                        resolver, sys.exc_info())

        except SystemExit:
            # Allow sys.exit() to actually exit. See tickets #1023 and #4701
@@ -78,7 +78,7 @@ class Command(BaseCommand):
            except DatabaseError as e:
                raise CommandError(
                    "Cache table '%s' could not be created.\nThe error was: %s." %
                    (tablename, force_text(e)))
            for statement in index_output:
                curs.execute(statement)
        if self.verbosity > 1:

@@ -46,8 +46,8 @@ class Command(BaseCommand):

        if not len(fixture_labels):
            raise CommandError(
                "No database fixture specified. Please provide the path "
                "of at least one fixture in the command line.")

        self.verbosity = int(options.get('verbosity'))

@@ -140,10 +140,10 @@ class Command(BaseCommand):
                        obj.save(using=self.using)
                    except (DatabaseError, IntegrityError) as e:
                        e.args = ("Could not load %(app_label)s.%(object_name)s(pk=%(pk)s): %(error_msg)s" % {
                            'app_label': obj.object._meta.app_label,
                            'object_name': obj.object._meta.object_name,
                            'pk': obj.object.pk,
                            'error_msg': force_text(e)
                        },)
                        raise

@@ -176,8 +176,8 @@ class Command(BaseCommand):
        # Check kept for backwards-compatibility; it doesn't look very useful.
        if '.' in os.path.basename(fixture_name):
            raise CommandError(
                "Problem installing fixture '%s': %s is not a known "
                "serialization format." % tuple(fixture_name.rsplit('.')))

        if self.verbosity >= 2:
            self.stdout.write("Loading '%s' fixtures..." % fixture_name)

@@ -210,8 +210,8 @@ class Command(BaseCommand):
            # duplicates are only allowed in different directories.
            if len(fixture_files_in_dir) > 1:
                raise CommandError(
                    "Multiple fixtures named '%s' in %s. Aborting." %
                    (fixture_name, humanize(fixture_dir)))
            fixture_files.extend(fixture_files_in_dir)

        if fixture_name != 'initial_data' and not fixture_files:

@@ -225,7 +225,7 @@ class Command(NoArgsCommand):

        if (locale is None and not process_all) or self.domain is None:
            raise CommandError("Type '%s help %s' for usage information." % (
                os.path.basename(sys.argv[0]), sys.argv[1]))

        if self.verbosity > 1:
            self.stdout.write('examining files with the extensions: %s\n'
@@ -177,7 +177,7 @@ class Deserializer(base.Deserializer):
        data = {}
        if node.hasAttribute('pk'):
            data[Model._meta.pk.attname] = Model._meta.pk.to_python(
                node.getAttribute('pk'))

        # Also start building a dict of m2m data (this is saved as
        # {m2m_accessor_attribute : [list_of_related_objects]})

@@ -272,15 +272,15 @@ class Deserializer(base.Deserializer):
    if not model_identifier:
        raise base.DeserializationError(
            "<%s> node is missing the required '%s' attribute"
            % (node.nodeName, attr))
    try:
        Model = models.get_model(*model_identifier.split("."))
    except TypeError:
        Model = None
    if Model is None:
        raise base.DeserializationError(
-           "<%s> node has invalid model identifier: '%s'" %
-           (node.nodeName, model_identifier))
+           "<%s> node has invalid model identifier: '%s'"
+           % (node.nodeName, model_identifier))
    return Model

@@ -107,16 +107,20 @@ class ModelBase(type):

        new_class.add_to_class('_meta', Options(meta, **kwargs))
        if not abstract:
-           new_class.add_to_class('DoesNotExist', subclass_exception(str('DoesNotExist'),
-               tuple(x.DoesNotExist
-                   for x in parents if hasattr(x, '_meta') and not x._meta.abstract)
-                   or (ObjectDoesNotExist,),
-               module, attached_to=new_class))
-           new_class.add_to_class('MultipleObjectsReturned', subclass_exception(str('MultipleObjectsReturned'),
-               tuple(x.MultipleObjectsReturned
-                   for x in parents if hasattr(x, '_meta') and not x._meta.abstract)
-                   or (MultipleObjectsReturned,),
-               module, attached_to=new_class))
+           new_class.add_to_class(
+               'DoesNotExist',
+               subclass_exception(
+                   str('DoesNotExist'),
+                   tuple(x.DoesNotExist for x in parents if hasattr(x, '_meta') and not x._meta.abstract) or (ObjectDoesNotExist,),
+                   module,
+                   attached_to=new_class))
+           new_class.add_to_class(
+               'MultipleObjectsReturned',
+               subclass_exception(
+                   str('MultipleObjectsReturned'),
+                   tuple(x.MultipleObjectsReturned for x in parents if hasattr(x, '_meta') and not x._meta.abstract) or (MultipleObjectsReturned,),
+                   module,
+                   attached_to=new_class))
        if base_meta and not base_meta.abstract:
            # Non-abstract child classes inherit some attributes from their
            # non-abstract parent (unless an ABC comes before it in the

@@ -565,9 +569,9 @@ class Model(six.with_metaclass(ModelBase)):
                    field_names.add(field.attname)
            deferred_fields = [
                f.attname for f in self._meta.fields
-               if f.attname not in self.__dict__
-               and isinstance(self.__class__.__dict__[f.attname],
-                   DeferredAttribute)]
+               if (f.attname not in self.__dict__ and
+                   isinstance(self.__class__.__dict__[f.attname], DeferredAttribute))
+           ]

            loaded_fields = field_names.difference(deferred_fields)
            if loaded_fields:
@@ -694,7 +694,7 @@ class QuerySet(object):
    def _filter_or_exclude(self, negate, *args, **kwargs):
        if args or kwargs:
            assert self.query.can_filter(), \
                "Cannot filter a query once a slice has been taken."

        clone = self._clone()
        if negate:

@@ -1681,7 +1681,7 @@ class Query(object):
                count = self.aggregates_module.Count('*', is_summary=True)
            else:
                assert len(self.select) == 1, \
                    "Cannot add count col with multiple cols in 'select': %r" % self.select
                count = self.aggregates_module.Count(self.select[0].col)
        else:
            opts = self.get_meta()

@@ -1693,7 +1693,7 @@ class Query(object):
                # Because of SQL portability issues, multi-column, distinct
                # counts need a sub-query -- see get_count() for details.
                assert len(self.select) == 1, \
                    "Cannot add count col with multiple cols in 'select'."

                count = self.aggregates_module.Count(self.select[0].col, distinct=True)
                # Distinct handling is done in Count(), so don't do it at this

@@ -262,7 +262,7 @@ class DateTimeQuery(DateQuery):

    def _check_field(self, field):
        assert isinstance(field, DateTimeField), \
            "%r isn't a DateTimeField." % field.name

    def _get_select(self, col, lookup_type):
        if self.tzinfo is None:
@@ -17,8 +17,8 @@ import warnings
from functools import wraps

from django.db import (
    connections, DEFAULT_DB_ALIAS,
    DatabaseError, ProgrammingError)
from django.utils.decorators import available_attrs


@@ -687,7 +687,7 @@ class BaseModelFormSet(BaseFormSet):
        else:
            return ugettext("Please correct the duplicate data for %(field)s, "
                "which must be unique.") % {
                "field": get_text_list(unique_check, six.text_type(_("and"))),
            }

    def get_date_error_message(self, date_check):

@@ -1042,8 +1042,8 @@ class ModelChoiceIterator(object):
                yield self.choice(obj)

    def __len__(self):
-       return len(self.queryset) +\
-           (1 if self.field.empty_label is not None else 0)
+       return (len(self.queryset) +
+           (1 if self.field.empty_label is not None else 0))

    def choice(self, obj):
        return (self.field.prepare_value(obj), self.field.label_from_instance(obj))
@@ -62,9 +62,9 @@ class Media(object):
        # We need to sort the keys, and iterate over the sorted list.
        media = sorted(self._css.keys())
        return chain(*[
            [format_html('<link href="{0}" type="text/css" media="{1}" rel="stylesheet" />', self.absolute_path(path), medium)
                for path in self._css[medium]]
            for medium in media])

    def absolute_path(self, path, prefix=None):
        if path.startswith(('http://', 'https://', '/')):
@@ -59,8 +59,8 @@ def fix_IE_for_attach(request, response):
        pass
    if response.has_header('Cache-Control'):
        cache_control_values = [value.strip() for value in
            response['Cache-Control'].split(',')
            if value.strip().lower() not in offending_headers]

        if not len(cache_control_values):
            del response['Cache-Control']

@@ -38,7 +38,7 @@ class LocaleMiddleware(object):
    def process_response(self, request, response):
        language = translation.get_language()
        language_from_path = translation.get_language_from_path(
            request.path_info, supported=self._supported_languages
        )
        if (response.status_code == 404 and not language_from_path
                and self.is_language_prefix_patterns_used()):
@@ -279,7 +279,7 @@ def setup_databases(verbosity, interactive, **kwargs):
            connection = connections[alias]
            if test_db_name is None:
                test_db_name = connection.creation.create_test_db(
                    verbosity, autoclobber=not interactive)
                destroy = True
            else:
                connection.settings_dict['NAME'] = test_db_name

@@ -740,7 +740,7 @@ class TransactionTestCase(SimpleTestCase):
            conn = connections[db_name]
            if conn.features.supports_sequence_reset:
                sql_list = conn.ops.sequence_reset_by_name_sql(
                    no_style(), conn.introspection.sequence_list())
                if sql_list:
                    with transaction.commit_on_success_unless_managed(using=db_name):
                        cursor = conn.cursor()
@@ -97,8 +97,8 @@ def format_html_join(sep, format_string, args_generator):

    """
    return mark_safe(conditional_escape(sep).join(
        format_html(format_string, *tuple(args))
        for args in args_generator))


def linebreaks(value, autoescape=False):

@@ -14,8 +14,8 @@ from django.utils.encoding import force_str, force_text
from django.utils.functional import allow_lazy
from django.utils import six
from django.utils.six.moves.urllib.parse import (
    quote, quote_plus, unquote, unquote_plus, urlparse,
    urlencode as original_urlencode)

ETAG_MATCH = re.compile(r'(?:W/)?"((?:\\.|[^"])*)"')

@@ -143,14 +143,14 @@ class JsLexer(Lexer):
    ]

    states = {
-       'div': # slash will mean division
-           both_before + [
+       # slash will mean division
+       'div': both_before + [
            Tok("punct", literals("/= /"), next='reg'),
        ] + both_after,

-       'reg': # slash will mean regex
-           both_before + [
+       # slash will mean regex
+       'reg': both_before + [
            Tok("regex",
                r"""
                / # opening slash
                # First character is..

@@ -174,7 +174,7 @@ class JsLexer(Lexer):
                / # closing slash
                [a-zA-Z0-9]* # trailing flags
                """, next='div'),
        ] + both_after,
    }

    def __init__(self):
@@ -136,7 +136,7 @@ class LocalTimezone(ReferenceLocalTimezone):
        except (OverflowError, ValueError) as exc:
            exc_type = type(exc)
            exc_value = exc_type(
                "Unsupported value: %r. You should install pytz." % dt)
            exc_value.__cause__ = exc
            six.reraise(exc_type, exc_value, sys.exc_info()[2])

@@ -366,7 +366,7 @@ class BaseDateListView(MultipleObjectMixin, DateMixin, View):
        is_empty = len(qs) == 0 if paginate_by is None else not qs.exists()
        if is_empty:
            raise Http404(_("No %(verbose_name_plural)s available") % {
                'verbose_name_plural': force_text(qs.model._meta.verbose_name_plural)
            })

        return qs

@@ -65,8 +65,8 @@ class MultipleObjectMixin(ContextMixin):
            return (paginator, page, page.object_list, page.has_other_pages())
        except InvalidPage as e:
            raise Http404(_('Invalid page (%(page_number)s): %(message)s') % {
                'page_number': page_number,
                'message': str(e)
            })

    def get_paginate_by(self, queryset):
@@ -6,8 +6,8 @@ import os
testing = False

DONT_TOUCH = (
    './index.txt',
)

def target_name(fn):
    if fn.endswith('.txt'):
@@ -4,7 +4,7 @@ install-script = scripts/rpm-install.sh

[flake8]
exclude=./django/utils/dictconfig.py,./django/contrib/comments/*,./django/utils/unittest.py,./tests/comment_tests/*,./django/test/_doctest.py
-ignore=E124,E125,E126,E127,E128,E225,E226,E241,E251,E302,E501,E203,E221,E227,E231,E261,E301,F401,F403,W601
+ignore=E124,E125,E127,E128,E225,E226,E241,E251,E302,E501,E203,E221,E227,E231,E261,E301,F401,F403,W601

[metadata]
license-file = LICENSE

@@ -169,10 +169,9 @@ class Sketch(models.Model):
class Fabric(models.Model):
    NG_CHOICES = (
        ('Textured', (
            ('x', 'Horizontal'),
            ('y', 'Vertical'),
-           )
-       ),
+       )),
        ('plain', 'Smooth'),
    )
    surface = models.CharField(max_length=20, choices=NG_CHOICES)

@@ -390,8 +389,8 @@ class Post(models.Model):
    title = models.CharField(max_length=100, help_text="Some help text for the title (with unicode ŠĐĆŽćžšđ)")
    content = models.TextField(help_text="Some help text for the content (with unicode ŠĐĆŽćžšđ)")
    posted = models.DateField(
        default=datetime.date.today,
        help_text="Some help text for the date (with unicode ŠĐĆŽćžšđ)"
    )
    public = models.NullBooleanField()

@@ -425,12 +425,10 @@ class AdminViewBasicTest(AdminViewBasicTestCase):
                test=lambda obj, value: obj.chap.book.name == value),
            'chap__book__promo__id__exact': dict(
                values=[p.id for p in Promo.objects.all()],
-               test=lambda obj, value:
-                   obj.chap.book.promo_set.filter(id=value).exists()),
+               test=lambda obj, value: obj.chap.book.promo_set.filter(id=value).exists()),
            'chap__book__promo__name': dict(
                values=[p.name for p in Promo.objects.all()],
-               test=lambda obj, value:
-                   obj.chap.book.promo_set.filter(name=value).exists()),
+               test=lambda obj, value: obj.chap.book.promo_set.filter(name=value).exists()),
        }
        for filter_path, params in filters.items():
            for value in params['values']:

@@ -1253,9 +1251,9 @@ class AdminViewPermissionsTest(TestCase):
        response = self.client.get('/test_admin/admin/admin_views/customarticle/%d/delete/' % article_pk)
        self.assertTemplateUsed(response, 'custom_admin/delete_confirmation.html')
        response = self.client.post('/test_admin/admin/admin_views/customarticle/', data={
            'index': 0,
            'action': ['delete_selected'],
            '_selected_action': ['1'],
        })
        self.assertTemplateUsed(response, 'custom_admin/delete_selected_confirmation.html')
        response = self.client.get('/test_admin/admin/admin_views/customarticle/%d/history/' % article_pk)
@@ -54,8 +54,12 @@ class AdminFormfieldForDBFieldTests(TestCase):
        # Check that we got a field of the right type
        self.assertTrue(
            isinstance(widget, widgetclass),
-           "Wrong widget for %s.%s: expected %s, got %s" %
-               (model.__class__.__name__, fieldname, widgetclass, type(widget))
+           "Wrong widget for %s.%s: expected %s, got %s" % (
+               model.__class__.__name__,
+               fieldname,
+               widgetclass,
+               type(widget),
+           )
        )

        # Return the formfield so that other tests can continue
@ -605,8 +605,7 @@ class ModelTest(TestCase):
|
||||||
)
|
)
|
||||||
|
|
||||||
dicts = Article.objects.filter(
|
dicts = Article.objects.filter(
|
||||||
pub_date__year=2008).extra(
|
pub_date__year=2008).extra(select={'dashed-value': '1'}
|
||||||
select={'dashed-value': '1'}
|
|
||||||
).values('headline', 'dashed-value')
|
).values('headline', 'dashed-value')
|
||||||
self.assertEqual([sorted(d.items()) for d in dicts],
|
self.assertEqual([sorted(d.items()) for d in dicts],
|
||||||
[[('dashed-value', 1), ('headline', 'Article 11')], [('dashed-value', 1), ('headline', 'Article 12')]])
|
[[('dashed-value', 1), ('headline', 'Article 11')], [('dashed-value', 1), ('headline', 'Article 12')]])
|
||||||
|
@@ -629,8 +628,7 @@ class ModelTest(TestCase):
)

articles = Article.objects.filter(
- pub_date__year=2008).extra(
- select={'dashed-value': '1', 'undashedvalue': '2'})
+ pub_date__year=2008).extra(select={'dashed-value': '1', 'undashedvalue': '2'})
self.assertEqual(articles[0].undashedvalue, 2)

def test_create_relation_with_ugettext_lazy(self):
@@ -1138,14 +1138,14 @@ class GetCacheTests(unittest.TestCase):


@override_settings(
CACHE_MIDDLEWARE_KEY_PREFIX='settingsprefix',
CACHE_MIDDLEWARE_SECONDS=1,
CACHES={
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
},
},
USE_I18N=False,
)
class CacheUtils(TestCase):
"""TestCase for django.utils.cache functions."""
@@ -1245,25 +1245,25 @@ class CacheUtils(TestCase):


@override_settings(
CACHES={
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'KEY_PREFIX': 'cacheprefix',
},
},
)
class PrefixedCacheUtils(CacheUtils):
pass


@override_settings(
CACHE_MIDDLEWARE_SECONDS=60,
CACHE_MIDDLEWARE_KEY_PREFIX='test',
CACHES={
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
},
},
)
class CacheHEADTest(TestCase):
@@ -1318,16 +1318,16 @@ class CacheHEADTest(TestCase):


@override_settings(
CACHE_MIDDLEWARE_KEY_PREFIX='settingsprefix',
CACHES={
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
},
},
LANGUAGES=(
('en', 'English'),
('es', 'Spanish'),
),
)
class CacheI18nTest(TestCase):
@ -1496,10 +1496,10 @@ class CacheI18nTest(TestCase):
|
||||||
"Cache keys should include the time zone name when time zones are active")
|
"Cache keys should include the time zone name when time zones are active")
|
||||||
|
|
||||||
@override_settings(
|
@override_settings(
|
||||||
CACHE_MIDDLEWARE_KEY_PREFIX="test",
|
CACHE_MIDDLEWARE_KEY_PREFIX="test",
|
||||||
CACHE_MIDDLEWARE_SECONDS=60,
|
CACHE_MIDDLEWARE_SECONDS=60,
|
||||||
USE_ETAGS=True,
|
USE_ETAGS=True,
|
||||||
USE_I18N=True,
|
USE_I18N=True,
|
||||||
)
|
)
|
||||||
def test_middleware(self):
|
def test_middleware(self):
|
||||||
def set_cache(request, lang, msg):
|
def set_cache(request, lang, msg):
|
||||||
|
@@ -1560,9 +1560,9 @@ class CacheI18nTest(TestCase):
translation.deactivate()

@override_settings(
CACHE_MIDDLEWARE_KEY_PREFIX="test",
CACHE_MIDDLEWARE_SECONDS=60,
USE_ETAGS=True,
)
def test_middleware_doesnt_cache_streaming_response(self):
request = self._get_request()
@@ -1581,12 +1581,12 @@ class CacheI18nTest(TestCase):
self.assertIsNone(get_cache_data)

@override_settings(
CACHES={
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'KEY_PREFIX': 'cacheprefix'
},
},
)
class PrefixedCacheI18nTest(CacheI18nTest):
pass
@@ -1597,20 +1597,20 @@ def hello_world_view(request, value):


@override_settings(
CACHE_MIDDLEWARE_ALIAS='other',
CACHE_MIDDLEWARE_KEY_PREFIX='middlewareprefix',
CACHE_MIDDLEWARE_SECONDS=30,
CACHE_MIDDLEWARE_ANONYMOUS_ONLY=False,
CACHES={
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
},
'other': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'other',
'TIMEOUT': '1',
},
},
)
class CacheMiddlewareTest(IgnoreDeprecationWarningsMixin, TestCase):
@@ -1816,14 +1816,14 @@ class CacheMiddlewareTest(IgnoreDeprecationWarningsMixin, TestCase):
self.assertEqual(response.content, b'Hello World 16')

@override_settings(
CACHE_MIDDLEWARE_KEY_PREFIX='settingsprefix',
CACHE_MIDDLEWARE_SECONDS=1,
CACHES={
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
},
},
USE_I18N=False,
)
class TestWithTemplateResponse(TestCase):
"""
@@ -51,8 +51,7 @@ class DistinctOnTests(TestCase):
['<Staff: p1>', '<Staff: p1>', '<Staff: p2>', '<Staff: p3>'],
),
(
- Celebrity.objects.filter(fan__in=[self.fan1, self.fan2, self.fan3]).
- distinct('name').order_by('name'),
+ Celebrity.objects.filter(fan__in=[self.fan1, self.fan2, self.fan3]).distinct('name').order_by('name'),
['<Celebrity: c1>', '<Celebrity: c2>'],
),
# Does combining querysets work?
@@ -13,9 +13,9 @@ class ExtraRegressTests(TestCase):

def setUp(self):
self.u = User.objects.create_user(
username="fred",
password="secret",
email="fred@example.com"
)

def test_regression_7314_7372(self):
@@ -41,9 +41,9 @@ class ExtraRegressTests(TestCase):

# Queryset to match most recent revision:
qs = RevisionableModel.objects.extra(
where=["%(table)s.id IN (SELECT MAX(rev.id) FROM %(table)s rev GROUP BY rev.base_id)" % {
'table': RevisionableModel._meta.db_table,
}]
)

self.assertQuerysetEqual(qs,
@@ -74,8 +74,8 @@ class ExtraRegressTests(TestCase):
# select portions. Applies when portions are updated or otherwise
# moved around.
qs = User.objects.extra(
select=OrderedDict((("alpha", "%s"), ("beta", "2"), ("gamma", "%s"))),
select_params=(1, 3)
)
qs = qs.extra(select={"beta": 4})
qs = qs.extra(select={"alpha": "%s"}, select_params=[5])
@@ -129,11 +129,11 @@ class ExtraRegressTests(TestCase):
should still be present because of the extra() call.
"""
self.assertQuerysetEqual(
Order.objects.extra(where=["username=%s"],
params=["fred"],
tables=["auth_user"]
).order_by('created_by'),
[]
)

def test_regression_8819(self):
@@ -294,7 +294,7 @@ class ExtraRegressTests(TestCase):

self.assertQuerysetEqual(
TestObject.objects.filter(
pk__in=TestObject.objects.extra(select={'extra': 1}).values('pk')
),
['<TestObject: TestObject: first,second,third>']
)
@@ -312,8 +312,7 @@ class ExtraRegressTests(TestCase):
)

self.assertQuerysetEqual(
- TestObject.objects.filter(pk=obj.pk) |
- TestObject.objects.extra(where=["id > %s"], params=[obj.pk]),
+ TestObject.objects.filter(pk=obj.pk) | TestObject.objects.extra(where=["id > %s"], params=[obj.pk]),
['<TestObject: TestObject: first,second,third>']
)

@@ -377,8 +377,7 @@ class FileUploadTests(TestCase):
vars = {'boundary': 'oUrBoUnDaRyStRiNg'}
post_data = [
'--%(boundary)s',
- 'Content-Disposition: form-data; name="file_field"; '
- 'filename="MiXeD_cAsE.txt"',
+ 'Content-Disposition: form-data; name="file_field"; filename="MiXeD_cAsE.txt"',
'Content-Type: application/octet-stream',
'',
'file contents\n'
@@ -1134,14 +1134,14 @@ class FieldsTests(SimpleTestCase):
f.choices = [p for p in f.choices if p[0].endswith('.py')]
f.choices.sort()
expected = [
('/django/forms/__init__.py', '__init__.py'),
('/django/forms/fields.py', 'fields.py'),
('/django/forms/forms.py', 'forms.py'),
('/django/forms/formsets.py', 'formsets.py'),
('/django/forms/models.py', 'models.py'),
('/django/forms/util.py', 'util.py'),
('/django/forms/utils.py', 'utils.py'),
('/django/forms/widgets.py', 'widgets.py')
]
for exp, got in zip(expected, fix_os_paths(f.choices)):
self.assertEqual(exp[1], got[1])
@ -1155,14 +1155,14 @@ class FieldsTests(SimpleTestCase):
|
||||||
f = FilePathField(path=path, match='^.*?\.py$')
|
f = FilePathField(path=path, match='^.*?\.py$')
|
||||||
f.choices.sort()
|
f.choices.sort()
|
||||||
expected = [
|
expected = [
|
||||||
('/django/forms/__init__.py', '__init__.py'),
|
('/django/forms/__init__.py', '__init__.py'),
|
||||||
('/django/forms/fields.py', 'fields.py'),
|
('/django/forms/fields.py', 'fields.py'),
|
||||||
('/django/forms/forms.py', 'forms.py'),
|
('/django/forms/forms.py', 'forms.py'),
|
||||||
('/django/forms/formsets.py', 'formsets.py'),
|
('/django/forms/formsets.py', 'formsets.py'),
|
||||||
('/django/forms/models.py', 'models.py'),
|
('/django/forms/models.py', 'models.py'),
|
||||||
('/django/forms/util.py', 'util.py'),
|
('/django/forms/util.py', 'util.py'),
|
||||||
('/django/forms/utils.py', 'utils.py'),
|
('/django/forms/utils.py', 'utils.py'),
|
||||||
('/django/forms/widgets.py', 'widgets.py')
|
('/django/forms/widgets.py', 'widgets.py')
|
||||||
]
|
]
|
||||||
for exp, got in zip(expected, fix_os_paths(f.choices)):
|
for exp, got in zip(expected, fix_os_paths(f.choices)):
|
||||||
self.assertEqual(exp[1], got[1])
|
self.assertEqual(exp[1], got[1])
|
||||||
|
@@ -1174,16 +1174,16 @@ class FieldsTests(SimpleTestCase):
f = FilePathField(path=path, recursive=True, match='^.*?\.py$')
f.choices.sort()
expected = [
('/django/forms/__init__.py', '__init__.py'),
('/django/forms/extras/__init__.py', 'extras/__init__.py'),
('/django/forms/extras/widgets.py', 'extras/widgets.py'),
('/django/forms/fields.py', 'fields.py'),
('/django/forms/forms.py', 'forms.py'),
('/django/forms/formsets.py', 'formsets.py'),
('/django/forms/models.py', 'models.py'),
('/django/forms/util.py', 'util.py'),
('/django/forms/utils.py', 'utils.py'),
('/django/forms/widgets.py', 'widgets.py')
]
for exp, got in zip(expected, fix_os_paths(f.choices)):
self.assertEqual(exp[1], got[1])
@@ -1182,9 +1182,9 @@ class ClearableFileInputTests(TestCase):
widget = ClearableFileInput()
widget.is_required = False
self.assertEqual(widget.value_from_datadict(
data={'myfile-clear': True},
files={},
name='myfile'), False)

def test_clear_input_checked_returns_false_only_if_not_required(self):
"""
@@ -1196,6 +1196,6 @@ class ClearableFileInputTests(TestCase):
widget.is_required = True
f = SimpleUploadedFile('something.txt', b'content')
self.assertEqual(widget.value_from_datadict(
data={'myfile-clear': True},
files={'myfile': f},
name='myfile'), f)
@@ -124,10 +124,10 @@ class ModelFormCallableModelDefault(TestCase):
obj2 = ChoiceOptionModel.objects.create(id=2, name='option 2')
obj3 = ChoiceOptionModel.objects.create(id=3, name='option 3')
self.assertHTMLEqual(ChoiceFieldForm(initial={
'choice': obj2,
'choice_int': obj2,
'multi_choice': [obj2,obj3],
'multi_choice_int': ChoiceOptionModel.objects.exclude(name="default"),
}).as_p(), """<p><label for="id_choice">Choice:</label> <select name="choice" id="id_choice">
<option value="1">ChoiceOption 1</option>
<option value="2" selected="selected">ChoiceOption 2</option>
@@ -98,23 +98,23 @@ class GenericRelationsTests(TestCase):
)

self.assertQuerysetEqual(TaggedItem.objects.all(), [
('clearish', Mineral, quartz.pk),
('fatty', Animal, platypus.pk),
('fatty', Vegetable, bacon.pk),
('hairy', Animal, lion.pk),
('salty', Vegetable, bacon.pk),
('shiny', Animal, platypus.pk),
('yellow', Animal, lion.pk)
],
comp_func
)
lion.delete()
self.assertQuerysetEqual(TaggedItem.objects.all(), [
('clearish', Mineral, quartz.pk),
('fatty', Animal, platypus.pk),
('fatty', Vegetable, bacon.pk),
('salty', Vegetable, bacon.pk),
('shiny', Animal, platypus.pk)
],
comp_func
)
@@ -124,11 +124,11 @@ class GenericRelationsTests(TestCase):
quartz_pk = quartz.pk
quartz.delete()
self.assertQuerysetEqual(TaggedItem.objects.all(), [
('clearish', Mineral, quartz_pk),
('fatty', Animal, platypus.pk),
('fatty', Vegetable, bacon.pk),
('salty', Vegetable, bacon.pk),
('shiny', Animal, platypus.pk)
],
comp_func
)
@@ -138,10 +138,10 @@ class GenericRelationsTests(TestCase):
tag.delete()
self.assertQuerysetEqual(bacon.tags.all(), ["<TaggedItem: salty>"])
self.assertQuerysetEqual(TaggedItem.objects.all(), [
('clearish', Mineral, quartz_pk),
('fatty', Animal, platypus.pk),
('salty', Vegetable, bacon.pk),
('shiny', Animal, platypus.pk)
],
comp_func
)
@@ -600,8 +600,8 @@ class FormattingTests(TransRealMixin, TestCase):
# Russian locale (with E as month)
with translation.override('ru', deactivate=True):
self.assertHTMLEqual(
'<select name="mydate_day" id="id_mydate_day">\n<option value="1">1</option>\n<option value="2">2</option>\n<option value="3">3</option>\n<option value="4">4</option>\n<option value="5">5</option>\n<option value="6">6</option>\n<option value="7">7</option>\n<option value="8">8</option>\n<option value="9">9</option>\n<option value="10">10</option>\n<option value="11">11</option>\n<option value="12">12</option>\n<option value="13">13</option>\n<option value="14">14</option>\n<option value="15">15</option>\n<option value="16">16</option>\n<option value="17">17</option>\n<option value="18">18</option>\n<option value="19">19</option>\n<option value="20">20</option>\n<option value="21">21</option>\n<option value="22">22</option>\n<option value="23">23</option>\n<option value="24">24</option>\n<option value="25">25</option>\n<option value="26">26</option>\n<option value="27">27</option>\n<option value="28">28</option>\n<option value="29">29</option>\n<option value="30">30</option>\n<option value="31" selected="selected">31</option>\n</select>\n<select name="mydate_month" id="id_mydate_month">\n<option value="1">\u042f\u043d\u0432\u0430\u0440\u044c</option>\n<option value="2">\u0424\u0435\u0432\u0440\u0430\u043b\u044c</option>\n<option value="3">\u041c\u0430\u0440\u0442</option>\n<option value="4">\u0410\u043f\u0440\u0435\u043b\u044c</option>\n<option value="5">\u041c\u0430\u0439</option>\n<option value="6">\u0418\u044e\u043d\u044c</option>\n<option value="7">\u0418\u044e\u043b\u044c</option>\n<option value="8">\u0410\u0432\u0433\u0443\u0441\u0442</option>\n<option value="9">\u0421\u0435\u043d\u0442\u044f\u0431\u0440\u044c</option>\n<option value="10">\u041e\u043a\u0442\u044f\u0431\u0440\u044c</option>\n<option value="11">\u041d\u043e\u044f\u0431\u0440\u044c</option>\n<option value="12" selected="selected">\u0414\u0435\u043a\u0430\u0431\u0440\u044c</option>\n</select>\n<select name="mydate_year" id="id_mydate_year">\n<option value="2009" selected="selected">2009</option>\n<option value="2010">2010</option>\n<option value="2011">2011</option>\n<option value="2012">2012</option>\n<option value="2013">2013</option>\n<option value="2014">2014</option>\n<option value="2015">2015</option>\n<option value="2016">2016</option>\n<option value="2017">2017</option>\n<option value="2018">2018</option>\n</select>',
SelectDateWidget(years=range(2009, 2019)).render('mydate', datetime.date(2009, 12, 31))
)

# English locale
@@ -169,8 +169,8 @@ class AdminEmailHandlerTest(TestCase):
self.assertTrue(admin_email_handler.connection().fail_silently)

@override_settings(
ADMINS=(('whatever admin', 'admin@example.com'),),
EMAIL_SUBJECT_PREFIX='-SuperAwesomeSubject-'
)
def test_accepts_args(self):
"""
@@ -199,9 +199,9 @@ class AdminEmailHandlerTest(TestCase):
admin_email_handler.filters = orig_filters

@override_settings(
ADMINS=(('whatever admin', 'admin@example.com'),),
EMAIL_SUBJECT_PREFIX='-SuperAwesomeSubject-',
INTERNAL_IPS=('127.0.0.1',),
)
def test_accepts_args_and_request(self):
"""
@@ -234,9 +234,9 @@ class AdminEmailHandlerTest(TestCase):
admin_email_handler.filters = orig_filters

@override_settings(
ADMINS=(('admin', 'admin@example.com'),),
EMAIL_SUBJECT_PREFIX='',
DEBUG=False,
)
def test_subject_accepts_newlines(self):
"""
@@ -257,9 +257,9 @@ class AdminEmailHandlerTest(TestCase):
self.assertEqual(mail.outbox[0].subject, expected_subject)

@override_settings(
ADMINS=(('admin', 'admin@example.com'),),
EMAIL_SUBJECT_PREFIX='',
DEBUG=False,
)
def test_truncate_subject(self):
"""
@@ -279,8 +279,8 @@ class AdminEmailHandlerTest(TestCase):
self.assertEqual(mail.outbox[0].subject, expected_subject)

@override_settings(
ADMINS=(('admin', 'admin@example.com'),),
DEBUG=False,
)
def test_uses_custom_email_backend(self):
"""
@@ -324,8 +324,8 @@ class MailTests(HeadersCheckMixin, SimpleTestCase):

connection = mail.get_connection('mail.custombackend.EmailBackend')
send_mass_mail([
('Subject1', 'Content1', 'from1@example.com', ['to1@example.com']),
('Subject2', 'Content2', 'from2@example.com', ['to2@example.com']),
], connection=connection)
self.assertEqual(mail.outbox, [])
self.assertEqual(len(connection.test_outbox), 2)
@@ -55,23 +55,23 @@ class ManagersRegressionTests(TestCase):
# Since Child6 inherits from Child4, the corresponding rows from f1 and
# f2 also appear here. This is the expected result.
self.assertQuerysetEqual(Child4._default_manager.order_by('data'), [
"<Child4: d1>",
"<Child4: d2>",
"<Child4: f1>",
"<Child4: f2>"
]
)
self.assertQuerysetEqual(Child4.manager1.all(), [
"<Child4: d1>",
"<Child4: f1>"
],
ordered=False
)
self.assertQuerysetEqual(Child5._default_manager.all(), ["<Child5: fred>"])
self.assertQuerysetEqual(Child6._default_manager.all(), ["<Child6: f1>"])
self.assertQuerysetEqual(Child7._default_manager.order_by('name'), [
"<Child7: barney>",
"<Child7: fred>"
]
)
@@ -213,9 +213,9 @@ class ManyToOneTests(TestCase):
self.assertQuerysetEqual(
Article.objects.filter(reporter__in=[self.r.id,self.r2.id]).distinct(),
[
"<Article: John's second story>",
"<Article: Paul's story>",
"<Article: This is a test>",
])
self.assertQuerysetEqual(
Article.objects.filter(reporter__in=[self.r,self.r2]).distinct(),
|
@ -229,8 +229,8 @@ class ManyToOneTests(TestCase):
|
||||||
# then converted into a query
|
# then converted into a query
|
||||||
self.assertQuerysetEqual(
|
self.assertQuerysetEqual(
|
||||||
Article.objects.filter(
|
Article.objects.filter(
|
||||||
reporter__in=Reporter.objects.filter(first_name='John').values('pk').query
|
reporter__in=Reporter.objects.filter(first_name='John').values('pk').query
|
||||||
).distinct(),
|
).distinct(),
|
||||||
[
|
[
|
||||||
"<Article: John's second story>",
|
"<Article: John's second story>",
|
||||||
"<Article: This is a test>",
|
"<Article: This is a test>",
|
||||||
|
|
|
@@ -374,7 +374,7 @@ class MiddlewareTests(BaseMiddlewareExceptionTest):
self._add_middleware(middleware)
self._add_middleware(pre_middleware)
self.assert_exceptions_handled('/middleware_exceptions/null_view/', [
"The view middleware_exceptions.views.null_view didn't return an HttpResponse object.",
],
ValueError())
@@ -391,7 +391,7 @@ class MiddlewareTests(BaseMiddlewareExceptionTest):
self._add_middleware(middleware)
self._add_middleware(pre_middleware)
self.assert_exceptions_handled('/middleware_exceptions/null_view/', [
"The view middleware_exceptions.views.null_view didn't return an HttpResponse object."
],
ValueError())
@@ -685,8 +685,8 @@ class BadMiddlewareTests(BaseMiddlewareExceptionTest):
self._add_middleware(bad_middleware)
self._add_middleware(pre_middleware)
self.assert_exceptions_handled('/middleware_exceptions/null_view/', [
"The view middleware_exceptions.views.null_view didn't return an HttpResponse object.",
'Test Response Exception'
])

# Check that the right middleware methods have been invoked
@@ -702,7 +702,7 @@ class BadMiddlewareTests(BaseMiddlewareExceptionTest):
self._add_middleware(bad_middleware)
self._add_middleware(pre_middleware)
self.assert_exceptions_handled('/middleware_exceptions/null_view/', [
"The view middleware_exceptions.views.null_view didn't return an HttpResponse object."
],
ValueError())
@@ -31,13 +31,13 @@ class Bar(models.Model):
class Whiz(models.Model):
CHOICES = (
('Group 1', (
(1, 'First'),
(2, 'Second'),
)
),
('Group 2', (
(3, 'Third'),
(4, 'Fourth'),
)
),
(0, 'Other'),
@@ -218,9 +218,9 @@ if Image:
height_field='mugshot_height',
width_field='mugshot_width')
headshot_height = models.PositiveSmallIntegerField(
blank=True, null=True)
headshot_width = models.PositiveSmallIntegerField(
blank=True, null=True)
headshot = TestImageField(blank=True, null=True,
storage=temp_storage, upload_to='tests',
height_field='headshot_height',
@@ -679,12 +679,12 @@ class UniqueTest(TestCase):
isbn = '12345'
DerivedBook.objects.create(title=title, author=self.writer, isbn=isbn)
form = DerivedBookForm({
'title': 'Other',
'author': self.writer.pk,
'isbn': '9876',
'suffix1': '0',
'suffix2': '0'
})
self.assertFalse(form.is_valid())
self.assertEqual(len(form.errors), 1)
self.assertEqual(form.errors['__all__'],
@@ -953,12 +953,12 @@ class OldFormForXTests(TestCase):
self.assertHTMLEqual(six.text_type(f), '''<tr><th>Name:</th><td><input type="text" name="name" value="Mike Royko" maxlength="50" /><br /><span class="helptext">Use both first and last names.</span></td></tr>''')

art = Article(
headline='Test article',
slug='test-article',
pub_date=datetime.date(1988, 1, 4),
writer=w,
article='Hello.'
)
art.save()
art_id_1 = art.id
self.assertEqual(art_id_1 is not None, True)
@@ -984,11 +984,11 @@ class OldFormForXTests(TestCase):
<option value="3">Live</option>
</select></li>''' % (w_woodward.pk, w_royko.pk, c1.pk, c2.pk, c3.pk))
f = TestArticleForm({
'headline': 'Test headline',
'slug': 'test-headline',
'pub_date': '1984-02-06',
'writer': six.text_type(w_royko.pk),
'article': 'Hello.'
}, instance=art)
self.assertEqual(f.errors, {})
self.assertEqual(f.is_valid(), True)
@@ -999,9 +999,9 @@ class OldFormForXTests(TestCase):
# You can create a form over a subset of the available fields
# by specifying a 'fields' argument to form_for_instance.
f = PartialArticleFormWithSlug({
'headline': 'New headline',
'slug': 'new-headline',
'pub_date': '1988-01-04'
}, auto_id=False, instance=art)
self.assertHTMLEqual(f.as_ul(), '''<li>Headline: <input type="text" name="headline" value="New headline" maxlength="50" /></li>
<li>Slug: <input type="text" name="slug" value="new-headline" maxlength="50" /></li>
@@ -1040,11 +1040,11 @@ class OldFormForXTests(TestCase):

# Initial values can be provided for model forms
f = TestArticleForm(
auto_id=False,
initial={
'headline': 'Your headline here',
'categories': [str(c1.id), str(c2.id)]
})
self.assertHTMLEqual(f.as_ul(), '''<li>Headline: <input type="text" name="headline" value="Your headline here" maxlength="50" /></li>
<li>Slug: <input type="text" name="slug" maxlength="50" /></li>
<li>Pub date: <input type="text" name="pub_date" /></li>
@@ -1067,12 +1067,12 @@ class OldFormForXTests(TestCase):
</select></li>''' % (w_woodward.pk, w_royko.pk, c1.pk, c2.pk, c3.pk))

f = TestArticleForm({
'headline': 'New headline',
'slug': 'new-headline',
'pub_date': '1988-01-04',
'writer': six.text_type(w_royko.pk),
'article': 'Hello.',
'categories': [six.text_type(c1.id), six.text_type(c2.id)]
}, instance=new_art)
new_art = f.save()
self.assertEqual(new_art.id == art_id_1, True)
@@ -1388,8 +1388,8 @@ class OldFormForXTests(TestCase):
# Upload a file and ensure it all works as expected.

f = TextFileForm(
data={'description': 'Assistance'},
files={'file': SimpleUploadedFile('test1.txt', b'hello world')})
self.assertEqual(f.is_valid(), True)
self.assertEqual(type(f.cleaned_data['file']), SimpleUploadedFile)
instance = f.save()
@@ -1397,8 +1397,8 @@ class OldFormForXTests(TestCase):

instance.file.delete()
f = TextFileForm(
data={'description': 'Assistance'},
files={'file': SimpleUploadedFile('test1.txt', b'hello world')})
self.assertEqual(f.is_valid(), True)
self.assertEqual(type(f.cleaned_data['file']), SimpleUploadedFile)
instance = f.save()
@@ -1406,16 +1406,16 @@ class OldFormForXTests(TestCase):

# Check if the max_length attribute has been inherited from the model.
f = TextFileForm(
data={'description': 'Assistance'},
files={'file': SimpleUploadedFile('test-maxlength.txt', b'hello world')})
self.assertEqual(f.is_valid(), False)

# Edit an instance that already has the file defined in the model. This will not
# save the file again, but leave it exactly as it is.

f = TextFileForm(
data={'description': 'Assistance'},
instance=instance)
self.assertEqual(f.is_valid(), True)
self.assertEqual(f.cleaned_data['file'].name, 'tests/test1.txt')
instance = f.save()
@@ -1427,8 +1427,8 @@ class OldFormForXTests(TestCase):
# Override the file by uploading a new one.

f = TextFileForm(
data={'description': 'Assistance'},
files={'file': SimpleUploadedFile('test2.txt', b'hello world')}, instance=instance)
self.assertEqual(f.is_valid(), True)
instance = f.save()
self.assertEqual(instance.file.name, 'tests/test2.txt')
@@ -1436,8 +1436,8 @@ class OldFormForXTests(TestCase):
# Delete the current file since this is not done by Django.
instance.file.delete()
f = TextFileForm(
data={'description': 'Assistance'},
files={'file': SimpleUploadedFile('test2.txt', b'hello world')})
self.assertEqual(f.is_valid(), True)
instance = f.save()
self.assertEqual(instance.file.name, 'tests/test2.txt')
@@ -1455,8 +1455,8 @@ class OldFormForXTests(TestCase):
self.assertEqual(instance.file.name, '')

f = TextFileForm(
data={'description': 'Assistance'},
files={'file': SimpleUploadedFile('test3.txt', b'hello world')}, instance=instance)
self.assertEqual(f.is_valid(), True)
instance = f.save()
self.assertEqual(instance.file.name, 'tests/test3.txt')
@@ -1464,8 +1464,8 @@ class OldFormForXTests(TestCase):
# Instance can be edited w/out re-uploading the file and existing file should be preserved.

f = TextFileForm(
data={'description': 'New Description'},
instance=instance)
f.fields['file'].required = False
self.assertEqual(f.is_valid(), True)
instance = f.save()
@@ -1477,8 +1477,8 @@ class OldFormForXTests(TestCase):
instance.delete()

f = TextFileForm(
data={'description': 'Assistance'},
files={'file': SimpleUploadedFile('test3.txt', b'hello world')})
self.assertEqual(f.is_valid(), True)
instance = f.save()
self.assertEqual(instance.file.name, 'tests/test3.txt')
@@ -1511,8 +1511,8 @@ class OldFormForXTests(TestCase):
image_data2 = fp.read()

f = ImageFileForm(
data={'description': 'An image'},
files={'image': SimpleUploadedFile('test.png', image_data)})
self.assertEqual(f.is_valid(), True)
self.assertEqual(type(f.cleaned_data['image']), SimpleUploadedFile)
instance = f.save()
@@ -1524,8 +1524,8 @@ class OldFormForXTests(TestCase):
# because the dimension fields are not null=True.
instance.image.delete(save=False)
f = ImageFileForm(
data={'description': 'An image'},
files={'image': SimpleUploadedFile('test.png', image_data)})
self.assertEqual(f.is_valid(), True)
self.assertEqual(type(f.cleaned_data['image']), SimpleUploadedFile)
instance = f.save()
@@ -1550,8 +1550,8 @@ class OldFormForXTests(TestCase):
# Override the file by uploading a new one.

f = ImageFileForm(
data={'description': 'Changed it'},
files={'image': SimpleUploadedFile('test2.png', image_data2)}, instance=instance)
self.assertEqual(f.is_valid(), True)
instance = f.save()
self.assertEqual(instance.image.name, 'tests/test2.png')
@@ -1564,8 +1564,8 @@ class OldFormForXTests(TestCase):
instance.delete()

f = ImageFileForm(
data={'description': 'Changed it'},
files={'image': SimpleUploadedFile('test2.png', image_data2)})
self.assertEqual(f.is_valid(), True)
instance = f.save()
self.assertEqual(instance.image.name, 'tests/test2.png')
@@ -1593,8 +1593,8 @@ class OldFormForXTests(TestCase):
self.assertEqual(instance.height, None)

f = OptionalImageFileForm(
data={'description': 'And a final one'},
files={'image': SimpleUploadedFile('test3.png', image_data)}, instance=instance)
self.assertEqual(f.is_valid(), True)
instance = f.save()
self.assertEqual(instance.image.name, 'tests/test3.png')
@@ -1603,8 +1603,8 @@ class OldFormForXTests(TestCase):

# Editing the instance without re-uploading the image should not affect the image or its width/height properties
f = OptionalImageFileForm(
data={'description': 'New Description'},
instance=instance)
self.assertEqual(f.is_valid(), True)
instance = f.save()
self.assertEqual(instance.description, 'New Description')
@@ -1617,8 +1617,8 @@ class OldFormForXTests(TestCase):
instance.delete()

f = OptionalImageFileForm(
data={'description': 'And a final one'},
files={'image': SimpleUploadedFile('test4.png', image_data2)}
)
self.assertEqual(f.is_valid(), True)
instance = f.save()
@@ -1628,8 +1628,8 @@ class OldFormForXTests(TestCase):
instance.delete()
# Test callable upload_to behavior that's dependent on the value of another field in the model
f = ImageFileForm(
data={'description': 'And a final one', 'path': 'foo'},
files={'image': SimpleUploadedFile('test4.png', image_data)})
self.assertEqual(f.is_valid(), True)
instance = f.save()
self.assertEqual(instance.image.name, 'foo/test4.png')
@@ -369,25 +369,25 @@ class FormfieldShouldDeleteFormTests(TestCase):
    DeleteFormset = modelformset_factory(User, form=CustomDeleteUserForm, formset=BaseCustomDeleteModelFormSet)

    data = {
        'form-TOTAL_FORMS': '4',
        'form-INITIAL_FORMS': '0',
        'form-MAX_NUM_FORMS': '4',
        'form-0-username': 'John',
        'form-0-serial': '1',
        'form-1-username': 'Paul',
        'form-1-serial': '2',
        'form-2-username': 'George',
        'form-2-serial': '3',
        'form-3-username': 'Ringo',
        'form-3-serial': '5',
    }

    delete_all_ids = {
        'form-0-DELETE': '1',
        'form-1-DELETE': '1',
        'form-2-DELETE': '1',
        'form-3-DELETE': '1',
    }

    def test_init_database(self):
        """ Add test data to database via formset """
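The `data` dictionary above is the raw POST payload a formset expects: the `form-TOTAL_FORMS`, `form-INITIAL_FORMS` and `form-MAX_NUM_FORMS` keys come from the management form, and the numbered keys feed the individual forms. A rough usage sketch building on the names in this hunk (`DeleteFormset` and the field names are taken from the test; the flow is the standard formset API, shown here only for orientation):

    formset = DeleteFormset(data)      # bind the POST-style dict, management form included
    assert formset.is_valid()          # management-form counts must agree with the data
    saved_users = formset.save()       # creates the four User rows: John, Paul, George, Ringo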
@@ -123,8 +123,8 @@ class DerivedM(BaseM):
    derived_name = models.CharField(max_length=100)

    def __str__(self):
-        return "PK = %d, base_name = %s, derived_name = %s" \
-            % (self.customPK, self.base_name, self.derived_name)
+        return "PK = %d, base_name = %s, derived_name = %s" % (
+            self.customPK, self.base_name, self.derived_name)

class AuditBase(models.Model):
    planned_date = models.DateField()
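The `__str__` rewrite above is the shape most hunks in this commit take: pep8 reports E126 (continuation line over-indented for hanging indent) when a wrapped expression is indented past a plain hanging indent, and the preferred fix is to break after an opening bracket and indent the continuation once. A standalone illustration with invented names, not code from the patch:

    def describe(pk, base_name, derived_name):
        # Old layout: backslash continuation, wrapped operands pushed rightwards.
        old_text = "PK = %d, base_name = %s, derived_name = %s" \
            % (pk, base_name, derived_name)

        # Preferred layout: break after the opening parenthesis, one hanging indent.
        new_text = "PK = %d, base_name = %s, derived_name = %s" % (
            pk, base_name, derived_name)
        return old_text == new_text  # both build the same string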
@@ -385,8 +385,8 @@ class ModelInheritanceTest(TestCase):
        # abstract models in the inheritance chain, for consistency with
        # verbose_name.
        self.assertEqual(
            InternalCertificationAudit._meta.verbose_name_plural,
            'Audits'
        )

    def test_inherited_nullable_exclude(self):
@@ -121,16 +121,16 @@ class ModelTests(TestCase):

        # Regression test for #18969
        self.assertQuerysetEqual(
            Party.objects.filter(when__year=1), [
                datetime.date(1, 3, 3),
            ],
            attrgetter("when")
        )
        self.assertQuerysetEqual(
            Party.objects.filter(when__year='1'), [
                datetime.date(1, 3, 3),
            ],
            attrgetter("when")
        )

        if (3,) <= sys.version_info < (3, 3) and connection.vendor == 'mysql':
@@ -28,11 +28,11 @@ class NullQueriesTests(TestCase):

        # Excluding the previous result returns everything.
        self.assertQuerysetEqual(
            Choice.objects.exclude(choice=None).order_by('id'),
            [
                '<Choice: Choice: Because. in poll Q: Why? >',
                '<Choice: Choice: Why Not? in poll Q: Why? >'
            ]
        )

        # Valid query, but fails because foo isn't a keyword
@@ -248,11 +248,11 @@ class ModelPaginationTests(TestCase):
        p = paginator.page(1)
        self.assertEqual("<Page 1 of 2>", six.text_type(p))
        self.assertQuerysetEqual(p.object_list, [
                "<Article: Article 1>",
                "<Article: Article 2>",
                "<Article: Article 3>",
                "<Article: Article 4>",
                "<Article: Article 5>"
            ],
            ordered=False
        )
@@ -269,10 +269,10 @@ class ModelPaginationTests(TestCase):
        p = paginator.page(2)
        self.assertEqual("<Page 2 of 2>", six.text_type(p))
        self.assertQuerysetEqual(p.object_list, [
                "<Article: Article 6>",
                "<Article: Article 7>",
                "<Article: Article 8>",
                "<Article: Article 9>"
            ],
            ordered=False
        )
@@ -302,8 +302,8 @@ class ModelPaginationTests(TestCase):
        # Make sure slicing the Page object with numbers and slice objects work.
        self.assertEqual(p[0], Article.objects.get(headline='Article 1'))
        self.assertQuerysetEqual(p[slice(2)], [
                "<Article: Article 1>",
                "<Article: Article 2>",
            ]
        )
        # After __getitem__ is called, object_list is a list
@@ -540,7 +540,7 @@ class Queries1Tests(BaseQuerysetTest):
            ['<Author: a1>']
        )
        self.assertQuerysetEqual(
            Author.objects.filter(Q(extra__note=self.n1)|Q(item__note=self.n3)).filter(id=self.a1.id),
            ['<Author: a1>']
        )

@@ -298,12 +298,12 @@ class RequestsTests(SimpleTestCase):
        # we don't want the data held in memory twice, and we don't want to
        # silence the error by setting body = '' either.
        payload = FakePayload("\r\n".join([
            '--boundary',
            'Content-Disposition: form-data; name="name"',
            '',
            'value',
            '--boundary--'
            '']))
        request = WSGIRequest({'REQUEST_METHOD': 'POST',
                               'CONTENT_TYPE': 'multipart/form-data; boundary=boundary',
                               'CONTENT_LENGTH': len(payload),
@@ -320,12 +320,12 @@ class RequestsTests(SimpleTestCase):
        # being a binary upload, in which case it should still be accessible
        # via body.
        payload_data = b"\r\n".join([
            b'--boundary',
            b'Content-ID: id; name="name"',
            b'',
            b'value',
            b'--boundary--'
            b''])
        payload = FakePayload(payload_data)
        request = WSGIRequest({'REQUEST_METHOD': 'POST',
                               'CONTENT_TYPE': 'multipart/related; boundary=boundary',
@@ -343,12 +343,12 @@ class RequestsTests(SimpleTestCase):
        # Every request.POST with Content-Length >= 0 is a valid request,
        # this test ensures that we handle Content-Length == 0.
        payload = FakePayload("\r\n".join([
            '--boundary',
            'Content-Disposition: form-data; name="name"',
            '',
            'value',
            '--boundary--'
            '']))
        request = WSGIRequest({'REQUEST_METHOD': 'POST',
                               'CONTENT_TYPE': 'multipart/form-data; boundary=boundary',
                               'CONTENT_LENGTH': 0,
@@ -413,12 +413,12 @@ class RequestsTests(SimpleTestCase):
        the stream is read second. Using multipart/form-data instead of urlencoded.
        """
        payload = FakePayload("\r\n".join([
            '--boundary',
            'Content-Disposition: form-data; name="name"',
            '',
            'value',
            '--boundary--'
            '']))
        request = WSGIRequest({'REQUEST_METHOD': 'POST',
                               'CONTENT_TYPE': 'multipart/form-data; boundary=boundary',
                               'CONTENT_LENGTH': len(payload),
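For readers unfamiliar with these fixtures: `FakePayload` wraps a raw request body and `WSGIRequest` builds a request object straight from a WSGI environ dict, so the tests can exercise `request.POST` and `request.body` without a running server. A rough sketch of the pattern, assuming the usual WSGI environ keys (the hunks above cut off before the `wsgi.input` entry, so treat the exact environ as illustrative):

    from django.core.handlers.wsgi import WSGIRequest
    from django.test.client import FakePayload

    payload = FakePayload("\r\n".join([
        '--boundary',
        'Content-Disposition: form-data; name="name"',
        '',
        'value',
        '--boundary--',
        '',
    ]))
    request = WSGIRequest({
        'REQUEST_METHOD': 'POST',
        'CONTENT_TYPE': 'multipart/form-data; boundary=boundary',
        'CONTENT_LENGTH': len(payload),
        'wsgi.input': payload,          # the stream the request body is read from
    })
    print(request.POST['name'])         # should print 'value'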
@@ -19,8 +19,8 @@ class ReverseSingleRelatedTests(TestCase):

        # Only one source is available via all() due to the custom default manager.
        self.assertQuerysetEqual(
            Source.objects.all(),
            ["<Source: Source object>"]
        )

        self.assertEqual(public_item.source, public_source)
@@ -96,8 +96,8 @@ class SelectRelatedRegressTests(TestCase):
        Item.objects.create(name="item2")

        self.assertQuerysetEqual(
            Item.objects.select_related("child").order_by("name"),
            ["<Item: item1>", "<Item: item2>"]
        )

    def test_regression_12851(self):
@@ -556,10 +556,8 @@ def naturalKeyTest(format, self):
    self.assertEqual(books[1].object.pk, None)


-for format in [
-    f for f in serializers.get_serializer_formats()
-    if not isinstance(serializers.get_serializer(f), serializers.BadSerializer)
-]:
+for format in [f for f in serializers.get_serializer_formats()
+               if not isinstance(serializers.get_serializer(f), serializers.BadSerializer)]:
    setattr(SerializerTests, 'test_' + format + '_serializer', curry(serializerTest, format))
    setattr(SerializerTests, 'test_' + format + '_natural_key_serializer', curry(naturalKeySerializerTest, format))
    setattr(SerializerTests, 'test_' + format + '_serializer_fields', curry(fieldsTest, format))
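The loop above generates one test method per serializer format: `curry(serializerTest, format)` returns a plain function, so when unittest looks it up on the class it binds like a method and `self` arrives as the remaining positional argument, which is why the test helpers take `(format, self)` in that order. A self-contained sketch of the same technique with invented names:

    import unittest

    class GeneratedTests(unittest.TestCase):
        pass

    def make_roundtrip_test(fmt):
        # Returns an ordinary function; attached to the class it behaves like a method.
        def test(self):
            self.assertEqual(fmt.strip(), fmt)
        return test

    for fmt in ['json', 'xml', 'yaml']:
        setattr(GeneratedTests, 'test_%s_roundtrip' % fmt, make_roundtrip_test(fmt))

    if __name__ == '__main__':
        unittest.main()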
@@ -31,8 +31,8 @@ class TestSigner(TestCase):
        signer = signing.Signer('predictable-secret', salt='extra-salt')
        self.assertEqual(
            signer.signature('hello'),
            signing.base64_hmac('extra-salt' + 'signer',
                                'hello', 'predictable-secret').decode()
        )
        self.assertNotEqual(
            signing.Signer('predictable-secret', salt='one').signature('hello'),
@@ -111,7 +111,7 @@ class CachedLoader(unittest.TestCase):
        self.old_TEMPLATE_LOADERS = settings.TEMPLATE_LOADERS
        settings.TEMPLATE_LOADERS = (
            ('django.template.loaders.cached.Loader', (
                'django.template.loaders.filesystem.Loader',
                )
            ),
        )
@@ -150,8 +150,8 @@ class SimpleTemplateResponseTest(TestCase):
        # Create a template response. The context is
        # known to be unpickleable (e.g., a function).
        response = SimpleTemplateResponse('first/test.html', {
            'value': 123,
            'fn': datetime.now,
        })
        self.assertRaises(ContentNotRenderedError,
                          pickle.dumps, response)
@@ -178,8 +178,8 @@ class SimpleTemplateResponseTest(TestCase):

    def test_repickling(self):
        response = SimpleTemplateResponse('first/test.html', {
            'value': 123,
            'fn': datetime.now,
        })
        self.assertRaises(ContentNotRenderedError,
                          pickle.dumps, response)
@@ -191,8 +191,8 @@ class SimpleTemplateResponseTest(TestCase):

    def test_pickling_cookie(self):
        response = SimpleTemplateResponse('first/test.html', {
            'value': 123,
            'fn': datetime.now,
        })

        response.cookies['key'] = 'value'
@@ -284,8 +284,8 @@ class TemplateResponseTest(TestCase):

    def test_repickling(self):
        response = SimpleTemplateResponse('first/test.html', {
            'value': 123,
            'fn': datetime.now,
        })
        self.assertRaises(ContentNotRenderedError,
                          pickle.dumps, response)
@@ -1217,10 +1217,10 @@ class UploadedFileEncodingTest(TestCase):
        self.assertEqual(b'Content-Type: text/plain',
                         encode_file('IGNORE', 'IGNORE', DummyFile("file.txt"))[2])
        self.assertIn(encode_file('IGNORE', 'IGNORE', DummyFile("file.zip"))[2], (
            b'Content-Type: application/x-compress',
            b'Content-Type: application/x-zip',
            b'Content-Type: application/x-zip-compressed',
            b'Content-Type: application/zip',))
        self.assertEqual(b'Content-Type: application/octet-stream',
                         encode_file('IGNORE', 'IGNORE', DummyFile("file.unknown"))[2])

@@ -208,51 +208,57 @@ class LegacyDatabaseTests(TestCase):
        morning_min_dt = datetime.datetime(2011, 9, 1, 3, 20, 40)
        afternoon_min_dt = datetime.datetime(2011, 9, 1, 13, 20, 30)
        self.assertQuerysetEqual(
            Session.objects.annotate(dt=Min('events__dt')).order_by('dt'),
            [morning_min_dt, afternoon_min_dt],
            transform=lambda d: d.dt)
        self.assertQuerysetEqual(
            Session.objects.annotate(dt=Min('events__dt')).filter(dt__lt=afternoon_min_dt),
            [morning_min_dt],
            transform=lambda d: d.dt)
        self.assertQuerysetEqual(
            Session.objects.annotate(dt=Min('events__dt')).filter(dt__gte=afternoon_min_dt),
            [afternoon_min_dt],
            transform=lambda d: d.dt)

    def test_query_datetimes(self):
        Event.objects.create(dt=datetime.datetime(2011, 1, 1, 1, 30, 0))
        Event.objects.create(dt=datetime.datetime(2011, 1, 1, 4, 30, 0))
-        self.assertQuerysetEqual(Event.objects.datetimes('dt', 'year'),
-            [datetime.datetime(2011, 1, 1, 0, 0, 0)],
-            transform=lambda d: d)
-        self.assertQuerysetEqual(Event.objects.datetimes('dt', 'month'),
-            [datetime.datetime(2011, 1, 1, 0, 0, 0)],
-            transform=lambda d: d)
-        self.assertQuerysetEqual(Event.objects.datetimes('dt', 'day'),
-            [datetime.datetime(2011, 1, 1, 0, 0, 0)],
-            transform=lambda d: d)
-        self.assertQuerysetEqual(Event.objects.datetimes('dt', 'hour'),
-            [datetime.datetime(2011, 1, 1, 1, 0, 0),
-             datetime.datetime(2011, 1, 1, 4, 0, 0)],
-            transform=lambda d: d)
-        self.assertQuerysetEqual(Event.objects.datetimes('dt', 'minute'),
-            [datetime.datetime(2011, 1, 1, 1, 30, 0),
-             datetime.datetime(2011, 1, 1, 4, 30, 0)],
-            transform=lambda d: d)
-        self.assertQuerysetEqual(Event.objects.datetimes('dt', 'second'),
-            [datetime.datetime(2011, 1, 1, 1, 30, 0),
-             datetime.datetime(2011, 1, 1, 4, 30, 0)],
-            transform=lambda d: d)
+        self.assertQuerysetEqual(
+            Event.objects.datetimes('dt', 'year'),
+            [datetime.datetime(2011, 1, 1, 0, 0, 0)],
+            transform=lambda d: d)
+        self.assertQuerysetEqual(
+            Event.objects.datetimes('dt', 'month'),
+            [datetime.datetime(2011, 1, 1, 0, 0, 0)],
+            transform=lambda d: d)
+        self.assertQuerysetEqual(
+            Event.objects.datetimes('dt', 'day'),
+            [datetime.datetime(2011, 1, 1, 0, 0, 0)],
+            transform=lambda d: d)
+        self.assertQuerysetEqual(
+            Event.objects.datetimes('dt', 'hour'),
+            [datetime.datetime(2011, 1, 1, 1, 0, 0),
+             datetime.datetime(2011, 1, 1, 4, 0, 0)],
+            transform=lambda d: d)
+        self.assertQuerysetEqual(
+            Event.objects.datetimes('dt', 'minute'),
+            [datetime.datetime(2011, 1, 1, 1, 30, 0),
+             datetime.datetime(2011, 1, 1, 4, 30, 0)],
+            transform=lambda d: d)
+        self.assertQuerysetEqual(
+            Event.objects.datetimes('dt', 'second'),
+            [datetime.datetime(2011, 1, 1, 1, 30, 0),
+             datetime.datetime(2011, 1, 1, 4, 30, 0)],
+            transform=lambda d: d)

    def test_raw_sql(self):
        # Regression test for #17755
        dt = datetime.datetime(2011, 9, 1, 13, 20, 30)
        event = Event.objects.create(dt=dt)
        self.assertQuerysetEqual(
            Event.objects.raw('SELECT * FROM timezones_event WHERE dt = %s', [dt]),
            [event],
            transform=lambda d: d)

    def test_filter_date_field_with_aware_datetime(self):
        # Regression test for #17742
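A note on the helper these hunks keep reindenting: `assertQuerysetEqual(qs, values, transform=...)` maps `transform` over the queryset before comparing with the expected values; the default transform is `repr`, which is why other tests compare against strings such as "<Article: Article 1>", while the datetime tests pass `transform=lambda d: d` to compare the objects themselves. A rough, illustrative model of what the assertion does (not Django's actual implementation):

    def assert_queryset_equal_sketch(testcase, qs, values, transform=repr, ordered=True):
        # Map the transform over the queryset, then compare as a list or as a set.
        items = [transform(obj) for obj in qs]
        if ordered:
            testcase.assertEqual(items, list(values))
        else:
            testcase.assertEqual(set(items), set(values))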
@@ -456,82 +462,94 @@ class NewDatabaseTests(TestCase):
        morning_min_dt = datetime.datetime(2011, 9, 1, 3, 20, 40, tzinfo=EAT)
        afternoon_min_dt = datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT)
        self.assertQuerysetEqual(
            Session.objects.annotate(dt=Min('events__dt')).order_by('dt'),
            [morning_min_dt, afternoon_min_dt],
            transform=lambda d: d.dt)
        self.assertQuerysetEqual(
            Session.objects.annotate(dt=Min('events__dt')).filter(dt__lt=afternoon_min_dt),
            [morning_min_dt],
            transform=lambda d: d.dt)
        self.assertQuerysetEqual(
            Session.objects.annotate(dt=Min('events__dt')).filter(dt__gte=afternoon_min_dt),
            [afternoon_min_dt],
            transform=lambda d: d.dt)

    @skipUnlessDBFeature('has_zoneinfo_database')
    def test_query_datetimes(self):
        Event.objects.create(dt=datetime.datetime(2011, 1, 1, 1, 30, 0, tzinfo=EAT))
        Event.objects.create(dt=datetime.datetime(2011, 1, 1, 4, 30, 0, tzinfo=EAT))
-        self.assertQuerysetEqual(Event.objects.datetimes('dt', 'year'),
-            [datetime.datetime(2011, 1, 1, 0, 0, 0, tzinfo=EAT)],
-            transform=lambda d: d)
-        self.assertQuerysetEqual(Event.objects.datetimes('dt', 'month'),
-            [datetime.datetime(2011, 1, 1, 0, 0, 0, tzinfo=EAT)],
-            transform=lambda d: d)
-        self.assertQuerysetEqual(Event.objects.datetimes('dt', 'day'),
-            [datetime.datetime(2011, 1, 1, 0, 0, 0, tzinfo=EAT)],
-            transform=lambda d: d)
-        self.assertQuerysetEqual(Event.objects.datetimes('dt', 'hour'),
-            [datetime.datetime(2011, 1, 1, 1, 0, 0, tzinfo=EAT),
-             datetime.datetime(2011, 1, 1, 4, 0, 0, tzinfo=EAT)],
-            transform=lambda d: d)
-        self.assertQuerysetEqual(Event.objects.datetimes('dt', 'minute'),
-            [datetime.datetime(2011, 1, 1, 1, 30, 0, tzinfo=EAT),
-             datetime.datetime(2011, 1, 1, 4, 30, 0, tzinfo=EAT)],
-            transform=lambda d: d)
-        self.assertQuerysetEqual(Event.objects.datetimes('dt', 'second'),
-            [datetime.datetime(2011, 1, 1, 1, 30, 0, tzinfo=EAT),
-             datetime.datetime(2011, 1, 1, 4, 30, 0, tzinfo=EAT)],
-            transform=lambda d: d)
+        self.assertQuerysetEqual(
+            Event.objects.datetimes('dt', 'year'),
+            [datetime.datetime(2011, 1, 1, 0, 0, 0, tzinfo=EAT)],
+            transform=lambda d: d)
+        self.assertQuerysetEqual(
+            Event.objects.datetimes('dt', 'month'),
+            [datetime.datetime(2011, 1, 1, 0, 0, 0, tzinfo=EAT)],
+            transform=lambda d: d)
+        self.assertQuerysetEqual(
+            Event.objects.datetimes('dt', 'day'),
+            [datetime.datetime(2011, 1, 1, 0, 0, 0, tzinfo=EAT)],
+            transform=lambda d: d)
+        self.assertQuerysetEqual(
+            Event.objects.datetimes('dt', 'hour'),
+            [datetime.datetime(2011, 1, 1, 1, 0, 0, tzinfo=EAT),
+             datetime.datetime(2011, 1, 1, 4, 0, 0, tzinfo=EAT)],
+            transform=lambda d: d)
+        self.assertQuerysetEqual(
+            Event.objects.datetimes('dt', 'minute'),
+            [datetime.datetime(2011, 1, 1, 1, 30, 0, tzinfo=EAT),
+             datetime.datetime(2011, 1, 1, 4, 30, 0, tzinfo=EAT)],
+            transform=lambda d: d)
+        self.assertQuerysetEqual(
+            Event.objects.datetimes('dt', 'second'),
+            [datetime.datetime(2011, 1, 1, 1, 30, 0, tzinfo=EAT),
+             datetime.datetime(2011, 1, 1, 4, 30, 0, tzinfo=EAT)],
+            transform=lambda d: d)

    @skipUnlessDBFeature('has_zoneinfo_database')
    def test_query_datetimes_in_other_timezone(self):
        Event.objects.create(dt=datetime.datetime(2011, 1, 1, 1, 30, 0, tzinfo=EAT))
        Event.objects.create(dt=datetime.datetime(2011, 1, 1, 4, 30, 0, tzinfo=EAT))
        with timezone.override(UTC):
-            self.assertQuerysetEqual(Event.objects.datetimes('dt', 'year'),
-                [datetime.datetime(2010, 1, 1, 0, 0, 0, tzinfo=UTC),
-                 datetime.datetime(2011, 1, 1, 0, 0, 0, tzinfo=UTC)],
-                transform=lambda d: d)
-            self.assertQuerysetEqual(Event.objects.datetimes('dt', 'month'),
-                [datetime.datetime(2010, 12, 1, 0, 0, 0, tzinfo=UTC),
-                 datetime.datetime(2011, 1, 1, 0, 0, 0, tzinfo=UTC)],
-                transform=lambda d: d)
-            self.assertQuerysetEqual(Event.objects.datetimes('dt', 'day'),
-                [datetime.datetime(2010, 12, 31, 0, 0, 0, tzinfo=UTC),
-                 datetime.datetime(2011, 1, 1, 0, 0, 0, tzinfo=UTC)],
-                transform=lambda d: d)
-            self.assertQuerysetEqual(Event.objects.datetimes('dt', 'hour'),
-                [datetime.datetime(2010, 12, 31, 22, 0, 0, tzinfo=UTC),
-                 datetime.datetime(2011, 1, 1, 1, 0, 0, tzinfo=UTC)],
-                transform=lambda d: d)
-            self.assertQuerysetEqual(Event.objects.datetimes('dt', 'minute'),
-                [datetime.datetime(2010, 12, 31, 22, 30, 0, tzinfo=UTC),
-                 datetime.datetime(2011, 1, 1, 1, 30, 0, tzinfo=UTC)],
-                transform=lambda d: d)
-            self.assertQuerysetEqual(Event.objects.datetimes('dt', 'second'),
-                [datetime.datetime(2010, 12, 31, 22, 30, 0, tzinfo=UTC),
-                 datetime.datetime(2011, 1, 1, 1, 30, 0, tzinfo=UTC)],
-                transform=lambda d: d)
+            self.assertQuerysetEqual(
+                Event.objects.datetimes('dt', 'year'),
+                [datetime.datetime(2010, 1, 1, 0, 0, 0, tzinfo=UTC),
+                 datetime.datetime(2011, 1, 1, 0, 0, 0, tzinfo=UTC)],
+                transform=lambda d: d)
+            self.assertQuerysetEqual(
+                Event.objects.datetimes('dt', 'month'),
+                [datetime.datetime(2010, 12, 1, 0, 0, 0, tzinfo=UTC),
+                 datetime.datetime(2011, 1, 1, 0, 0, 0, tzinfo=UTC)],
+                transform=lambda d: d)
+            self.assertQuerysetEqual(
+                Event.objects.datetimes('dt', 'day'),
+                [datetime.datetime(2010, 12, 31, 0, 0, 0, tzinfo=UTC),
+                 datetime.datetime(2011, 1, 1, 0, 0, 0, tzinfo=UTC)],
+                transform=lambda d: d)
+            self.assertQuerysetEqual(
+                Event.objects.datetimes('dt', 'hour'),
+                [datetime.datetime(2010, 12, 31, 22, 0, 0, tzinfo=UTC),
+                 datetime.datetime(2011, 1, 1, 1, 0, 0, tzinfo=UTC)],
+                transform=lambda d: d)
+            self.assertQuerysetEqual(
+                Event.objects.datetimes('dt', 'minute'),
+                [datetime.datetime(2010, 12, 31, 22, 30, 0, tzinfo=UTC),
+                 datetime.datetime(2011, 1, 1, 1, 30, 0, tzinfo=UTC)],
+                transform=lambda d: d)
+            self.assertQuerysetEqual(
+                Event.objects.datetimes('dt', 'second'),
+                [datetime.datetime(2010, 12, 31, 22, 30, 0, tzinfo=UTC),
+                 datetime.datetime(2011, 1, 1, 1, 30, 0, tzinfo=UTC)],
+                transform=lambda d: d)

    def test_raw_sql(self):
        # Regression test for #17755
        dt = datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT)
        event = Event.objects.create(dt=dt)
        self.assertQuerysetEqual(
            Event.objects.raw('SELECT * FROM timezones_event WHERE dt = %s', [dt]),
            [event],
            transform=lambda d: d)

    @requires_tz_support
    def test_filter_date_field_with_aware_datetime(self):
@@ -203,7 +203,7 @@ class AtomicTests(TransactionTestCase):
        with self.assertRaises(DatabaseError):
            with transaction.atomic(savepoint=False):
                connection.cursor().execute(
                    "SELECT no_such_col FROM transactions_reporter")
        # prevent atomic from rolling back since we're recovering manually
        self.assertTrue(transaction.get_rollback())
        transaction.set_rollback(False)
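For context on the hunk above: when a query fails inside `transaction.atomic(savepoint=False)`, Django flags the enclosing atomic block for rollback, and a test that recovers manually has to clear that flag with `transaction.set_rollback(False)` or the whole block is rolled back at exit. A rough sketch of the pattern (the table name is a placeholder, and whether later queries actually succeed still depends on the database backend):

    from django.db import DatabaseError, connection, transaction

    def recover_from_broken_query():
        with transaction.atomic():
            try:
                with transaction.atomic(savepoint=False):
                    # Deliberately broken statement: the block gets flagged for rollback.
                    connection.cursor().execute("SELECT no_such_col FROM some_table")
            except DatabaseError:
                pass
            # Clear the inherited flag so the outer block is not rolled back on exit.
            if transaction.get_rollback():
                transaction.set_rollback(False)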
@@ -61,14 +61,14 @@ class TzinfoTests(IgnorePendingDeprecationWarningsMixin, unittest.TestCase):
        # US/Eastern -- we force its representation to "EST"
        tz = LocalTimezone(dt + datetime.timedelta(days=1))
        self.assertEqual(
            repr(datetime.datetime.fromtimestamp(ts - 3600, tz)),
            'datetime.datetime(2010, 11, 7, 0, 0, tzinfo=EST)')
        self.assertEqual(
            repr(datetime.datetime.fromtimestamp(ts, tz)),
            'datetime.datetime(2010, 11, 7, 1, 0, tzinfo=EST)')
        self.assertEqual(
            repr(datetime.datetime.fromtimestamp(ts + 3600, tz)),
            'datetime.datetime(2010, 11, 7, 1, 0, tzinfo=EST)')

    def test_copy(self):
        now = datetime.datetime.now()