Fixed many spelling mistakes in code, comments, and docs.
parent b6dd0afead
commit 93452a70e8
@@ -16,7 +16,7 @@ def delete_selected(modeladmin, request, queryset):
 """
 Default action which deletes the selected objects.

-This action first displays a confirmation page whichs shows all the
+This action first displays a confirmation page which shows all the
 deleteable objects, or, if the user has no permission one of the related
 childs (foreignkeys), a "permission denied" message.

@@ -13,7 +13,7 @@ class PermLookupDict(object):
 return self.user.has_perm("%s.%s" % (self.app_label, perm_name))

 def __iter__(self):
-# To fix 'item in perms.someapp' and __getitem__ iteraction we need to
+# To fix 'item in perms.someapp' and __getitem__ interaction we need to
 # define __iter__. See #18979 for details.
 raise TypeError("PermLookupDict is not iterable.")

@@ -51,7 +51,7 @@ class PostGISGeometryColumns(models.Model):
 class PostGISSpatialRefSys(models.Model, SpatialRefSysMixin):
 """
 The 'spatial_ref_sys' table from PostGIS. See the PostGIS
-documentaiton at Ch. 4.2.1.
+documentation at Ch. 4.2.1.
 """
 srid = models.IntegerField(primary_key=True)
 auth_name = models.CharField(max_length=256)
@@ -406,7 +406,7 @@ class GeoQuerySet(QuerySet):
 SQL function to call.

 settings:
-Dictonary of internal settings to customize for the spatial procedure.
+Dictionary of internal settings to customize for the spatial procedure.

 Public Keyword Arguments:

@@ -3,7 +3,7 @@ from django.utils import six


 class OGRGeomType(object):
-"Encapulates OGR Geometry Types."
+"Encapsulates OGR Geometry Types."

 wkb25bit = -2147483648

@@ -78,7 +78,7 @@ to_hex = BinOutput('GEOSGeomToHEX_buf')
 to_wkb = BinOutput('GEOSGeomToWKB_buf')
 to_wkt = StringFromGeom('GEOSGeomToWKT')

-# The GEOS geometry type, typeid, num_coordites and number of geometries
+# The GEOS geometry type, typeid, num_coordinates and number of geometries
 geos_normalize = IntFromGeom('GEOSNormalize')
 geos_type = StringFromGeom('GEOSGeomType')
 geos_typeid = IntFromGeom('GEOSGeomTypeId')
@@ -7,7 +7,7 @@ from django.contrib.gis.geos.prototypes.errcheck import check_predicate

 # Prepared geometry constructor and destructors.
 geos_prepare = GEOSFuncFactory('GEOSPrepare', argtypes=[GEOM_PTR], restype=PREPGEOM_PTR)
-prepared_destroy = GEOSFuncFactory('GEOSPreparedGeom_destroy', argtpes=[PREPGEOM_PTR])
+prepared_destroy = GEOSFuncFactory('GEOSPreparedGeom_destroy', argtypes=[PREPGEOM_PTR])


 # Prepared geometry binary predicate support.
@@ -44,7 +44,7 @@ class KMLSitemap(Sitemap):

 def get_urls(self, page=1, site=None, protocol=None):
 """
-This method is overrridden so the appropriate `geo_format` attribute
+This method is overridden so the appropriate `geo_format` attribute
 is placed on each URL element.
 """
 urls = Sitemap.get_urls(self, page=page, site=site, protocol=protocol)
@@ -40,7 +40,7 @@ class Command(BaseCommand):
 yield "# You'll have to do the following manually to clean this up:"
 yield "# * Rearrange models' order"
 yield "# * Make sure each model has one field with primary_key=True"
-yield "# * Make sure each ForeignKey has `on_delete` set to the desidered behavior."
+yield "# * Make sure each ForeignKey has `on_delete` set to the desired behavior."
 yield (
 "# * Remove `managed = False` lines if you wish to allow "
 "Django to create, modify, and delete the table"
@@ -309,7 +309,7 @@ class BaseExpression(object):
 Does this expression contain a reference to some of the
 existing aggregates? If so, returns the aggregate and also
 the lookup parts that *weren't* found. So, if
-exsiting_aggregates = {'max_id': Max('id')}
+existing_aggregates = {'max_id': Max('id')}
 self.name = 'max_id'
 queryset.filter(max_id__range=[10,100])
 then this method will return Max('id') and those parts of the
@@ -354,9 +354,9 @@ class Token(object):
 for bit in bits:
 # Handle translation-marked template pieces
 if bit.startswith(('_("', "_('")):
-sentinal = bit[2] + ')'
+sentinel = bit[2] + ')'
 trans_bit = [bit]
-while not bit.endswith(sentinal):
+while not bit.endswith(sentinel):
 bit = next(bits)
 trans_bit.append(bit)
 bit = ' '.join(trans_bit)
@@ -797,7 +797,7 @@ def default_if_none(value, arg):

 @register.filter(is_safe=False)
 def divisibleby(value, arg):
-"""Returns True if the value is devisible by the argument."""
+"""Returns True if the value is divisible by the argument."""
 return int(value) % int(arg) == 0


@@ -764,7 +764,7 @@ def do_for(parser, token):
 than -- the following::

 <ul>
-{% if althete_list %}
+{% if athlete_list %}
 {% for athlete in athlete_list %}
 <li>{{ athlete.name }}</li>
 {% endfor %}
@@ -84,7 +84,7 @@ class RemoteTestResult(object):
 def test_index(self):
 return self.testsRun - 1

-def check_pickleable(self, test, err):
+def check_picklable(self, test, err):
 # Ensure that sys.exc_info() tuples are picklable. This displays a
 # clear multiprocessing.pool.RemoteTraceback generated in the child
 # process instead of a multiprocessing.pool.MaybeEncodingError, making
@@ -152,12 +152,12 @@ failure and get a correct traceback.
 self.events.append(('stopTest', self.test_index))

 def addError(self, test, err):
-self.check_pickleable(test, err)
+self.check_picklable(test, err)
 self.events.append(('addError', self.test_index, err))
 self.stop_if_failfast()

 def addFailure(self, test, err):
-self.check_pickleable(test, err)
+self.check_picklable(test, err)
 self.events.append(('addFailure', self.test_index, err))
 self.stop_if_failfast()

@@ -177,7 +177,7 @@ failure and get a correct traceback.
 # expected failure occurs.
 if tblib is None:
 err = err[0], err[1], None
-self.check_pickleable(test, err)
+self.check_picklable(test, err)
 self.events.append(('addExpectedFailure', self.test_index, err))

 def addUnexpectedSuccess(self, test):
@@ -299,7 +299,7 @@ class ParallelTestSuite(unittest.TestSuite):
 To minimize pickling errors when getting results from workers:

 - pass back numeric indexes in self.subsuites instead of tests
-- make tracebacks pickleable with tblib, if available
+- make tracebacks picklable with tblib, if available

 Even with tblib, errors may still occur for dynamically created
 exception classes such Model.DoesNotExist which cannot be unpickled.
@@ -118,7 +118,7 @@ div.admonition-philosophy { padding-left:65px; background:url(docicons-philosoph
 div.admonition-behind-the-scenes { padding-left:65px; background:url(docicons-behindscenes.png) .8em .8em no-repeat;}
 .admonition.warning { background:url(docicons-warning.png) .8em .8em no-repeat; border:1px solid #ffc83c;}

-/*** versoinadded/changes ***/
+/*** versionadded/changes ***/
 div.versionadded, div.versionchanged { }
 div.versionadded span.title, div.versionchanged span.title, span.versionmodified { font-weight: bold; }
 div.versionadded, div.versionchanged, div.deprecated { color:#555; }
@@ -6,7 +6,7 @@
 # This file is execfile()d with the current directory set to its containing dir.
 #
 # The contents of this file are pickled, so don't put values in the namespace
-# that aren't pickleable (module imports are okay, they're removed automatically).
+# that aren't picklable (module imports are okay, they're removed automatically).
 #
 # All configuration values have a default; values that are commented out
 # serve to show the default.
@@ -851,7 +851,7 @@ This example illustrates all possible attributes and methods for a

 def item_updateddate(self):
 """
-Returns the updateddated for every item in the feed.
+Returns the updateddate for every item in the feed.
 """

 item_updateddate = datetime.datetime(2005, 5, 3) # Hard-coded updateddate.
@@ -558,7 +558,7 @@ Peucker
 pgAdmin
 phishing
 php
-pickleable
+picklable
 picosecond
 PIL
 pingback
@@ -715,7 +715,7 @@ To apply permission checks to :doc:`class-based views

 .. versionadded:: 1.9

-This mixin, just like the ``permisison_required``
+This mixin, just like the ``permission_required``
 decorator, checks whether the user accessing a view has all given
 permissions. You should specify the permission (or an iterable of
 permissions) using the ``permission_required`` parameter::
@@ -657,7 +657,7 @@ Technical details

 * The session dictionary accepts any :mod:`json` serializable value when using
 :class:`~django.contrib.sessions.serializers.JSONSerializer` or any
-pickleable Python object when using
+picklable Python object when using
 :class:`~django.contrib.sessions.serializers.PickleSerializer`. See the
 :mod:`pickle` module for more information.

@@ -818,7 +818,7 @@ class SeleniumFirefoxTests(AdminSeleniumWebDriverTestCase):

 def test_add_row_selection(self):
 """
-Ensure that the status line for selected rows gets updated correcly (#22038)
+Ensure that the status line for selected rows gets updated correctly (#22038)
 """
 self.admin_login(username='super', password='secret')
 self.selenium.get('%s%s' % (self.live_server_url,
@@ -45,7 +45,7 @@ class TestDataMixin(object):
 is_staff=True, last_login=datetime(2007, 5, 30, 13, 20, 10),
 date_joined=datetime(2007, 5, 30, 13, 20, 10)
 )
-models.Car.objects.create(id=1, owner=cls.u1, make='Volkswagon', model='Passat')
+models.Car.objects.create(id=1, owner=cls.u1, make='Volkswagen', model='Passat')
 models.Car.objects.create(id=2, owner=cls.u2, make='BMW', model='M3')


@@ -206,7 +206,7 @@ class AdminFormfieldForDBFieldWithRequestTests(TestDataMixin, TestCase):
 self.client.login(username="super", password="secret")
 response = self.client.get(reverse('admin:admin_widgets_cartire_add'))
 self.assertNotContains(response, "BMW M3")
-self.assertContains(response, "Volkswagon Passat")
+self.assertContains(response, "Volkswagen Passat")


 @override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
@@ -509,7 +509,7 @@ class AggregateTestCase(TestCase):

 def test_sum_distinct_aggregate(self):
 """
-Sum on a distict() QuerySet should aggregate only the distinct items.
+Sum on a distinct() QuerySet should aggregate only the distinct items.
 """
 authors = Author.objects.filter(book__in=[5, 6])
 self.assertEqual(authors.count(), 3)
@@ -1346,9 +1346,9 @@ class AggregationTests(TestCase):
 in group by.
 """
 qs = Book.objects.annotate(
-acount=Count('authors')
+account=Count('authors')
 ).filter(
-acount=F('publisher__num_awards')
+account=F('publisher__num_awards')
 )
 self.assertQuerysetEqual(
 qs, ['Sams Teach Yourself Django in 24 Hours'],
@@ -392,7 +392,7 @@ class UserChangeFormTest(TestDataMixin, TestCase):
 # Just check we can create it
 MyUserForm({})

-def test_unsuable_password(self):
+def test_unusable_password(self):
 user = User.objects.get(username='empty_password')
 user.set_unusable_password()
 user.save()
@@ -543,7 +543,7 @@ class LoginTest(AuthViewsTestCase):
 for bad_url in ('http://example.com',
 'http:///example.com',
 'https://example.com',
-'ftp://exampel.com',
+'ftp://example.com',
 '///example.com',
 '//example.com',
 'javascript:alert("XSS")'):
@@ -564,7 +564,7 @@ class LoginTest(AuthViewsTestCase):
 # These URLs *should* still pass the security check
 for good_url in ('/view/?param=http://example.com',
 '/view/?param=https://example.com',
-'/view?param=ftp://exampel.com',
+'/view?param=ftp://example.com',
 'view/?param=//example.com',
 'https://testserver/',
 'HTTPS://testserver/',
@@ -830,7 +830,7 @@ class LogoutTest(AuthViewsTestCase):
 for bad_url in ('http://example.com',
 'http:///example.com',
 'https://example.com',
-'ftp://exampel.com',
+'ftp://example.com',
 '///example.com',
 '//example.com',
 'javascript:alert("XSS")'):
@@ -849,7 +849,7 @@ class LogoutTest(AuthViewsTestCase):
 # These URLs *should* still pass the security check
 for good_url in ('/view/?param=http://example.com',
 '/view/?param=https://example.com',
-'/view?param=ftp://exampel.com',
+'/view?param=ftp://example.com',
 'view/?param=//example.com',
 'https://testserver/',
 'HTTPS://testserver/',
@@ -1016,6 +1016,6 @@ class UUIDUserTests(TestCase):
 })
 self.assertRedirects(response, user_change_url)
 row = LogEntry.objects.latest('id')
-self.assertEqual(row.user_id, 1) # harcoded in CustomUserAdmin.log_change()
+self.assertEqual(row.user_id, 1) # hardcoded in CustomUserAdmin.log_change()
 self.assertEqual(row.object_id, str(u.pk))
 self.assertEqual(row.change_message, 'Changed password.')
@@ -65,10 +65,10 @@ class BulkCreateTests(TestCase):
 }, attrgetter("name"), ordered=False)

 ProxyProxyCountry.objects.bulk_create([
-ProxyProxyCountry(name="Neitherlands", iso_two_letter="NT"),
+ProxyProxyCountry(name="Netherlands", iso_two_letter="NT"),
 ])
 self.assertQuerysetEqual(ProxyProxyCountry.objects.all(), {
-"Qwghlm", "Tortall", "Neitherlands",
+"Qwghlm", "Tortall", "Netherlands",
 }, attrgetter("name"), ordered=False)

 def test_non_auto_increment_pk(self):
@@ -224,7 +224,7 @@ class CheckStrictTransportSecurityTest(SimpleTestCase):
 @override_settings(
 MIDDLEWARE_CLASSES=[],
 SECURE_HSTS_SECONDS=0)
-def test_no_sts_no_middlware(self):
+def test_no_sts_no_middleware(self):
 """
 Don't warn if SECURE_HSTS_SECONDS isn't > 0 and SecurityMiddleware isn't
 installed.
@@ -258,7 +258,7 @@ class CheckStrictTransportSecuritySubdomainsTest(SimpleTestCase):
 MIDDLEWARE_CLASSES=[],
 SECURE_HSTS_INCLUDE_SUBDOMAINS=False,
 SECURE_HSTS_SECONDS=3600)
-def test_no_sts_subdomains_no_middlware(self):
+def test_no_sts_subdomains_no_middleware(self):
 """
 Don't warn if SecurityMiddleware isn't installed.
 """
@@ -415,7 +415,7 @@ class CheckSSLRedirectTest(SimpleTestCase):
 @override_settings(
 MIDDLEWARE_CLASSES=[],
 SECURE_SSL_REDIRECT=False)
-def test_no_ssl_redirect_no_middlware(self):
+def test_no_ssl_redirect_no_middleware(self):
 """
 Don't warn if SECURE_SSL_REDIRECT is False and SecurityMiddleware isn't
 installed.
@@ -392,7 +392,7 @@ class CsrfViewMiddlewareTest(SimpleTestCase):
 def test_https_csrf_wildcard_trusted_origin_allowed(self):
 """
 A POST HTTPS request with a referer that matches a CSRF_TRUSTED_ORIGINS
-wilcard is accepted.
+wildcard is accepted.
 """
 req = self._get_POST_request_with_token()
 req._is_secure_override = True
@@ -1073,7 +1073,7 @@ class CaseExpressionTests(TestCase):
 lambda x: (x, x.foo)
 )

-def test_join_promotion_multiple_annonations(self):
+def test_join_promotion_multiple_annotations(self):
 o = CaseTestModel.objects.create(integer=1, integer2=1, string='1')
 # Testing that:
 # 1. There isn't any object on the remote side of the fk_rel
@@ -749,7 +749,7 @@ class FormsTestCase(SimpleTestCase):
 '<input type="hidden" name="when_1" value="01:01" id="id_when_1" />'
 )

-def test_mulitple_choice_checkbox(self):
+def test_multiple_choice_checkbox(self):
 # MultipleChoiceField can also be used with the CheckboxSelectMultiple widget.
 class SongForm(Form):
 name = CharField()
@@ -184,7 +184,7 @@ class FormsModelTestCase(TestCase):
 m.delete()

 def test_boundary_conditions(self):
-# Boundary conditions on a PostitiveIntegerField #########################
+# Boundary conditions on a PositiveIntegerField #########################
 class BoundaryForm(ModelForm):
 class Meta:
 model = BoundaryModel
@@ -425,7 +425,7 @@ class GISFunctionsTests(TestCase):
 union=functions.Union('mpoly', geom),
 )

-# For some reason SpatiaLite does something screwey with the Texas geometry here.
+# For some reason SpatiaLite does something screwy with the Texas geometry here.
 # Also, it doesn't like the null intersection.
 if spatialite:
 qs = qs.exclude(name='Texas')
@@ -309,7 +309,7 @@ class HttpResponseTests(unittest.TestCase):
 h['Content-Disposition'] = 'attachment; filename="%s"' % f
 # This one is triggering http://bugs.python.org/issue20747, that is Python
 # will itself insert a newline in the header
-h['Content-Disposition'] = 'attachement; filename="EdelRot_Blu\u0308te (3)-0.JPG"'
+h['Content-Disposition'] = 'attachment; filename="EdelRot_Blu\u0308te (3)-0.JPG"'

 def test_newlines_in_headers(self):
 # Bug #10188: Do not allow newlines in headers (CR or LF)
@@ -9,6 +9,6 @@ dummy2 = _("This is another translatable string.")
 # This file has a literal with plural forms. When processed first, makemessages
 # shouldn't create a .po file with duplicate `Plural-Forms` headers
 number = 3
-dummuy3 = ungettext("%(number)s Foo", "%(number)s Foos", number) % {'number': number}
+dummy3 = ungettext("%(number)s Foo", "%(number)s Foos", number) % {'number': number}

 dummy4 = _('Size')
@@ -4,7 +4,7 @@ from django.db import models
 from .base import IsolatedModelsTestCase


-class DeprecatedFieldssTests(IsolatedModelsTestCase):
+class DeprecatedFieldsTests(IsolatedModelsTestCase):
 def test_IPAddressField_deprecated(self):
 class IPAddressModel(models.Model):
 ip = models.IPAddressField()
@@ -351,7 +351,7 @@ class FieldNamesTests(IsolatedModelsTestCase):
 ]

 # Second error because the FK specified in the `through` model
-# `m2msimple` has auto-genererated name longer than allowed.
+# `m2msimple` has auto-generated name longer than allowed.
 # There will be no check errors in the other M2M because it
 # specifies db_column for the FK in `through` model even if the actual
 # name is longer than the limits of the database.
@@ -1504,7 +1504,7 @@ class M2mThroughFieldsTests(IsolatedModelsTestCase):
 ]
 self.assertEqual(expected, errors)

-def test_insersection_foreign_object(self):
+def test_intersection_foreign_object(self):
 class Parent(models.Model):
 a = models.PositiveIntegerField()
 b = models.PositiveIntegerField()
@@ -346,7 +346,7 @@ class M2mThroughReferentialTests(TestCase):
 []
 )

-def test_self_referential_non_symmentrical_first_side(self):
+def test_self_referential_non_symmetrical_first_side(self):
 tony = PersonSelfRefM2M.objects.create(name="Tony")
 chris = PersonSelfRefM2M.objects.create(name="Chris")
 Friendship.objects.create(
@@ -359,7 +359,7 @@ class M2mThroughReferentialTests(TestCase):
 attrgetter("name")
 )

-def test_self_referential_non_symmentrical_second_side(self):
+def test_self_referential_non_symmetrical_second_side(self):
 tony = PersonSelfRefM2M.objects.create(name="Tony")
 chris = PersonSelfRefM2M.objects.create(name="Chris")
 Friendship.objects.create(
@@ -371,7 +371,7 @@ class M2mThroughReferentialTests(TestCase):
 []
 )

-def test_self_referential_non_symmentrical_clear_first_side(self):
+def test_self_referential_non_symmetrical_clear_first_side(self):
 tony = PersonSelfRefM2M.objects.create(name="Tony")
 chris = PersonSelfRefM2M.objects.create(name="Chris")
 Friendship.objects.create(
@@ -392,7 +392,7 @@ class M2mThroughReferentialTests(TestCase):
 attrgetter("name")
 )

-def test_self_referential_symmentrical(self):
+def test_self_referential_symmetrical(self):
 tony = PersonSelfRefM2M.objects.create(name="Tony")
 chris = PersonSelfRefM2M.objects.create(name="Chris")
 Friendship.objects.create(
@@ -197,7 +197,7 @@ class CommonMiddlewareTest(SimpleTestCase):
 response = HttpResponseNotFound()
 r = CommonMiddleware().process_response(request, response)
 self.assertIsNotNone(r,
-"CommonMiddlware failed to return APPEND_SLASH redirect using request.urlconf")
+"CommonMiddleware failed to return APPEND_SLASH redirect using request.urlconf")
 self.assertEqual(r.status_code, 301)
 self.assertEqual(r.url, '/customurlconf/slash/')

@@ -236,7 +236,7 @@ class CommonMiddlewareTest(SimpleTestCase):
 response = HttpResponseNotFound()
 r = CommonMiddleware().process_response(request, response)
 self.assertIsNotNone(r,
-"CommonMiddlware failed to return APPEND_SLASH redirect using request.urlconf")
+"CommonMiddleware failed to return APPEND_SLASH redirect using request.urlconf")
 self.assertEqual(r.status_code, 301)
 self.assertEqual(
 r.url,
@@ -17,9 +17,9 @@ from django.test.utils import isolate_lru_cache
 from .models import FoodManager, FoodQuerySet


-class DeconstructableObject(object):
+class DeconstructibleObject(object):
 """
-A custom deconstructable object.
+A custom deconstructible object.
 """

 def __init__(self, *args, **kwargs):
@@ -60,117 +60,117 @@ class AutodetectorTests(TestCase):
 ("id", models.AutoField(primary_key=True)),
 ("name", models.CharField(max_length=200, default='Ada Lovelace')),
 ])
-author_name_deconstructable_1 = ModelState("testapp", "Author", [
+author_name_deconstructible_1 = ModelState("testapp", "Author", [
 ("id", models.AutoField(primary_key=True)),
-("name", models.CharField(max_length=200, default=DeconstructableObject())),
+("name", models.CharField(max_length=200, default=DeconstructibleObject())),
 ])
-author_name_deconstructable_2 = ModelState("testapp", "Author", [
+author_name_deconstructible_2 = ModelState("testapp", "Author", [
 ("id", models.AutoField(primary_key=True)),
-("name", models.CharField(max_length=200, default=DeconstructableObject())),
+("name", models.CharField(max_length=200, default=DeconstructibleObject())),
 ])
-author_name_deconstructable_3 = ModelState("testapp", "Author", [
+author_name_deconstructible_3 = ModelState("testapp", "Author", [
 ("id", models.AutoField(primary_key=True)),
 ("name", models.CharField(max_length=200, default=models.IntegerField())),
 ])
-author_name_deconstructable_4 = ModelState("testapp", "Author", [
+author_name_deconstructible_4 = ModelState("testapp", "Author", [
 ("id", models.AutoField(primary_key=True)),
 ("name", models.CharField(max_length=200, default=models.IntegerField())),
 ])
-author_name_deconstructable_list_1 = ModelState("testapp", "Author", [
+author_name_deconstructible_list_1 = ModelState("testapp", "Author", [
 ("id", models.AutoField(primary_key=True)),
-("name", models.CharField(max_length=200, default=[DeconstructableObject(), 123])),
+("name", models.CharField(max_length=200, default=[DeconstructibleObject(), 123])),
 ])
-author_name_deconstructable_list_2 = ModelState("testapp", "Author", [
+author_name_deconstructible_list_2 = ModelState("testapp", "Author", [
 ("id", models.AutoField(primary_key=True)),
-("name", models.CharField(max_length=200, default=[DeconstructableObject(), 123])),
+("name", models.CharField(max_length=200, default=[DeconstructibleObject(), 123])),
 ])
-author_name_deconstructable_list_3 = ModelState("testapp", "Author", [
+author_name_deconstructible_list_3 = ModelState("testapp", "Author", [
 ("id", models.AutoField(primary_key=True)),
-("name", models.CharField(max_length=200, default=[DeconstructableObject(), 999])),
+("name", models.CharField(max_length=200, default=[DeconstructibleObject(), 999])),
 ])
-author_name_deconstructable_tuple_1 = ModelState("testapp", "Author", [
+author_name_deconstructible_tuple_1 = ModelState("testapp", "Author", [
 ("id", models.AutoField(primary_key=True)),
-("name", models.CharField(max_length=200, default=(DeconstructableObject(), 123))),
+("name", models.CharField(max_length=200, default=(DeconstructibleObject(), 123))),
 ])
-author_name_deconstructable_tuple_2 = ModelState("testapp", "Author", [
+author_name_deconstructible_tuple_2 = ModelState("testapp", "Author", [
 ("id", models.AutoField(primary_key=True)),
-("name", models.CharField(max_length=200, default=(DeconstructableObject(), 123))),
+("name", models.CharField(max_length=200, default=(DeconstructibleObject(), 123))),
 ])
-author_name_deconstructable_tuple_3 = ModelState("testapp", "Author", [
+author_name_deconstructible_tuple_3 = ModelState("testapp", "Author", [
 ("id", models.AutoField(primary_key=True)),
-("name", models.CharField(max_length=200, default=(DeconstructableObject(), 999))),
+("name", models.CharField(max_length=200, default=(DeconstructibleObject(), 999))),
 ])
-author_name_deconstructable_dict_1 = ModelState("testapp", "Author", [
+author_name_deconstructible_dict_1 = ModelState("testapp", "Author", [
 ("id", models.AutoField(primary_key=True)),
 ("name", models.CharField(max_length=200, default={
-'item': DeconstructableObject(), 'otheritem': 123
+'item': DeconstructibleObject(), 'otheritem': 123
 })),
 ])
-author_name_deconstructable_dict_2 = ModelState("testapp", "Author", [
+author_name_deconstructible_dict_2 = ModelState("testapp", "Author", [
 ("id", models.AutoField(primary_key=True)),
 ("name", models.CharField(max_length=200, default={
-'item': DeconstructableObject(), 'otheritem': 123
+'item': DeconstructibleObject(), 'otheritem': 123
 })),
 ])
-author_name_deconstructable_dict_3 = ModelState("testapp", "Author", [
+author_name_deconstructible_dict_3 = ModelState("testapp", "Author", [
 ("id", models.AutoField(primary_key=True)),
 ("name", models.CharField(max_length=200, default={
-'item': DeconstructableObject(), 'otheritem': 999
+'item': DeconstructibleObject(), 'otheritem': 999
 })),
 ])
-author_name_nested_deconstructable_1 = ModelState("testapp", "Author", [
+author_name_nested_deconstructible_1 = ModelState("testapp", "Author", [
 ("id", models.AutoField(primary_key=True)),
-("name", models.CharField(max_length=200, default=DeconstructableObject(
-DeconstructableObject(1),
-(DeconstructableObject('t1'), DeconstructableObject('t2'),),
-a=DeconstructableObject('A'),
-b=DeconstructableObject(B=DeconstructableObject('c')),
+("name", models.CharField(max_length=200, default=DeconstructibleObject(
+DeconstructibleObject(1),
+(DeconstructibleObject('t1'), DeconstructibleObject('t2'),),
+a=DeconstructibleObject('A'),
+b=DeconstructibleObject(B=DeconstructibleObject('c')),
 ))),
 ])
-author_name_nested_deconstructable_2 = ModelState("testapp", "Author", [
+author_name_nested_deconstructible_2 = ModelState("testapp", "Author", [
 ("id", models.AutoField(primary_key=True)),
-("name", models.CharField(max_length=200, default=DeconstructableObject(
-DeconstructableObject(1),
-(DeconstructableObject('t1'), DeconstructableObject('t2'),),
-a=DeconstructableObject('A'),
-b=DeconstructableObject(B=DeconstructableObject('c')),
+("name", models.CharField(max_length=200, default=DeconstructibleObject(
+DeconstructibleObject(1),
+(DeconstructibleObject('t1'), DeconstructibleObject('t2'),),
+a=DeconstructibleObject('A'),
+b=DeconstructibleObject(B=DeconstructibleObject('c')),
 ))),
 ])
-author_name_nested_deconstructable_changed_arg = ModelState("testapp", "Author", [
+author_name_nested_deconstructible_changed_arg = ModelState("testapp", "Author", [
 ("id", models.AutoField(primary_key=True)),
-("name", models.CharField(max_length=200, default=DeconstructableObject(
-DeconstructableObject(1),
-(DeconstructableObject('t1'), DeconstructableObject('t2-changed'),),
-a=DeconstructableObject('A'),
-b=DeconstructableObject(B=DeconstructableObject('c')),
+("name", models.CharField(max_length=200, default=DeconstructibleObject(
+DeconstructibleObject(1),
+(DeconstructibleObject('t1'), DeconstructibleObject('t2-changed'),),
+a=DeconstructibleObject('A'),
+b=DeconstructibleObject(B=DeconstructibleObject('c')),
 ))),
 ])
-author_name_nested_deconstructable_extra_arg = ModelState("testapp", "Author", [
+author_name_nested_deconstructible_extra_arg = ModelState("testapp", "Author", [
 ("id", models.AutoField(primary_key=True)),
-("name", models.CharField(max_length=200, default=DeconstructableObject(
-DeconstructableObject(1),
-(DeconstructableObject('t1'), DeconstructableObject('t2'),),
+("name", models.CharField(max_length=200, default=DeconstructibleObject(
+DeconstructibleObject(1),
+(DeconstructibleObject('t1'), DeconstructibleObject('t2'),),
 None,
-a=DeconstructableObject('A'),
-b=DeconstructableObject(B=DeconstructableObject('c')),
+a=DeconstructibleObject('A'),
+b=DeconstructibleObject(B=DeconstructibleObject('c')),
 ))),
 ])
-author_name_nested_deconstructable_changed_kwarg = ModelState("testapp", "Author", [
+author_name_nested_deconstructible_changed_kwarg = ModelState("testapp", "Author", [
 ("id", models.AutoField(primary_key=True)),
-("name", models.CharField(max_length=200, default=DeconstructableObject(
-DeconstructableObject(1),
-(DeconstructableObject('t1'), DeconstructableObject('t2'),),
-a=DeconstructableObject('A'),
-b=DeconstructableObject(B=DeconstructableObject('c-changed')),
+("name", models.CharField(max_length=200, default=DeconstructibleObject(
+DeconstructibleObject(1),
+(DeconstructibleObject('t1'), DeconstructibleObject('t2'),),
+a=DeconstructibleObject('A'),
+b=DeconstructibleObject(B=DeconstructibleObject('c-changed')),
 ))),
 ])
-author_name_nested_deconstructable_extra_kwarg = ModelState("testapp", "Author", [
+author_name_nested_deconstructible_extra_kwarg = ModelState("testapp", "Author", [
 ("id", models.AutoField(primary_key=True)),
-("name", models.CharField(max_length=200, default=DeconstructableObject(
-DeconstructableObject(1),
-(DeconstructableObject('t1'), DeconstructableObject('t2'),),
-a=DeconstructableObject('A'),
-b=DeconstructableObject(B=DeconstructableObject('c')),
+("name", models.CharField(max_length=200, default=DeconstructibleObject(
+DeconstructibleObject(1),
+(DeconstructibleObject('t1'), DeconstructibleObject('t2'),),
+a=DeconstructibleObject('A'),
+b=DeconstructibleObject(B=DeconstructibleObject('c')),
 c=None,
 ))),
 ])
@@ -1383,13 +1383,13 @@ class AutodetectorTests(TestCase):
 self.assertOperationTypes(changes, 'testapp', 0, ["AddField"])
 self.assertOperationAttributes(changes, 'testapp', 0, 0, name="name")

-def test_custom_deconstructable(self):
+def test_custom_deconstructible(self):
 """
 Two instances which deconstruct to the same value aren't considered a
 change.
 """
-before = self.make_project_state([self.author_name_deconstructable_1])
-after = self.make_project_state([self.author_name_deconstructable_2])
+before = self.make_project_state([self.author_name_deconstructible_1])
+after = self.make_project_state([self.author_name_deconstructible_2])
 autodetector = MigrationAutodetector(before, after)
 changes = autodetector._detect_changes()
 # Right number of migrations?
@@ -1397,70 +1397,70 @@ class AutodetectorTests(TestCase):

 def test_deconstruct_field_kwarg(self):
 """Field instances are handled correctly by nested deconstruction."""
-before = self.make_project_state([self.author_name_deconstructable_3])
-after = self.make_project_state([self.author_name_deconstructable_4])
+before = self.make_project_state([self.author_name_deconstructible_3])
+after = self.make_project_state([self.author_name_deconstructible_4])
 autodetector = MigrationAutodetector(before, after)
 changes = autodetector._detect_changes()
 self.assertEqual(changes, {})

-def test_deconstructable_list(self):
+def test_deconstructible_list(self):
 """Nested deconstruction descends into lists."""
 # When lists contain items that deconstruct to identical values, those lists
 # should be considered equal for the purpose of detecting state changes
 # (even if the original items are unequal).
-before = self.make_project_state([self.author_name_deconstructable_list_1])
-after = self.make_project_state([self.author_name_deconstructable_list_2])
+before = self.make_project_state([self.author_name_deconstructible_list_1])
+after = self.make_project_state([self.author_name_deconstructible_list_2])
 autodetector = MigrationAutodetector(before, after)
 changes = autodetector._detect_changes()
 self.assertEqual(changes, {})

 # Legitimate differences within the deconstructed lists should be reported
 # as a change
-before = self.make_project_state([self.author_name_deconstructable_list_1])
-after = self.make_project_state([self.author_name_deconstructable_list_3])
+before = self.make_project_state([self.author_name_deconstructible_list_1])
+after = self.make_project_state([self.author_name_deconstructible_list_3])
 autodetector = MigrationAutodetector(before, after)
 changes = autodetector._detect_changes()
 self.assertEqual(len(changes), 1)

-def test_deconstructable_tuple(self):
+def test_deconstructible_tuple(self):
 """Nested deconstruction descends into tuples."""
 # When tuples contain items that deconstruct to identical values, those tuples
 # should be considered equal for the purpose of detecting state changes
 # (even if the original items are unequal).
-before = self.make_project_state([self.author_name_deconstructable_tuple_1])
-after = self.make_project_state([self.author_name_deconstructable_tuple_2])
+before = self.make_project_state([self.author_name_deconstructible_tuple_1])
+after = self.make_project_state([self.author_name_deconstructible_tuple_2])
 autodetector = MigrationAutodetector(before, after)
 changes = autodetector._detect_changes()
 self.assertEqual(changes, {})

 # Legitimate differences within the deconstructed tuples should be reported
 # as a change
-before = self.make_project_state([self.author_name_deconstructable_tuple_1])
-after = self.make_project_state([self.author_name_deconstructable_tuple_3])
+before = self.make_project_state([self.author_name_deconstructible_tuple_1])
+after = self.make_project_state([self.author_name_deconstructible_tuple_3])
 autodetector = MigrationAutodetector(before, after)
 changes = autodetector._detect_changes()
 self.assertEqual(len(changes), 1)

-def test_deconstructable_dict(self):
+def test_deconstructible_dict(self):
 """Nested deconstruction descends into dict values."""
 # When dicts contain items whose values deconstruct to identical values,
 # those dicts should be considered equal for the purpose of detecting
 # state changes (even if the original values are unequal).
-before = self.make_project_state([self.author_name_deconstructable_dict_1])
-after = self.make_project_state([self.author_name_deconstructable_dict_2])
+before = self.make_project_state([self.author_name_deconstructible_dict_1])
+after = self.make_project_state([self.author_name_deconstructible_dict_2])
 autodetector = MigrationAutodetector(before, after)
 changes = autodetector._detect_changes()
 self.assertEqual(changes, {})

 # Legitimate differences within the deconstructed dicts should be reported
 # as a change
-before = self.make_project_state([self.author_name_deconstructable_dict_1])
-after = self.make_project_state([self.author_name_deconstructable_dict_3])
+before = self.make_project_state([self.author_name_deconstructible_dict_1])
+after = self.make_project_state([self.author_name_deconstructible_dict_3])
 autodetector = MigrationAutodetector(before, after)
 changes = autodetector._detect_changes()
 self.assertEqual(len(changes), 1)

-def test_nested_deconstructable_objects(self):
+def test_nested_deconstructible_objects(self):
 """
 Nested deconstruction is applied recursively to the args/kwargs of
 deconstructed objects.
@@ -1468,45 +1468,45 @@ class AutodetectorTests(TestCase):
 # If the items within a deconstructed object's args/kwargs have the same
 # deconstructed values - whether or not the items themselves are different
 # instances - then the object as a whole is regarded as unchanged.
-before = self.make_project_state([self.author_name_nested_deconstructable_1])
-after = self.make_project_state([self.author_name_nested_deconstructable_2])
+before = self.make_project_state([self.author_name_nested_deconstructible_1])
+after = self.make_project_state([self.author_name_nested_deconstructible_2])
 autodetector = MigrationAutodetector(before, after)
 changes = autodetector._detect_changes()
 self.assertEqual(changes, {})

 # Differences that exist solely within the args list of a deconstructed object
 # should be reported as changes
-before = self.make_project_state([self.author_name_nested_deconstructable_1])
-after = self.make_project_state([self.author_name_nested_deconstructable_changed_arg])
+before = self.make_project_state([self.author_name_nested_deconstructible_1])
+after = self.make_project_state([self.author_name_nested_deconstructible_changed_arg])
 autodetector = MigrationAutodetector(before, after)
 changes = autodetector._detect_changes()
 self.assertEqual(len(changes), 1)

 # Additional args should also be reported as a change
-before = self.make_project_state([self.author_name_nested_deconstructable_1])
-after = self.make_project_state([self.author_name_nested_deconstructable_extra_arg])
+before = self.make_project_state([self.author_name_nested_deconstructible_1])
+after = self.make_project_state([self.author_name_nested_deconstructible_extra_arg])
 autodetector = MigrationAutodetector(before, after)
 changes = autodetector._detect_changes()
 self.assertEqual(len(changes), 1)

 # Differences that exist solely within the kwargs dict of a deconstructed object
 # should be reported as changes
-before = self.make_project_state([self.author_name_nested_deconstructable_1])
-after = self.make_project_state([self.author_name_nested_deconstructable_changed_kwarg])
+before = self.make_project_state([self.author_name_nested_deconstructible_1])
+after = self.make_project_state([self.author_name_nested_deconstructible_changed_kwarg])
 autodetector = MigrationAutodetector(before, after)
 changes = autodetector._detect_changes()
 self.assertEqual(len(changes), 1)

 # Additional kwargs should also be reported as a change
-before = self.make_project_state([self.author_name_nested_deconstructable_1])
-after = self.make_project_state([self.author_name_nested_deconstructable_extra_kwarg])
+before = self.make_project_state([self.author_name_nested_deconstructible_1])
+after = self.make_project_state([self.author_name_nested_deconstructible_extra_kwarg])
 autodetector = MigrationAutodetector(before, after)
 changes = autodetector._detect_changes()
 self.assertEqual(len(changes), 1)

 def test_deconstruct_type(self):
 """
-#22951 -- Uninstanted classes with deconstruct are correctly returned
+#22951 -- Uninstantiated classes with deconstruct are correctly returned
 by deep_deconstruct during serialization.
 """
 author = ModelState(
@@ -635,9 +635,9 @@ class WriterTests(SimpleTestCase):
         # Yes, it doesn't make sense to use a class as a default for a
         # CharField. It does make sense for custom fields though, for example
         # an enumfield that takes the enum class as an argument.
-        class DeconstructableInstances(object):
+        class DeconstructibleInstances(object):
             def deconstruct(self):
-                return ('DeconstructableInstances', [], {})
+                return ('DeconstructibleInstances', [], {})

-        string = MigrationWriter.serialize(models.CharField(default=DeconstructableInstances))[0]
-        self.assertEqual(string, "models.CharField(default=migrations.test_writer.DeconstructableInstances)")
+        string = MigrationWriter.serialize(models.CharField(default=DeconstructibleInstances))[0]
+        self.assertEqual(string, "models.CharField(default=migrations.test_writer.DeconstructibleInstances)")
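For context on the renaming in the two hunks above: the deconstruction contract the autodetector relies on can be sketched roughly as below. The `DateRange` class is a hypothetical illustration, not part of the Django test suite, and the snippet only assumes Django is importable.

    # Minimal sketch of a deconstructible object (hypothetical class).
    from django.utils.deconstruct import deconstructible

    @deconstructible
    class DateRange(object):
        def __init__(self, low, high):
            self.low = low
            self.high = high

    # deconstruct() yields (import path, args, kwargs); the migration
    # autodetector compares these triples rather than object identity, so two
    # separately constructed but identical instances count as "no change".
    path, args, kwargs = DateRange(1, 5).deconstruct()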
@@ -251,7 +251,7 @@ if Image:
         name = models.CharField(max_length=50)
         mugshot = TestImageField(storage=temp_storage, upload_to='tests')

-    class AbsctractPersonWithHeight(models.Model):
+    class AbstractPersonWithHeight(models.Model):
         """
         Abstract model that defines an ImageField with only one dimension field
         to make sure the dimension update is correctly run on concrete subclass
@@ -264,9 +264,9 @@ if Image:
         class Meta:
             abstract = True

-    class PersonWithHeight(AbsctractPersonWithHeight):
+    class PersonWithHeight(AbstractPersonWithHeight):
         """
-        Concrete model that subclass an abctract one with only on dimension
+        Concrete model that subclass an abstract one with only on dimension
         field.
         """
         name = models.CharField(max_length=50)
@@ -298,7 +298,7 @@ if Image:
         Model that:
         * Defines two ImageFields
         * Defines the height/width fields before the ImageFields
-        * Has a nullalble ImageField
+        * Has a nullable ImageField
         """
         name = models.CharField(max_length=50)
         mugshot_height = models.PositiveSmallIntegerField()
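For context, the dimension handling these models exercise hinges on ImageField's height_field/width_field arguments. A minimal sketch follows; the `Photo` model and its app label are hypothetical, and it assumes an installed app plus Pillow for ImageField support.

    from django.db import models

    class Photo(models.Model):
        # Django fills this column from the uploaded image; wiring up only one
        # dimension field mirrors what AbstractPersonWithHeight covers above.
        mugshot_height = models.PositiveSmallIntegerField(null=True, blank=True)
        mugshot = models.ImageField(upload_to='photos', height_field='mugshot_height')

        class Meta:
            app_label = 'photos'  # assumed label for the sake of the sketch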
@@ -39,7 +39,7 @@ class DefaultRelatedNameTests(TestCase):
         except AttributeError:
             self.fail("Book should have a model_options_bookstores relation.")

-    def test_inheritance_with_overrided_default_related_name(self):
+    def test_inheritance_with_overridden_default_related_name(self):
         try:
             self.book.editor_stores
         except AttributeError:
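The option behind this renamed test is Meta.default_related_name. A hedged sketch with hypothetical `Bookstore`/`Book` models (the app label is assumed, and a subclass could override the option, which is what the test checks):

    from django.db import models

    class Bookstore(models.Model):
        class Meta:
            app_label = 'model_options'

    class Book(models.Model):
        stores = models.ManyToManyField(Bookstore)

        class Meta:
            app_label = 'model_options'
            # Reverse accessors that don't set related_name fall back to this,
            # e.g. bookstore.books instead of bookstore.book_set.
            default_related_name = 'books'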
@@ -107,7 +107,7 @@ class ModelAdminTests(TestCase):
         self.assertTrue(ma.lookup_allowed('name__nonexistent', 'test_value'))

     def test_field_arguments(self):
-        # If we specify the fields argument, fieldsets_add and fielsets_change should
+        # If we specify the fields argument, fieldsets_add and fieldsets_change should
         # just stick the fields into a formsets structure and return it.
         class BandAdmin(ModelAdmin):
             fields = ['name']
@@ -1,8 +1,8 @@
 """
 Regression tests for proper working of ForeignKey(null=True). Tests these bugs:

-    * #7512: including a nullable foreign key reference in Meta ordering has un
-xpected results
+    * #7512: including a nullable foreign key reference in Meta ordering has
+      unexpected results

 """
 from __future__ import unicode_literals
@@ -199,7 +199,7 @@ class TestQuerying(TestCase):


 @skipUnlessPG92
-class TestQueringWithRanges(TestCase):
+class TestQueryingWithRanges(TestCase):
     def test_date_range(self):
         objs = [
             RangeLookupsModel.objects.create(date='2015-01-01'),
@@ -33,7 +33,7 @@ class ProxyModelTests(TestCase):
             DEFAULT_DB_ALIAS).as_sql()
         self.assertEqual(my_person_sql, person_sql)

-    def test_inheretance_new_table(self):
+    def test_inheritance_new_table(self):
         """
         The StatusPerson models should have its own table (it's using ORM-level
         inheritance).
@@ -60,7 +60,7 @@ class Note(models.Model):
     def __init__(self, *args, **kwargs):
         super(Note, self).__init__(*args, **kwargs)
         # Regression for #13227 -- having an attribute that
-        # is unpickleable doesn't stop you from cloning queries
+        # is unpicklable doesn't stop you from cloning queries
         # that use objects of that type as an argument.
         self.lock = threading.Lock()

@@ -1884,7 +1884,7 @@ class Queries6Tests(TestCase):
     def test_tickets_8921_9188(self):
         # Incorrect SQL was being generated for certain types of exclude()
         # queries that crossed multi-valued relations (#8921, #9188 and some
-        # pre-emptively discovered cases).
+        # preemptively discovered cases).

         self.assertQuerysetEqual(
             PointerA.objects.filter(connection__pointerb__id=1),
@@ -3104,7 +3104,7 @@ class NullJoinPromotionOrTest(TestCase):
         p1 = Program.objects.create(identifier=i1)
         c1 = Channel.objects.create(identifier=i1)
         p2 = Program.objects.create(identifier=i2)
-        # Test OR + doubleneq. The expected result is that channel is LOUTER
+        # Test OR + doubleneg. The expected result is that channel is LOUTER
         # joined, program INNER joined
         qs1_filter = Identifier.objects.filter(
             Q(program__id=p2.id, channel__id=c1.id)
@@ -3191,7 +3191,7 @@ class JoinReuseTest(TestCase):


 class DisjunctionPromotionTests(TestCase):
-    def test_disjuction_promotion_select_related(self):
+    def test_disjunction_promotion_select_related(self):
         fk1 = FK1.objects.create(f1='f1', f2='f2')
         basea = BaseA.objects.create(a=fk1)
         qs = BaseA.objects.filter(Q(a=fk1) | Q(b=2))
@@ -57,7 +57,7 @@ class PickleabilityTestCase(TestCase):

     def test_model_pickle(self):
         """
-        Test that a model not defined on module level is pickleable.
+        Test that a model not defined on module level is picklable.
         """
         original = Container.SomeModel(pk=1)
         dumped = pickle.dumps(original)
@@ -618,7 +618,7 @@ class HostValidationTests(SimpleTestCase):
             '12.34.56.78:443',
             '[2001:19f0:feee::dead:beef:cafe]',
             '[2001:19f0:feee::dead:beef:cafe]:8080',
-            'xn--4ca9at.com', # Punnycode for öäü.com
+            'xn--4ca9at.com', # Punycode for öäü.com
             'anything.multitenant.com',
             'multitenant.com',
             'insensitive.com',
@@ -688,7 +688,7 @@ class HostValidationTests(SimpleTestCase):
             '12.34.56.78:443',
             '[2001:19f0:feee::dead:beef:cafe]',
             '[2001:19f0:feee::dead:beef:cafe]:8080',
-            'xn--4ca9at.com', # Punnycode for öäü.com
+            'xn--4ca9at.com', # Punycode for öäü.com
         ]

         for host in legit_hosts:
@@ -766,7 +766,7 @@ class HostValidationTests(SimpleTestCase):
             'example.com',
             '12.34.56.78',
             '[2001:19f0:feee::dead:beef:cafe]',
-            'xn--4ca9at.com', # Punnycode for öäü.com
+            'xn--4ca9at.com', # Punycode for öäü.com
         ]:
             request = HttpRequest()
             request.META = {'HTTP_HOST': host}
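The xn-- entries above are IDNA/Punycode hostnames. As a side illustration only (not part of the test), Python's built-in idna codec should round-trip the value mentioned in the corrected comment:

    # Expected values follow the comment in the test; shown as an assumption,
    # using the stdlib "idna" codec (IDNA 2003).
    ascii_form = 'öäü.com'.encode('idna')            # expected: b'xn--4ca9at.com'
    unicode_form = b'xn--4ca9at.com'.decode('idna')  # expected: 'öäü.com'
    print(ascii_form, unicode_form)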
@@ -297,7 +297,7 @@ The end."""),

     (im2m_obj, 470, M2MIntermediateData, None),

-    # testing post- and prereferences and extra fields
+    # testing post- and pre-references and extra fields
     (im_obj, 480, Intermediate, {'right': 300, 'left': 470}),
     (im_obj, 481, Intermediate, {'right': 300, 'left': 490}),
     (im_obj, 482, Intermediate, {'right': 500, 'left': 470}),
@@ -168,7 +168,7 @@ class SignalTests(BaseSignalTest):
             data.append(instance)

         try:
-            c1 = Car.objects.create(make="Volkswagon", model="Passat")
+            c1 = Car.objects.create(make="Volkswagen", model="Passat")
             self.assertEqual(data, [c1, c1])
         finally:
             signals.pre_save.disconnect(decorated_handler)
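The decorated handler being disconnected here follows Django's @receiver pattern. A minimal sketch in a configured Django project (the handler and list names are illustrative; the test itself additionally restricts the receiver to sender=Car):

    from django.db.models.signals import pre_save
    from django.dispatch import receiver

    seen = []

    @receiver(pre_save)
    def decorated_handler(sender, instance, **kwargs):
        # Called before every model save while connected.
        seen.append(instance)

    # Cleanup mirrors the test's finally block:
    # pre_save.disconnect(decorated_handler)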
@@ -1,5 +1,5 @@
 """
-A subset of the tests in tests/servers/tests exercicing
+A subset of the tests in tests/servers/tests exercising
 django.contrib.staticfiles.testing.StaticLiveServerTestCase instead of
 django.test.LiveServerTestCase.
 """
@@ -155,7 +155,7 @@ class SimpleTemplateResponseTest(SimpleTestCase):

     def test_pickling(self):
         # Create a template response. The context is
-        # known to be unpickleable (e.g., a function).
+        # known to be unpicklable (e.g., a function).
         response = SimpleTemplateResponse('first/test.html', {
             'value': 123,
             'fn': datetime.now,
@@ -267,7 +267,7 @@ class TemplateResponseTest(SimpleTestCase):

     def test_pickling(self):
         # Create a template response. The context is
-        # known to be unpickleable (e.g., a function).
+        # known to be unpicklable (e.g., a function).
         response = TemplateResponse(self.factory.get('/'),
             'first/test.html', {
                 'value': 123,
@@ -157,11 +157,11 @@ class DiscoverRunnerTest(TestCase):
         self.assertIn('test_2', suite[8].id(),
                       msg="Methods of unittest cases should be reversed.")

-    def test_overrideable_test_suite(self):
+    def test_overridable_test_suite(self):
         self.assertEqual(DiscoverRunner().test_suite, TestSuite)

-    def test_overrideable_test_runner(self):
+    def test_overridable_test_runner(self):
         self.assertEqual(DiscoverRunner().test_runner, TextTestRunner)

-    def test_overrideable_test_loader(self):
+    def test_overridable_test_loader(self):
         self.assertEqual(DiscoverRunner().test_loader, defaultTestLoader)
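These attributes are overridable precisely so projects can subclass the runner. A hedged sketch (class names are illustrative; settings.TEST_RUNNER would point at the subclass):

    import unittest
    from django.test.runner import DiscoverRunner

    class QuietTextTestRunner(unittest.TextTestRunner):
        """Illustrative stand-in; a real override might add timing or reporting."""

    class CustomRunner(DiscoverRunner):
        # The attributes asserted in the tests above are plain class attributes,
        # so overriding one is enough to change the runner's behaviour.
        test_runner = QuietTextTestRunner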
@@ -127,7 +127,7 @@ class LegacyDatabaseTests(TestCase):
         self.assertEqual(event.dt.replace(tzinfo=EAT), dt)

     @skipIfDBFeature('supports_timezones')
-    def test_aware_datetime_unspported(self):
+    def test_aware_datetime_unsupported(self):
         dt = datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT)
         with self.assertRaises(ValueError):
             Event.objects.create(dt=dt)
@@ -129,7 +129,7 @@ class Unmanaged1(models.Model):
         db_table = "unmanaged_models_proxy1"


-# Unmanged with an m2m to unmanaged: the intermediary table won't be created.
+# Unmanaged with an m2m to unmanaged: the intermediary table won't be created.
 class Unmanaged2(models.Model):
     mm = models.ManyToManyField(Unmanaged1)

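The comment being fixed describes Django's managed = False behaviour for auto-created many-to-many tables. A hedged sketch with illustrative model names (assumes an installed app and an existing legacy schema):

    from django.db import models

    class LegacyTag(models.Model):
        class Meta:
            app_label = 'unmanaged_models'
            managed = False            # migrate leaves this table alone
            db_table = 'legacy_tag'

    class LegacyArticle(models.Model):
        # With both ends unmanaged, the implicit through table is treated as
        # unmanaged too, so it is never created -- the behaviour the comment
        # above documents.
        tags = models.ManyToManyField(LegacyTag)

        class Meta:
            app_label = 'unmanaged_models'
            managed = False
            db_table = 'legacy_article'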
@@ -29,7 +29,7 @@ Fragments
 <center> ![image](http://iontech.files.wordpress.com/2013/01/androidfragmentation1-264x300.png) </center>
 Fragments encompass both layout resource and Java source. Hence, unlike ``, they allow us to reuse the View components along with their functionality, if needed.
 Fragments were first introduced in Honeycomb(API 11), living under the `android.app` package.
-**Note**: API 11 implies that Fragments have no support for devices less than Honeycomb and, for the record, as of writing this post, [more than 50% of Android devices worldwide run versions of Android below Honeycomb](http://developer.android.com/about/dashboards/index.html). Developer dissapointed? You don't have to be, cause google has been cautious enough to add the Fragment APIs to the support library. Yay!
+**Note**: API 11 implies that Fragments have no support for devices less than Honeycomb and, for the record, as of writing this post, [more than 50% of Android devices worldwide run versions of Android below Honeycomb](http://developer.android.com/about/dashboards/index.html). Developer disappointed? You don't have to be, cause google has been cautious enough to add the Fragment APIs to the support library. Yay!

 In the support library Fragment APIs sit in the `android.support.v4.app` package. This post assumes that your `minSdk` support is below API 11. Hence we concentrate on the Fragment APIs of the support library.

@@ -75,7 +75,7 @@ class TestUtilsHttp(unittest.TestCase):
         for bad_url in ('http://example.com',
                         'http:///example.com',
                         'https://example.com',
-                        'ftp://exampel.com',
+                        'ftp://example.com',
                         r'\\example.com',
                         r'\\\example.com',
                         r'/\\/example.com',
@@ -96,7 +96,7 @@ class TestUtilsHttp(unittest.TestCase):
             self.assertFalse(http.is_safe_url(bad_url, host='testserver'), "%s should be blocked" % bad_url)
         for good_url in ('/view/?param=http://example.com',
                          '/view/?param=https://example.com',
-                         '/view?param=ftp://exampel.com',
+                         '/view?param=ftp://example.com',
                          'view/?param=//example.com',
                          'https://testserver/',
                          'HTTPS://testserver/',
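The helper under test takes a candidate redirect target plus the current host, exactly as the calls above show. A brief usage sketch; the expected results follow the good/bad URL lists in this test:

    from django.utils import http

    # Relative URLs and same-host URLs are accepted as redirect targets...
    http.is_safe_url('/view/?param=https://example.com', host='testserver')  # True
    # ...while redirects that would leave the allowed host are rejected
    # (the target hostname here is illustrative).
    http.is_safe_url('https://attacker.example.com/', host='testserver')     # False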