Fixed many spelling mistakes in code, comments, and docs.
parent b6dd0afead
commit 93452a70e8
@@ -16,7 +16,7 @@ def delete_selected(modeladmin, request, queryset):
 """
 Default action which deletes the selected objects.

-This action first displays a confirmation page whichs shows all the
+This action first displays a confirmation page which shows all the
 deleteable objects, or, if the user has no permission one of the related
 childs (foreignkeys), a "permission denied" message.
@@ -13,7 +13,7 @@ class PermLookupDict(object):
 return self.user.has_perm("%s.%s" % (self.app_label, perm_name))

 def __iter__(self):
-# To fix 'item in perms.someapp' and __getitem__ iteraction we need to
+# To fix 'item in perms.someapp' and __getitem__ interaction we need to
 # define __iter__. See #18979 for details.
 raise TypeError("PermLookupDict is not iterable.")
@@ -51,7 +51,7 @@ class PostGISGeometryColumns(models.Model):
 class PostGISSpatialRefSys(models.Model, SpatialRefSysMixin):
 """
 The 'spatial_ref_sys' table from PostGIS. See the PostGIS
-documentaiton at Ch. 4.2.1.
+documentation at Ch. 4.2.1.
 """
 srid = models.IntegerField(primary_key=True)
 auth_name = models.CharField(max_length=256)
@@ -406,7 +406,7 @@ class GeoQuerySet(QuerySet):
 SQL function to call.

 settings:
-Dictonary of internal settings to customize for the spatial procedure.
+Dictionary of internal settings to customize for the spatial procedure.

 Public Keyword Arguments:
@@ -3,7 +3,7 @@ from django.utils import six


 class OGRGeomType(object):
-"Encapulates OGR Geometry Types."
+"Encapsulates OGR Geometry Types."

 wkb25bit = -2147483648
@@ -78,7 +78,7 @@ to_hex = BinOutput('GEOSGeomToHEX_buf')
 to_wkb = BinOutput('GEOSGeomToWKB_buf')
 to_wkt = StringFromGeom('GEOSGeomToWKT')

-# The GEOS geometry type, typeid, num_coordites and number of geometries
+# The GEOS geometry type, typeid, num_coordinates and number of geometries
 geos_normalize = IntFromGeom('GEOSNormalize')
 geos_type = StringFromGeom('GEOSGeomType')
 geos_typeid = IntFromGeom('GEOSGeomTypeId')
@@ -7,7 +7,7 @@ from django.contrib.gis.geos.prototypes.errcheck import check_predicate

 # Prepared geometry constructor and destructors.
 geos_prepare = GEOSFuncFactory('GEOSPrepare', argtypes=[GEOM_PTR], restype=PREPGEOM_PTR)
-prepared_destroy = GEOSFuncFactory('GEOSPreparedGeom_destroy', argtpes=[PREPGEOM_PTR])
+prepared_destroy = GEOSFuncFactory('GEOSPreparedGeom_destroy', argtypes=[PREPGEOM_PTR])


 # Prepared geometry binary predicate support.
@@ -44,7 +44,7 @@ class KMLSitemap(Sitemap):

 def get_urls(self, page=1, site=None, protocol=None):
 """
-This method is overrridden so the appropriate `geo_format` attribute
+This method is overridden so the appropriate `geo_format` attribute
 is placed on each URL element.
 """
 urls = Sitemap.get_urls(self, page=page, site=site, protocol=protocol)
@@ -40,7 +40,7 @@ class Command(BaseCommand):
 yield "# You'll have to do the following manually to clean this up:"
 yield "# * Rearrange models' order"
 yield "# * Make sure each model has one field with primary_key=True"
-yield "# * Make sure each ForeignKey has `on_delete` set to the desidered behavior."
+yield "# * Make sure each ForeignKey has `on_delete` set to the desired behavior."
 yield (
 "# * Remove `managed = False` lines if you wish to allow "
 "Django to create, modify, and delete the table"
@@ -309,7 +309,7 @@ class BaseExpression(object):
 Does this expression contain a reference to some of the
 existing aggregates? If so, returns the aggregate and also
 the lookup parts that *weren't* found. So, if
-exsiting_aggregates = {'max_id': Max('id')}
+existing_aggregates = {'max_id': Max('id')}
 self.name = 'max_id'
 queryset.filter(max_id__range=[10,100])
 then this method will return Max('id') and those parts of the
@@ -354,9 +354,9 @@ class Token(object):
 for bit in bits:
 # Handle translation-marked template pieces
 if bit.startswith(('_("', "_('")):
-sentinal = bit[2] + ')'
+sentinel = bit[2] + ')'
 trans_bit = [bit]
-while not bit.endswith(sentinal):
+while not bit.endswith(sentinel):
 bit = next(bits)
 trans_bit.append(bit)
 bit = ' '.join(trans_bit)
@@ -797,7 +797,7 @@ def default_if_none(value, arg):

 @register.filter(is_safe=False)
 def divisibleby(value, arg):
-"""Returns True if the value is devisible by the argument."""
+"""Returns True if the value is divisible by the argument."""
 return int(value) % int(arg) == 0
@@ -764,7 +764,7 @@ def do_for(parser, token):
 than -- the following::

 <ul>
-{% if althete_list %}
+{% if athlete_list %}
 {% for athlete in athlete_list %}
 <li>{{ athlete.name }}</li>
 {% endfor %}
@@ -84,7 +84,7 @@ class RemoteTestResult(object):
 def test_index(self):
 return self.testsRun - 1

-def check_pickleable(self, test, err):
+def check_picklable(self, test, err):
 # Ensure that sys.exc_info() tuples are picklable. This displays a
 # clear multiprocessing.pool.RemoteTraceback generated in the child
 # process instead of a multiprocessing.pool.MaybeEncodingError, making
@@ -152,12 +152,12 @@ failure and get a correct traceback.
 self.events.append(('stopTest', self.test_index))

 def addError(self, test, err):
-self.check_pickleable(test, err)
+self.check_picklable(test, err)
 self.events.append(('addError', self.test_index, err))
 self.stop_if_failfast()

 def addFailure(self, test, err):
-self.check_pickleable(test, err)
+self.check_picklable(test, err)
 self.events.append(('addFailure', self.test_index, err))
 self.stop_if_failfast()
@@ -177,7 +177,7 @@ failure and get a correct traceback.
 # expected failure occurs.
 if tblib is None:
 err = err[0], err[1], None
-self.check_pickleable(test, err)
+self.check_picklable(test, err)
 self.events.append(('addExpectedFailure', self.test_index, err))

 def addUnexpectedSuccess(self, test):
@@ -299,7 +299,7 @@ class ParallelTestSuite(unittest.TestSuite):
 To minimize pickling errors when getting results from workers:

 - pass back numeric indexes in self.subsuites instead of tests
-- make tracebacks pickleable with tblib, if available
+- make tracebacks picklable with tblib, if available

 Even with tblib, errors may still occur for dynamically created
 exception classes such Model.DoesNotExist which cannot be unpickled.
@@ -118,7 +118,7 @@ div.admonition-philosophy { padding-left:65px; background:url(docicons-philosoph
 div.admonition-behind-the-scenes { padding-left:65px; background:url(docicons-behindscenes.png) .8em .8em no-repeat;}
 .admonition.warning { background:url(docicons-warning.png) .8em .8em no-repeat; border:1px solid #ffc83c;}

-/*** versoinadded/changes ***/
+/*** versionadded/changes ***/
 div.versionadded, div.versionchanged { }
 div.versionadded span.title, div.versionchanged span.title, span.versionmodified { font-weight: bold; }
 div.versionadded, div.versionchanged, div.deprecated { color:#555; }
@@ -6,7 +6,7 @@
 # This file is execfile()d with the current directory set to its containing dir.
 #
 # The contents of this file are pickled, so don't put values in the namespace
-# that aren't pickleable (module imports are okay, they're removed automatically).
+# that aren't picklable (module imports are okay, they're removed automatically).
 #
 # All configuration values have a default; values that are commented out
 # serve to show the default.
@@ -851,7 +851,7 @@ This example illustrates all possible attributes and methods for a

 def item_updateddate(self):
 """
-Returns the updateddated for every item in the feed.
+Returns the updateddate for every item in the feed.
 """

 item_updateddate = datetime.datetime(2005, 5, 3) # Hard-coded updateddate.
@@ -558,7 +558,7 @@ Peucker
 pgAdmin
 phishing
 php
-pickleable
+picklable
 picosecond
 PIL
 pingback
@@ -715,7 +715,7 @@ To apply permission checks to :doc:`class-based views

 .. versionadded:: 1.9

-This mixin, just like the ``permisison_required``
+This mixin, just like the ``permission_required``
 decorator, checks whether the user accessing a view has all given
 permissions. You should specify the permission (or an iterable of
 permissions) using the ``permission_required`` parameter::
@@ -657,7 +657,7 @@ Technical details

 * The session dictionary accepts any :mod:`json` serializable value when using
 :class:`~django.contrib.sessions.serializers.JSONSerializer` or any
-pickleable Python object when using
+picklable Python object when using
 :class:`~django.contrib.sessions.serializers.PickleSerializer`. See the
 :mod:`pickle` module for more information.
@@ -818,7 +818,7 @@ class SeleniumFirefoxTests(AdminSeleniumWebDriverTestCase):

 def test_add_row_selection(self):
 """
-Ensure that the status line for selected rows gets updated correcly (#22038)
+Ensure that the status line for selected rows gets updated correctly (#22038)
 """
 self.admin_login(username='super', password='secret')
 self.selenium.get('%s%s' % (self.live_server_url,
@@ -45,7 +45,7 @@ class TestDataMixin(object):
 is_staff=True, last_login=datetime(2007, 5, 30, 13, 20, 10),
 date_joined=datetime(2007, 5, 30, 13, 20, 10)
 )
-models.Car.objects.create(id=1, owner=cls.u1, make='Volkswagon', model='Passat')
+models.Car.objects.create(id=1, owner=cls.u1, make='Volkswagen', model='Passat')
 models.Car.objects.create(id=2, owner=cls.u2, make='BMW', model='M3')
@@ -206,7 +206,7 @@ class AdminFormfieldForDBFieldWithRequestTests(TestDataMixin, TestCase):
 self.client.login(username="super", password="secret")
 response = self.client.get(reverse('admin:admin_widgets_cartire_add'))
 self.assertNotContains(response, "BMW M3")
-self.assertContains(response, "Volkswagon Passat")
+self.assertContains(response, "Volkswagen Passat")


 @override_settings(PASSWORD_HASHERS=['django.contrib.auth.hashers.SHA1PasswordHasher'],
@@ -509,7 +509,7 @@ class AggregateTestCase(TestCase):

 def test_sum_distinct_aggregate(self):
 """
-Sum on a distict() QuerySet should aggregate only the distinct items.
+Sum on a distinct() QuerySet should aggregate only the distinct items.
 """
 authors = Author.objects.filter(book__in=[5, 6])
 self.assertEqual(authors.count(), 3)
@@ -1346,9 +1346,9 @@ class AggregationTests(TestCase):
 in group by.
 """
 qs = Book.objects.annotate(
-acount=Count('authors')
+account=Count('authors')
 ).filter(
-acount=F('publisher__num_awards')
+account=F('publisher__num_awards')
 )
 self.assertQuerysetEqual(
 qs, ['Sams Teach Yourself Django in 24 Hours'],
@@ -392,7 +392,7 @@ class UserChangeFormTest(TestDataMixin, TestCase):
 # Just check we can create it
 MyUserForm({})

-def test_unsuable_password(self):
+def test_unusable_password(self):
 user = User.objects.get(username='empty_password')
 user.set_unusable_password()
 user.save()
@@ -543,7 +543,7 @@ class LoginTest(AuthViewsTestCase):
 for bad_url in ('http://example.com',
 'http:///example.com',
 'https://example.com',
-'ftp://exampel.com',
+'ftp://example.com',
 '///example.com',
 '//example.com',
 'javascript:alert("XSS")'):
@@ -564,7 +564,7 @@ class LoginTest(AuthViewsTestCase):
 # These URLs *should* still pass the security check
 for good_url in ('/view/?param=http://example.com',
 '/view/?param=https://example.com',
-'/view?param=ftp://exampel.com',
+'/view?param=ftp://example.com',
 'view/?param=//example.com',
 'https://testserver/',
 'HTTPS://testserver/',
@@ -830,7 +830,7 @@ class LogoutTest(AuthViewsTestCase):
 for bad_url in ('http://example.com',
 'http:///example.com',
 'https://example.com',
-'ftp://exampel.com',
+'ftp://example.com',
 '///example.com',
 '//example.com',
 'javascript:alert("XSS")'):
@@ -849,7 +849,7 @@ class LogoutTest(AuthViewsTestCase):
 # These URLs *should* still pass the security check
 for good_url in ('/view/?param=http://example.com',
 '/view/?param=https://example.com',
-'/view?param=ftp://exampel.com',
+'/view?param=ftp://example.com',
 'view/?param=//example.com',
 'https://testserver/',
 'HTTPS://testserver/',
@@ -1016,6 +1016,6 @@ class UUIDUserTests(TestCase):
 })
 self.assertRedirects(response, user_change_url)
 row = LogEntry.objects.latest('id')
-self.assertEqual(row.user_id, 1) # harcoded in CustomUserAdmin.log_change()
+self.assertEqual(row.user_id, 1) # hardcoded in CustomUserAdmin.log_change()
 self.assertEqual(row.object_id, str(u.pk))
 self.assertEqual(row.change_message, 'Changed password.')
@@ -65,10 +65,10 @@ class BulkCreateTests(TestCase):
 }, attrgetter("name"), ordered=False)

 ProxyProxyCountry.objects.bulk_create([
-ProxyProxyCountry(name="Neitherlands", iso_two_letter="NT"),
+ProxyProxyCountry(name="Netherlands", iso_two_letter="NT"),
 ])
 self.assertQuerysetEqual(ProxyProxyCountry.objects.all(), {
-"Qwghlm", "Tortall", "Neitherlands",
+"Qwghlm", "Tortall", "Netherlands",
 }, attrgetter("name"), ordered=False)

 def test_non_auto_increment_pk(self):
@@ -224,7 +224,7 @@ class CheckStrictTransportSecurityTest(SimpleTestCase):
 @override_settings(
 MIDDLEWARE_CLASSES=[],
 SECURE_HSTS_SECONDS=0)
-def test_no_sts_no_middlware(self):
+def test_no_sts_no_middleware(self):
 """
 Don't warn if SECURE_HSTS_SECONDS isn't > 0 and SecurityMiddleware isn't
 installed.
@@ -258,7 +258,7 @@ class CheckStrictTransportSecuritySubdomainsTest(SimpleTestCase):
 MIDDLEWARE_CLASSES=[],
 SECURE_HSTS_INCLUDE_SUBDOMAINS=False,
 SECURE_HSTS_SECONDS=3600)
-def test_no_sts_subdomains_no_middlware(self):
+def test_no_sts_subdomains_no_middleware(self):
 """
 Don't warn if SecurityMiddleware isn't installed.
 """
@@ -415,7 +415,7 @@ class CheckSSLRedirectTest(SimpleTestCase):
 @override_settings(
 MIDDLEWARE_CLASSES=[],
 SECURE_SSL_REDIRECT=False)
-def test_no_ssl_redirect_no_middlware(self):
+def test_no_ssl_redirect_no_middleware(self):
 """
 Don't warn if SECURE_SSL_REDIRECT is False and SecurityMiddleware isn't
 installed.
@@ -392,7 +392,7 @@ class CsrfViewMiddlewareTest(SimpleTestCase):
 def test_https_csrf_wildcard_trusted_origin_allowed(self):
 """
 A POST HTTPS request with a referer that matches a CSRF_TRUSTED_ORIGINS
-wilcard is accepted.
+wildcard is accepted.
 """
 req = self._get_POST_request_with_token()
 req._is_secure_override = True
@@ -1073,7 +1073,7 @@ class CaseExpressionTests(TestCase):
 lambda x: (x, x.foo)
 )

-def test_join_promotion_multiple_annonations(self):
+def test_join_promotion_multiple_annotations(self):
 o = CaseTestModel.objects.create(integer=1, integer2=1, string='1')
 # Testing that:
 # 1. There isn't any object on the remote side of the fk_rel
@@ -749,7 +749,7 @@ class FormsTestCase(SimpleTestCase):
 '<input type="hidden" name="when_1" value="01:01" id="id_when_1" />'
 )

-def test_mulitple_choice_checkbox(self):
+def test_multiple_choice_checkbox(self):
 # MultipleChoiceField can also be used with the CheckboxSelectMultiple widget.
 class SongForm(Form):
 name = CharField()
@@ -184,7 +184,7 @@ class FormsModelTestCase(TestCase):
 m.delete()

 def test_boundary_conditions(self):
-# Boundary conditions on a PostitiveIntegerField #########################
+# Boundary conditions on a PositiveIntegerField #########################
 class BoundaryForm(ModelForm):
 class Meta:
 model = BoundaryModel
@@ -425,7 +425,7 @@ class GISFunctionsTests(TestCase):
 union=functions.Union('mpoly', geom),
 )

-# For some reason SpatiaLite does something screwey with the Texas geometry here.
+# For some reason SpatiaLite does something screwy with the Texas geometry here.
 # Also, it doesn't like the null intersection.
 if spatialite:
 qs = qs.exclude(name='Texas')
@@ -309,7 +309,7 @@ class HttpResponseTests(unittest.TestCase):
 h['Content-Disposition'] = 'attachment; filename="%s"' % f
 # This one is triggering http://bugs.python.org/issue20747, that is Python
 # will itself insert a newline in the header
-h['Content-Disposition'] = 'attachement; filename="EdelRot_Blu\u0308te (3)-0.JPG"'
+h['Content-Disposition'] = 'attachment; filename="EdelRot_Blu\u0308te (3)-0.JPG"'

 def test_newlines_in_headers(self):
 # Bug #10188: Do not allow newlines in headers (CR or LF)
@@ -9,6 +9,6 @@ dummy2 = _("This is another translatable string.")
 # This file has a literal with plural forms. When processed first, makemessages
 # shouldn't create a .po file with duplicate `Plural-Forms` headers
 number = 3
-dummuy3 = ungettext("%(number)s Foo", "%(number)s Foos", number) % {'number': number}
+dummy3 = ungettext("%(number)s Foo", "%(number)s Foos", number) % {'number': number}

 dummy4 = _('Size')
@@ -4,7 +4,7 @@ from django.db import models
 from .base import IsolatedModelsTestCase


-class DeprecatedFieldssTests(IsolatedModelsTestCase):
+class DeprecatedFieldsTests(IsolatedModelsTestCase):
 def test_IPAddressField_deprecated(self):
 class IPAddressModel(models.Model):
 ip = models.IPAddressField()
@@ -351,7 +351,7 @@ class FieldNamesTests(IsolatedModelsTestCase):
 ]

 # Second error because the FK specified in the `through` model
-# `m2msimple` has auto-genererated name longer than allowed.
+# `m2msimple` has auto-generated name longer than allowed.
 # There will be no check errors in the other M2M because it
 # specifies db_column for the FK in `through` model even if the actual
 # name is longer than the limits of the database.
@@ -1504,7 +1504,7 @@ class M2mThroughFieldsTests(IsolatedModelsTestCase):
 ]
 self.assertEqual(expected, errors)

-def test_insersection_foreign_object(self):
+def test_intersection_foreign_object(self):
 class Parent(models.Model):
 a = models.PositiveIntegerField()
 b = models.PositiveIntegerField()
@@ -346,7 +346,7 @@ class M2mThroughReferentialTests(TestCase):
 []
 )

-def test_self_referential_non_symmentrical_first_side(self):
+def test_self_referential_non_symmetrical_first_side(self):
 tony = PersonSelfRefM2M.objects.create(name="Tony")
 chris = PersonSelfRefM2M.objects.create(name="Chris")
 Friendship.objects.create(
@@ -359,7 +359,7 @@ class M2mThroughReferentialTests(TestCase):
 attrgetter("name")
 )

-def test_self_referential_non_symmentrical_second_side(self):
+def test_self_referential_non_symmetrical_second_side(self):
 tony = PersonSelfRefM2M.objects.create(name="Tony")
 chris = PersonSelfRefM2M.objects.create(name="Chris")
 Friendship.objects.create(
@@ -371,7 +371,7 @@ class M2mThroughReferentialTests(TestCase):
 []
 )

-def test_self_referential_non_symmentrical_clear_first_side(self):
+def test_self_referential_non_symmetrical_clear_first_side(self):
 tony = PersonSelfRefM2M.objects.create(name="Tony")
 chris = PersonSelfRefM2M.objects.create(name="Chris")
 Friendship.objects.create(
@@ -392,7 +392,7 @@ class M2mThroughReferentialTests(TestCase):
 attrgetter("name")
 )

-def test_self_referential_symmentrical(self):
+def test_self_referential_symmetrical(self):
 tony = PersonSelfRefM2M.objects.create(name="Tony")
 chris = PersonSelfRefM2M.objects.create(name="Chris")
 Friendship.objects.create(
@@ -197,7 +197,7 @@ class CommonMiddlewareTest(SimpleTestCase):
 response = HttpResponseNotFound()
 r = CommonMiddleware().process_response(request, response)
 self.assertIsNotNone(r,
-"CommonMiddlware failed to return APPEND_SLASH redirect using request.urlconf")
+"CommonMiddleware failed to return APPEND_SLASH redirect using request.urlconf")
 self.assertEqual(r.status_code, 301)
 self.assertEqual(r.url, '/customurlconf/slash/')
@@ -236,7 +236,7 @@ class CommonMiddlewareTest(SimpleTestCase):
 response = HttpResponseNotFound()
 r = CommonMiddleware().process_response(request, response)
 self.assertIsNotNone(r,
-"CommonMiddlware failed to return APPEND_SLASH redirect using request.urlconf")
+"CommonMiddleware failed to return APPEND_SLASH redirect using request.urlconf")
 self.assertEqual(r.status_code, 301)
 self.assertEqual(
 r.url,
@@ -17,9 +17,9 @@ from django.test.utils import isolate_lru_cache
 from .models import FoodManager, FoodQuerySet


-class DeconstructableObject(object):
+class DeconstructibleObject(object):
 """
-A custom deconstructable object.
+A custom deconstructible object.
 """

 def __init__(self, *args, **kwargs):
@ -60,117 +60,117 @@ class AutodetectorTests(TestCase):
|
|||
("id", models.AutoField(primary_key=True)),
|
||||
("name", models.CharField(max_length=200, default='Ada Lovelace')),
|
||||
])
|
||||
author_name_deconstructable_1 = ModelState("testapp", "Author", [
|
||||
author_name_deconstructible_1 = ModelState("testapp", "Author", [
|
||||
("id", models.AutoField(primary_key=True)),
|
||||
("name", models.CharField(max_length=200, default=DeconstructableObject())),
|
||||
("name", models.CharField(max_length=200, default=DeconstructibleObject())),
|
||||
])
|
||||
author_name_deconstructable_2 = ModelState("testapp", "Author", [
|
||||
author_name_deconstructible_2 = ModelState("testapp", "Author", [
|
||||
("id", models.AutoField(primary_key=True)),
|
||||
("name", models.CharField(max_length=200, default=DeconstructableObject())),
|
||||
("name", models.CharField(max_length=200, default=DeconstructibleObject())),
|
||||
])
|
||||
author_name_deconstructable_3 = ModelState("testapp", "Author", [
|
||||
author_name_deconstructible_3 = ModelState("testapp", "Author", [
|
||||
("id", models.AutoField(primary_key=True)),
|
||||
("name", models.CharField(max_length=200, default=models.IntegerField())),
|
||||
])
|
||||
author_name_deconstructable_4 = ModelState("testapp", "Author", [
|
||||
author_name_deconstructible_4 = ModelState("testapp", "Author", [
|
||||
("id", models.AutoField(primary_key=True)),
|
||||
("name", models.CharField(max_length=200, default=models.IntegerField())),
|
||||
])
|
||||
author_name_deconstructable_list_1 = ModelState("testapp", "Author", [
|
||||
author_name_deconstructible_list_1 = ModelState("testapp", "Author", [
|
||||
("id", models.AutoField(primary_key=True)),
|
||||
("name", models.CharField(max_length=200, default=[DeconstructableObject(), 123])),
|
||||
("name", models.CharField(max_length=200, default=[DeconstructibleObject(), 123])),
|
||||
])
|
||||
author_name_deconstructable_list_2 = ModelState("testapp", "Author", [
|
||||
author_name_deconstructible_list_2 = ModelState("testapp", "Author", [
|
||||
("id", models.AutoField(primary_key=True)),
|
||||
("name", models.CharField(max_length=200, default=[DeconstructableObject(), 123])),
|
||||
("name", models.CharField(max_length=200, default=[DeconstructibleObject(), 123])),
|
||||
])
|
||||
author_name_deconstructable_list_3 = ModelState("testapp", "Author", [
|
||||
author_name_deconstructible_list_3 = ModelState("testapp", "Author", [
|
||||
("id", models.AutoField(primary_key=True)),
|
||||
("name", models.CharField(max_length=200, default=[DeconstructableObject(), 999])),
|
||||
("name", models.CharField(max_length=200, default=[DeconstructibleObject(), 999])),
|
||||
])
|
||||
author_name_deconstructable_tuple_1 = ModelState("testapp", "Author", [
|
||||
author_name_deconstructible_tuple_1 = ModelState("testapp", "Author", [
|
||||
("id", models.AutoField(primary_key=True)),
|
||||
("name", models.CharField(max_length=200, default=(DeconstructableObject(), 123))),
|
||||
("name", models.CharField(max_length=200, default=(DeconstructibleObject(), 123))),
|
||||
])
|
||||
author_name_deconstructable_tuple_2 = ModelState("testapp", "Author", [
|
||||
author_name_deconstructible_tuple_2 = ModelState("testapp", "Author", [
|
||||
("id", models.AutoField(primary_key=True)),
|
||||
("name", models.CharField(max_length=200, default=(DeconstructableObject(), 123))),
|
||||
("name", models.CharField(max_length=200, default=(DeconstructibleObject(), 123))),
|
||||
])
|
||||
author_name_deconstructable_tuple_3 = ModelState("testapp", "Author", [
|
||||
author_name_deconstructible_tuple_3 = ModelState("testapp", "Author", [
|
||||
("id", models.AutoField(primary_key=True)),
|
||||
("name", models.CharField(max_length=200, default=(DeconstructableObject(), 999))),
|
||||
("name", models.CharField(max_length=200, default=(DeconstructibleObject(), 999))),
|
||||
])
|
||||
author_name_deconstructable_dict_1 = ModelState("testapp", "Author", [
|
||||
author_name_deconstructible_dict_1 = ModelState("testapp", "Author", [
|
||||
("id", models.AutoField(primary_key=True)),
|
||||
("name", models.CharField(max_length=200, default={
|
||||
'item': DeconstructableObject(), 'otheritem': 123
|
||||
'item': DeconstructibleObject(), 'otheritem': 123
|
||||
})),
|
||||
])
|
||||
author_name_deconstructable_dict_2 = ModelState("testapp", "Author", [
|
||||
author_name_deconstructible_dict_2 = ModelState("testapp", "Author", [
|
||||
("id", models.AutoField(primary_key=True)),
|
||||
("name", models.CharField(max_length=200, default={
|
||||
'item': DeconstructableObject(), 'otheritem': 123
|
||||
'item': DeconstructibleObject(), 'otheritem': 123
|
||||
})),
|
||||
])
|
||||
author_name_deconstructable_dict_3 = ModelState("testapp", "Author", [
|
||||
author_name_deconstructible_dict_3 = ModelState("testapp", "Author", [
|
||||
("id", models.AutoField(primary_key=True)),
|
||||
("name", models.CharField(max_length=200, default={
|
||||
'item': DeconstructableObject(), 'otheritem': 999
|
||||
'item': DeconstructibleObject(), 'otheritem': 999
|
||||
})),
|
||||
])
|
||||
author_name_nested_deconstructable_1 = ModelState("testapp", "Author", [
|
||||
author_name_nested_deconstructible_1 = ModelState("testapp", "Author", [
|
||||
("id", models.AutoField(primary_key=True)),
|
||||
("name", models.CharField(max_length=200, default=DeconstructableObject(
|
||||
DeconstructableObject(1),
|
||||
(DeconstructableObject('t1'), DeconstructableObject('t2'),),
|
||||
a=DeconstructableObject('A'),
|
||||
b=DeconstructableObject(B=DeconstructableObject('c')),
|
||||
("name", models.CharField(max_length=200, default=DeconstructibleObject(
|
||||
DeconstructibleObject(1),
|
||||
(DeconstructibleObject('t1'), DeconstructibleObject('t2'),),
|
||||
a=DeconstructibleObject('A'),
|
||||
b=DeconstructibleObject(B=DeconstructibleObject('c')),
|
||||
))),
|
||||
])
|
||||
author_name_nested_deconstructable_2 = ModelState("testapp", "Author", [
|
||||
author_name_nested_deconstructible_2 = ModelState("testapp", "Author", [
|
||||
("id", models.AutoField(primary_key=True)),
|
||||
("name", models.CharField(max_length=200, default=DeconstructableObject(
|
||||
DeconstructableObject(1),
|
||||
(DeconstructableObject('t1'), DeconstructableObject('t2'),),
|
||||
a=DeconstructableObject('A'),
|
||||
b=DeconstructableObject(B=DeconstructableObject('c')),
|
||||
("name", models.CharField(max_length=200, default=DeconstructibleObject(
|
||||
DeconstructibleObject(1),
|
||||
(DeconstructibleObject('t1'), DeconstructibleObject('t2'),),
|
||||
a=DeconstructibleObject('A'),
|
||||
b=DeconstructibleObject(B=DeconstructibleObject('c')),
|
||||
))),
|
||||
])
|
||||
author_name_nested_deconstructable_changed_arg = ModelState("testapp", "Author", [
|
||||
author_name_nested_deconstructible_changed_arg = ModelState("testapp", "Author", [
|
||||
("id", models.AutoField(primary_key=True)),
|
||||
("name", models.CharField(max_length=200, default=DeconstructableObject(
|
||||
DeconstructableObject(1),
|
||||
(DeconstructableObject('t1'), DeconstructableObject('t2-changed'),),
|
||||
a=DeconstructableObject('A'),
|
||||
b=DeconstructableObject(B=DeconstructableObject('c')),
|
||||
("name", models.CharField(max_length=200, default=DeconstructibleObject(
|
||||
DeconstructibleObject(1),
|
||||
(DeconstructibleObject('t1'), DeconstructibleObject('t2-changed'),),
|
||||
a=DeconstructibleObject('A'),
|
||||
b=DeconstructibleObject(B=DeconstructibleObject('c')),
|
||||
))),
|
||||
])
|
||||
author_name_nested_deconstructable_extra_arg = ModelState("testapp", "Author", [
|
||||
author_name_nested_deconstructible_extra_arg = ModelState("testapp", "Author", [
|
||||
("id", models.AutoField(primary_key=True)),
|
||||
("name", models.CharField(max_length=200, default=DeconstructableObject(
|
||||
DeconstructableObject(1),
|
||||
(DeconstructableObject('t1'), DeconstructableObject('t2'),),
|
||||
("name", models.CharField(max_length=200, default=DeconstructibleObject(
|
||||
DeconstructibleObject(1),
|
||||
(DeconstructibleObject('t1'), DeconstructibleObject('t2'),),
|
||||
None,
|
||||
a=DeconstructableObject('A'),
|
||||
b=DeconstructableObject(B=DeconstructableObject('c')),
|
||||
a=DeconstructibleObject('A'),
|
||||
b=DeconstructibleObject(B=DeconstructibleObject('c')),
|
||||
))),
|
||||
])
|
||||
author_name_nested_deconstructable_changed_kwarg = ModelState("testapp", "Author", [
|
||||
author_name_nested_deconstructible_changed_kwarg = ModelState("testapp", "Author", [
|
||||
("id", models.AutoField(primary_key=True)),
|
||||
("name", models.CharField(max_length=200, default=DeconstructableObject(
|
||||
DeconstructableObject(1),
|
||||
(DeconstructableObject('t1'), DeconstructableObject('t2'),),
|
||||
a=DeconstructableObject('A'),
|
||||
b=DeconstructableObject(B=DeconstructableObject('c-changed')),
|
||||
("name", models.CharField(max_length=200, default=DeconstructibleObject(
|
||||
DeconstructibleObject(1),
|
||||
(DeconstructibleObject('t1'), DeconstructibleObject('t2'),),
|
||||
a=DeconstructibleObject('A'),
|
||||
b=DeconstructibleObject(B=DeconstructibleObject('c-changed')),
|
||||
))),
|
||||
])
|
||||
author_name_nested_deconstructable_extra_kwarg = ModelState("testapp", "Author", [
|
||||
author_name_nested_deconstructible_extra_kwarg = ModelState("testapp", "Author", [
|
||||
("id", models.AutoField(primary_key=True)),
|
||||
("name", models.CharField(max_length=200, default=DeconstructableObject(
|
||||
DeconstructableObject(1),
|
||||
(DeconstructableObject('t1'), DeconstructableObject('t2'),),
|
||||
a=DeconstructableObject('A'),
|
||||
b=DeconstructableObject(B=DeconstructableObject('c')),
|
||||
("name", models.CharField(max_length=200, default=DeconstructibleObject(
|
||||
DeconstructibleObject(1),
|
||||
(DeconstructibleObject('t1'), DeconstructibleObject('t2'),),
|
||||
a=DeconstructibleObject('A'),
|
||||
b=DeconstructibleObject(B=DeconstructibleObject('c')),
|
||||
c=None,
|
||||
))),
|
||||
])
|
||||
|
@@ -1383,13 +1383,13 @@ class AutodetectorTests(TestCase):
 self.assertOperationTypes(changes, 'testapp', 0, ["AddField"])
 self.assertOperationAttributes(changes, 'testapp', 0, 0, name="name")

-def test_custom_deconstructable(self):
+def test_custom_deconstructible(self):
 """
 Two instances which deconstruct to the same value aren't considered a
 change.
 """
-before = self.make_project_state([self.author_name_deconstructable_1])
-after = self.make_project_state([self.author_name_deconstructable_2])
+before = self.make_project_state([self.author_name_deconstructible_1])
+after = self.make_project_state([self.author_name_deconstructible_2])
 autodetector = MigrationAutodetector(before, after)
 changes = autodetector._detect_changes()
 # Right number of migrations?
@ -1397,70 +1397,70 @@ class AutodetectorTests(TestCase):
|
|||
|
||||
def test_deconstruct_field_kwarg(self):
|
||||
"""Field instances are handled correctly by nested deconstruction."""
|
||||
before = self.make_project_state([self.author_name_deconstructable_3])
|
||||
after = self.make_project_state([self.author_name_deconstructable_4])
|
||||
before = self.make_project_state([self.author_name_deconstructible_3])
|
||||
after = self.make_project_state([self.author_name_deconstructible_4])
|
||||
autodetector = MigrationAutodetector(before, after)
|
||||
changes = autodetector._detect_changes()
|
||||
self.assertEqual(changes, {})
|
||||
|
||||
def test_deconstructable_list(self):
|
||||
def test_deconstructible_list(self):
|
||||
"""Nested deconstruction descends into lists."""
|
||||
# When lists contain items that deconstruct to identical values, those lists
|
||||
# should be considered equal for the purpose of detecting state changes
|
||||
# (even if the original items are unequal).
|
||||
before = self.make_project_state([self.author_name_deconstructable_list_1])
|
||||
after = self.make_project_state([self.author_name_deconstructable_list_2])
|
||||
before = self.make_project_state([self.author_name_deconstructible_list_1])
|
||||
after = self.make_project_state([self.author_name_deconstructible_list_2])
|
||||
autodetector = MigrationAutodetector(before, after)
|
||||
changes = autodetector._detect_changes()
|
||||
self.assertEqual(changes, {})
|
||||
|
||||
# Legitimate differences within the deconstructed lists should be reported
|
||||
# as a change
|
||||
before = self.make_project_state([self.author_name_deconstructable_list_1])
|
||||
after = self.make_project_state([self.author_name_deconstructable_list_3])
|
||||
before = self.make_project_state([self.author_name_deconstructible_list_1])
|
||||
after = self.make_project_state([self.author_name_deconstructible_list_3])
|
||||
autodetector = MigrationAutodetector(before, after)
|
||||
changes = autodetector._detect_changes()
|
||||
self.assertEqual(len(changes), 1)
|
||||
|
||||
def test_deconstructable_tuple(self):
|
||||
def test_deconstructible_tuple(self):
|
||||
"""Nested deconstruction descends into tuples."""
|
||||
# When tuples contain items that deconstruct to identical values, those tuples
|
||||
# should be considered equal for the purpose of detecting state changes
|
||||
# (even if the original items are unequal).
|
||||
before = self.make_project_state([self.author_name_deconstructable_tuple_1])
|
||||
after = self.make_project_state([self.author_name_deconstructable_tuple_2])
|
||||
before = self.make_project_state([self.author_name_deconstructible_tuple_1])
|
||||
after = self.make_project_state([self.author_name_deconstructible_tuple_2])
|
||||
autodetector = MigrationAutodetector(before, after)
|
||||
changes = autodetector._detect_changes()
|
||||
self.assertEqual(changes, {})
|
||||
|
||||
# Legitimate differences within the deconstructed tuples should be reported
|
||||
# as a change
|
||||
before = self.make_project_state([self.author_name_deconstructable_tuple_1])
|
||||
after = self.make_project_state([self.author_name_deconstructable_tuple_3])
|
||||
before = self.make_project_state([self.author_name_deconstructible_tuple_1])
|
||||
after = self.make_project_state([self.author_name_deconstructible_tuple_3])
|
||||
autodetector = MigrationAutodetector(before, after)
|
||||
changes = autodetector._detect_changes()
|
||||
self.assertEqual(len(changes), 1)
|
||||
|
||||
def test_deconstructable_dict(self):
|
||||
def test_deconstructible_dict(self):
|
||||
"""Nested deconstruction descends into dict values."""
|
||||
# When dicts contain items whose values deconstruct to identical values,
|
||||
# those dicts should be considered equal for the purpose of detecting
|
||||
# state changes (even if the original values are unequal).
|
||||
before = self.make_project_state([self.author_name_deconstructable_dict_1])
|
||||
after = self.make_project_state([self.author_name_deconstructable_dict_2])
|
||||
before = self.make_project_state([self.author_name_deconstructible_dict_1])
|
||||
after = self.make_project_state([self.author_name_deconstructible_dict_2])
|
||||
autodetector = MigrationAutodetector(before, after)
|
||||
changes = autodetector._detect_changes()
|
||||
self.assertEqual(changes, {})
|
||||
|
||||
# Legitimate differences within the deconstructed dicts should be reported
|
||||
# as a change
|
||||
before = self.make_project_state([self.author_name_deconstructable_dict_1])
|
||||
after = self.make_project_state([self.author_name_deconstructable_dict_3])
|
||||
before = self.make_project_state([self.author_name_deconstructible_dict_1])
|
||||
after = self.make_project_state([self.author_name_deconstructible_dict_3])
|
||||
autodetector = MigrationAutodetector(before, after)
|
||||
changes = autodetector._detect_changes()
|
||||
self.assertEqual(len(changes), 1)
|
||||
|
||||
def test_nested_deconstructable_objects(self):
|
||||
def test_nested_deconstructible_objects(self):
|
||||
"""
|
||||
Nested deconstruction is applied recursively to the args/kwargs of
|
||||
deconstructed objects.
|
||||
|
@ -1468,45 +1468,45 @@ class AutodetectorTests(TestCase):
|
|||
# If the items within a deconstructed object's args/kwargs have the same
|
||||
# deconstructed values - whether or not the items themselves are different
|
||||
# instances - then the object as a whole is regarded as unchanged.
|
||||
before = self.make_project_state([self.author_name_nested_deconstructable_1])
|
||||
after = self.make_project_state([self.author_name_nested_deconstructable_2])
|
||||
before = self.make_project_state([self.author_name_nested_deconstructible_1])
|
||||
after = self.make_project_state([self.author_name_nested_deconstructible_2])
|
||||
autodetector = MigrationAutodetector(before, after)
|
||||
changes = autodetector._detect_changes()
|
||||
self.assertEqual(changes, {})
|
||||
|
||||
# Differences that exist solely within the args list of a deconstructed object
|
||||
# should be reported as changes
|
||||
before = self.make_project_state([self.author_name_nested_deconstructable_1])
|
||||
after = self.make_project_state([self.author_name_nested_deconstructable_changed_arg])
|
||||
before = self.make_project_state([self.author_name_nested_deconstructible_1])
|
||||
after = self.make_project_state([self.author_name_nested_deconstructible_changed_arg])
|
||||
autodetector = MigrationAutodetector(before, after)
|
||||
changes = autodetector._detect_changes()
|
||||
self.assertEqual(len(changes), 1)
|
||||
|
||||
# Additional args should also be reported as a change
|
||||
before = self.make_project_state([self.author_name_nested_deconstructable_1])
|
||||
after = self.make_project_state([self.author_name_nested_deconstructable_extra_arg])
|
||||
before = self.make_project_state([self.author_name_nested_deconstructible_1])
|
||||
after = self.make_project_state([self.author_name_nested_deconstructible_extra_arg])
|
||||
autodetector = MigrationAutodetector(before, after)
|
||||
changes = autodetector._detect_changes()
|
||||
self.assertEqual(len(changes), 1)
|
||||
|
||||
# Differences that exist solely within the kwargs dict of a deconstructed object
|
||||
# should be reported as changes
|
||||
before = self.make_project_state([self.author_name_nested_deconstructable_1])
|
||||
after = self.make_project_state([self.author_name_nested_deconstructable_changed_kwarg])
|
||||
before = self.make_project_state([self.author_name_nested_deconstructible_1])
|
||||
after = self.make_project_state([self.author_name_nested_deconstructible_changed_kwarg])
|
||||
autodetector = MigrationAutodetector(before, after)
|
||||
changes = autodetector._detect_changes()
|
||||
self.assertEqual(len(changes), 1)
|
||||
|
||||
# Additional kwargs should also be reported as a change
|
||||
before = self.make_project_state([self.author_name_nested_deconstructable_1])
|
||||
after = self.make_project_state([self.author_name_nested_deconstructable_extra_kwarg])
|
||||
before = self.make_project_state([self.author_name_nested_deconstructible_1])
|
||||
after = self.make_project_state([self.author_name_nested_deconstructible_extra_kwarg])
|
||||
autodetector = MigrationAutodetector(before, after)
|
||||
changes = autodetector._detect_changes()
|
||||
self.assertEqual(len(changes), 1)
|
||||
|
||||
def test_deconstruct_type(self):
|
||||
"""
|
||||
#22951 -- Uninstanted classes with deconstruct are correctly returned
|
||||
#22951 -- Uninstantiated classes with deconstruct are correctly returned
|
||||
by deep_deconstruct during serialization.
|
||||
"""
|
||||
author = ModelState(
|
||||
|
|
|
@@ -635,9 +635,9 @@ class WriterTests(SimpleTestCase):
 # Yes, it doesn't make sense to use a class as a default for a
 # CharField. It does make sense for custom fields though, for example
 # an enumfield that takes the enum class as an argument.
-class DeconstructableInstances(object):
+class DeconstructibleInstances(object):
 def deconstruct(self):
-return ('DeconstructableInstances', [], {})
+return ('DeconstructibleInstances', [], {})

-string = MigrationWriter.serialize(models.CharField(default=DeconstructableInstances))[0]
-self.assertEqual(string, "models.CharField(default=migrations.test_writer.DeconstructableInstances)")
+string = MigrationWriter.serialize(models.CharField(default=DeconstructibleInstances))[0]
+self.assertEqual(string, "models.CharField(default=migrations.test_writer.DeconstructibleInstances)")
@@ -251,7 +251,7 @@ if Image:
 name = models.CharField(max_length=50)
 mugshot = TestImageField(storage=temp_storage, upload_to='tests')

-class AbsctractPersonWithHeight(models.Model):
+class AbstractPersonWithHeight(models.Model):
 """
 Abstract model that defines an ImageField with only one dimension field
 to make sure the dimension update is correctly run on concrete subclass
@@ -264,9 +264,9 @@ if Image:
 class Meta:
 abstract = True

-class PersonWithHeight(AbsctractPersonWithHeight):
+class PersonWithHeight(AbstractPersonWithHeight):
 """
-Concrete model that subclass an abctract one with only on dimension
+Concrete model that subclass an abstract one with only on dimension
 field.
 """
 name = models.CharField(max_length=50)
@@ -298,7 +298,7 @@ if Image:
 Model that:
 * Defines two ImageFields
 * Defines the height/width fields before the ImageFields
-* Has a nullalble ImageField
+* Has a nullable ImageField
 """
 name = models.CharField(max_length=50)
 mugshot_height = models.PositiveSmallIntegerField()
@@ -39,7 +39,7 @@ class DefaultRelatedNameTests(TestCase):
 except AttributeError:
 self.fail("Book should have a model_options_bookstores relation.")

-def test_inheritance_with_overrided_default_related_name(self):
+def test_inheritance_with_overridden_default_related_name(self):
 try:
 self.book.editor_stores
 except AttributeError:
@@ -107,7 +107,7 @@ class ModelAdminTests(TestCase):
 self.assertTrue(ma.lookup_allowed('name__nonexistent', 'test_value'))

 def test_field_arguments(self):
-# If we specify the fields argument, fieldsets_add and fielsets_change should
+# If we specify the fields argument, fieldsets_add and fieldsets_change should
 # just stick the fields into a formsets structure and return it.
 class BandAdmin(ModelAdmin):
 fields = ['name']
@@ -1,8 +1,8 @@
 """
 Regression tests for proper working of ForeignKey(null=True). Tests these bugs:

-* #7512: including a nullable foreign key reference in Meta ordering has un
-xpected results
+* #7512: including a nullable foreign key reference in Meta ordering has
+unexpected results

 """
 from __future__ import unicode_literals
@@ -199,7 +199,7 @@ class TestQuerying(TestCase):


 @skipUnlessPG92
-class TestQueringWithRanges(TestCase):
+class TestQueryingWithRanges(TestCase):
 def test_date_range(self):
 objs = [
 RangeLookupsModel.objects.create(date='2015-01-01'),
@@ -33,7 +33,7 @@ class ProxyModelTests(TestCase):
 DEFAULT_DB_ALIAS).as_sql()
 self.assertEqual(my_person_sql, person_sql)

-def test_inheretance_new_table(self):
+def test_inheritance_new_table(self):
 """
 The StatusPerson models should have its own table (it's using ORM-level
 inheritance).
@@ -60,7 +60,7 @@ class Note(models.Model):
 def __init__(self, *args, **kwargs):
 super(Note, self).__init__(*args, **kwargs)
 # Regression for #13227 -- having an attribute that
-# is unpickleable doesn't stop you from cloning queries
+# is unpicklable doesn't stop you from cloning queries
 # that use objects of that type as an argument.
 self.lock = threading.Lock()
@@ -1884,7 +1884,7 @@ class Queries6Tests(TestCase):
 def test_tickets_8921_9188(self):
 # Incorrect SQL was being generated for certain types of exclude()
 # queries that crossed multi-valued relations (#8921, #9188 and some
-# pre-emptively discovered cases).
+# preemptively discovered cases).

 self.assertQuerysetEqual(
 PointerA.objects.filter(connection__pointerb__id=1),
@@ -3104,7 +3104,7 @@ class NullJoinPromotionOrTest(TestCase):
 p1 = Program.objects.create(identifier=i1)
 c1 = Channel.objects.create(identifier=i1)
 p2 = Program.objects.create(identifier=i2)
-# Test OR + doubleneq. The expected result is that channel is LOUTER
+# Test OR + doubleneg. The expected result is that channel is LOUTER
 # joined, program INNER joined
 qs1_filter = Identifier.objects.filter(
 Q(program__id=p2.id, channel__id=c1.id)
@@ -3191,7 +3191,7 @@ class JoinReuseTest(TestCase):


 class DisjunctionPromotionTests(TestCase):
-def test_disjuction_promotion_select_related(self):
+def test_disjunction_promotion_select_related(self):
 fk1 = FK1.objects.create(f1='f1', f2='f2')
 basea = BaseA.objects.create(a=fk1)
 qs = BaseA.objects.filter(Q(a=fk1) | Q(b=2))
@@ -57,7 +57,7 @@ class PickleabilityTestCase(TestCase):

 def test_model_pickle(self):
 """
-Test that a model not defined on module level is pickleable.
+Test that a model not defined on module level is picklable.
 """
 original = Container.SomeModel(pk=1)
 dumped = pickle.dumps(original)
@@ -618,7 +618,7 @@ class HostValidationTests(SimpleTestCase):
 '12.34.56.78:443',
 '[2001:19f0:feee::dead:beef:cafe]',
 '[2001:19f0:feee::dead:beef:cafe]:8080',
-'xn--4ca9at.com', # Punnycode for öäü.com
+'xn--4ca9at.com', # Punycode for öäü.com
 'anything.multitenant.com',
 'multitenant.com',
 'insensitive.com',
@@ -688,7 +688,7 @@ class HostValidationTests(SimpleTestCase):
 '12.34.56.78:443',
 '[2001:19f0:feee::dead:beef:cafe]',
 '[2001:19f0:feee::dead:beef:cafe]:8080',
-'xn--4ca9at.com', # Punnycode for öäü.com
+'xn--4ca9at.com', # Punycode for öäü.com
 ]

 for host in legit_hosts:
@@ -766,7 +766,7 @@ class HostValidationTests(SimpleTestCase):
 'example.com',
 '12.34.56.78',
 '[2001:19f0:feee::dead:beef:cafe]',
-'xn--4ca9at.com', # Punnycode for öäü.com
+'xn--4ca9at.com', # Punycode for öäü.com
 ]:
 request = HttpRequest()
 request.META = {'HTTP_HOST': host}
@@ -297,7 +297,7 @@ The end."""),

 (im2m_obj, 470, M2MIntermediateData, None),

-# testing post- and prereferences and extra fields
+# testing post- and pre-references and extra fields
 (im_obj, 480, Intermediate, {'right': 300, 'left': 470}),
 (im_obj, 481, Intermediate, {'right': 300, 'left': 490}),
 (im_obj, 482, Intermediate, {'right': 500, 'left': 470}),
@@ -168,7 +168,7 @@ class SignalTests(BaseSignalTest):
 data.append(instance)

 try:
-c1 = Car.objects.create(make="Volkswagon", model="Passat")
+c1 = Car.objects.create(make="Volkswagen", model="Passat")
 self.assertEqual(data, [c1, c1])
 finally:
 signals.pre_save.disconnect(decorated_handler)
@@ -1,5 +1,5 @@
 """
-A subset of the tests in tests/servers/tests exercicing
+A subset of the tests in tests/servers/tests exercising
 django.contrib.staticfiles.testing.StaticLiveServerTestCase instead of
 django.test.LiveServerTestCase.
 """
@@ -155,7 +155,7 @@ class SimpleTemplateResponseTest(SimpleTestCase):

 def test_pickling(self):
 # Create a template response. The context is
-# known to be unpickleable (e.g., a function).
+# known to be unpicklable (e.g., a function).
 response = SimpleTemplateResponse('first/test.html', {
 'value': 123,
 'fn': datetime.now,
@@ -267,7 +267,7 @@ class TemplateResponseTest(SimpleTestCase):

 def test_pickling(self):
 # Create a template response. The context is
-# known to be unpickleable (e.g., a function).
+# known to be unpicklable (e.g., a function).
 response = TemplateResponse(self.factory.get('/'),
 'first/test.html', {
 'value': 123,
@@ -157,11 +157,11 @@ class DiscoverRunnerTest(TestCase):
 self.assertIn('test_2', suite[8].id(),
 msg="Methods of unittest cases should be reversed.")

-def test_overrideable_test_suite(self):
+def test_overridable_test_suite(self):
 self.assertEqual(DiscoverRunner().test_suite, TestSuite)

-def test_overrideable_test_runner(self):
+def test_overridable_test_runner(self):
 self.assertEqual(DiscoverRunner().test_runner, TextTestRunner)

-def test_overrideable_test_loader(self):
+def test_overridable_test_loader(self):
 self.assertEqual(DiscoverRunner().test_loader, defaultTestLoader)
@@ -127,7 +127,7 @@ class LegacyDatabaseTests(TestCase):
 self.assertEqual(event.dt.replace(tzinfo=EAT), dt)

 @skipIfDBFeature('supports_timezones')
-def test_aware_datetime_unspported(self):
+def test_aware_datetime_unsupported(self):
 dt = datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT)
 with self.assertRaises(ValueError):
 Event.objects.create(dt=dt)
@@ -129,7 +129,7 @@ class Unmanaged1(models.Model):
 db_table = "unmanaged_models_proxy1"


-# Unmanged with an m2m to unmanaged: the intermediary table won't be created.
+# Unmanaged with an m2m to unmanaged: the intermediary table won't be created.
 class Unmanaged2(models.Model):
 mm = models.ManyToManyField(Unmanaged1)
@@ -29,7 +29,7 @@ Fragments
 <center> ![image](http://iontech.files.wordpress.com/2013/01/androidfragmentation1-264x300.png) </center>
 Fragments encompass both layout resource and Java source. Hence, unlike ``, they allow us to reuse the View components along with their functionality, if needed.
 Fragments were first introduced in Honeycomb(API 11), living under the `android.app` package.
-**Note**: API 11 implies that Fragments have no support for devices less than Honeycomb and, for the record, as of writing this post, [more than 50% of Android devices worldwide run versions of Android below Honeycomb](http://developer.android.com/about/dashboards/index.html). Developer dissapointed? You don't have to be, cause google has been cautious enough to add the Fragment APIs to the support library. Yay!
+**Note**: API 11 implies that Fragments have no support for devices less than Honeycomb and, for the record, as of writing this post, [more than 50% of Android devices worldwide run versions of Android below Honeycomb](http://developer.android.com/about/dashboards/index.html). Developer disappointed? You don't have to be, cause google has been cautious enough to add the Fragment APIs to the support library. Yay!

 In the support library Fragment APIs sit in the `android.support.v4.app` package. This post assumes that your `minSdk` support is below API 11. Hence we concentrate on the Fragment APIs of the support library.
@@ -75,7 +75,7 @@ class TestUtilsHttp(unittest.TestCase):
 for bad_url in ('http://example.com',
 'http:///example.com',
 'https://example.com',
-'ftp://exampel.com',
+'ftp://example.com',
 r'\\example.com',
 r'\\\example.com',
 r'/\\/example.com',
@@ -96,7 +96,7 @@ class TestUtilsHttp(unittest.TestCase):
 self.assertFalse(http.is_safe_url(bad_url, host='testserver'), "%s should be blocked" % bad_url)
 for good_url in ('/view/?param=http://example.com',
 '/view/?param=https://example.com',
-'/view?param=ftp://exampel.com',
+'/view?param=ftp://example.com',
 'view/?param=//example.com',
 'https://testserver/',
 'HTTPS://testserver/',