Fixed many typos in comments and docstrings.

Thanks to Piotr Kasprzyk for help with the patch.
Rodolfo Carvalho 2014-03-02 15:25:53 +01:00 committed by Tim Graham
parent 6acaa52386
commit 0d91225892
121 changed files with 208 additions and 208 deletions

View File

@@ -54,7 +54,7 @@ class AdminSeleniumWebDriverTestCase(StaticLiveServerCase):
    def wait_for(self, css_selector, timeout=10):
        """
-        Helper function that blocks until an css selector is found on the page.
+        Helper function that blocks until a CSS selector is found on the page.
        """
        from selenium.webdriver.common.by import By
        from selenium.webdriver.support import expected_conditions as ec
@@ -65,7 +65,7 @@ class AdminSeleniumWebDriverTestCase(StaticLiveServerCase):
    def wait_for_text(self, css_selector, text, timeout=10):
        """
-        Helper function that blocks until the text is found in the css selector.
+        Helper function that blocks until the text is found in the CSS selector.
        """
        from selenium.webdriver.common.by import By
        from selenium.webdriver.support import expected_conditions as ec
@@ -77,7 +77,7 @@ class AdminSeleniumWebDriverTestCase(StaticLiveServerCase):
    def wait_for_value(self, css_selector, text, timeout=10):
        """
-        Helper function that blocks until the value is found in the css selector.
+        Helper function that blocks until the value is found in the CSS selector.
        """
        from selenium.webdriver.common.by import By
        from selenium.webdriver.support import expected_conditions as ec
@@ -96,7 +96,7 @@ class AdminSeleniumWebDriverTestCase(StaticLiveServerCase):
            # Wait for the next page to be loaded
            self.wait_loaded_tag('body')
        except TimeoutException:
-            # IE7 occasionnally returns an error "Internet Explorer cannot
+            # IE7 occasionally returns an error "Internet Explorer cannot
            # display the webpage" and doesn't load the next page. We just
            # ignore it.
            pass

View File

@@ -108,7 +108,7 @@ def flatten_fieldsets(fieldsets):
def get_deleted_objects(objs, opts, user, admin_site, using):
    """
    Find all objects related to ``objs`` that should also be deleted. ``objs``
-    must be a homogenous iterable of objects (e.g. a QuerySet).
+    must be a homogeneous iterable of objects (e.g. a QuerySet).
    Returns a nested list of strings suitable for display in the
    template with the ``unordered_list`` filter.

View File

@@ -56,7 +56,7 @@ def login_required(function=None, redirect_field_name=REDIRECT_FIELD_NAME, login
def permission_required(perm, login_url=None, raise_exception=False):
    """
    Decorator for views that checks whether a user has a particular permission
-    enabled, redirecting to the log-in page if neccesary.
+    enabled, redirecting to the log-in page if necessary.
    If the raise_exception parameter is given the PermissionDenied exception
    is raised.
    """

View File

@@ -186,7 +186,7 @@ class BasePasswordHasher(object):
    def salt(self):
        """
-        Generates a cryptographically secure nonce salt in ascii
+        Generates a cryptographically secure nonce salt in ASCII
        """
        return get_random_string()

View File

@@ -39,7 +39,7 @@ def mock_inputs(inputs):
    def mock_input(prompt):
        # prompt should be encoded in Python 2. This line will raise an
-        # Exception if prompt contains unencoded non-ascii on Python 2.
+        # Exception if prompt contains unencoded non-ASCII on Python 2.
        prompt = str(prompt)
        assert str('__proxy__') not in prompt
        response = ''

View File

@@ -90,7 +90,7 @@ def approve(request, comment_id, next=None):
        template.RequestContext(request)
    )
-# The following functions actually perform the various flag/aprove/delete
+# The following functions actually perform the various flag/approve/delete
# actions. They've been broken out into separate functions to that they
# may be called from admin actions.

View File

@@ -51,13 +51,13 @@ class FlatpageCSRFTests(TestCase):
        self.assertContains(response, "<p>Isn't it sekrit!</p>")
    def test_fallback_flatpage(self):
-        "A flatpage can be served by the fallback middlware"
+        "A flatpage can be served by the fallback middleware"
        response = self.client.get('/flatpage/')
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, "<p>Isn't it flat!</p>")
    def test_fallback_non_existent_flatpage(self):
-        "A non-existent flatpage raises a 404 when served by the fallback middlware"
+        "A non-existent flatpage raises a 404 when served by the fallback middleware"
        response = self.client.get('/no_such_flatpage/')
        self.assertEqual(response.status_code, 404)

View File

@@ -48,13 +48,13 @@ class FlatpageMiddlewareTests(TestCase):
        self.assertContains(response, "<p>Isn't it sekrit!</p>")
    def test_fallback_flatpage(self):
-        "A flatpage can be served by the fallback middlware"
+        "A flatpage can be served by the fallback middleware"
        response = self.client.get('/flatpage/')
        self.assertEqual(response.status_code, 200)
        self.assertContains(response, "<p>Isn't it flat!</p>")
    def test_fallback_non_existent_flatpage(self):
-        "A non-existent flatpage raises a 404 when served by the fallback middlware"
+        "A non-existent flatpage raises a 404 when served by the fallback middleware"
        response = self.client.get('/no_such_flatpage/')
        self.assertEqual(response.status_code, 404)
@@ -116,12 +116,12 @@ class FlatpageMiddlewareAppendSlashTests(TestCase):
        self.assertEqual(response.status_code, 404)
    def test_redirect_fallback_flatpage(self):
-        "A flatpage can be served by the fallback middlware and should add a slash"
+        "A flatpage can be served by the fallback middleware and should add a slash"
        response = self.client.get('/flatpage')
        self.assertRedirects(response, '/flatpage/', status_code=301)
    def test_redirect_fallback_non_existent_flatpage(self):
-        "A non-existent flatpage raises a 404 when served by the fallback middlware and should not add a slash"
+        "A non-existent flatpage raises a 404 when served by the fallback middleware and should not add a slash"
        response = self.client.get('/no_such_flatpage')
        self.assertEqual(response.status_code, 404)

View File

@@ -88,7 +88,7 @@ class FlatpageTemplateTagTests(TestCase):
    @skipIfCustomUser
    def test_get_flatpages_with_prefix_for_user(self):
-        "The flatpage template tag retrive prefixed flatpages for an authenticated user"
+        "The flatpage template tag retrieve prefixed flatpages for an authenticated user"
        me = User.objects.create_user('testuser', 'test@example.com', 's3krit')
        out = Template(
            "{% load flatpages %}"

View File

@@ -53,7 +53,7 @@ class FlatpageViewTests(TestCase):
        self.assertEqual(response.status_code, 404)
    def test_fallback_non_existent_flatpage(self):
-        "A non-existent flatpage won't be served if the fallback middlware is disabled"
+        "A non-existent flatpage won't be served if the fallback middleware is disabled"
        response = self.client.get('/no_such_flatpage/')
        self.assertEqual(response.status_code, 404)
@@ -109,7 +109,7 @@ class FlatpageViewAppendSlashTests(TestCase):
        self.assertEqual(response.status_code, 404)
    def test_redirect_fallback_non_existent_flatpage(self):
-        "A non-existent flatpage won't be served if the fallback middlware is disabled and should not add a slash"
+        "A non-existent flatpage won't be served if the fallback middleware is disabled and should not add a slash"
        response = self.client.get('/no_such_flatpage')
        self.assertEqual(response.status_code, 404)

View File

@@ -58,7 +58,7 @@ class PreviewTests(TestCase):
        """
        Test contrib.formtools.preview form retrieval.
-        Use the client library to see if we can sucessfully retrieve
+        Use the client library to see if we can successfully retrieve
        the form (mostly testing the setup ROOT_URLCONF
        process). Verify that an additional hidden input field
        is created to manage the stage.

View File

@@ -368,7 +368,7 @@ class WizardView(TemplateView):
    def get_form_initial(self, step):
        """
        Returns a dictionary which will be passed to the form for `step`
-        as `initial`. If no initial data was provied while initializing the
+        as `initial`. If no initial data was provided while initializing the
        form wizard, a empty dictionary will be returned.
        """
        return self.initial_dict.get(step, {})
@@ -376,7 +376,7 @@ class WizardView(TemplateView):
    def get_form_instance(self, step):
        """
        Returns a object which will be passed to the form for `step`
-        as `instance`. If no instance object was provied while initializing
+        as `instance`. If no instance object was provided while initializing
        the form wizard, None will be returned.
        """
        return self.instance_dict.get(step, None)

View File

@@ -148,7 +148,7 @@ class BaseSpatialOperations(object):
class SpatialRefSysMixin(object):
    """
    The SpatialRefSysMixin is a class used by the database-dependent
-    SpatialRefSys objects to reduce redundnant code.
+    SpatialRefSys objects to reduce redundant code.
    """
    # For pulling out the spheroid from the spatial reference string. This
    # regular expression is used only if the user does not have GDAL installed.
@@ -304,7 +304,7 @@ class SpatialRefSysMixin(object):
    def get_units(cls, wkt):
        """
        Class method used by GeometryField on initialization to
-        retrive the units on the given WKT, without having to use
+        retrieve the units on the given WKT, without having to use
        any of the database fields.
        """
        if gdal.HAS_GDAL:

View File

@@ -196,7 +196,7 @@ class OracleOperations(DatabaseOperations, BaseSpatialOperations):
        else:
            dist_param = value
-        # dwithin lookups on oracle require a special string parameter
+        # dwithin lookups on Oracle require a special string parameter
        # that starts with "distance=".
        if lookup_type == 'dwithin':
            dist_param = 'distance=%s' % dist_param
@@ -220,7 +220,7 @@ class OracleOperations(DatabaseOperations, BaseSpatialOperations):
                placeholder = '%s(%%s, %s)' % (self.transform, f.srid)
            else:
                placeholder = '%s'
-            # No geometry value used for F expression, substitue in
+            # No geometry value used for F expression, substitute in
            # the column name instead.
            return placeholder % self.get_expression_column(value)
        else:

View File

@@ -51,7 +51,7 @@ class PostGISIntrospection(DatabaseIntrospection):
        if not self.postgis_types_reverse:
            # If the PostGIS types reverse dictionary is not populated, do so
            # now. In order to prevent unnecessary requests upon connection
-            # intialization, the `data_types_reverse` dictionary is not updated
+            # initialization, the `data_types_reverse` dictionary is not updated
            # with the PostGIS custom types until introspection is actually
            # performed -- in other words, when this function is called.
            self.postgis_types_reverse = self.get_postgis_types()

View File

@@ -290,7 +290,7 @@ class PostGISOperations(DatabaseOperations, BaseSpatialOperations):
    def convert_extent3d(self, box3d):
        """
        Returns a 6-tuple extent for the `Extent3D` aggregate by converting
-        the 3d bounding-box text returnded by PostGIS (`box3d` argument), for
+        the 3d bounding-box text returned by PostGIS (`box3d` argument), for
        example: "BOX3D(-90.0 30.0 1, -85.0 40.0 2)".
        """
        ll, ur = box3d[6:-1].split(',')
@@ -342,7 +342,7 @@ class PostGISOperations(DatabaseOperations, BaseSpatialOperations):
        This is the most complex implementation of the spatial backends due to
        what is supported on geodetic geometry columns vs. what's available on
        projected geometry columns. In addition, it has to take into account
-        the newly introduced geography column type introudced in PostGIS 1.5.
+        the geography column type newly introduced in PostGIS 1.5.
        """
        # Getting the distance parameter and any options.
        if len(dist_val) == 1:
@@ -561,7 +561,7 @@ class PostGISOperations(DatabaseOperations, BaseSpatialOperations):
        """
        agg_name = agg.__class__.__name__
        if not self.check_aggregate_support(agg):
-            raise NotImplementedError('%s spatial aggregate is not implmented for this backend.' % agg_name)
+            raise NotImplementedError('%s spatial aggregate is not implemented for this backend.' % agg_name)
        agg_name = agg_name.lower()
        if agg_name == 'union':
            agg_name += 'agg'

View File

@@ -116,7 +116,7 @@ class SpatiaLiteCreation(DatabaseCreation):
    def spatialite_init_file(self):
        # SPATIALITE_SQL may be placed in settings to tell GeoDjango
-        # to use a specific path to the SpatiaLite initilization SQL.
+        # to use a specific path to the SpatiaLite initialization SQL.
        return getattr(settings, 'SPATIALITE_SQL',
                       'init_spatialite-%s.%s.sql' %
                       self.connection.ops.spatial_version[:2])

View File

@@ -233,7 +233,7 @@ class SpatiaLiteOperations(DatabaseOperations, BaseSpatialOperations):
                placeholder = '%s(%%s, %s)' % (self.transform, f.srid)
            else:
                placeholder = '%s'
-            # No geometry value used for F expression, substitue in
+            # No geometry value used for F expression, substitute in
            # the column name instead.
            return placeholder % self.get_expression_column(value)
        else:
@@ -310,7 +310,7 @@ class SpatiaLiteOperations(DatabaseOperations, BaseSpatialOperations):
        """
        agg_name = agg.__class__.__name__
        if not self.check_aggregate_support(agg):
-            raise NotImplementedError('%s spatial aggregate is not implmented for this backend.' % agg_name)
+            raise NotImplementedError('%s spatial aggregate is not implemented for this backend.' % agg_name)
        agg_name = agg_name.lower()
        if agg_name == 'union':
            agg_name += 'agg'

View File

@@ -73,7 +73,7 @@ class GeoQuerySet(QuerySet):
    def collect(self, **kwargs):
        """
        Performs an aggregate collect operation on the given geometry field.
-        This is analagous to a union operation, but much faster because
+        This is analogous to a union operation, but much faster because
        boundaries are not dissolved.
        """
        return self._spatial_aggregate(aggregates.Collect, **kwargs)
@@ -137,7 +137,7 @@ class GeoQuerySet(QuerySet):
    def geojson(self, precision=8, crs=False, bbox=False, **kwargs):
        """
-        Returns a GeoJSON representation of the geomtry field in a `geojson`
+        Returns a GeoJSON representation of the geometry field in a `geojson`
        attribute on each element of the GeoQuerySet.
        The `crs` and `bbox` keywords may be set to True if the user wants

View File

@@ -21,7 +21,7 @@ class GeoSQLCompiler(compiler.SQLCompiler):
        If 'with_aliases' is true, any column names that are duplicated
        (without the table names) are given unique aliases. This is needed in
-        some cases to avoid ambiguitity with nested queries.
+        some cases to avoid ambiguity with nested queries.
        This routine is overridden from Query to handle customized selection of
        geometry columns.

View File

@@ -21,7 +21,7 @@ class GeoFeedMixin(object):
    def add_georss_point(self, handler, coords, w3c_geo=False):
        """
        Adds a GeoRSS point with the given coords using the given handler.
-        Handles the differences between simple GeoRSS and the more pouplar
+        Handles the differences between simple GeoRSS and the more popular
        W3C Geo specification.
        """
        if w3c_geo:

View File

@@ -12,7 +12,7 @@ from django.utils import six
# Helper routines for retrieving pointers and/or values from
# arguments passed in by reference.
def arg_byref(args, offset=-1):
-    "Returns the pointer argument's by-refernece value."
+    "Returns the pointer argument's by-reference value."
    return args[offset]._obj.value
@@ -56,8 +56,8 @@ def check_string(result, func, cargs, offset=-1, str_result=False):
        ptr = ptr_byref(cargs, offset)
        # Getting the string value
        s = ptr.value
-    # Correctly freeing the allocated memory beind GDAL pointer
-    # w/the VSIFree routine.
+    # Correctly freeing the allocated memory behind GDAL pointer
+    # with the VSIFree routine.
    if ptr:
        lgdal.VSIFree(ptr)
    return s
@@ -131,7 +131,7 @@ def check_str_arg(result, func, cargs):
    """
    This is for the OSRGet[Angular|Linear]Units functions, which
    require that the returned string pointer not be freed. This
-    returns both the double and tring values.
+    returns both the double and string values.
    """
    dbl = result
    ptr = cargs[-1]._obj

View File

@@ -120,7 +120,7 @@ class SpatialReference(GDALBase):
        0
        >>> print(srs['UNIT|AUTHORITY']) # For the units authority, have to use the pipe symbole.
        EPSG
-        >>> print(srs['UNIT|AUTHORITY', 1]) # The authority value for the untis
+        >>> print(srs['UNIT|AUTHORITY', 1]) # The authority value for the units
        9122
        """
        if isinstance(target, tuple):

View File

@@ -20,7 +20,7 @@ class EnvelopeTest(unittest.TestCase):
        self.e = Envelope(0, 0, 5, 5)
    def test01_init(self):
-        "Testing Envelope initilization."
+        "Testing Envelope initialization."
        e1 = Envelope((0, 0, 5, 5))
        Envelope(0, 0, 5, 5)
        Envelope(0, '0', '5', 5) # Thanks to ww for this

View File

@@ -300,7 +300,7 @@ class OGRGeomTest(unittest.TestCase, TestDataMixin):
        self.assertEqual(4269, mpoly.srid)
        self.assertEqual('NAD83', mpoly.srs.name)
-        # Incrementing through the multipolyogn after the spatial reference
+        # Incrementing through the multipolygon after the spatial reference
        # has been re-assigned.
        for poly in mpoly:
            self.assertEqual(mpoly.srs.wkt, poly.srs.wkt)

View File

@@ -192,7 +192,7 @@ class GeoIP(object):
    def country(self, query):
        """
-        Returns a dictonary with with the country code and name when given an
+        Returns a dictionary with with the country code and name when given an
        IP address or a Fully Qualified Domain Name (FQDN). For example, both
        '24.124.1.80' and 'djangoproject.com' are valid parameters.
        """

View File

@@ -56,7 +56,7 @@ GeoIPRecord_delete.restype = None
def check_record(result, func, cargs):
    if result:
        # Checking the pointer to the C structure, if valid pull out elements
-        # into a dicionary.
+        # into a dictionary.
        rec = result.contents
        record = dict((fld, getattr(rec, fld)) for fld, ctype in rec._fields_)

View File

@@ -78,7 +78,7 @@ class GEOSGeometry(GEOSBase, ListMixin):
            else:
                raise ValueError('String or unicode input unrecognized as WKT EWKT, and HEXEWKB.')
        elif isinstance(geo_input, GEOM_PTR):
-            # When the input is a pointer to a geomtry (GEOM_PTR).
+            # When the input is a pointer to a geometry (GEOM_PTR).
            g = geo_input
        elif isinstance(geo_input, memoryview):
            # When the input is a buffer (WKB).
@@ -682,7 +682,7 @@ class GEOSGeometry(GEOSBase, ListMixin):
    def length(self):
        """
        Returns the length of this Geometry (e.g., 0 for point, or the
-        circumfrence of a Polygon).
+        circumference of a Polygon).
        """
        return capi.geos_length(self.ptr, byref(c_double()))

View File

@@ -38,7 +38,7 @@ else:
    raise ImportError('Unsupported OS "%s"' % os.name)
# Using the ctypes `find_library` utility to find the path to the GEOS
-# shared library. This is better than manually specifiying each library name
+# shared library. This is better than manually specifying each library name
# and extension (e.g., libgeos_c.[so|so.1|dylib].).
if lib_names:
    for lib_name in lib_names:
@@ -121,7 +121,7 @@ def get_pointer_arr(n):
    return GeomArr()
# Returns the string version of the GEOS library. Have to set the restype
-# explicitly to c_char_p to ensure compatibility accross 32 and 64-bit platforms.
+# explicitly to c_char_p to ensure compatibility across 32 and 64-bit platforms.
geos_version = lgeos.GEOSversion
geos_version.argtypes = None
geos_version.restype = c_char_p

View File

@@ -68,7 +68,7 @@ class GEOSTest(unittest.TestCase, TestDataMixin):
        # Because pointers have been set to NULL, an exception should be
        # raised when we try to access it. Raising an exception is
-        # preferrable to a segmentation fault that commonly occurs when
+        # preferable to a segmentation fault that commonly occurs when
        # a C method is given a NULL memory reference.
        for fg in (fg1, fg2):
            # Equivalent to `fg.ptr`
@@ -167,7 +167,7 @@ class GEOSTest(unittest.TestCase, TestDataMixin):
        "Testing creation from HEX."
        for g in self.geometries.hex_wkt:
            geom_h = GEOSGeometry(g.hex)
-            # we need to do this so decimal places get normalised
+            # we need to do this so decimal places get normalized
            geom_t = fromstr(g.wkt)
            self.assertEqual(geom_t.wkt, geom_h.wkt)
@@ -176,7 +176,7 @@ class GEOSTest(unittest.TestCase, TestDataMixin):
        for g in self.geometries.hex_wkt:
            wkb = memoryview(a2b_hex(g.hex.encode()))
            geom_h = GEOSGeometry(wkb)
-            # we need to do this so decimal places get normalised
+            # we need to do this so decimal places get normalized
            geom_t = fromstr(g.wkt)
            self.assertEqual(geom_t.wkt, geom_h.wkt)
@@ -788,7 +788,7 @@ class GEOSTest(unittest.TestCase, TestDataMixin):
        ls = LineString((0, 0), (1, 1))
        self.assertAlmostEqual(1.41421356237, ls.length, 11)
-        # Should be circumfrence of Polygon
+        # Should be circumference of Polygon
        poly = Polygon(LinearRing((0, 0), (0, 1), (1, 1), (1, 0), (0, 0)))
        self.assertEqual(4.0, poly.length)

View File

@@ -158,7 +158,7 @@ class GPolyline(GOverlayBase):
          opacity:
            The opacity of the polyline, between 0 and 1. Defaults to 1.
        """
-        # If a GEOS geometry isn't passed in, try to contsruct one.
+        # If a GEOS geometry isn't passed in, try to construct one.
        if isinstance(geom, six.string_types):
            geom = fromstr(geom)
        if isinstance(geom, (tuple, list)):

View File

@@ -27,7 +27,7 @@
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
"""
-Distance and Area objects to allow for sensible and convienient calculation
+Distance and Area objects to allow for sensible and convenient calculation
and conversions.
Authors: Robert Coup, Justin Bronn, Riccardo Di Virgilio

View File

@@ -22,7 +22,7 @@ OpenLayers.Util.properFeatures = function(features, geom_type) {
/**
 * Class: OpenLayers.Format.DjangoWKT
 * Class for reading Well-Known Text, with workarounds to successfully parse
- * geometries and collections as returnes by django.contrib.gis.geos.
+ * geometries and collections as returned by django.contrib.gis.geos.
 *
 * Inherits from:
 *  - <OpenLayers.Format.WKT>
@@ -190,7 +190,7 @@ function MapWidget(options) {
        scrollable: true
    };
-    // Altering using user-provied options
+    // Altering using user-provided options
    for (var property in options) {
        if (options.hasOwnProperty(property)) {
            this.options[property] = options[property];

View File

@@ -24,7 +24,7 @@ class DistanceTest(TestCase):
    if HAS_GEOS and HAS_SPATIAL_DB:
        # A point we are testing distances with -- using a WGS84
-        # coordinate that'll be implicitly transormed to that to
+        # coordinate that'll be implicitly transformed to that to
        # the coordinate system of the field, EPSG:32140 (Texas South Central
        # w/units in meters)
        stx_pnt = GEOSGeometry('POINT (-95.370401017314293 29.704867409475465)', 4326)

View File

@@ -345,7 +345,7 @@ class GeoLookupTest(TestCase):
        nmi = State.objects.create(name='Northern Mariana Islands', poly=None)
        self.assertEqual(nmi.poly, None)
-        # Assigning a geomery and saving -- then UPDATE back to NULL.
+        # Assigning a geometry and saving -- then UPDATE back to NULL.
        nmi.poly = 'POLYGON((0 0,1 0,1 1,1 0,0 0))'
        nmi.save()
        State.objects.filter(name='Northern Mariana Islands').update(poly=None)
@@ -359,7 +359,7 @@ class GeoLookupTest(TestCase):
        pnt1 = fromstr('POINT (649287.0363174 4177429.4494686)', srid=2847)
        pnt2 = fromstr('POINT(-98.4919715741052 29.4333344025053)', srid=4326)
-        # Not passing in a geometry as first param shoud
+        # Not passing in a geometry as first param should
        # raise a type error when initializing the GeoQuerySet
        self.assertRaises(ValueError, Country.objects.filter, mpoly__relate=(23, 'foo'))

View File

@@ -76,7 +76,7 @@ class OGRInspectTest(TestCase):
        self.assertEqual(model_def, '\n'.join(expected))
    def test_time_field(self):
-        # Only possible to test this on PostGIS at the momemnt. MySQL
+        # Only possible to test this on PostGIS at the moment. MySQL
        # complains about permissions, and SpatiaLite/Oracle are
        # insanely difficult to get support compiled in for in GDAL.
        if not connections['default'].ops.postgis:

View File

@@ -28,7 +28,7 @@ class GoogleMapsTest(TestCase):
    @override_settings(GOOGLE_MAPS_API_KEY=GOOGLE_MAPS_API_KEY)
    def test_unicode_in_google_maps(self):
        """
-        Test that GoogleMap doesn't crash with non-ascii content.
+        Test that GoogleMap doesn't crash with non-ASCII content.
        """
        from django.contrib.gis.geos import Point
        from django.contrib.gis.maps.google.gmap import GoogleMap, GMarker

View File

@@ -1,5 +1,5 @@
"""
-Distance and Area objects to allow for sensible and convienient calculation
+Distance and Area objects to allow for sensible and convenient calculation
and conversions. Here are some tests.
"""
@@ -12,7 +12,7 @@ class DistanceTest(unittest.TestCase):
    "Testing the Distance object"
    def testInit(self):
-        "Testing initialisation from valid units"
+        "Testing initialization from valid units"
        d = Distance(m=100)
        self.assertEqual(d.m, 100)
@@ -33,7 +33,7 @@ class DistanceTest(unittest.TestCase):
        self.assertEqual(d.mm, 1000.0)
    def testInitInvalid(self):
-        "Testing initialisation from invalid units"
+        "Testing initialization from invalid units"
        self.assertRaises(AttributeError, D, banana=100)
    def testAccess(self):
@@ -149,7 +149,7 @@ class AreaTest(unittest.TestCase):
    "Testing the Area object"
    def testInit(self):
-        "Testing initialisation from valid units"
+        "Testing initialization from valid units"
        a = Area(sq_m=100)
        self.assertEqual(a.sq_m, 100)
@@ -160,7 +160,7 @@ class AreaTest(unittest.TestCase):
        self.assertEqual(a.sq_m, 258998811.0336)
    def testInitInvaliA(self):
-        "Testing initialisation from invalid units"
+        "Testing initialization from invalid units"
        self.assertRaises(AttributeError, A, banana=100)
    def testAccess(self):

View File

@@ -489,7 +489,7 @@ class LayerMapping(object):
        progress:
          When this keyword is set, status information will be printed giving
-          the number of features processed and sucessfully saved. By default,
+          the number of features processed and successfully saved. By default,
          progress information will pe printed every 1000 features processed,
          however, this default may be overridden by setting this keyword with an
          integer for the desired interval.

View File

@@ -111,7 +111,7 @@ class FileUploadHandler(object):
        Receive data from the streamed upload parser. ``start`` is the position
        in the file of the chunk.
        """
-        raise NotImplementedError('subclasses of FileUploadHandler must provide a recieve_data_chunk() method')
+        raise NotImplementedError('subclasses of FileUploadHandler must provide a receive_data_chunk() method')
    def file_complete(self, file_size):
        """

View File

@@ -162,7 +162,7 @@ class WSGIHandler(base.BaseHandler):
        if self._request_middleware is None:
            with self.initLock:
                try:
-                    # Check that middleware is still uninitialised.
+                    # Check that middleware is still uninitialized.
                    if self._request_middleware is None:
                        self.load_middleware()
                except:

View File

@@ -208,7 +208,7 @@ class BaseCommand(object):
    def __init__(self):
        self.style = color_style()
-        # `requires_model_validation` is deprecated in favour of
+        # `requires_model_validation` is deprecated in favor of
        # `requires_system_checks`. If both options are present, an error is
        # raised. Otherwise the present option is used. If none of them is
        # defined, the default value (True) is used.
@@ -218,7 +218,7 @@ class BaseCommand(object):
        if has_old_option:
            warnings.warn(
                '"requires_model_validation" is deprecated '
-                'in favour of "requires_system_checks".',
+                'in favor of "requires_system_checks".',
                PendingDeprecationWarning)
        if has_old_option and has_new_option:
            raise ImproperlyConfigured(

View File

@@ -96,7 +96,7 @@ class Command(NoArgsCommand):
                    field_type = "ForeignKey('%s'" % rel_to
                else:
                    # Calling `get_field_type` to get the field type string and any
-                    # additional paramters and notes.
+                    # additional parameters and notes.
                    field_type, field_params, field_notes = self.get_field_type(connection, table_name, row)
                    extra_params.update(field_params)
                    comment_notes.extend(field_notes)

View File

@@ -224,7 +224,7 @@ class Command(BaseCommand):
                        cursor.execute(statement)
                    tables.append(connection.introspection.table_name_converter(model._meta.db_table))
-            # We force a commit here, as that was the previous behaviour.
+            # We force a commit here, as that was the previous behavior.
            # If you can prove we don't need this, remove it.
            transaction.set_dirty(using=connection.alias)
        finally:

View File

@@ -10,6 +10,6 @@ class Command(CheckCommand):
    help = 'Deprecated. Use "check" command instead. ' + CheckCommand.help
    def handle_noargs(self, **options):
-        warnings.warn('"validate" has been deprecated in favour of "check".',
+        warnings.warn('"validate" has been deprecated in favor of "check".',
            PendingDeprecationWarning)
        super(Command, self).handle_noargs(**options)

View File

@@ -266,7 +266,7 @@ class TemplateCommand(BaseCommand):
            guessed_filename += ext
        # Move the temporary file to a filename that has better
-        # chances of being recognnized by the archive utils
+        # chances of being recognized by the archive utils
        if used_name != guessed_filename:
            guessed_path = path.join(tempdir, guessed_filename)
            shutil.move(the_path, guessed_path)

View File

@@ -10,7 +10,7 @@ There are two components here, separated by a ':'. The first component is a
URLsafe base64 encoded JSON of the object passed to dumps(). The second
component is a base64 encoded hmac/SHA1 hash of "$first_component:$secret"
-signing.loads(s) checks the signature and returns the deserialised object.
+signing.loads(s) checks the signature and returns the deserialized object.
If the signature fails, a BadSignature exception is raised.
>>> signing.loads("ImhlbGxvIg:1QaUZC:YIye-ze3TTx7gtSv422nZA4sgmk")

View File

@@ -665,7 +665,7 @@ class BaseDatabaseFeatures(object):
    # supported by the Python driver
    supports_paramstyle_pyformat = True
-    # Does the backend require literal defaults, rather than parameterised ones?
+    # Does the backend require literal defaults, rather than parameterized ones?
    requires_literal_defaults = False
    # Does the backend require a connection reset after each material schema change?
@@ -1387,7 +1387,7 @@ class BaseDatabaseClient(object):
class BaseDatabaseValidation(object):
    """
-    This class encapsualtes all backend-specific model validation.
+    This class encapsulates all backend-specific model validation.
    """
    def __init__(self, connection):
        self.connection = connection

View File

@@ -575,7 +575,7 @@ class DatabaseWrapper(BaseDatabaseWrapper):
        # these are set in single statement it isn't clear what is supposed
        # to happen.
        cursor.execute("ALTER SESSION SET NLS_TERRITORY = 'AMERICA'")
-        # Set oracle date to ansi date format. This only needs to execute
+        # Set Oracle date to ANSI date format. This only needs to execute
        # once when we create a new connection. We also set the Territory
        # to 'AMERICA' which forces Sunday to evaluate to a '1' in
        # TO_CHAR().
@@ -730,7 +730,7 @@ class OracleParam(object):
class VariableWrapper(object):
    """
    An adapter class for cursor variables that prevents the wrapped object
-    from being converted into a string when used to instanciate an OracleParam.
+    from being converted into a string when used to instantiate an OracleParam.
    This can be used generally for any other object that should be passed into
    Cursor.execute as-is.
    """

View File

@@ -732,7 +732,7 @@ class BaseDatabaseSchemaEditor(object):
    def _alter_column_type_sql(self, table, column, type):
        """
-        Hook to specialise column type alteration for different backends,
+        Hook to specialize column type alteration for different backends,
        for cases when a creation type is different to an alteration type
        (e.g. SERIAL in PostgreSQL, PostGIS fields).

View File

@@ -124,7 +124,7 @@ class MigrationExecutor(object):
    def detect_soft_applied(self, migration):
        """
-        Tests whether a migration has been implicity applied - that the
+        Tests whether a migration has been implicitly applied - that the
        tables it would create exist. This is intended only for use
        on initial migrations (as it only looks for CreateModel).
        """

View File

@@ -23,7 +23,7 @@ class MigrationLoader(object):
    an app. Their names are entirely unimportant from a code perspective,
    but will probably follow the 1234_name.py convention.
-    On initialisation, this class will scan those directories, and open and
+    On initialization, this class will scan those directories, and open and
    read the python files, looking for a class called Migration, which should
    inherit from django.db.migrations.Migration. See
    django.db.migrations.migration for what that looks like.

View File

@@ -12,7 +12,7 @@ class Migration(object):
     - replaces: A list of migration_names
    Note that all migrations come out of migrations and into the Loader or
-    Graph as instances, having been initialised with their app label and name.
+    Graph as instances, having been initialized with their app label and name.
    """
    # Operations to apply during this migration, in order.

View File

@@ -7,8 +7,8 @@ class MigrationOptimizer(object):
    and you are returned a list of equal or shorter length - operations
    are merged into one if possible.
-    For example, a CreateModel and an AddField can be optimised into a
-    new CreateModel, and CreateModel and DeleteModel can be optimised into
+    For example, a CreateModel and an AddField can be optimized into a
+    new CreateModel, and CreateModel and DeleteModel can be optimized into
    nothing.
    """
@@ -17,9 +17,9 @@ class MigrationOptimizer(object):
        Main optimization entry point. Pass in a list of Operation instances,
        get out a new list of Operation instances.
-        Unfortunately, due to the scope of the optimisation (two combinable
+        Unfortunately, due to the scope of the optimization (two combinable
        operations might be separated by several hundred others), this can't be
-        done as a peephole optimisation with checks/output implemented on
+        done as a peephole optimization with checks/output implemented on
        the Operations themselves; instead, the optimizer looks at each
        individual operation and scans forwards in the list to see if there
        are any matches, stopping at boundaries - operations which can't

View File

@@ -97,7 +97,7 @@ class Collector(object):
    def add_field_update(self, field, value, objs):
        """
-        Schedules a field update. 'objs' must be a homogenous iterable
+        Schedules a field update. 'objs' must be a homogeneous iterable
        collection of model instances (e.g. a QuerySet).
        """
        if not objs:
@@ -148,7 +148,7 @@ class Collector(object):
                source_attr=None, reverse_dependency=False):
        """
        Adds 'objs' to the collection of objects to be deleted as well as all
-        parent instances. 'objs' must be a homogenous iterable collection of
+        parent instances. 'objs' must be a homogeneous iterable collection of
        model instances (e.g. a QuerySet). If 'collect_related' is True,
        related objects will be handled by their respective on_delete handler.

View File

@@ -64,7 +64,7 @@ class FieldDoesNotExist(Exception):
# A guide to Field parameters:
#
-# * name: The name of the field specifed in the model.
+# * name: The name of the field specified in the model.
# * attname: The attribute to use on the model object. This is the same as
#            "name", except in the case of ForeignKeys, where "_id" is
#            appended.
@@ -306,7 +306,7 @@ class Field(RegisterLookupMixin):
         * top-level classes, top-level functions - will be referenced by their full import path
         * Storage instances - these have their own deconstruct() method
-        This is because the values here must be serialised into a text format
+        This is because the values here must be serialized into a text format
        (possibly new Python code, possibly JSON) and these are the only types
        with encoding handlers defined.

View File

@@ -1287,14 +1287,14 @@ def get_klass_info(klass, max_depth=0, cur_depth=0, requested=None,
    Helper function that recursively returns an information for a klass, to be
    used in get_cached_row. It exists just to compute this information only
    once for entire queryset. Otherwise it would be computed for each row, which
-    leads to poor perfomance on large querysets.
+    leads to poor performance on large querysets.
    Arguments:
     * klass - the class to retrieve (and instantiate)
     * max_depth - the maximum depth to which a select_related()
       relationship should be explored.
     * cur_depth - the current depth in the select_related() tree.
-      Used in recursive calls to determin if we should dig deeper.
+      Used in recursive calls to determine if we should dig deeper.
     * requested - A dictionary describing the select_related() tree
       that is to be retrieved. keys are field names; values are
       dictionaries describing the keys on that related object that
@@ -1359,7 +1359,7 @@ def get_klass_info(klass, max_depth=0, cur_depth=0, requested=None,
        field_names = [f.attname for f in klass._meta.concrete_fields
                       if f.model in non_seen_models]
        field_count = len(field_names)
-    # Try to avoid populating field_names variable for perfomance reasons.
+    # Try to avoid populating field_names variable for performance reasons.
    # If field_names variable is set, we use **kwargs based model init
    # which is slower than normal init.
    if field_count == len(klass._meta.concrete_fields):
@@ -1552,7 +1552,7 @@ class RawQuerySet(object):
            else:
                model_cls = self.model
                # All model's fields are present in the query. So, it is possible
-                # to use *args based model instantation. For each field of the model,
+                # to use *args based model instantiation. For each field of the model,
                # record the query column position matching that field.
                model_init_field_pos = []
                for field in self.model._meta.fields:
@@ -1762,7 +1762,7 @@ def prefetch_related_objects(result_cache, related_lookups):
        # Descend down tree
-        # We assume that objects retrieved are homogenous (which is the premise
+        # We assume that objects retrieved are homogeneous (which is the premise
        # of prefetch_related), so what applies to first object applies to all.
        first_obj = obj_list[0]
        prefetcher, descriptor, attr_found, is_fetched = get_prefetcher(first_obj, through_attr)

View File

@@ -109,7 +109,7 @@ class DeferredAttribute(object):
            val = self._check_parent_chain(instance, name)
            if val is None:
                # We use only() instead of values() here because we want the
-                # various data coersion methods (to_python(), etc.) to be
+                # various data coercion methods (to_python(), etc.) to be
                # called here.
                val = getattr(
                    non_deferred_model._base_manager.only(name).using(

View File

@@ -26,7 +26,7 @@ class RelatedObject(object):
        as SelectField choices for this field.
        Analogue of django.db.models.fields.Field.get_choices, provided
-        initially for utilisation by RelatedFieldListFilter.
+        initially for utilization by RelatedFieldListFilter.
        """
        first_choice = blank_choice if include_blank else []
        queryset = self.model._default_manager.all()

View File

@@ -890,7 +890,7 @@ class SQLInsertCompiler(SQLCompiler):
            col = "%s.%s" % (qn(opts.db_table), qn(opts.pk.column))
            result.append("VALUES (%s)" % ", ".join(placeholders[0]))
            r_fmt, r_params = self.connection.ops.return_insert_id()
-            # Skip empty r_fmt to allow subclasses to customize behaviour for
+            # Skip empty r_fmt to allow subclasses to customize behavior for
            # 3rd party backends. Refs #19096.
            if r_fmt:
                result.append(r_fmt % col)

View File

@@ -568,7 +568,7 @@ class Query(object):
        Converts the self.deferred_loading data structure to an alternate data
        structure, describing the field that *will* be loaded. This is used to
        compute the columns to select from the database and also by the
-        QuerySet class to work out which fields are being initialised on each
+        QuerySet class to work out which fields are being initialized on each
        model. Models that have all their fields included aren't mentioned in
        the result, only those that have field restrictions in place.
@@ -1767,7 +1767,7 @@ class Query(object):
        """
        # Fields on related models are stored in the literal double-underscore
        # format, so that we can use a set datastructure. We do the foo__bar
-        # splitting and handling when computing the SQL colum names (as part of
+        # splitting and handling when computing the SQL column names (as part of
        # get_columns()).
        existing, defer = self.deferred_loading
        if defer:

View File

@ -426,7 +426,7 @@ def _transaction_func(entering, exiting, using):
""" """
Takes 3 things, an entering function (what to do to start this block of Takes 3 things, an entering function (what to do to start this block of
transaction management), an exiting function (what to do to end it, on both transaction management), an exiting function (what to do to end it, on both
success and failure, and using which can be: None, indiciating using is success and failure, and using which can be: None, indicating using is
DEFAULT_DB_ALIAS, a callable, indicating that using is DEFAULT_DB_ALIAS and DEFAULT_DB_ALIAS, a callable, indicating that using is DEFAULT_DB_ALIAS and
to return the function already wrapped. to return the function already wrapped.
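Editor's note: the legacy decorators built on this helper (such as commit_on_success, since deprecated in favour of atomic()) accept all three forms of `using`; a hedged sketch:

    from django.db import transaction

    @transaction.commit_on_success            # using=None -> DEFAULT_DB_ALIAS
    def import_books():
        pass

    @transaction.commit_on_success(using='replica')   # explicit alias
    def import_authors():
        pass

    # Passing a function directly (using is a callable) returns it wrapped
    # against the default alias:
    # import_covers = transaction.commit_on_success(import_covers)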

View File

@ -27,7 +27,7 @@ class Signal(object):
Internal attributes: Internal attributes:
receivers receivers
{ receriverkey (id) : weakref(receiver) } { receiverkey (id) : weakref(receiver) }
""" """
def __init__(self, providing_args=None, use_caching=False): def __init__(self, providing_args=None, use_caching=False):
""" """

View File

@ -190,7 +190,7 @@ class BaseForm(object):
for e in bf_errors]) for e in bf_errors])
hidden_fields.append(six.text_type(bf)) hidden_fields.append(six.text_type(bf))
else: else:
# Create a 'class="..."' atribute if the row should have any # Create a 'class="..."' attribute if the row should have any
# CSS classes applied. # CSS classes applied.
css_classes = bf.css_classes() css_classes = bf.css_classes()
if css_classes: if css_classes:

View File

@ -347,7 +347,7 @@ class BaseFormSet(object):
""" """
Hook for doing any extra formset-wide cleaning after Form.clean() has Hook for doing any extra formset-wide cleaning after Form.clean() has
been called on every form. Any ValidationError raised by this method been called on every form. Any ValidationError raised by this method
will not be associated with a particular form; it will be accesible will not be associated with a particular form; it will be accessible
via formset.non_form_errors() via formset.non_form_errors()
""" """
pass pass
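Editor's note: a sketch of the hook in use, assuming a configured Django environment (the form and field names are invented):

    from django import forms
    from django.forms.formsets import BaseFormSet, formset_factory

    class ArticleForm(forms.Form):
        title = forms.CharField()

    class BaseArticleFormSet(BaseFormSet):
        def clean(self):
            super(BaseArticleFormSet, self).clean()
            titles = [form.cleaned_data.get('title')
                      for form in self.forms if hasattr(form, 'cleaned_data')]
            if len(titles) != len(set(titles)):
                # Not tied to any one form: surfaces via formset.non_form_errors().
                raise forms.ValidationError("Titles must be distinct.")

    ArticleFormSet = formset_factory(ArticleForm, formset=BaseArticleFormSet)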

View File

@ -253,7 +253,7 @@ class MultiPartParser(object):
def handle_file_complete(self, old_field_name, counters): def handle_file_complete(self, old_field_name, counters):
""" """
Handle all the signalling that takes place when a file is complete. Handle all the signaling that takes place when a file is complete.
""" """
for i, handler in enumerate(self._upload_handlers): for i, handler in enumerate(self._upload_handlers):
file_obj = handler.file_complete(counters[i]) file_obj = handler.file_complete(counters[i])
@ -484,7 +484,7 @@ class BoundaryIter(six.Iterator):
self._done = True self._done = True
return chunk[:end] return chunk[:end]
else: else:
# make sure we dont treat a partial boundary (and # make sure we don't treat a partial boundary (and
# its separators) as data # its separators) as data
if not chunk[:-rollback]: # and len(chunk) >= (len(self._boundary) + 6): if not chunk[:-rollback]: # and len(chunk) >= (len(self._boundary) + 6):
# There's nothing left, we should just return and mark as done. # There's nothing left, we should just return and mark as done.
@ -498,7 +498,7 @@ class BoundaryIter(six.Iterator):
""" """
Finds a multipart boundary in data. Finds a multipart boundary in data.
Should no boundry exist in the data None is returned instead. Otherwise Should no boundary exist in the data None is returned instead. Otherwise
a tuple containing the indices of the following are returned: a tuple containing the indices of the following are returned:
* the end of current encapsulation * the end of current encapsulation

View File

@ -189,7 +189,7 @@ class HttpResponseBase(six.Iterator):
return self._headers[header.lower()][1] return self._headers[header.lower()][1]
def __getstate__(self): def __getstate__(self):
# SimpleCookie is not pickeable with pickle.HIGHEST_PROTOCOL, so we # SimpleCookie is not pickleable with pickle.HIGHEST_PROTOCOL, so we
# serialize to a string instead # serialize to a string instead
state = self.__dict__.copy() state = self.__dict__.copy()
state['cookies'] = str(state['cookies']) state['cookies'] = str(state['cookies'])
@ -274,7 +274,7 @@ class HttpResponseBase(six.Iterator):
return bytes(value) return bytes(value)
# Handle string types -- we can't rely on force_bytes here because: # Handle string types -- we can't rely on force_bytes here because:
# - under Python 3 it attemps str conversion first # - under Python 3 it attempts str conversion first
# - when self._charset != 'utf-8' it re-encodes the content # - when self._charset != 'utf-8' it re-encodes the content
if isinstance(value, bytes): if isinstance(value, bytes):
return bytes(value) return bytes(value)

View File

@ -69,7 +69,7 @@ libraries = {}
builtins = [] builtins = []
# True if TEMPLATE_STRING_IF_INVALID contains a format string (%s). None means # True if TEMPLATE_STRING_IF_INVALID contains a format string (%s). None means
# uninitialised. # uninitialized.
invalid_var_format_string = None invalid_var_format_string = None
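Editor's note: the flag tracks whether the setting interpolates the variable name; a settings sketch (the value is an example for debugging, not a production recommendation):

    # settings.py (debugging aid only)
    TEMPLATE_STRING_IF_INVALID = 'INVALID EXPRESSION: %s'
    # The '%s' is replaced with the name of the unknown variable, which is
    # what flips invalid_var_format_string to True on first use.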

View File

@ -567,7 +567,7 @@ def cycle(parser, token, escape=False):
{% endfor %} {% endfor %}
Outside of a loop, give the values a unique name the first time you call Outside of a loop, give the values a unique name the first time you call
it, then use that name each sucessive time through:: it, then use that name each successive time through::
<tr class="{% cycle 'row1' 'row2' 'row3' as rowcolors %}">...</tr> <tr class="{% cycle 'row1' 'row2' 'row3' as rowcolors %}">...</tr>
<tr class="{% cycle rowcolors %}">...</tr> <tr class="{% cycle rowcolors %}">...</tr>
@ -944,7 +944,7 @@ def do_if(parser, token):
{% endif %} {% endif %}
{% if athlete_list and coach_list %} {% if athlete_list and coach_list %}
Both atheletes and coaches are available. Both athletes and coaches are available.
{% endif %} {% endif %}
{% if not athlete_list or coach_list %} {% if not athlete_list or coach_list %}

View File

@ -2253,7 +2253,7 @@ class DocTestCase(unittest.TestCase):
caller can catch the errors and initiate post-mortem debugging. caller can catch the errors and initiate post-mortem debugging.
The DocTestCase provides a debug method that raises The DocTestCase provides a debug method that raises
UnexpectedException errors if there is an unexepcted UnexpectedException errors if there is an unexpected
exception: exception:
>>> test = DocTestParser().get_doctest('>>> raise KeyError\n42', >>> test = DocTestParser().get_doctest('>>> raise KeyError\n42',

View File

@ -122,7 +122,7 @@ def complex_setting_changed(**kwargs):
if kwargs['enter'] and kwargs['setting'] in COMPLEX_OVERRIDE_SETTINGS: if kwargs['enter'] and kwargs['setting'] in COMPLEX_OVERRIDE_SETTINGS:
# Considering the current implementation of the signals framework, # Considering the current implementation of the signals framework,
# stacklevel=5 shows the line containing the override_settings call. # stacklevel=5 shows the line containing the override_settings call.
warnings.warn("Overriding setting %s can lead to unexpected behaviour." warnings.warn("Overriding setting %s can lead to unexpected behavior."
% kwargs['setting'], stacklevel=5) % kwargs['setting'], stacklevel=5)

View File

@ -1182,7 +1182,7 @@ class LiveServerTestCase(TransactionTestCase):
cls.server_thread.terminate() cls.server_thread.terminate()
cls.server_thread.join() cls.server_thread.join()
# Restore sqlite connections' non-sharability # Restore sqlite connections' non-shareability
for conn in connections.all(): for conn in connections.all():
if (conn.vendor == 'sqlite' if (conn.vendor == 'sqlite'
and conn.settings_dict['NAME'] == ':memory:'): and conn.settings_dict['NAME'] == ':memory:'):

View File

@ -259,7 +259,7 @@ class modify_settings(override_settings):
""" """
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):
if args: if args:
# Hack used when instaciating from SimpleTestCase._pre_setup. # Hack used when instantiating from SimpleTestCase._pre_setup.
assert not kwargs assert not kwargs
self.operations = args[0] self.operations = args[0]
else: else:
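Editor's note: the normal, documented entry point looks like this (Django 1.7 syntax; the middleware paths are placeholders), while SimpleTestCase._pre_setup builds the same object positionally, which is the hack the comment refers to:

    from django.test import TestCase
    from django.test.utils import modify_settings

    @modify_settings(MIDDLEWARE_CLASSES={
        'append': 'myapp.middleware.ExtraMiddleware',  # hypothetical path
        'remove': 'django.middleware.clickjacking.XFrameOptionsMiddleware',
    })
    class MiddlewareTests(TestCase):
        pass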

View File

@ -282,7 +282,7 @@ _super = super
class SimpleLazyObject(LazyObject): class SimpleLazyObject(LazyObject):
""" """
A lazy object initialised from any function. A lazy object initialized from any function.
Designed for compound objects of unknown type. For builtins or objects of Designed for compound objects of unknown type. For builtins or objects of
known type, use django.utils.functional.lazy. known type, use django.utils.functional.lazy.
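Editor's note: a self-contained example (plain Python, no settings required) of the deferred construction this class provides:

    from django.utils.functional import SimpleLazyObject

    class Profile(object):
        def __init__(self):
            print("expensive Profile built")   # runs once, on first access
            self.name = "lazy"

    profile = SimpleLazyObject(Profile)  # nothing constructed yet
    print(profile.name)                  # triggers __init__, then proxies the attribute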

View File

@ -213,13 +213,13 @@ def urlize(text, trim_url_limit=None, nofollow=False, autoescape=False):
Links can have trailing punctuation (periods, commas, close-parens) and Links can have trailing punctuation (periods, commas, close-parens) and
leading punctuation (opening parens) and it'll still do the right thing. leading punctuation (opening parens) and it'll still do the right thing.
If trim_url_limit is not None, the URLs in link text longer than this limit If trim_url_limit is not None, the URLs in the link text longer than this
will truncated to trim_url_limit-3 characters and appended with an elipsis. limit will be truncated to trim_url_limit-3 characters and appended with
an ellipsis.
If nofollow is True, the URLs in link text will get a rel="nofollow" If nofollow is True, the links will get a rel="nofollow" attribute.
attribute.
If autoescape is True, the link text and URLs will get autoescaped. If autoescape is True, the link text and URLs will be autoescaped.
""" """
def trim_url(x, limit=trim_url_limit): def trim_url(x, limit=trim_url_limit):
if limit is None or len(x) <= limit: if limit is None or len(x) <= limit:
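Editor's note: roughly how the three options interact, using django.utils.html.urlize in a configured Django environment (the output in the comment is approximate):

    from django.utils.html import urlize

    text = "See www.djangoproject.com/weblog/ for details"
    html = urlize(text, trim_url_limit=20, nofollow=True)
    # -> 'See <a href="http://www.djangoproject.com/weblog/" rel="nofollow">
    #     www.djangoproject...</a> for details'  (link text truncated + ellipsis)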

View File

@ -14,7 +14,7 @@ def clean_ipv6_address(ip_str, unpack_ipv4=False,
Validity is checked by calling is_valid_ipv6_address() - if an Validity is checked by calling is_valid_ipv6_address() - if an
invalid address is passed, ValidationError is raised. invalid address is passed, ValidationError is raised.
Replaces the longest continious zero-sequence with "::" and Replaces the longest continuous zero-sequence with "::" and
removes leading zeroes and makes sure all hextets are lowercase. removes leading zeroes and makes sure all hextets are lowercase.
Args: Args:
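Editor's note: a quick demonstration of the normalization rules; the expected results in the comments are my reading of the behaviour, not output from this patch:

    from django.utils.ipv6 import clean_ipv6_address

    clean_ipv6_address('2001:0db8:0000:0000:0000:0000:0000:0001')
    # -> '2001:db8::1'   (leading zeroes dropped, longest zero run collapsed)

    clean_ipv6_address('::ffff:0a0a:0a0a', unpack_ipv4=True)
    # -> '10.10.10.10'   (embedded IPv4 unpacked when requested)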

View File

@ -82,7 +82,7 @@ class JsLexer(Lexer):
>>> list(lexer.lex("a = 1")) >>> list(lexer.lex("a = 1"))
[('id', 'a'), ('ws', ' '), ('punct', '='), ('ws', ' '), ('dnum', '1')] [('id', 'a'), ('ws', ' '), ('punct', '='), ('ws', ' '), ('dnum', '1')]
This doesn't properly handle non-Ascii characters in the Javascript source. This doesn't properly handle non-ASCII characters in the Javascript source.
""" """
# Because these tokens are matched as alternatives in a regex, longer # Because these tokens are matched as alternatives in a regex, longer

View File

@ -169,7 +169,7 @@ def normalize(pattern):
count, ch = get_quantifier(ch, pattern_iter) count, ch = get_quantifier(ch, pattern_iter)
if ch: if ch:
# We had to look ahead, but it wasn't needed to compute the # We had to look ahead, but it wasn't needed to compute the
# quanitifer, so use this character next time around the # quantifier, so use this character next time around the
# main loop. # main loop.
consume_next = False consume_next = False

View File

@ -641,7 +641,7 @@ if PY3:
else: else:
_assertRaisesRegex = "assertRaisesRegexp" _assertRaisesRegex = "assertRaisesRegexp"
_assertRegex = "assertRegexpMatches" _assertRegex = "assertRegexpMatches"
# memoryview and buffer are not stricly equivalent, but should be fine for # memoryview and buffer are not strictly equivalent, but should be fine for
# django core usage (mainly BinaryField). However, Jython doesn't support # django core usage (mainly BinaryField). However, Jython doesn't support
# buffer (see http://bugs.jython.org/issue1521), so we have to be careful. # buffer (see http://bugs.jython.org/issue1521), so we have to be careful.
if sys.platform.startswith('java'): if sys.platform.startswith('java'):

View File

@ -191,7 +191,7 @@ class ChangeListTests(TestCase):
def test_distinct_for_m2m_in_list_filter(self): def test_distinct_for_m2m_in_list_filter(self):
""" """
Regression test for #13902: When using a ManyToMany in list_filter, Regression test for #13902: When using a ManyToMany in list_filter,
results shouldn't apper more than once. Basic ManyToMany. results shouldn't appear more than once. Basic ManyToMany.
""" """
blues = Genre.objects.create(name='Blues') blues = Genre.objects.create(name='Blues')
band = Band.objects.create(name='B.B. King Review', nr_of_members=11) band = Band.objects.create(name='B.B. King Review', nr_of_members=11)
@ -215,7 +215,7 @@ class ChangeListTests(TestCase):
def test_distinct_for_through_m2m_in_list_filter(self): def test_distinct_for_through_m2m_in_list_filter(self):
""" """
Regression test for #13902: When using a ManyToMany in list_filter, Regression test for #13902: When using a ManyToMany in list_filter,
results shouldn't apper more than once. With an intermediate model. results shouldn't appear more than once. With an intermediate model.
""" """
lead = Musician.objects.create(name='Vox') lead = Musician.objects.create(name='Vox')
band = Group.objects.create(name='The Hype') band = Group.objects.create(name='The Hype')
@ -238,7 +238,7 @@ class ChangeListTests(TestCase):
def test_distinct_for_inherited_m2m_in_list_filter(self): def test_distinct_for_inherited_m2m_in_list_filter(self):
""" """
Regression test for #13902: When using a ManyToMany in list_filter, Regression test for #13902: When using a ManyToMany in list_filter,
results shouldn't apper more than once. Model managed in the results shouldn't appear more than once. Model managed in the
admin inherits from the one that defines the relationship. admin inherits from the one that defines the relationship.
""" """
lead = Musician.objects.create(name='John') lead = Musician.objects.create(name='John')
@ -262,7 +262,7 @@ class ChangeListTests(TestCase):
def test_distinct_for_m2m_to_inherited_in_list_filter(self): def test_distinct_for_m2m_to_inherited_in_list_filter(self):
""" """
Regression test for #13902: When using a ManyToMany in list_filter, Regression test for #13902: When using a ManyToMany in list_filter,
results shouldn't apper more than once. Target of the relationship results shouldn't appear more than once. Target of the relationship
inherits from another. inherits from another.
""" """
lead = ChordsMusician.objects.create(name='Player A') lead = ChordsMusician.objects.create(name='Player A')

View File

@ -63,7 +63,7 @@ class TestInline(TestCase):
def test_inline_primary(self): def test_inline_primary(self):
person = Person.objects.create(firstname='Imelda') person = Person.objects.create(firstname='Imelda')
item = OutfitItem.objects.create(name='Shoes') item = OutfitItem.objects.create(name='Shoes')
# Imelda likes shoes, but can't cary her own bags. # Imelda likes shoes, but can't carry her own bags.
data = { data = {
'shoppingweakness_set-TOTAL_FORMS': 1, 'shoppingweakness_set-TOTAL_FORMS': 1,
'shoppingweakness_set-INITIAL_FORMS': 0, 'shoppingweakness_set-INITIAL_FORMS': 0,
@ -91,7 +91,7 @@ class TestInline(TestCase):
'title_set-0-title2': 'a different title', 'title_set-0-title2': 'a different title',
} }
response = self.client.post('/admin/admin_inlines/titlecollection/add/', data) response = self.client.post('/admin/admin_inlines/titlecollection/add/', data)
# Here colspan is "4": two fields (title1 and title2), one hidden field and the delete checkbock. # Here colspan is "4": two fields (title1 and title2), one hidden field and the delete checkbox.
self.assertContains(response, '<tr><td colspan="4"><ul class="errorlist"><li>The two titles must be the same</li></ul></td></tr>') self.assertContains(response, '<tr><td colspan="4"><ul class="errorlist"><li>The two titles must be the same</li></ul></td></tr>')
def test_no_parent_callable_lookup(self): def test_no_parent_callable_lookup(self):
@ -109,7 +109,7 @@ class TestInline(TestCase):
self.assertEqual(response.status_code, 200) self.assertEqual(response.status_code, 200)
# Add parent object view should have the child inlines section # Add parent object view should have the child inlines section
self.assertContains(response, '<div class="inline-group" id="question_set-group">') self.assertContains(response, '<div class="inline-group" id="question_set-group">')
# The right callabe should be used for the inline readonly_fields # The right callable should be used for the inline readonly_fields
# column cells # column cells
self.assertContains(response, '<p>Callable in QuestionInline</p>') self.assertContains(response, '<p>Callable in QuestionInline</p>')

View File

@ -58,7 +58,7 @@ class TestAdminOrdering(TestCase):
def test_dynamic_ordering(self): def test_dynamic_ordering(self):
""" """
Let's use a custom ModelAdmin that changes the ordering dinamically. Let's use a custom ModelAdmin that changes the ordering dynamically.
""" """
super_user = User.objects.create(username='admin', is_superuser=True) super_user = User.objects.create(username='admin', is_superuser=True)
other_user = User.objects.create(username='other') other_user = User.objects.create(username='other')

View File

@ -2,7 +2,7 @@
from __future__ import unicode_literals from __future__ import unicode_literals
""" """
A series of tests to establish that the command-line managment tools work as A series of tests to establish that the command-line management tools work as
advertised - especially with regards to the handling of the DJANGO_SETTINGS_MODULE advertised - especially with regards to the handling of the DJANGO_SETTINGS_MODULE
and default settings.py files. and default settings.py files.
""" """
@ -1474,7 +1474,7 @@ class CommandTypes(AdminScriptTestCase):
def test_run_from_argv_non_ascii_error(self): def test_run_from_argv_non_ascii_error(self):
""" """
Test that non-ascii message of CommandError does not raise any Test that non-ASCII message of CommandError does not raise any
UnicodeDecodeError in run_from_argv. UnicodeDecodeError in run_from_argv.
""" """
def raise_command_error(*args, **kwargs): def raise_command_error(*args, **kwargs):

View File

@ -249,7 +249,7 @@ class AdminViewBasicTest(AdminViewBasicTestCase):
def testChangeListSortingModel(self): def testChangeListSortingModel(self):
""" """
Ensure we can sort on a list_display field that is a Model method Ensure we can sort on a list_display field that is a Model method
(colunn 3 is 'model_year' in ArticleAdmin) (column 3 is 'model_year' in ArticleAdmin)
""" """
response = self.client.get('/test_admin/%s/admin_views/article/' % self.urlbit, {'o': '-3'}) response = self.client.get('/test_admin/%s/admin_views/article/' % self.urlbit, {'o': '-3'})
self.assertContentBefore(response, 'Newest content', 'Middle content', self.assertContentBefore(response, 'Newest content', 'Middle content',
@ -729,7 +729,7 @@ class AdminCustomTemplateTests(AdminViewBasicTestCase):
group = Group.objects.create(name="foogroup") group = Group.objects.create(name="foogroup")
post_data = { post_data = {
'action': 'delete_selected', 'action': 'delete_selected',
'selected_accross': '0', 'selected_across': '0',
'index': '0', 'index': '0',
'_selected_action': group.id '_selected_action': group.id
} }
@ -1185,7 +1185,7 @@ class AdminViewPermissionsTest(TestCase):
'date_0': '2008-03-18', 'date_1': '10:54:39', 'date_0': '2008-03-18', 'date_1': '10:54:39',
'section': 1} 'section': 1}
# add user shoud not be able to view the list of article or change any of them # add user should not be able to view the list of article or change any of them
self.client.get('/test_admin/admin/') self.client.get('/test_admin/admin/')
self.client.post(login_url, self.adduser_login) self.client.post(login_url, self.adduser_login)
response = self.client.get('/test_admin/admin/admin_views/article/') response = self.client.get('/test_admin/admin/admin_views/article/')
@ -1259,7 +1259,7 @@ class AdminViewPermissionsTest(TestCase):
login_url = reverse('admin:login') + '?next=/test_admin/admin/' login_url = reverse('admin:login') + '?next=/test_admin/admin/'
# add user shoud not be able to view the list of article or change any of them # add user should not be able to view the list of article or change any of them
self.client.get('/test_admin/admin/') self.client.get('/test_admin/admin/')
self.client.post(login_url, self.adduser_login) self.client.post(login_url, self.adduser_login)
response = self.client.get('/test_admin/admin/admin_views/article/1/history/') response = self.client.get('/test_admin/admin/admin_views/article/1/history/')
@ -1366,7 +1366,7 @@ class AdminViewPermissionsTest(TestCase):
login_url = reverse('admin:login') + '?next=/test_admin/admin/' login_url = reverse('admin:login') + '?next=/test_admin/admin/'
delete_dict = {'post': 'yes'} delete_dict = {'post': 'yes'}
# add user shoud not be able to delete articles # add user should not be able to delete articles
self.client.get('/test_admin/admin/') self.client.get('/test_admin/admin/')
self.client.post(login_url, self.adduser_login) self.client.post(login_url, self.adduser_login)
response = self.client.get('/test_admin/admin/admin_views/article/1/delete/') response = self.client.get('/test_admin/admin/admin_views/article/1/delete/')
@ -1749,7 +1749,7 @@ class AdminViewStringPrimaryKeyTest(TestCase):
@override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',)) @override_settings(PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',))
class SecureViewTests(TestCase): class SecureViewTests(TestCase):
""" """
Test behaviour of a view protected by the staff_member_required decorator. Test behavior of a view protected by the staff_member_required decorator.
""" """
urls = "admin_views.urls" urls = "admin_views.urls"
fixtures = ['admin-views-users.xml'] fixtures = ['admin-views-users.xml']
@ -1782,7 +1782,7 @@ class AdminViewUnicodeTest(TestCase):
def testUnicodeEdit(self): def testUnicodeEdit(self):
""" """
A test to ensure that POST on edit_view handles non-ascii characters. A test to ensure that POST on edit_view handles non-ASCII characters.
""" """
post_data = { post_data = {
"name": "Test lærdommer", "name": "Test lærdommer",
@ -1815,7 +1815,7 @@ class AdminViewUnicodeTest(TestCase):
def testUnicodeDelete(self): def testUnicodeDelete(self):
""" """
Ensure that the delete_view handles non-ascii characters Ensure that the delete_view handles non-ASCII characters
""" """
delete_dict = {'post': 'yes'} delete_dict = {'post': 'yes'}
response = self.client.get('/test_admin/admin/admin_views/book/1/delete/') response = self.client.get('/test_admin/admin/admin_views/book/1/delete/')
@ -3995,7 +3995,7 @@ class CSSTest(TestCase):
def testAppModelInFormBodyClass(self): def testAppModelInFormBodyClass(self):
""" """
Ensure app and model tag are correcly read by change_form template Ensure app and model tag are correctly read by change_form template
""" """
response = self.client.get('/test_admin/admin/admin_views/section/add/') response = self.client.get('/test_admin/admin/admin_views/section/add/')
self.assertEqual(response.status_code, 200) self.assertEqual(response.status_code, 200)
@ -4004,7 +4004,7 @@ class CSSTest(TestCase):
def testAppModelInListBodyClass(self): def testAppModelInListBodyClass(self):
""" """
Ensure app and model tag are correcly read by change_list template Ensure app and model tag are correctly read by change_list template
""" """
response = self.client.get('/test_admin/admin/admin_views/section/') response = self.client.get('/test_admin/admin/admin_views/section/')
self.assertEqual(response.status_code, 200) self.assertEqual(response.status_code, 200)
@ -4013,7 +4013,7 @@ class CSSTest(TestCase):
def testAppModelInDeleteConfirmationBodyClass(self): def testAppModelInDeleteConfirmationBodyClass(self):
""" """
Ensure app and model tag are correcly read by delete_confirmation Ensure app and model tag are correctly read by delete_confirmation
template template
""" """
response = self.client.get( response = self.client.get(
@ -4024,7 +4024,7 @@ class CSSTest(TestCase):
def testAppModelInAppIndexBodyClass(self): def testAppModelInAppIndexBodyClass(self):
""" """
Ensure app and model tag are correcly read by app_index template Ensure app and model tag are correctly read by app_index template
""" """
response = self.client.get('/test_admin/admin/admin_views/') response = self.client.get('/test_admin/admin/admin_views/')
self.assertEqual(response.status_code, 200) self.assertEqual(response.status_code, 200)
@ -4032,7 +4032,7 @@ class CSSTest(TestCase):
def testAppModelInDeleteSelectedConfirmationBodyClass(self): def testAppModelInDeleteSelectedConfirmationBodyClass(self):
""" """
Ensure app and model tag are correcly read by Ensure app and model tag are correctly read by
delete_selected_confirmation template delete_selected_confirmation template
""" """
action_data = { action_data = {
@ -4719,7 +4719,7 @@ class AdminViewOnSiteTests(TestCase):
) )
def test_true(self): def test_true(self):
"Ensure that the default behaviour is followed if view_on_site is True" "Ensure that the default behavior is followed if view_on_site is True"
response = self.client.get('/test_admin/admin/admin_views/city/1/') response = self.client.get('/test_admin/admin/admin_views/city/1/')
content_type_pk = ContentType.objects.get_for_model(City).pk content_type_pk = ContentType.objects.get_for_model(City).pk
self.assertContains(response, self.assertContains(response,

View File

@ -70,7 +70,7 @@ class AggregationTests(TestCase):
Regression test for #11916: Extra params + aggregation creates Regression test for #11916: Extra params + aggregation creates
incorrect SQL. incorrect SQL.
""" """
# oracle doesn't support subqueries in group by clause # Oracle doesn't support subqueries in group by clause
shortest_book_sql = """ shortest_book_sql = """
SELECT name SELECT name
FROM aggregation_regress_book b FROM aggregation_regress_book b
@ -1080,7 +1080,7 @@ class AggregationTests(TestCase):
) )
def test_name_expressions(self): def test_name_expressions(self):
# Test that aggregates are spotted corretly from F objects. # Test that aggregates are spotted correctly from F objects.
# Note that Adrian's age is 34 in the fixtures, and he has one book # Note that Adrian's age is 34 in the fixtures, and he has one book
# so both conditions match one author. # so both conditions match one author.
qs = Author.objects.annotate(Count('book')).filter( qs = Author.objects.annotate(Count('book')).filter(

View File

@ -563,14 +563,14 @@ class BackendTestCase(TestCase):
@skipUnlessDBFeature('supports_paramstyle_pyformat') @skipUnlessDBFeature('supports_paramstyle_pyformat')
def test_cursor_execute_with_pyformat(self): def test_cursor_execute_with_pyformat(self):
#10070: Support pyformat style passing of paramters #10070: Support pyformat style passing of parameters
args = {'root': 3, 'square': 9} args = {'root': 3, 'square': 9}
self.create_squares(args, 'pyformat', multiple=False) self.create_squares(args, 'pyformat', multiple=False)
self.assertEqual(models.Square.objects.count(), 1) self.assertEqual(models.Square.objects.count(), 1)
@skipUnlessDBFeature('supports_paramstyle_pyformat') @skipUnlessDBFeature('supports_paramstyle_pyformat')
def test_cursor_executemany_with_pyformat(self): def test_cursor_executemany_with_pyformat(self):
#10070: Support pyformat style passing of paramters #10070: Support pyformat style passing of parameters
args = [{'root': i, 'square': i ** 2} for i in range(-5, 6)] args = [{'root': i, 'square': i ** 2} for i in range(-5, 6)]
self.create_squares(args, 'pyformat', multiple=True) self.create_squares(args, 'pyformat', multiple=True)
self.assertEqual(models.Square.objects.count(), 11) self.assertEqual(models.Square.objects.count(), 11)
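Editor's note: what "pyformat style" means in practice, on a backend whose supports_paramstyle_pyformat feature is True (a sketch, not code from the patch):

    from django.db import connection

    cursor = connection.cursor()
    # Named placeholders are bound from a dict instead of a positional sequence.
    cursor.execute("SELECT %(root)s * %(root)s", {'root': 3})
    (square,) = cursor.fetchone()   # 9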
@ -733,7 +733,7 @@ class FkConstraintsTests(TransactionTestCase):
with transaction.atomic(): with transaction.atomic():
# Create an Article. # Create an Article.
models.Article.objects.create(headline="Test article", pub_date=datetime.datetime(2010, 9, 4), reporter=self.r) models.Article.objects.create(headline="Test article", pub_date=datetime.datetime(2010, 9, 4), reporter=self.r)
# Retrive it from the DB # Retrieve it from the DB
a = models.Article.objects.get(headline="Test article") a = models.Article.objects.get(headline="Test article")
a.reporter_id = 30 a.reporter_id = 30
try: try:
@ -751,7 +751,7 @@ class FkConstraintsTests(TransactionTestCase):
with transaction.atomic(): with transaction.atomic():
# Create an Article. # Create an Article.
models.Article.objects.create(headline="Test article", pub_date=datetime.datetime(2010, 9, 4), reporter=self.r) models.Article.objects.create(headline="Test article", pub_date=datetime.datetime(2010, 9, 4), reporter=self.r)
# Retrive it from the DB # Retrieve it from the DB
a = models.Article.objects.get(headline="Test article") a = models.Article.objects.get(headline="Test article")
a.reporter_id = 30 a.reporter_id = 30
try: try:
@ -768,7 +768,7 @@ class FkConstraintsTests(TransactionTestCase):
with transaction.atomic(): with transaction.atomic():
# Create an Article. # Create an Article.
models.Article.objects.create(headline="Test article", pub_date=datetime.datetime(2010, 9, 4), reporter=self.r) models.Article.objects.create(headline="Test article", pub_date=datetime.datetime(2010, 9, 4), reporter=self.r)
# Retrive it from the DB # Retrieve it from the DB
a = models.Article.objects.get(headline="Test article") a = models.Article.objects.get(headline="Test article")
a.reporter_id = 30 a.reporter_id = 30
with connection.constraint_checks_disabled(): with connection.constraint_checks_disabled():

View File

@ -871,7 +871,7 @@ class SelectOnSaveTests(TestCase):
with self.assertNumQueries(2): with self.assertNumQueries(2):
asos.save() asos.save()
self.assertTrue(FakeQuerySet.called) self.assertTrue(FakeQuerySet.called)
# This is not wanted behaviour, but this is how Django has always # This is not wanted behavior, but this is how Django has always
# behaved for databases that do not return correct information # behaved for databases that do not return correct information
# about matched rows for UPDATE. # about matched rows for UPDATE.
with self.assertRaises(DatabaseError): with self.assertRaises(DatabaseError):

View File

@ -1292,7 +1292,7 @@ class CacheUtils(TestCase):
def test_cache_key_varies_by_url(self): def test_cache_key_varies_by_url(self):
""" """
get_cache_key keys differ by fully-qualfied URL instead of path get_cache_key keys differ by fully-qualified URL instead of path
""" """
request1 = self.factory.get(self.path, HTTP_HOST='sub-1.example.com') request1 = self.factory.get(self.path, HTTP_HOST='sub-1.example.com')
learn_cache_key(request1, HttpResponse()) learn_cache_key(request1, HttpResponse())

View File

@ -110,7 +110,7 @@ class DeferTests(TestCase):
obj.name = "c2" obj.name = "c2"
obj.save() obj.save()
# You can retrive a single column on a base class with no fields # You can retrieve a single column on a base class with no fields
obj = Child.objects.only("name").get(name="c2") obj = Child.objects.only("name").get(name="c2")
self.assert_delayed(obj, 3) self.assert_delayed(obj, 3)
self.assertEqual(obj.name, "c2") self.assertEqual(obj.name, "c2")

View File

@ -131,7 +131,7 @@ class ExtraRegressTests(TestCase):
""" """
Regression test for #8039: Ordering sometimes removed relevant tables Regression test for #8039: Ordering sometimes removed relevant tables
from extra(). This test is the critical case: ordering uses a table, from extra(). This test is the critical case: ordering uses a table,
but then removes the reference because of an optimisation. The table but then removes the reference because of an optimization. The table
should still be present because of the extra() call. should still be present because of the extra() call.
""" """
self.assertQuerysetEqual( self.assertQuerysetEqual(

View File

@ -59,7 +59,7 @@ class SmallerField(SmallField):
class JSONField(six.with_metaclass(models.SubfieldBase, models.TextField)): class JSONField(six.with_metaclass(models.SubfieldBase, models.TextField)):
description = ("JSONField automatically serializes and desializes values to " description = ("JSONField automatically serializes and deserializes values to "
"and from JSON.") "and from JSON.")
def to_python(self, value): def to_python(self, value):
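Editor's note: a condensed sketch of the pattern this fixture uses (SubfieldBase plus to_python/get_prep_value); any detail beyond what the diff shows is an assumption:

    import json
    from django.db import models
    from django.utils import six

    class JSONField(six.with_metaclass(models.SubfieldBase, models.TextField)):
        description = "Stores Python objects as JSON text."

        def to_python(self, value):          # called on assignment and on load
            if isinstance(value, six.string_types):
                return json.loads(value) if value else None
            return value

        def get_prep_value(self, value):     # back to text for the database
            return json.dumps(value)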

View File

@ -39,7 +39,7 @@ class CustomField(TestCase):
# Custom fields still have normal field's attributes. # Custom fields still have normal field's attributes.
self.assertEqual(m._meta.get_field("data").verbose_name, "small field") self.assertEqual(m._meta.get_field("data").verbose_name, "small field")
# The m.data attribute has been initialised correctly. It's a Small # The m.data attribute has been initialized correctly. It's a Small
# object. # object.
self.assertEqual((m.data.first, m.data.second), (1, 2)) self.assertEqual((m.data.first, m.data.second), (1, 2))
@ -100,7 +100,7 @@ class CustomField(TestCase):
http://users.rcn.com/python/download/Descriptor.htm#properties http://users.rcn.com/python/download/Descriptor.htm#properties
""" """
# Even when looking for totally different properties, SubfieldBase's # Even when looking for totally different properties, SubfieldBase's
# non property like behaviour made inspect crash. Refs #12568. # non property like behavior made inspect crash. Refs #12568.
data = dict(inspect.getmembers(MyModel)) data = dict(inspect.getmembers(MyModel))
self.assertIn('__module__', data) self.assertIn('__module__', data)
self.assertEqual(data['__module__'], 'field_subclassing.models') self.assertEqual(data['__module__'], 'field_subclassing.models')

View File

@ -341,7 +341,7 @@ class FileStorageTests(unittest.TestCase):
def test_file_chunks_error(self): def test_file_chunks_error(self):
""" """
Test behaviour when file.chunks() is raising an error Test behavior when file.chunks() is raising an error
""" """
f1 = ContentFile('chunks fails') f1 = ContentFile('chunks fails')

View File

@ -304,7 +304,7 @@ class FixtureLoadingTests(DumpDataAssertMixin, TestCase):
]) ])
def test_ambiguous_compressed_fixture(self): def test_ambiguous_compressed_fixture(self):
# The name "fixture5" is ambigous, so loading it will raise an error # The name "fixture5" is ambiguous, so loading it will raise an error
with self.assertRaises(management.CommandError) as cm: with self.assertRaises(management.CommandError) as cm:
management.call_command('loaddata', 'fixture5', verbosity=0) management.call_command('loaddata', 'fixture5', verbosity=0)
self.assertIn("Multiple fixtures named 'fixture5'", cm.exception.args[0]) self.assertIn("Multiple fixtures named 'fixture5'", cm.exception.args[0])

View File

@ -64,7 +64,7 @@ class TestFixtures(TestCase):
def test_loaddata_not_found_fields_not_ignore(self): def test_loaddata_not_found_fields_not_ignore(self):
""" """
Test for ticket #9279 -- Error is raised for entries in Test for ticket #9279 -- Error is raised for entries in
the serialised data for fields that have been removed the serialized data for fields that have been removed
from the database when not ignored. from the database when not ignored.
""" """
with self.assertRaises(DeserializationError): with self.assertRaises(DeserializationError):
@ -77,7 +77,7 @@ class TestFixtures(TestCase):
def test_loaddata_not_found_fields_ignore(self): def test_loaddata_not_found_fields_ignore(self):
""" """
Test for ticket #9279 -- Ignores entries in Test for ticket #9279 -- Ignores entries in
the serialised data for fields that have been removed the serialized data for fields that have been removed
from the database. from the database.
""" """
management.call_command( management.call_command(
@ -90,7 +90,7 @@ class TestFixtures(TestCase):
def test_loaddata_not_found_fields_ignore_xml(self): def test_loaddata_not_found_fields_ignore_xml(self):
""" """
Test for ticket #19998 -- Ignore entries in the XML serialised data Test for ticket #19998 -- Ignore entries in the XML serialized data
for fields that have been removed from the model definition. for fields that have been removed from the model definition.
""" """
management.call_command( management.call_command(

View File

@ -113,7 +113,7 @@ class ArticleTranslationDescriptor(ReverseSingleRelatedObjectDescriptor):
class ColConstraint(object): class ColConstraint(object):
# Antyhing with as_sql() method works in get_extra_restriction(). # Anything with as_sql() method works in get_extra_restriction().
def __init__(self, alias, col, value): def __init__(self, alias, col, value):
self.alias, self.col, self.value = alias, col, value self.alias, self.col, self.value = alias, col, value

View File

@ -510,7 +510,7 @@ class FormsExtraTestCase(TestCase, AssertFormErrorsMixin):
self.assertFormErrors(['This is not a valid IPv6 address.'], f.clean, '1:2') self.assertFormErrors(['This is not a valid IPv6 address.'], f.clean, '1:2')
def test_generic_ipaddress_normalization(self): def test_generic_ipaddress_normalization(self):
# Test the normalising code # Test the normalizing code
f = GenericIPAddressField() f = GenericIPAddressField()
self.assertEqual(f.clean(' ::ffff:0a0a:0a0a '), '::ffff:10.10.10.10') self.assertEqual(f.clean(' ::ffff:0a0a:0a0a '), '::ffff:10.10.10.10')
self.assertEqual(f.clean(' ::ffff:10.10.10.10 '), '::ffff:10.10.10.10') self.assertEqual(f.clean(' ::ffff:10.10.10.10 '), '::ffff:10.10.10.10')

View File

@ -1710,7 +1710,7 @@ class FormsTestCase(TestCase):
def test_empty_permitted(self): def test_empty_permitted(self):
# Sometimes (pretty much in formsets) we want to allow a form to pass validation # Sometimes (pretty much in formsets) we want to allow a form to pass validation
# if it is completely empty. We can accomplish this by using the empty_permitted # if it is completely empty. We can accomplish this by using the empty_permitted
# agrument to a form constructor. # argument to a form constructor.
class SongForm(Form): class SongForm(Form):
artist = CharField() artist = CharField()
name = CharField() name = CharField()
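Editor's note: the argument mentioned above in isolation, assuming a configured Django environment:

    from django import forms

    class SongForm(forms.Form):
        artist = forms.CharField()
        name = forms.CharField()

    form = SongForm({}, empty_permitted=True)  # bound, but every field left blank
    form.is_valid()   # True: an unchanged, empty-permitted form skips validation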

View File

@ -164,7 +164,7 @@ urlpatterns = patterns('',
views.AuthorListCustomPaginator.as_view()), views.AuthorListCustomPaginator.as_view()),
# YearArchiveView # YearArchiveView
# Mixing keyword and possitional captures below is intentional; the views # Mixing keyword and positional captures below is intentional; the views
# ought to be able to accept either. # ought to be able to accept either.
(r'^dates/books/(?P<year>\d{4})/$', (r'^dates/books/(?P<year>\d{4})/$',
views.BookYearArchive.as_view()), views.BookYearArchive.as_view()),

View File

@ -100,7 +100,7 @@ class ExtractorTests(SimpleTestCase):
return self._assertPoLocComment(True, po_filename, line_number, *comment_parts) return self._assertPoLocComment(True, po_filename, line_number, *comment_parts)
def assertLocationCommentNotPresent(self, po_filename, line_number, *comment_parts): def assertLocationCommentNotPresent(self, po_filename, line_number, *comment_parts):
"""Check the oposite of assertLocationComment()""" """Check the opposite of assertLocationComment()"""
return self._assertPoLocComment(False, po_filename, line_number, *comment_parts) return self._assertPoLocComment(False, po_filename, line_number, *comment_parts)

Some files were not shown because too many files have changed in this diff.