Fix many many typos in comments throughout the codebase

Alex Gaynor 2014-04-26 10:18:45 -07:00
parent 8b5b199e20
commit 2bcb8bfc8d
53 changed files with 79 additions and 80 deletions

View File

@@ -328,7 +328,7 @@ class BaseModelAdmin(six.with_metaclass(forms.MediaDefiningClass)):
 """
 # We access the property and check if it triggers a warning.
 # If it does, then it's ours and we can safely ignore it, but if
-# it doesn't then it has been overriden so we must warn about the
+# it doesn't then it has been overridden so we must warn about the
 # deprecation.
 with warnings.catch_warnings(record=True) as w:
 warnings.simplefilter("always")
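The comment above describes checking whether accessing a property raises Django's own deprecation warning. A minimal standalone sketch of that `warnings.catch_warnings(record=True)` pattern (the class and property here are made up, not Django's code):

```python
import warnings

class Legacy(object):
    @property
    def value(self):
        warnings.warn("'value' is deprecated", DeprecationWarning)
        return 42

obj = Legacy()
with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    obj.value
# caught is non-empty, so the warning came from the property access itself
print(len(caught))  # 1
```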

View File

@@ -89,7 +89,7 @@ class PermissionsRequiredDecoratorTest(TestCase):
 def test_permissioned_denied_redirect(self):
-@permission_required(['auth.add_customuser', 'auth.change_customuser', 'non-existant-permission'])
+@permission_required(['auth.add_customuser', 'auth.change_customuser', 'non-existent-permission'])
 def a_view(request):
 return HttpResponse()
 request = self.factory.get('/rand')
@@ -99,7 +99,7 @@ class PermissionsRequiredDecoratorTest(TestCase):
 def test_permissioned_denied_exception_raised(self):
-@permission_required(['auth.add_customuser', 'auth.change_customuser', 'non-existant-permission'], raise_exception=True)
+@permission_required(['auth.add_customuser', 'auth.change_customuser', 'non-existent-permission'], raise_exception=True)
 def a_view(request):
 return HttpResponse()
 request = self.factory.get('/rand')
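These tests exercise `permission_required` with a list of permissions plus the `raise_exception` flag. A hedged sketch of that usage outside the test suite (the view name and permission strings are illustrative):

```python
from django.contrib.auth.decorators import permission_required
from django.http import HttpResponse

@permission_required(['auth.add_user', 'auth.change_user'], raise_exception=True)
def manage_users(request):
    # Reached only when the user holds *all* listed permissions;
    # otherwise PermissionDenied is raised instead of redirecting to login.
    return HttpResponse("ok")
```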

View File

@@ -388,7 +388,7 @@ class PasswordResetFormTest(TestCase):
 def test_nonexistant_email(self):
 """
-Test nonexistant email address. This should not fail because it would
+Test nonexistent email address. This should not fail because it would
 expose information about registered users.
 """
 data = {'email': 'foo@bar.com'}

View File

@@ -231,7 +231,7 @@ class PasswordResetTest(AuthViewsTestCase):
 self.assertContains(response, "The password reset link was invalid")
 def test_confirm_invalid_user(self):
-# Ensure that we get a 200 response for a non-existant user, not a 404
+# Ensure that we get a 200 response for a non-existent user, not a 404
 response = self.client.get('/reset/123456/1-1/')
 self.assertContains(response, "The password reset link was invalid")

View File

@@ -94,7 +94,7 @@ class PreviewTests(TestCase):
 Use the client library to POST to the form with stage set to 3
 to see if our forms done() method is called. Check first
 without the security hash, verify failure, retry with security
-hash and verify sucess.
+hash and verify success.
 """
 # Pass strings for form submittal and add stage variable to

View File

@@ -27,16 +27,16 @@ class GeometryColumns(models.Model):
 @classmethod
 def table_name_col(cls):
 """
-Returns the name of the metadata column used to store the
-the feature table name.
+Returns the name of the metadata column used to store the feature table
+name.
 """
 return 'table_name'
 @classmethod
 def geom_col_name(cls):
 """
-Returns the name of the metadata column used to store the
-the feature geometry column.
+Returns the name of the metadata column used to store the feature
+geometry column.
 """
 return 'column_name'

View File

@@ -28,16 +28,16 @@ class GeometryColumns(models.Model):
 @classmethod
 def table_name_col(cls):
 """
-Returns the name of the metadata column used to store the
-the feature table name.
+Returns the name of the metadata column used to store the feature table
+name.
 """
 return 'f_table_name'
 @classmethod
 def geom_col_name(cls):
 """
-Returns the name of the metadata column used to store the
-the feature geometry column.
+Returns the name of the metadata column used to store the feature
+geometry column.
 """
 return 'f_geometry_column'

View File

@@ -25,16 +25,16 @@ class GeometryColumns(models.Model):
 @classmethod
 def table_name_col(cls):
 """
-Returns the name of the metadata column used to store the
-the feature table name.
+Returns the name of the metadata column used to store the feature table
+name.
 """
 return 'f_table_name'
 @classmethod
 def geom_col_name(cls):
 """
-Returns the name of the metadata column used to store the
-the feature geometry column.
+Returns the name of the metadata column used to store the feature
+geometry column.
 """
 return 'f_geometry_column'

View File

@@ -247,7 +247,7 @@ class SpatiaLiteOperations(DatabaseOperations, BaseSpatialOperations):
 """
 Helper routine for calling SpatiaLite functions and returning
 their result.
-Any error occuring in this method should be handled by the caller.
+Any error occurring in this method should be handled by the caller.
 """
 cursor = self.connection._cursor()
 try:

View File

@@ -38,7 +38,7 @@ class GeoQuerySet(QuerySet):
 Returns the area of the geographic field in an `area` attribute on
 each element of this GeoQuerySet.
 """
-# Peforming setup here rather than in `_spatial_attribute` so that
+# Performing setup here rather than in `_spatial_attribute` so that
 # we can get the units for `AreaField`.
 procedure_args, geo_field = self._spatial_setup('area', field_name=kwargs.get('field_name', None))
 s = {'procedure_args': procedure_args,

View File

@@ -314,7 +314,7 @@ class SQLDateTimeCompiler(compiler.SQLDateTimeCompiler, GeoSQLCompiler):
 datetime = self.resolve_columns(row, fields)[offset]
 elif needs_string_cast:
 datetime = typecast_timestamp(str(datetime))
-# Datetimes are artifically returned in UTC on databases that
+# Datetimes are artificially returned in UTC on databases that
 # don't support time zone. Restore the zone used in the query.
 if settings.USE_TZ:
 datetime = datetime.replace(tzinfo=None)

View File

@@ -28,7 +28,7 @@
 GDAL links to a large number of external libraries that consume RAM when
 loaded. Thus, it may desirable to disable GDAL on systems with limited
 RAM resources -- this may be accomplished by setting `GDAL_LIBRARY_PATH`
-to a non-existant file location (e.g., `GDAL_LIBRARY_PATH='/null/path'`;
+to a non-existent file location (e.g., `GDAL_LIBRARY_PATH='/null/path'`;
 setting to None/False/'' will not work as a string must be given).
 """
 from django.contrib.gis.gdal.error import check_err, OGRException, OGRIndexError, SRSException # NOQA

View File

@@ -192,7 +192,7 @@ class GeoIP(object):
 def country(self, query):
 """
-Returns a dictionary with with the country code and name when given an
+Returns a dictionary with the country code and name when given an
 IP address or a Fully Qualified Domain Name (FQDN). For example, both
 '24.124.1.80' and 'djangoproject.com' are valid parameters.
 """

View File

@@ -50,7 +50,7 @@
 - The `dom_id` property returns the DOM id for the map. Defaults to "map".
 The following attributes may be set or customized in your local settings:
-* GOOGLE_MAPS_API_KEY: String of your Google Maps API key. These are tied to
+* GOOGLE_MAPS_API_KEY: String of your Google Maps API key. These are tied
 to a domain. May be obtained from http://www.google.com/apis/maps/
 * GOOGLE_MAPS_API_VERSION (optional): Defaults to using "2.x"
 * GOOGLE_MAPS_URL (optional): Must have a substitution ('%s') for the API

View File

@@ -71,7 +71,7 @@ class GoogleZoom(object):
 npix = self._npix[zoom]
 # Calculating the pixel x coordinate by multiplying the longitude value
-# with with the number of degrees/pixel at the given zoom level.
+# with the number of degrees/pixel at the given zoom level.
 px_x = round(npix + (lon * self._degpp[zoom]))
 # Creating the factor, and ensuring that 1 or -1 is not passed in as the

View File

@@ -97,7 +97,7 @@ class LayerMapping(object):
 self.mapping = mapping
 self.model = model
-# Checking the layer -- intitialization of the object will fail if
+# Checking the layer -- initialization of the object will fail if
 # things don't check out before hand.
 self.check_layer()

View File

@@ -103,7 +103,7 @@ class CookieStorage(BaseStorage):
 encoded_data = self._encode(messages)
 if self.max_cookie_size:
 # data is going to be stored eventually by SimpleCookie, which
-# adds it's own overhead, which we must account for.
+# adds its own overhead, which we must account for.
 cookie = SimpleCookie() # create outside the loop
 def stored_length(val):
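The comment is about measuring the value the way `SimpleCookie` would actually serialize it, overhead included. This is not the storage backend's exact code, just a hedged sketch of that kind of size check (function and cookie names are made up):

```python
from django.http import SimpleCookie

def encoded_length(value, name='messages'):
    # Serialize through SimpleCookie so quoting/escaping overhead is counted,
    # not just len(value).
    cookie = SimpleCookie()
    cookie[name] = value
    return len(cookie[name].OutputString())

print(encoded_length('a "quoted" value'))  # larger than len('a "quoted" value')
```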

View File

@@ -493,7 +493,7 @@ class CacheSessionTests(SessionTestsMixin, unittest.TestCase):
 },
 }, SESSION_CACHE_ALIAS='sessions')
 def test_non_default_cache(self):
-# Re-initalize the session backend to make use of overridden settings.
+# Re-initialize the session backend to make use of overridden settings.
 self.session = self.backend()
 self.session.save()

View File

@@ -890,7 +890,7 @@ class BaseDatabaseOperations(object):
 if _allow_fallback:
 # Without sqlparse, fall back to the legacy (and buggy) logic.
 warnings.warn(
-"Providing intial SQL data on a %s database will require "
+"Providing initial SQL data on a %s database will require "
 "sqlparse in Django 1.9." % self.connection.vendor,
 RemovedInDjango19Warning)
 from django.core.management.sql import _split_statements

View File

@@ -429,7 +429,7 @@ class ImageField(FileField):
 Dimensions can be forced to update with force=True, which is how
 ImageFileDescriptor.__set__ calls this method.
 """
-# Nothing to update if the field doesn't have have dimension fields.
+# Nothing to update if the field doesn't have dimension fields.
 has_dimension_fields = self.width_field or self.height_field
 if not has_dimension_fields:
 return
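For context, `width_field`/`height_field` are the dimension fields this check looks for. A sketch of how they are typically wired up on a model (model and field names are hypothetical):

```python
from django.db import models

class Photo(models.Model):
    # ImageField keeps these two columns in sync with the uploaded image;
    # without width_field/height_field the update above has nothing to do.
    image = models.ImageField(upload_to='photos/',
                              width_field='image_width',
                              height_field='image_height')
    image_width = models.PositiveIntegerField(null=True, editable=False)
    image_height = models.PositiveIntegerField(null=True, editable=False)
```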

View File

@@ -757,8 +757,8 @@ class QuerySet(object):
 evaluated.
 When prefetch_related() is called more than once, the list of lookups to
-prefetch is appended to. If prefetch_related(None) is called, the
-the list is cleared.
+prefetch is appended to. If prefetch_related(None) is called, the list
+is cleared.
 """
 clone = self._clone()
 if lookups == (None,):
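A small sketch of the accumulate-then-clear behaviour the docstring describes (the `Author` model and its `books`/`awards` relations are hypothetical):

```python
from myapp.models import Author  # hypothetical model with 'books' and 'awards' relations

qs = Author.objects.prefetch_related('books')
qs = qs.prefetch_related('awards')   # lookups accumulate: 'books' and 'awards'
qs = qs.prefetch_related(None)       # passing None clears all pending lookups
```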
@@ -869,7 +869,7 @@ class QuerySet(object):
 def using(self, alias):
 """
-Selects which database this QuerySet should excecute its query against.
+Selects which database this QuerySet should execute its query against.
 """
 clone = self._clone()
 clone._db = alias
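Usage is a one-liner, assuming a 'replica' alias is defined in `DATABASES` and a hypothetical `Entry` model:

```python
from myapp.models import Entry  # hypothetical model

qs = Entry.objects.using('replica')  # evaluate this queryset against the 'replica' alias
```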
@@ -1599,7 +1599,7 @@ class RawQuerySet(object):
 def using(self, alias):
 """
-Selects which database this Raw QuerySet should excecute it's query against.
+Selects which database this Raw QuerySet should execute its query against.
 """
 return RawQuerySet(self.raw_query, model=self.model,
 query=self.query.clone(using=alias),
@@ -1621,7 +1621,7 @@ class RawQuerySet(object):
 index = self._columns.index(query_name)
 self._columns[index] = model_name
 except ValueError:
-# Ignore translations for non-existant column names
+# Ignore translations for non-existent column names
 pass
 return self._columns

View File

@@ -1132,7 +1132,7 @@ class SQLDateTimeCompiler(SQLCompiler):
 datetime = self.resolve_columns(row, fields)[offset]
 elif needs_string_cast:
 datetime = typecast_timestamp(str(datetime))
-# Datetimes are artifically returned in UTC on databases that
+# Datetimes are artificially returned in UTC on databases that
 # don't support time zone. Restore the zone used in the query.
 if settings.USE_TZ:
 if datetime is None:

View File

@@ -1,5 +1,5 @@
 """
-Useful auxilliary data structures for query construction. Not useful outside
+Useful auxiliary data structures for query construction. Not useful outside
 the SQL domain.
 """

View File

@@ -40,7 +40,7 @@ class SQLEvaluator(object):
 return self.expression.evaluate(self, qn, connection)
 #####################################################
-# Vistor methods for initial expression preparation #
+# Visitor methods for initial expression preparation #
 #####################################################
 def prepare_node(self, node, query, allow_joins):
@@ -72,7 +72,7 @@ class SQLEvaluator(object):
 [f.name for f in self.opts.fields]))
 ##################################################
-# Vistor methods for final expression evaluation #
+# Visitor methods for final expression evaluation #
 ##################################################
 def evaluate_node(self, node, qn, connection):

View File

@@ -199,7 +199,7 @@ class Query(object):
 def sql_with_params(self):
 """
 Returns the query as an SQL string and the parameters that will be
-subsituted into the query.
+substituted into the query.
 """
 return self.get_compiler(DEFAULT_DB_ALIAS).as_sql()
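For illustration, this is roughly how the method is reached from a queryset (the `Entry` model is hypothetical):

```python
from myapp.models import Entry  # hypothetical model

qs = Entry.objects.filter(pub_date__year=2014)
sql, params = qs.query.sql_with_params()
# sql contains '%s' placeholders; params is the tuple the backend will bind to them
```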
@@ -675,7 +675,7 @@ class Query(object):
 alias = '%s%d' % (self.alias_prefix, len(self.alias_map) + 1)
 current.append(alias)
 else:
-# The first occurence of a table uses the table name directly.
+# The first occurrence of a table uses the table name directly.
 alias = table_name
 self.table_map[alias] = [alias]
 self.alias_refcount[alias] = 1
@@ -1242,7 +1242,7 @@ class Query(object):
 """
 having_parts = []
 for c in q_object.children[:]:
-# When constucting the having nodes we need to take care to
+# When constructing the having nodes we need to take care to
 # preserve the negation status from the upper parts of the tree
 if isinstance(c, Node):
 # For each negated child, flip the in_negated flag.
@@ -1802,7 +1802,7 @@ class Query(object):
 """
 If any fields are marked to be deferred, returns a dictionary mapping
 models to a set of names in those fields that will be loaded. If a
-model is not in the returned dictionary, none of it's fields are
+model is not in the returned dictionary, none of its fields are
 deferred.
 If no fields are marked for deferral, returns an empty dictionary.

View File

@@ -66,7 +66,7 @@ class WhereNode(tree.Node):
 # emptiness and transform any non-empty values correctly.
 value = list(value)
-# The "value_annotation" parameter is used to pass auxilliary information
+# The "value_annotation" parameter is used to pass auxiliary information
 # about the value(s) to the query construction. Specifically, datetime
 # and empty values need special handling. Other types could be used
 # here in the future (using Python types is suggested for consistency).

View File

@@ -187,7 +187,7 @@ class Field(object):
 Return True if data differs from initial.
 """
 # For purposes of seeing whether something has changed, None is
-# the same as an empty string, if the data or inital value we get
+# the same as an empty string, if the data or initial value we get
 # is None, replace it w/ ''.
 initial_value = initial if initial is not None else ''
 try:

View File

@@ -231,11 +231,11 @@ class HttpRequest(object):
 try:
 self._post, self._files = self.parse_file_upload(self.META, data)
 except MultiPartParserError:
-# An error occured while parsing POST data. Since when
+# An error occurred while parsing POST data. Since when
 # formatting the error the request handler might access
 # self.POST, set self._post and self._file to prevent
 # attempts to parse POST data again.
-# Mark that an error occured. This allows self.__repr__ to
+# Mark that an error occurred. This allows self.__repr__ to
 # be explicit about it instead of simply representing an
 # empty POST
 self._mark_post_parse_error()

View File

@@ -352,7 +352,7 @@ def do_translate(parser, token):
 def top(self):
 value = self.value()
-# Backwards Compatiblity fix:
+# Backwards Compatibility fix:
 # FilterExpression does not support single-quoted strings,
 # so we make a cheap localized fix in order to maintain
 # backwards compatibility with existing uses of ``trans``

View File

@@ -178,7 +178,7 @@ def dependency_ordered(test_databases, dependencies):
 # Maps db signature to dependencies of all it's aliases
 dependencies_map = {}
-# sanity check - no DB can depend on it's own alias
+# sanity check - no DB can depend on its own alias
 for sig, (_, aliases) in test_databases:
 all_deps = set()
 for alias in aliases:

View File

@@ -232,8 +232,7 @@ class SimpleTestCase(unittest.TestCase):
 def settings(self, **kwargs):
 """
-A context manager that temporarily sets a setting and reverts
-back to the original value when exiting the context.
+A context manager that temporarily sets a setting and reverts to the original value when exiting the context.
 """
 return override_settings(**kwargs)
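A short sketch of the documented behaviour, assuming a configured Django settings module (the test class itself is made up):

```python
from django.conf import settings
from django.test import SimpleTestCase

class SettingsContextTests(SimpleTestCase):
    def test_temporary_override(self):
        with self.settings(LOGIN_URL='/other/login/'):
            self.assertEqual(settings.LOGIN_URL, '/other/login/')
        # once the block exits, the original LOGIN_URL is restored
        self.assertNotEqual(settings.LOGIN_URL, '/other/login/')
```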

View File

@@ -141,7 +141,7 @@ class TarArchive(BaseArchive):
 self._archive.list(*args, **kwargs)
 def extract(self, to_path):
-# note: python<=2.5 doesnt seem to know about pax headers, filter them
+# note: python<=2.5 doesn't seem to know about pax headers, filter them
 members = [member for member in self._archive.getmembers()
 if member.name != 'pax_global_header']
 leading = self.has_leading_dir(members)

View File

@@ -250,7 +250,7 @@ def get_quantifier(ch, input_iter):
 Parse a quantifier from the input, where "ch" is the first character in the
 quantifier.
-Returns the minimum number of occurences permitted by the quantifier and
+Returns the minimum number of occurrences permitted by the quantifier and
 either None or the next character from the input_iter if the next character
 is not part of the quantifier.
 """

View File

@@ -698,7 +698,7 @@ def _get_next_prev(generic_view, date, is_previous, period):
 * If allow_empty and allow_future are both true, this is easy: just
 return the naive result (just the next/previous day/week/month,
-reguardless of object existence.)
+regardless of object existence.)
 * If allow_empty is true, allow_future is false, and the naive result
 isn't in the future, then return it; otherwise return None.

View File

@@ -60,7 +60,7 @@ class SingleObjectMixin(ContextMixin):
 Return the `QuerySet` that will be used to look up the object.
 Note that this method is called by the default implementation of
-`get_object` and may not be called if `get_object` is overriden.
+`get_object` and may not be called if `get_object` is overridden.
 """
 if self.queryset is None:
 if self.model:
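A sketch of the override pattern the docstring refers to: customize `get_queryset()` and let the default `get_object()` resolve against it (the app, model, and view names here are hypothetical):

```python
from django.views.generic import DetailView
from myapp.models import Article  # hypothetical app and model

class PublishedArticleDetail(DetailView):
    def get_queryset(self):
        # the default get_object() looks up the pk/slug against this queryset,
        # so unpublished articles return 404 instead of being served
        return Article.objects.filter(published=True)
```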

View File

@@ -409,7 +409,7 @@ class SystemChecksTestCase(TestCase):
 def test_nonexistant_field(self):
 class SongAdmin(admin.ModelAdmin):
-readonly_fields = ("title", "nonexistant")
+readonly_fields = ("title", "nonexistent")
 errors = SongAdmin.check(model=Song)
 expected = [

View File

@@ -385,7 +385,7 @@ class TestInlinePermissions(TestCase):
 author = Author.objects.create(pk=1, name='The Author')
 book = author.books.create(name='The inline Book')
 self.author_change_url = '/admin/admin_inlines/author/%i/' % author.id
-# Get the ID of the automatically created intermediate model for thw Author-Book m2m
+# Get the ID of the automatically created intermediate model for the Author-Book m2m
 author_book_auto_m2m_intermediate = Author.books.through.objects.get(author=author, book=book)
 self.author_book_auto_m2m_intermediate_id = author_book_auto_m2m_intermediate.pk

View File

@@ -1564,7 +1564,7 @@ class Discovery(TestCase):
 def test_precedence(self):
 """
-Apps listed first in INSTALLED_APPS have precendence.
+Apps listed first in INSTALLED_APPS have precedence.
 """
 with self.settings(INSTALLED_APPS=['admin_scripts.complex_app',
 'admin_scripts.simple_app',

View File

@@ -214,10 +214,10 @@ class ValidationTestCase(TestCase):
 def test_nonexistant_field(self):
 class SongAdmin(admin.ModelAdmin):
-readonly_fields = ("title", "nonexistant")
+readonly_fields = ("title", "nonexistent")
 self.assertRaisesMessage(ImproperlyConfigured,
-str_prefix("SongAdmin.readonly_fields[1], %(_)s'nonexistant' is not a callable "
+str_prefix("SongAdmin.readonly_fields[1], %(_)s'nonexistent' is not a callable "
 "or an attribute of 'SongAdmin' or found in the model 'Song'."),
 SongAdmin.validate,
 Song)

View File

@@ -420,7 +420,7 @@ class PostAdmin(admin.ModelAdmin):
 if instance.pk:
 return "%d amount of cool." % instance.pk
 else:
-return "Unkown coolness."
+return "Unknown coolness."
 def value(self, instance):
 return 1000

View File

@@ -1045,8 +1045,8 @@ class AdminViewPermissionsTest(TestCase):
 """
 Make sure only staff members can log in.
-Successful posts to the login page will redirect to the orignal url.
-Unsuccessfull attempts will continue to render the login page with
+Successful posts to the login page will redirect to the original url.
+Unsuccessful attempts will continue to render the login page with
 a 200 status code.
 """
 login_url = reverse('admin:login') + '?next=/test_admin/admin/'
@@ -3649,7 +3649,7 @@ class ReadonlyTest(TestCase):
 self.assertContains(response,
 "<label>Awesomeness level:</label>")
 self.assertContains(response, "Very awesome.")
-self.assertContains(response, "Unkown coolness.")
+self.assertContains(response, "Unknown coolness.")
 self.assertContains(response, "foo")
 # Checks that multiline text in a readonly field gets <br /> tags

View File

@@ -91,6 +91,6 @@ class DataTypesTestCase(TestCase):
 b = RumBaba.objects.create()
 # Verify we didn't break DateTimeField behavior
 self.assertIsInstance(b.baked_timestamp, datetime.datetime)
-# We need to test this this way because datetime.datetime inherits
+# We need to test this way because datetime.datetime inherits
 # from datetime.date:
 self.assertIsInstance(b.baked_date, datetime.date) and not isinstance(b.baked_date, datetime.datetime)

View File

@@ -112,7 +112,7 @@ class FieldDeconstructionTests(TestCase):
 def test_decimal_field_0_decimal_places(self):
 """
-A DecimalField with decimal_places=0 shoudl work (#22272).
+A DecimalField with decimal_places=0 should work (#22272).
 """
 field = models.DecimalField(max_digits=5, decimal_places=0)
 name, path, args, kwargs = field.deconstruct()
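For context, the behaviour being tested (assuming the #22272 fix is in place) looks roughly like this:

```python
from django.db import models

field = models.DecimalField(max_digits=5, decimal_places=0)
name, path, args, kwargs = field.deconstruct()
# expected: kwargs == {'max_digits': 5, 'decimal_places': 0}
# i.e. decimal_places=0 survives deconstruction instead of being dropped as falsy
```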

View File

@@ -162,9 +162,9 @@ class FileUploadTests(TestCase):
 response = self.client.request(**r)
 # The filenames should have been sanitized by the time it got to the view.
-recieved = json.loads(response.content.decode('utf-8'))
+received = json.loads(response.content.decode('utf-8'))
 for i, name in enumerate(scary_file_names):
-got = recieved["file%s" % i]
+got = received["file%s" % i]
 self.assertEqual(got, "hax0rd.txt")
 def test_filename_overflow(self):

View File

@@ -101,7 +101,7 @@ class PrimaryKeyCharModel(models.Model):
 class FksToBooleans(models.Model):
-"""Model wih FKs to models with {Null,}BooleanField's, #15040"""
+"""Model with FKs to models with {Null,}BooleanField's, #15040"""
 bf = models.ForeignKey(BooleanModel)
 nbf = models.ForeignKey(NullBooleanModel)

View File

@@ -1844,7 +1844,7 @@ class FileAndImageFieldTests(TestCase):
 @skipUnless(test_images, "Pillow not installed")
 def test_image_field(self):
-# ImageField and FileField are nearly identical, but they differ slighty when
+# ImageField and FileField are nearly identical, but they differ slightly when
 # it comes to validation. This specifically tests that #6302 is fixed for
 # both file fields and image fields.

View File

@@ -258,7 +258,7 @@ class ModelInheritanceTest(TestCase):
 self.assertEqual(m2mchildren, [])
 # Ordering should not include any database column more than once (this
-# is most likely to ocurr naturally with model inheritance, so we
+# is most likely to occur naturally with model inheritance, so we
 # check it here). Regression test for #9390. This necessarily pokes at
 # the SQL string for the query, since the duplicate problems are only
 # apparent at that late stage.
@@ -363,10 +363,10 @@ class ModelInheritanceTest(TestCase):
 self.assertEqual(parties, [bachelor])
 # Check that a subclass of a subclass of an abstract model doesn't get
-# it's own accessor.
+# its own accessor.
 self.assertFalse(hasattr(p2, 'messybachelorparty_set'))
-# ... but it does inherit the m2m from it's parent
+# ... but it does inherit the m2m from its parent
 messy = MessyBachelorParty.objects.create(
 name='Bachelor party for Dave')
 messy.attendees = [p4]

View File

@@ -2009,7 +2009,7 @@ class CloneTests(TestCase):
 n_list = Note.objects.all()
 # Evaluate the Note queryset, populating the query cache
 list(n_list)
-# Use the note queryset in a query, and evalute
+# Use the note queryset in a query, and evaluate
 # that query in a way that involves cloning.
 self.assertEqual(ExtraInfo.objects.filter(note__in=n_list)[0].info, 'good')

View File

@@ -527,7 +527,7 @@ class SchemaTests(TransactionTestCase):
 UniqueTest.objects.create(year=2011, slug="bar")
 self.assertRaises(IntegrityError, UniqueTest.objects.create, year=2012, slug="foo")
 UniqueTest.objects.all().delete()
-# Alter the model to it's non-unique-together companion
+# Alter the model to its non-unique-together companion
 with connection.schema_editor() as editor:
 editor.alter_unique_together(
 UniqueTest,

View File

@@ -317,7 +317,7 @@ class SyndicationFeedTest(FeedTestCase):
 Test that datetimes are correctly converted to the local time zone.
 """
 # Naive date times passed in get converted to the local time zone, so
-# check the recived zone offset against the local offset.
+# check the received zone offset against the local offset.
 response = self.client.get('/syndication/naive-dates/')
 doc = minidom.parseString(response.content)
 updated = doc.getElementsByTagName('updated')[0].firstChild.wholeText

View File

@@ -167,7 +167,7 @@ class SimpleTemplateResponseTest(TestCase):
 self.assertEqual(unpickled_response['content-type'], response['content-type'])
 self.assertEqual(unpickled_response.status_code, response.status_code)
-# ...and the unpickled reponse doesn't have the
+# ...and the unpickled response doesn't have the
 # template-related attributes, so it can't be re-rendered
 template_attrs = ('template_name', 'context_data', '_post_render_callbacks')
 for attr in template_attrs:
@@ -273,7 +273,7 @@ class TemplateResponseTest(TestCase):
 self.assertEqual(unpickled_response['content-type'], response['content-type'])
 self.assertEqual(unpickled_response.status_code, response.status_code)
-# ...and the unpickled reponse doesn't have the
+# ...and the unpickled response doesn't have the
 # template-related attributes, so it can't be re-rendered
 template_attrs = ('template_name', 'context_data',
 '_post_render_callbacks', '_request', '_current_app')

View File

@@ -717,7 +717,7 @@ class TemplateTests(TestCase):
 'basic-syntax27': (r'{{ _("\"fred\"") }}', {}, "\"fred\""),
 # regression test for ticket #12554
-# make sure a silent_variable_failure Exception is supressed
+# make sure a silent_variable_failure Exception is suppressed
 # on dictionary and attribute lookup
 'basic-syntax28': ("{{ a.b }}", {'a': SilentGetItemClass()}, ('', 'INVALID')),
 'basic-syntax29': ("{{ a.b }}", {'a': SilentAttrClass()}, ('', 'INVALID')),

View File

@@ -88,7 +88,7 @@ TEST_DATA = (
 (validate_ipv4_address, '25.1 .1.1', ValidationError),
 # validate_ipv6_address uses django.utils.ipv6, which
-# is tested in much greater detail in it's own testcase
+# is tested in much greater detail in its own testcase
 (validate_ipv6_address, 'fe80::1', None),
 (validate_ipv6_address, '::1', None),
 (validate_ipv6_address, '1:2:3:4:5:6:7:8', None),