Fixed E125 pep8 warnings

Author: Christopher Medrela (committed by Tim Graham)
Date: 2013-11-26 10:43:46 +01:00
Parent: d1df395f3a
Commit: 7477a4ffde
38 changed files with 67 additions and 67 deletions
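
The E125 warning from the pep8 tool flags a continuation line that is indented to the same level as the logical line that follows it, which makes the tail of a multi-line condition hard to tell apart from the block it guards. The changes below resolve the warning by re-indenting such continuation lines, typically by giving them an extra level of indentation or aligning them with the opening bracket. A minimal sketch of the pattern (the names and print calls are illustrative, not code from this commit):

first_condition = True
second_condition = False

# Flagged by pep8 as E125: the continuation line shares its indentation with
# the block that follows, so the condition and the body blend together.
if (first_condition and
    second_condition):
    print("both conditions hold")

# Accepted: the continuation line is indented further than the body below it,
# keeping the end of the condition visually distinct.
if (first_condition and
        second_condition):
    print("both conditions hold")

Running the checker with something like "pep8 --select=E125 <files>" (or flake8 with the same --select option) reports only these continuation-line warnings; exact flags vary by tool version.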

@@ -613,7 +613,7 @@ class ModelAdmin(BaseModelAdmin):
}
defaults.update(kwargs)
if (defaults.get('fields') is None
        and not modelform_defines_fields(defaults.get('form'))):
    defaults['fields'] = forms.ALL_FIELDS
return modelform_factory(self.model, **defaults)

@@ -169,7 +169,7 @@ class AdminSite(object):
    raise ImproperlyConfigured("Put 'django.contrib.contenttypes' in "
        "your INSTALLED_APPS setting in order to use the admin application.")
if not ('django.contrib.auth.context_processors.auth' in settings.TEMPLATE_CONTEXT_PROCESSORS or
        'django.core.context_processors.auth' in settings.TEMPLATE_CONTEXT_PROCESSORS):
    raise ImproperlyConfigured("Put 'django.contrib.auth.context_processors.auth' "
        "in your TEMPLATE_CONTEXT_PROCESSORS setting in order to use the admin application.")

@@ -155,7 +155,7 @@ class BaseValidator(object):
for field, val in cls.prepopulated_fields.items():
    f = get_field(cls, model, 'prepopulated_fields', field)
    if isinstance(f, (models.DateTimeField, models.ForeignKey,
            models.ManyToManyField)):
        raise ImproperlyConfigured("'%s.prepopulated_fields['%s']' "
            "is either a DateTimeField, ForeignKey or "
            "ManyToManyField. This isn't allowed."

@@ -29,7 +29,7 @@ def user_passes_test(test_func, login_url=None, redirect_field_name=REDIRECT_FIE
login_scheme, login_netloc = urlparse(resolved_login_url)[:2]
current_scheme, current_netloc = urlparse(path)[:2]
if ((not login_scheme or login_scheme == current_scheme) and
        (not login_netloc or login_netloc == current_netloc)):
    path = request.get_full_path()
from django.contrib.auth.views import redirect_to_login
return redirect_to_login(

@@ -23,8 +23,8 @@ class FlatpageForm(forms.ModelForm):
        code='missing_leading_slash',
    )
if (settings.APPEND_SLASH and
        'django.middleware.common.CommonMiddleware' in settings.MIDDLEWARE_CLASSES and
        not url.endswith('/')):
    raise forms.ValidationError(
        ugettext("URL is missing a trailing slash."),
        code='missing_trailing_slash',

@@ -123,7 +123,7 @@ class WizardView(TemplateView):
@classmethod
def get_initkwargs(cls, form_list=None, initial_dict=None,
        instance_dict=None, condition_dict=None, *args, **kwargs):
    """
    Creates a dict with all needed parameters for the form wizard instances.

@@ -369,7 +369,7 @@ class PostGISOperations(DatabaseOperations, BaseSpatialOperations):
dist_param = value
if (not geography and geodetic and lookup_type != 'dwithin'
        and option == 'spheroid'):
    # using distance_spheroid requires the spheroid of the field as
    # a parameter.
    return [f._spheroid, dist_param]
@@ -467,7 +467,7 @@ class PostGISOperations(DatabaseOperations, BaseSpatialOperations):
def two_to_three(np):
    return np >= 2 and np <= 3
if (lookup_type in self.distance_functions and
        lookup_type != 'dwithin'):
    return two_to_three(num_param)
else:
    return exactly_two(num_param)

@@ -39,7 +39,7 @@ class GeoWhereNode(WhereNode):
if isinstance(data, (list, tuple)):
    obj, lookup_type, value = data
    if (isinstance(obj, Constraint) and
            isinstance(obj.field, GeometryField)):
        data = (GeoConstraint(obj), lookup_type, value)
return super(GeoWhereNode, self)._prepare_data(data)

@@ -1,7 +1,7 @@
from django.db import connection
if (hasattr(connection.ops, 'spatial_version') and
        not connection.ops.mysql):
    # Getting the `SpatialRefSys` and `GeometryColumns`
    # models for the default spatial backend. These
    # aliases are provided for backwards-compatibility.

@@ -339,7 +339,7 @@ class LayerMapping(object):
otherwise the proper exception is raised.
"""
if (isinstance(ogr_field, OFTString) and
        isinstance(model_field, (models.CharField, models.TextField))):
    if self.encoding:
        # The encoding for OGR data sources may be specified here
        # (e.g., 'cp437' for Census Bureau boundary files).

@@ -141,7 +141,7 @@ class SessionTestsMixin(object):
def test_save(self):
    if (hasattr(self.session, '_cache') and 'DummyCache' in
            settings.CACHES[settings.SESSION_CACHE_ALIAS]['BACKEND']):
        raise unittest.SkipTest("Session saving tests require a real cache backend")
    self.session.save()
    self.assertTrue(self.session.exists(self.session.session_key))

@@ -94,7 +94,7 @@ class Sitemap(object):
if all_items_lastmod:
    all_items_lastmod = lastmod is not None
    if (all_items_lastmod and
            (latest_lastmod is None or lastmod > latest_lastmod)):
        latest_lastmod = lastmod
url_info = {
    'item': item,

@@ -153,7 +153,7 @@ class BaseDatabaseWrapper(object):
"""
self.validate_thread_sharing()
if (self.use_debug_cursor or
        (self.use_debug_cursor is None and settings.DEBUG)):
    cursor = self.make_debug_cursor(self._cursor())
else:
    cursor = utils.CursorWrapper(self._cursor(), self)

@@ -8,7 +8,7 @@ class SQLCompiler(compiler.SQLCompiler):
index_extra_select = len(self.query.extra_select)
for value, field in zip_longest(row[index_extra_select:], fields):
    if (field and field.get_internal_type() in ("BooleanField", "NullBooleanField") and
            value in (0, 1)):
        value = bool(value)
    values.append(value)
return row[:index_extra_select] + tuple(values)

@@ -135,7 +135,7 @@ class Collector(object):
# Foreign keys pointing to this model, both from m2m and other
# models.
for related in opts.get_all_related_objects(
        include_hidden=True, include_proxy_eq=True):
    if related.field.rel.on_delete is not DO_NOTHING:
        return False
# GFK deletes
@@ -145,7 +145,7 @@ class Collector(object):
    return True
def collect(self, objs, source=None, nullable=False, collect_related=True,
        source_attr=None, reverse_dependency=False):
    """
    Adds 'objs' to the collection of objects to be deleted as well as all
    parent instances. 'objs' must be a homogenous iterable collection of

@@ -1281,7 +1281,7 @@ class IntegerField(Field):
def get_prep_lookup(self, lookup_type, value):
    if ((lookup_type == 'gte' or lookup_type == 'lt')
            and isinstance(value, float)):
        value = math.ceil(value)
    return super(IntegerField, self).get_prep_lookup(lookup_type, value)

@@ -392,7 +392,7 @@ class QuerySet(object):
fields = self.model._meta.local_fields
with transaction.commit_on_success_unless_managed(using=self.db):
    if (connection.features.can_combine_inserts_with_and_without_auto_increment_pk
            and self.model._meta.has_auto_field):
        self._batched_insert(objs, fields, batch_size)
    else:
        objs_with_pk, objs_without_pk = partition(lambda o: o.pk is None, objs)
@@ -1494,7 +1494,7 @@ class RawQuerySet(object):
annotated model instances.
"""
def __init__(self, raw_query, model=None, query=None, params=None,
        translations=None, using=None, hints=None):
    self.raw_query = raw_query
    self.model = model
    self._db = using

@@ -994,9 +994,9 @@ class Query(object):
    raise FieldError("Cannot compute %s('%s'): '%s' is an aggregate" % (
        aggregate.name, field_name, field_name))
elif ((len(field_list) > 1) or
        (field_list[0] not in [i.name for i in opts.fields]) or
        self.group_by is None or
        not is_summary):
    # If:
    # - the field descriptor has more than one part (foo__bar), or
    # - the field descriptor is referencing an m2m/m2o field, or
@@ -1906,7 +1906,7 @@ class Query(object):
# is_nullable() is needed to the compiler stage, but that is not easy
# to do currently.
if ((connections[DEFAULT_DB_ALIAS].features.interprets_empty_strings_as_nulls)
        and field.empty_strings_allowed):
    return True
else:
    return field.null

@@ -61,7 +61,7 @@ class DeleteQuery(Query):
innerq_used_tables = [t for t in innerq.tables
                      if innerq.alias_refcount[t]]
if ((not innerq_used_tables or innerq_used_tables == self.tables)
        and not len(innerq.having)):
    # There is only the base table in use in the query, and there is
    # no aggregate filtering going on.
    self.where = innerq.where

@@ -69,7 +69,7 @@ class WhereNode(tree.Node):
# and empty values need special handling. Other types could be used
# here in the future (using Python types is suggested for consistency).
if (isinstance(value, datetime.datetime)
        or (isinstance(obj.field, DateTimeField) and lookup_type != 'isnull')):
    value_annotation = datetime.datetime
elif hasattr(value, 'value_annotation'):
    value_annotation = value.value_annotation
@@ -207,7 +207,7 @@ class WhereNode(tree.Node):
    params = field_params + params
if (len(params) == 1 and params[0] == '' and lookup_type == 'exact'
        and connection.features.interprets_empty_strings_as_nulls):
    lookup_type = 'isnull'
    value_annotation = True

@@ -1102,8 +1102,8 @@ class FilePathField(ChoiceField):
        continue
    full_file = os.path.join(self.path, f)
    if (((self.allow_files and os.path.isfile(full_file)) or
            (self.allow_folders and os.path.isdir(full_file))) and
            (self.match is None or self.match_re.search(f))):
        self.choices.append((full_file, f))
except OSError:
    pass

@@ -325,15 +325,15 @@ class BaseFormSet(object):
    self._errors.append(form.errors)
try:
    if (self.validate_max and
            self.total_form_count() - len(self.deleted_forms) > self.max_num) or \
            self.management_form.cleaned_data[TOTAL_FORM_COUNT] > self.absolute_max:
        raise ValidationError(ungettext(
            "Please submit %d or fewer forms.",
            "Please submit %d or fewer forms.", self.max_num) % self.max_num,
            code='too_many_forms',
        )
    if (self.validate_min and
            self.total_form_count() - len(self.deleted_forms) < self.min_num):
        raise ValidationError(ungettext(
            "Please submit %d or more forms.",
            "Please submit %d or more forms.", self.min_num) % self.min_num,

@@ -524,7 +524,7 @@ def modelform_factory(model, form=ModelForm, fields=None, exclude=None,
# be difficult to debug for code that needs updating, so we produce the
# warning here too.
if (getattr(Meta, 'fields', None) is None and
        getattr(Meta, 'exclude', None) is None):
    warnings.warn("Calling modelform_factory without defining 'fields' or "
        "'exclude' explicitly is deprecated",
        DeprecationWarning, stacklevel=2)
@@ -675,7 +675,7 @@ class BaseModelFormSet(BaseFormSet):
for form in valid_forms:
    # see if we have data for both fields
    if (form.cleaned_data and form.cleaned_data[field] is not None
            and form.cleaned_data[unique_for] is not None):
        # if it's a date lookup we need to get the data for all the fields
        if lookup == 'date':
            date = form.cleaned_data[unique_for]
@@ -815,7 +815,7 @@ def modelformset_factory(model, form=ModelForm, formfield_callback=None,
if meta is None:
    meta = type(str('Meta'), (object,), {})
if (getattr(meta, 'fields', fields) is None and
        getattr(meta, 'exclude', exclude) is None):
    warnings.warn("Calling modelformset_factory without defining 'fields' or "
        "'exclude' explicitly is deprecated",
        DeprecationWarning, stacklevel=2)

@@ -66,7 +66,7 @@ class HttpRequest(object):
"""Returns the HTTP host using the environment or request headers."""
# We try three options, in order of decreasing preference.
if settings.USE_X_FORWARDED_HOST and (
        'HTTP_X_FORWARDED_HOST' in self.META):
    host = self.META['HTTP_X_FORWARDED_HOST']
elif 'HTTP_HOST' in self.META:
    host = self.META['HTTP_HOST']

@@ -122,7 +122,7 @@ class CommonMiddleware(object):
    etag = '"%s"' % hashlib.md5(response.content).hexdigest()
if etag is not None:
    if (200 <= response.status_code < 300
            and request.META.get('HTTP_IF_NONE_MATCH') == etag):
        cookies = response.cookies
        response = http.HttpResponseNotModified()
        response.cookies = cookies

@@ -40,7 +40,7 @@ def stringfilter(func):
args = list(args)
args[0] = force_text(args[0])
if (isinstance(args[0], SafeData) and
        getattr(_dec._decorated_function, 'is_safe', False)):
    return mark_safe(func(*args, **kwargs))
return func(*args, **kwargs)

@@ -70,7 +70,7 @@ class BlockNode(Node):
def super(self):
    render_context = self.context.render_context
    if (BLOCK_CONTEXT_KEY in render_context and
            render_context[BLOCK_CONTEXT_KEY].get_block(self.name) is not None):
        return mark_safe(self.render(self.context))
    return ''

@@ -272,7 +272,7 @@ def setup_databases(verbosity, interactive, **kwargs):
mirrors = []
for signature, (db_name, aliases) in dependency_ordered(
        test_databases.items(), dependencies):
    test_db_name = None
    # Actually create the database for the first connection
    for alias in aliases:

@@ -298,7 +298,7 @@ class SimpleTestCase(unittest.TestCase):
# If the response supports deferred rendering and hasn't been rendered
# yet, then ensure that it does get rendered before proceeding further.
if (hasattr(response, 'render') and callable(response.render)
        and not response.is_rendered):
    response.render()
if msg_prefix:
@@ -1043,7 +1043,7 @@ class LiveServerThread(threading.Thread):
        (self.host, port), QuietWSGIRequestHandler)
except socket.error as e:
    if (index + 1 < len(self.possible_ports) and
            e.errno == errno.EADDRINUSE):
        # This port is already in use, so we go on and try with
        # the next one in the list.
        continue
@@ -1097,7 +1097,7 @@ class LiveServerTestCase(TransactionTestCase):
# If using in-memory sqlite databases, pass the connections to
# the server thread.
if (conn.vendor == 'sqlite'
        and conn.settings_dict['NAME'] == ':memory:'):
    # Explicitly enable thread-shareability for this connection
    conn.allow_thread_sharing = True
    connections_override[conn.alias] = conn
@@ -1154,7 +1154,7 @@ class LiveServerTestCase(TransactionTestCase):
# Restore sqlite connections' non-sharability
for conn in connections.all():
    if (conn.vendor == 'sqlite'
            and conn.settings_dict['NAME'] == ':memory:'):
        conn.allow_thread_sharing = False
@classmethod

@@ -74,8 +74,8 @@ def safe_join(base, *paths):
# b) The final path must be the same as the base path.
# c) The base path must be the most root path (meaning either "/" or "C:\\")
if (not normcase(final_path).startswith(normcase(base_path + sep)) and
        normcase(final_path) != normcase(base_path) and
        dirname(normcase(base_path)) != normcase(base_path)):
    raise ValueError('The joined path (%s) is located outside of the base '
        'path component (%s)' % (final_path, base_path))
return final_path

@@ -113,9 +113,9 @@ class SyndicationFeed(object):
    self.items = []
def add_item(self, title, link, description, author_email=None,
        author_name=None, author_link=None, pubdate=None, comments=None,
        unique_id=None, unique_id_is_permalink=None, enclosure=None,
        categories=(), item_copyright=None, ttl=None, updateddate=None, **kwargs):
    """
    Adds an item to the feed. All args are expected to be Python Unicode
    objects except pubdate and updateddate, which are datetime.datetime

@@ -238,7 +238,7 @@ def urlize(text, trim_url_limit=None, nofollow=False, autoescape=False):
    lead = lead + opening
# Keep parentheses at the end only if they're balanced.
if (middle.endswith(closing)
        and middle.count(closing) == middle.count(opening) + 1):
    middle = middle[:-len(closing)]
    trail = closing + trail

@@ -189,7 +189,7 @@ class SafeExceptionReporterFilter(ExceptionReporterFilter):
sensitive_variables = None
while current_frame is not None:
    if (current_frame.f_code.co_name == 'sensitive_variables_wrapper'
            and 'sensitive_variables_wrapper' in current_frame.f_locals):
        # The sensitive_variables decorator was used, so we take note
        # of the sensitive variables' names.
        wrapper = current_frame.f_locals['sensitive_variables_wrapper']
@@ -218,7 +218,7 @@ class SafeExceptionReporterFilter(ExceptionReporterFilter):
    cleansed[name] = self.cleanse_special_types(request, value)
if (tb_frame.f_code.co_name == 'sensitive_variables_wrapper'
        and 'sensitive_variables_wrapper' in tb_frame.f_locals):
    # For good measure, obfuscate the decorated function's arguments in
    # the sensitive_variables decorator's frame, in case the variables
    # associated with those arguments were meant to be obfuscated from
@@ -287,7 +287,7 @@ class ExceptionReporter(object):
    'templates': template_list,
})
if (settings.TEMPLATE_DEBUG and
        hasattr(self.exc_value, 'django_template_source')):
    self.get_template_exception_info()
frames = self.get_traceback_frames()

@@ -150,7 +150,7 @@ class BaseListView(MultipleObjectMixin, View):
# it's better to do a cheap query than to load the unpaginated
# queryset in memory.
if (self.get_paginate_by(self.object_list) is not None
        and hasattr(self.object_list, 'exists')):
    is_empty = not self.object_list.exists()
else:
    is_empty = len(self.object_list) == 0

@@ -145,11 +145,11 @@ def get_template_dirs():
from django.conf import settings
dirs = set()
if ('django.template.loaders.filesystem.load_template_source' in settings.TEMPLATE_LOADERS
        or 'django.template.loaders.filesystem.Loader' in settings.TEMPLATE_LOADERS):
    dirs.update(map(unicode, settings.TEMPLATE_DIRS))
if ('django.template.loaders.app_directories.load_template_source' in settings.TEMPLATE_LOADERS
        or 'django.template.loaders.app_directories.Loader' in settings.TEMPLATE_LOADERS):
    from django.template.loaders.app_directories import app_template_dirs
    dirs.update(app_template_dirs)
return dirs

@@ -11,7 +11,7 @@ class DeferTests(TestCase):
count = 0
for field in obj._meta.fields:
    if isinstance(obj.__class__.__dict__.get(field.attname),
            DeferredAttribute):
        count += 1
self.assertEqual(count, num)

@@ -69,11 +69,11 @@ def get_test_modules():
for modpath, dirpath in discovery_paths:
    for f in os.listdir(dirpath):
        if ('.' in f or
                # Python 3 byte code dirs (PEP 3147)
                f == '__pycache__' or
                f.startswith('sql') or
                os.path.basename(f) in SUBDIRS_TO_SKIP or
                os.path.isfile(f)):
            continue
        modules.append((modpath, f))
return modules

@@ -93,15 +93,15 @@ class GetInternalWSGIApplicationTest(unittest.TestCase):
@override_settings(WSGI_APPLICATION="wsgi.noexist.app")
def test_bad_module(self):
    with six.assertRaisesRegex(self,
            ImproperlyConfigured,
            r"^WSGI application 'wsgi.noexist.app' could not be loaded; Error importing.*"):
        get_internal_wsgi_application()
@override_settings(WSGI_APPLICATION="wsgi.wsgi.noexist")
def test_bad_name(self):
    with six.assertRaisesRegex(self,
            ImproperlyConfigured,
            r"^WSGI application 'wsgi.wsgi.noexist' could not be loaded; Module.*"):
        get_internal_wsgi_application()