mirror of https://github.com/django/django.git
Fixed W503 flake8 warnings.
parent d356bb653f
commit 2cd2d18851

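For context: W503 is pycodestyle's "line break before binary operator" warning. This commit moves those operators to the ends of the preceding lines so the warning can be enforced (see the setup.cfg hunk later in the diff). A minimal illustration of the two styles, not taken from the commit itself:

    first, second = 1, 2

    # Flagged as W503: the binary operator starts the continuation line.
    total = (first
             + second)

    # The style this commit adopts: the operator ends the line.
    total = (first +
             second)
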
@@ -51,9 +51,8 @@ def check_dependencies(**kwargs):
         pass
     else:
         if ('django.contrib.auth.context_processors.auth'
-                not in default_template_engine.context_processors
-                and 'django.contrib.auth.backends.ModelBackend'
-                in settings.AUTHENTICATION_BACKENDS):
+                not in default_template_engine.context_processors and
+                'django.contrib.auth.backends.ModelBackend' in settings.AUTHENTICATION_BACKENDS):
             missing_template = checks.Error(
                 "'django.contrib.auth.context_processors.auth' must be in "
                 "TEMPLATES in order to use the admin application.",

@@ -801,8 +800,8 @@ class ModelAdminChecks(BaseModelAdminChecks):
                 ]
             # If list_display[0] is in list_editable, check that
             # list_display_links is set. See #22792 and #26229 for use cases.
-            elif (obj.list_display[0] == field_name and not obj.list_display_links
-                    and obj.list_display_links is not None):
+            elif (obj.list_display[0] == field_name and not obj.list_display_links and
+                    obj.list_display_links is not None):
                 return [
                     checks.Error(
                         "The value of '%s' refers to the first field in 'list_display' ('%s'), "

@@ -386,8 +386,7 @@ class AllValuesFieldListFilter(FieldListFilter):
 
     def choices(self, changelist):
         yield {
-            'selected': (self.lookup_val is None
-                         and self.lookup_val_isnull is None),
+            'selected': self.lookup_val is None and self.lookup_val_isnull is None,
             'query_string': changelist.get_query_string({},
                 [self.lookup_kwarg, self.lookup_kwarg_isnull]),
             'display': _('All'),

@@ -672,8 +672,7 @@ class ModelAdmin(BaseModelAdmin):
             "formfield_callback": partial(self.formfield_for_dbfield, request=request),
         }
         defaults.update(kwargs)
-        if (defaults.get('fields') is None
-                and not modelform_defines_fields(defaults.get('form'))):
+        if defaults.get('fields') is None and not modelform_defines_fields(defaults.get('form')):
             defaults['fields'] = forms.ALL_FIELDS
 
         return modelform_factory(self.model, **defaults)

@@ -241,8 +241,7 @@ class ChangeList(object):
         ordering field.
         """
         params = self.params
-        ordering = list(self.model_admin.get_ordering(request)
-                        or self._get_default_ordering())
+        ordering = list(self.model_admin.get_ordering(request) or self._get_default_ordering())
         if ORDER_VAR in params:
             # Clear ordering and used params
             ordering = []

@@ -261,8 +261,7 @@ class PostGISOperations(BaseSpatialOperations, DatabaseOperations):
             # also handles it (#25524).
             if handle_spheroid and len(dist_val) > 1:
                 option = dist_val[1]
-                if (not geography and geodetic and lookup_type != 'dwithin'
-                        and option == 'spheroid'):
+                if not geography and geodetic and lookup_type != 'dwithin' and option == 'spheroid':
                     # using distance_spheroid requires the spheroid of the field as
                     # a parameter.
                     params.insert(0, f._spheroid)

@@ -18,8 +18,8 @@ class Serializer(JSONSerializer):
         super(Serializer, self)._init_options()
         self.geometry_field = self.json_kwargs.pop('geometry_field', None)
         self.srid = self.json_kwargs.pop('srid', 4326)
-        if (self.selected_fields is not None and self.geometry_field is not None
-                and self.geometry_field not in self.selected_fields):
+        if (self.selected_fields is not None and self.geometry_field is not None and
+                self.geometry_field not in self.selected_fields):
             self.selected_fields = list(self.selected_fields) + [self.geometry_field]
 
     def start_serialization(self):

@@ -58,10 +58,10 @@ class KeysValidator(object):
 
     def __eq__(self, other):
         return (
-            isinstance(other, self.__class__)
-            and (self.keys == other.keys)
-            and (self.messages == other.messages)
-            and (self.strict == other.strict)
+            isinstance(other, self.__class__) and
+            self.keys == other.keys and
+            self.messages == other.messages and
+            self.strict == other.strict
         )
 
     def __ne__(self, other):

@@ -259,12 +259,8 @@ class Command(BaseCommand):
                         full_path = None
                     # Skip the file if the source file is younger
                     # Avoid sub-second precision (see #14665, #19540)
-                    if (target_last_modified.replace(microsecond=0)
-                            >= source_last_modified.replace(microsecond=0)):
-                        if not ((self.symlink and full_path
-                                 and not os.path.islink(full_path)) or
-                                (not self.symlink and full_path
-                                 and os.path.islink(full_path))):
+                    if (target_last_modified.replace(microsecond=0) >= source_last_modified.replace(microsecond=0) and
+                            full_path and not (self.symlink ^ os.path.islink(full_path))):
                         if prefixed_path not in self.unmodified_files:
                             self.unmodified_files.append(prefixed_path)
                         self.log("Skipping '%s' (not modified)" % path)

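Note that the collectstatic hunk above does more than relocate operators: it folds two mirrored conditions into one using XOR. The identity it leans on is that, for booleans, (a and not b) or (not a and b) equals a ^ b. A self-contained check of that identity (names hypothetical, not from the commit):

    import itertools

    # Exhaustively verify the rewrite over all boolean combinations.
    for a, b in itertools.product([False, True], repeat=2):
        assert ((a and not b) or (not a and b)) == (a ^ b)
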
@@ -78,8 +78,7 @@ class BaseHandler(object):
     def make_view_atomic(self, view):
         non_atomic_requests = getattr(view, '_non_atomic_requests', set())
         for db in connections.all():
-            if (db.settings_dict['ATOMIC_REQUESTS']
-                    and db.alias not in non_atomic_requests):
+            if db.settings_dict['ATOMIC_REQUESTS'] and db.alias not in non_atomic_requests:
                 view = transaction.atomic(using=db.alias)(view)
         return view
 

@@ -322,9 +322,9 @@ class BaseValidator(object):
     def __eq__(self, other):
         return (
             isinstance(other, self.__class__) and
-            (self.limit_value == other.limit_value)
-            and (self.message == other.message)
-            and (self.code == other.code)
+            self.limit_value == other.limit_value and
+            self.message == other.message and
+            self.code == other.code
         )
 
     def compare(self, a, b):

@@ -435,8 +435,8 @@ class DecimalValidator(object):
                 code='max_decimal_places',
                 params={'max': self.decimal_places},
             )
-        if (self.max_digits is not None and self.decimal_places is not None
-                and whole_digits > (self.max_digits - self.decimal_places)):
+        if (self.max_digits is not None and self.decimal_places is not None and
+                whole_digits > (self.max_digits - self.decimal_places)):
             raise ValidationError(
                 self.messages['max_whole_digits'],
                 code='max_whole_digits',

@@ -381,9 +381,8 @@ class BaseDatabaseWrapper(object):
         self.ensure_connection()
 
         start_transaction_under_autocommit = (
-            force_begin_transaction_with_broken_autocommit
-            and not autocommit
-            and self.features.autocommits_when_autocommit_is_off
+            force_begin_transaction_with_broken_autocommit and not autocommit and
+            self.features.autocommits_when_autocommit_is_off
         )
 
         if start_transaction_under_autocommit:

@@ -514,13 +513,14 @@ class BaseDatabaseWrapper(object):
         authorized to be shared between threads (via the `allow_thread_sharing`
         property). Raises an exception if the validation fails.
         """
-        if not (self.allow_thread_sharing
-                or self._thread_ident == thread.get_ident()):
-            raise DatabaseError("DatabaseWrapper objects created in a "
+        if not (self.allow_thread_sharing or self._thread_ident == thread.get_ident()):
+            raise DatabaseError(
+                "DatabaseWrapper objects created in a "
                 "thread can only be used in that same thread. The object "
                 "with alias '%s' was created in thread id %s and this is "
                 "thread id %s."
-                % (self.alias, self._thread_ident, thread.get_ident()))
+                % (self.alias, self._thread_ident, thread.get_ident())
+            )
 
     # ##### Miscellaneous #####
 

@@ -79,14 +79,18 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
         for line in cursor.description:
             col_name = force_text(line[0])
             fields.append(
-                FieldInfo(*((col_name,)
-                            + line[1:3]
-                            + (to_int(field_info[col_name].max_len) or line[3],
+                FieldInfo(*(
+                    (col_name,) +
+                    line[1:3] +
+                    (
+                        to_int(field_info[col_name].max_len) or line[3],
                         to_int(field_info[col_name].num_prec) or line[4],
-                               to_int(field_info[col_name].num_scale) or line[5])
-                            + (line[6],)
-                            + (field_info[col_name].extra,)
-                            + (field_info[col_name].column_default,)))
+                        to_int(field_info[col_name].num_scale) or line[5],
+                        line[6],
+                        field_info[col_name].extra,
+                        field_info[col_name].column_default,
+                    )
+                ))
             )
         return fields
 

@@ -21,9 +21,8 @@ class DatabaseValidation(BaseDatabaseValidation):
         if field_type is None:
             return errors
 
-        if (field_type.startswith('varchar')  # Look for CharFields...
-                and field.unique  # ... that are unique
-                and (field.max_length is None or int(field.max_length) > 255)):
+        if (field_type.startswith('varchar') and field.unique and
+                (field.max_length is None or int(field.max_length) > 255)):
             errors.append(
                 checks.Error(
                     'MySQL does not allow unique CharFields to have a max_length > 255.',

@@ -228,8 +228,9 @@ class DatabaseWrapper(BaseDatabaseWrapper):
             # TO_CHAR().
             cursor.execute(
                 "ALTER SESSION SET NLS_DATE_FORMAT = 'YYYY-MM-DD HH24:MI:SS'"
-                " NLS_TIMESTAMP_FORMAT = 'YYYY-MM-DD HH24:MI:SS.FF'"
-                + (" TIME_ZONE = 'UTC'" if settings.USE_TZ else ''))
+                " NLS_TIMESTAMP_FORMAT = 'YYYY-MM-DD HH24:MI:SS.FF'" +
+                (" TIME_ZONE = 'UTC'" if settings.USE_TZ else '')
+            )
             cursor.close()
         if 'operators' not in self.__dict__:
             # Ticket #14149: Check whether our LIKE implementation will

@@ -14,8 +14,7 @@ class SQLCompiler(compiler.SQLCompiler):
         """
         # The `do_offset` flag indicates whether we need to construct
         # the SQL needed to use limit/offset with Oracle.
-        do_offset = with_limits and (self.query.high_mark is not None
-                                     or self.query.low_mark)
+        do_offset = with_limits and (self.query.high_mark is not None or self.query.low_mark)
         if not do_offset:
             sql, params = super(SQLCompiler, self).as_sql(
                 with_limits=False,

@@ -78,9 +78,13 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
                        WHERE table_name = %s""", [table_name])
         field_map = {line[0]: line[1:] for line in cursor.fetchall()}
         cursor.execute("SELECT * FROM %s LIMIT 1" % self.connection.ops.quote_name(table_name))
-        return [FieldInfo(*((force_text(line[0]),) + line[1:6]
-                            + (field_map[force_text(line[0])][0] == 'YES', field_map[force_text(line[0])][1])))
-                for line in cursor.description]
+        return [
+            FieldInfo(*(
+                (force_text(line[0]),) +
+                line[1:6] +
+                (field_map[force_text(line[0])][0] == 'YES', field_map[force_text(line[0])][1])
+            )) for line in cursor.description
+        ]
 
     def get_relations(self, cursor, table_name):
         """

@@ -215,8 +215,8 @@ class MigrationAutodetector(object):
             old_model_state = self.from_state.models[app_label, old_model_name]
             for field_name, field in old_model_state.fields:
                 old_field = self.old_apps.get_model(app_label, old_model_name)._meta.get_field(field_name)
-                if (hasattr(old_field, "remote_field") and getattr(old_field.remote_field, "through", None)
-                        and not old_field.remote_field.through._meta.auto_created):
+                if (hasattr(old_field, "remote_field") and getattr(old_field.remote_field, "through", None) and
+                        not old_field.remote_field.through._meta.auto_created):
                     through_key = (
                         old_field.remote_field.through._meta.app_label,
                         old_field.remote_field.through._meta.model_name,

@@ -509,8 +509,8 @@ class MigrationAutodetector(object):
                         related_fields[field.name] = field
                     # through will be none on M2Ms on swapped-out models;
                     # we can treat lack of through as auto_created=True, though.
-                    if (getattr(field.remote_field, "through", None)
-                            and not field.remote_field.through._meta.auto_created):
+                    if (getattr(field.remote_field, "through", None) and
+                            not field.remote_field.through._meta.auto_created):
                         related_fields[field.name] = field
             for field in model_opts.local_many_to_many:
                 if field.remote_field.model:

@@ -671,8 +671,8 @@ class MigrationAutodetector(object):
                         related_fields[field.name] = field
                     # through will be none on M2Ms on swapped-out models;
                     # we can treat lack of through as auto_created=True, though.
-                    if (getattr(field.remote_field, "through", None)
-                            and not field.remote_field.through._meta.auto_created):
+                    if (getattr(field.remote_field, "through", None) and
+                            not field.remote_field.through._meta.auto_created):
                         related_fields[field.name] = field
             for field in model._meta.local_many_to_many:
                 if field.remote_field.model:

@@ -206,8 +206,7 @@ class MigrationGraph(object):
         """
         roots = set()
         for node in self.nodes:
-            if (not any(key[0] == node[0] for key in self.node_map[node].parents)
-                    and (not app or app == node[0])):
+            if not any(key[0] == node[0] for key in self.node_map[node].parents) and (not app or app == node[0]):
                 roots.add(node)
         return sorted(roots)
 

@@ -221,8 +220,7 @@ class MigrationGraph(object):
         """
         leaves = set()
         for node in self.nodes:
-            if (not any(key[0] == node[0] for key in self.node_map[node].children)
-                    and (not app or app == node[0])):
+            if not any(key[0] == node[0] for key in self.node_map[node].children) and (not app or app == node[0]):
                 leaves.add(node)
         return sorted(leaves)
 

@@ -397,8 +397,8 @@ class Model(six.with_metaclass(ModelBase)):
             # data-descriptor object (DeferredAttribute) without triggering its
             # __get__ method.
             if (field.attname not in kwargs and
-                    (isinstance(self.__class__.__dict__.get(field.attname), DeferredAttribute)
-                     or field.column is None)):
+                    (isinstance(self.__class__.__dict__.get(field.attname), DeferredAttribute) or
+                     field.column is None)):
                 # This field will be populated on request.
                 continue
             if kwargs:

@@ -753,8 +753,8 @@ class Model(six.with_metaclass(ModelBase)):
         meta = cls._meta
         for parent, field in meta.parents.items():
             # Make sure the link fields are synced between parent and self.
-            if (field and getattr(self, parent._meta.pk.attname) is None
-                    and getattr(self, field.attname) is not None):
+            if (field and getattr(self, parent._meta.pk.attname) is None and
+                    getattr(self, field.attname) is not None):
                 setattr(self, parent._meta.pk.attname, getattr(self, field.attname))
             self._save_parents(cls=parent, using=using, update_fields=update_fields)
             self._save_table(cls=parent, using=using, update_fields=update_fields)

@@ -1589,8 +1589,7 @@ class Model(six.with_metaclass(ModelBase)):
 
             # Check if auto-generated name for the field is too long
             # for the database.
-            if (f.db_column is None and column_name is not None
-                    and len(column_name) > allowed_len):
+            if f.db_column is None and column_name is not None and len(column_name) > allowed_len:
                 errors.append(
                     checks.Error(
                         'Autogenerated column name too long for field "%s". '

@@ -1607,8 +1606,7 @@ class Model(six.with_metaclass(ModelBase)):
             # for the database.
             for m2m in f.remote_field.through._meta.local_fields:
                 _, rel_name = m2m.get_attname_column()
-                if (m2m.db_column is None and rel_name is not None
-                        and len(rel_name) > allowed_len):
+                if m2m.db_column is None and rel_name is not None and len(rel_name) > allowed_len:
                     errors.append(
                         checks.Error(
                             'Autogenerated column name too long for M2M field '

@@ -132,9 +132,9 @@ class Collector(object):
         if not (hasattr(objs, 'model') and hasattr(objs, '_raw_delete')):
             return False
         model = objs.model
-        if (signals.pre_delete.has_listeners(model)
-                or signals.post_delete.has_listeners(model)
-                or signals.m2m_changed.has_listeners(model)):
+        if (signals.pre_delete.has_listeners(model) or
+                signals.post_delete.has_listeners(model) or
+                signals.m2m_changed.has_listeners(model)):
             return False
         # The use of from_field comes from the need to avoid cascade back to
         # parent when parent delete is cascading to child.

@@ -395,8 +395,8 @@ class CombinedExpression(Expression):
         except FieldError:
             rhs_output = None
         if (not connection.features.has_native_duration_field and
-                ((lhs_output and lhs_output.get_internal_type() == 'DurationField')
-                 or (rhs_output and rhs_output.get_internal_type() == 'DurationField'))):
+                ((lhs_output and lhs_output.get_internal_type() == 'DurationField') or
+                 (rhs_output and rhs_output.get_internal_type() == 'DurationField'))):
             return DurationExpression(self.lhs, self.connector, self.rhs).as_sql(compiler, connection)
         if (lhs_output and rhs_output and self.connector == self.SUB and
                 lhs_output.get_internal_type() in {'DateField', 'DateTimeField', 'TimeField'} and

@@ -1878,8 +1878,7 @@ class IntegerField(Field):
         return int(value)
 
     def get_prep_lookup(self, lookup_type, value):
-        if ((lookup_type == 'gte' or lookup_type == 'lt')
-                and isinstance(value, float)):
+        if lookup_type in ('gte', 'lt') and isinstance(value, float):
             value = math.ceil(value)
         return super(IntegerField, self).get_prep_lookup(lookup_type, value)
 

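The IntegerField hunk above flattens the condition without touching the rounding rule it guards: for 'gte' and 'lt' lookups, a float bound is rounded up so the comparison keeps its meaning over integers. A standalone sketch of that rule (hypothetical helper, not Django's actual method):

    import math

    def prep_int_bound(lookup_type, value):
        # Over integers, x >= 2.5 means x >= 3 and x < 2.5 means x < 3,
        # so both bounds round up; 'gt' and 'lte' would round down instead.
        if lookup_type in ('gte', 'lt') and isinstance(value, float):
            value = math.ceil(value)
        return value

    assert prep_int_bound('gte', 2.5) == 3
    assert prep_int_bound('lt', 2.5) == 3
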
@@ -440,8 +440,8 @@ class ImageField(FileField):
             return
 
         dimension_fields_filled = not(
-            (self.width_field and not getattr(instance, self.width_field))
-            or (self.height_field and not getattr(instance, self.height_field))
+            (self.width_field and not getattr(instance, self.width_field)) or
+            (self.height_field and not getattr(instance, self.height_field))
         )
         # When both dimension fields have values, we are most likely loading
         # data from the database or updating an image field that already had

@@ -275,8 +275,8 @@ class PatternLookup(BuiltinLookup):
         # So, for Python values we don't need any special pattern, but for
         # SQL reference values or SQL transformations we need the correct
         # pattern added.
-        if (hasattr(self.rhs, 'get_compiler') or hasattr(self.rhs, 'as_sql')
-                or hasattr(self.rhs, '_as_sql') or self.bilateral_transforms):
+        if (hasattr(self.rhs, 'get_compiler') or hasattr(self.rhs, 'as_sql') or
+                hasattr(self.rhs, '_as_sql') or self.bilateral_transforms):
             pattern = connection.pattern_ops[self.lookup_name].format(connection.pattern_esc)
             return pattern.format(rhs)
         else:

@@ -384,9 +384,9 @@ class Options(object):
 
         return make_immutable_fields_list(
             "fields",
-            (f for f in self._get_fields(reverse=False) if
-             is_not_an_m2m_field(f) and is_not_a_generic_relation(f)
-             and is_not_a_generic_foreign_key(f))
+            (f for f in self._get_fields(reverse=False)
+             if is_not_an_m2m_field(f) and is_not_a_generic_relation(f) and
+             is_not_a_generic_foreign_key(f))
         )
 
     @cached_property

@@ -443,8 +443,8 @@ class QuerySet(object):
         objs = list(objs)
         self._populate_pk_values(objs)
         with transaction.atomic(using=self.db, savepoint=False):
-            if (connection.features.can_combine_inserts_with_and_without_auto_increment_pk
-                    and self.model._meta.has_auto_field):
+            if (connection.features.can_combine_inserts_with_and_without_auto_increment_pk and
+                    self.model._meta.has_auto_field):
                 self._batched_insert(objs, fields, batch_size)
             else:
                 objs_with_pk, objs_without_pk = partition(lambda o: o.pk is None, objs)

@@ -735,9 +735,7 @@ class Query(object):
             # Only the first alias (skipped above) should have None join_type
             assert self.alias_map[alias].join_type is not None
             parent_alias = self.alias_map[alias].parent_alias
-            parent_louter = (
-                parent_alias
-                and self.alias_map[parent_alias].join_type == LOUTER)
+            parent_louter = parent_alias and self.alias_map[parent_alias].join_type == LOUTER
             already_louter = self.alias_map[alias].join_type == LOUTER
             if ((self.alias_map[alias].nullable or parent_louter) and
                     not already_louter):

@@ -746,8 +744,8 @@ class Query(object):
                 # refer to this one.
                 aliases.extend(
                     join for join in self.alias_map.keys()
-                    if (self.alias_map[join].parent_alias == alias
-                        and join not in aliases))
+                    if self.alias_map[join].parent_alias == alias and join not in aliases
+                )
 
     def demote_joins(self, aliases):
         """

@@ -1641,8 +1639,7 @@ class Query(object):
                 # from the model on which the lookup failed.
                 raise
             else:
-                names = sorted(list(get_field_names_from_opts(opts)) + list(self.extra)
-                               + list(self.annotation_select))
+                names = sorted(list(get_field_names_from_opts(opts)) + list(self.extra) + list(self.annotation_select))
                 raise FieldError("Cannot resolve keyword %r into field. "
                                  "Choices are: %s" % (name, ", ".join(names)))
 

@@ -1963,8 +1960,7 @@ class Query(object):
         # used. The proper fix would be to defer all decisions where
         # is_nullable() is needed to the compiler stage, but that is not easy
         # to do currently.
-        if ((connections[DEFAULT_DB_ALIAS].features.interprets_empty_strings_as_nulls)
-                and field.empty_strings_allowed):
+        if connections[DEFAULT_DB_ALIAS].features.interprets_empty_strings_as_nulls and field.empty_strings_allowed:
             return True
         else:
             return field.null

@@ -707,8 +707,8 @@ class BaseModelFormSet(BaseFormSet):
             uclass, lookup, field, unique_for = date_check
             for form in valid_forms:
                 # see if we have data for both fields
-                if (form.cleaned_data and form.cleaned_data[field] is not None
-                        and form.cleaned_data[unique_for] is not None):
+                if (form.cleaned_data and form.cleaned_data[field] is not None and
+                        form.cleaned_data[unique_for] is not None):
                     # if it's a date lookup we need to get the data for all the fields
                     if lookup == 'date':
                         date = form.cleaned_data[unique_for]

@@ -806,8 +806,8 @@ class BaseModelFormSet(BaseFormSet):
         def pk_is_not_editable(pk):
             return (
                 (not pk.editable) or (pk.auto_created or isinstance(pk, AutoField)) or (
-                    pk.remote_field and pk.remote_field.parent_link
-                    and pk_is_not_editable(pk.remote_field.model._meta.pk)
+                    pk.remote_field and pk.remote_field.parent_link and
+                    pk_is_not_editable(pk.remote_field.model._meta.pk)
                 )
             )
         if pk_is_not_editable(pk) or pk.name not in form.fields:

@@ -999,9 +999,10 @@ def _get_foreign_key(parent_model, model, fk_name=None, can_fail=False):
         # Try to discover what the ForeignKey from model to parent_model is
         fks_to_parent = [
             f for f in opts.fields
-            if isinstance(f, ForeignKey)
-            and (f.remote_field.model == parent_model
-                 or f.remote_field.model in parent_model._meta.get_parent_list())
+            if isinstance(f, ForeignKey) and (
+                f.remote_field.model == parent_model or
+                f.remote_field.model in parent_model._meta.get_parent_list()
+            )
         ]
         if len(fks_to_parent) == 1:
             fk = fks_to_parent[0]

@@ -76,8 +76,8 @@ class CommonMiddleware(object):
         if settings.APPEND_SLASH and not request.get_full_path().endswith('/'):
             urlconf = getattr(request, 'urlconf', None)
             return (
-                not is_valid_path(request.path_info, urlconf)
-                and is_valid_path('%s/' % request.path_info, urlconf)
+                not is_valid_path(request.path_info, urlconf) and
+                is_valid_path('%s/' % request.path_info, urlconf)
             )
         return False
 

@@ -39,8 +39,8 @@ class LocaleMiddleware(object):
             path_valid = is_valid_path(language_path, urlconf)
             path_needs_slash = (
                 not path_valid and (
-                    settings.APPEND_SLASH and not language_path.endswith('/')
-                    and is_valid_path('%s/' % language_path, urlconf)
+                    settings.APPEND_SLASH and not language_path.endswith('/') and
+                    is_valid_path('%s/' % language_path, urlconf)
                 )
             )
 

@@ -327,8 +327,7 @@ class SimpleTestCase(unittest.TestCase):
     def _assert_contains(self, response, text, status_code, msg_prefix, html):
         # If the response supports deferred rendering and hasn't been rendered
         # yet, then ensure that it does get rendered before proceeding further.
-        if (hasattr(response, 'render') and callable(response.render)
-                and not response.is_rendered):
+        if hasattr(response, 'render') and callable(response.render) and not response.is_rendered:
             response.render()
 
         if msg_prefix:

@@ -920,9 +919,8 @@ class TransactionTestCase(SimpleTestCase):
         for db_name in self._databases_names(include_mirrors=False):
             # Flush the database
             inhibit_post_migrate = (
-                self.available_apps is not None
-                or (
-                    # Inhibit the post_migrate signal when using serialized
+                self.available_apps is not None or
+                (  # Inhibit the post_migrate signal when using serialized
                     # rollback to avoid trying to recreate the serialized data.
                     self.serialized_rollback and
                     hasattr(connections[db_name], '_test_serialized_contents')

@@ -423,16 +423,11 @@ def strip_quotes(want, got):
     """
     def is_quoted_string(s):
         s = s.strip()
-        return (len(s) >= 2
-                and s[0] == s[-1]
-                and s[0] in ('"', "'"))
+        return len(s) >= 2 and s[0] == s[-1] and s[0] in ('"', "'")
 
     def is_quoted_unicode(s):
         s = s.strip()
-        return (len(s) >= 3
-                and s[0] == 'u'
-                and s[1] == s[-1]
-                and s[1] in ('"', "'"))
+        return len(s) >= 3 and s[0] == 'u' and s[1] == s[-1] and s[1] in ('"', "'")
 
     if is_quoted_string(want) and is_quoted_string(got):
         want = want.strip()[1:-1]

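The strip_quotes helpers above collapse to single-line predicates with no change in behavior. Restated as a runnable check (reimplemented here purely for illustration):

    def is_quoted_string(s):
        s = s.strip()
        return len(s) >= 2 and s[0] == s[-1] and s[0] in ('"', "'")

    assert is_quoted_string("'abc'")
    assert not is_quoted_string("abc")
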
@@ -101,8 +101,7 @@ class BaseArchive(object):
     def split_leading_dir(self, path):
         path = str(path)
         path = path.lstrip('/').lstrip('\\')
-        if '/' in path and (('\\' in path and path.find('/') < path.find('\\'))
-                            or '\\' not in path):
+        if '/' in path and (('\\' in path and path.find('/') < path.find('\\')) or '\\' not in path):
             return path.split('/', 1)
         elif '\\' in path:
             return path.split('\\', 1)

@@ -288,10 +288,12 @@ def template_localtime(value, use_tz=None):
 
     This function is designed for use by the template engine.
     """
-    should_convert = (isinstance(value, datetime)
-                      and (settings.USE_TZ if use_tz is None else use_tz)
-                      and not is_naive(value)
-                      and getattr(value, 'convert_to_local_time', True))
+    should_convert = (
+        isinstance(value, datetime) and
+        (settings.USE_TZ if use_tz is None else use_tz) and
+        not is_naive(value) and
+        getattr(value, 'convert_to_local_time', True)
+    )
     return localtime(value) if should_convert else value
 
 

@@ -129,8 +129,7 @@ class DjangoTranslation(gettext_module.GNUTranslations):
         self._add_installed_apps_translations()
 
         self._add_local_translations()
-        if (self.__language == settings.LANGUAGE_CODE and self.domain == 'django'
-                and self._catalog is None):
+        if self.__language == settings.LANGUAGE_CODE and self.domain == 'django' and self._catalog is None:
             # default lang should have at least one translation file available.
             raise IOError("No translation files found for default language %s." % settings.LANGUAGE_CODE)
         self._add_fallback(localedirs)

@@ -102,8 +102,8 @@ class Node(object):
             return data
         if self.connector == conn_type:
             # We can reuse self.children to append or squash the node other.
-            if (isinstance(data, Node) and not data.negated
-                    and (data.connector == conn_type or len(data) == 1)):
+            if (isinstance(data, Node) and not data.negated and
+                    (data.connector == conn_type or len(data) == 1)):
                 # We can squash the other node's children directly into this
                 # node. We are just doing (AB)(CD) == (ABCD) here, with the
                 # addition that if the length of the other node is 1 the

@@ -190,8 +190,8 @@ class SafeExceptionReporterFilter(ExceptionReporterFilter):
         current_frame = tb_frame.f_back
         sensitive_variables = None
         while current_frame is not None:
-            if (current_frame.f_code.co_name == 'sensitive_variables_wrapper'
-                    and 'sensitive_variables_wrapper' in current_frame.f_locals):
+            if (current_frame.f_code.co_name == 'sensitive_variables_wrapper' and
+                    'sensitive_variables_wrapper' in current_frame.f_locals):
                 # The sensitive_variables decorator was used, so we take note
                 # of the sensitive variables' names.
                 wrapper = current_frame.f_locals['sensitive_variables_wrapper']

@@ -219,8 +219,8 @@ class SafeExceptionReporterFilter(ExceptionReporterFilter):
             for name, value in tb_frame.f_locals.items():
                 cleansed[name] = self.cleanse_special_types(request, value)
 
-        if (tb_frame.f_code.co_name == 'sensitive_variables_wrapper'
-                and 'sensitive_variables_wrapper' in tb_frame.f_locals):
+        if (tb_frame.f_code.co_name == 'sensitive_variables_wrapper' and
+                'sensitive_variables_wrapper' in tb_frame.f_locals):
             # For good measure, obfuscate the decorated function's arguments in
             # the sensitive_variables decorator's frame, in case the variables
             # associated with those arguments were meant to be obfuscated from

@@ -459,11 +459,12 @@ def technical_404_response(request, exception):
     except (IndexError, TypeError, KeyError):
         tried = []
     else:
-        if (not tried  # empty URLconf
-                or (request.path == '/'
-                    and len(tried) == 1  # default URLconf
-                    and len(tried[0]) == 1
-                    and getattr(tried[0][0], 'app_name', '') == getattr(tried[0][0], 'namespace', '') == 'admin')):
+        if (not tried or (  # empty URLconf
+            request.path == '/' and
+            len(tried) == 1 and  # default URLconf
+            len(tried[0]) == 1 and
+            getattr(tried[0][0], 'app_name', '') == getattr(tried[0][0], 'namespace', '') == 'admin'
+        )):
             return default_urlconf(request)
 
     urlconf = getattr(request, 'urlconf', settings.ROOT_URLCONF)

@@ -163,8 +163,7 @@ class BaseListView(MultipleObjectMixin, View):
             # When pagination is enabled and object_list is a queryset,
             # it's better to do a cheap query than to load the unpaginated
             # queryset in memory.
-            if (self.get_paginate_by(self.object_list) is not None
-                    and hasattr(self.object_list, 'exists')):
+            if self.get_paginate_by(self.object_list) is not None and hasattr(self.object_list, 'exists'):
                 is_empty = not self.object_list.exists()
             else:
                 is_empty = len(self.object_list) == 0

@@ -4,7 +4,7 @@ install-script = scripts/rpm-install.sh
 
 [flake8]
 exclude = build,.git,./django/utils/lru_cache.py,./django/utils/six.py,./django/conf/app_template/*,./django/dispatch/weakref_backports.py,./tests/.env,./xmlrunner,tests/view_tests/tests/py3_test_debug.py,tests/template_tests/annotated_tag_function.py
-ignore = E128,E402,W503,W601
+ignore = E128,E402,W601
 max-line-length = 119
 
 [isort]

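This setup.cfg hunk is the switch that motivates the rest of the commit: removing W503 from flake8's ignore list means line breaks before binary operators are reported from now on. A crude stand-in for the check, far simpler than pycodestyle's tokenizer-based implementation (hypothetical script, not part of the commit):

    import re
    import sys

    # An indented line whose first token is "and"/"or" is the W503 shape.
    W503_LIKE = re.compile(r'^\s+(and|or)\b')

    for name in sys.argv[1:]:
        with open(name) as fh:
            for lineno, line in enumerate(fh, start=1):
                if W503_LIKE.match(line):
                    print('%s:%d: W503-like line break' % (name, lineno))
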
@@ -983,8 +983,8 @@ class AggregationTests(TestCase):
             Book.objects
             .annotate(n_authors=Count("authors"))
             .filter(
-                Q(name="The Definitive Guide to Django: Web Development Done Right")
-                | (Q(name="Artificial Intelligence: A Modern Approach") & Q(n_authors=3))
+                Q(name="The Definitive Guide to Django: Web Development Done Right") |
+                (Q(name="Artificial Intelligence: A Modern Approach") & Q(n_authors=3))
             )
         )
         self.assertQuerysetEqual(

@@ -941,8 +941,8 @@ value="Should escape < & > and <script>alert('xss')</
             password2 = CharField(widget=PasswordInput)
 
             def clean_password2(self):
-                if (self.cleaned_data.get('password1') and self.cleaned_data.get('password2')
-                        and self.cleaned_data['password1'] != self.cleaned_data['password2']):
+                if (self.cleaned_data.get('password1') and self.cleaned_data.get('password2') and
+                        self.cleaned_data['password1'] != self.cleaned_data['password2']):
                     raise ValidationError('Please make sure your passwords match.')
 
                 return self.cleaned_data['password2']

@@ -980,8 +980,8 @@ value="Should escape < & > and <script>alert('xss')</
 
             def clean(self):
                 # Test raising a ValidationError as NON_FIELD_ERRORS.
-                if (self.cleaned_data.get('password1') and self.cleaned_data.get('password2')
-                        and self.cleaned_data['password1'] != self.cleaned_data['password2']):
+                if (self.cleaned_data.get('password1') and self.cleaned_data.get('password2') and
+                        self.cleaned_data['password1'] != self.cleaned_data['password2']):
                     raise ValidationError('Please make sure your passwords match.')
 
                 # Test raising ValidationError that targets multiple fields.

@@ -1120,8 +1120,8 @@ value="Should escape < & > and <script>alert('xss')</
             password2 = CharField(widget=PasswordInput)
 
             def clean(self):
-                if (self.cleaned_data.get('password1') and self.cleaned_data.get('password2')
-                        and self.cleaned_data['password1'] != self.cleaned_data['password2']):
+                if (self.cleaned_data.get('password1') and self.cleaned_data.get('password2') and
+                        self.cleaned_data['password1'] != self.cleaned_data['password2']):
                     raise ValidationError(
                         'Please make sure your passwords match.',
                         code='password_mismatch',

@@ -2309,8 +2309,8 @@ Password: <input type="password" name="password" /></li>
             password2 = CharField(widget=PasswordInput)
 
             def clean(self):
-                if (self.cleaned_data.get('password1') and self.cleaned_data.get('password2')
-                        and self.cleaned_data['password1'] != self.cleaned_data['password2']):
+                if (self.cleaned_data.get('password1') and self.cleaned_data.get('password2') and
+                        self.cleaned_data['password1'] != self.cleaned_data['password2']):
                     raise ValidationError('Please make sure your passwords match.')
 
                 return self.cleaned_data

@@ -2369,8 +2369,8 @@ Password: <input type="password" name="password" /></li>
             password2 = CharField(widget=PasswordInput)
 
             def clean(self):
-                if (self.cleaned_data.get('password1') and self.cleaned_data.get('password2')
-                        and self.cleaned_data['password1'] != self.cleaned_data['password2']):
+                if (self.cleaned_data.get('password1') and self.cleaned_data.get('password2') and
+                        self.cleaned_data['password1'] != self.cleaned_data['password2']):
                     raise ValidationError('Please make sure your passwords match.')
 
                 return self.cleaned_data

@@ -65,9 +65,10 @@ def api_get_area(x):
 def api_get_length(x):
     return x.length
 
-geos_function_tests = [val for name, val in vars().items()
-                       if hasattr(val, '__call__')
-                       and name.startswith('api_get_')]
+geos_function_tests = [
+    val for name, val in vars().items()
+    if hasattr(val, '__call__') and name.startswith('api_get_')
+]
 
 
 @skipUnless(HAS_GEOS, "Geos is required.")

@@ -27,8 +27,8 @@ class OrLookupsTests(TestCase):
     def test_filter_or(self):
         self.assertQuerysetEqual(
             (
-                Article.objects.filter(headline__startswith='Hello')
-                | Article.objects.filter(headline__startswith='Goodbye')
+                Article.objects.filter(headline__startswith='Hello') |
+                Article.objects.filter(headline__startswith='Goodbye')
             ), [
                 'Hello',
                 'Goodbye',

@@ -1170,8 +1170,7 @@ class Queries1Tests(BaseQuerysetTest):
 
     def test_ticket19672(self):
         self.assertQuerysetEqual(
-            Report.objects.filter(Q(creator__isnull=False) &
-                                  ~Q(creator__extra__value=41)),
+            Report.objects.filter(Q(creator__isnull=False) & ~Q(creator__extra__value=41)),
             ['<Report: r1>']
         )
 

@@ -1390,8 +1389,8 @@ class Queries4Tests(BaseQuerysetTest):
 
         q1 = Item.objects.filter(Q(creator__report__name='e1') | Q(creator=self.a1)).order_by()
         q2 = (
-            Item.objects.filter(Q(creator__report__name='e1')).order_by()
-            | Item.objects.filter(Q(creator=self.a1)).order_by()
+            Item.objects.filter(Q(creator__report__name='e1')).order_by() |
+            Item.objects.filter(Q(creator=self.a1)).order_by()
         )
         self.assertQuerysetEqual(q1, ["<Item: i1>"])
         self.assertEqual(str(q1.query), str(q2.query))

@@ -3081,12 +3080,10 @@ class NullJoinPromotionOrTest(TestCase):
         # Test OR + doubleneg. The expected result is that channel is LOUTER
         # joined, program INNER joined
         qs1_filter = Identifier.objects.filter(
-            Q(program__id=p2.id, channel__id=c1.id)
-            | Q(program__id=p1.id)
+            Q(program__id=p2.id, channel__id=c1.id) | Q(program__id=p1.id)
         ).order_by('pk')
         qs1_doubleneg = Identifier.objects.exclude(
-            ~Q(Q(program__id=p2.id, channel__id=c1.id)
-               | Q(program__id=p1.id))
+            ~Q(Q(program__id=p2.id, channel__id=c1.id) | Q(program__id=p1.id))
         ).order_by('pk')
         self.assertQuerysetEqual(qs1_doubleneg, qs1_filter, lambda x: x)
         self.assertEqual(str(qs1_filter.query).count('JOIN'),

@@ -3106,11 +3103,11 @@ class NullJoinPromotionOrTest(TestCase):
         # NOT is pushed to lowest level in the boolean tree, and
         # another query where this isn't done.
         qs1 = Identifier.objects.filter(
-            ~Q(~Q(program__id=p2.id, channel__id=c1.id)
-               & Q(program__id=p1.id))).order_by('pk')
+            ~Q(~Q(program__id=p2.id, channel__id=c1.id) & Q(program__id=p1.id))
+        ).order_by('pk')
         qs2 = Identifier.objects.filter(
-            Q(Q(program__id=p2.id, channel__id=c1.id)
-              | ~Q(program__id=p1.id))).order_by('pk')
+            Q(Q(program__id=p2.id, channel__id=c1.id) | ~Q(program__id=p1.id))
+        ).order_by('pk')
         self.assertQuerysetEqual(qs1, qs2, lambda x: x)
         self.assertEqual(str(qs1.query).count('JOIN'),
                          str(qs2.query).count('JOIN'))

@@ -3697,8 +3694,7 @@ class Ticket23605Tests(TestCase):
                 F("ticket23605b__modelc_fk__field_c0")
             ) &
             # True for a1 (field_b1=True)
-            Q(ticket23605b__field_b1=True) &
-            ~Q(ticket23605b__pk__in=Ticket23605B.objects.filter(
+            Q(ticket23605b__field_b1=True) & ~Q(ticket23605b__pk__in=Ticket23605B.objects.filter(
                 ~(
                     # Same filters as above commented filters, but
                     # double-negated (one for Q() above, one for

@@ -3803,12 +3799,12 @@ class Ticket23622Tests(TestCase):
             modelc_fk=c1,
         )
         qx = (
-            Q(ticket23605b__pk__in=Ticket23605B.objects.order_by('modela_fk', '-field_b1').distinct('modela_fk'))
-            & Q(ticket23605b__field_b0__gte=300)
+            Q(ticket23605b__pk__in=Ticket23605B.objects.order_by('modela_fk', '-field_b1').distinct('modela_fk')) &
+            Q(ticket23605b__field_b0__gte=300)
         )
         qy = (
-            Q(ticket23605b__in=Ticket23605B.objects.order_by('modela_fk', '-field_b1').distinct('modela_fk'))
-            & Q(ticket23605b__field_b0__gte=300)
+            Q(ticket23605b__in=Ticket23605B.objects.order_by('modela_fk', '-field_b1').distinct('modela_fk')) &
+            Q(ticket23605b__field_b0__gte=300)
         )
         self.assertEqual(
             set(Ticket23605A.objects.filter(qx).values_list('pk', flat=True)),

@@ -398,9 +398,9 @@ def register_tests(test_class, method_name, test_func, exclude=None):
     """
    formats = [
         f for f in serializers.get_serializer_formats()
-        if (not isinstance(serializers.get_serializer(f), serializers.BadSerializer)
-            and not f == 'geojson'
-            and (exclude is None or f not in exclude))
+        if (not isinstance(serializers.get_serializer(f), serializers.BadSerializer) and
+            f != 'geojson' and
+            (exclude is None or f not in exclude))
     ]
     for format_ in formats:
         setattr(test_class, method_name % format_, curry(test_func, format_))