mirror of https://github.com/django/django.git

commit 2cd2d18851 (parent d356bb653f)

    Fixed W503 flake8 warnings.
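For context: W503 is pycodestyle's "line break occurred before a binary operator" warning. This commit removes W503 from the flake8 ignore list in setup.cfg (see the configuration hunk further down) and rewrites every multi-line condition so the operator trails the line being continued instead of leading the next one. A minimal sketch of the two styles, with illustrative variable names not taken from the diff:

    first_condition = True
    second_condition = False

    # Old style, flagged by W503 once the warning is enforced:
    # the binary operator begins the continuation line.
    if (first_condition
            and second_condition):
        pass

    # New style used throughout this commit:
    # the binary operator ends the line being continued.
    if (first_condition and
            second_condition):
        pass
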
@@ -51,9 +51,8 @@ def check_dependencies(**kwargs):
         pass
     else:
         if ('django.contrib.auth.context_processors.auth'
-                not in default_template_engine.context_processors
-                and 'django.contrib.auth.backends.ModelBackend'
-                in settings.AUTHENTICATION_BACKENDS):
+                not in default_template_engine.context_processors and
+                'django.contrib.auth.backends.ModelBackend' in settings.AUTHENTICATION_BACKENDS):
             missing_template = checks.Error(
                 "'django.contrib.auth.context_processors.auth' must be in "
                 "TEMPLATES in order to use the admin application.",
@@ -801,8 +800,8 @@ class ModelAdminChecks(BaseModelAdminChecks):
             ]
         # If list_display[0] is in list_editable, check that
         # list_display_links is set. See #22792 and #26229 for use cases.
-        elif (obj.list_display[0] == field_name and not obj.list_display_links
-                and obj.list_display_links is not None):
+        elif (obj.list_display[0] == field_name and not obj.list_display_links and
+                obj.list_display_links is not None):
             return [
                 checks.Error(
                     "The value of '%s' refers to the first field in 'list_display' ('%s'), "
@@ -386,8 +386,7 @@ class AllValuesFieldListFilter(FieldListFilter):

     def choices(self, changelist):
         yield {
-            'selected': (self.lookup_val is None
-                         and self.lookup_val_isnull is None),
+            'selected': self.lookup_val is None and self.lookup_val_isnull is None,
             'query_string': changelist.get_query_string({},
                                                         [self.lookup_kwarg, self.lookup_kwarg_isnull]),
             'display': _('All'),
@@ -672,8 +672,7 @@ class ModelAdmin(BaseModelAdmin):
             "formfield_callback": partial(self.formfield_for_dbfield, request=request),
         }
         defaults.update(kwargs)
-        if (defaults.get('fields') is None
-                and not modelform_defines_fields(defaults.get('form'))):
+        if defaults.get('fields') is None and not modelform_defines_fields(defaults.get('form')):
             defaults['fields'] = forms.ALL_FIELDS

         return modelform_factory(self.model, **defaults)
@@ -241,8 +241,7 @@ class ChangeList(object):
        ordering field.
        """
        params = self.params
-        ordering = list(self.model_admin.get_ordering(request)
-                        or self._get_default_ordering())
+        ordering = list(self.model_admin.get_ordering(request) or self._get_default_ordering())
        if ORDER_VAR in params:
            # Clear ordering and used params
            ordering = []
@@ -261,8 +261,7 @@ class PostGISOperations(BaseSpatialOperations, DatabaseOperations):
            # also handles it (#25524).
            if handle_spheroid and len(dist_val) > 1:
                option = dist_val[1]
-                if (not geography and geodetic and lookup_type != 'dwithin'
-                        and option == 'spheroid'):
+                if not geography and geodetic and lookup_type != 'dwithin' and option == 'spheroid':
                    # using distance_spheroid requires the spheroid of the field as
                    # a parameter.
                    params.insert(0, f._spheroid)
@@ -18,8 +18,8 @@ class Serializer(JSONSerializer):
        super(Serializer, self)._init_options()
        self.geometry_field = self.json_kwargs.pop('geometry_field', None)
        self.srid = self.json_kwargs.pop('srid', 4326)
-        if (self.selected_fields is not None and self.geometry_field is not None
-                and self.geometry_field not in self.selected_fields):
+        if (self.selected_fields is not None and self.geometry_field is not None and
+                self.geometry_field not in self.selected_fields):
            self.selected_fields = list(self.selected_fields) + [self.geometry_field]

    def start_serialization(self):
@@ -58,10 +58,10 @@ class KeysValidator(object):

    def __eq__(self, other):
        return (
-            isinstance(other, self.__class__)
-            and (self.keys == other.keys)
-            and (self.messages == other.messages)
-            and (self.strict == other.strict)
+            isinstance(other, self.__class__) and
+            self.keys == other.keys and
+            self.messages == other.messages and
+            self.strict == other.strict
        )

    def __ne__(self, other):

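A side note on the __eq__ rewrite above (and the BaseValidator one later in this commit): dropping the parentheses around the individual comparisons cannot change behavior, because == binds tighter than `and` in Python. A quick illustrative check, with made-up values:

    keys, messages = {'a'}, {'m': 1}
    # Parenthesized and bare comparison chains parse identically,
    # since `==` has higher precedence than `and`.
    assert ((keys == {'a'}) and (messages == {'m': 1})) == \
        (keys == {'a'} and messages == {'m': 1})
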
@@ -259,16 +259,12 @@ class Command(BaseCommand):
                        full_path = None
                    # Skip the file if the source file is younger
                    # Avoid sub-second precision (see #14665, #19540)
-                    if (target_last_modified.replace(microsecond=0)
-                            >= source_last_modified.replace(microsecond=0)):
-                        if not ((self.symlink and full_path
-                                 and not os.path.islink(full_path)) or
-                                (not self.symlink and full_path
-                                 and os.path.islink(full_path))):
-                            if prefixed_path not in self.unmodified_files:
-                                self.unmodified_files.append(prefixed_path)
-                            self.log("Skipping '%s' (not modified)" % path)
-                            return False
+                    if (target_last_modified.replace(microsecond=0) >= source_last_modified.replace(microsecond=0) and
+                            full_path and not (self.symlink ^ os.path.islink(full_path))):
+                        if prefixed_path not in self.unmodified_files:
+                            self.unmodified_files.append(prefixed_path)
+                        self.log("Skipping '%s' (not modified)" % path)
+                        return False
            # Then delete the existing file if really needed
            if self.dry_run:
                self.log("Pretending to delete '%s'" % path)

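The collectstatic hunk above is the one place this commit goes beyond moving operators: the nested symlink/islink test collapses into a single XOR. A small check of that equivalence, assuming full_path is set so both forms see the same operands (the names here are illustrative):

    from itertools import product

    for symlink, islink in product((True, False), repeat=2):
        old = not ((symlink and not islink) or (not symlink and islink))
        new = not (symlink ^ islink)
        assert old == new  # both forms agree on every combination
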
@@ -78,8 +78,7 @@ class BaseHandler(object):
    def make_view_atomic(self, view):
        non_atomic_requests = getattr(view, '_non_atomic_requests', set())
        for db in connections.all():
-            if (db.settings_dict['ATOMIC_REQUESTS']
-                    and db.alias not in non_atomic_requests):
+            if db.settings_dict['ATOMIC_REQUESTS'] and db.alias not in non_atomic_requests:
                view = transaction.atomic(using=db.alias)(view)
        return view

@@ -322,9 +322,9 @@ class BaseValidator(object):
    def __eq__(self, other):
        return (
            isinstance(other, self.__class__) and
-            (self.limit_value == other.limit_value)
-            and (self.message == other.message)
-            and (self.code == other.code)
+            self.limit_value == other.limit_value and
+            self.message == other.message and
+            self.code == other.code
        )

    def compare(self, a, b):
@@ -435,8 +435,8 @@ class DecimalValidator(object):
                code='max_decimal_places',
                params={'max': self.decimal_places},
            )
-        if (self.max_digits is not None and self.decimal_places is not None
-                and whole_digits > (self.max_digits - self.decimal_places)):
+        if (self.max_digits is not None and self.decimal_places is not None and
+                whole_digits > (self.max_digits - self.decimal_places)):
            raise ValidationError(
                self.messages['max_whole_digits'],
                code='max_whole_digits',
@@ -381,9 +381,8 @@ class BaseDatabaseWrapper(object):
        self.ensure_connection()

        start_transaction_under_autocommit = (
-            force_begin_transaction_with_broken_autocommit
-            and not autocommit
-            and self.features.autocommits_when_autocommit_is_off
+            force_begin_transaction_with_broken_autocommit and not autocommit and
+            self.features.autocommits_when_autocommit_is_off
        )

        if start_transaction_under_autocommit:
@@ -514,13 +513,14 @@ class BaseDatabaseWrapper(object):
        authorized to be shared between threads (via the `allow_thread_sharing`
        property). Raises an exception if the validation fails.
        """
-        if not (self.allow_thread_sharing
-                or self._thread_ident == thread.get_ident()):
-            raise DatabaseError("DatabaseWrapper objects created in a "
+        if not (self.allow_thread_sharing or self._thread_ident == thread.get_ident()):
+            raise DatabaseError(
+                "DatabaseWrapper objects created in a "
                "thread can only be used in that same thread. The object "
                "with alias '%s' was created in thread id %s and this is "
                "thread id %s."
-                % (self.alias, self._thread_ident, thread.get_ident()))
+                % (self.alias, self._thread_ident, thread.get_ident())
+            )

    # ##### Miscellaneous #####

@@ -79,14 +79,18 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
        for line in cursor.description:
            col_name = force_text(line[0])
            fields.append(
-                FieldInfo(*((col_name,)
-                            + line[1:3]
-                            + (to_int(field_info[col_name].max_len) or line[3],
-                               to_int(field_info[col_name].num_prec) or line[4],
-                               to_int(field_info[col_name].num_scale) or line[5])
-                            + (line[6],)
-                            + (field_info[col_name].extra,)
-                            + (field_info[col_name].column_default,)))
+                FieldInfo(*(
+                    (col_name,) +
+                    line[1:3] +
+                    (
+                        to_int(field_info[col_name].max_len) or line[3],
+                        to_int(field_info[col_name].num_prec) or line[4],
+                        to_int(field_info[col_name].num_scale) or line[5],
+                        line[6],
+                        field_info[col_name].extra,
+                        field_info[col_name].column_default,
+                    )
+                ))
            )
        return fields

@@ -21,9 +21,8 @@ class DatabaseValidation(BaseDatabaseValidation):
        if field_type is None:
            return errors

-        if (field_type.startswith('varchar')  # Look for CharFields...
-                and field.unique  # ... that are unique
-                and (field.max_length is None or int(field.max_length) > 255)):
+        if (field_type.startswith('varchar') and field.unique and
+                (field.max_length is None or int(field.max_length) > 255)):
            errors.append(
                checks.Error(
                    'MySQL does not allow unique CharFields to have a max_length > 255.',
@@ -228,8 +228,9 @@ class DatabaseWrapper(BaseDatabaseWrapper):
                # TO_CHAR().
                cursor.execute(
                    "ALTER SESSION SET NLS_DATE_FORMAT = 'YYYY-MM-DD HH24:MI:SS'"
-                    " NLS_TIMESTAMP_FORMAT = 'YYYY-MM-DD HH24:MI:SS.FF'"
-                    + (" TIME_ZONE = 'UTC'" if settings.USE_TZ else ''))
+                    " NLS_TIMESTAMP_FORMAT = 'YYYY-MM-DD HH24:MI:SS.FF'" +
+                    (" TIME_ZONE = 'UTC'" if settings.USE_TZ else '')
+                )
                cursor.close()
            if 'operators' not in self.__dict__:
                # Ticket #14149: Check whether our LIKE implementation will
@@ -14,8 +14,7 @@ class SQLCompiler(compiler.SQLCompiler):
        """
        # The `do_offset` flag indicates whether we need to construct
        # the SQL needed to use limit/offset with Oracle.
-        do_offset = with_limits and (self.query.high_mark is not None
-                                     or self.query.low_mark)
+        do_offset = with_limits and (self.query.high_mark is not None or self.query.low_mark)
        if not do_offset:
            sql, params = super(SQLCompiler, self).as_sql(
                with_limits=False,
@@ -78,9 +78,13 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
                WHERE table_name = %s""", [table_name])
        field_map = {line[0]: line[1:] for line in cursor.fetchall()}
        cursor.execute("SELECT * FROM %s LIMIT 1" % self.connection.ops.quote_name(table_name))
-        return [FieldInfo(*((force_text(line[0]),) + line[1:6]
-                            + (field_map[force_text(line[0])][0] == 'YES', field_map[force_text(line[0])][1])))
-                for line in cursor.description]
+        return [
+            FieldInfo(*(
+                (force_text(line[0]),) +
+                line[1:6] +
+                (field_map[force_text(line[0])][0] == 'YES', field_map[force_text(line[0])][1])
+            )) for line in cursor.description
+        ]

    def get_relations(self, cursor, table_name):
        """
@@ -215,8 +215,8 @@ class MigrationAutodetector(object):
                old_model_state = self.from_state.models[app_label, old_model_name]
                for field_name, field in old_model_state.fields:
                    old_field = self.old_apps.get_model(app_label, old_model_name)._meta.get_field(field_name)
-                    if (hasattr(old_field, "remote_field") and getattr(old_field.remote_field, "through", None)
-                            and not old_field.remote_field.through._meta.auto_created):
+                    if (hasattr(old_field, "remote_field") and getattr(old_field.remote_field, "through", None) and
+                            not old_field.remote_field.through._meta.auto_created):
                        through_key = (
                            old_field.remote_field.through._meta.app_label,
                            old_field.remote_field.through._meta.model_name,
@@ -509,8 +509,8 @@ class MigrationAutodetector(object):
                    related_fields[field.name] = field
                # through will be none on M2Ms on swapped-out models;
                # we can treat lack of through as auto_created=True, though.
-                if (getattr(field.remote_field, "through", None)
-                        and not field.remote_field.through._meta.auto_created):
+                if (getattr(field.remote_field, "through", None) and
+                        not field.remote_field.through._meta.auto_created):
                    related_fields[field.name] = field
            for field in model_opts.local_many_to_many:
                if field.remote_field.model:
@@ -671,8 +671,8 @@ class MigrationAutodetector(object):
                    related_fields[field.name] = field
                # through will be none on M2Ms on swapped-out models;
                # we can treat lack of through as auto_created=True, though.
-                if (getattr(field.remote_field, "through", None)
-                        and not field.remote_field.through._meta.auto_created):
+                if (getattr(field.remote_field, "through", None) and
+                        not field.remote_field.through._meta.auto_created):
                    related_fields[field.name] = field
            for field in model._meta.local_many_to_many:
                if field.remote_field.model:
@@ -206,8 +206,7 @@ class MigrationGraph(object):
        """
        roots = set()
        for node in self.nodes:
-            if (not any(key[0] == node[0] for key in self.node_map[node].parents)
-                    and (not app or app == node[0])):
+            if not any(key[0] == node[0] for key in self.node_map[node].parents) and (not app or app == node[0]):
                roots.add(node)
        return sorted(roots)

@@ -221,8 +220,7 @@ class MigrationGraph(object):
        """
        leaves = set()
        for node in self.nodes:
-            if (not any(key[0] == node[0] for key in self.node_map[node].children)
-                    and (not app or app == node[0])):
+            if not any(key[0] == node[0] for key in self.node_map[node].children) and (not app or app == node[0]):
                leaves.add(node)
        return sorted(leaves)

@@ -397,8 +397,8 @@ class Model(six.with_metaclass(ModelBase)):
                # data-descriptor object (DeferredAttribute) without triggering its
                # __get__ method.
                if (field.attname not in kwargs and
-                        (isinstance(self.__class__.__dict__.get(field.attname), DeferredAttribute)
-                         or field.column is None)):
+                        (isinstance(self.__class__.__dict__.get(field.attname), DeferredAttribute) or
+                         field.column is None)):
                    # This field will be populated on request.
                    continue
                if kwargs:
@@ -753,8 +753,8 @@ class Model(six.with_metaclass(ModelBase)):
        meta = cls._meta
        for parent, field in meta.parents.items():
            # Make sure the link fields are synced between parent and self.
-            if (field and getattr(self, parent._meta.pk.attname) is None
-                    and getattr(self, field.attname) is not None):
+            if (field and getattr(self, parent._meta.pk.attname) is None and
+                    getattr(self, field.attname) is not None):
                setattr(self, parent._meta.pk.attname, getattr(self, field.attname))
            self._save_parents(cls=parent, using=using, update_fields=update_fields)
            self._save_table(cls=parent, using=using, update_fields=update_fields)
@@ -1589,8 +1589,7 @@ class Model(six.with_metaclass(ModelBase)):

            # Check if auto-generated name for the field is too long
            # for the database.
-            if (f.db_column is None and column_name is not None
-                    and len(column_name) > allowed_len):
+            if f.db_column is None and column_name is not None and len(column_name) > allowed_len:
                errors.append(
                    checks.Error(
                        'Autogenerated column name too long for field "%s". '
@@ -1607,8 +1606,7 @@ class Model(six.with_metaclass(ModelBase)):
            # for the database.
            for m2m in f.remote_field.through._meta.local_fields:
                _, rel_name = m2m.get_attname_column()
-                if (m2m.db_column is None and rel_name is not None
-                        and len(rel_name) > allowed_len):
+                if m2m.db_column is None and rel_name is not None and len(rel_name) > allowed_len:
                    errors.append(
                        checks.Error(
                            'Autogenerated column name too long for M2M field '
@@ -132,9 +132,9 @@ class Collector(object):
        if not (hasattr(objs, 'model') and hasattr(objs, '_raw_delete')):
            return False
        model = objs.model
-        if (signals.pre_delete.has_listeners(model)
-                or signals.post_delete.has_listeners(model)
-                or signals.m2m_changed.has_listeners(model)):
+        if (signals.pre_delete.has_listeners(model) or
+                signals.post_delete.has_listeners(model) or
+                signals.m2m_changed.has_listeners(model)):
            return False
        # The use of from_field comes from the need to avoid cascade back to
        # parent when parent delete is cascading to child.
@@ -395,8 +395,8 @@ class CombinedExpression(Expression):
        except FieldError:
            rhs_output = None
        if (not connection.features.has_native_duration_field and
-                ((lhs_output and lhs_output.get_internal_type() == 'DurationField')
-                 or (rhs_output and rhs_output.get_internal_type() == 'DurationField'))):
+                ((lhs_output and lhs_output.get_internal_type() == 'DurationField') or
+                 (rhs_output and rhs_output.get_internal_type() == 'DurationField'))):
            return DurationExpression(self.lhs, self.connector, self.rhs).as_sql(compiler, connection)
        if (lhs_output and rhs_output and self.connector == self.SUB and
                lhs_output.get_internal_type() in {'DateField', 'DateTimeField', 'TimeField'} and
@@ -1878,8 +1878,7 @@ class IntegerField(Field):
        return int(value)

    def get_prep_lookup(self, lookup_type, value):
-        if ((lookup_type == 'gte' or lookup_type == 'lt')
-                and isinstance(value, float)):
+        if lookup_type in ('gte', 'lt') and isinstance(value, float):
            value = math.ceil(value)
        return super(IntegerField, self).get_prep_lookup(lookup_type, value)

@@ -440,8 +440,8 @@ class ImageField(FileField):
            return

        dimension_fields_filled = not(
-            (self.width_field and not getattr(instance, self.width_field))
-            or (self.height_field and not getattr(instance, self.height_field))
+            (self.width_field and not getattr(instance, self.width_field)) or
+            (self.height_field and not getattr(instance, self.height_field))
        )
        # When both dimension fields have values, we are most likely loading
        # data from the database or updating an image field that already had
@@ -275,8 +275,8 @@ class PatternLookup(BuiltinLookup):
        # So, for Python values we don't need any special pattern, but for
        # SQL reference values or SQL transformations we need the correct
        # pattern added.
-        if (hasattr(self.rhs, 'get_compiler') or hasattr(self.rhs, 'as_sql')
-                or hasattr(self.rhs, '_as_sql') or self.bilateral_transforms):
+        if (hasattr(self.rhs, 'get_compiler') or hasattr(self.rhs, 'as_sql') or
+                hasattr(self.rhs, '_as_sql') or self.bilateral_transforms):
            pattern = connection.pattern_ops[self.lookup_name].format(connection.pattern_esc)
            return pattern.format(rhs)
        else:
@@ -384,9 +384,9 @@ class Options(object):

        return make_immutable_fields_list(
            "fields",
-            (f for f in self._get_fields(reverse=False) if
-             is_not_an_m2m_field(f) and is_not_a_generic_relation(f)
-             and is_not_a_generic_foreign_key(f))
+            (f for f in self._get_fields(reverse=False)
+             if is_not_an_m2m_field(f) and is_not_a_generic_relation(f) and
+             is_not_a_generic_foreign_key(f))
        )

    @cached_property
@@ -443,8 +443,8 @@ class QuerySet(object):
        objs = list(objs)
        self._populate_pk_values(objs)
        with transaction.atomic(using=self.db, savepoint=False):
-            if (connection.features.can_combine_inserts_with_and_without_auto_increment_pk
-                    and self.model._meta.has_auto_field):
+            if (connection.features.can_combine_inserts_with_and_without_auto_increment_pk and
+                    self.model._meta.has_auto_field):
                self._batched_insert(objs, fields, batch_size)
            else:
                objs_with_pk, objs_without_pk = partition(lambda o: o.pk is None, objs)
@@ -735,9 +735,7 @@ class Query(object):
            # Only the first alias (skipped above) should have None join_type
            assert self.alias_map[alias].join_type is not None
            parent_alias = self.alias_map[alias].parent_alias
-            parent_louter = (
-                parent_alias
-                and self.alias_map[parent_alias].join_type == LOUTER)
+            parent_louter = parent_alias and self.alias_map[parent_alias].join_type == LOUTER
            already_louter = self.alias_map[alias].join_type == LOUTER
            if ((self.alias_map[alias].nullable or parent_louter) and
                    not already_louter):
@@ -746,8 +744,8 @@ class Query(object):
                # refer to this one.
                aliases.extend(
                    join for join in self.alias_map.keys()
-                    if (self.alias_map[join].parent_alias == alias
-                        and join not in aliases))
+                    if self.alias_map[join].parent_alias == alias and join not in aliases
+                )

    def demote_joins(self, aliases):
        """
@@ -1641,8 +1639,7 @@ class Query(object):
                    # from the model on which the lookup failed.
                    raise
                else:
-                    names = sorted(list(get_field_names_from_opts(opts)) + list(self.extra)
-                                   + list(self.annotation_select))
+                    names = sorted(list(get_field_names_from_opts(opts)) + list(self.extra) + list(self.annotation_select))
                    raise FieldError("Cannot resolve keyword %r into field. "
                                     "Choices are: %s" % (name, ", ".join(names)))

@@ -1963,8 +1960,7 @@ class Query(object):
        # used. The proper fix would be to defer all decisions where
        # is_nullable() is needed to the compiler stage, but that is not easy
        # to do currently.
-        if ((connections[DEFAULT_DB_ALIAS].features.interprets_empty_strings_as_nulls)
-                and field.empty_strings_allowed):
+        if connections[DEFAULT_DB_ALIAS].features.interprets_empty_strings_as_nulls and field.empty_strings_allowed:
            return True
        else:
            return field.null
@@ -707,8 +707,8 @@ class BaseModelFormSet(BaseFormSet):
            uclass, lookup, field, unique_for = date_check
            for form in valid_forms:
                # see if we have data for both fields
-                if (form.cleaned_data and form.cleaned_data[field] is not None
-                        and form.cleaned_data[unique_for] is not None):
+                if (form.cleaned_data and form.cleaned_data[field] is not None and
+                        form.cleaned_data[unique_for] is not None):
                    # if it's a date lookup we need to get the data for all the fields
                    if lookup == 'date':
                        date = form.cleaned_data[unique_for]
@@ -806,8 +806,8 @@ class BaseModelFormSet(BaseFormSet):
        def pk_is_not_editable(pk):
            return (
                (not pk.editable) or (pk.auto_created or isinstance(pk, AutoField)) or (
-                    pk.remote_field and pk.remote_field.parent_link
-                    and pk_is_not_editable(pk.remote_field.model._meta.pk)
+                    pk.remote_field and pk.remote_field.parent_link and
+                    pk_is_not_editable(pk.remote_field.model._meta.pk)
                )
            )
        if pk_is_not_editable(pk) or pk.name not in form.fields:
@@ -999,9 +999,10 @@ def _get_foreign_key(parent_model, model, fk_name=None, can_fail=False):
        # Try to discover what the ForeignKey from model to parent_model is
        fks_to_parent = [
            f for f in opts.fields
-            if isinstance(f, ForeignKey)
-            and (f.remote_field.model == parent_model
-                 or f.remote_field.model in parent_model._meta.get_parent_list())
+            if isinstance(f, ForeignKey) and (
+                f.remote_field.model == parent_model or
+                f.remote_field.model in parent_model._meta.get_parent_list()
+            )
        ]
        if len(fks_to_parent) == 1:
            fk = fks_to_parent[0]
@@ -76,8 +76,8 @@ class CommonMiddleware(object):
        if settings.APPEND_SLASH and not request.get_full_path().endswith('/'):
            urlconf = getattr(request, 'urlconf', None)
            return (
-                not is_valid_path(request.path_info, urlconf)
-                and is_valid_path('%s/' % request.path_info, urlconf)
+                not is_valid_path(request.path_info, urlconf) and
+                is_valid_path('%s/' % request.path_info, urlconf)
            )
        return False

@@ -39,8 +39,8 @@ class LocaleMiddleware(object):
        path_valid = is_valid_path(language_path, urlconf)
        path_needs_slash = (
            not path_valid and (
-                settings.APPEND_SLASH and not language_path.endswith('/')
-                and is_valid_path('%s/' % language_path, urlconf)
+                settings.APPEND_SLASH and not language_path.endswith('/') and
+                is_valid_path('%s/' % language_path, urlconf)
            )
        )

@@ -327,8 +327,7 @@ class SimpleTestCase(unittest.TestCase):
    def _assert_contains(self, response, text, status_code, msg_prefix, html):
        # If the response supports deferred rendering and hasn't been rendered
        # yet, then ensure that it does get rendered before proceeding further.
-        if (hasattr(response, 'render') and callable(response.render)
-                and not response.is_rendered):
+        if hasattr(response, 'render') and callable(response.render) and not response.is_rendered:
            response.render()

        if msg_prefix:
@@ -920,9 +919,8 @@ class TransactionTestCase(SimpleTestCase):
        for db_name in self._databases_names(include_mirrors=False):
            # Flush the database
            inhibit_post_migrate = (
-                self.available_apps is not None
-                or (
-                    # Inhibit the post_migrate signal when using serialized
+                self.available_apps is not None or
+                (  # Inhibit the post_migrate signal when using serialized
                    # rollback to avoid trying to recreate the serialized data.
                    self.serialized_rollback and
                    hasattr(connections[db_name], '_test_serialized_contents')
@@ -423,16 +423,11 @@ def strip_quotes(want, got):
    """
    def is_quoted_string(s):
        s = s.strip()
-        return (len(s) >= 2
-                and s[0] == s[-1]
-                and s[0] in ('"', "'"))
+        return len(s) >= 2 and s[0] == s[-1] and s[0] in ('"', "'")

    def is_quoted_unicode(s):
        s = s.strip()
-        return (len(s) >= 3
-                and s[0] == 'u'
-                and s[1] == s[-1]
-                and s[1] in ('"', "'"))
+        return len(s) >= 3 and s[0] == 'u' and s[1] == s[-1] and s[1] in ('"', "'")

    if is_quoted_string(want) and is_quoted_string(got):
        want = want.strip()[1:-1]
@@ -101,8 +101,7 @@ class BaseArchive(object):
    def split_leading_dir(self, path):
        path = str(path)
        path = path.lstrip('/').lstrip('\\')
-        if '/' in path and (('\\' in path and path.find('/') < path.find('\\'))
-                            or '\\' not in path):
+        if '/' in path and (('\\' in path and path.find('/') < path.find('\\')) or '\\' not in path):
            return path.split('/', 1)
        elif '\\' in path:
            return path.split('\\', 1)
@@ -288,10 +288,12 @@ def template_localtime(value, use_tz=None):

    This function is designed for use by the template engine.
    """
-    should_convert = (isinstance(value, datetime)
-                      and (settings.USE_TZ if use_tz is None else use_tz)
-                      and not is_naive(value)
-                      and getattr(value, 'convert_to_local_time', True))
+    should_convert = (
+        isinstance(value, datetime) and
+        (settings.USE_TZ if use_tz is None else use_tz) and
+        not is_naive(value) and
+        getattr(value, 'convert_to_local_time', True)
+    )
    return localtime(value) if should_convert else value

@@ -129,8 +129,7 @@ class DjangoTranslation(gettext_module.GNUTranslations):
        self._add_installed_apps_translations()

        self._add_local_translations()
-        if (self.__language == settings.LANGUAGE_CODE and self.domain == 'django'
-                and self._catalog is None):
+        if self.__language == settings.LANGUAGE_CODE and self.domain == 'django' and self._catalog is None:
            # default lang should have at least one translation file available.
            raise IOError("No translation files found for default language %s." % settings.LANGUAGE_CODE)
        self._add_fallback(localedirs)
@@ -102,8 +102,8 @@ class Node(object):
            return data
        if self.connector == conn_type:
            # We can reuse self.children to append or squash the node other.
-            if (isinstance(data, Node) and not data.negated
-                    and (data.connector == conn_type or len(data) == 1)):
+            if (isinstance(data, Node) and not data.negated and
+                    (data.connector == conn_type or len(data) == 1)):
                # We can squash the other node's children directly into this
                # node. We are just doing (AB)(CD) == (ABCD) here, with the
                # addition that if the length of the other node is 1 the
@@ -190,8 +190,8 @@ class SafeExceptionReporterFilter(ExceptionReporterFilter):
        current_frame = tb_frame.f_back
        sensitive_variables = None
        while current_frame is not None:
-            if (current_frame.f_code.co_name == 'sensitive_variables_wrapper'
-                    and 'sensitive_variables_wrapper' in current_frame.f_locals):
+            if (current_frame.f_code.co_name == 'sensitive_variables_wrapper' and
+                    'sensitive_variables_wrapper' in current_frame.f_locals):
                # The sensitive_variables decorator was used, so we take note
                # of the sensitive variables' names.
                wrapper = current_frame.f_locals['sensitive_variables_wrapper']
@@ -219,8 +219,8 @@ class SafeExceptionReporterFilter(ExceptionReporterFilter):
        for name, value in tb_frame.f_locals.items():
            cleansed[name] = self.cleanse_special_types(request, value)

-        if (tb_frame.f_code.co_name == 'sensitive_variables_wrapper'
-                and 'sensitive_variables_wrapper' in tb_frame.f_locals):
+        if (tb_frame.f_code.co_name == 'sensitive_variables_wrapper' and
+                'sensitive_variables_wrapper' in tb_frame.f_locals):
            # For good measure, obfuscate the decorated function's arguments in
            # the sensitive_variables decorator's frame, in case the variables
            # associated with those arguments were meant to be obfuscated from
@@ -459,11 +459,12 @@ def technical_404_response(request, exception):
    except (IndexError, TypeError, KeyError):
        tried = []
    else:
-        if (not tried  # empty URLconf
-                or (request.path == '/'
-                    and len(tried) == 1  # default URLconf
-                    and len(tried[0]) == 1
-                    and getattr(tried[0][0], 'app_name', '') == getattr(tried[0][0], 'namespace', '') == 'admin')):
+        if (not tried or (  # empty URLconf
+                request.path == '/' and
+                len(tried) == 1 and  # default URLconf
+                len(tried[0]) == 1 and
+                getattr(tried[0][0], 'app_name', '') == getattr(tried[0][0], 'namespace', '') == 'admin'
+        )):
            return default_urlconf(request)

    urlconf = getattr(request, 'urlconf', settings.ROOT_URLCONF)
@@ -163,8 +163,7 @@ class BaseListView(MultipleObjectMixin, View):
            # When pagination is enabled and object_list is a queryset,
            # it's better to do a cheap query than to load the unpaginated
            # queryset in memory.
-            if (self.get_paginate_by(self.object_list) is not None
-                    and hasattr(self.object_list, 'exists')):
+            if self.get_paginate_by(self.object_list) is not None and hasattr(self.object_list, 'exists'):
                is_empty = not self.object_list.exists()
            else:
                is_empty = len(self.object_list) == 0
@@ -4,7 +4,7 @@ install-script = scripts/rpm-install.sh

 [flake8]
 exclude = build,.git,./django/utils/lru_cache.py,./django/utils/six.py,./django/conf/app_template/*,./django/dispatch/weakref_backports.py,./tests/.env,./xmlrunner,tests/view_tests/tests/py3_test_debug.py,tests/template_tests/annotated_tag_function.py
-ignore = E128,E402,W503,W601
+ignore = E128,E402,W601
 max-line-length = 119

 [isort]

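With W503 dropped from the ignore list above, any flake8 run that reads this [flake8] section of setup.cfg starts flagging line breaks placed before binary operators, which is exactly what the code hunks in this commit clean up by moving each operator to the end of the line it continues.
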
@@ -983,8 +983,8 @@ class AggregationTests(TestCase):
            Book.objects
            .annotate(n_authors=Count("authors"))
            .filter(
-                Q(name="The Definitive Guide to Django: Web Development Done Right")
-                | (Q(name="Artificial Intelligence: A Modern Approach") & Q(n_authors=3))
+                Q(name="The Definitive Guide to Django: Web Development Done Right") |
+                (Q(name="Artificial Intelligence: A Modern Approach") & Q(n_authors=3))
            )
        )
        self.assertQuerysetEqual(
@@ -941,8 +941,8 @@ value="Should escape < & > and <script>alert('xss')</
            password2 = CharField(widget=PasswordInput)

            def clean_password2(self):
-                if (self.cleaned_data.get('password1') and self.cleaned_data.get('password2')
-                        and self.cleaned_data['password1'] != self.cleaned_data['password2']):
+                if (self.cleaned_data.get('password1') and self.cleaned_data.get('password2') and
+                        self.cleaned_data['password1'] != self.cleaned_data['password2']):
                    raise ValidationError('Please make sure your passwords match.')

                return self.cleaned_data['password2']
@@ -980,8 +980,8 @@ value="Should escape < & > and <script>alert('xss')</

            def clean(self):
                # Test raising a ValidationError as NON_FIELD_ERRORS.
-                if (self.cleaned_data.get('password1') and self.cleaned_data.get('password2')
-                        and self.cleaned_data['password1'] != self.cleaned_data['password2']):
+                if (self.cleaned_data.get('password1') and self.cleaned_data.get('password2') and
+                        self.cleaned_data['password1'] != self.cleaned_data['password2']):
                    raise ValidationError('Please make sure your passwords match.')

                # Test raising ValidationError that targets multiple fields.
@@ -1120,8 +1120,8 @@ value="Should escape < & > and <script>alert('xss')</
            password2 = CharField(widget=PasswordInput)

            def clean(self):
-                if (self.cleaned_data.get('password1') and self.cleaned_data.get('password2')
-                        and self.cleaned_data['password1'] != self.cleaned_data['password2']):
+                if (self.cleaned_data.get('password1') and self.cleaned_data.get('password2') and
+                        self.cleaned_data['password1'] != self.cleaned_data['password2']):
                    raise ValidationError(
                        'Please make sure your passwords match.',
                        code='password_mismatch',
@@ -2309,8 +2309,8 @@ Password: <input type="password" name="password" /></li>
            password2 = CharField(widget=PasswordInput)

            def clean(self):
-                if (self.cleaned_data.get('password1') and self.cleaned_data.get('password2')
-                        and self.cleaned_data['password1'] != self.cleaned_data['password2']):
+                if (self.cleaned_data.get('password1') and self.cleaned_data.get('password2') and
+                        self.cleaned_data['password1'] != self.cleaned_data['password2']):
                    raise ValidationError('Please make sure your passwords match.')

                return self.cleaned_data
@@ -2369,8 +2369,8 @@ Password: <input type="password" name="password" /></li>
            password2 = CharField(widget=PasswordInput)

            def clean(self):
-                if (self.cleaned_data.get('password1') and self.cleaned_data.get('password2')
-                        and self.cleaned_data['password1'] != self.cleaned_data['password2']):
+                if (self.cleaned_data.get('password1') and self.cleaned_data.get('password2') and
+                        self.cleaned_data['password1'] != self.cleaned_data['password2']):
                    raise ValidationError('Please make sure your passwords match.')

                return self.cleaned_data
@@ -65,9 +65,10 @@ def api_get_area(x):
 def api_get_length(x):
     return x.length

-geos_function_tests = [val for name, val in vars().items()
-                       if hasattr(val, '__call__')
-                       and name.startswith('api_get_')]
+geos_function_tests = [
+    val for name, val in vars().items()
+    if hasattr(val, '__call__') and name.startswith('api_get_')
+]


 @skipUnless(HAS_GEOS, "Geos is required.")
@@ -27,8 +27,8 @@ class OrLookupsTests(TestCase):
    def test_filter_or(self):
        self.assertQuerysetEqual(
            (
-                Article.objects.filter(headline__startswith='Hello')
-                | Article.objects.filter(headline__startswith='Goodbye')
+                Article.objects.filter(headline__startswith='Hello') |
+                Article.objects.filter(headline__startswith='Goodbye')
            ), [
                'Hello',
                'Goodbye',
@@ -1170,8 +1170,7 @@ class Queries1Tests(BaseQuerysetTest):

    def test_ticket19672(self):
        self.assertQuerysetEqual(
-            Report.objects.filter(Q(creator__isnull=False) &
-                                  ~Q(creator__extra__value=41)),
+            Report.objects.filter(Q(creator__isnull=False) & ~Q(creator__extra__value=41)),
            ['<Report: r1>']
        )

@@ -1390,8 +1389,8 @@ class Queries4Tests(BaseQuerysetTest):

        q1 = Item.objects.filter(Q(creator__report__name='e1') | Q(creator=self.a1)).order_by()
        q2 = (
-            Item.objects.filter(Q(creator__report__name='e1')).order_by()
-            | Item.objects.filter(Q(creator=self.a1)).order_by()
+            Item.objects.filter(Q(creator__report__name='e1')).order_by() |
+            Item.objects.filter(Q(creator=self.a1)).order_by()
        )
        self.assertQuerysetEqual(q1, ["<Item: i1>"])
        self.assertEqual(str(q1.query), str(q2.query))
@@ -3081,12 +3080,10 @@ class NullJoinPromotionOrTest(TestCase):
        # Test OR + doubleneg. The expected result is that channel is LOUTER
        # joined, program INNER joined
        qs1_filter = Identifier.objects.filter(
-            Q(program__id=p2.id, channel__id=c1.id)
-            | Q(program__id=p1.id)
+            Q(program__id=p2.id, channel__id=c1.id) | Q(program__id=p1.id)
        ).order_by('pk')
        qs1_doubleneg = Identifier.objects.exclude(
-            ~Q(Q(program__id=p2.id, channel__id=c1.id)
-               | Q(program__id=p1.id))
+            ~Q(Q(program__id=p2.id, channel__id=c1.id) | Q(program__id=p1.id))
        ).order_by('pk')
        self.assertQuerysetEqual(qs1_doubleneg, qs1_filter, lambda x: x)
        self.assertEqual(str(qs1_filter.query).count('JOIN'),
@@ -3106,11 +3103,11 @@ class NullJoinPromotionOrTest(TestCase):
        # NOT is pushed to lowest level in the boolean tree, and
        # another query where this isn't done.
        qs1 = Identifier.objects.filter(
-            ~Q(~Q(program__id=p2.id, channel__id=c1.id)
-               & Q(program__id=p1.id))).order_by('pk')
+            ~Q(~Q(program__id=p2.id, channel__id=c1.id) & Q(program__id=p1.id))
+        ).order_by('pk')
        qs2 = Identifier.objects.filter(
-            Q(Q(program__id=p2.id, channel__id=c1.id)
-               | ~Q(program__id=p1.id))).order_by('pk')
+            Q(Q(program__id=p2.id, channel__id=c1.id) | ~Q(program__id=p1.id))
+        ).order_by('pk')
        self.assertQuerysetEqual(qs1, qs2, lambda x: x)
        self.assertEqual(str(qs1.query).count('JOIN'),
                         str(qs2.query).count('JOIN'))
@@ -3697,8 +3694,7 @@ class Ticket23605Tests(TestCase):
                F("ticket23605b__modelc_fk__field_c0")
            ) &
            # True for a1 (field_b1=True)
-            Q(ticket23605b__field_b1=True) &
-            ~Q(ticket23605b__pk__in=Ticket23605B.objects.filter(
+            Q(ticket23605b__field_b1=True) & ~Q(ticket23605b__pk__in=Ticket23605B.objects.filter(
                ~(
                    # Same filters as above commented filters, but
                    # double-negated (one for Q() above, one for
@@ -3803,12 +3799,12 @@ class Ticket23622Tests(TestCase):
            modelc_fk=c1,
        )
        qx = (
-            Q(ticket23605b__pk__in=Ticket23605B.objects.order_by('modela_fk', '-field_b1').distinct('modela_fk'))
-            & Q(ticket23605b__field_b0__gte=300)
+            Q(ticket23605b__pk__in=Ticket23605B.objects.order_by('modela_fk', '-field_b1').distinct('modela_fk')) &
+            Q(ticket23605b__field_b0__gte=300)
        )
        qy = (
-            Q(ticket23605b__in=Ticket23605B.objects.order_by('modela_fk', '-field_b1').distinct('modela_fk'))
-            & Q(ticket23605b__field_b0__gte=300)
+            Q(ticket23605b__in=Ticket23605B.objects.order_by('modela_fk', '-field_b1').distinct('modela_fk')) &
+            Q(ticket23605b__field_b0__gte=300)
        )
        self.assertEqual(
            set(Ticket23605A.objects.filter(qx).values_list('pk', flat=True)),
@@ -398,9 +398,9 @@ def register_tests(test_class, method_name, test_func, exclude=None):
    """
    formats = [
        f for f in serializers.get_serializer_formats()
-        if (not isinstance(serializers.get_serializer(f), serializers.BadSerializer)
-            and not f == 'geojson'
-            and (exclude is None or f not in exclude))
+        if (not isinstance(serializers.get_serializer(f), serializers.BadSerializer) and
+            f != 'geojson' and
+            (exclude is None or f not in exclude))
    ]
    for format_ in formats:
        setattr(test_class, method_name % format_, curry(test_func, format_))