mirror of https://github.com/django/django.git
Fixed #28996 -- Simplified some boolean constructs and removed trivial continue statements.
This commit is contained in:
parent 4bcec02368
commit a38ae914d8
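The commit applies two recurring patterns across the hunks below: assigning the result of a boolean expression directly instead of setting a flag inside an if/else branch, and inverting a loop guard so the loop body replaces an `if ...: continue` pair. A minimal sketch of both rewrites (hypothetical names, not code from this commit):

    # Before: flag set through branching
    matched = False
    if candidate in allowed:
        matched = True

    # After: assign the boolean expression directly
    matched = candidate in allowed

    # Before: trivial continue guarding the loop body
    for item in items:
        if item is None:
            continue
        results.append(item)

    # After: inverted condition, body nested under the guard
    for item in items:
        if item is not None:
            results.append(item)

Both forms are behavior-preserving; the diff applies them case by case.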
@@ -152,9 +152,8 @@ class FieldListFilter(ListFilter):
     @classmethod
     def create(cls, field, request, params, model, model_admin, field_path):
         for test, list_filter_class in cls._field_list_filters:
-            if not test(field):
-                continue
-            return list_filter_class(field, request, params, model, model_admin, field_path=field_path)
+            if test(field):
+                return list_filter_class(field, request, params, model, model_admin, field_path=field_path)


 class RelatedFieldListFilter(FieldListFilter):
@@ -367,9 +367,8 @@ class InlineFieldset(Fieldset):
     def __iter__(self):
         fk = getattr(self.formset, "fk", None)
         for field in self.fields:
-            if fk and fk.name == field:
-                continue
-            yield Fieldline(self.form, field, self.readonly_fields, model_admin=self.model_admin)
+            if not fk or fk.name != field:
+                yield Fieldline(self.form, field, self.readonly_fields, model_admin=self.model_admin)


 class AdminErrorList(forms.utils.ErrorList):
@@ -831,10 +831,7 @@ class ModelAdmin(BaseModelAdmin):
         # Then gather them from the model admin and all parent classes,
         # starting with self and working back up.
         for klass in self.__class__.mro()[::-1]:
-            class_actions = getattr(klass, 'actions', [])
-            # Avoid trying to iterate over None
-            if not class_actions:
-                continue
+            class_actions = getattr(klass, 'actions', []) or []
             actions.extend(self.get_action(action) for action in class_actions)

         # get_action might have returned None, so filter any of those out.
@@ -1498,11 +1495,10 @@ class ModelAdmin(BaseModelAdmin):
         ModelForm = self.get_form(request, obj)
         if request.method == 'POST':
             form = ModelForm(request.POST, request.FILES, instance=obj)
-            if form.is_valid():
-                form_validated = True
+            form_validated = form.is_valid()
+            if form_validated:
                 new_object = self.save_form(request, form, change=not add)
             else:
-                form_validated = False
                 new_object = form.instance
             formsets, inline_instances = self._create_formsets(request, new_object, change=not add)
             if all_valid(formsets) and form_validated:
@@ -128,10 +128,9 @@ def result_headers(cl):
         order_type = ''
         new_order_type = 'asc'
         sort_priority = 0
-        sorted = False
         # Is it currently being sorted on?
-        if i in ordering_field_columns:
-            sorted = True
+        is_sorted = i in ordering_field_columns
+        if is_sorted:
             order_type = ordering_field_columns.get(i).lower()
             sort_priority = list(ordering_field_columns).index(i) + 1
             th_classes.append('sorted %sending' % order_type)
@@ -165,7 +164,7 @@ def result_headers(cl):
         yield {
             "text": text,
             "sortable": True,
-            "sorted": sorted,
+            "sorted": is_sorted,
             "ascending": order_type == "asc",
             "sort_priority": sort_priority,
             "url_primary": cl.get_query_string({ORDER_VAR: '.'.join(o_list_primary)}),
@@ -53,11 +53,8 @@ def prepare_lookup_value(key, value):
     if key.endswith('__in'):
         value = value.split(',')
     # if key ends with __isnull, special case '' and the string literals 'false' and '0'
-    if key.endswith('__isnull'):
-        if value.lower() in ('', 'false', '0'):
-            value = False
-        else:
-            value = True
+    elif key.endswith('__isnull'):
+        value = value.lower() not in ('', 'false', '0')
     return value


@@ -378,9 +378,8 @@ class ChangeList:
             else:
                 if isinstance(field.remote_field, models.ManyToOneRel):
                     # <FK>_id field names don't require a join.
-                    if field_name == field.get_attname():
-                        continue
-                    return True
+                    if field_name != field.get_attname():
+                        return True
         return False

     def url_for_result(self, result):
@@ -438,8 +438,7 @@ class AutocompleteMixin:
                 str(option_value) in value and
                 (has_selected is False or self.allow_multiple_selected)
             )
-            if selected is True and has_selected is False:
-                has_selected = True
+            has_selected |= selected
             index = len(default[1])
             subgroup = default[1]
             subgroup.append(self.create_option(name, option_value, option_label, selected_choices, index))
@@ -41,12 +41,11 @@ class Command(BaseCommand):
                     collector.collect([ct])

                     for obj_type, objs in collector.data.items():
-                        if objs == {ct}:
-                            continue
-                        ct_info.append(' - %s %s object(s)' % (
-                            len(objs),
-                            obj_type._meta.label,
-                        ))
+                        if objs != {ct}:
+                            ct_info.append(' - %s %s object(s)' % (
+                                len(objs),
+                                obj_type._meta.label,
+                            ))
                 content_type_display = '\n'.join(ct_info)
                 self.stdout.write("""Some content types in your database are stale and can be deleted.
 Any objects that depend on these content types will also be deleted.
@@ -142,12 +142,9 @@ class OGRGeometry(GDALBase):
     def _from_json(geom_input):
         ptr = capi.from_json(geom_input)
         if GDAL_VERSION < (2, 0):
-            has_srs = True
             try:
                 capi.get_geom_srs(ptr)
             except SRSException:
-                has_srs = False
-            if not has_srs:
                 srs = SpatialReference(4326)
                 capi.assign_srs(ptr, srs.ptr)
         return ptr
@@ -49,7 +49,14 @@ class LineString(LinearGeometryMixin, GEOSGeometry):
                 )
             )

-        if isinstance(coords, (tuple, list)):
+        numpy_coords = not isinstance(coords, (tuple, list))
+        if numpy_coords:
+            shape = coords.shape  # Using numpy's shape.
+            if len(shape) != 2:
+                raise TypeError('Too many dimensions.')
+            self._checkdim(shape[1])
+            ndim = shape[1]
+        else:
             # Getting the number of coords and the number of dimensions -- which
             # must stay the same, e.g., no LineString((1, 2), (1, 2, 3)).
             ndim = None
@@ -63,14 +70,6 @@ class LineString(LinearGeometryMixin, GEOSGeometry):
                     self._checkdim(ndim)
                 elif len(coord) != ndim:
                     raise TypeError('Dimension mismatch.')
-            numpy_coords = False
-        else:
-            shape = coords.shape  # Using numpy's shape.
-            if len(shape) != 2:
-                raise TypeError('Too many dimensions.')
-            self._checkdim(shape[1])
-            ndim = shape[1]
-            numpy_coords = True

         # Creating a coordinate sequence object because it is easier to
         # set the points using its methods.
@@ -61,10 +61,8 @@ class SessionStore(SessionBase):
         modification = os.stat(self._key_to_file()).st_mtime
         if settings.USE_TZ:
             modification = datetime.datetime.utcfromtimestamp(modification)
-            modification = modification.replace(tzinfo=timezone.utc)
-        else:
-            modification = datetime.datetime.fromtimestamp(modification)
-        return modification
+            return modification.replace(tzinfo=timezone.utc)
+        return datetime.datetime.fromtimestamp(modification)

     def _expiry_date(self, session_data):
         """
@@ -86,8 +86,8 @@ class HashedFilesMixin:
         parsed_name = urlsplit(unquote(name))
         clean_name = parsed_name.path.strip()
         filename = (filename and urlsplit(unquote(filename)).path.strip()) or clean_name
-        opened = False
-        if content is None:
+        opened = content is None
+        if opened:
             if not self.exists(filename):
                 raise ValueError("The file '%s' could not be found with %r." % (filename, self))
             try:
@@ -95,7 +95,6 @@ class HashedFilesMixin:
             except IOError:
                 # Handle directory paths and fragments
                 return name
-            opened = True
         try:
             file_hash = self.file_hash(clean_name, content)
         finally:
@@ -17,13 +17,10 @@ E002 = Error(

 @register(Tags.templates)
 def check_setting_app_dirs_loaders(app_configs, **kwargs):
-    passed_check = True
-    for conf in settings.TEMPLATES:
-        if not conf.get('APP_DIRS'):
-            continue
-        if 'loaders' in conf.get('OPTIONS', {}):
-            passed_check = False
-    return [] if passed_check else [E001]
+    return [E001] if any(
+        conf.get('APP_DIRS') and 'loaders' in conf.get('OPTIONS', {})
+        for conf in settings.TEMPLATES
+    ) else []


 @register(Tags.templates)
@@ -160,10 +160,7 @@ class MemoryFileUploadHandler(FileUploadHandler):
         """
         # Check the content-length header to see if we should
         # If the post is too large, we cannot use the Memory handler.
-        if content_length > settings.FILE_UPLOAD_MAX_MEMORY_SIZE:
-            self.activated = False
-        else:
-            self.activated = True
+        self.activated = content_length <= settings.FILE_UPLOAD_MAX_MEMORY_SIZE

     def new_file(self, *args, **kwargs):
         super().new_file(*args, **kwargs)
@@ -271,9 +271,8 @@ class EmailMessage:
             # Use cached DNS_NAME for performance
             msg['Message-ID'] = make_msgid(domain=DNS_NAME)
         for name, value in self.extra_headers.items():
-            if name.lower() == 'from':  # From is already handled
-                continue
-            msg[name] = value
+            if name.lower() != 'from':  # From is already handled
+                msg[name] = value
         return msg

     def recipients(self):
@@ -132,11 +132,10 @@ def loads(s, key=None, salt='django.core.signing', serializer=JSONSerializer, ma
     # TimestampSigner.unsign() returns str but base64 and zlib compression
     # operate on bytes.
     base64d = force_bytes(TimestampSigner(key, salt=salt).unsign(s, max_age=max_age))
-    decompress = False
-    if base64d[:1] == b'.':
+    decompress = base64d[:1] == b'.'
+    if decompress:
         # It's compressed; uncompress it first
         base64d = base64d[1:]
-        decompress = True
     data = b64_decode(base64d)
     if decompress:
         data = zlib.decompress(data)
@@ -570,12 +570,11 @@ class BaseDatabaseSchemaEditor:
             # db_index=True.
             index_names = self._constraint_names(model, [old_field.column], index=True, type_=Index.suffix)
             for index_name in index_names:
-                if index_name in meta_index_names:
+                if index_name not in meta_index_names:
                     # The only way to check if an index was created with
                     # db_index=True or with Index(['field'], name='foo')
                     # is to look at its name (refs #28053).
-                    continue
-                self.execute(self._delete_constraint_sql(self.sql_delete_index, model, index_name))
+                    self.execute(self._delete_constraint_sql(self.sql_delete_index, model, index_name))
         # Change check constraints?
         if old_db_params['check'] != new_db_params['check'] and old_db_params['check']:
             constraint_names = self._constraint_names(model, [old_field.column], check=True)
@@ -261,8 +261,8 @@ class MigrationAutodetector:
                     deps_satisfied = True
                     operation_dependencies = set()
                     for dep in operation._auto_deps:
-                        is_swappable_dep = False
-                        if dep[0] == "__setting__":
+                        is_swappable_dep = dep[0] == '__setting__'
+                        if is_swappable_dep:
                             # We need to temporarily resolve the swappable dependency to prevent
                             # circular references. While keeping the dependency checks on the
                             # resolved model we still add the swappable dependencies.
@@ -270,7 +270,6 @@ class MigrationAutodetector:
                             resolved_app_label, resolved_object_name = getattr(settings, dep[1]).split('.')
                             original_dep = dep
                             dep = (resolved_app_label, resolved_object_name.lower(), dep[2], dep[3])
-                            is_swappable_dep = True
                         if dep[0] != app_label and dep[0] != "__setting__":
                             # External app dependency. See if it's not yet
                             # satisfied.
@@ -831,18 +830,18 @@ class MigrationAutodetector:
             dependencies.extend(self._get_dependencies_for_foreign_key(field))
         # You can't just add NOT NULL fields with no default or fields
         # which don't allow empty strings as default.
-        preserve_default = True
         time_fields = (models.DateField, models.DateTimeField, models.TimeField)
-        if (not field.null and not field.has_default() and
-                not field.many_to_many and
-                not (field.blank and field.empty_strings_allowed) and
-                not (isinstance(field, time_fields) and field.auto_now)):
+        preserve_default = (
+            field.null or field.has_default() or field.many_to_many or
+            (field.blank and field.empty_strings_allowed) or
+            (isinstance(field, time_fields) and field.auto_now)
+        )
+        if not preserve_default:
             field = field.clone()
             if isinstance(field, time_fields) and field.auto_now_add:
                 field.default = self.questioner.ask_auto_now_add_addition(field_name, model_name)
             else:
                 field.default = self.questioner.ask_not_null_addition(field_name, model_name)
-            preserve_default = False
         self.add_operation(
             app_label,
             operations.AddField(
@@ -367,9 +367,7 @@ class MigrationGraph:
         plan = []
         for node in nodes:
             for migration in self.forwards_plan(node):
-                if migration not in plan:
-                    if not at_end and migration in nodes:
-                        continue
-                    plan.append(migration)
+                if migration not in plan and (at_end or migration not in nodes):
+                    plan.append(migration)
         project_state = ProjectState(real_apps=real_apps)
         for node in plan:
@@ -172,10 +172,9 @@ class MigrationLoader:
         dependencies find the correct root node.
         """
         for parent in migration.dependencies:
-            if parent[0] != key[0] or parent[1] == '__first__':
-                # Ignore __first__ references to the same app (#22325).
-                continue
-            self.graph.add_dependency(migration, key, parent, skip_validation=True)
+            # Ignore __first__ references to the same app.
+            if parent[0] == key[0] and parent[1] != '__first__':
+                self.graph.add_dependency(migration, key, parent, skip_validation=True)

     def add_external_dependencies(self, key, migration):
         for parent in migration.dependencies:
@@ -109,11 +109,7 @@ class RelatedField(FieldCacheMixin, Field):
         related_name = self.remote_field.related_name
         if related_name is None:
             return []
-        is_valid_id = True
-        if keyword.iskeyword(related_name):
-            is_valid_id = False
-        if not related_name.isidentifier():
-            is_valid_id = False
+        is_valid_id = not keyword.iskeyword(related_name) and related_name.isidentifier()
         if not (is_valid_id or related_name.endswith('+')):
             return [
                 checks.Error(
@@ -753,10 +753,9 @@ class Options:

         # We must keep track of which models we have already seen. Otherwise we
         # could include the same field multiple times from different models.
-        topmost_call = False
-        if seen_models is None:
+        topmost_call = seen_models is None
+        if topmost_call:
             seen_models = set()
-            topmost_call = True
         seen_models.add(self.model)

         # Creates a cache key composed of all arguments
@@ -785,9 +784,8 @@ class Options:
                 for obj in parent._meta._get_fields(
                         forward=forward, reverse=reverse, include_parents=include_parents,
                         include_hidden=include_hidden, seen_models=seen_models):
-                    if getattr(obj, 'parent_link', False) and obj.model != self.concrete_model:
-                        continue
-                    fields.append(obj)
+                    if not getattr(obj, 'parent_link', False) or obj.model == self.concrete_model:
+                        fields.append(obj)
         if reverse and not self.proxy:
             # Tree is computed once and cached until the app cache is expired.
             # It is composed of a list of fields pointing to the current model
@@ -114,13 +114,10 @@ class SQLCompiler:
             for col in cols:
                 expressions.append(col)
         for expr, (sql, params, is_ref) in order_by:
-            if expr.contains_aggregate:
-                continue
-            # We can skip References to select clause, as all expressions in
-            # the select clause are already part of the group by.
-            if is_ref:
-                continue
-            expressions.extend(expr.get_source_expressions())
+            # Skip References to the select clause, as all expressions in the
+            # select clause are already part of the group by.
+            if not expr.contains_aggregate and not is_ref:
+                expressions.extend(expr.get_source_expressions())
         having_group_by = self.having.get_group_by_cols() if self.having else ()
         for expr in having_group_by:
             expressions.append(expr)
@@ -283,7 +280,7 @@ class SQLCompiler:
                 continue

             col, order = get_order_dir(field, asc)
-            descending = True if order == 'DESC' else False
+            descending = order == 'DESC'

             if col in self.query.annotation_select:
                 # Reference to expression in SELECT clause
@@ -646,7 +643,7 @@ class SQLCompiler:
         The 'name' is of the form 'field1__field2__...__fieldN'.
         """
         name, order = get_order_dir(name, default_order)
-        descending = True if order == 'DESC' else False
+        descending = order == 'DESC'
         pieces = name.split(LOOKUP_SEP)
         field, targets, alias, joins, path, opts = self._setup_joins(pieces, opts, alias)

@@ -747,11 +744,9 @@ class SQLCompiler:
         # included in the related selection.
         fields_found = set()
         if requested is None:
-            if isinstance(self.query.select_related, dict):
+            restricted = isinstance(self.query.select_related, dict)
+            if restricted:
                 requested = self.query.select_related
-                restricted = True
-            else:
-                restricted = False

         def get_related_klass_infos(klass_info, related_klass_infos):
             klass_info['related_klass_infos'] = related_klass_infos
@@ -666,10 +666,9 @@ class Query:
             workset = {}
             for model, values in seen.items():
                 for field in model._meta.local_fields:
-                    if field in values:
-                        continue
-                    m = field.model._meta.concrete_model
-                    add_to_dict(workset, m, field)
+                    if field not in values:
+                        m = field.model._meta.concrete_model
+                        add_to_dict(workset, m, field)
             for model, values in must_include.items():
                 # If we haven't included a model in workset, we don't add the
                 # corresponding must_include fields for that model, since an
@@ -587,9 +587,8 @@ class BaseModelFormSet(BaseFormSet):
         return field.to_python

     def _construct_form(self, i, **kwargs):
-        pk_required = False
-        if i < self.initial_form_count():
-            pk_required = True
+        pk_required = i < self.initial_form_count()
+        if pk_required:
             if self.is_bound:
                 pk_key = '%s-%s' % (self.add_prefix(i), self.model._meta.pk.name)
                 try:
@@ -599,8 +599,7 @@ class ChoiceWidget(Widget):
                     str(subvalue) in value and
                     (not has_selected or self.allow_multiple_selected)
                 )
-                if selected and not has_selected:
-                    has_selected = True
+                has_selected |= selected
                 subgroup.append(self.create_option(
                     name, subvalue, sublabel, selected, index,
                     subindex=subindex, attrs=attrs,
@@ -376,9 +376,8 @@ def learn_cache_key(request, response, cache_timeout=None, key_prefix=None, cach
         headerlist = []
         for header in cc_delim_re.split(response['Vary']):
             header = header.upper().replace('-', '_')
-            if header == 'ACCEPT_LANGUAGE' and is_accept_language_redundant:
-                continue
-            headerlist.append('HTTP_' + header)
+            if header != 'ACCEPT_LANGUAGE' or not is_accept_language_redundant:
+                headerlist.append('HTTP_' + header)
         headerlist.sort()
         cache.set(cache_key, headerlist, cache_timeout)
         return _generate_cache_key(request, request.method, headerlist, key_prefix)
@@ -275,11 +275,9 @@ class DictWrapper(dict):
         present). If the prefix is present, pass the value through self.func
         before returning, otherwise return the raw value.
         """
-        if key.startswith(self.prefix):
-            use_func = True
+        use_func = key.startswith(self.prefix)
+        if use_func:
             key = key[len(self.prefix):]
-        else:
-            use_func = False
         value = super().__getitem__(key)
         if use_func:
             return self.func(value)
@@ -176,8 +176,7 @@ def normalize(pattern):

             if consume_next:
                 ch, escaped = next(pattern_iter)
-            else:
-                consume_next = True
+            consume_next = True
     except StopIteration:
         pass
     except NotImplementedError:
@@ -1694,9 +1694,8 @@ templates used by the :class:`ModelAdmin` views:
         def get_formsets_with_inlines(self, request, obj=None):
             for inline in self.get_inline_instances(request, obj):
                 # hide MyInline in the add view
-                if isinstance(inline, MyInline) and obj is None:
-                    continue
-                yield inline.get_formset(request, obj), inline
+                if not isinstance(inline, MyInline) or obj is not None:
+                    yield inline.get_formset(request, obj), inline

 .. method:: ModelAdmin.formfield_for_foreignkey(db_field, request, **kwargs)

@@ -81,11 +81,7 @@ class DistanceTest(TestCase):
         # Now performing the `dwithin` queries on a geodetic coordinate system.
         for dist in au_dists:
             with self.subTest(dist=dist):
-                if isinstance(dist, D) and not oracle:
-                    type_error = True
-                else:
-                    type_error = False
-
+                type_error = isinstance(dist, D) and not oracle
                 if isinstance(dist, tuple):
                     if oracle or spatialite:
                         # Result in meters
@@ -17,13 +17,8 @@ def get_max_column_name_length():
     for db in settings.DATABASES:
         connection = connections[db]
         max_name_length = connection.ops.max_name_length()
-        if max_name_length is None or connection.features.truncates_names:
-            continue
-        else:
-            if allowed_len is None:
-                allowed_len = max_name_length
-                db_alias = db
-            elif max_name_length < allowed_len:
+        if max_name_length is not None and not connection.features.truncates_names:
+            if allowed_len is None or max_name_length < allowed_len:
                 allowed_len = max_name_length
                 db_alias = db

@@ -252,7 +252,7 @@ class BaseTests:
     def test_middleware_disabled_fail_silently(self):
         """
         When the middleware is disabled, an exception is not raised
-        if 'fail_silently' = True
+        if 'fail_silently' is True.
         """
         data = {
             'messages': ['Test message %d' % x for x in range(5)],
@@ -85,12 +85,11 @@ def get_test_modules():

     for modpath, dirpath in discovery_paths:
         for f in os.listdir(dirpath):
-            if ('.' in f or
-                    os.path.basename(f) in SUBDIRS_TO_SKIP or
-                    os.path.isfile(f) or
-                    not os.path.exists(os.path.join(dirpath, f, '__init__.py'))):
-                continue
-            modules.append((modpath, f))
+            if ('.' not in f and
+                    os.path.basename(f) not in SUBDIRS_TO_SKIP and
+                    not os.path.isfile(f) and
+                    os.path.exists(os.path.join(dirpath, f, '__init__.py'))):
+                modules.append((modpath, f))
     return modules


@@ -189,13 +188,11 @@ def setup(verbosity, test_labels, parallel):
         # if the module (or an ancestor) was named on the command line, or
         # no modules were named (i.e., run all), import
        # this module and add it to INSTALLED_APPS.
-        if not test_labels:
-            module_found_in_labels = True
-        else:
-            module_found_in_labels = any(
-                # exact match or ancestor match
-                module_label == label or module_label.startswith(label + '.')
-                for label in test_labels_set)
+        module_found_in_labels = not test_labels or any(
+            # exact match or ancestor match
+            module_label == label or module_label.startswith(label + '.')
+            for label in test_labels_set
+        )

         if module_name in CONTRIB_TESTS_TO_APPS and module_found_in_labels:
             settings.INSTALLED_APPS.append(CONTRIB_TESTS_TO_APPS[module_name])