mirror of https://github.com/django/django.git

Fixed #28893 -- Removed unnecessary dict.items() calls.

parent 183fb7b2b9
commit a862af3839
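The pattern applied throughout this commit: when a loop over a dict uses only the keys or only the values, calling .items() and discarding half of each pair is unnecessary; iterating the dict itself (keys) or .values() says the same thing more directly. A minimal sketch of the before/after, using a made-up constraints mapping that is not taken from the diff:

    # Illustrative only: a made-up constraints mapping, not code from the diff.
    constraints = {
        'idx_headline_pub_date': {'columns': ['headline', 'pub_date'], 'unique': True},
        'idx_body': {'columns': ['body'], 'unique': False},
    }

    # Before: .items() yields (key, value) pairs, but the key is never used.
    unique_columns = []
    for name, params in constraints.items():
        if params['unique']:
            unique_columns.append(params['columns'])

    # After: iterate .values() when only the values matter ...
    unique_columns = [params['columns'] for params in constraints.values() if params['unique']]

    # ... and iterate the dict itself when only the keys matter.
    index_names = [name for name in constraints if name.startswith('idx_')]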
@@ -264,7 +264,7 @@ class Command(BaseCommand):
         to the given database table name.
         """
         unique_together = []
-        for index, params in constraints.items():
+        for params in constraints.values():
             if params['unique']:
                 columns = params['columns']
                 if len(columns) > 1:
@@ -128,7 +128,7 @@ class WSGIRequestHandler(simple_server.WSGIRequestHandler):
         # the WSGI environ. This prevents header-spoofing based on ambiguity
         # between underscores and dashes both normalized to underscores in WSGI
         # env vars. Nginx and Apache 2.4+ both do this as well.
-        for k, v in self.headers.items():
+        for k in self.headers:
             if '_' in k:
                 del self.headers[k]

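For the hunk above, the reason headers containing underscores are dropped is the CGI/WSGI naming convention, under which dashes and underscores collapse into the same environ key. A hypothetical helper (not part of Django) to illustrate:

    # Hypothetical helper, for illustration only: both spellings map to the
    # same WSGI environ key, so an attacker could spoof 'X-Auth-User' by
    # sending 'X-Auth_User' unless underscore variants are discarded.
    def wsgi_environ_key(header_name):
        return 'HTTP_' + header_name.upper().replace('-', '_')

    assert wsgi_environ_key('X-Auth-User') == 'HTTP_X_AUTH_USER'
    assert wsgi_environ_key('X-Auth_User') == 'HTTP_X_AUTH_USER'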
@@ -349,7 +349,7 @@ class MigrationAutodetector:
                 m2.dependencies.append((app_label, m1.name))

         # De-dupe dependencies
-        for app_label, migrations in self.migrations.items():
+        for migrations in self.migrations.values():
             for migration in migrations:
                 migration.dependencies = list(set(migration.dependencies))

@@ -590,7 +590,7 @@ class MigrationAutodetector:
             # Generate other opns
             related_dependencies = [
                 (app_label, model_name, name, True)
-                for name, field in sorted(related_fields.items())
+                for name in sorted(related_fields)
             ]
             related_dependencies.append((app_label, model_name, None, True))
             for index in indexes:
@@ -738,7 +738,7 @@ class MigrationAutodetector:
                     )
                 )
             # Then remove each related field
-            for name, field in sorted(related_fields.items()):
+            for name in sorted(related_fields):
                 self.add_operation(
                     app_label,
                     operations.RemoveField(
@@ -759,7 +759,7 @@ class MigrationAutodetector:
                 if not related_object.many_to_many:
                     dependencies.append((related_object_app_label, object_name, field_name, "alter"))

-            for name, field in sorted(related_fields.items()):
+            for name in sorted(related_fields):
                 dependencies.append((app_label, model_name, name, False))
             # We're referenced in another field's through=
             through_user = self.through_users.get((app_label, model_state.name_lower))
@@ -1173,7 +1173,7 @@ class MigrationAutodetector:
                     next_number += 1
                 migration.name = new_name
         # Now fix dependencies
-        for app_label, migrations in changes.items():
+        for migrations in changes.values():
             for migration in migrations:
                 migration.dependencies = [name_map.get(d, d) for d in migration.dependencies]
         return changes
@@ -308,7 +308,7 @@ class Collector:
                         )

         # update collected instances
-        for model, instances_for_fieldvalues in self.field_updates.items():
+        for instances_for_fieldvalues in self.field_updates.values():
             for (field, value), instances in instances_for_fieldvalues.items():
                 for obj in instances:
                     setattr(obj, field.attname, value)
@@ -1749,7 +1749,7 @@ class Query:
         """
         group_by = list(self.select)
         if self.annotation_select:
-            for alias, annotation in self.annotation_select.items():
+            for annotation in self.annotation_select.values():
                 for col in annotation.get_group_by_cols():
                     group_by.append(col)
         self.group_by = tuple(group_by)
@@ -160,7 +160,7 @@ def setup_databases(verbosity, interactive, keepdb=False, debug_sql=False, paral

     old_names = []

-    for signature, (db_name, aliases) in test_databases.items():
+    for db_name, aliases in test_databases.values():
         first_alias = None
         for alias in aliases:
             connection = connections[alias]
@@ -164,7 +164,7 @@ class SafeExceptionReporterFilter(ExceptionReporterFilter):
                 cleansed = request.POST.copy()
                 if sensitive_post_parameters == '__ALL__':
                     # Cleanse all parameters.
-                    for k, v in cleansed.items():
+                    for k in cleansed:
                         cleansed[k] = CLEANSED_SUBSTITUTE
                     return cleansed
                 else:
@@ -213,7 +213,7 @@ class SafeExceptionReporterFilter(ExceptionReporterFilter):
         if self.is_active(request) and sensitive_variables:
             if sensitive_variables == '__ALL__':
                 # Cleanse all variables
-                for name, value in tb_frame.f_locals.items():
+                for name in tb_frame.f_locals:
                     cleansed[name] = CLEANSED_SUBSTITUTE
             else:
                 # Cleanse specified variables
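The two debug.py hunks above follow the same key-only pattern: every value is overwritten with the substitute string, so the values returned by .items() were never read. A minimal standalone sketch, with a plain dict standing in for the QueryDict copy and a stand-in substitute value:

    # Sketch only; Django's real code operates on request.POST.copy() and its
    # own CLEANSED_SUBSTITUTE constant.
    CLEANSED_SUBSTITUTE = '********************'

    def cleanse_all(params):
        cleansed = dict(params)
        for k in cleansed:  # reassigning values while iterating keys is safe
            cleansed[k] = CLEANSED_SUBSTITUTE
        return cleansed

    print(cleanse_all({'username': 'alice', 'password': 'hunter2'}))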
@@ -33,7 +33,7 @@ class DatabaseCreationTests(SimpleTestCase):
         try:
             yield
         finally:
-            for name, value in kwargs.items():
+            for name in kwargs:
                 if name in saved_values:
                     settings[name] = saved_values[name]
                 else:
@@ -27,7 +27,7 @@ class ServerSideCursorsPostgres(TestCase):

     @contextmanager
     def override_db_setting(self, **kwargs):
-        for setting, value in kwargs.items():
+        for setting in kwargs:
             original_value = connection.settings_dict.get(setting)
             if setting in connection.settings_dict:
                 self.addCleanup(operator.setitem, connection.settings_dict, setting, original_value)
@@ -176,7 +176,7 @@ class IntrospectionTests(TransactionTestCase):
             constraints = connection.introspection.get_constraints(cursor, Article._meta.db_table)
         index = {}
         index2 = {}
-        for key, val in constraints.items():
+        for val in constraints.values():
             if val['columns'] == ['headline', 'pub_date']:
                 index = val
             if val['columns'] == ['headline', 'response_to_id', 'pub_date', 'reporter_id']:
@@ -198,7 +198,7 @@ class IntrospectionTests(TransactionTestCase):
             ['response_to_id'],
             ['headline', 'response_to_id', 'pub_date', 'reporter_id'],
         ]
-        for key, val in constraints.items():
+        for val in constraints.values():
             if val['index'] and not (val['primary_key'] or val['unique']):
                 self.assertIn(val['columns'], expected_columns)
                 self.assertEqual(val['orders'], ['ASC'] * len(val['columns']))
@@ -180,7 +180,7 @@ class SchemaTests(TransactionTestCase):
         """
         constraints = self.get_constraints(model._meta.db_table)
         constraint_fk = None
-        for name, details in constraints.items():
+        for details in constraints.values():
             if details['columns'] == [column] and details['foreign_key']:
                 constraint_fk = details['foreign_key']
                 break
@@ -836,7 +836,7 @@ class SchemaTests(TransactionTestCase):
             editor.create_model(LocalBook)
         # Ensure no FK constraint exists
         constraints = self.get_constraints(LocalBook._meta.db_table)
-        for name, details in constraints.items():
+        for details in constraints.values():
             if details['foreign_key']:
                 self.fail('Found an unexpected FK constraint to %s' % details['columns'])
         old_field = LocalBook._meta.get_field("author")
@@ -1430,7 +1430,7 @@ class SchemaTests(TransactionTestCase):
             editor.create_model(Author)
         # Ensure the constraint exists
         constraints = self.get_constraints(Author._meta.db_table)
-        for name, details in constraints.items():
+        for details in constraints.values():
             if details['columns'] == ["height"] and details['check']:
                 break
         else:
@@ -1442,7 +1442,7 @@ class SchemaTests(TransactionTestCase):
         with connection.schema_editor() as editor:
             editor.alter_field(Author, old_field, new_field, strict=True)
         constraints = self.get_constraints(Author._meta.db_table)
-        for name, details in constraints.items():
+        for details in constraints.values():
             if details['columns'] == ["height"] and details['check']:
                 self.fail("Check constraint for height found")
         # Alter the column to re-add it
@@ -1450,7 +1450,7 @@ class SchemaTests(TransactionTestCase):
         with connection.schema_editor() as editor:
             editor.alter_field(Author, new_field, new_field2, strict=True)
         constraints = self.get_constraints(Author._meta.db_table)
-        for name, details in constraints.items():
+        for details in constraints.values():
             if details['columns'] == ["height"] and details['check']:
                 break
         else:
@@ -106,7 +106,7 @@ def inherited_create(pk, klass, data):
     # automatically is easier than manually creating both.
     models.Model.save(instance)
     created = [instance]
-    for klass, field in instance._meta.parents.items():
+    for klass in instance._meta.parents:
         created.append(klass.objects.get(id=pk))
     return created

@@ -47,7 +47,7 @@ class GetUniqueCheckTests(unittest.TestCase):
                 (('foo', 'bar'), ('bar', 'baz'))),
         }

-        for test_name, (unique_together, normalized) in data.items():
+        for unique_together, normalized in data.values():
             class M(models.Model):
                 foo = models.IntegerField()
                 bar = models.IntegerField()
@@ -769,7 +769,7 @@ class ExceptionReportTestMixin:
             self.assertContains(response, 'sauce', status_code=500)
             self.assertNotContains(response, 'worcestershire', status_code=500)
         if check_for_POST_params:
-            for k, v in self.breakfast_data.items():
+            for k in self.breakfast_data:
                 # All POST parameters' names are shown.
                 self.assertContains(response, k, status_code=500)
                 # Non-sensitive POST parameters' values are shown.
@@ -858,7 +858,7 @@ class ExceptionReportTestMixin:
             self.assertNotIn('worcestershire', body_html)

         if check_for_POST_params:
-            for k, v in self.breakfast_data.items():
+            for k in self.breakfast_data:
                 # All POST parameters' names are shown.
                 self.assertIn(k, body_plain)
                 # Non-sensitive POST parameters' values are shown.