Fixed #28893 -- Removed unnecessary dict.items() calls.

Tim Graham 2017-12-06 17:17:59 -05:00 committed by GitHub
parent 183fb7b2b9
commit a862af3839
14 changed files with 25 additions and 25 deletions
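
All 25 changed lines follow one of three shapes of the same refactoring. The snippet below is not part of the diff; it is a minimal sketch with made-up dicts (constraints, headers, data are placeholders) illustrating each shape.

# Hypothetical data, named only to mirror the hunks below.
constraints = {'idx_a': {'unique': True, 'columns': ['a', 'b']}}
headers = {'X_FORWARDED_FOR': '10.0.0.1', 'Accept': 'text/html'}
data = {'case': (('foo', 'bar'), (('foo', 'bar'),))}

# 1. Only the value is used: iterate .values() instead of .items().
for params in constraints.values():
    if params['unique']:
        print(params['columns'])

# 2. Only the key is used: iterate the dict directly instead of .items().
#    (A plain dict can't be mutated while iterating it, hence list().)
for k in list(headers):
    if '_' in k:
        del headers[k]

# 3. The value is a tuple the loop unpacks: unpack .values() directly.
for unique_together, normalized in data.values():
    print(unique_together, normalized)

In each case the loop's intent becomes explicit and the unused loop variable disappears; behaviour is unchanged.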

View File

@@ -264,7 +264,7 @@ class Command(BaseCommand):
         to the given database table name.
         """
         unique_together = []
-        for index, params in constraints.items():
+        for params in constraints.values():
             if params['unique']:
                 columns = params['columns']
                 if len(columns) > 1:

View File

@@ -128,7 +128,7 @@ class WSGIRequestHandler(simple_server.WSGIRequestHandler):
         # the WSGI environ. This prevents header-spoofing based on ambiguity
         # between underscores and dashes both normalized to underscores in WSGI
         # env vars. Nginx and Apache 2.4+ both do this as well.
-        for k, v in self.headers.items():
+        for k in self.headers:
             if '_' in k:
                 del self.headers[k]

View File

@@ -349,7 +349,7 @@ class MigrationAutodetector:
                 m2.dependencies.append((app_label, m1.name))
         # De-dupe dependencies
-        for app_label, migrations in self.migrations.items():
+        for migrations in self.migrations.values():
             for migration in migrations:
                 migration.dependencies = list(set(migration.dependencies))
@@ -590,7 +590,7 @@ class MigrationAutodetector:
             # Generate other opns
             related_dependencies = [
                 (app_label, model_name, name, True)
-                for name, field in sorted(related_fields.items())
+                for name in sorted(related_fields)
             ]
             related_dependencies.append((app_label, model_name, None, True))
             for index in indexes:
@@ -738,7 +738,7 @@ class MigrationAutodetector:
                     )
                 )
             # Then remove each related field
-            for name, field in sorted(related_fields.items()):
+            for name in sorted(related_fields):
                 self.add_operation(
                     app_label,
                     operations.RemoveField(
@@ -759,7 +759,7 @@ class MigrationAutodetector:
                 if not related_object.many_to_many:
                     dependencies.append((related_object_app_label, object_name, field_name, "alter"))
-            for name, field in sorted(related_fields.items()):
+            for name in sorted(related_fields):
                 dependencies.append((app_label, model_name, name, False))
             # We're referenced in another field's through=
             through_user = self.through_users.get((app_label, model_state.name_lower))
@@ -1173,7 +1173,7 @@ class MigrationAutodetector:
                 next_number += 1
                 migration.name = new_name
         # Now fix dependencies
-        for app_label, migrations in changes.items():
+        for migrations in changes.values():
             for migration in migrations:
                 migration.dependencies = [name_map.get(d, d) for d in migration.dependencies]
         return changes

View File

@@ -308,7 +308,7 @@ class Collector:
                         )
         # update collected instances
-        for model, instances_for_fieldvalues in self.field_updates.items():
+        for instances_for_fieldvalues in self.field_updates.values():
             for (field, value), instances in instances_for_fieldvalues.items():
                 for obj in instances:
                     setattr(obj, field.attname, value)

View File

@@ -1749,7 +1749,7 @@ class Query:
         """
         group_by = list(self.select)
         if self.annotation_select:
-            for alias, annotation in self.annotation_select.items():
+            for annotation in self.annotation_select.values():
                 for col in annotation.get_group_by_cols():
                     group_by.append(col)
         self.group_by = tuple(group_by)

View File

@@ -160,7 +160,7 @@ def setup_databases(verbosity, interactive, keepdb=False, debug_sql=False, paral
     old_names = []
-    for signature, (db_name, aliases) in test_databases.items():
+    for db_name, aliases in test_databases.values():
         first_alias = None
         for alias in aliases:
             connection = connections[alias]

View File

@@ -164,7 +164,7 @@ class SafeExceptionReporterFilter(ExceptionReporterFilter):
             cleansed = request.POST.copy()
             if sensitive_post_parameters == '__ALL__':
                 # Cleanse all parameters.
-                for k, v in cleansed.items():
+                for k in cleansed:
                     cleansed[k] = CLEANSED_SUBSTITUTE
                 return cleansed
             else:
@@ -213,7 +213,7 @@ class SafeExceptionReporterFilter(ExceptionReporterFilter):
         if self.is_active(request) and sensitive_variables:
             if sensitive_variables == '__ALL__':
                 # Cleanse all variables
-                for name, value in tb_frame.f_locals.items():
+                for name in tb_frame.f_locals:
                     cleansed[name] = CLEANSED_SUBSTITUTE
             else:
                 # Cleanse specified variables

View File

@@ -33,7 +33,7 @@ class DatabaseCreationTests(SimpleTestCase):
         try:
             yield
         finally:
-            for name, value in kwargs.items():
+            for name in kwargs:
                 if name in saved_values:
                     settings[name] = saved_values[name]
                 else:

View File

@@ -27,7 +27,7 @@ class ServerSideCursorsPostgres(TestCase):
     @contextmanager
     def override_db_setting(self, **kwargs):
-        for setting, value in kwargs.items():
+        for setting in kwargs:
             original_value = connection.settings_dict.get(setting)
             if setting in connection.settings_dict:
                 self.addCleanup(operator.setitem, connection.settings_dict, setting, original_value)

View File

@@ -176,7 +176,7 @@ class IntrospectionTests(TransactionTestCase):
             constraints = connection.introspection.get_constraints(cursor, Article._meta.db_table)
         index = {}
         index2 = {}
-        for key, val in constraints.items():
+        for val in constraints.values():
             if val['columns'] == ['headline', 'pub_date']:
                 index = val
             if val['columns'] == ['headline', 'response_to_id', 'pub_date', 'reporter_id']:
@@ -198,7 +198,7 @@ class IntrospectionTests(TransactionTestCase):
             ['response_to_id'],
             ['headline', 'response_to_id', 'pub_date', 'reporter_id'],
         ]
-        for key, val in constraints.items():
+        for val in constraints.values():
             if val['index'] and not (val['primary_key'] or val['unique']):
                 self.assertIn(val['columns'], expected_columns)
                 self.assertEqual(val['orders'], ['ASC'] * len(val['columns']))

View File

@@ -180,7 +180,7 @@ class SchemaTests(TransactionTestCase):
         """
         constraints = self.get_constraints(model._meta.db_table)
         constraint_fk = None
-        for name, details in constraints.items():
+        for details in constraints.values():
             if details['columns'] == [column] and details['foreign_key']:
                 constraint_fk = details['foreign_key']
                 break
@@ -836,7 +836,7 @@ class SchemaTests(TransactionTestCase):
             editor.create_model(LocalBook)
         # Ensure no FK constraint exists
         constraints = self.get_constraints(LocalBook._meta.db_table)
-        for name, details in constraints.items():
+        for details in constraints.values():
             if details['foreign_key']:
                 self.fail('Found an unexpected FK constraint to %s' % details['columns'])
         old_field = LocalBook._meta.get_field("author")
@@ -1430,7 +1430,7 @@ class SchemaTests(TransactionTestCase):
             editor.create_model(Author)
         # Ensure the constraint exists
         constraints = self.get_constraints(Author._meta.db_table)
-        for name, details in constraints.items():
+        for details in constraints.values():
             if details['columns'] == ["height"] and details['check']:
                 break
         else:
@@ -1442,7 +1442,7 @@ class SchemaTests(TransactionTestCase):
         with connection.schema_editor() as editor:
             editor.alter_field(Author, old_field, new_field, strict=True)
         constraints = self.get_constraints(Author._meta.db_table)
-        for name, details in constraints.items():
+        for details in constraints.values():
             if details['columns'] == ["height"] and details['check']:
                 self.fail("Check constraint for height found")
         # Alter the column to re-add it
@@ -1450,7 +1450,7 @@ class SchemaTests(TransactionTestCase):
         with connection.schema_editor() as editor:
             editor.alter_field(Author, new_field, new_field2, strict=True)
         constraints = self.get_constraints(Author._meta.db_table)
-        for name, details in constraints.items():
+        for details in constraints.values():
             if details['columns'] == ["height"] and details['check']:
                 break
         else:

View File

@@ -106,7 +106,7 @@ def inherited_create(pk, klass, data):
     # automatically is easier than manually creating both.
     models.Model.save(instance)
     created = [instance]
-    for klass, field in instance._meta.parents.items():
+    for klass in instance._meta.parents:
         created.append(klass.objects.get(id=pk))
     return created

View File

@@ -47,7 +47,7 @@ class GetUniqueCheckTests(unittest.TestCase):
                 (('foo', 'bar'), ('bar', 'baz'))),
         }
-        for test_name, (unique_together, normalized) in data.items():
+        for unique_together, normalized in data.values():
            class M(models.Model):
                foo = models.IntegerField()
                bar = models.IntegerField()

View File

@@ -769,7 +769,7 @@ class ExceptionReportTestMixin:
            self.assertContains(response, 'sauce', status_code=500)
            self.assertNotContains(response, 'worcestershire', status_code=500)
        if check_for_POST_params:
-           for k, v in self.breakfast_data.items():
+           for k in self.breakfast_data:
               # All POST parameters' names are shown.
               self.assertContains(response, k, status_code=500)
               # Non-sensitive POST parameters' values are shown.
@@ -858,7 +858,7 @@ class ExceptionReportTestMixin:
            self.assertNotIn('worcestershire', body_html)
        if check_for_POST_params:
-           for k, v in self.breakfast_data.items():
+           for k in self.breakfast_data:
               # All POST parameters' names are shown.
               self.assertIn(k, body_plain)
              # Non-sensitive POST parameters' values are shown.