Refs #33476 -- Refactored problematic code before reformatting by Black.

In these cases Black produces unexpected results, e.g.

def make_random_password(
    self,
    length=10,
    allowed_chars='abcdefghjkmnpqrstuvwxyz' 'ABCDEFGHJKLMNPQRSTUVWXYZ' '23456789',
):

or

cursor.execute("""
SELECT ...
""",
    [table_name],
)
This commit is contained in:
Mariusz Felisiak 2022-02-03 11:20:46 +01:00 committed by GitHub
parent c9d6e3595c
commit c5cd878382
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
53 changed files with 227 additions and 217 deletions

View File

@ -29,10 +29,11 @@ class BaseUserManager(models.Manager):
email = email_name + '@' + domain_part.lower()
return email
def make_random_password(self, length=10,
allowed_chars='abcdefghjkmnpqrstuvwxyz'
'ABCDEFGHJKLMNPQRSTUVWXYZ'
'23456789'):
def make_random_password(
self,
length=10,
allowed_chars='abcdefghjkmnpqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ23456789',
):
"""
Generate a random password with the given length and given
allowed_chars. The default value of allowed_chars does not have "I" or

View File

@ -90,8 +90,9 @@ def get_hashers():
hasher_cls = import_string(hasher_path)
hasher = hasher_cls()
if not getattr(hasher, 'algorithm'):
raise ImproperlyConfigured("hasher doesn't specify an "
"algorithm name: %s" % hasher_path)
raise ImproperlyConfigured(
"hasher doesn't specify an algorithm name: %s" % hasher_path
)
hashers.append(hasher)
return hashers

View File

@ -36,8 +36,9 @@ def add_srs_entry(srs, auth_name='EPSG', auth_srid=None, ref_sys_name=None,
connection = connections[database]
if not hasattr(connection.ops, 'spatial_version'):
raise Exception('The `add_srs_entry` utility only works '
'with spatial backends.')
raise Exception(
'The `add_srs_entry` utility only works with spatial backends.'
)
if not connection.features.supports_add_srs_entry:
raise Exception('This utility does not support your database backend.')
SpatialRefSys = connection.ops.spatial_ref_sys()

View File

@ -50,8 +50,9 @@ def check_settings(base_url=None):
"You're using the staticfiles app "
"without having set the required STATIC_URL setting.")
if settings.MEDIA_URL == base_url:
raise ImproperlyConfigured("The MEDIA_URL and STATIC_URL "
"settings must have different values")
raise ImproperlyConfigured(
"The MEDIA_URL and STATIC_URL settings must have different values"
)
if (settings.DEBUG and settings.MEDIA_URL and settings.STATIC_URL and
settings.MEDIA_URL.startswith(settings.STATIC_URL)):
raise ImproperlyConfigured(
@ -59,5 +60,6 @@ def check_settings(base_url=None):
)
if ((settings.MEDIA_ROOT and settings.STATIC_ROOT) and
(settings.MEDIA_ROOT == settings.STATIC_ROOT)):
raise ImproperlyConfigured("The MEDIA_ROOT and STATIC_ROOT "
"settings must have different values")
raise ImproperlyConfigured(
"The MEDIA_ROOT and STATIC_ROOT settings must have different values"
)

View File

@ -552,8 +552,8 @@ class AppCommand(BaseCommand):
corresponding to an application label given on the command line.
"""
raise NotImplementedError(
"Subclasses of AppCommand must provide"
"a handle_app_config() method.")
"Subclasses of AppCommand must provide a handle_app_config() method."
)
class LabelCommand(BaseCommand):

View File

@ -15,8 +15,7 @@ try:
import MySQLdb as Database
except ImportError as err:
raise ImproperlyConfigured(
'Error loading MySQLdb module.\n'
'Did you install mysqlclient?'
'Error loading MySQLdb module.\nDid you install mysqlclient?'
) from err
from MySQLdb.constants import CLIENT, FIELD_TYPE

View File

@ -79,22 +79,28 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
if self.connection.mysql_is_mariadb and self.connection.features.can_introspect_json_field:
# JSON data type is an alias for LONGTEXT in MariaDB, select
# JSON_VALID() constraints to introspect JSONField.
cursor.execute("""
cursor.execute(
"""
SELECT c.constraint_name AS column_name
FROM information_schema.check_constraints AS c
WHERE
c.table_name = %s AND
LOWER(c.check_clause) = 'json_valid(`' + LOWER(c.constraint_name) + '`)' AND
c.constraint_schema = DATABASE()
""", [table_name])
""",
[table_name],
)
json_constraints = {row[0] for row in cursor.fetchall()}
# A default collation for the given table.
cursor.execute("""
cursor.execute(
"""
SELECT table_collation
FROM information_schema.tables
WHERE table_schema = DATABASE()
AND table_name = %s
""", [table_name])
""",
[table_name],
)
row = cursor.fetchone()
default_column_collation = row[0] if row else ''
# information_schema database gives more accurate results for some figures:
@ -102,7 +108,8 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
# not visible length (#5725)
# - precision and scale (for decimal fields) (#5014)
# - auto_increment is not available in cursor.description
cursor.execute("""
cursor.execute(
"""
SELECT
column_name, data_type, character_maximum_length,
numeric_precision, numeric_scale, extra, column_default,
@ -116,7 +123,9 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
END AS is_unsigned
FROM information_schema.columns
WHERE table_name = %s AND table_schema = DATABASE()
""", [default_column_collation, table_name])
""",
[default_column_collation, table_name],
)
field_info = {line[0]: InfoLine(*line) for line in cursor.fetchall()}
cursor.execute("SELECT * FROM %s LIMIT 1" % self.connection.ops.quote_name(table_name))
@ -153,14 +162,17 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
Return a dictionary of {field_name: (field_name_other_table, other_table)}
representing all foreign keys in the given table.
"""
cursor.execute("""
cursor.execute(
"""
SELECT column_name, referenced_column_name, referenced_table_name
FROM information_schema.key_column_usage
WHERE table_name = %s
AND table_schema = DATABASE()
AND referenced_table_name IS NOT NULL
AND referenced_column_name IS NOT NULL
""", [table_name])
""",
[table_name],
)
return {
field_name: (other_field, other_table)
for field_name, other_field, other_table in cursor.fetchall()
@ -171,13 +183,16 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
Retrieve the storage engine for a given table. Return the default
storage engine if the table doesn't exist.
"""
cursor.execute("""
cursor.execute(
"""
SELECT engine
FROM information_schema.tables
WHERE
table_name = %s AND
table_schema = DATABASE()
""", [table_name])
""",
[table_name],
)
result = cursor.fetchone()
if not result:
return self.connection.features._mysql_storage_engine

View File

@ -233,8 +233,9 @@ class DatabaseOperations(BaseDatabaseOperations):
# Zero in AUTO_INCREMENT field does not work without the
# NO_AUTO_VALUE_ON_ZERO SQL mode.
if value == 0 and not self.connection.features.allows_auto_pk_0:
raise ValueError('The database backend does not accept 0 as a '
'value for AutoField.')
raise ValueError(
'The database backend does not accept 0 as a value for AutoField.'
)
return value
def adapt_datetimefield_value(self, value):

View File

@ -93,7 +93,8 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
interface.
"""
# user_tab_columns gives data default for columns
cursor.execute("""
cursor.execute(
"""
SELECT
user_tab_cols.column_name,
user_tab_cols.data_default,
@ -126,7 +127,9 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
LEFT OUTER JOIN
user_tables ON user_tables.table_name = user_tab_cols.table_name
WHERE user_tab_cols.table_name = UPPER(%s)
""", [table_name])
""",
[table_name],
)
field_map = {
column: (internal_size, default if default != 'NULL' else None, collation, is_autofield, is_json)
for column, default, collation, internal_size, is_autofield, is_json in cursor.fetchall()
@ -151,7 +154,8 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
return name.lower()
def get_sequences(self, cursor, table_name, table_fields=()):
cursor.execute("""
cursor.execute(
"""
SELECT
user_tab_identity_cols.sequence_name,
user_tab_identity_cols.column_name
@ -165,7 +169,9 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
AND cols.column_name = user_tab_identity_cols.column_name
AND user_constraints.constraint_type = 'P'
AND user_tab_identity_cols.table_name = UPPER(%s)
""", [table_name])
""",
[table_name],
)
# Oracle allows only one identity column per table.
row = cursor.fetchone()
if row:
@ -203,7 +209,8 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
}
def get_primary_key_column(self, cursor, table_name):
cursor.execute("""
cursor.execute(
"""
SELECT
cols.column_name
FROM
@ -214,7 +221,9 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
user_constraints.constraint_type = 'P' AND
user_constraints.table_name = UPPER(%s) AND
cols.position = 1
""", [table_name])
""",
[table_name],
)
row = cursor.fetchone()
return self.identifier_converter(row[0]) if row else None
@ -225,7 +234,8 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
"""
constraints = {}
# Loop over the constraints, getting PKs, uniques, and checks
cursor.execute("""
cursor.execute(
"""
SELECT
user_constraints.constraint_name,
LISTAGG(LOWER(cols.column_name), ',') WITHIN GROUP (ORDER BY cols.position),
@ -249,7 +259,9 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
user_constraints.constraint_type = ANY('P', 'U', 'C')
AND user_constraints.table_name = UPPER(%s)
GROUP BY user_constraints.constraint_name, user_constraints.constraint_type
""", [table_name])
""",
[table_name],
)
for constraint, columns, pk, unique, check in cursor.fetchall():
constraint = self.identifier_converter(constraint)
constraints[constraint] = {
@ -261,7 +273,8 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
'index': unique, # All uniques come with an index
}
# Foreign key constraints
cursor.execute("""
cursor.execute(
"""
SELECT
cons.constraint_name,
LISTAGG(LOWER(cols.column_name), ',') WITHIN GROUP (ORDER BY cols.position),
@ -277,7 +290,9 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
cons.constraint_type = 'R' AND
cons.table_name = UPPER(%s)
GROUP BY cons.constraint_name, rcols.table_name, rcols.column_name
""", [table_name])
""",
[table_name],
)
for constraint, columns, other_table, other_column in cursor.fetchall():
constraint = self.identifier_converter(constraint)
constraints[constraint] = {
@ -289,7 +304,8 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
'columns': columns.split(','),
}
# Now get indexes
cursor.execute("""
cursor.execute(
"""
SELECT
ind.index_name,
LOWER(ind.index_type),
@ -306,7 +322,9 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
WHERE ind.index_name = cons.index_name
) AND cols.index_name = ind.index_name
GROUP BY ind.index_name, ind.index_type, ind.uniqueness
""", [table_name])
""",
[table_name],
)
for constraint, type_, unique, columns, orders in cursor.fetchall():
constraint = self.identifier_converter(constraint)
constraints[constraint] = {

View File

@ -377,7 +377,8 @@ END;
def __foreign_key_constraints(self, table_name, recursive):
with self.connection.cursor() as cursor:
if recursive:
cursor.execute("""
cursor.execute(
"""
SELECT
user_tables.table_name, rcons.constraint_name
FROM
@ -394,9 +395,12 @@ END;
user_tables.table_name, rcons.constraint_name
HAVING user_tables.table_name != UPPER(%s)
ORDER BY MAX(level) DESC
""", (table_name, table_name))
""",
(table_name, table_name),
)
else:
cursor.execute("""
cursor.execute(
"""
SELECT
cons.table_name, cons.constraint_name
FROM
@ -404,7 +408,9 @@ END;
WHERE
cons.constraint_type = 'R'
AND cons.table_name = UPPER(%s)
""", (table_name,))
""",
(table_name,),
)
return cursor.fetchall()
@cached_property

View File

@ -179,13 +179,16 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
def _is_identity_column(self, table_name, column_name):
with self.connection.cursor() as cursor:
cursor.execute("""
cursor.execute(
"""
SELECT
CASE WHEN identity_column = 'YES' THEN 1 ELSE 0 END
FROM user_tab_cols
WHERE table_name = %s AND
column_name = %s
""", [self.normalize_name(table_name), self.normalize_name(column_name)])
""",
[self.normalize_name(table_name), self.normalize_name(column_name)],
)
row = cursor.fetchone()
return row[0] if row else False
@ -197,9 +200,12 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
def _get_default_collation(self, table_name):
with self.connection.cursor() as cursor:
cursor.execute("""
cursor.execute(
"""
SELECT default_collation FROM user_tables WHERE table_name = %s
""", [self.normalize_name(table_name)])
""",
[self.normalize_name(table_name)],
)
return cursor.fetchone()[0]
def _alter_column_collation_sql(self, model, new_field, new_type, new_collation):

View File

@ -42,8 +42,7 @@ def resolve_relation(model, app_label=None, model_name=None):
return app_label, model_name.lower()
if app_label is None:
raise TypeError(
'app_label must be provided to resolve unscoped model '
'relationships.'
'app_label must be provided to resolve unscoped model relationships.'
)
return app_label, model.lower()
return model._meta.app_label, model._meta.model_name

View File

@ -40,8 +40,7 @@ class CheckConstraint(BaseConstraint):
self.check = check
if not getattr(check, 'conditional', False):
raise TypeError(
'CheckConstraint.check must be a Q instance or boolean '
'expression.'
'CheckConstraint.check must be a Q instance or boolean expression.'
)
super().__init__(name)

View File

@ -101,8 +101,7 @@ class Field(RegisterLookupMixin):
'invalid_choice': _('Value %(value)r is not a valid choice.'),
'null': _('This field cannot be null.'),
'blank': _('This field cannot be blank.'),
'unique': _('%(model_name)s with this %(field_label)s '
'already exists.'),
'unique': _('%(model_name)s with this %(field_label)s already exists.'),
# Translators: The 'lookup_type' is one of 'date', 'year' or 'month'.
# Eg: "Title must be unique for pub_date year"
'unique_for_date': _("%(field_label)s must be unique for "

View File

@ -36,8 +36,7 @@ class Index:
raise ValueError('Index.opclasses must be a list or tuple.')
if not expressions and not fields:
raise ValueError(
'At least one field or expression is required to define an '
'index.'
'At least one field or expression is required to define an index.'
)
if expressions and fields:
raise ValueError(

View File

@ -530,8 +530,7 @@ class IsNull(BuiltinLookup):
def as_sql(self, compiler, connection):
if not isinstance(self.rhs, bool):
raise ValueError(
'The QuerySet value for an isnull lookup must be True or '
'False.'
'The QuerySet value for an isnull lookup must be True or False.'
)
sql, params = compiler.compile(self.lhs)
if self.rhs:

View File

@ -523,8 +523,7 @@ class QuerySet:
)
if not unique_fields and db_features.supports_update_conflicts_with_target:
raise ValueError(
'Unique fields that can trigger the upsert must be '
'provided.'
'Unique fields that can trigger the upsert must be provided.'
)
# Updating primary keys and non-concrete fields is forbidden.
update_fields = [self.model._meta.get_field(name) for name in update_fields]
@ -930,8 +929,7 @@ class QuerySet:
self._not_support_combined_queries('contains')
if self._fields is not None:
raise TypeError(
'Cannot call QuerySet.contains() after .values() or '
'.values_list().'
'Cannot call QuerySet.contains() after .values() or .values_list().'
)
try:
if obj._meta.concrete_model != self.model._meta.concrete_model:
@ -1739,8 +1737,7 @@ class Prefetch:
)
):
raise ValueError(
'Prefetch querysets cannot use raw(), values(), and '
'values_list().'
'Prefetch querysets cannot use raw(), values(), and values_list().'
)
if to_attr:
self.prefetch_to = LOOKUP_SEP.join(lookup.split(LOOKUP_SEP)[:-1] + [to_attr])

View File

@ -1735,8 +1735,7 @@ class Query(BaseExpression):
for alias in self._gen_col_aliases([annotation]):
if isinstance(self.alias_map[alias], Join):
raise FieldError(
'Joined field references are not permitted in '
'this query'
'Joined field references are not permitted in this query'
)
if summarize:
# Summarize currently means we are doing an aggregate() query
@ -1763,8 +1762,9 @@ class Query(BaseExpression):
if not allow_joins and len(join_list) > 1:
raise FieldError('Joined field references are not permitted in this query')
if len(targets) > 1:
raise FieldError("Referencing multicolumn fields with F() objects "
"isn't supported")
raise FieldError(
"Referencing multicolumn fields with F() objects isn't supported"
)
# Verify that the last lookup in name is a field or a transform:
# transform_function() raises FieldError if not.
transform = join_info.transform_function(targets[0], final_alias)

View File

@ -1218,8 +1218,9 @@ class ModelChoiceField(ChoiceField):
# This class is a subclass of ChoiceField for purity, but it doesn't
# actually use any of ChoiceField's implementation.
default_error_messages = {
'invalid_choice': _('Select a valid choice. That choice is not one of'
' the available choices.'),
'invalid_choice': _(
'Select a valid choice. That choice is not one of the available choices.'
),
}
iterator = ModelChoiceIterator
@ -1338,8 +1339,9 @@ class ModelMultipleChoiceField(ModelChoiceField):
hidden_widget = MultipleHiddenInput
default_error_messages = {
'invalid_list': _('Enter a list of values.'),
'invalid_choice': _('Select a valid choice. %(value)s is not one of the'
' available choices.'),
'invalid_choice': _(
'Select a valid choice. %(value)s is not one of the available choices.'
),
'invalid_pk_value': _('%(pk)s” is not a valid value.')
}

View File

@ -38,8 +38,8 @@ class BaseEngine:
This method is optional.
"""
raise NotImplementedError(
"subclasses of BaseEngine should provide "
"a from_string() method")
"subclasses of BaseEngine should provide a from_string() method"
)
def get_template(self, template_name):
"""
@ -48,8 +48,8 @@ class BaseEngine:
Raise TemplateDoesNotExist if no such template exists.
"""
raise NotImplementedError(
"subclasses of BaseEngine must provide "
"a get_template() method")
"subclasses of BaseEngine must provide a get_template() method"
)
# Utility methods: they are provided to minimize code duplication and
# security issues in third-party backends.

View File

@ -675,8 +675,9 @@ class FilterExpression:
except VariableDoesNotExist:
var_obj = None
elif var is None:
raise TemplateSyntaxError("Could not find variable at "
"start of %s." % token)
raise TemplateSyntaxError(
"Could not find variable at start of %s." % token
)
else:
var_obj = Variable(var)
else:
@ -884,9 +885,10 @@ class Variable:
ValueError, # invalid literal for int()
KeyError, # current is a dict without `int(bit)` key
TypeError): # unsubscriptable object
raise VariableDoesNotExist("Failed lookup for key "
"[%s] in %r",
(bit, current)) # missing attribute
raise VariableDoesNotExist(
"Failed lookup for key [%s] in %r",
(bit, current),
) # missing attribute
if callable(current):
if getattr(current, 'do_not_call_in_templates', False):
pass

View File

@ -793,8 +793,9 @@ def do_for(parser, token):
"""
bits = token.split_contents()
if len(bits) < 4:
raise TemplateSyntaxError("'for' statements should have at least four"
" words: %s" % token.contents)
raise TemplateSyntaxError(
"'for' statements should have at least four words: %s" % token.contents
)
is_reversed = bits[-1] == 'reversed'
in_index = -3 if is_reversed else -2
@ -806,8 +807,9 @@ def do_for(parser, token):
loopvars = re.split(r' *, *', ' '.join(bits[1:in_index]))
for var in loopvars:
if not var or not invalid_chars.isdisjoint(var):
raise TemplateSyntaxError("'for' tag received an invalid argument:"
" %s" % token.contents)
raise TemplateSyntaxError(
"'for' tag received an invalid argument: %s" % token.contents
)
sequence = parser.compile_filter(bits[in_index + 1])
nodelist_loop = parser.parse(('empty', 'endfor',))
@ -1160,8 +1162,9 @@ def regroup(parser, token):
if bits[2] != 'by':
raise TemplateSyntaxError("second argument to 'regroup' tag must be 'by'")
if bits[4] != 'as':
raise TemplateSyntaxError("next-to-last argument to 'regroup' tag must"
" be 'as'")
raise TemplateSyntaxError(
"next-to-last argument to 'regroup' tag must be 'as'"
)
var_name = bits[5]
# RegroupNode will take each item in 'target', put it in the context under
# 'var_name', evaluate 'var_name'.'expression' in the current context, and
@ -1420,8 +1423,9 @@ def do_with(parser, token):
remaining_bits = bits[1:]
extra_context = token_kwargs(remaining_bits, parser, support_legacy=True)
if not extra_context:
raise TemplateSyntaxError("%r expected at least one variable "
"assignment" % bits[0])
raise TemplateSyntaxError(
"%r expected at least one variable assignment" % bits[0]
)
if remaining_bits:
raise TemplateSyntaxError("%r received an invalid token: %r" %
(bits[0], remaining_bits[0]))

View File

@ -315,13 +315,15 @@ def do_include(parser, token):
while remaining_bits:
option = remaining_bits.pop(0)
if option in options:
raise TemplateSyntaxError('The %r option was specified more '
'than once.' % option)
raise TemplateSyntaxError(
'The %r option was specified more than once.' % option
)
if option == 'with':
value = token_kwargs(remaining_bits, parser, support_legacy=False)
if not value:
raise TemplateSyntaxError('"with" in %r tag needs at least '
'one keyword argument.' % bits[0])
raise TemplateSyntaxError(
'"with" in %r tag needs at least one keyword argument.' % bits[0]
)
elif option == 'only':
value = True
else:

View File

@ -49,8 +49,9 @@ class SimpleTemplateResponse(HttpResponse):
"""
obj_dict = self.__dict__.copy()
if not self._is_rendered:
raise ContentNotRenderedError('The response content must be '
'rendered before it can be pickled.')
raise ContentNotRenderedError(
'The response content must be rendered before it can be pickled.'
)
for attr in self.rendering_attrs:
if attr in obj_dict:
del obj_dict[attr]

View File

@ -477,13 +477,15 @@ def do_block_translate(parser, token):
while remaining_bits:
option = remaining_bits.pop(0)
if option in options:
raise TemplateSyntaxError('The %r option was specified more '
'than once.' % option)
raise TemplateSyntaxError(
'The %r option was specified more than once.' % option
)
if option == 'with':
value = token_kwargs(remaining_bits, parser, support_legacy=True)
if not value:
raise TemplateSyntaxError('"with" in %r tag needs at least '
'one keyword argument.' % bits[0])
raise TemplateSyntaxError(
'"with" in %r tag needs at least one keyword argument.' % bits[0]
)
elif option == 'count':
value = token_kwargs(remaining_bits, parser, support_legacy=True)
if len(value) != 1:

View File

@ -209,6 +209,7 @@ def get_current_timezone_tag(parser, token):
# token.split_contents() isn't useful here because this tag doesn't accept variable as arguments
args = token.contents.split()
if len(args) != 3 or args[1] != 'as':
raise TemplateSyntaxError("'get_current_timezone' requires "
"'as variable' (got %r)" % args)
raise TemplateSyntaxError(
"'get_current_timezone' requires 'as variable' (got %r)" % args
)
return GetCurrentTimezoneNode(args[2])

View File

@ -894,8 +894,7 @@ class AsyncClient(ClientMixin, AsyncRequestFactory):
"""
if 'follow' in request:
raise NotImplementedError(
'AsyncClient request methods do not accept the follow '
'parameter.'
'AsyncClient request methods do not accept the follow parameter.'
)
scope = self._base_scope(**request)
# Curry a data dictionary into an instance of the template renderer

View File

@ -987,8 +987,7 @@ class AliasTests(TestCase):
def test_aggregate_alias(self):
msg = (
"Cannot aggregate over the 'other_age' alias. Use annotate() to "
"promote it."
"Cannot aggregate over the 'other_age' alias. Use annotate() to promote it."
)
with self.assertRaisesMessage(FieldError, msg):
Author.objects.alias(
@ -1012,10 +1011,7 @@ class AliasTests(TestCase):
def test_values_alias(self):
qs = Book.objects.alias(rating_alias=F('rating') - 1)
msg = (
"Cannot select the 'rating_alias' alias. Use annotate() to "
"promote it."
)
msg = "Cannot select the 'rating_alias' alias. Use annotate() to promote it."
for operation in ['values', 'values_list']:
with self.subTest(operation=operation):
with self.assertRaisesMessage(FieldError, msg):

View File

@ -429,8 +429,7 @@ class BulkCreateTests(TestCase):
)
def test_update_conflicts_invalid_update_fields(self):
msg = (
'bulk_create() can only be used with concrete fields in '
'update_fields.'
'bulk_create() can only be used with concrete fields in update_fields.'
)
# Reverse one-to-one relationship.
with self.assertRaisesMessage(ValueError, msg):
@ -467,8 +466,7 @@ class BulkCreateTests(TestCase):
)
def test_update_conflicts_invalid_unique_fields(self):
msg = (
'bulk_create() can only be used with concrete fields in '
'unique_fields.'
'bulk_create() can only be used with concrete fields in unique_fields.'
)
# Reverse one-to-one relationship.
with self.assertRaisesMessage(ValueError, msg):

View File

@ -70,8 +70,7 @@ def empty_response(request):
KEY_ERRORS_WITH_MEMCACHED_MSG = (
'Cache key contains characters that will cause errors if used with '
'memcached: %r'
'Cache key contains characters that will cause errors if used with memcached: %r'
)

View File

@ -409,16 +409,22 @@ class BasicExpressionsTests(TestCase):
def test_order_by_multiline_sql(self):
raw_order_by = (
RawSQL('''
RawSQL(
"""
CASE WHEN num_employees > 1000
THEN num_chairs
ELSE 0 END
''', []).desc(),
RawSQL('''
""",
[],
).desc(),
RawSQL(
"""
CASE WHEN num_chairs > 1
THEN 1
ELSE 0 END
''', []).asc()
""",
[],
).asc()
)
for qs in (
Company.objects.all(),

View File

@ -649,8 +649,7 @@ class FileUploadTests(TestCase):
'Content-Disposition: form-data; name="file_field"; filename="MiXeD_cAsE.txt"',
'Content-Type: application/octet-stream',
'',
'file contents\n'
'',
'file contents\n',
'--%(boundary)s--\r\n',
]
response = self.client.post(

View File

@ -212,8 +212,7 @@ class MultiWidgetTest(WidgetTest):
def test_no_whitespace_between_widgets(self):
widget = MyMultiWidget(widgets=(TextInput, TextInput()))
self.check_html(widget, 'code', None, html=(
'<input type="text" name="code_0">'
'<input type="text" name="code_1">'
'<input type="text" name="code_0"><input type="text" name="code_1">'
), strict=True)
def test_deepcopy(self):

View File

@ -370,8 +370,7 @@ class GISFunctionsTests(FuncTestMixin, TestCase):
@skipUnlessDBFeature('has_MakeValid_function')
def test_make_valid_multipolygon(self):
invalid_geom = fromstr(
'POLYGON((0 0, 0 1 , 1 1 , 1 0, 0 0), '
'(10 0, 10 1, 11 1, 11 0, 10 0))'
'POLYGON((0 0, 0 1 , 1 1 , 1 0, 0 0), (10 0, 10 1, 11 1, 11 0, 10 0))'
)
State.objects.create(name='invalid', poly=invalid_geom)
invalid = State.objects.filter(name='invalid').annotate(

View File

@ -1707,8 +1707,7 @@ class ConstraintsTests(TestCase):
self.assertEqual(Model.check(databases=self.databases), [
Error(
"'constraints' refers to the nonexistent field "
"'missing_field'.",
"'constraints' refers to the nonexistent field 'missing_field'.",
obj=Model,
id='models.E012',
),
@ -1972,8 +1971,7 @@ class ConstraintsTests(TestCase):
self.assertEqual(Model.check(databases=self.databases), [
Error(
"'constraints' refers to the nonexistent field "
"'missing_field'.",
"'constraints' refers to the nonexistent field 'missing_field'.",
obj=Model,
id='models.E012',
),
@ -2075,8 +2073,7 @@ class ConstraintsTests(TestCase):
self.assertEqual(Model.check(databases=self.databases), [
Error(
"'constraints' refers to the nonexistent field "
"'missing_field'.",
"'constraints' refers to the nonexistent field 'missing_field'.",
obj=Model,
id='models.E012',
),
@ -2193,8 +2190,7 @@ class ConstraintsTests(TestCase):
self.assertEqual(Model.check(databases=self.databases), [
Error(
"'constraints' refers to the nonexistent field "
"'missing_field'.",
"'constraints' refers to the nonexistent field 'missing_field'.",
obj=Model,
id='models.E012',
),
@ -2331,8 +2327,7 @@ class ConstraintsTests(TestCase):
self.assertEqual(Model.check(databases=self.databases), [
Error(
"'constraints' refers to the nonexistent field "
"'missing_field'.",
"'constraints' refers to the nonexistent field 'missing_field'.",
obj=Model,
id='models.E012',
),
@ -2348,8 +2343,7 @@ class ConstraintsTests(TestCase):
self.assertEqual(Model.check(databases=self.databases), [
Error(
"'constraints' refers to the nonexistent field "
"'missing_field'.",
"'constraints' refers to the nonexistent field 'missing_field'.",
obj=Model,
id='models.E012',
),

View File

@ -308,9 +308,7 @@ class MigrateTests(MigrationTestBase):
with mock.patch('django.core.management.color.supports_color', lambda *args: True):
call_command("showmigrations", format='list', stdout=out, verbosity=0, no_color=False)
self.assertEqual(
'\x1b[1mmigrations\n\x1b[0m'
' [ ] 0001_initial\n'
' [ ] 0002_second\n',
'\x1b[1mmigrations\n\x1b[0m [ ] 0001_initial\n [ ] 0002_second\n',
out.getvalue().lower()
)
@ -320,9 +318,7 @@ class MigrateTests(MigrationTestBase):
# Giving the explicit app_label tests for selective `show_list` in the command
call_command("showmigrations", "migrations", format='list', stdout=out, verbosity=0, no_color=True)
self.assertEqual(
'migrations\n'
' [x] 0001_initial\n'
' [ ] 0002_second\n',
'migrations\n [x] 0001_initial\n [ ] 0002_second\n',
out.getvalue().lower()
)
out = io.StringIO()
@ -343,8 +339,7 @@ class MigrateTests(MigrationTestBase):
out = io.StringIO()
call_command('showmigrations', format='list', stdout=out, verbosity=2, no_color=True)
self.assertEqual(
'migrations\n'
' [ ] 0001_squashed_0002 (2 squashed migrations)\n',
'migrations\n [ ] 0001_squashed_0002 (2 squashed migrations)\n',
out.getvalue().lower(),
)
out = io.StringIO()
@ -368,8 +363,7 @@ class MigrateTests(MigrationTestBase):
out = io.StringIO()
call_command('showmigrations', format='list', stdout=out, verbosity=2, no_color=True)
self.assertEqual(
'migrations\n'
' [x] 0001_squashed_0002 (2 squashed migrations)\n',
'migrations\n [x] 0001_squashed_0002 (2 squashed migrations)\n',
out.getvalue().lower(),
)
finally:
@ -447,8 +441,7 @@ class MigrateTests(MigrationTestBase):
# Show the plan for when there is nothing to apply.
call_command('migrate', 'migrations', '0003', plan=True, stdout=out, no_color=True)
self.assertEqual(
'Planned operations:\n'
' No planned migration operations.\n',
'Planned operations:\n No planned migration operations.\n',
out.getvalue()
)
out = io.StringIO()
@ -609,8 +602,7 @@ class MigrateTests(MigrationTestBase):
out = io.StringIO()
call_command('showmigrations', 'mutate_state_b', format='plan', stdout=out)
self.assertEqual(
'[ ] mutate_state_b.0001_initial\n'
'[ ] mutate_state_b.0002_add_field\n',
'[ ] mutate_state_b.0001_initial\n[ ] mutate_state_b.0002_add_field\n',
out.getvalue()
)
# Single app with dependencies.
@ -911,8 +903,7 @@ class MigrateTests(MigrationTestBase):
call_command("migrate", "migrations", verbosity=0)
call_command("showmigrations", "migrations", stdout=out, no_color=True)
self.assertEqual(
'migrations\n'
' [x] 0001_squashed_0002 (2 squashed migrations)\n',
'migrations\n [x] 0001_squashed_0002 (2 squashed migrations)\n',
out.getvalue().lower()
)
applied_migrations = recorder.applied_migrations()
@ -944,8 +935,7 @@ class MigrateTests(MigrationTestBase):
call_command("migrate", "migrations", verbosity=0)
call_command("showmigrations", "migrations", stdout=out, no_color=True)
self.assertEqual(
'migrations\n'
' [x] 0001_squashed_0002 (2 squashed migrations)\n',
'migrations\n [x] 0001_squashed_0002 (2 squashed migrations)\n',
out.getvalue().lower()
)
self.assertIn(
@ -1115,8 +1105,7 @@ class MigrateTests(MigrationTestBase):
call_command('migrate', 'migrations', prune=True, stdout=out, no_color=True)
self.assertEqual(
out.getvalue(),
'Pruning migrations:\n'
' No migrations to prune.\n',
'Pruning migrations:\n No migrations to prune.\n',
)
out = io.StringIO()
call_command(
@ -2311,8 +2300,7 @@ class AppLabelErrorTests(TestCase):
"""
nonexistent_app_error = "No installed app with label 'nonexistent_app'."
did_you_mean_auth_error = (
"No installed app with label 'django.contrib.auth'. Did you mean "
"'auth'?"
"No installed app with label 'django.contrib.auth'. Did you mean 'auth'?"
)
def test_makemigrations_nonexistent_app_label(self):

View File

@ -86,8 +86,7 @@ class OperationWriterTests(SimpleTestCase):
self.assertEqual(imports, {'import custom_migration_operations.operations'})
self.assertEqual(
buff,
'custom_migration_operations.operations.TestOperation(\n'
'),'
'custom_migration_operations.operations.TestOperation(\n),',
)
def test_args_signature(self):

View File

@ -129,8 +129,7 @@ class TestFormField(SimpleTestCase):
class TestSerialization(SimpleTestCase):
test_data = (
'[{"fields": {"value": %s}, '
'"model": "model_fields.jsonmodel", "pk": null}]'
'[{"fields": {"value": %s}, "model": "model_fields.jsonmodel", "pk": null}]'
)
test_values = (
# (Python value, serialized value),

View File

@ -411,8 +411,7 @@ class PrepopulatedFieldsCheckTests(CheckTestCase):
self.assertIsInvalid(
TestModelAdmin, ValidationTestModel,
'The value of \'prepopulated_fields["slug"]\' must be a list '
'or tuple.',
'The value of \'prepopulated_fields["slug"]\' must be a list or tuple.',
'admin.E029'
)

View File

@ -53,8 +53,7 @@ class AddIndexConcurrentlyTests(OperationTestBase):
operation = AddIndexConcurrently('Pony', index)
self.assertEqual(
operation.describe(),
'Concurrently create index pony_pink_idx on field(s) pink of '
'model Pony'
'Concurrently create index pony_pink_idx on field(s) pink of model Pony',
)
operation.state_forwards(self.app_label, new_state)
self.assertEqual(len(new_state.models[self.app_label, 'pony'].options['indexes']), 1)

View File

@ -1352,9 +1352,10 @@ class MultiDbTests(TestCase):
books = "".join("%s (%s)\n" %
(b.title, ", ".join(a.name for a in b.first_time_authors.all()))
for b in B.prefetch_related('first_time_authors'))
self.assertEqual(books,
"Poems (Charlotte Bronte)\n"
"Sense and Sensibility (Jane Austen)\n")
self.assertEqual(
books,
"Poems (Charlotte Bronte)\nSense and Sensibility (Jane Austen)\n",
)
def test_using_is_honored_inheritance(self):
B = BookWithYear.objects.using('other')
@ -1391,19 +1392,20 @@ class MultiDbTests(TestCase):
books = "".join("%s (%s)\n" %
(b.title, ", ".join(a.name for a in b.first_time_authors.all()))
for b in B.prefetch_related(prefetch))
self.assertEqual(books,
"Poems (Charlotte Bronte)\n"
"Sense and Sensibility (Jane Austen)\n")
self.assertEqual(
books,
"Poems (Charlotte Bronte)\nSense and Sensibility (Jane Austen)\n",
)
# Explicit using on the same db.
with self.assertNumQueries(2, using='other'):
prefetch = Prefetch('first_time_authors', queryset=Author.objects.using('other'))
books = "".join("%s (%s)\n" %
(b.title, ", ".join(a.name for a in b.first_time_authors.all()))
for b in B.prefetch_related(prefetch))
self.assertEqual(books,
"Poems (Charlotte Bronte)\n"
"Sense and Sensibility (Jane Austen)\n")
self.assertEqual(
books,
"Poems (Charlotte Bronte)\nSense and Sensibility (Jane Austen)\n",
)
# Explicit using on a different db.
with self.assertNumQueries(1, using='default'), self.assertNumQueries(1, using='other'):

View File

@ -3149,8 +3149,7 @@ class QuerySetExceptionTests(SimpleTestCase):
def test_invalid_order_by(self):
msg = (
"Cannot resolve keyword '*' into field. Choices are: created, id, "
"name"
"Cannot resolve keyword '*' into field. Choices are: created, id, name"
)
with self.assertRaisesMessage(FieldError, msg):
Article.objects.order_by('*')

View File

@ -41,7 +41,6 @@ class DataUploadMaxMemorySizeMultipartPostTests(SimpleTestCase):
'',
'value',
'--boundary--'
''
]))
self.request = WSGIRequest({
'REQUEST_METHOD': 'POST',
@ -70,7 +69,6 @@ class DataUploadMaxMemorySizeMultipartPostTests(SimpleTestCase):
'',
'value',
'--boundary--'
''
]))
request = WSGIRequest({
'REQUEST_METHOD': 'POST',
@ -143,7 +141,6 @@ class DataUploadMaxNumberOfFieldsMultipartPost(SimpleTestCase):
'',
'value2',
'--boundary--'
''
]))
self.request = WSGIRequest({
'REQUEST_METHOD': 'POST',

View File

@ -316,7 +316,7 @@ class RequestsTests(SimpleTestCase):
'',
'value',
'--boundary--'
'']))
]))
request = WSGIRequest({
'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': 'multipart/form-data; boundary=boundary',
@ -341,7 +341,7 @@ class RequestsTests(SimpleTestCase):
b'',
b'value',
b'--boundary--'
b''])
])
payload = FakePayload(payload_data)
request = WSGIRequest({
'REQUEST_METHOD': 'POST',
@ -366,7 +366,7 @@ class RequestsTests(SimpleTestCase):
'',
'value',
'--boundary--'
'']))
]))
request = WSGIRequest({
'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': 'multipart/form-data; boundary=boundary',

View File

@ -292,8 +292,7 @@ def setup_run_tests(verbosity, start_at, start_after, test_labels=None):
# Force declaring available_apps in TransactionTestCase for faster tests.
def no_available_apps(self):
raise Exception(
'Please define available_apps in TransactionTestCase and its '
'subclasses.'
'Please define available_apps in TransactionTestCase and its subclasses.'
)
TransactionTestCase.available_apps = property(no_available_apps)
TestCase.available_apps = None

View File

@ -11,10 +11,7 @@ from django.test.utils import captured_stdin, captured_stdout
class ShellCommandTestCase(SimpleTestCase):
script_globals = 'print("__name__" in globals())'
script_with_inline_function = (
'import django\n'
'def f():\n'
' print(django.__version__)\n'
'f()'
'import django\ndef f():\n print(django.__version__)\nf()'
)
def test_command_option(self):

View File

@ -60,14 +60,13 @@ class FunctionTests(SimpleTestCase):
self.assertEqual(
urlizetrunc(uri, 1),
'<a href="http://31characteruri.com/test/"'
' rel="nofollow">…</a>',
'<a href="http://31characteruri.com/test/" rel="nofollow">…</a>',
)
def test_overtruncate(self):
self.assertEqual(
urlizetrunc('http://short.com/', 20), '<a href='
'"http://short.com/" rel="nofollow">http://short.com/</a>',
urlizetrunc('http://short.com/', 20),
'<a href="http://short.com/" rel="nofollow">http://short.com/</a>',
)
def test_query_string(self):

View File

@ -36,7 +36,7 @@ class I18nGetLanguageInfoTagTests(SimpleTestCase):
output = self.engine.render_to_string('i18n38')
self.assertEqual(output, 'de: German/Deutsch/německy bidi=False')
@setup({'template': '{% load i18n %}''{% get_language_info %}'})
@setup({'template': '{% load i18n %}{% get_language_info %}'})
def test_no_for_as(self):
msg = "'get_language_info' requires 'for string as variable' (got [])"
with self.assertRaisesMessage(TemplateSyntaxError, msg):

View File

@ -104,8 +104,7 @@ class SimpleTagTests(TagTestCase):
(
"'simple_keyword_only_param' received multiple values for "
"keyword argument 'kwarg'",
'{% load custom %}{% simple_keyword_only_param kwarg=42 '
'kwarg=37 %}',
'{% load custom %}{% simple_keyword_only_param kwarg=42 kwarg=37 %}',
),
(
"'simple_keyword_only_default' received multiple values for "

View File

@ -445,8 +445,7 @@ class DiscoverRunnerTests(SimpleTestCase):
def test_pdb_with_parallel(self):
msg = (
'You cannot use --pdb with parallel tests; pass --parallel=1 to '
'use it.'
'You cannot use --pdb with parallel tests; pass --parallel=1 to use it.'
)
with self.assertRaisesMessage(ValueError, msg):
DiscoverRunner(pdb=True, parallel=2)

View File

@ -437,8 +437,7 @@ class AssertNumQueriesContextManagerTests(TestCase):
def test_failure(self):
msg = (
'1 != 2 : 1 queries executed, 2 expected\nCaptured queries were:\n'
'1.'
'1 != 2 : 1 queries executed, 2 expected\nCaptured queries were:\n1.'
)
with self.assertRaisesMessage(AssertionError, msg):
with self.assertNumQueries(2):
@ -927,11 +926,7 @@ class HTMLEqualTests(SimpleTestCase):
parse_html('</p>')
def test_escaped_html_errors(self):
msg = (
'<p>\n<foo>\n</p>'
' != '
'<p>\n&lt;foo&gt;\n</p>\n'
)
msg = '<p>\n<foo>\n</p> != <p>\n&lt;foo&gt;\n</p>\n'
with self.assertRaisesMessage(AssertionError, msg):
self.assertHTMLEqual('<p><foo></p>', '<p>&lt;foo&gt;</p>')
with self.assertRaisesMessage(AssertionError, msg):

View File

@ -133,8 +133,7 @@ VALID_URLS = [
'ample.com',
'http://example.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'
'aaaaa.com',
'http://example.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'
'aaaaa',
'http://example.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',
'http://aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaa'
'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaa'
'aaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaa'

View File

@ -690,14 +690,12 @@ class ExceptionReporterTests(SimpleTestCase):
html,
)
self.assertIn(
'"generated", line 2, in funcName\n'
' &lt;source code not available&gt;',
'"generated", line 2, in funcName\n &lt;source code not available&gt;',
html,
)
text = reporter.get_traceback_text()
self.assertIn(
'"generated", line 2, in funcName\n'
' <source code not available>',
'"generated", line 2, in funcName\n <source code not available>',
text,
)
@ -731,14 +729,12 @@ class ExceptionReporterTests(SimpleTestCase):
html,
)
self.assertIn(
'"generated", line 2, in funcName\n'
' &lt;source code not available&gt;',
'"generated", line 2, in funcName\n &lt;source code not available&gt;',
html,
)
text = reporter.get_traceback_text()
self.assertIn(
'"generated", line 2, in funcName\n'
' <source code not available>',
'"generated", line 2, in funcName\n <source code not available>',
text,
)