[4.0.x] Refs #33476 -- Refactored problematic code before reformatting by Black.

In these cases Black produces unexpected results, e.g.

def make_random_password(
    self,
    length=10,
    allowed_chars='abcdefghjkmnpqrstuvwxyz' 'ABCDEFGHJKLMNPQRSTUVWXYZ' '23456789',
):

or

cursor.execute("""
SELECT ...
""",
    [table_name],
)

Backport of c5cd878382 from main.
This commit is contained in:
Mariusz Felisiak 2022-02-03 11:20:46 +01:00
parent 76c80d96f3
commit d55a1e5809
52 changed files with 229 additions and 209 deletions

View File

@ -29,10 +29,11 @@ class BaseUserManager(models.Manager):
email = email_name + '@' + domain_part.lower() email = email_name + '@' + domain_part.lower()
return email return email
def make_random_password(self, length=10, def make_random_password(
allowed_chars='abcdefghjkmnpqrstuvwxyz' self,
'ABCDEFGHJKLMNPQRSTUVWXYZ' length=10,
'23456789'): allowed_chars='abcdefghjkmnpqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ23456789',
):
""" """
Generate a random password with the given length and given Generate a random password with the given length and given
allowed_chars. The default value of allowed_chars does not have "I" or allowed_chars. The default value of allowed_chars does not have "I" or

View File

@ -90,8 +90,9 @@ def get_hashers():
hasher_cls = import_string(hasher_path) hasher_cls = import_string(hasher_path)
hasher = hasher_cls() hasher = hasher_cls()
if not getattr(hasher, 'algorithm'): if not getattr(hasher, 'algorithm'):
raise ImproperlyConfigured("hasher doesn't specify an " raise ImproperlyConfigured(
"algorithm name: %s" % hasher_path) "hasher doesn't specify an algorithm name: %s" % hasher_path
)
hashers.append(hasher) hashers.append(hasher)
return hashers return hashers

View File

@ -36,8 +36,9 @@ def add_srs_entry(srs, auth_name='EPSG', auth_srid=None, ref_sys_name=None,
connection = connections[database] connection = connections[database]
if not hasattr(connection.ops, 'spatial_version'): if not hasattr(connection.ops, 'spatial_version'):
raise Exception('The `add_srs_entry` utility only works ' raise Exception(
'with spatial backends.') 'The `add_srs_entry` utility only works with spatial backends.'
)
if not connection.features.supports_add_srs_entry: if not connection.features.supports_add_srs_entry:
raise Exception('This utility does not support your database backend.') raise Exception('This utility does not support your database backend.')
SpatialRefSys = connection.ops.spatial_ref_sys() SpatialRefSys = connection.ops.spatial_ref_sys()

View File

@ -50,8 +50,9 @@ def check_settings(base_url=None):
"You're using the staticfiles app " "You're using the staticfiles app "
"without having set the required STATIC_URL setting.") "without having set the required STATIC_URL setting.")
if settings.MEDIA_URL == base_url: if settings.MEDIA_URL == base_url:
raise ImproperlyConfigured("The MEDIA_URL and STATIC_URL " raise ImproperlyConfigured(
"settings must have different values") "The MEDIA_URL and STATIC_URL settings must have different values"
)
if (settings.DEBUG and settings.MEDIA_URL and settings.STATIC_URL and if (settings.DEBUG and settings.MEDIA_URL and settings.STATIC_URL and
settings.MEDIA_URL.startswith(settings.STATIC_URL)): settings.MEDIA_URL.startswith(settings.STATIC_URL)):
raise ImproperlyConfigured( raise ImproperlyConfigured(
@ -59,5 +60,6 @@ def check_settings(base_url=None):
) )
if ((settings.MEDIA_ROOT and settings.STATIC_ROOT) and if ((settings.MEDIA_ROOT and settings.STATIC_ROOT) and
(settings.MEDIA_ROOT == settings.STATIC_ROOT)): (settings.MEDIA_ROOT == settings.STATIC_ROOT)):
raise ImproperlyConfigured("The MEDIA_ROOT and STATIC_ROOT " raise ImproperlyConfigured(
"settings must have different values") "The MEDIA_ROOT and STATIC_ROOT settings must have different values"
)

View File

@ -562,8 +562,8 @@ class AppCommand(BaseCommand):
corresponding to an application label given on the command line. corresponding to an application label given on the command line.
""" """
raise NotImplementedError( raise NotImplementedError(
"Subclasses of AppCommand must provide" "Subclasses of AppCommand must provide a handle_app_config() method."
"a handle_app_config() method.") )
class LabelCommand(BaseCommand): class LabelCommand(BaseCommand):

View File

@ -15,8 +15,7 @@ try:
import MySQLdb as Database import MySQLdb as Database
except ImportError as err: except ImportError as err:
raise ImproperlyConfigured( raise ImproperlyConfigured(
'Error loading MySQLdb module.\n' 'Error loading MySQLdb module.\nDid you install mysqlclient?'
'Did you install mysqlclient?'
) from err ) from err
from MySQLdb.constants import CLIENT, FIELD_TYPE from MySQLdb.constants import CLIENT, FIELD_TYPE

View File

@ -79,22 +79,28 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
if self.connection.mysql_is_mariadb and self.connection.features.can_introspect_json_field: if self.connection.mysql_is_mariadb and self.connection.features.can_introspect_json_field:
# JSON data type is an alias for LONGTEXT in MariaDB, select # JSON data type is an alias for LONGTEXT in MariaDB, select
# JSON_VALID() constraints to introspect JSONField. # JSON_VALID() constraints to introspect JSONField.
cursor.execute(""" cursor.execute(
"""
SELECT c.constraint_name AS column_name SELECT c.constraint_name AS column_name
FROM information_schema.check_constraints AS c FROM information_schema.check_constraints AS c
WHERE WHERE
c.table_name = %s AND c.table_name = %s AND
LOWER(c.check_clause) = 'json_valid(`' + LOWER(c.constraint_name) + '`)' AND LOWER(c.check_clause) = 'json_valid(`' + LOWER(c.constraint_name) + '`)' AND
c.constraint_schema = DATABASE() c.constraint_schema = DATABASE()
""", [table_name]) """,
[table_name],
)
json_constraints = {row[0] for row in cursor.fetchall()} json_constraints = {row[0] for row in cursor.fetchall()}
# A default collation for the given table. # A default collation for the given table.
cursor.execute(""" cursor.execute(
"""
SELECT table_collation SELECT table_collation
FROM information_schema.tables FROM information_schema.tables
WHERE table_schema = DATABASE() WHERE table_schema = DATABASE()
AND table_name = %s AND table_name = %s
""", [table_name]) """,
[table_name],
)
row = cursor.fetchone() row = cursor.fetchone()
default_column_collation = row[0] if row else '' default_column_collation = row[0] if row else ''
# information_schema database gives more accurate results for some figures: # information_schema database gives more accurate results for some figures:
@ -102,7 +108,8 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
# not visible length (#5725) # not visible length (#5725)
# - precision and scale (for decimal fields) (#5014) # - precision and scale (for decimal fields) (#5014)
# - auto_increment is not available in cursor.description # - auto_increment is not available in cursor.description
cursor.execute(""" cursor.execute(
"""
SELECT SELECT
column_name, data_type, character_maximum_length, column_name, data_type, character_maximum_length,
numeric_precision, numeric_scale, extra, column_default, numeric_precision, numeric_scale, extra, column_default,
@ -116,7 +123,9 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
END AS is_unsigned END AS is_unsigned
FROM information_schema.columns FROM information_schema.columns
WHERE table_name = %s AND table_schema = DATABASE() WHERE table_name = %s AND table_schema = DATABASE()
""", [default_column_collation, table_name]) """,
[default_column_collation, table_name],
)
field_info = {line[0]: InfoLine(*line) for line in cursor.fetchall()} field_info = {line[0]: InfoLine(*line) for line in cursor.fetchall()}
cursor.execute("SELECT * FROM %s LIMIT 1" % self.connection.ops.quote_name(table_name)) cursor.execute("SELECT * FROM %s LIMIT 1" % self.connection.ops.quote_name(table_name))
@ -165,13 +174,17 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
for all key columns in the given table. for all key columns in the given table.
""" """
key_columns = [] key_columns = []
cursor.execute(""" cursor.execute(
"""
SELECT column_name, referenced_table_name, referenced_column_name SELECT column_name, referenced_table_name, referenced_column_name
FROM information_schema.key_column_usage FROM information_schema.key_column_usage
WHERE table_name = %s WHERE table_name = %s
AND table_schema = DATABASE() AND table_schema = DATABASE()
AND referenced_table_name IS NOT NULL AND referenced_table_name IS NOT NULL
AND referenced_column_name IS NOT NULL""", [table_name]) AND referenced_column_name IS NOT NULL
""",
[table_name],
)
key_columns.extend(cursor.fetchall()) key_columns.extend(cursor.fetchall())
return key_columns return key_columns
@ -180,13 +193,16 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
Retrieve the storage engine for a given table. Return the default Retrieve the storage engine for a given table. Return the default
storage engine if the table doesn't exist. storage engine if the table doesn't exist.
""" """
cursor.execute(""" cursor.execute(
"""
SELECT engine SELECT engine
FROM information_schema.tables FROM information_schema.tables
WHERE WHERE
table_name = %s AND table_name = %s AND
table_schema = DATABASE() table_schema = DATABASE()
""", [table_name]) """,
[table_name],
)
result = cursor.fetchone() result = cursor.fetchone()
if not result: if not result:
return self.connection.features._mysql_storage_engine return self.connection.features._mysql_storage_engine

View File

@ -231,8 +231,9 @@ class DatabaseOperations(BaseDatabaseOperations):
# Zero in AUTO_INCREMENT field does not work without the # Zero in AUTO_INCREMENT field does not work without the
# NO_AUTO_VALUE_ON_ZERO SQL mode. # NO_AUTO_VALUE_ON_ZERO SQL mode.
if value == 0 and not self.connection.features.allows_auto_pk_0: if value == 0 and not self.connection.features.allows_auto_pk_0:
raise ValueError('The database backend does not accept 0 as a ' raise ValueError(
'value for AutoField.') 'The database backend does not accept 0 as a value for AutoField.'
)
return value return value
def adapt_datetimefield_value(self, value): def adapt_datetimefield_value(self, value):

View File

@ -93,7 +93,8 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
interface. interface.
""" """
# user_tab_columns gives data default for columns # user_tab_columns gives data default for columns
cursor.execute(""" cursor.execute(
"""
SELECT SELECT
user_tab_cols.column_name, user_tab_cols.column_name,
user_tab_cols.data_default, user_tab_cols.data_default,
@ -126,7 +127,9 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
LEFT OUTER JOIN LEFT OUTER JOIN
user_tables ON user_tables.table_name = user_tab_cols.table_name user_tables ON user_tables.table_name = user_tab_cols.table_name
WHERE user_tab_cols.table_name = UPPER(%s) WHERE user_tab_cols.table_name = UPPER(%s)
""", [table_name]) """,
[table_name],
)
field_map = { field_map = {
column: (internal_size, default if default != 'NULL' else None, collation, is_autofield, is_json) column: (internal_size, default if default != 'NULL' else None, collation, is_autofield, is_json)
for column, default, collation, internal_size, is_autofield, is_json in cursor.fetchall() for column, default, collation, internal_size, is_autofield, is_json in cursor.fetchall()
@ -151,7 +154,8 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
return name.lower() return name.lower()
def get_sequences(self, cursor, table_name, table_fields=()): def get_sequences(self, cursor, table_name, table_fields=()):
cursor.execute(""" cursor.execute(
"""
SELECT SELECT
user_tab_identity_cols.sequence_name, user_tab_identity_cols.sequence_name,
user_tab_identity_cols.column_name user_tab_identity_cols.column_name
@ -165,7 +169,9 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
AND cols.column_name = user_tab_identity_cols.column_name AND cols.column_name = user_tab_identity_cols.column_name
AND user_constraints.constraint_type = 'P' AND user_constraints.constraint_type = 'P'
AND user_tab_identity_cols.table_name = UPPER(%s) AND user_tab_identity_cols.table_name = UPPER(%s)
""", [table_name]) """,
[table_name],
)
# Oracle allows only one identity column per table. # Oracle allows only one identity column per table.
row = cursor.fetchone() row = cursor.fetchone()
if row: if row:
@ -217,7 +223,8 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
] ]
def get_primary_key_column(self, cursor, table_name): def get_primary_key_column(self, cursor, table_name):
cursor.execute(""" cursor.execute(
"""
SELECT SELECT
cols.column_name cols.column_name
FROM FROM
@ -228,7 +235,9 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
user_constraints.constraint_type = 'P' AND user_constraints.constraint_type = 'P' AND
user_constraints.table_name = UPPER(%s) AND user_constraints.table_name = UPPER(%s) AND
cols.position = 1 cols.position = 1
""", [table_name]) """,
[table_name],
)
row = cursor.fetchone() row = cursor.fetchone()
return self.identifier_converter(row[0]) if row else None return self.identifier_converter(row[0]) if row else None
@ -239,7 +248,8 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
""" """
constraints = {} constraints = {}
# Loop over the constraints, getting PKs, uniques, and checks # Loop over the constraints, getting PKs, uniques, and checks
cursor.execute(""" cursor.execute(
"""
SELECT SELECT
user_constraints.constraint_name, user_constraints.constraint_name,
LISTAGG(LOWER(cols.column_name), ',') WITHIN GROUP (ORDER BY cols.position), LISTAGG(LOWER(cols.column_name), ',') WITHIN GROUP (ORDER BY cols.position),
@ -263,7 +273,9 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
user_constraints.constraint_type = ANY('P', 'U', 'C') user_constraints.constraint_type = ANY('P', 'U', 'C')
AND user_constraints.table_name = UPPER(%s) AND user_constraints.table_name = UPPER(%s)
GROUP BY user_constraints.constraint_name, user_constraints.constraint_type GROUP BY user_constraints.constraint_name, user_constraints.constraint_type
""", [table_name]) """,
[table_name],
)
for constraint, columns, pk, unique, check in cursor.fetchall(): for constraint, columns, pk, unique, check in cursor.fetchall():
constraint = self.identifier_converter(constraint) constraint = self.identifier_converter(constraint)
constraints[constraint] = { constraints[constraint] = {
@ -275,7 +287,8 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
'index': unique, # All uniques come with an index 'index': unique, # All uniques come with an index
} }
# Foreign key constraints # Foreign key constraints
cursor.execute(""" cursor.execute(
"""
SELECT SELECT
cons.constraint_name, cons.constraint_name,
LISTAGG(LOWER(cols.column_name), ',') WITHIN GROUP (ORDER BY cols.position), LISTAGG(LOWER(cols.column_name), ',') WITHIN GROUP (ORDER BY cols.position),
@ -291,7 +304,9 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
cons.constraint_type = 'R' AND cons.constraint_type = 'R' AND
cons.table_name = UPPER(%s) cons.table_name = UPPER(%s)
GROUP BY cons.constraint_name, rcols.table_name, rcols.column_name GROUP BY cons.constraint_name, rcols.table_name, rcols.column_name
""", [table_name]) """,
[table_name],
)
for constraint, columns, other_table, other_column in cursor.fetchall(): for constraint, columns, other_table, other_column in cursor.fetchall():
constraint = self.identifier_converter(constraint) constraint = self.identifier_converter(constraint)
constraints[constraint] = { constraints[constraint] = {
@ -303,7 +318,8 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
'columns': columns.split(','), 'columns': columns.split(','),
} }
# Now get indexes # Now get indexes
cursor.execute(""" cursor.execute(
"""
SELECT SELECT
ind.index_name, ind.index_name,
LOWER(ind.index_type), LOWER(ind.index_type),
@ -320,7 +336,9 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
WHERE ind.index_name = cons.index_name WHERE ind.index_name = cons.index_name
) AND cols.index_name = ind.index_name ) AND cols.index_name = ind.index_name
GROUP BY ind.index_name, ind.index_type, ind.uniqueness GROUP BY ind.index_name, ind.index_type, ind.uniqueness
""", [table_name]) """,
[table_name],
)
for constraint, type_, unique, columns, orders in cursor.fetchall(): for constraint, type_, unique, columns, orders in cursor.fetchall():
constraint = self.identifier_converter(constraint) constraint = self.identifier_converter(constraint)
constraints[constraint] = { constraints[constraint] = {

View File

@ -372,7 +372,8 @@ END;
def __foreign_key_constraints(self, table_name, recursive): def __foreign_key_constraints(self, table_name, recursive):
with self.connection.cursor() as cursor: with self.connection.cursor() as cursor:
if recursive: if recursive:
cursor.execute(""" cursor.execute(
"""
SELECT SELECT
user_tables.table_name, rcons.constraint_name user_tables.table_name, rcons.constraint_name
FROM FROM
@ -389,9 +390,12 @@ END;
user_tables.table_name, rcons.constraint_name user_tables.table_name, rcons.constraint_name
HAVING user_tables.table_name != UPPER(%s) HAVING user_tables.table_name != UPPER(%s)
ORDER BY MAX(level) DESC ORDER BY MAX(level) DESC
""", (table_name, table_name)) """,
(table_name, table_name),
)
else: else:
cursor.execute(""" cursor.execute(
"""
SELECT SELECT
cons.table_name, cons.constraint_name cons.table_name, cons.constraint_name
FROM FROM
@ -399,7 +403,9 @@ END;
WHERE WHERE
cons.constraint_type = 'R' cons.constraint_type = 'R'
AND cons.table_name = UPPER(%s) AND cons.table_name = UPPER(%s)
""", (table_name,)) """,
(table_name,),
)
return cursor.fetchall() return cursor.fetchall()
@cached_property @cached_property

View File

@ -182,13 +182,16 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
def _is_identity_column(self, table_name, column_name): def _is_identity_column(self, table_name, column_name):
with self.connection.cursor() as cursor: with self.connection.cursor() as cursor:
cursor.execute(""" cursor.execute(
"""
SELECT SELECT
CASE WHEN identity_column = 'YES' THEN 1 ELSE 0 END CASE WHEN identity_column = 'YES' THEN 1 ELSE 0 END
FROM user_tab_cols FROM user_tab_cols
WHERE table_name = %s AND WHERE table_name = %s AND
column_name = %s column_name = %s
""", [self.normalize_name(table_name), self.normalize_name(column_name)]) """,
[self.normalize_name(table_name), self.normalize_name(column_name)],
)
row = cursor.fetchone() row = cursor.fetchone()
return row[0] if row else False return row[0] if row else False
@ -200,9 +203,12 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
def _get_default_collation(self, table_name): def _get_default_collation(self, table_name):
with self.connection.cursor() as cursor: with self.connection.cursor() as cursor:
cursor.execute(""" cursor.execute(
"""
SELECT default_collation FROM user_tables WHERE table_name = %s SELECT default_collation FROM user_tables WHERE table_name = %s
""", [self.normalize_name(table_name)]) """,
[self.normalize_name(table_name)],
)
return cursor.fetchone()[0] return cursor.fetchone()[0]
def _alter_column_collation_sql(self, model, new_field, new_type, new_collation): def _alter_column_collation_sql(self, model, new_field, new_type, new_collation):

View File

@ -42,8 +42,7 @@ def resolve_relation(model, app_label=None, model_name=None):
return app_label, model_name.lower() return app_label, model_name.lower()
if app_label is None: if app_label is None:
raise TypeError( raise TypeError(
'app_label must be provided to resolve unscoped model ' 'app_label must be provided to resolve unscoped model relationships.'
'relationships.'
) )
return app_label, model.lower() return app_label, model.lower()
return model._meta.app_label, model._meta.model_name return model._meta.app_label, model._meta.model_name

View File

@ -40,8 +40,7 @@ class CheckConstraint(BaseConstraint):
self.check = check self.check = check
if not getattr(check, 'conditional', False): if not getattr(check, 'conditional', False):
raise TypeError( raise TypeError(
'CheckConstraint.check must be a Q instance or boolean ' 'CheckConstraint.check must be a Q instance or boolean expression.'
'expression.'
) )
super().__init__(name) super().__init__(name)

View File

@ -101,8 +101,7 @@ class Field(RegisterLookupMixin):
'invalid_choice': _('Value %(value)r is not a valid choice.'), 'invalid_choice': _('Value %(value)r is not a valid choice.'),
'null': _('This field cannot be null.'), 'null': _('This field cannot be null.'),
'blank': _('This field cannot be blank.'), 'blank': _('This field cannot be blank.'),
'unique': _('%(model_name)s with this %(field_label)s ' 'unique': _('%(model_name)s with this %(field_label)s already exists.'),
'already exists.'),
# Translators: The 'lookup_type' is one of 'date', 'year' or 'month'. # Translators: The 'lookup_type' is one of 'date', 'year' or 'month'.
# Eg: "Title must be unique for pub_date year" # Eg: "Title must be unique for pub_date year"
'unique_for_date': _("%(field_label)s must be unique for " 'unique_for_date': _("%(field_label)s must be unique for "

View File

@ -36,8 +36,7 @@ class Index:
raise ValueError('Index.opclasses must be a list or tuple.') raise ValueError('Index.opclasses must be a list or tuple.')
if not expressions and not fields: if not expressions and not fields:
raise ValueError( raise ValueError(
'At least one field or expression is required to define an ' 'At least one field or expression is required to define an index.'
'index.'
) )
if expressions and fields: if expressions and fields:
raise ValueError( raise ValueError(

View File

@ -541,8 +541,7 @@ class IsNull(BuiltinLookup):
def as_sql(self, compiler, connection): def as_sql(self, compiler, connection):
if not isinstance(self.rhs, bool): if not isinstance(self.rhs, bool):
raise ValueError( raise ValueError(
'The QuerySet value for an isnull lookup must be True or ' 'The QuerySet value for an isnull lookup must be True or False.'
'False.'
) )
sql, params = compiler.compile(self.lhs) sql, params = compiler.compile(self.lhs)
if self.rhs: if self.rhs:

View File

@ -820,8 +820,7 @@ class QuerySet:
self._not_support_combined_queries('contains') self._not_support_combined_queries('contains')
if self._fields is not None: if self._fields is not None:
raise TypeError( raise TypeError(
'Cannot call QuerySet.contains() after .values() or ' 'Cannot call QuerySet.contains() after .values() or .values_list().'
'.values_list().'
) )
try: try:
if obj._meta.concrete_model != self.model._meta.concrete_model: if obj._meta.concrete_model != self.model._meta.concrete_model:
@ -1611,8 +1610,7 @@ class Prefetch:
) )
): ):
raise ValueError( raise ValueError(
'Prefetch querysets cannot use raw(), values(), and ' 'Prefetch querysets cannot use raw(), values(), and values_list().'
'values_list().'
) )
if to_attr: if to_attr:
self.prefetch_to = LOOKUP_SEP.join(lookup.split(LOOKUP_SEP)[:-1] + [to_attr]) self.prefetch_to = LOOKUP_SEP.join(lookup.split(LOOKUP_SEP)[:-1] + [to_attr])

View File

@ -1706,8 +1706,7 @@ class Query(BaseExpression):
for alias in self._gen_col_aliases([annotation]): for alias in self._gen_col_aliases([annotation]):
if isinstance(self.alias_map[alias], Join): if isinstance(self.alias_map[alias], Join):
raise FieldError( raise FieldError(
'Joined field references are not permitted in ' 'Joined field references are not permitted in this query'
'this query'
) )
if summarize: if summarize:
# Summarize currently means we are doing an aggregate() query # Summarize currently means we are doing an aggregate() query
@ -1734,8 +1733,9 @@ class Query(BaseExpression):
if not allow_joins and len(join_list) > 1: if not allow_joins and len(join_list) > 1:
raise FieldError('Joined field references are not permitted in this query') raise FieldError('Joined field references are not permitted in this query')
if len(targets) > 1: if len(targets) > 1:
raise FieldError("Referencing multicolumn fields with F() objects " raise FieldError(
"isn't supported") "Referencing multicolumn fields with F() objects isn't supported"
)
# Verify that the last lookup in name is a field or a transform: # Verify that the last lookup in name is a field or a transform:
# transform_function() raises FieldError if not. # transform_function() raises FieldError if not.
transform = join_info.transform_function(targets[0], final_alias) transform = join_info.transform_function(targets[0], final_alias)

View File

@ -1211,8 +1211,9 @@ class ModelChoiceField(ChoiceField):
# This class is a subclass of ChoiceField for purity, but it doesn't # This class is a subclass of ChoiceField for purity, but it doesn't
# actually use any of ChoiceField's implementation. # actually use any of ChoiceField's implementation.
default_error_messages = { default_error_messages = {
'invalid_choice': _('Select a valid choice. That choice is not one of' 'invalid_choice': _(
' the available choices.'), 'Select a valid choice. That choice is not one of the available choices.'
),
} }
iterator = ModelChoiceIterator iterator = ModelChoiceIterator
@ -1331,8 +1332,9 @@ class ModelMultipleChoiceField(ModelChoiceField):
hidden_widget = MultipleHiddenInput hidden_widget = MultipleHiddenInput
default_error_messages = { default_error_messages = {
'invalid_list': _('Enter a list of values.'), 'invalid_list': _('Enter a list of values.'),
'invalid_choice': _('Select a valid choice. %(value)s is not one of the' 'invalid_choice': _(
' available choices.'), 'Select a valid choice. %(value)s is not one of the available choices.'
),
'invalid_pk_value': _('“%(pk)s” is not a valid value.') 'invalid_pk_value': _('“%(pk)s” is not a valid value.')
} }

View File

@ -38,8 +38,8 @@ class BaseEngine:
This method is optional. This method is optional.
""" """
raise NotImplementedError( raise NotImplementedError(
"subclasses of BaseEngine should provide " "subclasses of BaseEngine should provide a from_string() method"
"a from_string() method") )
def get_template(self, template_name): def get_template(self, template_name):
""" """
@ -48,8 +48,8 @@ class BaseEngine:
Raise TemplateDoesNotExist if no such template exists. Raise TemplateDoesNotExist if no such template exists.
""" """
raise NotImplementedError( raise NotImplementedError(
"subclasses of BaseEngine must provide " "subclasses of BaseEngine must provide a get_template() method"
"a get_template() method") )
# Utility methods: they are provided to minimize code duplication and # Utility methods: they are provided to minimize code duplication and
# security issues in third-party backends. # security issues in third-party backends.

View File

@ -672,8 +672,9 @@ class FilterExpression:
except VariableDoesNotExist: except VariableDoesNotExist:
var_obj = None var_obj = None
elif var is None: elif var is None:
raise TemplateSyntaxError("Could not find variable at " raise TemplateSyntaxError(
"start of %s." % token) "Could not find variable at start of %s." % token
)
else: else:
var_obj = Variable(var) var_obj = Variable(var)
else: else:
@ -878,9 +879,10 @@ class Variable:
ValueError, # invalid literal for int() ValueError, # invalid literal for int()
KeyError, # current is a dict without `int(bit)` key KeyError, # current is a dict without `int(bit)` key
TypeError): # unsubscriptable object TypeError): # unsubscriptable object
raise VariableDoesNotExist("Failed lookup for key " raise VariableDoesNotExist(
"[%s] in %r", "Failed lookup for key [%s] in %r",
(bit, current)) # missing attribute (bit, current),
) # missing attribute
if callable(current): if callable(current):
if getattr(current, 'do_not_call_in_templates', False): if getattr(current, 'do_not_call_in_templates', False):
pass pass

View File

@ -793,8 +793,9 @@ def do_for(parser, token):
""" """
bits = token.split_contents() bits = token.split_contents()
if len(bits) < 4: if len(bits) < 4:
raise TemplateSyntaxError("'for' statements should have at least four" raise TemplateSyntaxError(
" words: %s" % token.contents) "'for' statements should have at least four words: %s" % token.contents
)
is_reversed = bits[-1] == 'reversed' is_reversed = bits[-1] == 'reversed'
in_index = -3 if is_reversed else -2 in_index = -3 if is_reversed else -2
@ -806,8 +807,9 @@ def do_for(parser, token):
loopvars = re.split(r' *, *', ' '.join(bits[1:in_index])) loopvars = re.split(r' *, *', ' '.join(bits[1:in_index]))
for var in loopvars: for var in loopvars:
if not var or not invalid_chars.isdisjoint(var): if not var or not invalid_chars.isdisjoint(var):
raise TemplateSyntaxError("'for' tag received an invalid argument:" raise TemplateSyntaxError(
" %s" % token.contents) "'for' tag received an invalid argument: %s" % token.contents
)
sequence = parser.compile_filter(bits[in_index + 1]) sequence = parser.compile_filter(bits[in_index + 1])
nodelist_loop = parser.parse(('empty', 'endfor',)) nodelist_loop = parser.parse(('empty', 'endfor',))
@ -1160,8 +1162,9 @@ def regroup(parser, token):
if bits[2] != 'by': if bits[2] != 'by':
raise TemplateSyntaxError("second argument to 'regroup' tag must be 'by'") raise TemplateSyntaxError("second argument to 'regroup' tag must be 'by'")
if bits[4] != 'as': if bits[4] != 'as':
raise TemplateSyntaxError("next-to-last argument to 'regroup' tag must" raise TemplateSyntaxError(
" be 'as'") "next-to-last argument to 'regroup' tag must be 'as'"
)
var_name = bits[5] var_name = bits[5]
# RegroupNode will take each item in 'target', put it in the context under # RegroupNode will take each item in 'target', put it in the context under
# 'var_name', evaluate 'var_name'.'expression' in the current context, and # 'var_name', evaluate 'var_name'.'expression' in the current context, and
@ -1420,8 +1423,9 @@ def do_with(parser, token):
remaining_bits = bits[1:] remaining_bits = bits[1:]
extra_context = token_kwargs(remaining_bits, parser, support_legacy=True) extra_context = token_kwargs(remaining_bits, parser, support_legacy=True)
if not extra_context: if not extra_context:
raise TemplateSyntaxError("%r expected at least one variable " raise TemplateSyntaxError(
"assignment" % bits[0]) "%r expected at least one variable assignment" % bits[0]
)
if remaining_bits: if remaining_bits:
raise TemplateSyntaxError("%r received an invalid token: %r" % raise TemplateSyntaxError("%r received an invalid token: %r" %
(bits[0], remaining_bits[0])) (bits[0], remaining_bits[0]))

View File

@ -315,13 +315,15 @@ def do_include(parser, token):
while remaining_bits: while remaining_bits:
option = remaining_bits.pop(0) option = remaining_bits.pop(0)
if option in options: if option in options:
raise TemplateSyntaxError('The %r option was specified more ' raise TemplateSyntaxError(
'than once.' % option) 'The %r option was specified more than once.' % option
)
if option == 'with': if option == 'with':
value = token_kwargs(remaining_bits, parser, support_legacy=False) value = token_kwargs(remaining_bits, parser, support_legacy=False)
if not value: if not value:
raise TemplateSyntaxError('"with" in %r tag needs at least ' raise TemplateSyntaxError(
'one keyword argument.' % bits[0]) '"with" in %r tag needs at least one keyword argument.' % bits[0]
)
elif option == 'only': elif option == 'only':
value = True value = True
else: else:

View File

@ -49,8 +49,9 @@ class SimpleTemplateResponse(HttpResponse):
""" """
obj_dict = self.__dict__.copy() obj_dict = self.__dict__.copy()
if not self._is_rendered: if not self._is_rendered:
raise ContentNotRenderedError('The response content must be ' raise ContentNotRenderedError(
'rendered before it can be pickled.') 'The response content must be rendered before it can be pickled.'
)
for attr in self.rendering_attrs: for attr in self.rendering_attrs:
if attr in obj_dict: if attr in obj_dict:
del obj_dict[attr] del obj_dict[attr]

View File

@ -476,13 +476,15 @@ def do_block_translate(parser, token):
while remaining_bits: while remaining_bits:
option = remaining_bits.pop(0) option = remaining_bits.pop(0)
if option in options: if option in options:
raise TemplateSyntaxError('The %r option was specified more ' raise TemplateSyntaxError(
'than once.' % option) 'The %r option was specified more than once.' % option
)
if option == 'with': if option == 'with':
value = token_kwargs(remaining_bits, parser, support_legacy=True) value = token_kwargs(remaining_bits, parser, support_legacy=True)
if not value: if not value:
raise TemplateSyntaxError('"with" in %r tag needs at least ' raise TemplateSyntaxError(
'one keyword argument.' % bits[0]) '"with" in %r tag needs at least one keyword argument.' % bits[0]
)
elif option == 'count': elif option == 'count':
value = token_kwargs(remaining_bits, parser, support_legacy=True) value = token_kwargs(remaining_bits, parser, support_legacy=True)
if len(value) != 1: if len(value) != 1:

View File

@ -209,6 +209,7 @@ def get_current_timezone_tag(parser, token):
# token.split_contents() isn't useful here because this tag doesn't accept variable as arguments # token.split_contents() isn't useful here because this tag doesn't accept variable as arguments
args = token.contents.split() args = token.contents.split()
if len(args) != 3 or args[1] != 'as': if len(args) != 3 or args[1] != 'as':
raise TemplateSyntaxError("'get_current_timezone' requires " raise TemplateSyntaxError(
"'as variable' (got %r)" % args) "'get_current_timezone' requires 'as variable' (got %r)" % args
)
return GetCurrentTimezoneNode(args[2]) return GetCurrentTimezoneNode(args[2])

View File

@ -894,8 +894,7 @@ class AsyncClient(ClientMixin, AsyncRequestFactory):
""" """
if 'follow' in request: if 'follow' in request:
raise NotImplementedError( raise NotImplementedError(
'AsyncClient request methods do not accept the follow ' 'AsyncClient request methods do not accept the follow parameter.'
'parameter.'
) )
scope = self._base_scope(**request) scope = self._base_scope(**request)
# Curry a data dictionary into an instance of the template renderer # Curry a data dictionary into an instance of the template renderer

View File

@ -967,8 +967,7 @@ class AliasTests(TestCase):
def test_aggregate_alias(self): def test_aggregate_alias(self):
msg = ( msg = (
"Cannot aggregate over the 'other_age' alias. Use annotate() to " "Cannot aggregate over the 'other_age' alias. Use annotate() to promote it."
"promote it."
) )
with self.assertRaisesMessage(FieldError, msg): with self.assertRaisesMessage(FieldError, msg):
Author.objects.alias( Author.objects.alias(
@ -992,10 +991,7 @@ class AliasTests(TestCase):
def test_values_alias(self): def test_values_alias(self):
qs = Book.objects.alias(rating_alias=F('rating') - 1) qs = Book.objects.alias(rating_alias=F('rating') - 1)
msg = ( msg = "Cannot select the 'rating_alias' alias. Use annotate() to promote it."
"Cannot select the 'rating_alias' alias. Use annotate() to "
"promote it."
)
for operation in ['values', 'values_list']: for operation in ['values', 'values_list']:
with self.subTest(operation=operation): with self.subTest(operation=operation):
with self.assertRaisesMessage(FieldError, msg): with self.assertRaisesMessage(FieldError, msg):

View File

@ -72,8 +72,7 @@ def empty_response(request):
KEY_ERRORS_WITH_MEMCACHED_MSG = ( KEY_ERRORS_WITH_MEMCACHED_MSG = (
'Cache key contains characters that will cause errors if used with ' 'Cache key contains characters that will cause errors if used with memcached: %r'
'memcached: %r'
) )

View File

@ -409,16 +409,22 @@ class BasicExpressionsTests(TestCase):
def test_order_by_multiline_sql(self): def test_order_by_multiline_sql(self):
raw_order_by = ( raw_order_by = (
RawSQL(''' RawSQL(
"""
CASE WHEN num_employees > 1000 CASE WHEN num_employees > 1000
THEN num_chairs THEN num_chairs
ELSE 0 END ELSE 0 END
''', []).desc(), """,
RawSQL(''' [],
).desc(),
RawSQL(
"""
CASE WHEN num_chairs > 1 CASE WHEN num_chairs > 1
THEN 1 THEN 1
ELSE 0 END ELSE 0 END
''', []).asc() """,
[],
).asc()
) )
for qs in ( for qs in (
Company.objects.all(), Company.objects.all(),

View File

@ -626,8 +626,7 @@ class FileUploadTests(TestCase):
'Content-Disposition: form-data; name="file_field"; filename="MiXeD_cAsE.txt"', 'Content-Disposition: form-data; name="file_field"; filename="MiXeD_cAsE.txt"',
'Content-Type: application/octet-stream', 'Content-Type: application/octet-stream',
'', '',
'file contents\n' 'file contents\n',
'',
'--%(boundary)s--\r\n', '--%(boundary)s--\r\n',
] ]
response = self.client.post( response = self.client.post(

View File

@ -212,8 +212,7 @@ class MultiWidgetTest(WidgetTest):
def test_no_whitespace_between_widgets(self): def test_no_whitespace_between_widgets(self):
widget = MyMultiWidget(widgets=(TextInput, TextInput())) widget = MyMultiWidget(widgets=(TextInput, TextInput()))
self.check_html(widget, 'code', None, html=( self.check_html(widget, 'code', None, html=(
'<input type="text" name="code_0">' '<input type="text" name="code_0"><input type="text" name="code_1">'
'<input type="text" name="code_1">'
), strict=True) ), strict=True)
def test_deepcopy(self): def test_deepcopy(self):

View File

@ -370,8 +370,7 @@ class GISFunctionsTests(FuncTestMixin, TestCase):
@skipUnlessDBFeature('has_MakeValid_function') @skipUnlessDBFeature('has_MakeValid_function')
def test_make_valid_multipolygon(self): def test_make_valid_multipolygon(self):
invalid_geom = fromstr( invalid_geom = fromstr(
'POLYGON((0 0, 0 1 , 1 1 , 1 0, 0 0), ' 'POLYGON((0 0, 0 1 , 1 1 , 1 0, 0 0), (10 0, 10 1, 11 1, 11 0, 10 0))'
'(10 0, 10 1, 11 1, 11 0, 10 0))'
) )
State.objects.create(name='invalid', poly=invalid_geom) State.objects.create(name='invalid', poly=invalid_geom)
invalid = State.objects.filter(name='invalid').annotate( invalid = State.objects.filter(name='invalid').annotate(

View File

@ -1707,8 +1707,7 @@ class ConstraintsTests(TestCase):
self.assertEqual(Model.check(databases=self.databases), [ self.assertEqual(Model.check(databases=self.databases), [
Error( Error(
"'constraints' refers to the nonexistent field " "'constraints' refers to the nonexistent field 'missing_field'.",
"'missing_field'.",
obj=Model, obj=Model,
id='models.E012', id='models.E012',
), ),
@ -1972,8 +1971,7 @@ class ConstraintsTests(TestCase):
self.assertEqual(Model.check(databases=self.databases), [ self.assertEqual(Model.check(databases=self.databases), [
Error( Error(
"'constraints' refers to the nonexistent field " "'constraints' refers to the nonexistent field 'missing_field'.",
"'missing_field'.",
obj=Model, obj=Model,
id='models.E012', id='models.E012',
), ),
@ -2075,8 +2073,7 @@ class ConstraintsTests(TestCase):
self.assertEqual(Model.check(databases=self.databases), [ self.assertEqual(Model.check(databases=self.databases), [
Error( Error(
"'constraints' refers to the nonexistent field " "'constraints' refers to the nonexistent field 'missing_field'.",
"'missing_field'.",
obj=Model, obj=Model,
id='models.E012', id='models.E012',
), ),
@ -2193,8 +2190,7 @@ class ConstraintsTests(TestCase):
self.assertEqual(Model.check(databases=self.databases), [ self.assertEqual(Model.check(databases=self.databases), [
Error( Error(
"'constraints' refers to the nonexistent field " "'constraints' refers to the nonexistent field 'missing_field'.",
"'missing_field'.",
obj=Model, obj=Model,
id='models.E012', id='models.E012',
), ),
@ -2331,8 +2327,7 @@ class ConstraintsTests(TestCase):
self.assertEqual(Model.check(databases=self.databases), [ self.assertEqual(Model.check(databases=self.databases), [
Error( Error(
"'constraints' refers to the nonexistent field " "'constraints' refers to the nonexistent field 'missing_field'.",
"'missing_field'.",
obj=Model, obj=Model,
id='models.E012', id='models.E012',
), ),
@ -2348,8 +2343,7 @@ class ConstraintsTests(TestCase):
self.assertEqual(Model.check(databases=self.databases), [ self.assertEqual(Model.check(databases=self.databases), [
Error( Error(
"'constraints' refers to the nonexistent field " "'constraints' refers to the nonexistent field 'missing_field'.",
"'missing_field'.",
obj=Model, obj=Model,
id='models.E012', id='models.E012',
), ),

View File

@ -306,9 +306,7 @@ class MigrateTests(MigrationTestBase):
with mock.patch('django.core.management.color.supports_color', lambda *args: True): with mock.patch('django.core.management.color.supports_color', lambda *args: True):
call_command("showmigrations", format='list', stdout=out, verbosity=0, no_color=False) call_command("showmigrations", format='list', stdout=out, verbosity=0, no_color=False)
self.assertEqual( self.assertEqual(
'\x1b[1mmigrations\n\x1b[0m' '\x1b[1mmigrations\n\x1b[0m [ ] 0001_initial\n [ ] 0002_second\n',
' [ ] 0001_initial\n'
' [ ] 0002_second\n',
out.getvalue().lower() out.getvalue().lower()
) )
@ -318,9 +316,7 @@ class MigrateTests(MigrationTestBase):
# Giving the explicit app_label tests for selective `show_list` in the command # Giving the explicit app_label tests for selective `show_list` in the command
call_command("showmigrations", "migrations", format='list', stdout=out, verbosity=0, no_color=True) call_command("showmigrations", "migrations", format='list', stdout=out, verbosity=0, no_color=True)
self.assertEqual( self.assertEqual(
'migrations\n' 'migrations\n [x] 0001_initial\n [ ] 0002_second\n',
' [x] 0001_initial\n'
' [ ] 0002_second\n',
out.getvalue().lower() out.getvalue().lower()
) )
out = io.StringIO() out = io.StringIO()
@ -341,8 +337,7 @@ class MigrateTests(MigrationTestBase):
out = io.StringIO() out = io.StringIO()
call_command('showmigrations', format='list', stdout=out, verbosity=2, no_color=True) call_command('showmigrations', format='list', stdout=out, verbosity=2, no_color=True)
self.assertEqual( self.assertEqual(
'migrations\n' 'migrations\n [ ] 0001_squashed_0002 (2 squashed migrations)\n',
' [ ] 0001_squashed_0002 (2 squashed migrations)\n',
out.getvalue().lower(), out.getvalue().lower(),
) )
out = io.StringIO() out = io.StringIO()
@ -366,8 +361,7 @@ class MigrateTests(MigrationTestBase):
out = io.StringIO() out = io.StringIO()
call_command('showmigrations', format='list', stdout=out, verbosity=2, no_color=True) call_command('showmigrations', format='list', stdout=out, verbosity=2, no_color=True)
self.assertEqual( self.assertEqual(
'migrations\n' 'migrations\n [x] 0001_squashed_0002 (2 squashed migrations)\n',
' [x] 0001_squashed_0002 (2 squashed migrations)\n',
out.getvalue().lower(), out.getvalue().lower(),
) )
finally: finally:
@ -445,8 +439,7 @@ class MigrateTests(MigrationTestBase):
# Show the plan for when there is nothing to apply. # Show the plan for when there is nothing to apply.
call_command('migrate', 'migrations', '0003', plan=True, stdout=out, no_color=True) call_command('migrate', 'migrations', '0003', plan=True, stdout=out, no_color=True)
self.assertEqual( self.assertEqual(
'Planned operations:\n' 'Planned operations:\n No planned migration operations.\n',
' No planned migration operations.\n',
out.getvalue() out.getvalue()
) )
out = io.StringIO() out = io.StringIO()
@ -607,8 +600,7 @@ class MigrateTests(MigrationTestBase):
out = io.StringIO() out = io.StringIO()
call_command('showmigrations', 'mutate_state_b', format='plan', stdout=out) call_command('showmigrations', 'mutate_state_b', format='plan', stdout=out)
self.assertEqual( self.assertEqual(
'[ ] mutate_state_b.0001_initial\n' '[ ] mutate_state_b.0001_initial\n[ ] mutate_state_b.0002_add_field\n',
'[ ] mutate_state_b.0002_add_field\n',
out.getvalue() out.getvalue()
) )
# Single app with dependencies. # Single app with dependencies.
@ -909,8 +901,7 @@ class MigrateTests(MigrationTestBase):
call_command("migrate", "migrations", verbosity=0) call_command("migrate", "migrations", verbosity=0)
call_command("showmigrations", "migrations", stdout=out, no_color=True) call_command("showmigrations", "migrations", stdout=out, no_color=True)
self.assertEqual( self.assertEqual(
'migrations\n' 'migrations\n [x] 0001_squashed_0002 (2 squashed migrations)\n',
' [x] 0001_squashed_0002 (2 squashed migrations)\n',
out.getvalue().lower() out.getvalue().lower()
) )
applied_migrations = recorder.applied_migrations() applied_migrations = recorder.applied_migrations()
@ -942,8 +933,7 @@ class MigrateTests(MigrationTestBase):
call_command("migrate", "migrations", verbosity=0) call_command("migrate", "migrations", verbosity=0)
call_command("showmigrations", "migrations", stdout=out, no_color=True) call_command("showmigrations", "migrations", stdout=out, no_color=True)
self.assertEqual( self.assertEqual(
'migrations\n' 'migrations\n [x] 0001_squashed_0002 (2 squashed migrations)\n',
' [x] 0001_squashed_0002 (2 squashed migrations)\n',
out.getvalue().lower() out.getvalue().lower()
) )
self.assertIn( self.assertIn(
@ -1991,8 +1981,7 @@ class AppLabelErrorTests(TestCase):
""" """
nonexistent_app_error = "No installed app with label 'nonexistent_app'." nonexistent_app_error = "No installed app with label 'nonexistent_app'."
did_you_mean_auth_error = ( did_you_mean_auth_error = (
"No installed app with label 'django.contrib.auth'. Did you mean " "No installed app with label 'django.contrib.auth'. Did you mean 'auth'?"
"'auth'?"
) )
def test_makemigrations_nonexistent_app_label(self): def test_makemigrations_nonexistent_app_label(self):

View File

@ -86,8 +86,7 @@ class OperationWriterTests(SimpleTestCase):
self.assertEqual(imports, {'import custom_migration_operations.operations'}) self.assertEqual(imports, {'import custom_migration_operations.operations'})
self.assertEqual( self.assertEqual(
buff, buff,
'custom_migration_operations.operations.TestOperation(\n' 'custom_migration_operations.operations.TestOperation(\n),',
'),'
) )
def test_args_signature(self): def test_args_signature(self):

View File

@ -129,8 +129,7 @@ class TestFormField(SimpleTestCase):
class TestSerialization(SimpleTestCase): class TestSerialization(SimpleTestCase):
test_data = ( test_data = (
'[{"fields": {"value": %s}, ' '[{"fields": {"value": %s}, "model": "model_fields.jsonmodel", "pk": null}]'
'"model": "model_fields.jsonmodel", "pk": null}]'
) )
test_values = ( test_values = (
# (Python value, serialized value), # (Python value, serialized value),

View File

@ -411,8 +411,7 @@ class PrepopulatedFieldsCheckTests(CheckTestCase):
self.assertIsInvalid( self.assertIsInvalid(
TestModelAdmin, ValidationTestModel, TestModelAdmin, ValidationTestModel,
'The value of \'prepopulated_fields["slug"]\' must be a list ' 'The value of \'prepopulated_fields["slug"]\' must be a list or tuple.',
'or tuple.',
'admin.E029' 'admin.E029'
) )

View File

@ -53,8 +53,7 @@ class AddIndexConcurrentlyTests(OperationTestBase):
operation = AddIndexConcurrently('Pony', index) operation = AddIndexConcurrently('Pony', index)
self.assertEqual( self.assertEqual(
operation.describe(), operation.describe(),
'Concurrently create index pony_pink_idx on field(s) pink of ' 'Concurrently create index pony_pink_idx on field(s) pink of model Pony',
'model Pony'
) )
operation.state_forwards(self.app_label, new_state) operation.state_forwards(self.app_label, new_state)
self.assertEqual(len(new_state.models[self.app_label, 'pony'].options['indexes']), 1) self.assertEqual(len(new_state.models[self.app_label, 'pony'].options['indexes']), 1)

View File

@ -1311,9 +1311,10 @@ class MultiDbTests(TestCase):
books = "".join("%s (%s)\n" % books = "".join("%s (%s)\n" %
(b.title, ", ".join(a.name for a in b.first_time_authors.all())) (b.title, ", ".join(a.name for a in b.first_time_authors.all()))
for b in B.prefetch_related('first_time_authors')) for b in B.prefetch_related('first_time_authors'))
self.assertEqual(books, self.assertEqual(
"Poems (Charlotte Bronte)\n" books,
"Sense and Sensibility (Jane Austen)\n") "Poems (Charlotte Bronte)\nSense and Sensibility (Jane Austen)\n",
)
def test_using_is_honored_inheritance(self): def test_using_is_honored_inheritance(self):
B = BookWithYear.objects.using('other') B = BookWithYear.objects.using('other')
@ -1350,19 +1351,20 @@ class MultiDbTests(TestCase):
books = "".join("%s (%s)\n" % books = "".join("%s (%s)\n" %
(b.title, ", ".join(a.name for a in b.first_time_authors.all())) (b.title, ", ".join(a.name for a in b.first_time_authors.all()))
for b in B.prefetch_related(prefetch)) for b in B.prefetch_related(prefetch))
self.assertEqual(books, self.assertEqual(
"Poems (Charlotte Bronte)\n" books,
"Sense and Sensibility (Jane Austen)\n") "Poems (Charlotte Bronte)\nSense and Sensibility (Jane Austen)\n",
)
# Explicit using on the same db. # Explicit using on the same db.
with self.assertNumQueries(2, using='other'): with self.assertNumQueries(2, using='other'):
prefetch = Prefetch('first_time_authors', queryset=Author.objects.using('other')) prefetch = Prefetch('first_time_authors', queryset=Author.objects.using('other'))
books = "".join("%s (%s)\n" % books = "".join("%s (%s)\n" %
(b.title, ", ".join(a.name for a in b.first_time_authors.all())) (b.title, ", ".join(a.name for a in b.first_time_authors.all()))
for b in B.prefetch_related(prefetch)) for b in B.prefetch_related(prefetch))
self.assertEqual(books, self.assertEqual(
"Poems (Charlotte Bronte)\n" books,
"Sense and Sensibility (Jane Austen)\n") "Poems (Charlotte Bronte)\nSense and Sensibility (Jane Austen)\n",
)
# Explicit using on a different db. # Explicit using on a different db.
with self.assertNumQueries(1, using='default'), self.assertNumQueries(1, using='other'): with self.assertNumQueries(1, using='default'), self.assertNumQueries(1, using='other'):
@ -1370,9 +1372,10 @@ class MultiDbTests(TestCase):
books = "".join("%s (%s)\n" % books = "".join("%s (%s)\n" %
(b.title, ", ".join(a.name for a in b.first_time_authors.all())) (b.title, ", ".join(a.name for a in b.first_time_authors.all()))
for b in B.prefetch_related(prefetch)) for b in B.prefetch_related(prefetch))
self.assertEqual(books, self.assertEqual(
"Poems ()\n" books,
"Sense and Sensibility ()\n") "Poems ()\nSense and Sensibility ()\n",
)
class Ticket19607Tests(TestCase): class Ticket19607Tests(TestCase):

View File

@ -3121,8 +3121,7 @@ class QuerySetExceptionTests(SimpleTestCase):
def test_invalid_order_by(self): def test_invalid_order_by(self):
msg = ( msg = (
"Cannot resolve keyword '*' into field. Choices are: created, id, " "Cannot resolve keyword '*' into field. Choices are: created, id, name"
"name"
) )
with self.assertRaisesMessage(FieldError, msg): with self.assertRaisesMessage(FieldError, msg):
Article.objects.order_by('*') Article.objects.order_by('*')

View File

@ -41,7 +41,6 @@ class DataUploadMaxMemorySizeMultipartPostTests(SimpleTestCase):
'', '',
'value', 'value',
'--boundary--' '--boundary--'
''
])) ]))
self.request = WSGIRequest({ self.request = WSGIRequest({
'REQUEST_METHOD': 'POST', 'REQUEST_METHOD': 'POST',
@ -70,7 +69,6 @@ class DataUploadMaxMemorySizeMultipartPostTests(SimpleTestCase):
'', '',
'value', 'value',
'--boundary--' '--boundary--'
''
])) ]))
request = WSGIRequest({ request = WSGIRequest({
'REQUEST_METHOD': 'POST', 'REQUEST_METHOD': 'POST',
@ -143,7 +141,6 @@ class DataUploadMaxNumberOfFieldsMultipartPost(SimpleTestCase):
'', '',
'value2', 'value2',
'--boundary--' '--boundary--'
''
])) ]))
self.request = WSGIRequest({ self.request = WSGIRequest({
'REQUEST_METHOD': 'POST', 'REQUEST_METHOD': 'POST',

View File

@ -316,7 +316,7 @@ class RequestsTests(SimpleTestCase):
'', '',
'value', 'value',
'--boundary--' '--boundary--'
''])) ]))
request = WSGIRequest({ request = WSGIRequest({
'REQUEST_METHOD': 'POST', 'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': 'multipart/form-data; boundary=boundary', 'CONTENT_TYPE': 'multipart/form-data; boundary=boundary',
@ -341,7 +341,7 @@ class RequestsTests(SimpleTestCase):
b'', b'',
b'value', b'value',
b'--boundary--' b'--boundary--'
b'']) ])
payload = FakePayload(payload_data) payload = FakePayload(payload_data)
request = WSGIRequest({ request = WSGIRequest({
'REQUEST_METHOD': 'POST', 'REQUEST_METHOD': 'POST',
@ -366,7 +366,7 @@ class RequestsTests(SimpleTestCase):
'', '',
'value', 'value',
'--boundary--' '--boundary--'
''])) ]))
request = WSGIRequest({ request = WSGIRequest({
'REQUEST_METHOD': 'POST', 'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': 'multipart/form-data; boundary=boundary', 'CONTENT_TYPE': 'multipart/form-data; boundary=boundary',
@ -445,8 +445,8 @@ class RequestsTests(SimpleTestCase):
'Content-Disposition: form-data; name="name"', 'Content-Disposition: form-data; name="name"',
'', '',
'value', 'value',
'--boundary--' '--boundary--',
''])) ]))
request = WSGIRequest({ request = WSGIRequest({
'REQUEST_METHOD': 'POST', 'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': 'multipart/form-data; boundary=boundary', 'CONTENT_TYPE': 'multipart/form-data; boundary=boundary',

View File

@ -300,8 +300,7 @@ def setup_run_tests(verbosity, start_at, start_after, test_labels=None):
# Force declaring available_apps in TransactionTestCase for faster tests. # Force declaring available_apps in TransactionTestCase for faster tests.
def no_available_apps(self): def no_available_apps(self):
raise Exception( raise Exception(
'Please define available_apps in TransactionTestCase and its ' 'Please define available_apps in TransactionTestCase and its subclasses.'
'subclasses.'
) )
TransactionTestCase.available_apps = property(no_available_apps) TransactionTestCase.available_apps = property(no_available_apps)
TestCase.available_apps = None TestCase.available_apps = None

View File

@ -11,10 +11,7 @@ from django.test.utils import captured_stdin, captured_stdout
class ShellCommandTestCase(SimpleTestCase): class ShellCommandTestCase(SimpleTestCase):
script_globals = 'print("__name__" in globals())' script_globals = 'print("__name__" in globals())'
script_with_inline_function = ( script_with_inline_function = (
'import django\n' 'import django\ndef f():\n print(django.__version__)\nf()'
'def f():\n'
' print(django.__version__)\n'
'f()'
) )
def test_command_option(self): def test_command_option(self):

View File

@ -60,14 +60,13 @@ class FunctionTests(SimpleTestCase):
self.assertEqual( self.assertEqual(
urlizetrunc(uri, 1), urlizetrunc(uri, 1),
'<a href="http://31characteruri.com/test/"' '<a href="http://31characteruri.com/test/" rel="nofollow">…</a>',
' rel="nofollow">…</a>',
) )
def test_overtruncate(self): def test_overtruncate(self):
self.assertEqual( self.assertEqual(
urlizetrunc('http://short.com/', 20), '<a href=' urlizetrunc('http://short.com/', 20),
'"http://short.com/" rel="nofollow">http://short.com/</a>', '<a href="http://short.com/" rel="nofollow">http://short.com/</a>',
) )
def test_query_string(self): def test_query_string(self):

View File

@ -36,7 +36,7 @@ class I18nGetLanguageInfoTagTests(SimpleTestCase):
output = self.engine.render_to_string('i18n38') output = self.engine.render_to_string('i18n38')
self.assertEqual(output, 'de: German/Deutsch/německy bidi=False') self.assertEqual(output, 'de: German/Deutsch/německy bidi=False')
@setup({'template': '{% load i18n %}''{% get_language_info %}'}) @setup({'template': '{% load i18n %}{% get_language_info %}'})
def test_no_for_as(self): def test_no_for_as(self):
msg = "'get_language_info' requires 'for string as variable' (got [])" msg = "'get_language_info' requires 'for string as variable' (got [])"
with self.assertRaisesMessage(TemplateSyntaxError, msg): with self.assertRaisesMessage(TemplateSyntaxError, msg):

View File

@ -104,8 +104,7 @@ class SimpleTagTests(TagTestCase):
( (
"'simple_keyword_only_param' received multiple values for " "'simple_keyword_only_param' received multiple values for "
"keyword argument 'kwarg'", "keyword argument 'kwarg'",
'{% load custom %}{% simple_keyword_only_param kwarg=42 ' '{% load custom %}{% simple_keyword_only_param kwarg=42 kwarg=37 %}',
'kwarg=37 %}',
), ),
( (
"'simple_keyword_only_default' received multiple values for " "'simple_keyword_only_default' received multiple values for "

View File

@ -445,8 +445,7 @@ class DiscoverRunnerTests(SimpleTestCase):
def test_pdb_with_parallel(self): def test_pdb_with_parallel(self):
msg = ( msg = (
'You cannot use --pdb with parallel tests; pass --parallel=1 to ' 'You cannot use --pdb with parallel tests; pass --parallel=1 to use it.'
'use it.'
) )
with self.assertRaisesMessage(ValueError, msg): with self.assertRaisesMessage(ValueError, msg):
DiscoverRunner(pdb=True, parallel=2) DiscoverRunner(pdb=True, parallel=2)

View File

@ -462,8 +462,7 @@ class AssertNumQueriesContextManagerTests(TestCase):
def test_failure(self): def test_failure(self):
msg = ( msg = (
'1 != 2 : 1 queries executed, 2 expected\nCaptured queries were:\n' '1 != 2 : 1 queries executed, 2 expected\nCaptured queries were:\n1.'
'1.'
) )
with self.assertRaisesMessage(AssertionError, msg): with self.assertRaisesMessage(AssertionError, msg):
with self.assertNumQueries(2): with self.assertNumQueries(2):

View File

@ -134,8 +134,7 @@ VALID_URLS = [
'ample.com', 'ample.com',
'http://example.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' 'http://example.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'
'aaaaa.com', 'aaaaa.com',
'http://example.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' 'http://example.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',
'aaaaa',
'http://aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaa' 'http://aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaa'
'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaa' 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaa'
'aaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaa' 'aaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa.aaaaaaaaaaaaaaaaaa'

View File

@ -635,14 +635,12 @@ class ExceptionReporterTests(SimpleTestCase):
html, html,
) )
self.assertIn( self.assertIn(
'"generated", line 2, in funcName\n' '"generated", line 2, in funcName\n &lt;source code not available&gt;',
' &lt;source code not available&gt;',
html, html,
) )
text = reporter.get_traceback_text() text = reporter.get_traceback_text()
self.assertIn( self.assertIn(
'"generated", line 2, in funcName\n' '"generated", line 2, in funcName\n <source code not available>',
' <source code not available>',
text, text,
) )
@ -676,14 +674,12 @@ class ExceptionReporterTests(SimpleTestCase):
html, html,
) )
self.assertIn( self.assertIn(
'"generated", line 2, in funcName\n' '"generated", line 2, in funcName\n &lt;source code not available&gt;',
' &lt;source code not available&gt;',
html, html,
) )
text = reporter.get_traceback_text() text = reporter.get_traceback_text()
self.assertIn( self.assertIn(
'"generated", line 2, in funcName\n' '"generated", line 2, in funcName\n <source code not available>',
' <source code not available>',
text, text,
) )