Fixed #20989 -- Removed explicit list comprehension inside dict() and tuple()
Thanks jeroen.pulles at redslider.net for the suggestion and helper script.
commit c7d0ff0cad
parent f19a3669b8
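The pattern applied throughout the diff below: when a `dict()` or `tuple()` call is fed a list comprehension, the brackets are dropped so the call receives a generator expression instead, avoiding a throwaway intermediate list (the call's own parentheses already delimit the expression). A minimal illustration on made-up data -- the `pairs` list and names here are examples only, not code from Django:

    # Illustration only: `pairs` and the variable names are made up.
    pairs = [('md5', 1), ('sha1', 2), ('pbkdf2_sha256', 3)]

    # Before: the list comprehension builds a temporary list, then dict() copies it.
    hashers_old = dict([(name, rank) for name, rank in pairs])

    # After: the generator expression feeds dict() item by item; no extra brackets needed.
    hashers_new = dict((name, rank) for name, rank in pairs)

    assert hashers_old == hashers_new

    # The same rewrite applies to tuple():
    squares = tuple(n * n for n in range(5))   # rather than tuple([n * n for n in range(5)])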
@@ -95,7 +95,7 @@ def load_hashers(password_hashers=None):
             raise ImproperlyConfigured("hasher doesn't specify an "
                                        "algorithm name: %s" % backend)
         hashers.append(hasher)
-    HASHERS = dict([(hasher.algorithm, hasher) for hasher in hashers])
+    HASHERS = dict((hasher.algorithm, hasher) for hasher in hashers)
     PREFERRED_HASHER = hashers[0]


@@ -191,4 +191,4 @@ OGRFieldTypes = { 0 : OFTInteger,
                   10 : OFTTime,
                   11 : OFTDateTime,
                   }
-ROGRFieldTypes = dict([(cls, num) for num, cls in OGRFieldTypes.items()])
+ROGRFieldTypes = dict((cls, num) for num, cls in OGRFieldTypes.items())

@@ -28,7 +28,7 @@ class OGRGeomType(object):
               7 + wkb25bit : 'GeometryCollection25D',
               }
     # Reverse type dictionary, keyed by lower-case of the name.
-    _str_types = dict([(v.lower(), k) for k, v in _types.items()])
+    _str_types = dict((v.lower(), k) for k, v in _types.items())

     def __init__(self, type_input):
         "Figures out the correct OGR Type based upon the input."

@@ -84,7 +84,7 @@ def gdal_version_info():
     ver = gdal_version().decode()
     m = version_regex.match(ver)
     if not m: raise OGRException('Could not parse GDAL version string "%s"' % ver)
-    return dict([(key, m.group(key)) for key in ('major', 'minor', 'subminor')])
+    return dict((key, m.group(key)) for key in ('major', 'minor', 'subminor'))

 _verinfo = gdal_version_info()
 GDAL_MAJOR_VERSION = int(_verinfo['major'])

@@ -20,13 +20,13 @@ TEST_DATA = os.path.join(os.path.dirname(upath(gis.__file__)), 'tests', 'data')
 def tuplize(seq):
     "Turn all nested sequences to tuples in given sequence."
     if isinstance(seq, (list, tuple)):
-        return tuple([tuplize(i) for i in seq])
+        return tuple(tuplize(i) for i in seq)
     return seq


 def strconvert(d):
     "Converts all keys in dictionary to str type."
-    return dict([(str(k), v) for k, v in six.iteritems(d)])
+    return dict((str(k), v) for k, v in six.iteritems(d))


 def get_ds_file(name, ext):

@@ -87,7 +87,7 @@ class GeometryCollection(GEOSGeometry):
     @property
     def tuple(self):
         "Returns a tuple of all the coordinates in this Geometry Collection"
-        return tuple([g.tuple for g in self])
+        return tuple(g.tuple for g in self)
     coords = tuple

 # MultiPoint, MultiLineString, and MultiPolygon class definitions.

@@ -81,7 +81,7 @@ class Command(LabelCommand):
             raise CommandError('GDAL is required to inspect geospatial data sources.')

         # Removing options with `None` values.
-        options = dict([(k, v) for k, v in options.items() if not v is None])
+        options = dict((k, v) for k, v in options.items() if not v is None)

         # Getting the OGR DataSource from the string parameter.
         try:

@@ -110,7 +110,7 @@ class Command(LabelCommand):
             mapping_dict = mapping(ds, **kwargs)
             # This extra legwork is so that the dictionary definition comes
             # out in the same order as the fields in the model definition.
-            rev_mapping = dict([(v, k) for k, v in mapping_dict.items()])
+            rev_mapping = dict((v, k) for k, v in mapping_dict.items())
             output.extend(['', '# Auto-generated `LayerMapping` dictionary for %s model' % model_name,
                            '%s_mapping = {' % model_name.lower()])
             output.extend(["    '%s' : '%s'," % (rev_mapping[ogr_fld], ogr_fld) for ogr_fld in ds[options['layer_key']].fields])

@@ -292,7 +292,7 @@ class Distance(MeasureBase):
         'Yard (Indian)' : 'indian_yd',
         'Yard (Sears)' : 'sears_yd'
     }
-    LALIAS = dict([(k.lower(), v) for k, v in ALIAS.items()])
+    LALIAS = dict((k.lower(), v) for k, v in ALIAS.items())

     def __mul__(self, other):
         if isinstance(other, self.__class__):

@@ -310,9 +310,9 @@ class Distance(MeasureBase):
 class Area(MeasureBase):
     STANDARD_UNIT = AREA_PREFIX + Distance.STANDARD_UNIT
     # Getting the square units values and the alias dictionary.
-    UNITS = dict([('%s%s' % (AREA_PREFIX, k), v ** 2) for k, v in Distance.UNITS.items()])
-    ALIAS = dict([(k, '%s%s' % (AREA_PREFIX, v)) for k, v in Distance.ALIAS.items()])
-    LALIAS = dict([(k.lower(), v) for k, v in ALIAS.items()])
+    UNITS = dict(('%s%s' % (AREA_PREFIX, k), v ** 2) for k, v in Distance.UNITS.items())
+    ALIAS = dict((k, '%s%s' % (AREA_PREFIX, v)) for k, v in Distance.ALIAS.items())
+    LALIAS = dict((k.lower(), v) for k, v in ALIAS.items())

     def __truediv__(self, other):
         if isinstance(other, NUMERIC_TYPES):

@@ -99,7 +99,7 @@ def get_commands():
     """
     global _commands
     if _commands is None:
-        _commands = dict([(name, 'django.core') for name in find_commands(__path__[0])])
+        _commands = dict((name, 'django.core') for name in find_commands(__path__[0]))

         # Find the installed apps
         from django.conf import settings

@@ -537,4 +537,4 @@ class DatabaseWrapper(BaseDatabaseWrapper):
         match = server_version_re.match(server_info)
         if not match:
             raise Exception('Unable to determine MySQL version from version string %r' % server_info)
-        return tuple([int(x) for x in match.groups()])
+        return tuple(int(x) for x in match.groups())

@@ -773,7 +773,7 @@ class FormatStylePlaceholderCursor(object):
         try:
             return dict((k, OracleParam(v, self, True)) for k, v in params.items())
         except AttributeError:
-            return tuple([OracleParam(p, self, True) for p in params])
+            return tuple(OracleParam(p, self, True) for p in params)

     def _guess_input_sizes(self, params_list):
         # Try dict handling; if that fails, treat as sequence

@@ -139,7 +139,7 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
                 continue

             # This will append (column_name, referenced_table_name, referenced_column_name) to key_columns
-            key_columns.append(tuple([s.strip('"') for s in m.groups()]))
+            key_columns.append(tuple(s.strip('"') for s in m.groups()))

         return key_columns


@@ -227,7 +227,7 @@ class SingleRelatedObjectDescriptor(six.with_metaclass(RenameRelatedObjectDescri
         if not router.allow_relation(value, instance):
             raise ValueError('Cannot assign "%r": the current database router prevents this relation.' % value)

-        related_pk = tuple([getattr(instance, field.attname) for field in self.related.field.foreign_related_fields])
+        related_pk = tuple(getattr(instance, field.attname) for field in self.related.field.foreign_related_fields)
         if None in related_pk:
             raise ValueError('Cannot assign "%r": "%s" instance isn\'t saved in the database.' %
                                 (value, instance._meta.object_name))

@@ -541,8 +541,8 @@ def create_many_related_manager(superclass, rel):
                     ('_prefetch_related_val_%s' % f.attname,
                      '%s.%s' % (qn(join_table), qn(f.column))) for f in fk.local_related_fields))
             return (qs,
-                    lambda result: tuple([getattr(result, '_prefetch_related_val_%s' % f.attname) for f in fk.local_related_fields]),
-                    lambda inst: tuple([getattr(inst, f.attname) for f in fk.foreign_related_fields]),
+                    lambda result: tuple(getattr(result, '_prefetch_related_val_%s' % f.attname) for f in fk.local_related_fields),
+                    lambda inst: tuple(getattr(inst, f.attname) for f in fk.foreign_related_fields),
                     False,
                     self.prefetch_cache_name)

@@ -964,11 +964,11 @@ class ForeignObject(RelatedField):

     @property
     def local_related_fields(self):
-        return tuple([lhs_field for lhs_field, rhs_field in self.related_fields])
+        return tuple(lhs_field for lhs_field, rhs_field in self.related_fields)

     @property
     def foreign_related_fields(self):
-        return tuple([rhs_field for lhs_field, rhs_field in self.related_fields])
+        return tuple(rhs_field for lhs_field, rhs_field in self.related_fields)

     def get_local_related_value(self, instance):
         return self.get_instance_value_for_fields(instance, self.local_related_fields)

@@ -998,7 +998,7 @@ class ForeignObject(RelatedField):

     def get_joining_columns(self, reverse_join=False):
         source = self.reverse_related_fields if reverse_join else self.related_fields
-        return tuple([(lhs_field.column, rhs_field.column) for lhs_field, rhs_field in source])
+        return tuple((lhs_field.column, rhs_field.column) for lhs_field, rhs_field in source)

     def get_reverse_joining_columns(self):
         return self.get_joining_columns(reverse_join=True)

@@ -1105,10 +1105,10 @@ class ForeignObject(RelatedField):

     @property
     def attnames(self):
-        return tuple([field.attname for field in self.local_related_fields])
+        return tuple(field.attname for field in self.local_related_fields)

     def get_defaults(self):
-        return tuple([field.get_default() for field in self.local_related_fields])
+        return tuple(field.get_default() for field in self.local_related_fields)

     def contribute_to_class(self, cls, name, virtual_only=False):
         super(ForeignObject, self).contribute_to_class(cls, name, virtual_only=virtual_only)

@@ -534,7 +534,7 @@ class QuerySet(object):
         if not id_list:
             return {}
         qs = self.filter(pk__in=id_list).order_by()
-        return dict([(obj._get_pk_val(), obj) for obj in qs])
+        return dict((obj._get_pk_val(), obj) for obj in qs)

     def delete(self):
         """

@@ -437,7 +437,7 @@ class SQLCompiler(object):
             # Firstly, avoid infinite loops.
             if not already_seen:
                 already_seen = set()
-            join_tuple = tuple([self.query.alias_map[j].table_name for j in joins])
+            join_tuple = tuple(self.query.alias_map[j].table_name for j in joins)
             if join_tuple in already_seen:
                 raise FieldError('Infinite loop caused by ordering.')
             already_seen.add(join_tuple)

@@ -866,7 +866,7 @@ class SQLInsertCompiler(SQLCompiler):
             return [(" ".join(result), tuple(params))]
         if can_bulk:
             result.append(self.connection.ops.bulk_insert_sql(fields, len(values)))
-            return [(" ".join(result), tuple([v for val in values for v in val]))]
+            return [(" ".join(result), tuple(v for val in values for v in val))]
         else:
             return [
                 (" ".join(result + ["VALUES (%s)" % ", ".join(p)]), vals)

@@ -37,7 +37,7 @@ class Date(object):

     def as_sql(self, qn, connection):
         if isinstance(self.col, (list, tuple)):
-            col = '%s.%s' % tuple([qn(c) for c in self.col])
+            col = '%s.%s' % tuple(qn(c) for c in self.col)
         else:
             col = self.col
         return connection.ops.date_trunc_sql(self.lookup_type, col), []

@@ -57,7 +57,7 @@ class DateTime(object):

     def as_sql(self, qn, connection):
         if isinstance(self.col, (list, tuple)):
-            col = '%s.%s' % tuple([qn(c) for c in self.col])
+            col = '%s.%s' % tuple(qn(c) for c in self.col)
         else:
             col = self.col
         return connection.ops.datetime_trunc_sql(self.lookup_type, col, self.tzname)

@@ -762,7 +762,7 @@ class Query(object):
         # 2. Rename the alias in the internal table/alias datastructures.
         for ident, aliases in self.join_map.items():
             del self.join_map[ident]
-            aliases = tuple([change_map.get(a, a) for a in aliases])
+            aliases = tuple(change_map.get(a, a) for a in aliases)
             ident = (change_map.get(ident[0], ident[0]),) + ident[1:]
             self.join_map[ident] = aliases
         for old_alias, new_alias in six.iteritems(change_map):

@@ -543,7 +543,7 @@ class BaseModelFormSet(BaseFormSet):

     def _existing_object(self, pk):
         if not hasattr(self, '_object_dict'):
-            self._object_dict = dict([(o.pk, o) for o in self.get_queryset()])
+            self._object_dict = dict((o.pk, o) for o in self.get_queryset())
         return self._object_dict.get(pk)

     def _construct_form(self, i, **kwargs):

@@ -78,7 +78,7 @@ class ExtendsNode(Node):
         self.nodelist = nodelist
         self.parent_name = parent_name
         self.template_dirs = template_dirs
-        self.blocks = dict([(n.name, n) for n in nodelist.get_nodes_by_type(BlockNode)])
+        self.blocks = dict((n.name, n) for n in nodelist.get_nodes_by_type(BlockNode))

     def __repr__(self):
         return '<ExtendsNode: extends %s>' % self.parent_name.token

@@ -147,7 +147,7 @@ class BlockTranslateNode(Node):
             default_value = settings.TEMPLATE_STRING_IF_INVALID
             render_value = lambda v: render_value_in_context(
                 context.get(v, default_value), context)
-            data = dict([(v, render_value(v)) for v in vars])
+            data = dict((v, render_value(v)) for v in vars)
             context.pop()
             try:
                 result = result % data

@@ -58,7 +58,7 @@ def patch_cache_control(response, **kwargs):

     if response.has_header('Cache-Control'):
         cc = cc_delim_re.split(response['Cache-Control'])
-        cc = dict([dictitem(el) for el in cc])
+        cc = dict(dictitem(el) for el in cc)
     else:
         cc = {}

@@ -334,7 +334,7 @@ class MultiValueDict(dict):

     def __getstate__(self):
         obj_dict = self.__dict__.copy()
-        obj_dict['_data'] = dict([(k, self.getlist(k)) for k in self])
+        obj_dict['_data'] = dict((k, self.getlist(k)) for k in self)
         return obj_dict

     def __setstate__(self, obj_dict):

@@ -257,7 +257,7 @@ class BaseConfigurator(object):
             c = self.resolve(c)
         props = config.pop('.', None)
         # Check for valid identifiers
-        kwargs = dict([(k, config[k]) for k in config if valid_ident(k)])
+        kwargs = dict((k, config[k]) for k in config if valid_ident(k))
         result = c(**kwargs)
         if props:
             for name, value in props.items():

@@ -494,7 +494,7 @@ class DictConfigurator(BaseConfigurator):
                 'address' in config:
                 config['address'] = self.as_tuple(config['address'])
             factory = klass
-        kwargs = dict([(k, config[k]) for k in config if valid_ident(k)])
+        kwargs = dict((k, config[k]) for k in config if valid_ident(k))
         try:
             result = factory(**kwargs)
         except TypeError as te: