Fixed #20989 -- Removed useless explicit list comprehensions.

Simon Charette committed 2013-08-29 19:20:00 -04:00
parent e4a67fd906
commit 11cd7388f7
75 changed files with 163 additions and 163 deletions
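
The pattern applied throughout is the same in nearly every hunk: a list comprehension whose only purpose is to be passed to a callable that accepts any iterable (dict(), set(), tuple(), sum(), any(), str.join(), list.extend(), and so on) is rewritten as a generator expression, so no intermediate list is built in the calling expression. A minimal, self-contained sketch of the before/after shape (the data and names below are made up for illustration and are not taken from the patch):

    # Hypothetical example -- not part of the patch.
    values = [3, 1, 4, 1, 5]

    # Before: an explicit list is materialized just to be consumed once.
    as_csv = ','.join([str(v) for v in values])
    squares = dict([(v, v * v) for v in values])

    # After: a generator expression feeds the same callables directly; as the
    # sole argument of a call it needs no extra parentheses of its own.
    as_csv = ','.join(str(v) for v in values)
    squares = dict((v, v * v) for v in values)

    assert as_csv == '3,1,4,1,5'
    assert squares[5] == 25

For short-circuiting consumers such as any() and all(), the generator form can also stop at the first decisive element instead of building the whole list first.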

View File

@@ -287,8 +287,8 @@ FieldListFilter.register(lambda f: bool(f.choices), ChoicesFieldListFilter)
 class DateFieldListFilter(FieldListFilter):
     def __init__(self, field, request, params, model, model_admin, field_path):
         self.field_generic = '%s__' % field_path
-        self.date_params = dict([(k, v) for k, v in params.items()
-                                 if k.startswith(self.field_generic)])
+        self.date_params = dict((k, v) for k, v in params.items()
+                                if k.startswith(self.field_generic))
         now = timezone.now()
         # When time zone support is enabled, convert "now" to the user's time

View File

@@ -112,7 +112,7 @@ class Fieldline(object):
             yield AdminField(self.form, field, is_first=(i == 0))
     def errors(self):
-        return mark_safe('\n'.join([self.form[f].errors.as_ul() for f in self.fields if f not in self.readonly_fields]).strip('\n'))
+        return mark_safe('\n'.join(self.form[f].errors.as_ul() for f in self.fields if f not in self.readonly_fields).strip('\n'))
 class AdminField(object):
     def __init__(self, form, field, is_first):

View File

@@ -698,16 +698,16 @@ class ModelAdmin(BaseModelAdmin):
             # Avoid trying to iterate over None
             if not class_actions:
                 continue
-            actions.extend([self.get_action(action) for action in class_actions])
+            actions.extend(self.get_action(action) for action in class_actions)
         # get_action might have returned None, so filter any of those out.
         actions = filter(None, actions)
         # Convert the actions into an OrderedDict keyed by name.
-        actions = OrderedDict([
+        actions = OrderedDict(
             (name, (func, name, desc))
             for func, name, desc in actions
-        ])
+        )
         return actions

View File

@@ -119,7 +119,7 @@ def url_params_from_lookup_dict(lookups):
             if callable(v):
                 v = v()
             if isinstance(v, (tuple, list)):
-                v = ','.join([str(x) for x in v])
+                v = ','.join(str(x) for x in v)
             elif isinstance(v, bool):
                 # See django.db.fields.BooleanField.get_prep_lookup
                 v = ('0', '1')[v]
@@ -154,7 +154,7 @@ class ForeignKeyRawIdWidget(forms.TextInput):
         params = self.url_parameters()
         if params:
-            url = '?' + '&'.join(['%s=%s' % (k, v) for k, v in params.items()])
+            url = '?' + '&'.join('%s=%s' % (k, v) for k, v in params.items())
         else:
             url = ''
         if "class" not in attrs:
@@ -199,7 +199,7 @@ class ManyToManyRawIdWidget(ForeignKeyRawIdWidget):
             # The related object is registered with the same AdminSite
             attrs['class'] = 'vManyToManyRawIdAdminField'
         if value:
-            value = ','.join([force_text(v) for v in value])
+            value = ','.join(force_text(v) for v in value)
         else:
             value = ''
         return super(ManyToManyRawIdWidget, self).render(name, value, attrs)

View File

@@ -26,7 +26,7 @@ def trim_docstring(docstring):
         return ''
     # Convert tabs to spaces and split into lines
     lines = docstring.expandtabs().splitlines()
-    indent = min([len(line) - len(line.lstrip()) for line in lines if line.lstrip()])
+    indent = min(len(line) - len(line.lstrip()) for line in lines if line.lstrip())
    trimmed = [lines[0].lstrip()] + [line[indent:].rstrip() for line in lines[1:]]
     return "\n".join(trimmed).strip()

View File

@@ -36,14 +36,14 @@ class ModelBackend(object):
                 user_groups_query = 'group__%s' % user_groups_field.related_query_name()
                 perms = Permission.objects.filter(**{user_groups_query: user_obj})
             perms = perms.values_list('content_type__app_label', 'codename').order_by()
-            user_obj._group_perm_cache = set(["%s.%s" % (ct, name) for ct, name in perms])
+            user_obj._group_perm_cache = set("%s.%s" % (ct, name) for ct, name in perms)
         return user_obj._group_perm_cache
     def get_all_permissions(self, user_obj, obj=None):
         if user_obj.is_anonymous() or obj is not None:
             return set()
         if not hasattr(user_obj, '_perm_cache'):
-            user_obj._perm_cache = set(["%s.%s" % (p.content_type.app_label, p.codename) for p in user_obj.user_permissions.select_related()])
+            user_obj._perm_cache = set("%s.%s" % (p.content_type.app_label, p.codename) for p in user_obj.user_permissions.select_related())
             user_obj._perm_cache.update(self.get_group_permissions(user_obj))
         return user_obj._perm_cache

View File

@@ -330,10 +330,10 @@ class PasswordChangeForm(SetPasswordForm):
             )
         return old_password
-PasswordChangeForm.base_fields = OrderedDict([
+PasswordChangeForm.base_fields = OrderedDict(
     (k, PasswordChangeForm.base_fields[k])
     for k in ['old_password', 'new_password1', 'new_password2']
-])
+)
 class AdminPasswordChangeForm(forms.Form):

View File

@@ -58,10 +58,10 @@ def update_contenttypes(app, created_models, verbosity=2, db=DEFAULT_DB_ALIAS, *
     # Confirm that the content type is stale before deletion.
     if to_remove:
         if kwargs.get('interactive', False):
-            content_type_display = '\n'.join([
+            content_type_display = '\n'.join(
                 ' %s | %s' % (ct.app_label, ct.model)
                 for ct in to_remove
-            ])
+            )
             ok_to_delete = input("""The following content types are stale and need to be deleted:
 %s

View File

@@ -32,7 +32,7 @@ class GeoAggregate(Aggregate):
         if hasattr(self.col, 'as_sql'):
             field_name, params = self.col.as_sql(qn, connection)
         elif isinstance(self.col, (list, tuple)):
-            field_name = '.'.join([qn(c) for c in self.col])
+            field_name = '.'.join(qn(c) for c in self.col)
         else:
             field_name = self.col

View File

@@ -15,7 +15,7 @@ class GeoFeedMixin(object):
         a single white space. Given a tuple of coordinates, this will return
         a unicode GeoRSS representation.
         """
-        return ' '.join(['%f %f' % (coord[1], coord[0]) for coord in coords])
+        return ' '.join('%f %f' % (coord[1], coord[0]) for coord in coords)
     def add_georss_point(self, handler, coords, w3c_geo=False):
         """

View File

@@ -575,7 +575,7 @@ class LineString(OGRGeometry):
     @property
     def tuple(self):
         "Returns the tuple representation of this LineString."
-        return tuple([self[i] for i in xrange(len(self))])
+        return tuple(self[i] for i in xrange(len(self)))
     coords = tuple
     def _listarr(self, func):
@@ -632,14 +632,14 @@ class Polygon(OGRGeometry):
     @property
     def tuple(self):
         "Returns a tuple of LinearRing coordinate tuples."
-        return tuple([self[i].tuple for i in xrange(self.geom_count)])
+        return tuple(self[i].tuple for i in xrange(self.geom_count))
     coords = tuple
     @property
     def point_count(self):
         "The number of Points in this Polygon."
         # Summing up the number of points in each ring of the Polygon.
-        return sum([self[i].point_count for i in xrange(self.geom_count)])
+        return sum(self[i].point_count for i in xrange(self.geom_count))
     @property
     def centroid(self):
@@ -686,12 +686,12 @@ class GeometryCollection(OGRGeometry):
     def point_count(self):
         "The number of Points in this Geometry Collection."
         # Summing up the number of points in each geometry in this collection
-        return sum([self[i].point_count for i in xrange(self.geom_count)])
+        return sum(self[i].point_count for i in xrange(self.geom_count))
     @property
     def tuple(self):
         "Returns a tuple representation of this Geometry Collection."
-        return tuple([self[i].tuple for i in xrange(self.geom_count)])
+        return tuple(self[i].tuple for i in xrange(self.geom_count))
     coords = tuple
 # Multiple Geometry types.

View File

@@ -82,7 +82,7 @@ class GeometryCollection(GEOSGeometry):
     @property
     def kml(self):
         "Returns the KML for this Geometry Collection."
-        return '<MultiGeometry>%s</MultiGeometry>' % ''.join([g.kml for g in self])
+        return '<MultiGeometry>%s</MultiGeometry>' % ''.join(g.kml for g in self)
     @property
     def tuple(self):

View File

@@ -147,11 +147,11 @@ class GEOSCoordSeq(GEOSBase):
         if self.hasz: substr = '%s,%s,%s '
         else: substr = '%s,%s,0 '
         return '<coordinates>%s</coordinates>' % \
-               ''.join([substr % self[i] for i in xrange(len(self))]).strip()
+               ''.join(substr % self[i] for i in xrange(len(self))).strip()
     @property
     def tuple(self):
         "Returns a tuple version of this coordinate sequence."
         n = self.size
         if n == 1: return self[0]
-        else: return tuple([self[i] for i in xrange(n)])
+        else: return tuple(self[i] for i in xrange(n))

View File

@@ -159,12 +159,12 @@ class Polygon(GEOSGeometry):
     @property
     def tuple(self):
         "Gets the tuple for each ring in this Polygon."
-        return tuple([self[i].tuple for i in xrange(len(self))])
+        return tuple(self[i].tuple for i in xrange(len(self)))
     coords = tuple
     @property
     def kml(self):
         "Returns the KML representation of this Polygon."
-        inner_kml = ''.join(["<innerBoundaryIs>%s</innerBoundaryIs>" % self[i+1].kml
-                             for i in xrange(self.num_interior_rings)])
+        inner_kml = ''.join("<innerBoundaryIs>%s</innerBoundaryIs>" % self[i+1].kml
+                            for i in xrange(self.num_interior_rings))
         return "<Polygon><outerBoundaryIs>%s</outerBoundaryIs>%s</Polygon>" % (self[0].kml, inner_kml)

View File

@@ -838,9 +838,9 @@ class GEOSTest(unittest.TestCase, TestDataMixin):
         # Creating a GeometryCollection WKT string composed of other
         # collections and polygons.
         coll = [mp.wkt for mp in self.geometries.multipolygons if mp.valid]
-        coll.extend([mls.wkt for mls in self.geometries.multilinestrings])
-        coll.extend([p.wkt for p in self.geometries.polygons])
-        coll.extend([mp.wkt for mp in self.geometries.multipoints])
+        coll.extend(mls.wkt for mls in self.geometries.multilinestrings)
+        coll.extend(p.wkt for p in self.geometries.polygons)
+        coll.extend(mp.wkt for mp in self.geometries.multipoints)
         gc_wkt = 'GEOMETRYCOLLECTION(%s)' % ','.join(coll)
         # Should construct ok from WKT

View File

@@ -113,6 +113,6 @@ class Command(LabelCommand):
             rev_mapping = dict((v, k) for k, v in mapping_dict.items())
             output.extend(['', '# Auto-generated `LayerMapping` dictionary for %s model' % model_name,
                            '%s_mapping = {' % model_name.lower()])
-            output.extend([" '%s' : '%s'," % (rev_mapping[ogr_fld], ogr_fld) for ogr_fld in ds[options['layer_key']].fields])
+            output.extend(" '%s' : '%s'," % (rev_mapping[ogr_fld], ogr_fld) for ogr_fld in ds[options['layer_key']].fields)
             output.extend([" '%s' : '%s'," % (options['geom_name'], mapping_dict[options['geom_name']]), '}'])
         return '\n'.join(output) + '\n'

View File

@@ -150,7 +150,7 @@ class GoogleMap(object):
     @property
     def icons(self):
         "Returns a sequence of GIcon objects in this map."
-        return set([marker.icon for marker in self.markers if marker.icon])
+        return set(marker.icon for marker in self.markers if marker.icon)
 class GoogleMapSet(GoogleMap):

View File

@@ -61,7 +61,7 @@ class GOverlayBase(object):
     def latlng_from_coords(self, coords):
         "Generates a JavaScript array of GLatLng objects for the given coordinates."
-        return '[%s]' % ','.join(['new GLatLng(%s,%s)' % (y, x) for x, y in coords])
+        return '[%s]' % ','.join('new GLatLng(%s,%s)' % (y, x) for x, y in coords)
     def add_event(self, event):
         "Attaches a GEvent to the overlay object."

View File

@@ -23,7 +23,7 @@ class GeoAdminTest(TestCase):
     def test_ensure_geographic_media(self):
         geoadmin = admin.site._registry[City]
         admin_js = geoadmin.media.render_js()
-        self.assertTrue(any([geoadmin.openlayers_url in js for js in admin_js]))
+        self.assertTrue(any(geoadmin.openlayers_url in js for js in admin_js))
     def test_olmap_OSM_rendering(self):
         geoadmin = admin.site._registry[City]

View File

@@ -28,7 +28,7 @@ class GeoFeedTest(TestCase):
     def assertChildNodes(self, elem, expected):
         "Taken from syndication/tests.py."
-        actual = set([n.nodeName for n in elem.childNodes])
+        actual = set(n.nodeName for n in elem.childNodes)
         expected = set(expected)
         self.assertEqual(actual, expected)

View File

@@ -32,7 +32,7 @@ class GeoSitemapTest(TestCase):
     def assertChildNodes(self, elem, expected):
         "Taken from syndication/tests.py."
-        actual = set([n.nodeName for n in elem.childNodes])
+        actual = set(n.nodeName for n in elem.childNodes)
         expected = set(expected)
         self.assertEqual(actual, expected)

View File

@@ -30,10 +30,10 @@ def precision_wkt(geom, prec):
     coord_fmt = ' '.join([num_fmt, num_fmt])
     def formatted_coords(coords):
-        return ','.join([coord_fmt % c[:2] for c in coords])
+        return ','.join(coord_fmt % c[:2] for c in coords)
     def formatted_poly(poly):
-        return ','.join(['(%s)' % formatted_coords(r) for r in poly])
+        return ','.join('(%s)' % formatted_coords(r) for r in poly)
     def formatted_geom(g):
         gtype = str(g.geom_type).upper()
@@ -47,11 +47,11 @@ def precision_wkt(geom, prec):
         elif gtype == 'MULTIPOINT':
             yield formatted_coords(g.coords)
         elif gtype == 'MULTIPOLYGON':
-            yield ','.join(['(%s)' % formatted_poly(p) for p in g])
+            yield ','.join('(%s)' % formatted_poly(p) for p in g)
         elif gtype == 'GEOMETRYCOLLECTION':
-            yield ','.join([''.join([wkt for wkt in formatted_geom(child)]) for child in g])
+            yield ','.join(''.join(wkt for wkt in formatted_geom(child)) for child in g)
         else:
             raise TypeError
         yield ')'
-    return ''.join([wkt for wkt in formatted_geom(geom)])
+    return ''.join(wkt for wkt in formatted_geom(geom))

View File

@@ -38,8 +38,8 @@ class MessageDecoder(json.JSONDecoder):
                 return Message(*obj[2:])
             return [self.process_messages(item) for item in obj]
         if isinstance(obj, dict):
-            return dict([(key, self.process_messages(value))
-                         for key, value in six.iteritems(obj)])
+            return dict((key, self.process_messages(value))
+                        for key, value in six.iteritems(obj))
         return obj
     def decode(self, s, **kwargs):

View File

@@ -62,7 +62,7 @@ def paragraph():
     The paragraph consists of between 1 and 4 sentences, inclusive.
     """
-    return ' '.join([sentence() for i in range(random.randint(1, 4))])
+    return ' '.join(sentence() for i in range(random.randint(1, 4)))
 def paragraphs(count, common=True):
     """

View File

@@ -10,6 +10,6 @@ TEMPLATE_FRAGMENT_KEY_TEMPLATE = 'template.cache.%s.%s'
 def make_template_fragment_key(fragment_name, vary_on=None):
     if vary_on is None:
         vary_on = ()
-    key = ':'.join([urlquote(var) for var in vary_on])
+    key = ':'.join(urlquote(var) for var in vary_on)
     args = hashlib.md5(force_bytes(key))
     return TEMPLATE_FRAGMENT_KEY_TEMPLATE % (fragment_name, args.hexdigest())

View File

@@ -114,8 +114,8 @@ def get_commands():
         for app_name in apps:
             try:
                 path = find_management_module(app_name)
-                _commands.update(dict([(name, app_name)
-                                       for name in find_commands(path)]))
+                _commands.update(dict((name, app_name)
+                                      for name in find_commands(path)))
             except ImportError:
                 pass # No management module - ignore this app
@@ -336,7 +336,7 @@ class ManagementUtility(object):
            options = [opt for opt in options if opt[0] not in prev_opts]
             # filter options by current input
-            options = sorted([(k, v) for k, v in options if k.startswith(curr)])
+            options = sorted((k, v) for k, v in options if k.startswith(curr))
             for option in options:
                 opt_label = option[0]
                 # append '=' to options which require args

View File

@@ -164,7 +164,7 @@ class Command(BaseCommand):
             for app_name, model_list in all_models
         )
-        create_models = set([x for x in itertools.chain(*manifest.values())])
+        create_models = set(itertools.chain(*manifest.values()))
         emit_pre_migrate_signal(create_models, self.verbosity, self.interactive, connection.alias)
         # Create the tables for each model

View File

@@ -29,7 +29,7 @@ def sql_create(app, style, connection):
     app_models = models.get_models(app, include_auto_created=True)
     final_output = []
     tables = connection.introspection.table_names()
-    known_models = set([model for model in connection.introspection.installed_models(tables) if model not in app_models])
+    known_models = set(model for model in connection.introspection.installed_models(tables) if model not in app_models)
     pending_references = {}
     for model in app_models:

View File

@@ -52,7 +52,7 @@ def handle_extensions(extensions=('html',), ignored=('py',)):
     for i, ext in enumerate(ext_list):
         if not ext.startswith('.'):
             ext_list[i] = '.%s' % ext_list[i]
-    return set([x for x in ext_list if x.strip('.') not in ignored])
+    return set(x for x in ext_list if x.strip('.') not in ignored)
 def find_command(cmd, path=None, pathext=None):
     if path is None:

View File

@@ -161,7 +161,7 @@ def get_validation_errors(outfile, app=None):
                    for rel_field in f.foreign_related_fields:
                        has_unique_field = has_unique_field or rel_field.unique
                    if not has_unique_field:
-                        e.add(opts, "Field combination '%s' under model '%s' must have a unique=True constraint" % (','.join([rel_field.name for rel_field in f.foreign_related_fields]), f.rel.to.__name__))
+                        e.add(opts, "Field combination '%s' under model '%s' must have a unique=True constraint" % (','.join(rel_field.name for rel_field in f.foreign_related_fields), f.rel.to.__name__))
                else:
                    if not f.foreign_related_fields[0].unique:
                        e.add(opts, "Field '%s' under model '%s' must have a unique=True constraint." % (f.foreign_related_fields[0].name, f.rel.to.__name__))

View File

@@ -62,7 +62,7 @@ class ResolverMatch(object):
     @property
     def view_name(self):
-        return ':'.join([ x for x in [ self.namespace, self.url_name ] if x ])
+        return ':'.join(filter(bool, (self.namespace, self.url_name)))
     def __getitem__(self, index):
         return (self.func, self.args, self.kwargs)[index]
@@ -274,7 +274,7 @@ class RegexURLResolver(LocaleRegexProvider):
                    for matches, pat, defaults in pattern.reverse_dict.getlist(name):
                        new_matches = []
                        for piece, p_args in parent:
-                            new_matches.extend([(piece + suffix, p_args + args) for (suffix, args) in matches])
+                            new_matches.extend((piece + suffix, p_args + args) for (suffix, args) in matches)
                        lookups.appendlist(name, (new_matches, p_pattern + pat, dict(defaults, **pattern.default_kwargs)))
                for namespace, (prefix, sub_pattern) in pattern.namespace_dict.items():
                    namespaces[namespace] = (p_pattern + prefix, sub_pattern)
@@ -321,7 +321,7 @@ class RegexURLResolver(LocaleRegexProvider):
                except Resolver404 as e:
                    sub_tried = e.args[0].get('tried')
                    if sub_tried is not None:
-                        tried.extend([[pattern] + t for t in sub_tried])
+                        tried.extend([pattern] + t for t in sub_tried)
                    else:
                        tried.append([pattern])
                else:

View File

@@ -1255,7 +1255,7 @@ class BaseDatabaseIntrospection(object):
                if not router.allow_migrate(self.connection.alias, model):
                    continue
                tables.add(model._meta.db_table)
-                tables.update([f.m2m_db_table() for f in model._meta.local_many_to_many])
+                tables.update(f.m2m_db_table() for f in model._meta.local_many_to_many)
        tables = list(tables)
        if only_existing:
            existing_tables = self.table_names()

View File

@@ -20,7 +20,7 @@ class SQLCompiler(compiler.SQLCompiler):
     def as_subquery_condition(self, alias, columns, qn):
         qn2 = self.connection.ops.quote_name
         sql, params = self.as_sql()
-        return '(%s) IN (%s)' % (', '.join(['%s.%s' % (qn(alias), qn2(column)) for column in columns]), sql), params
+        return '(%s) IN (%s)' % (', '.join('%s.%s' % (qn(alias), qn2(column)) for column in columns), sql), params
 class SQLInsertCompiler(compiler.SQLInsertCompiler, SQLCompiler):

View File

@@ -54,7 +54,7 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
             SELECT column_name, numeric_precision, numeric_scale FROM information_schema.columns
             WHERE table_name = %s AND table_schema = DATABASE()
             AND data_type='decimal'""", [table_name])
-        numeric_map = dict([(line[0], tuple([int(n) for n in line[1:]])) for line in cursor.fetchall()])
+        numeric_map = dict((line[0], tuple(int(n) for n in line[1:])) for line in cursor.fetchall())
         cursor.execute("SELECT * FROM %s LIMIT 1" % self.connection.ops.quote_name(table_name))
         return [FieldInfo(*((force_text(line[0]),)
@@ -69,7 +69,7 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
         Returns a dictionary of {field_name: field_index} for the given table.
         Indexes are 0-based.
         """
-        return dict([(d[0], i) for i, d in enumerate(self.get_table_description(cursor, table_name))])
+        return dict((d[0], i) for i, d in enumerate(self.get_table_description(cursor, table_name)))
     def get_relations(self, cursor, table_name):
         """

View File

@@ -860,12 +860,12 @@ class FormatStylePlaceholderCursor(object):
     def fetchmany(self, size=None):
         if size is None:
             size = self.arraysize
-        return tuple([_rowfactory(r, self.cursor)
-                      for r in self.cursor.fetchmany(size)])
+        return tuple(_rowfactory(r, self.cursor)
+                     for r in self.cursor.fetchmany(size))
     def fetchall(self):
-        return tuple([_rowfactory(r, self.cursor)
-                      for r in self.cursor.fetchall()])
+        return tuple(_rowfactory(r, self.cursor)
+                     for r in self.cursor.fetchall())
     def var(self, *args):
         return VariableWrapper(self.cursor.var(*args))

View File

@@ -66,7 +66,7 @@ class DatabaseIntrospection(BaseDatabaseIntrospection):
         Returns a dictionary of {field_name: field_index} for the given table.
         Indexes are 0-based.
         """
-        return dict([(d[0], i) for i, d in enumerate(self.get_table_description(cursor, table_name))])
+        return dict((d[0], i) for i, d in enumerate(self.get_table_description(cursor, table_name)))
     def get_relations(self, cursor, table_name):
         """

View File

@@ -57,8 +57,8 @@ class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
         field_maps = list(mapping.items())
         self.execute("INSERT INTO %s (%s) SELECT %s FROM %s;" % (
             self.quote_name(temp_model._meta.db_table),
-            ', '.join([x for x, y in field_maps]),
-            ', '.join([y for x, y in field_maps]),
+            ', '.join(x for x, y in field_maps),
+            ', '.join(y for x, y in field_maps),
             self.quote_name(model._meta.db_table),
         ))
         # Delete the old table

View File

@@ -145,8 +145,8 @@ class MigrationAutodetector(object):
             old_model_state = self.from_state.models[app_label, model_name]
             new_model_state = self.to_state.models[app_label, model_name]
             # New fields
-            old_field_names = set([x for x, y in old_model_state.fields])
-            new_field_names = set([x for x, y in new_model_state.fields])
+            old_field_names = set(x for x, y in old_model_state.fields)
+            new_field_names = set(x for x, y in new_model_state.fields)
             for field_name in new_field_names - old_field_names:
                 field = new_model_state.get_field_by_name(field_name)
                 # Scan to see if this is actually a rename!

View File

@@ -104,7 +104,7 @@ class MigrationWriter(object):
                 imports.update(k_imports)
                 imports.update(v_imports)
                 strings.append((k_string, v_string))
-            return "{%s}" % (", ".join(["%s: %s" % (k, v) for k, v in strings])), imports
+            return "{%s}" % (", ".join("%s: %s" % (k, v) for k, v in strings)), imports
         # Datetimes
         elif isinstance(value, (datetime.datetime, datetime.date)):
             return repr(value), set(["import datetime"])

View File

@@ -159,7 +159,7 @@ class ModelBase(type):
         new_fields = new_class._meta.local_fields + \
                      new_class._meta.local_many_to_many + \
                      new_class._meta.virtual_fields
-        field_names = set([f.name for f in new_fields])
+        field_names = set(f.name for f in new_fields)
         # Basic setup for proxy models.
         if is_proxy:
@@ -321,7 +321,7 @@ class ModelBase(type):
         # Give the class a docstring -- its definition.
         if cls.__doc__ is None:
-            cls.__doc__ = "%s(%s)" % (cls.__name__, ", ".join([f.attname for f in opts.fields]))
+            cls.__doc__ = "%s(%s)" % (cls.__name__, ", ".join(f.attname for f in opts.fields))
         if hasattr(cls, 'get_absolute_url'):
             cls.get_absolute_url = update_wrapper(curry(get_absolute_url, opts, cls.get_absolute_url),

View File

@@ -234,8 +234,8 @@ class Collector(object):
                     found = True
             if not found:
                 return
-        self.data = OrderedDict([(model, self.data[model])
-                                 for model in sorted_models])
+        self.data = OrderedDict((model, self.data[model])
+                                for model in sorted_models)
     def delete(self):
         # sort instance collections

View File

@@ -320,7 +320,7 @@ class Options(object):
                     cache.append((field, model))
                 else:
                     cache.append((field, parent))
-        cache.extend([(f, None) for f in self.local_fields])
+        cache.extend((f, None) for f in self.local_fields)
         self._field_cache = tuple(cache)
         self._field_name_cache = [x for x, _ in cache]

View File

@@ -1206,7 +1206,7 @@ class ValuesListQuerySet(ValuesQuerySet):
             for row in self.query.get_compiler(self.db).results_iter():
                 data = dict(zip(names, row))
-                yield tuple([data[f] for f in fields])
+                yield tuple(data[f] for f in fields)
     def _clone(self, *args, **kwargs):
         clone = super(ValuesListQuerySet, self)._clone(*args, **kwargs)

View File

@@ -77,7 +77,7 @@ class Aggregate(object):
         if hasattr(self.col, 'as_sql'):
             field_name, params = self.col.as_sql(qn, connection)
         elif isinstance(self.col, (list, tuple)):
-            field_name = '.'.join([qn(c) for c in self.col])
+            field_name = '.'.join(qn(c) for c in self.col)
         else:
             field_name = self.col

View File

@@ -718,11 +718,11 @@ class SQLCompiler(object):
                    loaded_fields = self.query.get_loaded_field_names().get(self.query.model, set()) or self.query.select
                    aggregate_start = len(self.query.extra_select) + len(loaded_fields)
                    aggregate_end = aggregate_start + len(self.query.aggregate_select)
-                    row = tuple(row[:aggregate_start]) + tuple([
+                    row = tuple(row[:aggregate_start]) + tuple(
                        self.query.resolve_aggregate(value, aggregate, self.connection)
                        for (alias, aggregate), value
                        in zip(self.query.aggregate_select.items(), row[aggregate_start:aggregate_end])
-                    ]) + tuple(row[aggregate_end:])
+                    ) + tuple(row[aggregate_end:])
                yield row
@@ -827,7 +827,7 @@ class SQLInsertCompiler(SQLCompiler):
        has_fields = bool(self.query.fields)
        fields = self.query.fields if has_fields else [opts.pk]
-        result.append('(%s)' % ', '.join([qn(f.column) for f in fields]))
+        result.append('(%s)' % ', '.join(qn(f.column) for f in fields))
        if has_fields:
            params = values = [
@@ -1007,7 +1007,7 @@ class SQLUpdateCompiler(SQLCompiler):
            # selecting from the updating table (e.g. MySQL).
            idents = []
            for rows in query.get_compiler(self.using).execute_sql(MULTI):
-                idents.extend([r[0] for r in rows])
+                idents.extend(r[0] for r in rows)
            self.query.add_filter(('pk__in', idents))
            self.query.related_ids = idents
        else:

View File

@@ -371,11 +371,11 @@ class Query(object):
         if result is None:
             result = [None for q in query.aggregate_select.items()]
-        return dict([
+        return dict(
             (alias, self.resolve_aggregate(val, aggregate, connection=connections[using]))
             for (alias, aggregate), val
             in zip(query.aggregate_select.items(), result)
-        ])
+        )
     def get_count(self, using):
         """
@@ -1755,7 +1755,7 @@ class Query(object):
         """
         Callback used by get_deferred_field_names().
         """
-        target[model] = set([f.name for f in fields])
+        target[model] = set(f.name for f in fields)
     def set_aggregate_mask(self, names):
         "Set the mask of aggregates that will actually be returned by the SELECT"
@@ -1790,10 +1790,10 @@ class Query(object):
         if self._aggregate_select_cache is not None:
             return self._aggregate_select_cache
         elif self.aggregate_select_mask is not None:
-            self._aggregate_select_cache = OrderedDict([
+            self._aggregate_select_cache = OrderedDict(
                 (k, v) for k, v in self.aggregates.items()
                 if k in self.aggregate_select_mask
-            ])
+            )
             return self._aggregate_select_cache
         else:
             return self.aggregates
@@ -1803,10 +1803,10 @@ class Query(object):
         if self._extra_select_cache is not None:
             return self._extra_select_cache
         elif self.extra_select_mask is not None:
-            self._extra_select_cache = OrderedDict([
+            self._extra_select_cache = OrderedDict(
                 (k, v) for k, v in self.extra.items()
                 if k in self.extra_select_mask
-            ])
+            )
             return self._extra_select_cache
         else:
             return self.extra

View File

@@ -874,8 +874,8 @@ class MultipleChoiceField(ChoiceField):
             data = []
         if len(initial) != len(data):
             return True
-        initial_set = set([force_text(value) for value in initial])
-        data_set = set([force_text(value) for value in data])
+        initial_set = set(force_text(value) for value in initial)
+        data_set = set(force_text(value) for value in data)
         return data_set != initial_set

View File

@@ -380,17 +380,17 @@ class BaseFormSet(object):
         # XXX: there is no semantic division between forms here, there
         # probably should be. It might make sense to render each form as a
         # table row with each field as a td.
-        forms = ' '.join([form.as_table() for form in self])
+        forms = ' '.join(form.as_table() for form in self)
         return mark_safe('\n'.join([six.text_type(self.management_form), forms]))
     def as_p(self):
         "Returns this formset rendered as HTML <p>s."
-        forms = ' '.join([form.as_p() for form in self])
+        forms = ' '.join(form.as_p() for form in self)
         return mark_safe('\n'.join([six.text_type(self.management_form), forms]))
     def as_ul(self):
         "Returns this formset rendered as HTML <li>s."
-        forms = ' '.join([form.as_ul() for form in self])
+        forms = ' '.join(form.as_ul() for form in self)
         return mark_safe('\n'.join([six.text_type(self.management_form), forms]))
 def formset_factory(form, formset=BaseFormSet, extra=1, can_order=False,

View File

@@ -1181,7 +1181,7 @@ class ModelMultipleChoiceField(ModelChoiceField):
                     params={'pk': pk},
                 )
         qs = self.queryset.filter(**{'%s__in' % key: value})
-        pks = set([force_text(getattr(o, key)) for o in qs])
+        pks = set(force_text(getattr(o, key)) for o in qs)
         for val in value:
             if force_text(val) not in pks:
                 raise ValidationError(
@@ -1208,8 +1208,8 @@ class ModelMultipleChoiceField(ModelChoiceField):
             data = []
         if len(initial) != len(data):
             return True
-        initial_set = set([force_text(value) for value in self.prepare_value(initial)])
-        data_set = set([force_text(value) for value in data])
+        initial_set = set(force_text(value) for value in self.prepare_value(initial))
+        data_set = set(force_text(value) for value in data)
         return data_set != initial_set

View File

@@ -49,7 +49,7 @@ class ErrorDict(dict):
         ))
     def as_text(self):
-        return '\n'.join(['* %s\n%s' % (k, '\n'.join([' * %s' % force_text(i) for i in v])) for k, v in self.items()])
+        return '\n'.join('* %s\n%s' % (k, '\n'.join(' * %s' % force_text(i) for i in v)) for k, v in self.items())
 @python_2_unicode_compatible
 class ErrorList(list):
@@ -69,7 +69,7 @@ class ErrorList(list):
     def as_text(self):
         if not self: return ''
-        return '\n'.join(['* %s' % force_text(e) for e in self])
+        return '\n'.join('* %s' % force_text(e) for e in self)
     def __repr__(self):
         return repr([force_text(e) for e in self])

View File

@@ -88,8 +88,8 @@ class VariableDoesNotExist(Exception):
         self.params = params
     def __str__(self):
-        return self.msg % tuple([force_text(p, errors='replace')
-                                 for p in self.params])
+        return self.msg % tuple(force_text(p, errors='replace')
+                                for p in self.params)
 class InvalidTemplateLibrary(Exception):
     pass
@@ -1012,7 +1012,7 @@ def parse_bits(parser, bits, params, varargs, varkw, defaults,
         # Some positional arguments were not supplied
         raise TemplateSyntaxError(
             "'%s' did not receive value(s) for the argument(s): %s" %
-            (name, ", ".join(["'%s'" % p for p in unhandled_params])))
+            (name, ", ".join("'%s'" % p for p in unhandled_params)))
     return args, kwargs
 def generic_tag_compiler(parser, token, params, varargs, varkw, defaults,

View File

@@ -205,7 +205,7 @@ class ForNode(Node):
                 # don't want to leave any vars from the previous loop on the
                 # context.
                 context.pop()
-        return mark_safe(''.join([force_text(n) for n in nodelist]))
+        return mark_safe(''.join(force_text(n) for n in nodelist))
 class IfChangedNode(Node):
     child_nodelists = ('nodelist_true', 'nodelist_false')
@@ -410,8 +410,8 @@ class URLNode(Node):
     def render(self, context):
         from django.core.urlresolvers import reverse, NoReverseMatch
         args = [arg.resolve(context) for arg in self.args]
-        kwargs = dict([(smart_text(k, 'ascii'), v.resolve(context))
-                       for k, v in self.kwargs.items()])
+        kwargs = dict((smart_text(k, 'ascii'), v.resolve(context))
+                      for k, v in self.kwargs.items())
         view_name = self.view_name.resolve(context)
@@ -502,8 +502,8 @@ class WithNode(Node):
         return "<WithNode>"
     def render(self, context):
-        values = dict([(key, val.resolve(context)) for key, val in
-                       six.iteritems(self.extra_context)])
+        values = dict((key, val.resolve(context)) for key, val in
+                      six.iteritems(self.extra_context))
         with context.push(**values):
             return self.nodelist.render(context)

View File

@@ -112,8 +112,8 @@ class ExtendsNode(Node):
             # The ExtendsNode has to be the first non-text node.
             if not isinstance(node, TextNode):
                 if not isinstance(node, ExtendsNode):
-                    blocks = dict([(n.name, n) for n in
-                                   compiled_parent.nodelist.get_nodes_by_type(BlockNode)])
+                    blocks = dict((n.name, n) for n in
+                                  compiled_parent.nodelist.get_nodes_by_type(BlockNode))
                     block_context.add_blocks(blocks)
                 break

View File

@@ -598,7 +598,7 @@ class DocTestParser:
         # If all lines begin with the same indentation, then strip it.
         min_indent = self._min_indent(string)
         if min_indent > 0:
-            string = '\n'.join([l[min_indent:] for l in string.split('\n')])
+            string = '\n'.join(l[min_indent:] for l in string.split('\n'))
         output = []
         charno, lineno = 0, 0
@@ -670,7 +670,7 @@ class DocTestParser:
         source_lines = m.group('source').split('\n')
         self._check_prompt_blank(source_lines, indent, name, lineno)
         self._check_prefix(source_lines[1:], ' '*indent + '.', name, lineno)
-        source = '\n'.join([sl[indent+4:] for sl in source_lines])
+        source = '\n'.join(sl[indent+4:] for sl in source_lines)
         # Divide want into lines; check that it's properly indented; and
         # then strip the indentation. Spaces before the last newline should
@@ -681,7 +681,7 @@ class DocTestParser:
             del want_lines[-1] # forget final newline & spaces after it
         self._check_prefix(want_lines, ' '*indent, name,
                            lineno + len(source_lines))
-        want = '\n'.join([wl[indent:] for wl in want_lines])
+        want = '\n'.join(wl[indent:] for wl in want_lines)
         # If `want` contains a traceback message, then extract it.
         m = self._EXCEPTION_RE.match(want)

View File

@@ -76,7 +76,7 @@ def patch_cache_control(response, **kwargs):
     for (k, v) in kwargs.items():
         cc[k.replace('_', '-')] = v
-    cc = ', '.join([dictvalue(el) for el in cc.items()])
+    cc = ', '.join(dictvalue(el) for el in cc.items())
     response['Cache-Control'] = cc
 def get_max_age(response):
@@ -86,8 +86,8 @@ def get_max_age(response):
     """
     if not response.has_header('Cache-Control'):
         return
-    cc = dict([_to_tuple(el) for el in
-               cc_delim_re.split(response['Cache-Control'])])
+    cc = dict(_to_tuple(el) for el in
+              cc_delim_re.split(response['Cache-Control']))
     if 'max-age' in cc:
         try:
             return int(cc['max-age'])
@@ -144,7 +144,7 @@ def patch_vary_headers(response, newheaders):
     else:
         vary_headers = []
     # Use .lower() here so we treat headers as case-insensitive.
-    existing_headers = set([header.lower() for header in vary_headers])
+    existing_headers = set(header.lower() for header in vary_headers)
     additional_headers = [newheader for newheader in newheaders
                           if newheader.lower() not in existing_headers]
     response['Vary'] = ', '.join(vary_headers + additional_headers)
@@ -156,7 +156,7 @@ def has_vary_header(response, header_query):
     if not response.has_header('Vary'):
         return False
     vary_headers = cc_delim_re.split(response['Vary'])
-    existing_headers = set([header.lower() for header in vary_headers])
+    existing_headers = set(header.lower() for header in vary_headers)
     return header_query.lower() in existing_headers
 def _i18n_cache_key_suffix(request, cache_key):

View File

@@ -17,8 +17,8 @@ def luhn(candidate):
     if not isinstance(candidate, six.string_types):
         candidate = str(candidate)
     try:
-        evens = sum([int(c) for c in candidate[-1::-2]])
-        odds = sum([LUHN_ODD_LOOKUP[int(c)] for c in candidate[-2::-2]])
+        evens = sum(int(c) for c in candidate[-1::-2])
+        odds = sum(LUHN_ODD_LOOKUP[int(c)] for c in candidate[-2::-2])
         return ((evens + odds) % 10 == 0)
     except ValueError: # Raised if an int conversion fails
         return False

View File

@@ -73,7 +73,7 @@ def get_random_string(length=12,
                     time.time(),
                     settings.SECRET_KEY)).encode('utf-8')
             ).digest())
-    return ''.join([random.choice(allowed_chars) for i in range(length)])
+    return ''.join(random.choice(allowed_chars) for i in range(length))
 def constant_time_compare(val1, val2):

View File

@@ -231,7 +231,7 @@ class SortedDict(dict):
         Replaces the normal dict.__repr__ with a version that returns the keys
         in their sorted order.
         """
-        return '{%s}' % ', '.join(['%r: %r' % (k, v) for k, v in six.iteritems(self)])
+        return '{%s}' % ', '.join('%r: %r' % (k, v) for k, v in six.iteritems(self))
     def clear(self):
         super(SortedDict, self).clear()

View File

@@ -32,7 +32,7 @@ simple_url_2_re = re.compile(r'^www\.|^(?!http)\w[^@]+\.(com|edu|gov|int|mil|net
 simple_email_re = re.compile(r'^\S+@\S+\.\S+$')
 link_target_attribute_re = re.compile(r'(<a [^>]*?)target=[^\s>]+')
 html_gunk_re = re.compile(r'(?:<br clear="all">|<i><\/i>|<b><\/b>|<em><\/em>|<strong><\/strong>|<\/?smallcaps>|<\/?uppercase>)', re.IGNORECASE)
-hard_coded_bullets_re = re.compile(r'((?:<p>(?:%s).*?[a-zA-Z].*?</p>\s*)+)' % '|'.join([re.escape(x) for x in DOTS]), re.DOTALL)
+hard_coded_bullets_re = re.compile(r'((?:<p>(?:%s).*?[a-zA-Z].*?</p>\s*)+)' % '|'.join(re.escape(x) for x in DOTS), re.DOTALL)
 trailing_empty_content_re = re.compile(r'(?:<p>(?:&nbsp;|\s|<br \/>)*?</p>\s*)+\Z')
@@ -81,8 +81,8 @@ def format_html(format_string, *args, **kwargs):
     of str.format or % interpolation to build up small HTML fragments.
     """
     args_safe = map(conditional_escape, args)
-    kwargs_safe = dict([(k, conditional_escape(v)) for (k, v) in
-                        six.iteritems(kwargs)])
+    kwargs_safe = dict((k, conditional_escape(v)) for (k, v) in
+                       six.iteritems(kwargs))
     return mark_safe(format_string.format(*args_safe, **kwargs_safe))
 def format_html_join(sep, format_string, args_generator):

View File

@@ -5,8 +5,8 @@ termcolors.py
 from django.utils import six
 color_names = ('black', 'red', 'green', 'yellow', 'blue', 'magenta', 'cyan', 'white')
-foreground = dict([(color_names[x], '3%s' % x) for x in range(8)])
-background = dict([(color_names[x], '4%s' % x) for x in range(8)])
+foreground = dict((color_names[x], '3%s' % x) for x in range(8))
+background = dict((color_names[x], '4%s' % x) for x in range(8))
 RESET = '0'
 opt_dict = {'bold': '1', 'underscore': '4', 'blink': '5', 'reverse': '7', 'conceal': '8'}

View File

@@ -238,7 +238,7 @@ def get_text_list(list_, last_word=ugettext_lazy('or')):
     if len(list_) == 1: return force_text(list_[0])
     return '%s %s %s' % (
         # Translators: This string is used as a separator between list elements
-        _(', ').join([force_text(i) for i in list_][:-1]),
+        _(', ').join(force_text(i) for i in list_[:-1]),
         force_text(last_word), force_text(list_[-1]))
 get_text_list = allow_lazy(get_text_list, six.text_type)

View File

@@ -179,7 +179,7 @@ def _string_concat(*strings):
     Lazy variant of string concatenation, needed for translations that are
     constructed from multiple parts.
     """
-    return ''.join([force_text(s) for s in strings])
+    return ''.join(force_text(s) for s in strings)
 string_concat = lazy(_string_concat, six.text_type)
 def get_language_info(lang_code):

View File

@@ -79,7 +79,7 @@ class OracleChecks(unittest.TestCase):
         # than 4000 chars and read it properly
         c = connection.cursor()
         c.execute('CREATE TABLE ltext ("TEXT" NCLOB)')
-        long_str = ''.join([six.text_type(x) for x in xrange(4000)])
+        long_str = ''.join(six.text_type(x) for x in xrange(4000))
         c.execute('INSERT INTO ltext VALUES (%s)', [long_str])
         c.execute('SELECT text FROM ltext')
         row = c.fetchone()

View File

@@ -89,14 +89,14 @@ def file_upload_echo(request):
     """
     Simple view to echo back info about uploaded files for tests.
     """
-    r = dict([(k, f.name) for k, f in request.FILES.items()])
+    r = dict((k, f.name) for k, f in request.FILES.items())
     return HttpResponse(json.dumps(r))
 def file_upload_echo_content(request):
     """
     Simple view to echo back the content of uploaded files for tests.
     """
-    r = dict([(k, f.read().decode('utf-8')) for k, f in request.FILES.items()])
+    r = dict((k, f.read().decode('utf-8')) for k, f in request.FILES.items())
     return HttpResponse(json.dumps(r))
 def file_upload_quota(request):

View File

@@ -221,7 +221,7 @@ class FormsErrorMessagesTestCase(TestCase, AssertFormErrorsMixin):
             def as_divs(self):
                 if not self: return ''
-                return mark_safe('<div class="error">%s</div>' % ''.join(['<p>%s</p>' % e for e in self]))
+                return mark_safe('<div class="error">%s</div>' % ''.join('<p>%s</p>' % e for e in self))
         # This form should print errors the default way.
         form1 = TestForm({'first_name': 'John'})

View File

@@ -723,7 +723,7 @@ class FormsExtraTestCase(TestCase, AssertFormErrorsMixin):
             def as_divs(self):
                 if not self: return ''
-                return '<div class="errorlist">%s</div>' % ''.join(['<div class="error">%s</div>' % force_text(e) for e in self])
+                return '<div class="errorlist">%s</div>' % ''.join('<div class="error">%s</div>' % force_text(e) for e in self)
         class CommentForm(Form):
             name = CharField(max_length=50, required=False)
@@ -437,11 +437,11 @@ class FormsTestCase(TestCase):
             name = ChoiceField(choices=[('john', 'John'), ('paul', 'Paul'), ('george', 'George'), ('ringo', 'Ringo')], widget=RadioSelect)
 
         f = BeatleForm(auto_id=False)
-        self.assertHTMLEqual('\n'.join([str(bf) for bf in f['name']]), """<label><input type="radio" name="name" value="john" /> John</label>
+        self.assertHTMLEqual('\n'.join(str(bf) for bf in f['name']), """<label><input type="radio" name="name" value="john" /> John</label>
 <label><input type="radio" name="name" value="paul" /> Paul</label>
 <label><input type="radio" name="name" value="george" /> George</label>
 <label><input type="radio" name="name" value="ringo" /> Ringo</label>""")
-        self.assertHTMLEqual('\n'.join(['<div>%s</div>' % bf for bf in f['name']]), """<div><label><input type="radio" name="name" value="john" /> John</label></div>
+        self.assertHTMLEqual('\n'.join('<div>%s</div>' % bf for bf in f['name']), """<div><label><input type="radio" name="name" value="john" /> John</label></div>
 <div><label><input type="radio" name="name" value="paul" /> Paul</label></div>
 <div><label><input type="radio" name="name" value="george" /> George</label></div>
 <div><label><input type="radio" name="name" value="ringo" /> Ringo</label></div>""")
@@ -452,7 +452,7 @@ class FormsTestCase(TestCase):
             name = CharField()
 
         f = BeatleForm(auto_id=False)
-        self.assertHTMLEqual('\n'.join([str(bf) for bf in f['name']]), '<input type="text" name="name" />')
+        self.assertHTMLEqual('\n'.join(str(bf) for bf in f['name']), '<input type="text" name="name" />')
 
     def test_forms_with_multiple_choice(self):
         # MultipleChoiceField is a special case, as its data is required to be a list:
@@ -900,7 +900,7 @@ class FormsFormsetTestCase(TestCase):
         }
 
         formset = AnotherChoiceFormSet(data, auto_id=False, prefix='choices')
         self.assertTrue(formset.is_valid())
-        self.assertTrue(all([form.is_valid_called for form in formset.forms]))
+        self.assertTrue(all(form.is_valid_called for form in formset.forms))
 
     def test_hard_limit_on_instantiated_forms(self):
         """A formset has a hard limit on the number of forms instantiated."""
@@ -640,7 +640,7 @@ beatle J R Ringo False""")
         # You can create your own custom renderers for RadioSelect to use.
         class MyRenderer(RadioFieldRenderer):
             def render(self):
-                return '<br />\n'.join([six.text_type(choice) for choice in self])
+                return '<br />\n'.join(six.text_type(choice) for choice in self)
 
         w = RadioSelect(renderer=MyRenderer)
         self.assertHTMLEqual(w.render('beatle', 'G', choices=(('J', 'John'), ('P', 'Paul'), ('G', 'George'), ('R', 'Ringo'))), """<label><input type="radio" name="beatle" value="J" /> John</label><br />
 <label><input type="radio" name="beatle" value="P" /> Paul</label><br />
@@ -835,17 +835,17 @@ beatle J R Ringo False""")
     def test_subwidget(self):
         # Each subwidget tag gets a separate ID when the widget has an ID specified
-        self.assertHTMLEqual("\n".join([c.tag() for c in CheckboxSelectMultiple(attrs={'id': 'abc'}).subwidgets('letters', list('ac'), choices=zip(list('abc'), list('ABC')))]), """<input checked="checked" type="checkbox" name="letters" value="a" id="abc_0" />
+        self.assertHTMLEqual("\n".join(c.tag() for c in CheckboxSelectMultiple(attrs={'id': 'abc'}).subwidgets('letters', list('ac'), choices=zip(list('abc'), list('ABC')))), """<input checked="checked" type="checkbox" name="letters" value="a" id="abc_0" />
 <input type="checkbox" name="letters" value="b" id="abc_1" />
 <input checked="checked" type="checkbox" name="letters" value="c" id="abc_2" />""")
 
         # Each subwidget tag does not get an ID if the widget does not have an ID specified
-        self.assertHTMLEqual("\n".join([c.tag() for c in CheckboxSelectMultiple().subwidgets('letters', list('ac'), choices=zip(list('abc'), list('ABC')))]), """<input checked="checked" type="checkbox" name="letters" value="a" />
+        self.assertHTMLEqual("\n".join(c.tag() for c in CheckboxSelectMultiple().subwidgets('letters', list('ac'), choices=zip(list('abc'), list('ABC')))), """<input checked="checked" type="checkbox" name="letters" value="a" />
 <input type="checkbox" name="letters" value="b" />
 <input checked="checked" type="checkbox" name="letters" value="c" />""")
 
         # The id_for_label property of the subwidget should return the ID that is used on the subwidget's tag
-        self.assertHTMLEqual("\n".join(['<input type="checkbox" name="letters" value="%s" id="%s" />' % (c.choice_value, c.id_for_label) for c in CheckboxSelectMultiple(attrs={'id': 'abc'}).subwidgets('letters', [], choices=zip(list('abc'), list('ABC')))]), """<input type="checkbox" name="letters" value="a" id="abc_0" />
+        self.assertHTMLEqual("\n".join('<input type="checkbox" name="letters" value="%s" id="%s" />' % (c.choice_value, c.id_for_label) for c in CheckboxSelectMultiple(attrs={'id': 'abc'}).subwidgets('letters', [], choices=zip(list('abc'), list('ABC')))), """<input type="checkbox" name="letters" value="a" id="abc_0" />
 <input type="checkbox" name="letters" value="b" id="abc_1" />
 <input type="checkbox" name="letters" value="c" id="abc_2" />""")
@@ -79,7 +79,7 @@ class ProxyModelTests(TestCase):
         Person.objects.create(name="Foo McBar")
         MyPerson.objects.create(name="Bazza del Frob")
         LowerStatusPerson.objects.create(status="low", name="homer")
-        pp = sorted([mpp.name for mpp in MyPersonProxy.objects.all()])
+        pp = sorted(mpp.name for mpp in MyPersonProxy.objects.all())
         self.assertEqual(pp, ['Bazza del Frob', 'Foo McBar', 'homer'])
 
     def test_proxy_included_in_ancestors(self):
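Note (illustrative sketch, not part of the patch): sorted(), like set() and the other iterable-consuming builtins touched by this commit, behaves identically when fed a generator expression; only the temporary list disappears. A standalone example with made-up names:

    # Hypothetical data, purely for illustration; not Django code.
    names = ['homer', 'Foo McBar', 'Bazza del Frob']

    # sorted() accepts any iterable, so no intermediate list is needed.
    assert sorted(n.lower() for n in names) == ['bazza del frob', 'foo mcbar', 'homer']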
@@ -42,7 +42,7 @@ class SchemaTests(TransactionTestCase):
                             "table": connection.ops.quote_name(field.rel.through._meta.db_table),
                         })
                     except DatabaseError as e:
-                        if any([s in str(e).lower() for s in self.no_table_strings]):
+                        if any(s in str(e).lower() for s in self.no_table_strings):
                             pass
                         else:
                             raise
@@ -53,7 +53,7 @@ class SchemaTests(TransactionTestCase):
                         "table": connection.ops.quote_name(model._meta.db_table),
                     })
                 except DatabaseError as e:
-                    if any([s in str(e).lower() for s in self.no_table_strings]):
+                    if any(s in str(e).lower() for s in self.no_table_strings):
                         pass
                     else:
                         raise
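Note (illustrative sketch, not part of the patch): with any() and all() the generator form is not just equivalent but slightly preferable, because evaluation can stop at the first decisive element instead of building the full list of booleans up front. A standalone example with a made-up error message:

    # Hypothetical values, purely for illustration; not Django code.
    error_message = 'ORA-00942: table or view does not exist'
    no_table_strings = ['does not exist', 'no such table', 'unknown table']

    # any() short-circuits on the first truthy value the generator yields;
    # the list-comprehension version computed every membership test first.
    assert any(s in error_message.lower() for s in no_table_strings)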
@@ -8,7 +8,7 @@ def example_view(request):
 
 def model_view(request):
     people = Person.objects.all()
-    return HttpResponse('\n'.join([person.name for person in people]))
+    return HttpResponse('\n'.join(person.name for person in people))
 
 
 def create_model_instance(request):
@@ -18,4 +18,4 @@ def create_model_instance(request):
 
 
 def environ_view(request):
-    return HttpResponse("\n".join(["%s: %r" % (k, v) for k, v in request.environ.items()]))
+    return HttpResponse("\n".join("%s: %r" % (k, v) for k, v in request.environ.items()))
@@ -15,7 +15,7 @@ class FeedTestCase(TestCase):
     fixtures = ['feeddata.json']
 
     def assertChildNodes(self, elem, expected):
-        actual = set([n.nodeName for n in elem.childNodes])
+        actual = set(n.nodeName for n in elem.childNodes)
         expected = set(expected)
         self.assertEqual(actual, expected)
@@ -64,13 +64,13 @@ simple_one_default.anything = "Expected simple_one_default __dict__"
 @register.simple_tag
 def simple_unlimited_args(one, two='hi', *args):
     """Expected simple_unlimited_args __doc__"""
-    return "simple_unlimited_args - Expected result: %s" % (', '.join([six.text_type(arg) for arg in [one, two] + list(args)]))
+    return "simple_unlimited_args - Expected result: %s" % (', '.join(six.text_type(arg) for arg in [one, two] + list(args)))
 simple_unlimited_args.anything = "Expected simple_unlimited_args __dict__"
 
 @register.simple_tag
 def simple_only_unlimited_args(*args):
     """Expected simple_only_unlimited_args __doc__"""
-    return "simple_only_unlimited_args - Expected result: %s" % ', '.join([six.text_type(arg) for arg in args])
+    return "simple_only_unlimited_args - Expected result: %s" % ', '.join(six.text_type(arg) for arg in args)
 simple_only_unlimited_args.anything = "Expected simple_only_unlimited_args __dict__"
 
 @register.simple_tag
@@ -79,8 +79,8 @@ def simple_unlimited_args_kwargs(one, two='hi', *args, **kwargs):
     # Sort the dictionary by key to guarantee the order for testing.
     sorted_kwarg = sorted(six.iteritems(kwargs), key=operator.itemgetter(0))
     return "simple_unlimited_args_kwargs - Expected result: %s / %s" % (
-        ', '.join([six.text_type(arg) for arg in [one, two] + list(args)]),
-        ', '.join(['%s=%s' % (k, v) for (k, v) in sorted_kwarg])
+        ', '.join(six.text_type(arg) for arg in [one, two] + list(args)),
+        ', '.join('%s=%s' % (k, v) for (k, v) in sorted_kwarg)
     )
 simple_unlimited_args_kwargs.anything = "Expected simple_unlimited_args_kwargs __dict__"
@@ -191,25 +191,25 @@ inclusion_one_default_from_template.anything = "Expected inclusion_one_default_f
 @register.inclusion_tag('inclusion.html')
 def inclusion_unlimited_args(one, two='hi', *args):
     """Expected inclusion_unlimited_args __doc__"""
-    return {"result": "inclusion_unlimited_args - Expected result: %s" % (', '.join([six.text_type(arg) for arg in [one, two] + list(args)]))}
+    return {"result": "inclusion_unlimited_args - Expected result: %s" % (', '.join(six.text_type(arg) for arg in [one, two] + list(args)))}
 inclusion_unlimited_args.anything = "Expected inclusion_unlimited_args __dict__"
 
 @register.inclusion_tag(get_template('inclusion.html'))
 def inclusion_unlimited_args_from_template(one, two='hi', *args):
     """Expected inclusion_unlimited_args_from_template __doc__"""
-    return {"result": "inclusion_unlimited_args_from_template - Expected result: %s" % (', '.join([six.text_type(arg) for arg in [one, two] + list(args)]))}
+    return {"result": "inclusion_unlimited_args_from_template - Expected result: %s" % (', '.join(six.text_type(arg) for arg in [one, two] + list(args)))}
 inclusion_unlimited_args_from_template.anything = "Expected inclusion_unlimited_args_from_template __dict__"
 
 @register.inclusion_tag('inclusion.html')
 def inclusion_only_unlimited_args(*args):
     """Expected inclusion_only_unlimited_args __doc__"""
-    return {"result": "inclusion_only_unlimited_args - Expected result: %s" % (', '.join([six.text_type(arg) for arg in args]))}
+    return {"result": "inclusion_only_unlimited_args - Expected result: %s" % (', '.join(six.text_type(arg) for arg in args))}
 inclusion_only_unlimited_args.anything = "Expected inclusion_only_unlimited_args __dict__"
 
 @register.inclusion_tag(get_template('inclusion.html'))
 def inclusion_only_unlimited_args_from_template(*args):
     """Expected inclusion_only_unlimited_args_from_template __doc__"""
-    return {"result": "inclusion_only_unlimited_args_from_template - Expected result: %s" % (', '.join([six.text_type(arg) for arg in args]))}
+    return {"result": "inclusion_only_unlimited_args_from_template - Expected result: %s" % (', '.join(six.text_type(arg) for arg in args))}
 inclusion_only_unlimited_args_from_template.anything = "Expected inclusion_only_unlimited_args_from_template __dict__"
 
 @register.inclusion_tag('test_incl_tag_current_app.html', takes_context=True)
@@ -230,8 +230,8 @@ def inclusion_unlimited_args_kwargs(one, two='hi', *args, **kwargs):
     # Sort the dictionary by key to guarantee the order for testing.
     sorted_kwarg = sorted(six.iteritems(kwargs), key=operator.itemgetter(0))
     return {"result": "inclusion_unlimited_args_kwargs - Expected result: %s / %s" % (
-        ', '.join([six.text_type(arg) for arg in [one, two] + list(args)]),
-        ', '.join(['%s=%s' % (k, v) for (k, v) in sorted_kwarg])
+        ', '.join(six.text_type(arg) for arg in [one, two] + list(args)),
+        ', '.join('%s=%s' % (k, v) for (k, v) in sorted_kwarg)
     )}
 inclusion_unlimited_args_kwargs.anything = "Expected inclusion_unlimited_args_kwargs __dict__"
@@ -286,13 +286,13 @@ assignment_one_default.anything = "Expected assignment_one_default __dict__"
 @register.assignment_tag
 def assignment_unlimited_args(one, two='hi', *args):
     """Expected assignment_unlimited_args __doc__"""
-    return "assignment_unlimited_args - Expected result: %s" % (', '.join([six.text_type(arg) for arg in [one, two] + list(args)]))
+    return "assignment_unlimited_args - Expected result: %s" % (', '.join(six.text_type(arg) for arg in [one, two] + list(args)))
 assignment_unlimited_args.anything = "Expected assignment_unlimited_args __dict__"
 
 @register.assignment_tag
 def assignment_only_unlimited_args(*args):
     """Expected assignment_only_unlimited_args __doc__"""
-    return "assignment_only_unlimited_args - Expected result: %s" % ', '.join([six.text_type(arg) for arg in args])
+    return "assignment_only_unlimited_args - Expected result: %s" % ', '.join(six.text_type(arg) for arg in args)
 assignment_only_unlimited_args.anything = "Expected assignment_only_unlimited_args __dict__"
 
 @register.assignment_tag
@@ -301,8 +301,8 @@ def assignment_unlimited_args_kwargs(one, two='hi', *args, **kwargs):
     # Sort the dictionary by key to guarantee the order for testing.
     sorted_kwarg = sorted(six.iteritems(kwargs), key=operator.itemgetter(0))
     return "assignment_unlimited_args_kwargs - Expected result: %s / %s" % (
-        ', '.join([six.text_type(arg) for arg in [one, two] + list(args)]),
-        ', '.join(['%s=%s' % (k, v) for (k, v) in sorted_kwarg])
+        ', '.join(six.text_type(arg) for arg in [one, two] + list(args)),
+        ', '.join('%s=%s' % (k, v) for (k, v) in sorted_kwarg)
     )
 assignment_unlimited_args_kwargs.anything = "Expected assignment_unlimited_args_kwargs __dict__"
@@ -507,7 +507,7 @@ class TemplateTests(TransRealMixin, TestCase):
         template_tests.update(filter_tests)
 
         cache_loader = setup_test_template_loader(
-            dict([(name, t[0]) for name, t in six.iteritems(template_tests)]),
+            dict((name, t[0]) for name, t in six.iteritems(template_tests)),
             use_cached_loader=True,
         )