Fixed up some more flake8 violations (this particular violation still has many occurrences in the tests/ dir so it can't be removed from setup.cfg yet)

Alex Gaynor 2013-10-26 10:50:40 -07:00
parent cc2049cdd7
commit f2d8027c9a
23 changed files with 68 additions and 68 deletions
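For context, flake8 reads its ignore list from the [flake8] section of setup.cfg. The commit message doesn't name the violation, but judging from the diffs below it is most likely E231 (missing whitespace after ',' or ':'); treat the following as a hypothetical sketch of that section, not the actual contents of Django's setup.cfg at the time:

[flake8]
# Hypothetical sketch -- the real file also lists other codes.
# E231 (missing whitespace after ',' / ':') has to stay ignored until the
# remaining occurrences under tests/ are cleaned up.
ignore = E231

Once tests/ is clean, the code could be dropped from the ignore list; something like `flake8 --select=E231 tests/` (assuming a flake8 version that supports --select) would list the remaining offenders.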

View File

@@ -62,7 +62,7 @@ class LineString(GEOSGeometry):
         for i in xrange(ncoords):
             if numpy_coords:
-                cs[i] = coords[i,:]
+                cs[i] = coords[i, :]
             elif isinstance(coords[i], Point):
                 cs[i] = coords[i].tuple
             else:

View File

@@ -109,7 +109,7 @@ class GEOSMutationTest(unittest.TestCase):
         # _set_single
         ls._set_single(0, (-50, 25))
-        self.assertEqual(ls.coords, ((-50.0, 25.0),(4.0, 1.0),(6.0, -1.0)), 'LineString _set_single')
+        self.assertEqual(ls.coords, ((-50.0, 25.0), (4.0, 1.0), (6.0, -1.0)), 'LineString _set_single')
         # _set_list
         ls._set_list(2, ((-50.0, 25.0), (6.0, -1.0)))
@@ -121,23 +121,23 @@ class GEOSMutationTest(unittest.TestCase):
     def test05_Polygon(self):
         'Testing Polygon mutations'
-        for pg in (Polygon(((1,0),(4,1),(6,-1),(8,10),(1,0)),
+        for pg in (Polygon(((1, 0), (4, 1), (6, -1), (8, 10), (1, 0)),
-                           ((5,4),(6,4),(6,3),(5,4))),
+                           ((5, 4), (6, 4), (6, 3), (5, 4))),
                    fromstr('POLYGON ((1 0,4 1,6 -1,8 10,1 0),(5 4,6 4,6 3,5 4))')):
             self.assertEqual(pg._get_single_external(0),
-                             LinearRing((1,0),(4,1),(6,-1),(8,10),(1,0)),
+                             LinearRing((1, 0), (4, 1), (6, -1), (8, 10), (1, 0)),
                              'Polygon _get_single_external(0)')
             self.assertEqual(pg._get_single_external(1),
-                             LinearRing((5,4),(6,4),(6,3),(5,4)),
+                             LinearRing((5, 4), (6, 4), (6, 3), (5, 4)),
                              'Polygon _get_single_external(1)')
             # _set_list
-            pg._set_list(2, (((1,2),(10,0),(12,9),(-1,15),(1,2)),
+            pg._set_list(2, (((1, 2), (10, 0), (12, 9), (-1, 15), (1, 2)),
-                             ((4,2),(5,2),(5,3),(4,2))))
+                             ((4, 2), (5, 2), (5, 3), (4, 2))))
             self.assertEqual(
                 pg.coords,
-                (((1.0,2.0),(10.0,0.0),(12.0,9.0),(-1.0,15.0),(1.0,2.0)),
+                (((1.0, 2.0), (10.0, 0.0), (12.0, 9.0), (-1.0, 15.0), (1.0, 2.0)),
-                 ((4.0,2.0),(5.0,2.0),(5.0,3.0),(4.0,2.0))),
+                 ((4.0, 2.0), (5.0, 2.0), (5.0, 3.0), (4.0, 2.0))),
                 'Polygon _set_list')
             lsa = Polygon(*pg.coords)
@@ -146,13 +146,13 @@ class GEOSMutationTest(unittest.TestCase):
     def test06_Collection(self):
         'Testing Collection mutations'
-        for mp in (MultiPoint(*map(Point,((3,4),(-1,2),(5,-4),(2,8)))),
+        for mp in (MultiPoint(*map(Point, ((3, 4), (-1, 2), (5, -4), (2, 8)))),
                    fromstr('MULTIPOINT (3 4,-1 2,5 -4,2 8)')):
-            self.assertEqual(mp._get_single_external(2), Point(5,-4), 'Collection _get_single_external')
+            self.assertEqual(mp._get_single_external(2), Point(5, -4), 'Collection _get_single_external')
-            mp._set_list(3, map(Point,((5,5),(3,-2),(8,1))))
+            mp._set_list(3, map(Point, ((5, 5), (3, -2), (8, 1))))
-            self.assertEqual(mp.coords, ((5.0,5.0),(3.0,-2.0),(8.0,1.0)), 'Collection _set_list')
+            self.assertEqual(mp.coords, ((5.0, 5.0), (3.0, -2.0), (8.0, 1.0)), 'Collection _set_list')
-            lsa = MultiPoint(*map(Point,((5,5),(3,-2),(8,1))))
+            lsa = MultiPoint(*map(Point, ((5, 5), (3, -2), (8, 1))))
             for f in geos_function_tests:
                 self.assertEqual(f(lsa), f(mp), 'MultiPoint ' + f.__name__)

View File

@@ -79,20 +79,20 @@ class ListMixinTest(unittest.TestCase):
             self.assertEqual(pl[:i], ul[:i], 'slice [:%d]' % (i))
             for j in self.limits_plus(1):
-                self.assertEqual(pl[i:j], ul[i:j], 'slice [%d:%d]' % (i,j))
+                self.assertEqual(pl[i:j], ul[i:j], 'slice [%d:%d]' % (i, j))
                 for k in self.step_range():
-                    self.assertEqual(pl[i:j:k], ul[i:j:k], 'slice [%d:%d:%d]' % (i,j,k))
+                    self.assertEqual(pl[i:j:k], ul[i:j:k], 'slice [%d:%d:%d]' % (i, j, k))
             for k in self.step_range():
-                self.assertEqual(pl[i::k], ul[i::k], 'slice [%d::%d]' % (i,k))
+                self.assertEqual(pl[i::k], ul[i::k], 'slice [%d::%d]' % (i, k))
-                self.assertEqual(pl[:i:k], ul[:i:k], 'slice [:%d:%d]' % (i,k))
+                self.assertEqual(pl[:i:k], ul[:i:k], 'slice [:%d:%d]' % (i, k))
         for k in self.step_range():
             self.assertEqual(pl[::k], ul[::k], 'slice [::%d]' % (k))
     def test02_setslice(self):
         'Slice assignment'
-        def setfcn(x,i,j,k,L):
+        def setfcn(x, i, j, k, L):
             x[i:j:k] = range(L)
         pl, ul = self.lists_of_len()
         for slen in range(self.limit + 1):
@@ -166,23 +166,23 @@ class ListMixinTest(unittest.TestCase):
             pl, ul = self.lists_of_len(Len)
             del pl[i:j]
             del ul[i:j]
-            self.assertEqual(pl[:], ul[:], 'del slice [%d:%d]' % (i,j))
+            self.assertEqual(pl[:], ul[:], 'del slice [%d:%d]' % (i, j))
             for k in list(range(-Len - 1, 0)) + list(range(1, Len)):
                 pl, ul = self.lists_of_len(Len)
                 del pl[i:j:k]
                 del ul[i:j:k]
-                self.assertEqual(pl[:], ul[:], 'del slice [%d:%d:%d]' % (i,j,k))
+                self.assertEqual(pl[:], ul[:], 'del slice [%d:%d:%d]' % (i, j, k))
             for k in list(range(-Len - 1, 0)) + list(range(1, Len)):
                 pl, ul = self.lists_of_len(Len)
                 del pl[:i:k]
                 del ul[:i:k]
-                self.assertEqual(pl[:], ul[:], 'del slice [:%d:%d]' % (i,k))
+                self.assertEqual(pl[:], ul[:], 'del slice [:%d:%d]' % (i, k))
                 pl, ul = self.lists_of_len(Len)
                 del pl[i::k]
                 del ul[i::k]
-                self.assertEqual(pl[:], ul[:], 'del slice [%d::%d]' % (i,k))
+                self.assertEqual(pl[:], ul[:], 'del slice [%d::%d]' % (i, k))
             for k in list(range(-Len - 1, 0)) + list(range(1, Len)):
                 pl, ul = self.lists_of_len(Len)
@@ -231,8 +231,8 @@ class ListMixinTest(unittest.TestCase):
         ul.append(40)
         self.assertEqual(pl[:], ul[:], 'append')
-        pl.extend(range(50,55))
+        pl.extend(range(50, 55))
-        ul.extend(range(50,55))
+        ul.extend(range(50, 55))
         self.assertEqual(pl[:], ul[:], 'extend')
         pl.reverse()
@@ -241,8 +241,8 @@ class ListMixinTest(unittest.TestCase):
         for i in self.limits_plus(1):
             pl, ul = self.lists_of_len()
-            pl.insert(i,50)
+            pl.insert(i, 50)
-            ul.insert(i,50)
+            ul.insert(i, 50)
             self.assertEqual(pl[:], ul[:], 'insert at %d' % i)
         for i in self.limits_plus(0):
@@ -292,17 +292,17 @@ class ListMixinTest(unittest.TestCase):
         def setfcn(x, i, v):
             x[i] = v
         self.assertRaises(TypeError, setfcn, ul, 2, 'hello')
-        self.assertRaises(TypeError, setfcn, ul, slice(0,3,2), ('hello','goodbye'))
+        self.assertRaises(TypeError, setfcn, ul, slice(0, 3, 2), ('hello', 'goodbye'))
     def test08_min_length(self):
         'Length limits'
         pl, ul = self.lists_of_len()
         ul._minlength = 1
-        def delfcn(x,i):
+        def delfcn(x, i):
             del x[:i]
-        def setfcn(x,i):
+        def setfcn(x, i):
             x[:i] = []
         for i in range(self.limit - ul._minlength + 1, self.limit + 1):
             self.assertRaises(ValueError, delfcn, ul, i)
@@ -363,7 +363,7 @@ class ListMixinTest(unittest.TestCase):
     def test_12_arithmetic(self):
         'Arithmetic'
         pl, ul = self.lists_of_len()
-        al = list(range(10,14))
+        al = list(range(10, 14))
         self.assertEqual(list(pl + al), list(ul + al), 'add')
         self.assertEqual(type(ul), type(ul + al), 'type of add result')
         self.assertEqual(list(al + pl), list(al + ul), 'radd')

View File

@@ -196,7 +196,7 @@ class Geo3DTest(TestCase):
         """
         self._load_city_data()
         # `SELECT ST_Extent3D(point) FROM geo3d_city3d;`
-        ref_extent3d = (-123.305196, -41.315268, 14,174.783117, 48.462611, 1433)
+        ref_extent3d = (-123.305196, -41.315268, 14, 174.783117, 48.462611, 1433)
         extent1 = City3D.objects.aggregate(Extent3D('point'))['point__extent3d']
         extent2 = City3D.objects.extent3d()

View File

@@ -24,7 +24,7 @@ if HAS_GEOS and not spatialite:
     def postgis_bug_version():
-        spatial_version = getattr(connection.ops, "spatial_version", (0,0,0))
+        spatial_version = getattr(connection.ops, "spatial_version", (0, 0, 0))
         return spatial_version and (2, 0, 0) <= spatial_version <= (2, 0, 1)

View File

@@ -26,7 +26,7 @@ class InspectDbTests(TestCase):
         """
         out = StringIO()
         call_command('inspectdb',
-                     table_name_filter=lambda tn:tn.startswith('inspectapp_'),
+                     table_name_filter=lambda tn: tn.startswith('inspectapp_'),
                      stdout=out)
         output = out.getvalue()
         self.assertIn('geom = models.PolygonField()', output)

View File

@@ -255,7 +255,7 @@ class LayerMapTest(TestCase):
         # Testing the `step` keyword -- should get the same counties
         # regardless of we use a step that divides equally, that is odd,
         # or that is larger than the dataset.
-        for st in (4,7,1000):
+        for st in (4, 7, 1000):
             clear_counties()
             lm.save(step=st, strict=True)
             self.county_helper(county_feat=False)

View File

@@ -31,7 +31,7 @@ def ogrinfo(data_source, num_features=10):
     print(" extent: %s - %s" % (extent_tup[0:2], extent_tup[2:4]))
     print("Displaying the first %s features ====" % num_features)
-    width = max(*map(len,layer.fields))
+    width = max(*map(len, layer.fields))
     fmt = " %%%ss: %%s" % width
     for j, feature in enumerate(layer[:num_features]):
         print("=== Feature %s" % j)

View File

@@ -34,7 +34,7 @@ def color_style():
     # Use that palette as the basis for populating
     # the palette as defined in the environment.
     for role in termcolors.PALETTES[termcolors.NOCOLOR_PALETTE]:
-        format = color_settings.get(role,{})
+        format = color_settings.get(role, {})
         setattr(style, role, termcolors.make_style(**format))
     # For backwards compatibility,
     # set style for ERROR_OUTPUT == ERROR

View File

@@ -18,7 +18,7 @@ class Command(BaseCommand):
         make_option('--database', action='store', dest='database',
             default=DEFAULT_DB_ALIAS, help='Nominates a specific database to dump '
             'fixtures from. Defaults to the "default" database.'),
-        make_option('-e', '--exclude', dest='exclude',action='append', default=[],
+        make_option('-e', '--exclude', dest='exclude', action='append', default=[],
             help='An appname or appname.ModelName to exclude (use multiple --exclude to exclude multiple apps/models).'),
         make_option('-n', '--natural', action='store_true', dest='use_natural_keys', default=False,
             help='Use natural keys if they are available.'),

View File

@@ -90,7 +90,7 @@ class BoundMethodWeakref(object):
         else:
             base = super(BoundMethodWeakref, cls).__new__(cls)
             cls._allInstances[key] = base
-            base.__init__(target, onDelete, *arguments,**named)
+            base.__init__(target, onDelete, *arguments, **named)
             return base
     def __init__(self, target, onDelete=None):
@@ -139,7 +139,7 @@ class BoundMethodWeakref(object):
         Currently this is a two-tuple of the id()'s of the
         target object and the target function respectively.
         """
-        return (id(target.__self__),id(target.__func__))
+        return (id(target.__self__), id(target.__func__))
     calculateKey = classmethod(calculateKey)
     def __str__(self):

View File

@@ -235,7 +235,7 @@ class BaseForm(object):
                     # not be able to conscript the last row for our purposes,
                     # so insert a new, empty row.
                     last_row = (normal_row % {'errors': '', 'label': '',
-                                              'field': '', 'help_text':'',
+                                              'field': '', 'help_text': '',
                                               'html_class_attr': html_class_attr})
                     output.append(last_row)
                 output[-1] = last_row[:-len(row_ender)] + str_hidden + row_ender

View File

@@ -28,7 +28,7 @@ __all__ = (
     'SplitDateTimeWidget', 'SplitHiddenDateTimeWidget',
 )
-MEDIA_TYPES = ('css','js')
+MEDIA_TYPES = ('css', 'js')
 @python_2_unicode_compatible
 class Media(object):

View File

@@ -38,7 +38,7 @@ else:
             # (real val, encoded_val)
             val, encoded = super(SimpleCookie, self).value_encode(val)
-            encoded = encoded.replace(";", "\\073").replace(",","\\054")
+            encoded = encoded.replace(";", "\\073").replace(",", "\\054")
             # If encoded now contains any quoted chars, we need double quotes
             # around the whole string.
             if "\\" in encoded and not encoded.startswith('"'):

View File

@@ -341,9 +341,9 @@ def do_translate(parser, token):
         if value[0] == "'":
             m = re.match("^'([^']+)'(\|.*$)", value)
             if m:
-                value = '"%s"%s' % (m.group(1).replace('"','\\"'), m.group(2))
+                value = '"%s"%s' % (m.group(1).replace('"', '\\"'), m.group(2))
             elif value[-1] == "'":
-                value = '"%s"' % value[1:-1].replace('"','\\"')
+                value = '"%s"' % value[1:-1].replace('"', '\\"')
         noop = False
         asvar = None

View File

@@ -375,7 +375,7 @@ class SimpleTestCase(unittest.TestCase):
         # Search all contexts for the error.
         found_form = False
-        for i,context in enumerate(contexts):
+        for i, context in enumerate(contexts):
             if form not in context:
                 continue
             found_form = True
@@ -600,7 +600,7 @@ class SimpleTestCase(unittest.TestCase):
             self.assertEqual(optional.clean(e), empty_value)
         # test that max_length and min_length are always accepted
         if issubclass(fieldclass, CharField):
-            field_kwargs.update({'min_length':2, 'max_length':20})
+            field_kwargs.update({'min_length': 2, 'max_length': 20})
             self.assertTrue(isinstance(fieldclass(*field_args, **field_kwargs),
                                        fieldclass))

View File

@@ -285,8 +285,8 @@ def compare_xml(want, got):
         return node
     want, got = strip_quotes(want, got)
-    want = want.replace('\\n','\n')
+    want = want.replace('\\n', '\n')
-    got = got.replace('\\n','\n')
+    got = got.replace('\\n', '\n')
     # If the string is not a complete xml document, we may need to add a
     # root element. This allow us to compare fragments, like "<foo/><bar/>"

View File

@@ -258,7 +258,7 @@ def learn_cache_key(request, response, cache_timeout=None, key_prefix=None, cach
 def _to_tuple(s):
-    t = s.split('=',1)
+    t = s.split('=', 1)
     if len(t) == 2:
         return t[0].lower(), t[1]
     return t[0].lower(), True

View File

@@ -2,7 +2,7 @@
 Common checksum routines.
 """
-__all__ = ['luhn',]
+__all__ = ['luhn']
 from django.utils import six

View File

@@ -3,29 +3,29 @@
 from django.utils.translation import ugettext_lazy as _, pgettext_lazy
 WEEKDAYS = {
-    0:_('Monday'), 1:_('Tuesday'), 2:_('Wednesday'), 3:_('Thursday'), 4:_('Friday'),
+    0: _('Monday'), 1: _('Tuesday'), 2: _('Wednesday'), 3: _('Thursday'), 4: _('Friday'),
-    5:_('Saturday'), 6:_('Sunday')
+    5: _('Saturday'), 6: _('Sunday')
 }
 WEEKDAYS_ABBR = {
-    0:_('Mon'), 1:_('Tue'), 2:_('Wed'), 3:_('Thu'), 4:_('Fri'),
+    0: _('Mon'), 1: _('Tue'), 2: _('Wed'), 3: _('Thu'), 4: _('Fri'),
-    5:_('Sat'), 6:_('Sun')
+    5: _('Sat'), 6: _('Sun')
 }
 WEEKDAYS_REV = {
-    'monday':0, 'tuesday':1, 'wednesday':2, 'thursday':3, 'friday':4,
+    'monday': 0, 'tuesday': 1, 'wednesday': 2, 'thursday': 3, 'friday': 4,
-    'saturday':5, 'sunday':6
+    'saturday': 5, 'sunday': 6
 }
 MONTHS = {
-    1:_('January'), 2:_('February'), 3:_('March'), 4:_('April'), 5:_('May'), 6:_('June'),
+    1: _('January'), 2: _('February'), 3: _('March'), 4: _('April'), 5: _('May'), 6: _('June'),
-    7:_('July'), 8:_('August'), 9:_('September'), 10:_('October'), 11:_('November'),
+    7: _('July'), 8: _('August'), 9: _('September'), 10: _('October'), 11: _('November'),
-    12:_('December')
+    12: _('December')
 }
 MONTHS_3 = {
-    1:_('jan'), 2:_('feb'), 3:_('mar'), 4:_('apr'), 5:_('may'), 6:_('jun'),
+    1: _('jan'), 2: _('feb'), 3: _('mar'), 4: _('apr'), 5: _('may'), 6: _('jun'),
-    7:_('jul'), 8:_('aug'), 9:_('sep'), 10:_('oct'), 11:_('nov'), 12:_('dec')
+    7: _('jul'), 8: _('aug'), 9: _('sep'), 10: _('oct'), 11: _('nov'), 12: _('dec')
 }
 MONTHS_3_REV = {
-    'jan':1, 'feb':2, 'mar':3, 'apr':4, 'may':5, 'jun':6, 'jul':7, 'aug':8,
+    'jan': 1, 'feb': 2, 'mar': 3, 'apr': 4, 'may': 5, 'jun': 6, 'jul': 7, 'aug': 8,
-    'sep':9, 'oct':10, 'nov':11, 'dec':12
+    'sep': 9, 'oct': 10, 'nov': 11, 'dec': 12
 }
 MONTHS_AP = { # month names in Associated Press style
     1: pgettext_lazy('abbrev. month', 'Jan.'),

View File

@@ -78,7 +78,7 @@ def urlencode(query, doseq=0):
         query = query.items()
     return original_urlencode(
         [(force_str(k),
-          [force_str(i) for i in v] if isinstance(v, (list,tuple)) else force_str(v))
+          [force_str(i) for i in v] if isinstance(v, (list, tuple)) else force_str(v))
          for k, v in query],
         doseq)

View File

@@ -641,7 +641,7 @@ def templatize(src, origin=None):
             out.write(' _(%s) ' % cmatch.group(1))
             for p in parts[1:]:
                 if p.find(':_(') >= 0:
-                    out.write(' %s ' % p.split(':',1)[1])
+                    out.write(' %s ' % p.split(':', 1)[1])
                 else:
                     out.write(blankout(p, 'F'))
         elif t.token_type == TOKEN_COMMENT:

View File

@@ -40,7 +40,7 @@ def cleanse_setting(key, value):
             cleansed = CLEANSED_SUBSTITUTE
         else:
             if isinstance(value, dict):
-                cleansed = dict((k, cleanse_setting(k, v)) for k,v in value.items())
+                cleansed = dict((k, cleanse_setting(k, v)) for k, v in value.items())
             else:
                 cleansed = value
     except TypeError: