Fixed E128, E741 flake8 warnings.

This commit is contained in:
Mariusz Felisiak 2020-05-12 08:52:23 +02:00 committed by GitHub
parent e6ec76d245
commit 0668164b4a
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
11 changed files with 94 additions and 78 deletions

View File

@@ -1071,7 +1071,7 @@ class ModelAdmin(BaseModelAdmin):
                 level = getattr(messages.constants, level.upper())
             except AttributeError:
                 levels = messages.constants.DEFAULT_TAGS.values()
-                levels_repr = ', '.join('`%s`' % l for l in levels)
+                levels_repr = ', '.join('`%s`' % level for level in levels)
                 raise ValueError(
                     'Bad message level string: `%s`. Possible values are: %s'
                     % (level, levels_repr)

View File

@@ -157,8 +157,8 @@ class SafeMIMEText(MIMEMixin, MIMEText):
     def set_payload(self, payload, charset=None):
         if charset == 'utf-8' and not isinstance(charset, Charset.Charset):
             has_long_lines = any(
-                len(l.encode()) > RFC5322_EMAIL_LINE_LENGTH_LIMIT
-                for l in payload.splitlines()
+                len(line.encode()) > RFC5322_EMAIL_LINE_LENGTH_LIMIT
+                for line in payload.splitlines()
             )
             # Quoted-Printable encoding has the side effect of shortening long
             # lines, if any (#22561).

View File

@@ -101,7 +101,7 @@ class Command(BaseCommand):
         self.has_errors = False
         for basedir in basedirs:
             if locales:
-                dirs = [os.path.join(basedir, l, 'LC_MESSAGES') for l in locales]
+                dirs = [os.path.join(basedir, locale, 'LC_MESSAGES') for locale in locales]
             else:
                 dirs = [basedir]
             locations = []

View File

@@ -140,7 +140,7 @@ class Media:
         except CyclicDependencyError:
             warnings.warn(
                 'Detected duplicate Media files in an opposite order: {}'.format(
-                    ', '.join(repr(l) for l in lists)
+                    ', '.join(repr(list_) for list_ in lists)
                 ), MediaOrderConflictWarning,
             )
         return list(all_items)

View File

@@ -361,7 +361,7 @@ class HttpRequest:
     def close(self):
         if hasattr(self, '_files'):
-            for f in chain.from_iterable(l[1] for l in self._files.lists()):
+            for f in chain.from_iterable(list_[1] for list_ in self._files.lists()):
                 f.close()

     # File-like and iterator interface.

View File

@@ -122,7 +122,7 @@ class TimeFormat(Formatter):
         "Minutes; i.e. '00' to '59'"
         return '%02d' % self.data.minute

-    def O(self):  # NOQA: E743
+    def O(self):  # NOQA: E743, E741
         """
         Difference to Greenwich time in hours; e.g. '+0200', '-0430'.
@@ -234,7 +234,7 @@ class DateFormat(TimeFormat):
         "Month, textual, long; e.g. 'January'"
         return MONTHS[self.data.month]

-    def I(self):  # NOQA: E743
+    def I(self):  # NOQA: E743, E741
         "'1' if Daylight Savings Time, '0' otherwise."
         try:
             if self.timezone and self.timezone.dst(self.data):
@@ -251,7 +251,7 @@ class DateFormat(TimeFormat):
         "Day of the month without leading zeros; i.e. '1' to '31'"
         return self.data.day

-    def l(self):  # NOQA: E743
+    def l(self):  # NOQA: E743, E741
         "Day of the week, textual, long; e.g. 'Friday'"
         return WEEKDAYS[self.data.weekday()]

View File

@@ -27,10 +27,10 @@ def topological_sort_as_sets(dependency_graph):
                 todo.items() if node not in current}

-def stable_topological_sort(l, dependency_graph):
+def stable_topological_sort(nodes, dependency_graph):
     result = []
     for layer in topological_sort_as_sets(dependency_graph):
-        for node in l:
+        for node in nodes:
             if node in layer:
                 result.append(node)
     return result

View File

@@ -71,7 +71,7 @@ class Geo3DLoadingHelper:
         # Interstate (2D / 3D and Geographic/Projected variants)
         for name, line, exp_z in interstate_data:
             line_3d = GEOSGeometry(line, srid=4269)
-            line_2d = LineString([l[:2] for l in line_3d.coords], srid=4269)
+            line_2d = LineString([coord[:2] for coord in line_3d.coords], srid=4269)
             # Creating a geographic and projected version of the
             # interstate in both 2D and 3D.

View File

@@ -310,19 +310,19 @@ class GEOSTest(SimpleTestCase, TestDataMixin):
     def test_linestring(self):
         "Testing LineString objects."
         prev = fromstr('POINT(0 0)')
-        for l in self.geometries.linestrings:
-            ls = fromstr(l.wkt)
+        for line in self.geometries.linestrings:
+            ls = fromstr(line.wkt)
             self.assertEqual(ls.geom_type, 'LineString')
             self.assertEqual(ls.geom_typeid, 1)
             self.assertEqual(ls.dims, 1)
             self.assertIs(ls.empty, False)
             self.assertIs(ls.ring, False)
-            if hasattr(l, 'centroid'):
-                self.assertEqual(l.centroid, ls.centroid.tuple)
-            if hasattr(l, 'tup'):
-                self.assertEqual(l.tup, ls.tuple)
-            self.assertEqual(ls, fromstr(l.wkt))
+            if hasattr(line, 'centroid'):
+                self.assertEqual(line.centroid, ls.centroid.tuple)
+            if hasattr(line, 'tup'):
+                self.assertEqual(line.tup, ls.tuple)
+            self.assertEqual(ls, fromstr(line.wkt))
             self.assertIs(ls == prev, False)  # Use assertIs() to test __eq__.
             with self.assertRaises(IndexError):
                 ls.__getitem__(len(ls))
@@ -389,16 +389,16 @@ class GEOSTest(SimpleTestCase, TestDataMixin):
     def test_multilinestring(self):
         "Testing MultiLineString objects."
         prev = fromstr('POINT(0 0)')
-        for l in self.geometries.multilinestrings:
-            ml = fromstr(l.wkt)
+        for line in self.geometries.multilinestrings:
+            ml = fromstr(line.wkt)
             self.assertEqual(ml.geom_type, 'MultiLineString')
             self.assertEqual(ml.geom_typeid, 5)
             self.assertEqual(ml.dims, 1)
-            self.assertAlmostEqual(l.centroid[0], ml.centroid.x, 9)
-            self.assertAlmostEqual(l.centroid[1], ml.centroid.y, 9)
-            self.assertEqual(ml, fromstr(l.wkt))
+            self.assertAlmostEqual(line.centroid[0], ml.centroid.x, 9)
+            self.assertAlmostEqual(line.centroid[1], ml.centroid.y, 9)
+            self.assertEqual(ml, fromstr(line.wkt))
             self.assertIs(ml == prev, False)  # Use assertIs() to test __eq__.
             prev = ml

View File

@@ -72,7 +72,7 @@ class TestFindStatic(TestDefaults, CollectionTestCase):
        findstatic returns all candidate files if run without --first and -v1.
        """
        result = call_command('findstatic', 'test/file.txt', verbosity=1, stdout=StringIO())
-        lines = [l.strip() for l in result.split('\n')]
+        lines = [line.strip() for line in result.split('\n')]
        self.assertEqual(len(lines), 3)  # three because there is also the "Found <file> here" line
        self.assertIn('project', lines[1])
        self.assertIn('apps', lines[2])
@@ -82,7 +82,7 @@ class TestFindStatic(TestDefaults, CollectionTestCase):
        findstatic returns all candidate files if run without --first and -v0.
        """
        result = call_command('findstatic', 'test/file.txt', verbosity=0, stdout=StringIO())
-        lines = [l.strip() for l in result.split('\n')]
+        lines = [line.strip() for line in result.split('\n')]
        self.assertEqual(len(lines), 2)
        self.assertIn('project', lines[0])
        self.assertIn('apps', lines[1])
@@ -93,7 +93,7 @@ class TestFindStatic(TestDefaults, CollectionTestCase):
        Also, test that findstatic returns the searched locations with -v2.
        """
        result = call_command('findstatic', 'test/file.txt', verbosity=2, stdout=StringIO())
-        lines = [l.strip() for l in result.split('\n')]
+        lines = [line.strip() for line in result.split('\n')]
        self.assertIn('project', lines[1])
        self.assertIn('apps', lines[2])
        self.assertIn("Looking in the following locations:", lines[3])

View File

@ -42,17 +42,29 @@ class JsTokensTest(SimpleTestCase):
(r"a=/\//,1", ["id a", "punct =", r"regex /\//", "punct ,", "dnum 1"]), (r"a=/\//,1", ["id a", "punct =", r"regex /\//", "punct ,", "dnum 1"]),
# next two are from https://www-archive.mozilla.org/js/language/js20-2002-04/rationale/syntax.html#regular-expressions # NOQA # next two are from https://www-archive.mozilla.org/js/language/js20-2002-04/rationale/syntax.html#regular-expressions # NOQA
("""for (var x = a in foo && "</x>" || mot ? z:/x:3;x<5;y</g/i) {xyz(x++);}""", (
["keyword for", "punct (", "keyword var", "id x", "punct =", "id a", "keyword in", """for (var x = a in foo && "</x>" || mot ? z:/x:3;x<5;y</g/i) {xyz(x++);}""",
"id foo", "punct &&", 'string "</x>"', "punct ||", "id mot", "punct ?", "id z", [
"punct :", "regex /x:3;x<5;y</g", "punct /", "id i", "punct )", "punct {", "keyword for", "punct (", "keyword var", "id x", "punct =",
"id xyz", "punct (", "id x", "punct ++", "punct )", "punct ;", "punct }"]), "id a", "keyword in", "id foo", "punct &&", 'string "</x>"',
("""for (var x = a in foo && "</x>" || mot ? z/x:3;x<5;y</g/i) {xyz(x++);}""", "punct ||", "id mot", "punct ?", "id z", "punct :",
["keyword for", "punct (", "keyword var", "id x", "punct =", "id a", "keyword in", "regex /x:3;x<5;y</g", "punct /", "id i", "punct )", "punct {",
"id foo", "punct &&", 'string "</x>"', "punct ||", "id mot", "punct ?", "id z", "id xyz", "punct (", "id x", "punct ++", "punct )", "punct ;",
"punct /", "id x", "punct :", "dnum 3", "punct ;", "id x", "punct <", "dnum 5", "punct }"
"punct ;", "id y", "punct <", "regex /g/i", "punct )", "punct {", ],
"id xyz", "punct (", "id x", "punct ++", "punct )", "punct ;", "punct }"]), ),
(
"""for (var x = a in foo && "</x>" || mot ? z/x:3;x<5;y</g/i) {xyz(x++);}""",
[
"keyword for", "punct (", "keyword var", "id x", "punct =",
"id a", "keyword in", "id foo", "punct &&", 'string "</x>"',
"punct ||", "id mot", "punct ?", "id z", "punct /", "id x",
"punct :", "dnum 3", "punct ;", "id x", "punct <", "dnum 5",
"punct ;", "id y", "punct <", "regex /g/i", "punct )",
"punct {", "id xyz", "punct (", "id x", "punct ++", "punct )",
"punct ;", "punct }",
],
),
# Various "illegal" regexes that are valid according to the std. # Various "illegal" regexes that are valid according to the std.
(r"""/????/, /++++/, /[----]/ """, ["regex /????/", "punct ,", "regex /++++/", "punct ,", "regex /[----]/"]), (r"""/????/, /++++/, /[----]/ """, ["regex /????/", "punct ,", "regex /++++/", "punct ,", "regex /[----]/"]),
@ -65,46 +77,50 @@ class JsTokensTest(SimpleTestCase):
(r"""/a[\]]b/""", [r"""regex /a[\]]b/"""]), (r"""/a[\]]b/""", [r"""regex /a[\]]b/"""]),
(r"""/[\]/]/gi""", [r"""regex /[\]/]/gi"""]), (r"""/[\]/]/gi""", [r"""regex /[\]/]/gi"""]),
(r"""/\[[^\]]+\]/gi""", [r"""regex /\[[^\]]+\]/gi"""]), (r"""/\[[^\]]+\]/gi""", [r"""regex /\[[^\]]+\]/gi"""]),
(r""" (
rexl.re = { r"""
NAME: /^(?![0-9])(?:\w)+|^"(?:[^"]|"")+"/, rexl.re = {
UNQUOTED_LITERAL: /^@(?:(?![0-9])(?:\w|\:)+|^"(?:[^"]|"")+")\[[^\]]+\]/, NAME: /^(?![0-9])(?:\w)+|^"(?:[^"]|"")+"/,
QUOTED_LITERAL: /^'(?:[^']|'')*'/, UNQUOTED_LITERAL: /^@(?:(?![0-9])(?:\w|\:)+|^"(?:[^"]|"")+")\[[^\]]+\]/,
NUMERIC_LITERAL: /^[0-9]+(?:\.[0-9]*(?:[eE][-+][0-9]+)?)?/, QUOTED_LITERAL: /^'(?:[^']|'')*'/,
SYMBOL: /^(?:==|=|<>|<=|<|>=|>|!~~|!~|~~|~|!==|!=|!~=|!~|!|&|\||\.|\:|,|\(|\)|\[|\]|\{|\}|\?|\:|;|@|\^|\/\+|\/|\*|\+|-)/ NUMERIC_LITERAL: /^[0-9]+(?:\.[0-9]*(?:[eE][-+][0-9]+)?)?/,
}; SYMBOL: /^(?:==|=|<>|<=|<|>=|>|!~~|!~|~~|~|!==|!=|!~=|!~|!|&|\||\.|\:|,|\(|\)|\[|\]|\{|\}|\?|\:|;|@|\^|\/\+|\/|\*|\+|-)/
""", # NOQA };
["id rexl", "punct .", "id re", "punct =", "punct {", """, # NOQA
"id NAME", "punct :", r"""regex /^(?![0-9])(?:\w)+|^"(?:[^"]|"")+"/""", "punct ,", [
"id UNQUOTED_LITERAL", "punct :", r"""regex /^@(?:(?![0-9])(?:\w|\:)+|^"(?:[^"]|"")+")\[[^\]]+\]/""", "id rexl", "punct .", "id re", "punct =", "punct {",
"punct ,", "id NAME", "punct :", r"""regex /^(?![0-9])(?:\w)+|^"(?:[^"]|"")+"/""", "punct ,",
"id QUOTED_LITERAL", "punct :", r"""regex /^'(?:[^']|'')*'/""", "punct ,", "id UNQUOTED_LITERAL", "punct :", r"""regex /^@(?:(?![0-9])(?:\w|\:)+|^"(?:[^"]|"")+")\[[^\]]+\]/""",
"id NUMERIC_LITERAL", "punct :", r"""regex /^[0-9]+(?:\.[0-9]*(?:[eE][-+][0-9]+)?)?/""", "punct ,", "punct ,",
"id SYMBOL", "punct :", r"""regex /^(?:==|=|<>|<=|<|>=|>|!~~|!~|~~|~|!==|!=|!~=|!~|!|&|\||\.|\:|,|\(|\)|\[|\]|\{|\}|\?|\:|;|@|\^|\/\+|\/|\*|\+|-)/""", # NOQA "id QUOTED_LITERAL", "punct :", r"""regex /^'(?:[^']|'')*'/""", "punct ,",
"punct }", "punct ;" "id NUMERIC_LITERAL", "punct :", r"""regex /^[0-9]+(?:\.[0-9]*(?:[eE][-+][0-9]+)?)?/""", "punct ,",
]), "id SYMBOL", "punct :", r"""regex /^(?:==|=|<>|<=|<|>=|>|!~~|!~|~~|~|!==|!=|!~=|!~|!|&|\||\.|\:|,|\(|\)|\[|\]|\{|\}|\?|\:|;|@|\^|\/\+|\/|\*|\+|-)/""", # NOQA
"punct }", "punct ;"
(r""" ],
rexl.re = { ),
NAME: /^(?![0-9])(?:\w)+|^"(?:[^"]|"")+"/, (
UNQUOTED_LITERAL: /^@(?:(?![0-9])(?:\w|\:)+|^"(?:[^"]|"")+")\[[^\]]+\]/, r"""
QUOTED_LITERAL: /^'(?:[^']|'')*'/, rexl.re = {
NUMERIC_LITERAL: /^[0-9]+(?:\.[0-9]*(?:[eE][-+][0-9]+)?)?/, NAME: /^(?![0-9])(?:\w)+|^"(?:[^"]|"")+"/,
SYMBOL: /^(?:==|=|<>|<=|<|>=|>|!~~|!~|~~|~|!==|!=|!~=|!~|!|&|\||\.|\:|,|\(|\)|\[|\]|\{|\}|\?|\:|;|@|\^|\/\+|\/|\*|\+|-)/ UNQUOTED_LITERAL: /^@(?:(?![0-9])(?:\w|\:)+|^"(?:[^"]|"")+")\[[^\]]+\]/,
}; QUOTED_LITERAL: /^'(?:[^']|'')*'/,
str = '"'; NUMERIC_LITERAL: /^[0-9]+(?:\.[0-9]*(?:[eE][-+][0-9]+)?)?/,
""", # NOQA SYMBOL: /^(?:==|=|<>|<=|<|>=|>|!~~|!~|~~|~|!==|!=|!~=|!~|!|&|\||\.|\:|,|\(|\)|\[|\]|\{|\}|\?|\:|;|@|\^|\/\+|\/|\*|\+|-)/
["id rexl", "punct .", "id re", "punct =", "punct {", };
"id NAME", "punct :", r"""regex /^(?![0-9])(?:\w)+|^"(?:[^"]|"")+"/""", "punct ,", str = '"';
"id UNQUOTED_LITERAL", "punct :", r"""regex /^@(?:(?![0-9])(?:\w|\:)+|^"(?:[^"]|"")+")\[[^\]]+\]/""", """, # NOQA
"punct ,", [
"id QUOTED_LITERAL", "punct :", r"""regex /^'(?:[^']|'')*'/""", "punct ,", "id rexl", "punct .", "id re", "punct =", "punct {",
"id NUMERIC_LITERAL", "punct :", r"""regex /^[0-9]+(?:\.[0-9]*(?:[eE][-+][0-9]+)?)?/""", "punct ,", "id NAME", "punct :", r"""regex /^(?![0-9])(?:\w)+|^"(?:[^"]|"")+"/""", "punct ,",
"id SYMBOL", "punct :", r"""regex /^(?:==|=|<>|<=|<|>=|>|!~~|!~|~~|~|!==|!=|!~=|!~|!|&|\||\.|\:|,|\(|\)|\[|\]|\{|\}|\?|\:|;|@|\^|\/\+|\/|\*|\+|-)/""", # NOQA "id UNQUOTED_LITERAL", "punct :", r"""regex /^@(?:(?![0-9])(?:\w|\:)+|^"(?:[^"]|"")+")\[[^\]]+\]/""",
"punct }", "punct ;", "punct ,",
"id str", "punct =", """string '"'""", "punct ;", "id QUOTED_LITERAL", "punct :", r"""regex /^'(?:[^']|'')*'/""", "punct ,",
]), "id NUMERIC_LITERAL", "punct :", r"""regex /^[0-9]+(?:\.[0-9]*(?:[eE][-+][0-9]+)?)?/""", "punct ,",
"id SYMBOL", "punct :", r"""regex /^(?:==|=|<>|<=|<|>=|>|!~~|!~|~~|~|!==|!=|!~=|!~|!|&|\||\.|\:|,|\(|\)|\[|\]|\{|\}|\?|\:|;|@|\^|\/\+|\/|\*|\+|-)/""", # NOQA
"punct }", "punct ;",
"id str", "punct =", """string '"'""", "punct ;",
],
),
(r""" this._js = "e.str(\"" + this.value.replace(/\\/g, "\\\\").replace(/"/g, "\\\"") + "\")"; """, (r""" this._js = "e.str(\"" + this.value.replace(/\\/g, "\\\\").replace(/"/g, "\\\"") + "\")"; """,
["keyword this", "punct .", "id _js", "punct =", r'''string "e.str(\""''', "punct +", "keyword this", ["keyword this", "punct .", "id _js", "punct =", r'''string "e.str(\""''', "punct +", "keyword this",
"punct .", "id value", "punct .", "id replace", "punct (", r"regex /\\/g", "punct ,", r'string "\\\\"', "punct .", "id value", "punct .", "id replace", "punct (", r"regex /\\/g", "punct ,", r'string "\\\\"',