Fix source reindenting by using `textwrap.dedent` directly.
parent df435fa8bd
commit f102ccc8f0

@@ -0,0 +1 @@
+Fix source reindenting by using ``textwrap.dedent`` directly.
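
For context, a minimal standalone sketch of what "using ``textwrap.dedent`` directly" means here (the helper name is made up; the real change to ``deindent`` is in the hunks below): join the lines into one string, let ``textwrap.dedent`` strip the leading whitespace common to all non-blank lines, and split back into a list of lines.

import textwrap

def dedent_lines(lines):
    # Hypothetical helper mirroring the one-line deindent() added below.
    return textwrap.dedent("\n".join(lines)).splitlines()

print(dedent_lines(["    def f():", "        pass"]))
# ['def f():', '    pass']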

@@ -7,6 +7,7 @@ import linecache
 import sys
 import six
 import inspect
+import textwrap
 import tokenize
 import py
 

@@ -23,7 +24,6 @@ class Source(object):
     def __init__(self, *parts, **kwargs):
         self.lines = lines = []
         de = kwargs.get("deindent", True)
-        rstrip = kwargs.get("rstrip", True)
         for part in parts:
             if not part:
                 partlines = []

@@ -33,11 +33,6 @@ class Source(object):
                 partlines = [x.rstrip("\n") for x in part]
             elif isinstance(part, six.string_types):
                 partlines = part.split("\n")
-                if rstrip:
-                    while partlines:
-                        if partlines[-1].strip():
-                            break
-                        partlines.pop()
             else:
                 partlines = getsource(part, deindent=de).lines
             if de:
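
The explicit ``rstrip`` handling removed above seems to become unnecessary with this approach: ``textwrap.dedent`` blanks out whitespace-only lines, so a trailing indentation-only line (such as the one left before a closing triple quote) disappears in the join/dedent/splitlines round trip. A small sketch of that, assuming nothing beyond stock ``textwrap`` behaviour:

import textwrap

def deindent(lines):
    # Same expression as the one-line deindent() introduced in the next hunk.
    return textwrap.dedent("\n".join(lines)).splitlines()

part = """
        3
        """
# The final indentation-only line vanishes, which is why the explicit
# rstrip handling above could go:
print(deindent(part.split("\n")))  # ['', '3']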

@@ -269,46 +264,7 @@ def getsource(obj, **kwargs):
 
 
 def deindent(lines, offset=None):
-    if offset is None:
-        for line in lines:
-            line = line.expandtabs()
-            s = line.lstrip()
-            if s:
-                offset = len(line) - len(s)
-                break
-        else:
-            offset = 0
-    if offset == 0:
-        return list(lines)
-    newlines = []
-
-    def readline_generator(lines):
-        for line in lines:
-            yield line + "\n"
-
-    it = readline_generator(lines)
-
-    try:
-        for _, _, (sline, _), (eline, _), _ in tokenize.generate_tokens(
-            lambda: next(it)
-        ):
-            if sline > len(lines):
-                break  # End of input reached
-            if sline > len(newlines):
-                line = lines[sline - 1].expandtabs()
-                if line.lstrip() and line[:offset].isspace():
-                    line = line[offset:]  # Deindent
-                newlines.append(line)
-
-            for i in range(sline, eline):
-                # Don't deindent continuing lines of
-                # multiline tokens (i.e. multiline strings)
-                newlines.append(lines[i])
-    except (IndentationError, tokenize.TokenError):
-        pass
-    # Add any lines we didn't see. E.g. if an exception was raised.
-    newlines.extend(lines[len(newlines) :])
-    return newlines
+    return textwrap.dedent("\n".join(lines)).splitlines()
 
 
 def get_statement_startend2(lineno, node):
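
Two consequences of the new one-liner show up in the test updates below; both follow from documented ``textwrap.dedent`` behaviour. A sketch (not part of the commit) reproducing them:

import textwrap

def deindent(lines):
    # Same expression as the new deindent() above.
    return textwrap.dedent("\n".join(lines)).splitlines()

# 1. Whitespace-only lines are normalized, so a trailing "    " entry no
#    longer survives the round trip:
print(deindent(["", "    def f():", "        pass", "    "]))
# ['', 'def f():', '    pass']

# 2. A line that starts at column 0 (e.g. the closing quotes of a multiline
#    string) means there is no common leading whitespace, so nothing is
#    stripped at all:
print(deindent(["    def f():", '        c = """while True:', "    pass", '"""']))
# ['    def f():', '        c = """while True:', '    pass', '"""']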

@@ -27,16 +27,7 @@ def test_source_str_function():
     x = Source(
         """
         3
-        """,
-        rstrip=False,
-    )
-    assert str(x) == "\n3\n        "
-
-    x = Source(
-        """
-        3
-        """,
-        rstrip=True,
+        """
     )
     assert str(x) == "\n3"
 

@@ -400,10 +391,13 @@ def test_getfuncsource_with_multine_string():
     pass
 """
 
-    assert (
-        str(_pytest._code.Source(f)).strip()
-        == 'def f():\n    c = """while True:\n    pass\n"""'
-    )
+    expected = '''\
+    def f():
+        c = """while True:
+    pass
+"""
+'''
+    assert str(_pytest._code.Source(f)) == expected.rstrip()
 
 
 def test_deindent():

@@ -417,7 +411,7 @@ def test_deindent():
 """
 
     lines = deindent(inspect.getsource(f).splitlines())
-    assert lines == ["def f():", '    c = """while True:', "    pass", '"""']
+    assert lines == ["    def f():", '        c = """while True:', "    pass", '"""']
 
     source = """
     def f():

@@ -425,7 +419,7 @@ def test_deindent():
             pass
     """
     lines = deindent(source.splitlines())
-    assert lines == ["", "def f():", "    def g():", "        pass", "    "]
+    assert lines == ["", "def f():", "    def g():", "        pass"]
 
 
 def test_source_of_class_at_eof_without_newline(tmpdir):