forked from jasder/antlr

Fix error when Interval is rewritten with a tuple

parent 377f2f5c89
commit f50fb7df15
@@ -39,6 +39,8 @@
# {@link Token#HIDDEN_CHANNEL}, use a filtering token stream such a
# {@link CommonTokenStream}.</p>
from io import StringIO

+from antlr4.IntervalSet import Interval
from antlr4.Token import Token
from antlr4.error.Errors import IllegalStateException

@@ -293,18 +295,18 @@ class BufferedTokenStream(TokenStream):

    def getSourceName(self):
        return self.tokenSource.getSourceName()

    # Get the text of all tokens in this buffer.#/
    def getText(self, interval=None):
        """
        Get the text of all tokens in this buffer.

        :param interval:
        :type interval: antlr4.IntervalSet.Interval
-       :return:
+       :return: string
        """
        self.lazyInit()
        self.fill()
        if interval is None:
-           interval = (0, len(self.tokens)-1)
+           interval = Interval(0, len(self.tokens)-1)
        start = interval.start
        if isinstance(start, Token):
            start = start.tokenIndex

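The error named in the commit title follows from the start = interval.start line above: the old default was a plain tuple, which has no start attribute, so calling getText() with no argument failed with AttributeError. A minimal sketch of the difference; it assumes the antlr4 Python runtime this fork targets is installed and that Interval exposes start exactly as getText() uses it (the sketch is illustrative, not part of the commit):

from antlr4.IntervalSet import Interval

tokens = list(range(6))                      # stand-in for self.tokens

old_default = (0, len(tokens) - 1)           # old default: a plain tuple
try:
    old_default.start                        # what getText() reads next
except AttributeError as e:
    print(e)                                 # 'tuple' object has no attribute 'start'

new_default = Interval(0, len(tokens) - 1)   # fixed default, as in the hunk above
print(new_default.start)                     # 0 -- attribute access now succeeds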
@@ -323,9 +325,12 @@ class BufferedTokenStream(TokenStream):

            buf.write(t.text)
        return buf.getvalue()


    # Get all tokens from lexer until EOF#/
    def fill(self):
        """
        Get all tokens from lexer until EOF

        :return: None
        """
        self.lazyInit()
        while self.fetch(1000)==1000:
            pass

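The fill() loop above relies on fetch(n) returning how many tokens it actually buffered, so fetching in blocks of 1000 stops on the first short block, i.e. once the lexer reaches EOF. A small self-contained stand-in for that loop shape (make_fetch is a hypothetical helper, not the runtime's fetch):

def make_fetch(total):
    # Build a fetch(n) that pretends 'total' tokens are available in all.
    state = {"buffered": 0}
    def fetch(n):
        grabbed = min(n, total - state["buffered"])
        state["buffered"] += grabbed
        return grabbed                       # number actually buffered by this call
    return fetch

fetch = make_fetch(total=2500)
while fetch(1000) == 1000:                   # same termination condition as fill()
    pass
print("buffered all tokens")                 # the last call returned 500, ending the loop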
@@ -528,8 +528,10 @@ class TestTokenStreamRewriter(unittest.TestCase):

        self.assertEquals('afoofoo', rewriter.getDefaultText())

    # Test for fix for: https://github.com/antlr/antlr4/issues/550
    def testPreservesOrderOfContiguousInserts(self):
        """
        Test for fix for: https://github.com/antlr/antlr4/issues/550
        """
        input = InputStream('aa')
        lexer = TestLexer(input)
        stream = CommonTokenStream(lexer=lexer)
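The hunk is cut off above right after the stream is built. For orientation, issue #550 concerns contiguous inserts at the same token index keeping the order in which they were requested. A hedged standalone sketch of that behaviour; TestLexer is the lexer defined in the test module (not shown here), and insertBeforeIndex is an assumed method name, since only getDefaultText() is visible in this diff:

from antlr4 import InputStream, CommonTokenStream
from antlr4.TokenStreamRewriter import TokenStreamRewriter

lexer = TestLexer(InputStream('aa'))          # TestLexer: defined in the test module above
stream = CommonTokenStream(lexer=lexer)
stream.fill()
rewriter = TokenStreamRewriter(stream)
rewriter.insertBeforeIndex(0, '<first>')      # assumed method name
rewriter.insertBeforeIndex(0, '<second>')     # second insert at the same index
# Per the test name, both insertions should appear in the order they were made.
print(rewriter.getDefaultText())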