Merge pull request #1651 from bhamiltoncx/python-lexer-parser-output-file

Adds Python 2 and Python 3 runtime support for sending lexer and parser output to a caller-supplied file object instead of always printing to stdout
Terence Parr 2017-02-13 15:10:19 -08:00 committed by GitHub
commit 9b7e2a81ce
8 changed files with 95 additions and 59 deletions
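In effect, every regenerated lexer and parser gains an optional `output` parameter, defaulting to `sys.stdout`, that receives everything the recognizer prints (trace messages, debug output, DFA dumps). A minimal usage sketch, assuming a hypothetical generated grammar `MyGrammar` with a start rule `expr`:

```python
from io import StringIO
from antlr4 import InputStream, CommonTokenStream

# MyGrammarLexer/MyGrammarParser and the start rule `expr` are hypothetical;
# any grammar regenerated after this change exposes the same parameter.
from MyGrammarLexer import MyGrammarLexer
from MyGrammarParser import MyGrammarParser

buf = StringIO()                       # capture recognizer output in memory
lexer = MyGrammarLexer(InputStream("1+2"), output=buf)
parser = MyGrammarParser(CommonTokenStream(lexer), output=buf)
parser.setTrace(True)                  # rule traces now land in buf
tree = parser.expr()
print(buf.getvalue())
```

Because the parameter defaults to `sys.stdout`, existing call sites keep their current behavior.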


@@ -1,6 +1,6 @@
writeln(s) ::= <<print(<s>)>>
write(s) ::= <<print(<s>,end='')>>
writeList(s) ::= <<print(<s: {v | str(<v>)}; separator="+">)>>
writeln(s) ::= <<print(<s>, file=self._output)>>
write(s) ::= <<print(<s>,end='', file=self._output)>>
writeList(s) ::= <<print(<s: {v | str(<v>)}; separator="+">, file=self._output)>>
False() ::= "False"
@@ -152,14 +152,16 @@ else:
from <X>Listener import <X>Listener
class LeafListener(TListener):
def __init__(self, output):
self._output = output
def visitTerminal(self, node):
print(node.symbol.text)
print(node.symbol.text, file=self._output)
}
>>
WalkListener(s) ::= <<
walker = ParseTreeWalker()
walker.walk(TParser.LeafListener(), <s>)
walker.walk(TParser.LeafListener(self._output), <s>)
>>
TreeNodeWithAltNumField(X) ::= <<
@@ -183,11 +185,13 @@ else:
from <X>Listener import <X>Listener
class LeafListener(TListener):
def __init__(self, output):
self._output = output
def exitA(self, ctx):
if ctx.getChildCount()==2:
print(ctx.INT(0).symbol.text + ' ' + ctx.INT(1).symbol.text + ' ' + str_list(ctx.INT()))
print(ctx.INT(0).symbol.text + ' ' + ctx.INT(1).symbol.text + ' ' + str_list(ctx.INT()), file=self._output)
else:
print(str(ctx.ID().symbol))
print(str(ctx.ID().symbol), file=self._output)
}
>>
@@ -199,11 +203,13 @@ else:
from <X>Listener import <X>Listener
class LeafListener(TListener):
def __init__(self, output):
self._output = output
def exitA(self, ctx):
if ctx.getChildCount()==2:
print(ctx.b(0).start.text + ' ' + ctx.b(1).start.text + ' ' + ctx.b()[0].start.text)
print(ctx.b(0).start.text + ' ' + ctx.b(1).start.text + ' ' + ctx.b()[0].start.text, file=self._output)
else:
print(ctx.b(0).start.text)
print(ctx.b(0).start.text, file=self._output)
}
>>
@@ -216,11 +222,13 @@ else:
from <X>Listener import <X>Listener
class LeafListener(TListener):
def __init__(self, output):
self._output = output
def exitE(self, ctx):
if ctx.getChildCount()==3:
print(ctx.e(0).start.text + ' ' + ctx.e(1).start.text + ' ' + ctx.e()[0].start.text)
print(ctx.e(0).start.text + ' ' + ctx.e(1).start.text + ' ' + ctx.e()[0].start.text, file=self._output)
else:
print(ctx.INT().symbol.text)
print(ctx.INT().symbol.text, file=self._output)
}
>>
@@ -232,10 +240,12 @@ else:
from <X>Listener import <X>Listener
class LeafListener(TListener):
def __init__(self, output):
self._output = output
def exitCall(self, ctx):
print(ctx.e().start.text + ' ' + str(ctx.eList()))
print(ctx.e().start.text + ' ' + str(ctx.eList()), file=self._output)
def exitInt(self, ctx):
print(ctx.INT().symbol.text)
print(ctx.INT().symbol.text, file=self._output)
}
>>
@@ -247,13 +257,13 @@ def foo():
>>
Declare_foo() ::= <<def foo(self):
print('foo')
print('foo', file=self._output)
>>
Invoke_foo() ::= "self.foo()"
Declare_pred() ::= <<def pred(self, v):
print('eval=' + str(v).lower())
print('eval=' + str(v).lower(), file=self._output)
return v
>>
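The change to these test templates is mechanical: each generated `LeafListener` now takes the stream in its constructor, and every `print` goes through `self._output`, so the test harness can capture listener output instead of scraping stdout. A sketch of the same pattern outside the templates, assuming `TListener` and `tree` come from a hypothetical generated grammar `T`:

```python
import sys
from io import StringIO
from antlr4 import ParseTreeWalker

# TListener (generated listener base) and tree (a parsed tree) are assumed
# to exist; this mirrors what the templates above generate.
class LeafListener(TListener):
    def __init__(self, output):
        self._output = output          # injected stream, not sys.stdout

    def visitTerminal(self, node):
        print(node.symbol.text, file=self._output)

captured = StringIO()
ParseTreeWalker().walk(LeafListener(captured), tree)
sys.stdout.write(captured.getvalue())  # the harness inspects this text
```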


@@ -1,6 +1,6 @@
writeln(s) ::= <<print(<s>)>>
write(s) ::= <<print(<s>,end='')>>
writeList(s) ::= <<print(<s: {v | str(<v>)}; separator="+">)>>
writeln(s) ::= <<print(<s>, file=self._output)>>
write(s) ::= <<print(<s>,end='',file=self._output)>>
writeList(s) ::= <<print(<s: {v | str(<v>)}; separator="+">, file=self._output)>>
False() ::= "False"
@@ -152,8 +152,10 @@ def isIdentifierChar(c):
BasicListener(X) ::= <<
@parser::members {
class LeafListener(MockListener):
def __init__(self, output):
self._output = output
def visitTerminal(self, node):
print(node.symbol.text)
print(node.symbol.text, file=self._output)
}
>>
@@ -164,7 +166,7 @@ else:
from TListener import TListener
TParser.LeafListener.__bases__ = (TListener,)
walker = ParseTreeWalker()
walker.walk(TParser.LeafListener(), <s>)
walker.walk(TParser.LeafListener(self._output), <s>)
>>
TreeNodeWithAltNumField(X) ::= <<
@@ -183,22 +185,26 @@ class MyRuleNode(ParserRuleContext):
TokenGetterListener(X) ::= <<
@parser::members {
class LeafListener(MockListener):
def __init__(self, output):
self._output = output
def exitA(self, ctx):
if ctx.getChildCount()==2:
print(ctx.INT(0).symbol.text + ' ' + ctx.INT(1).symbol.text + ' ' + str_list(ctx.INT()))
print(ctx.INT(0).symbol.text + ' ' + ctx.INT(1).symbol.text + ' ' + str_list(ctx.INT()), file=self._output)
else:
print(str(ctx.ID().symbol))
print(str(ctx.ID().symbol), file=self._output)
}
>>
RuleGetterListener(X) ::= <<
@parser::members {
class LeafListener(MockListener):
def __init__(self, output):
self._output = output
def exitA(self, ctx):
if ctx.getChildCount()==2:
print(ctx.b(0).start.text + ' ' + ctx.b(1).start.text + ' ' + ctx.b()[0].start.text)
print(ctx.b(0).start.text + ' ' + ctx.b(1).start.text + ' ' + ctx.b()[0].start.text, file=self._output)
else:
print(ctx.b(0).start.text)
print(ctx.b(0).start.text, file=self._output)
}
>>
@@ -206,21 +212,25 @@ class LeafListener(MockListener):
LRListener(X) ::= <<
@parser::members {
class LeafListener(MockListener):
def __init__(self, output):
self._output = output
def exitE(self, ctx):
if ctx.getChildCount()==3:
print(ctx.e(0).start.text + ' ' + ctx.e(1).start.text + ' ' + ctx.e()[0].start.text)
print(ctx.e(0).start.text + ' ' + ctx.e(1).start.text + ' ' + ctx.e()[0].start.text, file=self._output)
else:
print(ctx.INT().symbol.text)
print(ctx.INT().symbol.text, file=self._output)
}
>>
LRWithLabelsListener(X) ::= <<
@parser::members {
class LeafListener(MockListener):
def __init__(self, output):
self._output = output
def exitCall(self, ctx):
print(ctx.e().start.text + ' ' + str(ctx.eList()))
print(ctx.e().start.text + ' ' + str(ctx.eList()), file=self._output)
def exitInt(self, ctx):
print(ctx.INT().symbol.text)
print(ctx.INT().symbol.text, file=self._output)
}
>>
@@ -232,13 +242,13 @@ def foo():
>>
Declare_foo() ::= <<def foo(self):
print('foo')
print('foo', file=self._output)
>>
Invoke_foo() ::= "self.foo()"
Declare_pred() ::= <<def pred(self, v):
print('eval=' + str(v).lower())
print('eval=' + str(v).lower(), file=self._output)
return v
>>
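These templates also rely on rebinding `__bases__` (see `TParser.LeafListener.__bases__ = (TListener,)` above): the listener is written against a placeholder base inside the grammar, then rebased onto the real generated listener once it can be imported. A self-contained sketch of the trick, with all names illustrative:

```python
class MockListener(object):
    def visitTerminal(self, node):
        pass

class TListener(object):               # stands in for the generated TListener
    def visitTerminal(self, node):
        pass

class LeafListener(MockListener):
    def __init__(self, output):
        self._output = output

    def visitTerminal(self, node):
        print(node.symbol.text, file=self._output)

# Swap the placeholder base for the real one; this works because both are
# plain heap classes with compatible layouts.
LeafListener.__bases__ = (TListener,)
assert issubclass(LeafListener, TListener)
```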


@@ -8,7 +8,9 @@
# uses simplified match() and error recovery mechanisms in the interest
# of speed.
#/
from __future__ import print_function
from io import StringIO
import sys
from antlr4.CommonTokenFactory import CommonTokenFactory
from antlr4.Recognizer import Recognizer
from antlr4.Token import Token
@@ -30,9 +32,10 @@ class Lexer(Recognizer, TokenSource):
MIN_CHAR_VALUE = '\u0000'
MAX_CHAR_VALUE = '\uFFFE'
def __init__(self, input):
def __init__(self, input, output=sys.stdout):
super(Lexer, self).__init__()
self._input = input
self._output = output
self._factory = CommonTokenFactory.DEFAULT
self._tokenFactorySourcePair = (self, input)
@@ -160,7 +163,7 @@ class Lexer(Recognizer, TokenSource):
def pushMode(self, m):
if self._interp.debug:
print("pushMode " + str(m))
print("pushMode " + str(m), file=self._output)
self._modeStack.append(self._mode)
self.mode(m)
@@ -168,7 +171,7 @@ class Lexer(Recognizer, TokenSource):
if len(self._modeStack)==0:
raise Exception("Empty Stack")
if self._interp.debug:
print("popMode back to "+ self._modeStack[:-1])
print("popMode back to "+ self._modeStack[:-1], file=self._output)
self.mode( self._modeStack.pop() )
return self._mode
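With the Python 2 `Lexer` storing `self._output`, its mode-change debug prints follow the stream instead of always hitting stdout. A sketch of capturing them, assuming a hypothetical generated lexer `MyLexer` (Python 3 syntax for brevity):

```python
from io import StringIO
from antlr4 import InputStream, Token

debug_log = StringIO()
lexer = MyLexer(InputStream("some input"), output=debug_log)  # MyLexer: hypothetical
lexer._interp.debug = True          # route pushMode/popMode messages to debug_log

token = lexer.nextToken()           # drain the token stream
while token.type != Token.EOF:
    token = lexer.nextToken()
print(debug_log.getvalue())
```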


@@ -11,7 +11,7 @@ from antlr4.atn.ATNDeserializationOptions import ATNDeserializationOptions
from antlr4.error.Errors import UnsupportedOperationException
from antlr4.tree.ParseTreePatternMatcher import ParseTreePatternMatcher
from antlr4.tree.Tree import ParseTreeListener, ErrorNode, TerminalNode
import sys
class TraceListener(ParseTreeListener):
@@ -19,16 +19,16 @@ class TraceListener(ParseTreeListener):
self._parser = parser
def enterEveryRule(self, ctx):
print("enter " + self._parser.ruleNames[ctx.getRuleIndex()] + ", LT(1)=" + self._parser._input.LT(1).text)
print("enter " + self._parser.ruleNames[ctx.getRuleIndex()] + ", LT(1)=" + self._parser._input.LT(1).text, file=self._parser._output)
def visitTerminal(self, node):
print("consume " + str(node.symbol) + " rule " + self._parser.ruleNames[self._parser._ctx.getRuleIndex()])
print("consume " + str(node.symbol) + " rule " + self._parser.ruleNames[self._parser._ctx.getRuleIndex()], file=self._parser._output)
def visitErrorNode(self, node):
pass
def exitEveryRule(self, ctx):
print("exit " + self._parser.ruleNames[ctx.getRuleIndex()] + ", LT(1)=" + self._parser._input.LT(1).text)
print("exit " + self._parser.ruleNames[ctx.getRuleIndex()] + ", LT(1)=" + self._parser._input.LT(1).text, file=self._parser._output)
# self is all the parsing support code essentially; most of it is error recovery stuff.#
@@ -41,10 +41,11 @@ class Parser (Recognizer):
#
bypassAltsAtnCache = dict()
def __init__(self, input):
def __init__(self, input, output=sys.stdout):
super(Parser, self).__init__()
# The input stream.
self._input = None
self._output = output
# The error handling strategy for the parser. The default value is a new
# instance of {@link DefaultErrorStrategy}.
self._errHandler = DefaultErrorStrategy()
@@ -532,9 +533,9 @@ class Parser (Recognizer):
dfa = self._interp.decisionToDFA[i]
if len(dfa.states)>0:
if seenOne:
print()
print("Decision " + str(dfa.decision) + ":")
print(dfa.toString(self.literalNames, self.symbolicNames), end='')
print(file=self._output)
print("Decision " + str(dfa.decision) + ":", file=self._output)
print(dfa.toString(self.literalNames, self.symbolicNames), end='', file=self._output)
seenOne = True
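Since `TraceListener` now writes through `self._parser._output`, rule traces can be captured the same way. A sketch, again with hypothetical generated classes `MyLexer`/`MyParser` and start rule `prog`:

```python
from io import StringIO
from antlr4 import InputStream, CommonTokenStream

trace = StringIO()
lexer = MyLexer(InputStream("1+2"))                  # hypothetical lexer
parser = MyParser(CommonTokenStream(lexer), output=trace)
parser.setTrace(True)        # TraceListener prints to parser._output
parser.prog()                # hypothetical start rule
print(trace.getvalue())      # enter/consume/exit lines, one per event
```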


@@ -9,6 +9,8 @@
# of speed.
#/
from io import StringIO
from typing.io import TextIO
import sys
from antlr4.CommonTokenFactory import CommonTokenFactory
from antlr4.atn.LexerATNSimulator import LexerATNSimulator
from antlr4.InputStream import InputStream
@@ -32,9 +34,10 @@ class Lexer(Recognizer, TokenSource):
MIN_CHAR_VALUE = '\u0000'
MAX_CHAR_VALUE = '\uFFFE'
def __init__(self, input:InputStream):
def __init__(self, input:InputStream, output:TextIO = sys.stdout):
super().__init__()
self._input = input
self._output = output
self._factory = CommonTokenFactory.DEFAULT
self._tokenFactorySourcePair = (self, input)
@@ -162,7 +165,7 @@ class Lexer(Recognizer, TokenSource):
def pushMode(self, m:int):
if self._interp.debug:
print("pushMode " + str(m))
print("pushMode " + str(m), file=self._output)
self._modeStack.append(self._mode)
self.mode(m)
@@ -170,7 +173,7 @@ class Lexer(Recognizer, TokenSource):
if len(self._modeStack)==0:
raise Exception("Empty Stack")
if self._interp.debug:
print("popMode back to "+ self._modeStack[:-1])
print("popMode back to "+ self._modeStack[:-1], file=self._output)
self.mode( self._modeStack.pop() )
return self._mode
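The Python 3 `Lexer` change is identical apart from the `TextIO` annotation; any text-mode file object works as the sink. For example, debug output can go straight to a log file (again with a hypothetical generated `MyLexer`):

```python
from antlr4 import InputStream, Token

with open("lexer-debug.log", "w") as log:
    lexer = MyLexer(InputStream("some input"), output=log)  # MyLexer: hypothetical
    lexer._interp.debug = True      # pushMode/popMode messages go to the log
    token = lexer.nextToken()
    while token.type != Token.EOF:
        token = lexer.nextToken()
```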


@@ -2,6 +2,8 @@
# Copyright (c) 2012-2016 The ANTLR Project. All rights reserved.
# Use of this file is governed by the BSD 3-clause license that
# can be found in the LICENSE.txt file in the project root.
import sys
from typing.io import TextIO
from antlr4.BufferedTokenStream import TokenStream
from antlr4.CommonTokenFactory import TokenFactory
from antlr4.error.ErrorStrategy import DefaultErrorStrategy
@@ -23,18 +25,18 @@ class TraceListener(ParseTreeListener):
self._parser = parser
def enterEveryRule(self, ctx):
print("enter " + self._parser.ruleNames[ctx.getRuleIndex()] + ", LT(1)=" + self._parser._input.LT(1).text)
print("enter " + self._parser.ruleNames[ctx.getRuleIndex()] + ", LT(1)=" + self._parser._input.LT(1).text, file=self._parser._output)
def visitTerminal(self, node):
print("consume " + str(node.symbol) + " rule " + self._parser.ruleNames[self._parser._ctx.getRuleIndex()])
print("consume " + str(node.symbol) + " rule " + self._parser.ruleNames[self._parser._ctx.getRuleIndex()], file=self._parser._output)
def visitErrorNode(self, node):
pass
def exitEveryRule(self, ctx):
print("exit " + self._parser.ruleNames[ctx.getRuleIndex()] + ", LT(1)=" + self._parser._input.LT(1).text)
print("exit " + self._parser.ruleNames[ctx.getRuleIndex()] + ", LT(1)=" + self._parser._input.LT(1).text, file=self._parser._output)
# self is all the parsing support code essentially; most of it is error recovery stuff.#
@@ -47,10 +49,11 @@ class Parser (Recognizer):
#
bypassAltsAtnCache = dict()
def __init__(self, input:TokenStream):
def __init__(self, input:TokenStream, output:TextIO = sys.stdout):
super().__init__()
# The input stream.
self._input = None
self._output = output
# The error handling strategy for the parser. The default value is a new
# instance of {@link DefaultErrorStrategy}.
self._errHandler = DefaultErrorStrategy()
@@ -538,9 +541,9 @@ class Parser (Recognizer):
dfa = self._interp.decisionToDFA[i]
if len(dfa.states)>0:
if seenOne:
print()
print("Decision " + str(dfa.decision) + ":")
print(dfa.toString(self.literalNames, self.symbolicNames), end='')
print(file=self._output)
print("Decision " + str(dfa.decision) + ":", file=self._output)
print(dfa.toString(self.literalNames, self.symbolicNames), end='', file=self._output)
seenOne = True
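`dumpDFA` honors the stream as well, so profiling output can be collected programmatically. A sketch with hypothetical `MyLexer`/`MyParser` and start rule `prog`:

```python
from io import StringIO
from antlr4 import InputStream, CommonTokenStream

dfa_dump = StringIO()
lexer = MyLexer(InputStream("1+2"))                  # hypothetical classes
parser = MyParser(CommonTokenStream(lexer), output=dfa_dump)
parser.prog()
parser.dumpDFA()             # "Decision N:" blocks land in dfa_dump
print(dfa_dump.getvalue())
```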


@@ -52,6 +52,7 @@ ParserFile(file, parser, namedActions, contextSuperClass) ::= <<
from __future__ import print_function
from antlr4 import *
from io import StringIO
import sys
<namedActions.header>
<parser>
@@ -198,8 +199,8 @@ def sempred(self, localctx, ruleIndex, predIndex):
>>
parser_ctor(p) ::= <<
def __init__(self, input):
super(<parser.name>, self).__init__(input)
def __init__(self, input, output=sys.stdout):
super(<parser.name>, self).__init__(input, output=output)
self.checkVersion("<file.ANTLRVersion>")
self._interp = ParserATNSimulator(self, self.atn, self.decisionsToDFA, self.sharedContextCache)
self._predicates = None
@@ -740,6 +741,7 @@ LexerFile(lexerFile, lexer, namedActions) ::= <<
from __future__ import print_function
from antlr4 import *
from io import StringIO
import sys
<namedActions.header>
@@ -780,8 +782,8 @@ class <lexer.name>(<if(superClass)><superClass><else>Lexer<endif>):
grammarFileName = u"<lexer.grammarFileName>"
def __init__(self, input=None):
super(<lexer.name>, self).__init__(input)
def __init__(self, input=None, output=sys.stdout):
super(<lexer.name>, self).__init__(input, output=output)
self.checkVersion("<lexerFile.ANTLRVersion>")
self._interp = LexerATNSimulator(self, self.atn, self.decisionsToDFA, PredictionContextCache())
self._actions = None
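After these template changes, every generated Python 2 recognizer carries the extra parameter through to the runtime base class. Roughly, the emitted constructor has this shape (the grammar name `Expr` is illustrative, not from the diff):

```python
import sys
from antlr4 import *

class ExprParser(Parser):
    def __init__(self, input, output=sys.stdout):
        super(ExprParser, self).__init__(input, output=output)
        # checkVersion and ATN-simulator setup follow, as in the template above
```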


@@ -51,6 +51,8 @@ ParserFile(file, parser, namedActions, contextSuperClass) ::= <<
# encoding: utf-8
from antlr4 import *
from io import StringIO
from typing.io import TextIO
import sys
<namedActions.header>
<parser>
@@ -206,8 +208,8 @@ def sempred(self, localctx:RuleContext, ruleIndex:int, predIndex:int):
>>
parser_ctor(p) ::= <<
def __init__(self, input:TokenStream):
super().__init__(input)
def __init__(self, input:TokenStream, output:TextIO = sys.stdout):
super().__init__(input, output)
self.checkVersion("<file.ANTLRVersion>")
self._interp = ParserATNSimulator(self, self.atn, self.decisionsToDFA, self.sharedContextCache)
self._predicates = None
@@ -745,6 +747,8 @@ LexerFile(lexerFile, lexer, namedActions) ::= <<
<fileHeader(lexerFile.grammarFileName, lexerFile.ANTLRVersion)>
from antlr4 import *
from io import StringIO
from typing.io import TextIO
import sys
<namedActions.header>
@@ -785,8 +789,8 @@ class <lexer.name>(<if(superClass)><superClass><else>Lexer<endif>):
grammarFileName = "<lexer.grammarFileName>"
def __init__(self, input=None):
super().__init__(input)
def __init__(self, input=None, output:TextIO = sys.stdout):
super().__init__(input, output)
self.checkVersion("<lexerFile.ANTLRVersion>")
self._interp = LexerATNSimulator(self, self.atn, self.decisionsToDFA, PredictionContextCache())
self._actions = None
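The Python 3 template mirrors this with type annotations. Roughly, the emitted constructor looks like the sketch below (grammar name `Expr` is again illustrative; the generated file itself imports `TextIO` from `typing.io`, as the diff shows):

```python
import sys
from typing import TextIO
from antlr4 import *

class ExprParser(Parser):
    def __init__(self, input: TokenStream, output: TextIO = sys.stdout):
        super().__init__(input, output)
        # checkVersion and ATN-simulator setup follow, as in the template above
```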