mv files into proper position for folding into main antlr4 repo; delete already moved stuff

Terence Parr 2015-06-30 14:13:56 -07:00
parent 6e0379d1e4
commit fe207fb3ee
67 changed files with 0 additions and 1442 deletions

.gitignore
View File

@@ -1,10 +0,0 @@
/.idea
*.pyc
__pycache__
src/TestAntLR.py
/dist
/MANIFEST
*.egg-info/
.DS_Store
tmp/
build/

View File

@@ -1,805 +0,0 @@
/*
* [The "BSD license"]
* Copyright (c) 2012 Terence Parr
* Copyright (c) 2012 Sam Harwell
* Copyright (c) 2014 Eric Vergnaud
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/** ANTLR tool checks that output templates are compatible with tool code generation.
* For now, a simple string match is used on the x.y of the x.y.z version scheme;
* it must match Tool.VERSION when the templates are loaded.
*
* REQUIRED.
*/
pythonTypeInitMap ::= [
"bool":"False",
"int":"0",
"float":"0.0",
"str":"",
default:"None" // anything other than a primitive type is an object
]
// args must be <object-model-object>, <fields-resulting-in-STs>
ParserFile(file, parser, namedActions) ::= <<
<fileHeader(file.grammarFileName, file.ANTLRVersion)>
# encoding: utf-8
from __future__ import print_function
from antlr4 import *
from io import StringIO
<namedActions.header>
<parser>
>>
ListenerFile(file, header) ::= <<
<fileHeader(file.grammarFileName, file.ANTLRVersion)>
from antlr4 import *
<header>
# This class defines a complete listener for a parse tree produced by <file.parserName>.
class <file.grammarName>Listener(ParseTreeListener):
<file.listenerNames:{lname |
# Enter a parse tree produced by <file.parserName>#<lname>.
def enter<lname; format="cap">(self, ctx):
pass
# Exit a parse tree produced by <file.parserName>#<lname>.
def exit<lname; format="cap">(self, ctx):
pass
}; separator="\n">
>>
VisitorFile(file, header) ::= <<
<fileHeader(file.grammarFileName, file.ANTLRVersion)>
from antlr4 import *
<header>
# This class defines a complete generic visitor for a parse tree produced by <file.parserName>.
class <file.grammarName>Visitor(ParseTreeVisitor):
<file.visitorNames:{lname |
# Visit a parse tree produced by <file.parserName>#<lname>.
def visit<lname; format="cap">(self, ctx):
return self.visitChildren(ctx)
}; separator="\n">
>>
fileHeader(grammarFileName, ANTLRVersion) ::= <<
# Generated from <grammarFileName> by ANTLR <ANTLRVersion>
>>
Parser(parser, funcs, atn, sempredFuncs, superClass) ::= <<
<Parser_(ctor="parser_ctor", ...)>
>>
Parser_(parser, funcs, atn, sempredFuncs, ctor, superClass) ::= <<
<if(superClass)>
from .<superClass> import <superClass>
<endif>
<atn>
class <parser.name> ( <if(superClass)><superClass><else>Parser<endif> ):
grammarFileName = "<parser.grammarFileName>"
atn = ATNDeserializer().deserialize(serializedATN())
decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ]
sharedContextCache = PredictionContextCache()
literalNames = [ <parser.literalNames:{t | u<t>}; null="u\"\<INVALID>\"", separator=", ", wrap, anchor> ]
symbolicNames = [ <parser.symbolicNames:{t | u<t>}; null="u\"\<INVALID>\"", separator=", ", wrap, anchor> ]
<parser.rules:{r | RULE_<r.name> = <r.index>}; separator="\n", wrap, anchor>
ruleNames = [ <parser.ruleNames:{r | u"<r>"}; separator=", ", wrap, anchor> ]
EOF = <TokenLabelType()>.EOF
<if(parser.tokens)>
<parser.tokens:{k | <k>=<parser.tokens.(k)>}; separator="\n", wrap, anchor>
<endif>
<parser:(ctor)()>
<namedActions.members>
<funcs; separator="\n">
<if(sempredFuncs)>
def sempred(self, localctx, ruleIndex, predIndex):
if self._predicates is None:
self._predicates = dict()
<parser.sempredFuncs.values:{f |
self._predicates[<f.ruleIndex>] = self.<f.name>_sempred}; separator="\n ">
pred = self._predicates.get(ruleIndex, None)
if pred is None:
raise Exception("No predicate with index:" + str(ruleIndex))
else:
return pred(localctx, predIndex)
<sempredFuncs.values; separator="\n">
<endif>
>>
dumpActions(recog, argFuncs, actionFuncs, sempredFuncs) ::= <<
<if(actionFuncs)>
def action(self, localctx, ruleIndex, actionIndex):
if self._actions is None:
actions = dict()
<recog.actionFuncs.values:{f|
actions[<f.ruleIndex>] = self.<f.name>_action }; separator="\n">
self._actions = actions
action = self._actions.get(ruleIndex, None)
if action is not None:
action(localctx, actionIndex)
else:
raise Exception("No registered action for:" + str(ruleIndex))
<actionFuncs.values; separator="\n">
<endif>
<if(sempredFuncs)>
def sempred(self, localctx, ruleIndex, predIndex):
if self._predicates is None:
preds = dict()
<recog.sempredFuncs.values:{f|
preds[<f.ruleIndex>] = self.<f.name>_sempred}; separator="\n">
self._predicates = preds
pred = self._predicates.get(ruleIndex, None)
if pred is not None:
return pred(localctx, predIndex)
else:
raise Exception("No registered predicate for:" + str(ruleIndex))
<sempredFuncs.values; separator="\n">
<endif>
>>
parser_ctor(p) ::= <<
def __init__(self, input):
super(<parser.name>, self).__init__(input)
self.checkVersion("<file.ANTLRVersion>")
self._interp = ParserATNSimulator(self, self.atn, self.decisionsToDFA, self.sharedContextCache)
self._predicates = None
>>
/* This generates a private method since the actionIndex is generated, making an
* overriding implementation impossible to maintain.
*/
RuleActionFunction(r, actions) ::= <<
def <r.name>_action(self, localctx , actionIndex):
<actions:{index|
<if(first(actions))>
if actionIndex == <index>:
<actions.(index)>
<elseif(rest(actions))>
elif actionIndex == <index>:
<actions.(index)>
<endif> }; separator="\n">
>>
/* This generates a private method since the predIndex is generated, making an
* overriding implementation impossible to maintain.
*/
RuleSempredFunction(r, actions) ::= <<
def <r.name>_sempred(self, localctx, predIndex):
<actions:{index|
<if(first(actions))>
if predIndex == <index>:
return <actions.(index)>
<elseif(rest(actions))>
elif predIndex == <index>:
return <actions.(index)>
<endif> }; separator="\n">
>>
RuleFunction(currentRule,args,code,locals,ruleCtx,altLabelCtxs,namedActions,finallyAction,postamble,exceptions) ::= <<
<ruleCtx>
<altLabelCtxs:{l | <altLabelCtxs.(l)>}; separator="\n">
def <currentRule.name>(self<currentRule.args:{a | , <a.name>}>):
localctx = <parser.name>.<currentRule.ctxType>(self, self._ctx, self.state<currentRule.args:{a | , <a.name>}>)
self.enterRule(localctx, <currentRule.startState>, self.RULE_<currentRule.name>)
<namedActions.init>
<locals; separator="\n">
try:
<code>
<postamble; separator="\n">
<namedActions.after>
<if(exceptions)>
<exceptions; separator="\n">
<else>
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
<endif>
finally:
<finallyAction>
self.exitRule()
return localctx
>>
LeftRecursiveRuleFunction(currentRule,args,code,locals,ruleCtx,altLabelCtxs,
namedActions,finallyAction,postamble) ::=
<<
<ruleCtx>
<altLabelCtxs:{l | <altLabelCtxs.(l)>}; separator="\n">
def <currentRule.name>(self, _p=0<if(currentRule.args)>, <args:{a | , <a>}><endif>):
_parentctx = self._ctx
_parentState = self.state
localctx = <parser.name>.<currentRule.ctxType>(self, self._ctx, _parentState<args:{a | , <a.name>}>)
_prevctx = localctx
_startState = <currentRule.startState>
self.enterRecursionRule(localctx, <currentRule.startState>, self.RULE_<currentRule.name>, _p)
<namedActions.init>
<locals; separator="\n">
try:
<code>
<postamble; separator="\n">
<namedActions.after>
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
<finallyAction>
self.unrollRecursionContexts(_parentctx)
return localctx
>>
CodeBlockForOuterMostAlt(currentOuterMostAltCodeBlock, locals, preamble, ops) ::= <<
<if(currentOuterMostAltCodeBlock.altLabel)>localctx = <parser.name>.<currentOuterMostAltCodeBlock.altLabel; format="cap">Context(self, localctx)<endif>
self.enterOuterAlt(localctx, <currentOuterMostAltCodeBlock.alt.altNum>)
<CodeBlockForAlt(currentAltCodeBlock=currentOuterMostAltCodeBlock, ...)>
>>
CodeBlockForAlt(currentAltCodeBlock, locals, preamble, ops) ::= <<
<locals; separator="\n">
<preamble; separator="\n">
<ops; separator="\n">
>>
LL1AltBlock(choice, preamble, alts, error) ::= <<
self.state = <choice.stateNumber>
<!_errHandler.sync(this);!>
<if(choice.label)><labelref(choice.label)> = _input.LT(1)<endif>
<preamble; separator="\n">
token = self._input.LA(1)
<choice.altLook,alts:{look,alt| <cases(ttypes=look)>
<alt>
}; separator="\nel">
else:
<error>
>>
LL1OptionalBlock(choice, alts, error) ::= <<
self.state = <choice.stateNumber>
<!_errHandler.sync(this);!>
token = self._input.LA(1)
<choice.altLook,alts:{look,alt| <cases(ttypes=look)>
<alt>
pass}; separator="\nel">
else:
<error>
>>
LL1OptionalBlockSingleAlt(choice, expr, alts, preamble, error, followExpr) ::= <<
self.state = <choice.stateNumber>
<!_errHandler.sync(this);!>
<preamble; separator="\n">
if <expr>:
<alts; separator="\n">
<!else if ( !(<followExpr>) ) <error>!>
>>
LL1StarBlockSingleAlt(choice, loopExpr, alts, preamble, iteration) ::= <<
self.state = <choice.stateNumber>
self._errHandler.sync(self)
<preamble; separator="\n">
while <loopExpr>:
<alts; separator="\n">
self.state = <choice.loopBackStateNumber>
self._errHandler.sync(self)
<iteration>
>>
LL1PlusBlockSingleAlt(choice, loopExpr, alts, preamble, iteration) ::= <<
self.state = <choice.blockStartStateNumber> <! alt block decision !>
self._errHandler.sync(self)
<preamble; separator="\n">
while True:
<alts; separator="\n">
self.state = <choice.stateNumber> <! loopback/exit decision !>
self._errHandler.sync(self)
<iteration>
if not (<loopExpr>):
break
>>
// LL(*) stuff
AltBlock(choice, preamble, alts, error) ::= <<
self.state = <choice.stateNumber>
<!_errHandler.sync(this);!>
<if(choice.label)><labelref(choice.label)> = _input.LT(1)<endif>
<preamble; separator="\n">
la_ = self._interp.adaptivePredict(self._input,<choice.decision>,self._ctx)
<alts:{alt |
if la_ == <i>:
<alt>
pass
}; separator="\nel">
>>
OptionalBlock(choice, alts, error) ::= <<
self.state = <choice.stateNumber>
<!_errHandler.sync(this);!>
la_ = self._interp.adaptivePredict(self._input,<choice.decision>,self._ctx)
<alts:{alt |
if la_ == <i><if(!choice.ast.greedy)>+1<endif>:
<alt>
}; separator="\nel">
>>
StarBlock(choice, alts, sync, iteration) ::= <<
self.state = <choice.stateNumber>
self._errHandler.sync(self)
_alt = self._interp.adaptivePredict(self._input,<choice.decision>,self._ctx)
while _alt!=<choice.exitAlt> and _alt!=ATN.INVALID_ALT_NUMBER:
if _alt==1<if(!choice.ast.greedy)>+1<endif>:
<iteration>
<alts> <! should only be one !>
self.state = <choice.loopBackStateNumber>
self._errHandler.sync(self)
_alt = self._interp.adaptivePredict(self._input,<choice.decision>,self._ctx)
>>
PlusBlock(choice, alts, error) ::= <<
self.state = <choice.blockStartStateNumber> <! alt block decision !>
self._errHandler.sync(self)
_alt = 1<if(!choice.ast.greedy)>+1<endif>
while _alt!=<choice.exitAlt> and _alt!=ATN.INVALID_ALT_NUMBER:
<alts:{alt|
if _alt == <i><if(!choice.ast.greedy)>+1<endif>:
<alt>
}; separator="\nel">
else:
<error>
self.state = <choice.loopBackStateNumber> <! loopback/exit decision !>
self._errHandler.sync(self)
_alt = self._interp.adaptivePredict(self._input,<choice.decision>,self._ctx)
>>
Sync(s) ::= "sync(<s.expecting.name>)"
ThrowNoViableAlt(t) ::= "raise NoViableAltException(self)"
TestSetInline(s) ::= <<
<s.bitsets:{bits | <if(rest(rest(bits.ttypes)))><bitsetBitfieldComparison(s, bits)><else><bitsetInlineComparison(s, bits)><endif>}; separator=" or ">
>>
// Java language spec 15.19 - shift operators mask operands rather than overflow to 0... need range test
testShiftInRange(shiftAmount) ::= <<
((<shiftAmount>) & ~0x3f) == 0
>>
// produces smaller bytecode only when bits.ttypes contains more than two items
bitsetBitfieldComparison(s, bits) ::= <%
(<testShiftInRange({<offsetShiftVar(s.varName, bits.shift)>})> and ((1 \<\< <offsetShiftVar(s.varName, bits.shift)>) & (<bits.ttypes:{ttype | (1 \<\< <offsetShiftType(ttype, bits.shift)>)}; separator=" | ">)) != 0)
%>
isZero ::= [
"0":true,
default:false
]
offsetShiftVar(shiftAmount, offset) ::= <%
<if(!isZero.(offset))>(<shiftAmount> - <offset>)<else><shiftAmount><endif>
%>
offsetShiftType(shiftAmount, offset) ::= <%
<if(!isZero.(offset))>(<parser.name>.<shiftAmount> - <offset>)<else><parser.name>.<shiftAmount><endif>
%>
// produces more efficient bytecode when bits.ttypes contains at most two items
bitsetInlineComparison(s, bits) ::= <%
<bits.ttypes:{ttype | <s.varName>==<parser.name>.<ttype>}; separator=" or ">
%>
cases(ttypes) ::= <<
if token in [<ttypes:{t | <parser.name>.<t>}; separator=", ">]:
>>
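For illustration, the cases and LL1AltBlock templates above compose into a plain if/elif chain: the "\nel" separator prefixes every case after the first with "el", and the trailing else carries the error chunk. For a hypothetical two-alternative decision over tokens A and B in a grammar named T, with made-up state numbers, the rendered Python would look roughly like this:
self.state = 8
token = self._input.LA(1)
if token in [TParser.A]:
    self.state = 6
    self.match(TParser.A)      # alt 1
elif token in [TParser.B]:
    self.state = 7
    self.match(TParser.B)      # alt 2
else:
    raise NoViableAltException(self)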
InvokeRule(r, argExprsChunks) ::= <<
self.state = <r.stateNumber>
<if(r.labels)><r.labels:{l | <labelref(l)> = }><endif>self.<r.name>(<if(r.ast.options.p)><r.ast.options.p><if(argExprsChunks)>,<endif><endif><argExprsChunks>)
>>
MatchToken(m) ::= <<
self.state = <m.stateNumber>
<if(m.labels)><m.labels:{l | <labelref(l)> = }><endif>self.match(<parser.name>.<m.name>)
>>
MatchSet(m, expr, capture) ::= "<CommonSetStuff(m, expr, capture, false)>"
MatchNotSet(m, expr, capture) ::= "<CommonSetStuff(m, expr, capture, true)>"
CommonSetStuff(m, expr, capture, invert) ::= <<
self.state = <m.stateNumber>
<if(m.labels)><m.labels:{l | <labelref(l)> = }>self._input.LT(1)<endif>
<capture>
<if(invert)>if <m.varName> \<= 0 or <expr><else>if not(<expr>)<endif>:
<if(m.labels)><m.labels:{l | <labelref(l)> = }><else> <endif>self._errHandler.recoverInline(self)
else:
self.consume()
>>
Wildcard(w) ::= <<
self.state = <w.stateNumber>
<if(w.labels)><w.labels:{l | <labelref(l)> = }><endif>self.matchWildcard()
>>
// ACTION STUFF
Action(a, foo, chunks) ::= "<chunks>"
ArgAction(a, chunks) ::= "<chunks>"
SemPred(p, chunks, failChunks) ::= <<
self.state = <p.stateNumber>
if not <chunks>:
from antlr4.error.Errors import FailedPredicateException
raise FailedPredicateException(self, <p.predicate><if(failChunks)>, <failChunks><elseif(p.msg)>, <p.msg><endif>)
>>
ExceptionClause(e, catchArg, catchAction) ::= <<
catch (<catchArg>) {
<catchAction>
}
>>
// lexer actions are not associated with model objects
LexerSkipCommand() ::= "skip()"
LexerMoreCommand() ::= "more()"
LexerPopModeCommand() ::= "popMode()"
LexerTypeCommand(arg) ::= "_type = <arg>"
LexerChannelCommand(arg) ::= "_channel = <arg>"
LexerModeCommand(arg) ::= "_mode = <arg>"
LexerPushModeCommand(arg) ::= "pushMode(<arg>)"
ActionText(t) ::= "<t.text>"
ActionTemplate(t) ::= "<t.st>"
ArgRef(a) ::= "localctx.<a.name>"
LocalRef(a) ::= "localctx.<a.name>"
RetValueRef(a) ::= "localctx.<a.name>"
QRetValueRef(a) ::= "<ctx(a)>.<a.dict>.<a.name>"
/** How to translate $tokenLabel */
TokenRef(t) ::= "<ctx(t)>.<t.name>"
LabelRef(t) ::= "<ctx(t)>.<t.name>"
ListLabelRef(t) ::= "<ctx(t)>.<ListLabelName(t.name)>"
SetAttr(s,rhsChunks) ::= "<ctx(s)>.<s.name> = <rhsChunks>"
TokenLabelType() ::= "<file.TokenLabelType; null={Token}>"
InputSymbolType() ::= "<file.InputSymbolType; null={Token}>"
TokenPropertyRef_text(t) ::= "(None if <ctx(t)>.<t.label> is None else <ctx(t)>.<t.label>.text)"
TokenPropertyRef_type(t) ::= "(0 if <ctx(t)>.<t.label> is None else <ctx(t)>.<t.label>.type)"
TokenPropertyRef_line(t) ::= "(0 if <ctx(t)>.<t.label> is None else <ctx(t)>.<t.label>.line)"
TokenPropertyRef_pos(t) ::= "(0 if <ctx(t)>.<t.label> is None else <ctx(t)>.<t.label>.column)"
TokenPropertyRef_channel(t) ::= "(0 if <ctx(t)>.<t.label> is None else <ctx(t)>.<t.label>.channel)"
TokenPropertyRef_index(t) ::= "(0 if <ctx(t)>.<t.label> is None else <ctx(t)>.<t.label>.tokenIndex)"
TokenPropertyRef_int(t) ::= "(0 if <ctx(t)>.<t.label> is None else int(<ctx(t)>.<t.label>.text))"
RulePropertyRef_start(r) ::= "(None if <ctx(r)>.<r.label> is None else <ctx(r)>.<r.label>.start)"
RulePropertyRef_stop(r) ::= "(None if <ctx(r)>.<r.label> is None else <ctx(r)>.<r.label>.stop)"
RulePropertyRef_text(r) ::= "(None if <ctx(r)>.<r.label> is None else self._input.getText((<ctx(r)>.<r.label>.start,<ctx(r)>.<r.label>.stop)))"
RulePropertyRef_ctx(r) ::= "<ctx(r)>.<r.label>"
RulePropertyRef_parser(r) ::= "self"
ThisRulePropertyRef_start(r) ::= "localctx.start"
ThisRulePropertyRef_stop(r) ::= "localctx.stop"
ThisRulePropertyRef_text(r) ::= "self._input.getText((localctx.start, self._input.LT(-1)))"
ThisRulePropertyRef_ctx(r) ::= "localctx"
ThisRulePropertyRef_parser(r) ::= "self"
NonLocalAttrRef(s) ::= "getInvokingContext(<s.ruleIndex>).<s.name>"
SetNonLocalAttr(s, rhsChunks) ::= "getInvokingContext(<s.ruleIndex>).<s.name> = <rhsChunks>"
AddToLabelList(a) ::= "<ctx(a.label)>.<a.listName>.append(<labelref(a.label)>)"
TokenDecl(t) ::= "self.<t.name> = None # <TokenLabelType()>"
TokenTypeDecl(t) ::= "self.<t.name> = 0 # <TokenLabelType()> type"
TokenListDecl(t) ::= "self.<t.name> = list() # of <TokenLabelType()>s"
RuleContextDecl(r) ::= "self.<r.name> = None # <r.ctxName>"
RuleContextListDecl(rdecl) ::= "self.<rdecl.name> = list() # of <rdecl.ctxName>s"
ContextTokenGetterDecl(t) ::= <<
def <t.name>(self):
return self.getToken(<parser.name>.<t.name>, 0)
>>
// should never be called
ContextTokenListGetterDecl(t) ::= <<
def <t.name>_list(self):
return self.getTokens(<parser.name>.<t.name>)
>>
ContextTokenListIndexedGetterDecl(t) ::= <<
def <t.name>(self, i=None):
if i is None:
return self.getTokens(<parser.name>.<t.name>)
else:
return self.getToken(<parser.name>.<t.name>, i)
>>
ContextRuleGetterDecl(r) ::= <<
def <r.name>(self):
return self.getTypedRuleContext(<parser.name>.<r.ctxName>,0)
>>
// should never be called
ContextRuleListGetterDecl(r) ::= <<
def <r.name>_list(self):
return self.getTypedRuleContexts(<parser.name>.<r.ctxName>)
>>
ContextRuleListIndexedGetterDecl(r) ::= <<
def <r.name>(self, i=None):
if i is None:
return self.getTypedRuleContexts(<parser.name>.<r.ctxName>)
else:
return self.getTypedRuleContext(<parser.name>.<r.ctxName>,i)
>>
LexerRuleContext() ::= "RuleContext"
/** The rule context name is the rule followed by a suffix; e.g.,
* r becomes rContext.
*/
RuleContextNameSuffix() ::= "Context"
ImplicitTokenLabel(tokenName) ::= "_<tokenName>"
ImplicitRuleLabel(ruleName) ::= "_<ruleName>"
ImplicitSetLabel(id) ::= "_tset<id>"
ListLabelName(label) ::= "<label>"
CaptureNextToken(d) ::= "<d.varName> = self._input.LT(1)"
CaptureNextTokenType(d) ::= "<d.varName> = self._input.LA(1)"
StructDecl(struct,ctorAttrs,attrs,getters,dispatchMethods,interfaces,extensionMembers,
superClass={ParserRuleContext}) ::= <<
class <struct.name>(<superClass>):
def __init__(self, parser, parent=None, invokingState=-1<struct.ctorAttrs:{a | , <a.name>=None}>):
super(<parser.name>.<struct.name>, self).__init__(parent, invokingState)
self.parser = parser
<attrs:{a | <a>}; separator="\n">
<struct.ctorAttrs:{a | self.<a.name> = <a.name>}; separator="\n">
<getters:{g | <g>}; separator="\n\n">
def getRuleIndex(self):
return <parser.name>.RULE_<struct.derivedFromName>
<if(struct.provideCopyFrom)> <! don't need copy unless we have subclasses !>
def copyFrom(self, ctx):
super(<parser.name>.<struct.name>, self).copyFrom(ctx)
<struct.attrs:{a | self.<a.name> = ctx.<a.name>}; separator="\n">
<endif>
<dispatchMethods; separator="\n">
<extensionMembers; separator="\n">
>>
AltLabelStructDecl(struct,attrs,getters,dispatchMethods) ::= <<
class <struct.name>(<currentRule.name; format="cap">Context):
def __init__(self, parser, ctx): # actually a <parser.name>.<currentRule.name; format="cap">Context)
super(<parser.name>.<struct.name>, self).__init__(parser)
<attrs:{a | <a>}; separator="\n">
self.copyFrom(ctx)
<getters:{g | <g>}; separator="\n">
<dispatchMethods; separator="\n">
>>
ListenerDispatchMethod(method) ::= <<
def <if(method.isEnter)>enter<else>exit<endif>Rule(self, listener):
if hasattr(listener, "<if(method.isEnter)>enter<else>exit<endif><struct.derivedFromName; format="cap">"):
listener.<if(method.isEnter)>enter<else>exit<endif><struct.derivedFromName; format="cap">(self)
>>
VisitorDispatchMethod(method) ::= <<
def accept(self, visitor):
if hasattr(visitor, "visit<struct.derivedFromName; format="cap">"):
return visitor.visit<struct.derivedFromName; format="cap">(self)
else:
return visitor.visitChildren(self)
>>
AttributeDecl(d) ::= "self.<d.name> = <if(d.initValue)><d.initValue><else>None<endif>"
/** If we don't know location of label def x, use this template */
labelref(x) ::= "<if(!x.isLocal)>localctx.<endif><x.name>"
/** For any action chunk, what is the correctly-typed context struct ptr? */
ctx(actionChunk) ::= "localctx"
// used for left-recursive rules
recRuleAltPredicate(ruleName,opPrec) ::= "self.precpred(self._ctx, <opPrec>)"
recRuleSetReturnAction(src,name) ::= "$<name>=$<src>.<name>"
recRuleSetStopToken() ::= "self._ctx.stop = self._input.LT(-1)"
recRuleAltStartAction(ruleName, ctxName, label) ::= <<
localctx = <parser.name>.<ctxName>Context(self, _parentctx, _parentState)
<if(label)>localctx.<label> = _prevctx<endif>
self.pushNewRecursionContext(localctx, _startState, self.RULE_<ruleName>)
>>
recRuleLabeledAltStartAction(ruleName, currentAltLabel, label, isListLabel) ::= <<
localctx = <parser.name>.<currentAltLabel; format="cap">Context(self, <parser.name>.<ruleName; format="cap">Context(self, _parentctx, _parentState))
<if(label)>
<if(isListLabel)>
localctx.<label>.append(_prevctx)
<else>
localctx.<label> = _prevctx
<endif>
<endif>
self.pushNewRecursionContext(localctx, _startState, self.RULE_<ruleName>)
>>
recRuleReplaceContext(ctxName) ::= <<
localctx = <parser.name>.<ctxName>Context(self, localctx)
self._ctx = localctx
_prevctx = localctx
>>
recRuleSetPrevCtx() ::= <<
if self._parseListeners is not None:
self.triggerExitRuleEvent()
_prevctx = localctx
>>
LexerFile(lexerFile, lexer, namedActions) ::= <<
<fileHeader(lexerFile.grammarFileName, lexerFile.ANTLRVersion)>
# encoding: utf-8
from __future__ import print_function
from antlr4 import *
from io import StringIO
<namedActions.header>
<lexer>
>>
Lexer(lexer, atn, actionFuncs, sempredFuncs, superClass) ::= <<
<atn>
class <lexer.name>(<if(superClass)><superClass><else>Lexer<endif>):
atn = ATNDeserializer().deserialize(serializedATN())
decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ]
<rest(lexer.modes):{m| <m> = <i>}; separator="\n">
<lexer.tokens:{k | <k> = <lexer.tokens.(k)>}; separator="\n", wrap, anchor>
modeNames = [ <lexer.modes:{m| u"<m>"}; separator=", ", wrap, anchor> ]
literalNames = [ u"\<INVALID>",
<lexer.literalNames:{t | u<t>}; separator=", ", wrap, anchor> ]
symbolicNames = [ u"\<INVALID>",
<lexer.symbolicNames:{t | u<t>}; separator=", ", wrap, anchor> ]
ruleNames = [ <lexer.ruleNames:{r | u"<r>"}; separator=", ", wrap, anchor> ]
grammarFileName = u"<lexer.grammarFileName>"
def __init__(self, input=None):
super(<lexer.name>, self).__init__(input)
self.checkVersion("<lexerFile.ANTLRVersion>")
self._interp = LexerATNSimulator(self, self.atn, self.decisionsToDFA, PredictionContextCache())
self._actions = None
self._predicates = None
<namedActions.members>
<dumpActions(lexer, "", actionFuncs, sempredFuncs)>
>>
SerializedATN(model) ::= <<
<! only one segment, can be inlined !>
def serializedATN():
with StringIO() as buf:
buf.write(u"<model.serialized; wrap={")<\n> buf.write(u"}>")
return buf.getvalue()
>>
/** Using a type-to-init-value map, try to init a type; if the type is not in the table
* it must be an object, whose default value is "None".
*/
initValue(typeName) ::= <<
<pythonTypeInitMap.(typeName)>
>>
codeFileExtension() ::= ".py"
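To make the file-level templates above concrete: for a hypothetical combined grammar named T with a single parser rule r, the fileHeader and ListenerFile templates would render roughly the following Python (grammar name, rule name, and version number are placeholders, not taken from this commit):
# Generated from T.g4 by ANTLR 4.x
from antlr4 import *

# This class defines a complete listener for a parse tree produced by TParser.
class TListener(ParseTreeListener):

    # Enter a parse tree produced by TParser#r.
    def enterR(self, ctx):
        pass

    # Exit a parse tree produced by TParser#r.
    def exitR(self, ctx):
        pass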

View File

@@ -1,134 +0,0 @@
/*
* [The "BSD license"]
* Copyright (c) 2012 Terence Parr
* Copyright (c) 2012 Sam Harwell
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.antlr.v4.codegen;
import org.antlr.v4.tool.ast.GrammarAST;
import org.stringtemplate.v4.STGroup;
import org.stringtemplate.v4.StringRenderer;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Locale;
import java.util.Set;
/**
*
* @author Eric Vergnaud
*/
public class Python2Target extends Target {
protected static final String[] python2Keywords = {
"abs", "all", "any", "apply", "as",
"bin", "bool", "buffer", "bytearray",
"callable", "chr", "classmethod", "coerce", "compile", "complex",
"delattr", "dict", "dir", "divmod",
"enumerate", "eval", "execfile",
"file", "filter", "float", "format", "frozenset",
"getattr", "globals",
"hasattr", "hash", "help", "hex",
"id", "input", "int", "intern", "isinstance", "issubclass", "iter",
"len", "list", "locals",
"map", "max", "min", "next",
"memoryview",
"object", "oct", "open", "ord",
"pow", "print", "property",
"range", "raw_input", "reduce", "reload", "repr", "reversed", "round",
"set", "setattr", "slice", "sorted", "staticmethod", "str", "sum", "super",
"tuple", "type",
"unichr", "unicode",
"vars",
"with",
"xrange",
"zip",
"__import__",
"True", "False", "None"
};
/** Avoid grammar symbols in this set to prevent conflicts in gen'd code. */
protected final Set<String> badWords = new HashSet<String>();
public Python2Target(CodeGenerator gen) {
super(gen, "Python2");
}
@Override
protected boolean visibleGrammarSymbolCausesIssueInGeneratedCode(GrammarAST idNode) {
return getBadWords().contains(idNode.getText());
}
@Override
protected STGroup loadTemplates() {
STGroup result = super.loadTemplates();
result.registerRenderer(String.class, new PythonStringRenderer(), true);
return result;
}
protected static class PythonStringRenderer extends StringRenderer {
@Override
public String toString(Object o, String formatString, Locale locale) {
return super.toString(o, formatString, locale);
}
}
@Override
public boolean wantsBaseListener() {
return false;
}
@Override
public boolean wantsBaseVisitor() {
return false;
}
@Override
public boolean supportsOverloadedMethods() {
return false;
}
@Override
public String getVersion() {
return "4.5.2";
}
public Set<String> getBadWords() {
if (badWords.isEmpty()) {
addBadWords();
}
return badWords;
}
protected void addBadWords() {
badWords.addAll(Arrays.asList(python2Keywords));
badWords.add("rule");
badWords.add("parserRule");
}
}
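The PythonStringRenderer registered in loadTemplates() simply delegates to StringTemplate's StringRenderer, whose "cap" format option is what the code-generation templates rely on wherever they write <lname; format="cap"> and similar. A minimal Python sketch of that capitalization rule, assuming the standard StringRenderer behavior and using hypothetical rule names:
def cap(name):
    # Upper-case only the first character; the rest of the identifier is untouched.
    return name[:1].upper() + name[1:] if name else name

print(cap("expr"))      # Expr     -> enterExpr / exitExpr / ExprContext
print(cap("funcDecl"))  # FuncDecl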

View File

@@ -1,94 +0,0 @@
package org.antlr.v4.test.runtime.python2;
import org.antlr.v4.test.runtime.python.BasePythonTest;
import org.stringtemplate.v4.ST;
public abstract class BasePython2Test extends BasePythonTest {
@Override
protected String getLanguage() {
return "Python2";
}
@Override
protected String getPythonExecutable() {
return "python2.7";
}
@Override
protected void writeLexerTestFile(String lexerName, boolean showDFA) {
ST outputFileST = new ST(
"from __future__ import print_function\n"
+ "import sys\n"
+ "from antlr4 import *\n"
+ "from <lexerName> import <lexerName>\n"
+ "\n"
+ "def main(argv):\n"
+ " input = FileStream(argv[1])\n"
+ " lexer = <lexerName>(input)\n"
+ " stream = CommonTokenStream(lexer)\n"
+ " stream.fill()\n"
+ " [ print(str(t)) for t in stream.tokens ]\n"
+ (showDFA ? " print(lexer._interp.decisionToDFA[Lexer.DEFAULT_MODE].toLexerString(), end='')\n"
: "") + "\n" + "if __name__ == '__main__':\n"
+ " main(sys.argv)\n" + "\n");
outputFileST.add("lexerName", lexerName);
writeFile(tmpdir, "Test.py", outputFileST.render());
}
@Override
protected void writeParserTestFile(String parserName, String lexerName,
String listenerName, String visitorName,
String parserStartRuleName, boolean debug, boolean trace) {
if(!parserStartRuleName.endsWith(")"))
parserStartRuleName += "()";
ST outputFileST = new ST(
"import sys\n"
+ "from antlr4 import *\n"
+ "from <lexerName> import <lexerName>\n"
+ "from <parserName> import <parserName>\n"
+ "from <listenerName> import <listenerName>\n"
+ "from <visitorName> import <visitorName>\n"
+ "\n"
+ "class TreeShapeListener(ParseTreeListener):\n"
+ "\n"
+ " def visitTerminal(self, node):\n"
+ " pass\n"
+ "\n"
+ " def visitErrorNode(self, node):\n"
+ " pass\n"
+ "\n"
+ " def exitEveryRule(self, ctx):\n"
+ " pass\n"
+ "\n"
+ " def enterEveryRule(self, ctx):\n"
+ " for child in ctx.getChildren():\n"
+ " parent = child.parentCtx\n"
+ " if not isinstance(parent, RuleNode) or parent.getRuleContext() != ctx:\n"
+ " raise IllegalStateException(\"Invalid parse tree shape detected.\")\n"
+ "\n"
+ "def main(argv):\n"
+ " input = FileStream(argv[1])\n"
+ " lexer = <lexerName>(input)\n"
+ " stream = CommonTokenStream(lexer)\n"
+ "<createParser>"
+ " parser.buildParseTrees = True\n"
+ " tree = parser.<parserStartRuleName>\n"
+ " ParseTreeWalker.DEFAULT.walk(TreeShapeListener(), tree)\n"
+ "\n" + "if __name__ == '__main__':\n"
+ " main(sys.argv)\n" + "\n");
String stSource = " parser = <parserName>(stream)\n";
if(debug)
stSource += " parser.addErrorListener(DiagnosticErrorListener())\n";
if(trace)
stSource += " parser.setTrace(True)\n";
ST createParserST = new ST(stSource);
outputFileST.add("createParser", createParserST);
outputFileST.add("parserName", parserName);
outputFileST.add("lexerName", lexerName);
outputFileST.add("listenerName", listenerName);
outputFileST.add("visitorName", visitorName);
outputFileST.add("parserStartRuleName", parserStartRuleName);
writeFile(tmpdir, "Test.py", outputFileST.render());
}
}
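For reference, the ST template in writeLexerTestFile renders a small driver script. With lexerName bound to a hypothetical "T" and showDFA false, the Test.py it writes would look roughly like this:
from __future__ import print_function
import sys
from antlr4 import *
from T import T

def main(argv):
    input = FileStream(argv[1])
    lexer = T(input)
    stream = CommonTokenStream(lexer)
    stream.fill()
    [ print(str(t)) for t in stream.tokens ]

if __name__ == '__main__':
    main(sys.argv)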

View File

@@ -1,399 +0,0 @@
IgnoredTests ::= [
default: false
]
TestFile(file) ::= <<
/* This file is generated by TestGenerator; any edits will be overwritten by the next generation. */
package org.antlr.v4.test.runtime.python2;
import org.junit.Ignore;
import org.junit.Test;
import static org.junit.Assert.*;
<if(file.Options.("ImportErrorQueue"))>
import org.antlr.v4.test.tool.ErrorQueue;
<endif>
<if(file.Options.("ImportGrammar"))>
import org.antlr.v4.tool.Grammar;
<endif>
@SuppressWarnings("unused")
public class Test<file.name> extends BasePython2Test {
<file.tests:{test | <test>}; separator="\n", wrap, anchor>
}
>>
LexerTestMethod(test) ::= <<
/* This file and method are generated by TestGenerator; any edits will be overwritten by the next generation. */
<testAnnotations(test)>
public void test<test.name>() throws Exception {
mkdir(tmpdir);
<test.SlaveGrammars:{grammar |
String slave_<grammar> =<writeStringLiteral(test.SlaveGrammars.(grammar))>;
writeFile(tmpdir, "<grammar>.g4", slave_<grammar>);
}; separator="\n">
<test.Grammar:{grammar |
<buildStringLiteral(test.Grammar.(grammar), "grammar")>
<if(test.AfterGrammar)>
<test.AfterGrammar>
<endif>
String input =<writeStringLiteral(test.Input)>;
String found = execLexer("<grammar>.g4", grammar, "<grammar><if(test.Options.("CombinedGrammar"))>Lexer<endif>", input, <writeBoolean(test.Options.("ShowDFA"))>);
assertEquals(<writeStringLiteral(test.Output)>, found);
<if(!isEmpty.(test.Errors))>
assertEquals(<writeStringLiteral(test.Errors)>, this.stderrDuringParse);
<else>
assertNull(this.stderrDuringParse);
<endif>
}>
}
>>
CompositeLexerTestMethod(test) ::= <<
<LexerTestMethod(test)>
>>
ParserTestMethod(test) ::= <<
/* This file and method are generated by TestGenerator; any edits will be overwritten by the next generation. */
<testAnnotations(test)>
public void test<test.name>() throws Exception {
mkdir(tmpdir);
<test.SlaveGrammars:{grammar |
String slave_<grammar> =<writeStringLiteral(test.SlaveGrammars.(grammar))>;
<if(test.Options.("SlaveIsLexer"))>
rawGenerateAndBuildRecognizer("<grammar>.g4", slave_<grammar>, null, "<grammar>");
<else>
writeFile(tmpdir, "<grammar>.g4", slave_<grammar>);
<endif>
}; separator="\n">
<test.Grammar:{grammar |
<buildStringLiteral(test.Grammar.(grammar), "grammar")>
<test.AfterGrammar>
String input =<writeStringLiteral(test.Input)>;
String found = execParser("<grammar>.g4", grammar, "<grammar><if(!test.slaveIsLexer)>Parser<endif>", "<if(test.slaveIsLexer)><first(test.slaveGrammars).grammarName><else><grammar>Lexer<endif>", "<grammar>Listener", "<grammar>Visitor", "<test.Rule>", input, <writeBoolean(test.Options.("Debug"))>);
assertEquals(<writeStringLiteral(test.Output)>, found);
<if(!isEmpty.(test.Errors))>
assertEquals(<writeStringLiteral(test.Errors)>, this.stderrDuringParse);
<else>
assertNull(this.stderrDuringParse);
<endif>
}>
}
>>
CompositeParserTestMethod(test) ::= <<
<ParserTestMethod(test)>
>>
AbstractParserTestMethod(test) ::= <<
/* this file and method are generated; any edits will be overwritten by the next generation */
String test<test.name>(String input) throws Exception {
String grammar = <test.grammar.lines:{ line | "<line>};separator="\\n\" +\n", wrap, anchor>";
return execParser("<test.grammar.grammarName>.g4", grammar, "<test.grammar.grammarName>Parser", "<test.grammar.grammarName>Lexer", "<test.startRule>", input, <test.debug>);
}
>>
ConcreteParserTestMethod(test) ::= <<
/* this file and method are generated; any edits will be overwritten by the next generation */
@Test
public void test<test.name>() throws Exception {
String found = test<test.baseName>("<test.input>");
assertEquals("<test.expectedOutput>", found);
<if(test.expectedErrors)>
assertEquals("<test.expectedErrors>", this.stderrDuringParse);
<else>
assertNull(this.stderrDuringParse);
<endif>
}
>>
testAnnotations(test) ::= <%
@Test
<if(test.Options.("Ignore"))>
<\n>@Ignore(<writeStringLiteral(test.Options.("Ignore"))>)
<elseif(IgnoredTests.(({<file.name>.<test.name>})))>
<\n>@Ignore(<writeStringLiteral(IgnoredTests.(({<file.name>.<test.name>})))>)
<endif>
%>
buildStringLiteral(text, variable) ::= <<
StringBuilder <variable>Builder = new StringBuilder(<strlen.(text)>);
<lines.(text):{line|<variable>Builder.append("<escape.(line)>");}; separator="\n">
String <variable> = <variable>Builder.toString();
>>
writeStringLiteral(text) ::= <%
<if(isEmpty.(text))>
""
<else>
<writeLines(lines.(text))>
<endif>
%>
writeLines(textLines) ::= <%
<if(rest(textLines))>
<textLines:{line|
<\n> "<escape.(line)>}; separator="\" +">"
<else>
"<escape.(first(textLines))>"
<endif>
%>
string(text) ::= <<
"<escape.(text)>"
>>
writeBoolean(o) ::= "<if(o && !isEmpty.(o))>true<else>false<endif>"
writeln(s) ::= <<print(<s>)>>
write(s) ::= <<print(<s>,end='')>>
False() ::= "False"
True() ::= "True"
Not(v) ::= "not <v>"
Assert(s) ::= ""
Cast(t,v) ::= "<v>"
Append(a,b) ::= "<a> + str(<b>)"
Concat(a,b) ::= "<a><b>"
DeclareLocal(s,v) ::= "<s> = <v>"
AssertIsList(v) ::= "assert isinstance(v, (list, tuple))"
AssignLocal(s,v) ::= "<s> = <v>"
InitIntMember(n,v) ::= <%<n> = <v>%>
InitBooleanMember(n,v) ::= <%<n> = <v>%>
GetMember(n) ::= <%self.<n>%>
SetMember(n,v) ::= <%self.<n> = <v>%>
AddMember(n,v) ::= <%self.<n> += <v>%>
PlusMember(v,n) ::= <%<v> + str(self.<n>)%>
MemberEquals(n,v) ::= <%self.<n> == <v>%>
ModMemberEquals(n,m,v) ::= <%self.<n> % <m> == <v>%>
ModMemberNotEquals(n,m,v) ::= <%self.<n> % <m> != <v>%>
DumpDFA() ::= "self.dumpDFA()"
Pass() ::= "pass"
StringList() ::= ""
BuildParseTrees() ::= "self._buildParseTrees = True"
BailErrorStrategy() ::= <%self._errHandler = BailErrorStrategy()%>
ToStringTree(s) ::= <%<s>.toStringTree(recog=self)%>
Column() ::= "self.column"
Text() ::= "self.text"
ValEquals(a,b) ::= <%<a>==<b>%>
TextEquals(a) ::= <%self.text=="<a>"%>
PlusText(a) ::= <%"<a>" + self.text%>
InputText() ::= "self._input.getText()"
LTEquals(i, v) ::= <%self._input.LT(<i>).text==<v>%>
LANotEquals(i, v) ::= <%self._input.LA(<i>)!=<v>%>
TokenStartColumnEquals(i) ::= <%self._tokenStartColumn==<i>%>
ImportListener(X) ::= ""
GetExpectedTokenNames() ::= "self.getExpectedTokens().toString(self.literalNames, self.symbolicNames)"
RuleInvocationStack() ::= "str_list(self.getRuleInvocationStack())"
LL_EXACT_AMBIG_DETECTION() ::= <<self._interp.predictionMode = PredictionMode.LL_EXACT_AMBIG_DETECTION>>
ParserPropertyMember() ::= <<
@members {
def Property(self):
return True
}
>>
PositionAdjustingLexer() ::= <<
def resetAcceptPosition(self, index, line, column):
self._input.seek(index)
self.line = line
self.column = column
self._interp.consume(self._input)
def nextToken(self):
if self._interp.__dict__.get("resetAcceptPosition", None) is None:
self._interp.__dict__["resetAcceptPosition"] = self.resetAcceptPosition
return super(type(self),self).nextToken()
def emit(self):
if self._type==PositionAdjustingLexer.TOKENS:
self.handleAcceptPositionForKeyword("tokens")
elif self._type==PositionAdjustingLexer.LABEL:
self.handleAcceptPositionForIdentifier()
return super(type(self),self).emit()
def handleAcceptPositionForIdentifier(self):
tokenText = self.text
identifierLength = 0
while identifierLength \< len(tokenText) and self.isIdentifierChar(tokenText[identifierLength]):
identifierLength += 1
if self._input.index > self._tokenStartCharIndex + identifierLength:
offset = identifierLength - 1
self._interp.resetAcceptPosition(self._tokenStartCharIndex + offset,
self._tokenStartLine, self._tokenStartColumn + offset)
return True
else:
return False
def handleAcceptPositionForKeyword(self, keyword):
if self._input.index > self._tokenStartCharIndex + len(keyword):
offset = len(keyword) - 1
self._interp.resetAcceptPosition(self._tokenStartCharIndex + offset,
self._tokenStartLine, self._tokenStartColumn + offset)
return True
else:
return False
@staticmethod
def isIdentifierChar(c):
return c.isalnum() or c == '_'
>>
BasicListener(X) ::= <<
if __name__ is not None and "." in __name__:
from .<X>Listener import <X>Listener
else:
from <X>Listener import <X>Listener
class LeafListener(TListener):
def visitTerminal(self, node):
print(node.symbol.text)
>>
WalkListener(s) ::= <<
walker = ParseTreeWalker()
walker.walk(TParser.LeafListener(), <s>)
>>
TokenGetterListener(X) ::= <<
if __name__ is not None and "." in __name__:
from .<X>Listener import <X>Listener
else:
from <X>Listener import <X>Listener
class LeafListener(TListener):
def exitA(self, ctx):
if ctx.getChildCount()==2:
print(ctx.INT(0).symbol.text + ' ' + ctx.INT(1).symbol.text + ' ' + str_list(ctx.INT()))
else:
print(str(ctx.ID().symbol))
>>
RuleGetterListener(X) ::= <<
if __name__ is not None and "." in __name__:
from .<X>Listener import <X>Listener
else:
from <X>Listener import <X>Listener
class LeafListener(TListener):
def exitA(self, ctx):
if ctx.getChildCount()==2:
print(ctx.b(0).start.text + ' ' + ctx.b(1).start.text + ' ' + ctx.b()[0].start.text)
else:
print(ctx.b(0).start.text)
>>
LRListener(X) ::= <<
if __name__ is not None and "." in __name__:
from .<X>Listener import <X>Listener
else:
from <X>Listener import <X>Listener
class LeafListener(TListener):
def exitE(self, ctx):
if ctx.getChildCount()==3:
print(ctx.e(0).start.text + ' ' + ctx.e(1).start.text + ' ' + ctx.e()[0].start.text)
else:
print(ctx.INT().symbol.text)
>>
LRWithLabelsListener(X) ::= <<
if __name__ is not None and "." in __name__:
from .<X>Listener import <X>Listener
else:
from <X>Listener import <X>Listener
class LeafListener(TListener):
def exitCall(self, ctx):
print(ctx.e().start.text + ' ' + str(ctx.eList()))
def exitInt(self, ctx):
print(ctx.INT().symbol.text)
>>
DeclareContextListGettersFunction() ::= <<
def foo():
s = SContext()
a = s.a()
b = s.b()
>>
Declare_foo() ::= <<def foo(self):
print('foo')
>>
Invoke_foo() ::= "self.foo()"
Declare_pred() ::= <<def pred(self, v):
print('eval=' + str(v).lower())
return v
>>
Invoke_pred(v) ::= <<self.pred(<v>)>>
isEmpty ::= [
"": true,
default: false
]
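To make the action-chunk templates above concrete, here is a small runnable sketch of the Python a few of them expand to; the class, the member "i", and the local "acc" are hypothetical scaffolding, not part of any generated file:
from __future__ import print_function

class _Demo(object):
    def __init__(self):
        self.i = 0               # SetMember("i", 0)         -> "self.<n> = <v>"

    def bump(self):
        self.i += 1              # AddMember("i", 1)         -> "self.<n> += <v>"
        acc = "count "
        acc = acc + str(self.i)  # Append(a, GetMember("i")) -> "<a> + str(<b>)"
        print(acc)               # writeln(s)                -> "print(<s>)"
        print(acc, end='')       # write(s)                  -> "print(<s>,end='')"

_Demo().bump()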