Repair init methods
parent 694096d517
commit 8cba3ecc3a
@@ -1,869 +0,0 @@
/** ANTLR tool checks output templates are compatible with tool code generation.
 * For now, a simple string Match used on x.y of x.y.z scheme.
 * Must Match Tool.VERSION during load to templates.
 *
 * REQUIRED.
 */

fileHeader(grammarFileName, ANTLRVersion) ::= <<
// Generated from <grammarFileName; format="java-escape"> by ANTLR <ANTLRVersion>
>>

// args must be <object-model-object>, <fields-resulting-in-STs>

ParserFile(file, parser, namedActions) ::= <<
<fileHeader(file.grammarFileName, file.ANTLRVersion)>
package parser // <file.grammarName>

import (
"antlr4"
"reflect"
)

<namedActions.header>

<parser>

>>

ListenerFile(file, header) ::= <<
<fileHeader(file.grammarFileName, file.ANTLRVersion)>
package parser // <file.grammarName>

import "antlr4"

// This class defines a complete listener for a parse tree produced by <file.parserName>

type <file.grammarName>Listener struct {

}

func (l *<file.grammarName>Listener) EnterEveryRule(node antlr4.IParserRuleContext) {
\}

func (l *<file.grammarName>Listener) ExitEveryRule(node antlr4.IParserRuleContext) {
\}

func (l *<file.grammarName>Listener) VisitTerminal(ctx antlr4.TerminalNode) {
\}

func (l *<file.grammarName>Listener) VisitErrorNode(ctx antlr4.ErrorNode) {
\}

<file.listenerNames:{lname |
// Enter a parse tree produced by <file.parserName>#<lname>.
func (l *<file.grammarName>Listener) Enter<lname; format="cap">(ctx antlr4.IParserRuleContext) {
\}

// Exit a parse tree produced by <file.parserName>#<lname>.
func (l *<file.grammarName>Listener) Exit<lname; format="cap">(ctx antlr4.IParserRuleContext) {
\}

}; separator="\n">

>>

VisitorFile(file, header) ::= <<
<fileHeader(file.grammarFileName, file.ANTLRVersion)>
package parser // <file.grammarName>

import "antlr4"

<header>

// This class defines a complete generic Visitor for a parse tree produced by <file.parserName>.

type <file.grammarName>Visitor struct {

}

<file.VisitorNames:{lname |
// Visit a parse tree produced by <file.parserName>#<lname>.
func (l <file.grammarName>Visitor) Visit<lname; format="cap">(ctx IParserRuleContext) {
\}

}; separator="\n">

>>
Parser(parser, funcs, atn, sempredFuncs, superClass) ::= <<

<if(superClass)>
var <superClass> = require('./<superClass>').<superClass> // TODO
<endif>

var parserATN = <atn>
var deserializer = antlr4.NewATNDeserializer(nil)
var deserializedATN = deserializer.DeserializeFromUInt16( parserATN )

var literalNames = []string{ <parser.literalNames:{t | <t>}; null="\"\"", separator=", ", wrap, anchor> }
var symbolicNames = []string{ <parser.symbolicNames:{t | <t>}; null="\"\"", separator=", ", wrap, anchor> }
var ruleNames = []string{ <parser.ruleNames:{r | "<r>"}; separator=", ", wrap, anchor> }

type <parser.name> struct {
<superClass; null="*antlr4.Parser">
}

func New<parser.name>(input antlr4.TokenStream) *<parser.name> {

var decisionToDFA = make([]*antlr4.DFA,len(deserializedATN.DecisionToState))
var sharedContextCache = antlr4.NewPredictionContextCache()

for index, ds := range deserializedATN.DecisionToState {
decisionToDFA[index] = antlr4.NewDFA(ds, index)
}

parser := new(<parser.name>)

parser.Parser = NewParser(input)

parser.Interpreter = antlr4.NewParserATNSimulator(parser, deserializedATN, decisionToDFA, sharedContextCache)
parser.RuleNames = ruleNames
parser.LiteralNames = literalNames
parser.SymbolicNames = symbolicNames
<namedActions.members>
parser.GrammarFileName = "<parser.grammarFileName; format="java-escape">"

return parser
}

const(
<parser.name>EOF = antlr4.TokenEOF
<if(parser.tokens)>
<parser.tokens:{k | <parser.name><k> = <parser.tokens.(k)>}; separator="\n", wrap, anchor>
<endif>
)

const (
<parser.rules:{r | <parser.name>RULE_<r.name> = <r.index>}; separator="\n", wrap, anchor>
)

<funcs; separator="\n">

<if(sempredFuncs)>
func (p *<parser.name>) Sempred(localctx, ruleIndex int, predIndex int) {
switch ruleIndex {
<parser.sempredFuncs.values:{f | case <f.ruleIndex>:
return p.<f.name>_Sempred(localctx, predIndex);}; separator="\n">
default:
panic("No predicate with index:" + ruleIndex)
}
}

<sempredFuncs.values; separator="\n">
<endif>

>>
dumpActions(recog, argFuncs, actionFuncs, sempredFuncs) ::= <<
<if(actionFuncs)>
func (l *<lexer.name>) Action(localctx, ruleIndex int, actionIndex int) {
switch ruleIndex {
<recog.actionFuncs.values:{f|
case <f.ruleIndex>:
p.<f.name>_Action(localctx, actionIndex)
}; separator="\n">
default:
panic("No registered action for:" + ruleIndex)
}
}

<actionFuncs.values; separator="\n">
<endif>
<if(sempredFuncs)>
func (l *<lexer.name>) Sempred(localctx, ruleIndex, predIndex) {
switch ruleIndex {
<recog.sempredFuncs.values:{f| case <f.ruleIndex>:
return l.<f.name>_Sempred(localctx, predIndex);}; separator="\n">
default:
panic("No registered predicate for:" + ruleIndex)
}
}

<sempredFuncs.values; separator="\n">
<endif>
>>

/* This generates a private method since the actionIndex is generated, making an
 * overriding implementation impossible to maintain.
 */
RuleActionFunction(r, actions) ::= <<

func (l *<lexer.name>) <r.name>_Action(localctx , actionIndex) {
switch actionIndex {
<actions:{index|
case <index>:
<actions.(index)>
}; separator="\n">
default:
panic("No registered action for:" + actionIndex)
}
}
>>

/* This generates a private method since the predIndex is generated, making an
 * overriding implementation impossible to maintain.
 */
RuleSempredFunction(r, actions) ::= <<
func (s *<if(parser)><parser.name><else><lexer.name><endif>) <r.name>_Sempred(localctx, predIndex int) {
switch predIndex {
<actions:{index| case <index>:
return <actions.(index)>;}; separator="\n">
default:
panic("No predicate with index:" + predIndex)
}
}

>>
RuleFunction(currentRule,args,code,locals,ruleCtx,altLabelCtxs,namedActions,finallyAction,postamble,exceptions) ::= <<

<ruleCtx>

<altLabelCtxs:{l | <altLabelCtxs.(l)>}; separator="\n">

func (p *<parser.name>) <currentRule.name>(<currentRule.args:{a | <a.name>}; separator=", ">) *<currentRule.ctxType> {

localctx := New<currentRule.ctxType>(p, p.GetParserRuleContext(), p.GetState()<currentRule.args:{a | , <a.name>}>)
p.EnterRule(localctx, <currentRule.startState>, <parser.name>RULE_<currentRule.name>)
<namedActions.init>
<locals; separator="\n">

defer func(){
<finallyAction>
p.ExitRule()
}()

defer func() {
if err := recover(); err != nil {
<if(exceptions)>
<exceptions; separator="\n">
<else>
if v, ok := err.(antlr4.IRecognitionException); ok {
localctx.SetException( v )
p.GetErrorHandler().ReportError(p, v)
p.GetErrorHandler().Recover(p, v)
} else {
panic(err)
}
<endif>
}
}()

<code>
<postamble; separator="\n">
<namedActions.after>

return localctx
}

>>

LeftRecursiveRuleFunction(currentRule,args,code,locals,ruleCtx,altLabelCtxs,
namedActions,finallyAction,postamble) ::=
<<

<ruleCtx>
<altLabelCtxs:{l | <altLabelCtxs.(l)>}; separator="\n">

func (p *<parser.name>) <currentRule.name>(_p<if(currentRule.args)>, <args:{a | , <a>}><endif>) *<currentRule.ctxType> {

_parentctx := p.GetParent().(IParserRuleContext)
_parentState := p.GetState()
localctx := New<currentRule.ctxType>(p, p.GetParserRuleContext(), _parentState<args:{a | , <a.name>}>)
_prevctx := localctx
_startState := <currentRule.startState>
p.EnterRecursionRule(localctx, <currentRule.startState>, <parser.name>RULE_<currentRule.name>, _p)
<namedActions.init>
<locals; separator="\n">

defer func(){
<finallyAction>
p.UnrollRecursionContexts(_parentctx)
}()

defer func(){
if err := recover(); err != nil {
if v, ok := err.(antlr4.IRecognitionException); ok {
localctx.SetException(v)
p.GetErrorHandler().ReportError(p, v)
p.GetErrorHandler().Recover(p, v)
} else {
panic(err)
}
}
}()

<code>
<postamble; separator="\n">
<namedActions.after>

return localctx
}

>>
CodeBlockForOuterMostAlt(currentOuterMostAltCodeBlock, locals, preamble, ops) ::= <<
<if(currentOuterMostAltCodeBlock.altLabel)>localctx = New<currentOuterMostAltCodeBlock.altLabel; format="cap">Context(p, localctx)<endif>
p.EnterOuterAlt(localctx, <currentOuterMostAltCodeBlock.alt.altNum>)
<CodeBlockForAlt(currentAltCodeBlock=currentOuterMostAltCodeBlock, ...)>
>>

CodeBlockForAlt(currentAltCodeBlock, locals, preamble, ops) ::= <<
<locals; separator="\n">
<preamble; separator="\n">
<ops; separator="\n">
>>

LL1AltBlock(choice, preamble, alts, error) ::= <<
p.SetState(<choice.stateNumber>)
<if(choice.label)><labelref(choice.label)> = p.GetTokenStream().LT(1)<endif>
<preamble; separator="\n">
switch p.GetTokenStream().LA(1) {
<choice.altLook,alts:{look,alt| <cases(ttypes=look)>
<alt>
}; separator="\n">
default:
<error>
}
>>

LL1OptionalBlock(choice, alts, error) ::= <<
p.SetState(<choice.stateNumber>)
switch p.GetTokenStream().LA(1) {
<choice.altLook,alts:{look,alt| <cases(ttypes=look)>
<alt>
}; separator="\n">
default:
<error>
}
>>

LL1OptionalBlockSingleAlt(choice, expr, alts, preamble, error, followExpr) ::= <<
p.SetState(<choice.stateNumber>)
<preamble; separator="\n">
if <expr> {
<alts; separator="\n">
}
<!else if ( !(<followExpr>) ) <error>!>
>>

LL1StarBlockSingleAlt(choice, loopExpr, alts, preamble, iteration) ::= <<
p.SetState(<choice.stateNumber>)
p.GetErrorHandler().Sync(p)
<preamble; separator="\n">
for <loopExpr> {
<alts; separator="\n">
p.SetState(<choice.loopBackStateNumber>)
p.GetErrorHandler().Sync(p)
<iteration>
}
>>

LL1PlusBlockSingleAlt(choice, loopExpr, alts, preamble, iteration) ::= <<
p.SetState(<choice.blockStartStateNumber>) <! alt block decision !>
p.GetErrorHandler().Sync(p)
<preamble; separator="\n">
for ok := true; ok; ok = <loopExpr> {
<alts; separator="\n">
p.SetState(<choice.stateNumber>); <! loopback/exit decision !>
p.GetErrorHandler().Sync(p)
<iteration>
}
>>

// LL(*) stuff
AltBlock(choice, preamble, alts, error) ::= <<
p.SetState(<choice.stateNumber>)
p.GetErrorHandler().Sync(p)
<if(choice.label)><labelref(choice.label)> = _input.LT(1)<endif>
<preamble; separator="\n">
la_ := p.GetInterpreter().AdaptivePredict(p.GetTokenStream(),<choice.decision>,p.GetParserRuleContext())
switch la_ {
<alts:{alt |
case <i>:
<alt>
}; separator="\n">
}
>>

OptionalBlock(choice, alts, error) ::= <<
p.SetState(<choice.stateNumber>)
p.GetErrorHandler().Sync(p)
la_ := p.GetInterpreter().AdaptivePredict(p.GetTokenStream(),<choice.decision>,p.GetParserRuleContext())
<alts:{alt |
if la_==<i><if(!choice.ast.greedy)>+1<endif> {
<alt>
}; separator="} else ">
}
>>

StarBlock(choice, alts, Sync, iteration) ::= <<
p.SetState(<choice.stateNumber>)
p.GetErrorHandler().Sync(p)
_alt := p.GetInterpreter().AdaptivePredict(p.GetTokenStream(),<choice.decision>,p.GetParserRuleContext())
for _alt!=<choice.exitAlt> && _alt!= antlr4.ATNINVALID_ALT_NUMBER {
if(_alt==1<if(!choice.ast.greedy)>+1<endif>) {
<iteration>
<alts> <! should only be one !>
}
p.SetState(<choice.loopBackStateNumber>)
p.GetErrorHandler().Sync(p)
_alt = p.GetInterpreter().AdaptivePredict(p.GetTokenStream(),<choice.decision>,p.GetParserRuleContext())
}

>>

PlusBlock(choice, alts, error) ::= <<
p.SetState(<choice.blockStartStateNumber>) <! alt block decision !>
p.GetErrorHandler().Sync(p)
_alt := 1<if(!choice.ast.greedy)>+1<endif>
for ok := true; ok; ok = _alt!=<choice.exitAlt> && _alt!= antlr4.ATNINVALID_ALT_NUMBER {
switch _alt {
<alts:{alt|
case <i><if(!choice.ast.greedy)>+1<endif>:
<alt>
//}; separator="\n">
default:
<error>
}
p.SetState(<choice.loopBackStateNumber>) <! loopback/exit decision !>
p.GetErrorHandler().Sync(p)
_alt = p.GetInterpreter().AdaptivePredict(p.GetTokenStream(),<choice.decision>, p.GetParserRuleContext())
}
>>
Sync(s) ::= "Sync(<s.expecting.name>)"

ThrowNoViableAlt(t) ::= "panic(NewNoViableAltException(p))"

TestSetInline(s) ::= <<
<s.bitsets:{bits | <if(rest(rest(bits.ttypes)))><bitsetBitfieldComparison(s, bits)><else><bitsetInlineComparison(s, bits)><endif>}; separator=" || ">
>>

// Javascript language spec - shift operators are 32 bits long max
testShiftInRange(shiftAmount) ::= <<
((<shiftAmount>) & 0x1f) == 0
>>

// produces smaller bytecode only when bits.ttypes contains more than two items
bitsetBitfieldComparison(s, bits) ::= <%
(<testShiftInRange({<offsetShiftVar(s.varName, bits.shift)>})> && ((1 \<\< uint(<offsetShiftVar(s.varName, bits.shift)>)) & (<bits.ttypes:{ttype | (1 \<\< <offsetShiftType(ttype, bits.shift)>)}; separator=" | ">)) != 0)
%>

isZero ::= [
"0":true,
default:false
]

offsetShiftVar(shiftAmount, offset) ::= <%
<if(!isZero.(offset))>(<shiftAmount> - <offset>)<else><shiftAmount><endif>
%>

offsetShiftType(shiftAmount, offset) ::= <%
<if(!isZero.(offset))>(<parser.name><shiftAmount> - <offset>)<else><parser.name><shiftAmount><endif>
%>

// produces more efficient bytecode when bits.ttypes contains at most two items
bitsetInlineComparison(s, bits) ::= <%
<bits.ttypes:{ttype | <s.varName>==<parser.name><ttype>}; separator=" || ">
%>

cases(ttypes) ::= <<
<ttypes:{t | case <parser.name><t>:}; separator="\n">
>>

InvokeRule(r, argExprsChunks) ::= <<
p.SetState(<r.stateNumber>)
<if(r.labels)><r.labels:{l | <labelref(l)> = }><endif>p.<r.name>(<if(r.ast.options.p)><r.ast.options.p><if(argExprsChunks)>,<endif><endif><argExprsChunks>)
>>

MatchToken(m) ::= <<
p.SetState(<m.stateNumber>)
<if(m.labels)><m.labels:{l | <labelref(l)> = }><endif>p.Match(<parser.name><m.name>)
>>

MatchSet(m, expr, capture) ::= "<CommonSetStuff(m, expr, capture, false)>"

MatchNotSet(m, expr, capture) ::= "<CommonSetStuff(m, expr, capture, true)>"

CommonSetStuff(m, expr, capture, invert) ::= <<
p.SetState(<m.stateNumber>)
<if(m.labels)><m.labels:{l | <labelref(l)> = }>p.GetTokenStream().LT(1);<endif>
<capture>
<if(invert)>if <m.varName>\<=0 || <expr> <else>if !(<expr>)<endif> {
<if(m.labels)><m.labels:{l | <labelref(l)> = }><endif>p.GetErrorHandler().RecoverInline(p)
} else {
p.Consume()
}
>>

Wildcard(w) ::= <<
p.SetState(<w.stateNumber>)
<if(w.labels)><w.labels:{l | <labelref(l)> = }><endif>MatchWildcard()
>>

// ACTION STUFF
Action(a, foo, chunks) ::= "<chunks>"

ArgAction(a, chunks) ::= "<chunks>"

SemPred(p, chunks, failChunks) ::= <<
p.SetState(<p.stateNumber>)
if !( <chunks>) {
panic( FailedPredicateException(p, <p.predicate><if(failChunks)>, <failChunks><elseif(p.msg)>, <p.msg><endif>))
}
>>

ExceptionClause(e, catchArg, catchAction) ::= <<
catch (<catchArg>) {
<catchAction>
}
>>

// lexer actions are not associated with model objects

LexerSkipCommand() ::= "p.skip()"
LexerMoreCommand() ::= "p.more()"
LexerPopModeCommand() ::= "p.popMode()"
LexerTypeCommand(arg) ::= "p._type = <arg>"
LexerChannelCommand(arg) ::= "p._channel = <arg>"
LexerModeCommand(arg) ::= "p._mode = <arg>"
LexerPushModeCommand(arg) ::= "p.pushMode(<arg>)"

ActionText(t) ::= "<t.text>"
ActionTemplate(t) ::= "<t.st>"
ArgRef(a) ::= "localctx.<a.name>"
LocalRef(a) ::= "localctx.<a.name>"
RetValueRef(a) ::= "localctx.<a.name>"
QRetValueRef(a) ::= "<ctx(a)>.<a.dict>.<a.name>"
/** How to translate $tokenLabel */
TokenRef(t) ::= "<ctx(t)>.<t.name>"
LabelRef(t) ::= "<ctx(t)>.<t.name>"
ListLabelRef(t) ::= "<ctx(t)>.<ListLabelName(t.name)>"
SetAttr(s,rhsChunks) ::= "<ctx(s)>.<s.name> = <rhsChunks>"

TokenLabelType() ::= "<file.TokenLabelType; null={Token}>"
InputSymbolType() ::= "<file.InputSymbolType; null={Token}>"

TokenPropertyRef_text(t) ::= "(<ctx(t)>.<t.label>==null ? null : <ctx(t)>.<t.label>.text)"
TokenPropertyRef_type(t) ::= "(<ctx(t)>.<t.label> == null ? 0 : <ctx(t)>.<t.label>.type)"
TokenPropertyRef_line(t) ::= "(<ctx(t)>.<t.label> == null ? 0 : <ctx(t)>.<t.label>.line)"
TokenPropertyRef_pos(t) ::= "(<ctx(t)>.<t.label> == null ? 0 : <ctx(t)>.<t.label>.column)"
TokenPropertyRef_channel(t) ::= "(<ctx(t)>.<t.label> == null ? 0 : <ctx(t)>.<t.label>.channel)"
TokenPropertyRef_index(t) ::= "(<ctx(t)>.<t.label> == null ? 0 : <ctx(t)>.<t.label>.tokenIndex)"
TokenPropertyRef_int(t) ::= "(<ctx(t)>.<t.label> == null ? 0 : parseInt(<ctx(t)>.<t.label>.text))"

RulePropertyRef_start(r) ::= "(<ctx(r)>.<r.label>==null ? null : <ctx(r)>.<r.label>.start)"
RulePropertyRef_stop(r) ::= "(<ctx(r)>.<r.label>==null ? null : <ctx(r)>.<r.label>.stop)"
RulePropertyRef_text(r) ::= "(<ctx(r)>.<r.label>==null ? null : p.GetTokenStream().GetTextFromInterval(NewInterval(<ctx(r)>.<r.label>.GetStart(),<ctx(r)>.<r.label>.GetStop())))"
RulePropertyRef_ctx(r) ::= "<ctx(r)>.<r.label>"
RulePropertyRef_parser(r) ::= "this"

ThisRulePropertyRef_start(r) ::= "localctx.start"
ThisRulePropertyRef_stop(r) ::= "localctx.stop"
ThisRulePropertyRef_text(r) ::= "p.GetTokenStream().GetTextFromInterval(NewInterval(localctx.GetStart(), p.GetTokenStream().LT(-1)))"
ThisRulePropertyRef_ctx(r) ::= "localctx"
ThisRulePropertyRef_parser(r) ::= "p"

NonLocalAttrRef(s) ::= "getInvokingContext(<s.ruleIndex>).<s.name>"
SetNonLocalAttr(s, rhsChunks) ::= "getInvokingContext(<s.ruleIndex>).<s.name> = <rhsChunks>"

AddToLabelList(a) ::= "<ctx(a.label)>.<a.listName> = append(<ctx(a.label)>.<a.listName>, push(<labelref(a.label)>)"

TokenDecl(t) ::= "p.<t.name> = nil // <TokenLabelType()>"
TokenTypeDecl(t) ::= "<t.name> := 0 // <TokenLabelType()> type"
TokenListDecl(t) ::= "p.<t.name> = [] // of <TokenLabelType()>s"
RuleContextDecl(r) ::= "p.<r.name> = nil // reflect.TypeOf((*<r.ctxName>)(nil)).Elem()"
RuleContextListDecl(rdecl) ::= "p.<rdecl.name> = [] // of <rdecl.ctxName>s"
ContextTokenGetterDecl(t) ::= <<
<t.name>() interface{} {
return s.GetToken(<parser.name><t.name>, 0)
}
>>

// should never be called
ContextTokenListGetterDecl(t) ::= <<
def <t.name>_list(self):
return self.GetTokens(<parser.name><t.name>)
>>

ContextTokenListIndexedGetterDecl(t) ::= <<
<t.name>(i int) interface{} {
if i \< 0 {
return s.GetTokens(<parser.name><t.name>)
} else {
return s.GetToken(<parser.name><t.name>, i)
}
}

>>

ContextRuleGetterDecl(r) ::= <<
<r.name>() interface{} {
return s.GetTypedRuleContext(reflect.TypeOf((*<r.ctxName>)(nil)).Elem(),0)
}
>>

// should never be called
ContextRuleListGetterDecl(r) ::= <<
func <r.name>_list(self):
return s.GetTypedRuleContexts(reflect.TypeOf((*<r.ctxName>)(nil)).Elem())

>>

ContextRuleListIndexedGetterDecl(r) ::= <<
<r.name>(i int) interface{} {
if i \< 0 {
return s.GetTypedRuleContexts(reflect.TypeOf((*<r.ctxName>)(nil)).Elem())
} else {
return s.GetTypedRuleContext(reflect.TypeOf((*<r.ctxName>)(nil)).Elem(),i)
}
}
>>

LexerRuleContext() ::= "RuleContext"

/** The rule context name is the rule followed by a suffix; e.g.,
 * r becomes rContext.
 */
RuleContextNameSuffix() ::= "Context"

ImplicitTokenLabel(tokenName) ::= "_<tokenName>"
ImplicitRuleLabel(ruleName) ::= "_<ruleName>"
ImplicitSetLabel(id) ::= "_tset<id>"
ListLabelName(label) ::= "<label>"

CaptureNextToken(d) ::= "<d.varName> = p.GetTokenStream().LT(1)"
CaptureNextTokenType(d) ::= "<d.varName> = p.GetTokenStream().LA(1);"
StructDecl(struct,ctorAttrs,attrs,getters,dispatchMethods,interfaces,extensionMembers,
superClass={ParserRuleContext}) ::= <<

type <struct.name> struct {
*antlr4.ParserRuleContext

parser antlr4.IParser
}

func New<struct.name>(parser antlr4.IParser, parent antlr4.IParserRuleContext, invokingState int<struct.ctorAttrs:{a | , <a.name>}>) *<struct.name> {

var p = new(<struct.name>)

p.InitParserRuleContext( parent, invokingState )

p.parser = parser
p.RuleIndex = <parser.name>RULE_<struct.derivedFromName>
<attrs:{a | <a>}; separator="\n">
<struct.ctorAttrs:{a | p.<a.name> = <a.name> || null;}; separator="\n">
return p
}

<getters:{g | func (s *<struct.name>) <g>}; separator="\n\n">

<if(struct.provideCopyFrom)> <! don't need copy unless we have subclasses !>
func (s *<struct.name>) CopyFrom(ctx <struct.name>) {
<superClass>.prototype.CopyFrom.call(s, ctx)
<struct.attrs:{a | s.<a.name> = ctx.<a.name>;}; separator="\n">
}
<endif>
<dispatchMethods; separator="\n">
<extensionMembers; separator="\n">

>>

AltLabelStructDecl(struct,attrs,getters,dispatchMethods) ::= <<

type <struct.name> struct {
parent antlr4.IParserRuleContext
parser antlr4.IParser
}

func New<struct.name>(parser antlr4.IParser, ctx antlr4.IParserRuleContext) *<struct.name> {

var p = new(<struct.name>)

<currentRule.name; format="cap">Context.call(this, parser)

<attrs:{a | <a>;}; separator="\n">
<currentRule.name; format="cap">Context.prototype.CopyFrom.call(this, ctx)

return p
}

<getters:{g | func (s *<struct.name>) <g>}; separator="\n\n">
<dispatchMethods; separator="\n">

>>

ListenerDispatchMethod(method) ::= <<
func (s *<struct.name>) <if(method.isEnter)>Enter<else>Exit<endif>Rule(listener antlr4.ParseTreeListener) {

listener.(*<parser.grammarName>Listener).<if(method.isEnter)>Enter<else>Exit<endif><struct.derivedFromName; format="cap">(s)

}

>>

VisitorDispatchMethod(method) ::= <<
func (s *<struct.name>) accept(Visitor antlr4.ParseTreeVisitor) interface{} {

switch t := listener.(type) {
case *<parser.grammarName>Listener:
return t.Visit<struct.derivedFromName; format="cap">(s)
default:
return t.VisitChildren(s)
}
}

>>
AttributeDecl(d) ::= "p.<d.name> = <if(d.InitValue)><d.InitValue><else>null<endif>"

/** If we don't know location of label def x, use this template */
labelref(x) ::= "<if(!x.isLocal)>localctx.<endif><x.name>"

/** For any action chunk, what is correctly-typed context struct ptr? */
ctx(actionChunk) ::= "localctx"

// used for left-recursive rules
recRuleAltPredicate(ruleName,opPrec) ::= "p.Precpred(p.GetParserRuleContext(), <opPrec>)"
recRuleSetReturnAction(src,name) ::= "$<name>=$<src>.<name>"
recRuleSetStopToken() ::= "p.GetParserRuleContext().stop = p.GetTokenStream().LT(-1);"

recRuleAltStartAction(ruleName, ctxName, label) ::= <<
localctx = New<ctxName>Context(this, _parentctx, _parentState)
<if(label)>localctx.<label> = _prevctx;<endif>
p.PushNewRecursionContext(localctx, _startState, <parser.name>RULE_<ruleName>)
>>

recRuleLabeledAltStartAction(ruleName, currentAltLabel, label, isListLabel) ::= <<
localctx = New<currentAltLabel; format="cap">Context(this, New<ruleName; format="cap">Context(this, _parentctx, _parentState))
<if(label)>
<if(isListLabel)>
localctx.<label>.push(_prevctx)
<else>
localctx.<label> = _prevctx
<endif>
<endif>
p.PushNewRecursionContext(localctx, _startState, <parser.name>RULE_<ruleName>)
>>

recRuleReplaceContext(ctxName) ::= <<
localctx = New<ctxName>Context(this, localctx)
p.GetParserRuleContext() = localctx
_prevctx = localctx
>>

recRuleSetPrevCtx() ::= <<
if(p.GetParseListeners()!=nil) {
p.TriggerExitRuleEvent()
}
_prevctx = localctx
>>
LexerFile(lexerFile, lexer, namedActions) ::= <<
<fileHeader(lexerFile.grammarFileName, lexerFile.ANTLRVersion)>
package parser

import (
"antlr4"
)

<namedActions.header>

<lexer>

>>

Lexer(lexer, atn, actionFuncs, sempredFuncs, superClass) ::= <<

var serializedLexerAtn = <atn>
var lexerDeserializer = antlr4.NewATNDeserializer(nil)
var lexerAtn = lexerDeserializer.DeserializeFromUInt16( serializedLexerAtn )

var lexerModeNames = []string{ <lexer.modes:{m| "<m>"}; separator=", ", wrap, anchor> }
var lexerLiteralNames = []string{ <lexer.literalNames:{t | <t>}; null="\"\"", separator=", ", wrap, anchor> }
var lexerSymbolicNames = []string{ <lexer.symbolicNames:{t | <t>}; null="\"\"", separator=", ", wrap, anchor> }
var lexerRuleNames = []string{ <lexer.ruleNames:{r | "<r>"}; separator=", ", wrap, anchor> }

type <lexer.name> struct {
*<if(superClass)><superClass><else>antlr4.Lexer<endif>

modeNames []string
// EOF string
}

func New<lexer.name>(input antlr4.CharStream) *<lexer.name> {

var lexerDecisionToDFA = make([]*antlr4.DFA,len(lexerAtn.DecisionToState))

for index, ds := range lexerAtn.DecisionToState {
lexerDecisionToDFA[index] = antlr4.NewDFA(ds, index)
}

lex := new(<lexer.name>)

lex.Lexer = NewLexer(input)

lex.Interpreter = antlr4.NewLexerATNSimulator(lex, lexerAtn, lexerDecisionToDFA, antlr4.NewPredictionContextCache())

lex.modeNames = lexerModeNames
lex.RuleNames = lexerRuleNames
lex.LiteralNames = lexerLiteralNames
lex.SymbolicNames = lexerSymbolicNames
lex.GrammarFileName = "<lexer.grammarFileName>"
//lex.EOF = antlr4.TokenEOF

return lex
}

const (
<lexer.tokens:{k | <lexer.name><k> = <lexer.tokens.(k)>}; separator="\n", wrap, anchor>
)

const (
<rest(lexer.modes):{m| <lexer.name><m> = <i>}; separator="\n">
)

<namedActions.members>

<dumpActions(lexer, "", actionFuncs, sempredFuncs)>

>>

SerializedATN(model) ::= <<
<! only one segment, can be inlined !>
[]uint16{ <model.serialized; wrap={<\n> }> }

>>

/**
 * strings.Join( []string{ "<model.serialized; wrap={",<\n> "}>" }, "" )
 * Using a type to init value map, try to init a type; if not in table
 * must be an object, default value is "nil".
 */
InitValue(typeName) ::= <<
<javaTypeInitMap.(typeName)>
>>

codeFileExtension() ::= ".go"
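The hunks that follow are the "init method" repair the commit title refers to. In each constructor they pair a call to an Init* helper, which fills the fields of the embedded struct in place, with a direct assignment of the embedded struct from its own New* constructor (for example this.InitATNConfig2(...) next to this.ATNConfig = NewATNConfig(...)). Judging by the hunk line counts (e.g. @@ -242,9 +241,7 @@, where three Init* calls correspond to a single New* assignment), the Init* calls appear to be the removed lines and the New* assignments the added ones. The sketch below is a minimal, self-contained illustration of the two styles; ATNState and BasicState here are stand-in types, not the real antlr4 runtime definitions.

package main

import "fmt"

// ATNState stands in for an embedded base type (illustrative only).
type ATNState struct {
    stateType int
}

// NewATNState returns a fully initialized base value.
func NewATNState() *ATNState {
    return &ATNState{stateType: 0}
}

// InitATNState is the init-method style: it fills in an already-allocated base.
func (s *ATNState) InitATNState() {
    s.stateType = 0
}

// BasicState embeds the base by pointer, mirroring how the runtime types embed each other.
type BasicState struct {
    *ATNState
}

// Init-method style: allocate the base, then run the init helper on it.
func NewBasicStateViaInit() *BasicState {
    b := &BasicState{ATNState: &ATNState{}}
    b.InitATNState()
    b.stateType = 1 // promoted field of the embedded ATNState
    return b
}

// Constructor-assignment style: set the embedded struct from its own New* function,
// the form shown on the paired lines in the hunks below.
func NewBasicStateViaConstructor() *BasicState {
    b := new(BasicState)
    b.ATNState = NewATNState()
    b.stateType = 1
    return b
}

func main() {
    fmt.Println(NewBasicStateViaInit().stateType, NewBasicStateViaConstructor().stateType)
}

Assigning the embedded struct from its own constructor keeps each level of the embedding chain responsible for its own defaults; the trade-off is that deep embeddings (PlusBlockStartState, for instance) rely on a single New* call instead of an explicit chain of Init* calls.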
@@ -199,7 +199,7 @@ func NewLexerATNConfig6(state IATNState, alt int, context IPredictionContext) *L

this := new(LexerATNConfig)

this.InitATNConfig2(state, alt, context, SemanticContextNONE)
this.ATNConfig = NewATNConfig(state, alt, context, SemanticContextNONE)

this.passedThroughNonGreedyDecision = false
this.lexerActionExecutor = nil

@@ -210,7 +210,7 @@ func NewLexerATNConfig5(state IATNState, alt int, context IPredictionContext, le

this := new(LexerATNConfig)

this.InitATNConfig2(state, alt, context, SemanticContextNONE)
this.ATNConfig = NewATNConfig(state, alt, context, SemanticContextNONE)
this.lexerActionExecutor = lexerActionExecutor
this.passedThroughNonGreedyDecision = false
return this

@@ -220,7 +220,7 @@ func NewLexerATNConfig4(c *LexerATNConfig, state IATNState) *LexerATNConfig {

this := new(LexerATNConfig)

this.InitATNConfig(c, state, c.GetContext(), c.GetSemanticContext())
this.ATNConfig = NewATNConfig(c, state, c.GetContext(), c.GetSemanticContext())
this.lexerActionExecutor = c.lexerActionExecutor
this.passedThroughNonGreedyDecision = checkNonGreedyDecision(c, state)
return this

@@ -230,7 +230,7 @@ func NewLexerATNConfig3(c *LexerATNConfig, state IATNState, lexerActionExecutor

this := new(LexerATNConfig)

this.InitATNConfig(c, state, c.GetContext(), c.GetSemanticContext())
this.ATNConfig = NewATNConfig(c, state, c.GetContext(), c.GetSemanticContext())
this.lexerActionExecutor = lexerActionExecutor
this.passedThroughNonGreedyDecision = checkNonGreedyDecision(c, state)
return this

@@ -240,7 +240,7 @@ func NewLexerATNConfig2(c *LexerATNConfig, state IATNState, context IPredictionC

this := new(LexerATNConfig)

this.InitATNConfig(c, state, context, c.GetSemanticContext())
this.ATNConfig = NewATNConfig(c, state, context, c.GetSemanticContext())
this.lexerActionExecutor = c.lexerActionExecutor
this.passedThroughNonGreedyDecision = checkNonGreedyDecision(c, state)
return this

@@ -251,7 +251,7 @@ func NewLexerATNConfig1(state IATNState, alt int, context IPredictionContext) *L
this := new(LexerATNConfig)

// c IATNConfig , state IATNState, context IPredictionContext, semanticContext SemanticContext
this.InitATNConfig2(state, alt, context, SemanticContextNONE)
this.ATNConfig = NewATNConfig(state, alt, context, SemanticContextNONE)

this.lexerActionExecutor = nil
this.passedThroughNonGreedyDecision = false
@@ -9,7 +9,7 @@ func NewATNSimulator(atn *ATN, sharedContextCache *PredictionContextCache) *ATNS

this := new(ATNSimulator)

this.InitATNSimulator(atn, sharedContextCache)
this.ATNSimulator = NewATNSimulator(atn, sharedContextCache)

return this
}

@@ -78,7 +78,7 @@ type ATNState struct {
func NewATNState() *ATNState {

as := new(ATNState)
as.InitATNState()
as.ATNState = NewATNState()

return as
}

@@ -181,7 +181,7 @@ type BasicState struct {

func NewBasicState() *BasicState {
this := new(BasicState)
this.InitATNState()
this.ATNState = NewATNState()

this.stateType = ATNStateBASIC
return this

@@ -198,8 +198,8 @@ func NewDecisionState() *DecisionState {

this := new(DecisionState)

this.InitATNState()
this.InitDecisionState()
this.ATNState = NewATNState()
this.DecisionState = NewDecisionState()

return this
}

@@ -222,8 +222,7 @@ func NewBlockStartState() *BlockStartState {

this := new(BlockStartState)

this.InitATNState()
this.InitDecisionState()
this.DecisionState = NewDecisionState()

return this
}

@@ -242,9 +241,7 @@ func NewBasicBlockStartState() *BasicBlockStartState {

this := new(BasicBlockStartState)

this.InitATNState()
this.InitDecisionState()
this.InitBlockStartState()
this.BlockStartState = NewBlockStartState()

this.stateType = ATNStateBLOCK_START
return this

@@ -261,7 +258,7 @@ func NewBlockEndState() *BlockEndState {

this := new(BlockEndState)

this.InitATNState()
this.ATNState = NewATNState()
this.stateType = ATNStateBLOCK_END
this.startState = nil

@@ -280,7 +277,7 @@ type RuleStopState struct {
func NewRuleStopState() *RuleStopState {
this := new(RuleStopState)

this.InitATNState()
this.ATNState = NewATNState()
this.stateType = ATNStateRULE_STOP
return this
}

@@ -296,7 +293,7 @@ func NewRuleStartState() *RuleStartState {

this := new(RuleStartState)

this.InitATNState()
this.ATNState = NewATNState()
this.stateType = ATNStateRULE_START
this.stopState = nil
this.isPrecedenceRule = false

@@ -315,9 +312,7 @@ func NewPlusLoopbackState() *PlusLoopbackState {

this := new(PlusLoopbackState)

this.InitATNState()
this.InitDecisionState()
this.InitBlockStartState()
this.BlockStartState = NewBlockStartState()

this.stateType = ATNStatePLUS_LOOP_BACK
return this

@@ -338,9 +333,7 @@ func NewPlusBlockStartState() *PlusBlockStartState {

this := new(PlusBlockStartState)

this.InitATNState()
this.InitDecisionState()
this.InitBlockStartState()
this.BlockStartState = NewBlockStartState()

this.stateType = ATNStatePLUS_BLOCK_START
this.loopBackState = nil

@@ -357,9 +350,7 @@ func NewStarBlockStartState() *StarBlockStartState {

this := new(StarBlockStartState)

this.InitATNState()
this.InitDecisionState()
this.InitBlockStartState()
this.BlockStartState = NewBlockStartState()

this.stateType = ATNStateSTAR_BLOCK_START

@@ -374,7 +365,7 @@ func NewStarLoopbackState() *StarLoopbackState {

this := new(StarLoopbackState)

this.InitATNState()
this.ATNState = NewATNState()

this.stateType = ATNStateSTAR_LOOP_BACK
return this

@@ -391,8 +382,7 @@ func NewStarLoopEntryState() *StarLoopEntryState {

this := new(StarLoopEntryState)

this.InitATNState()
this.InitDecisionState()
this.DecisionState = NewDecisionState()

this.stateType = ATNStateSTAR_LOOP_ENTRY
this.loopBackState = nil

@@ -413,7 +403,7 @@ func NewLoopEndState() *LoopEndState {

this := new(LoopEndState)

this.InitATNState()
this.ATNState = NewATNState()

this.stateType = ATNStateLOOP_END
this.loopBackState = nil

@@ -430,8 +420,7 @@ func NewTokensStartState() *TokensStartState {

this := new(TokensStartState)

this.InitATNState()
this.InitDecisionState()
this.DecisionState = NewDecisionState()

this.stateType = ATNStateTOKEN_START
return this
@@ -24,7 +24,7 @@ func NewDFASerializer(dfa *DFA, literalNames, symbolicNames []string) *DFASerial

this := new(DFASerializer)

this.InitDFASerializer(dfa, literalNames, symbolicNames)
this.DFASerializer = NewDFASerializer(dfa, literalNames, symbolicNames)

return this
}

@@ -113,7 +113,7 @@ func NewLexerDFASerializer(dfa *DFA) *LexerDFASerializer {

this := new(LexerDFASerializer)

this.InitDFASerializer(dfa, nil, nil)
this.DFASerializer = NewDFASerializer(dfa, nil, nil)

return this
}

@@ -56,7 +56,7 @@ type DefaultErrorStrategy struct {
func NewDefaultErrorStrategy() *DefaultErrorStrategy {

d := new(DefaultErrorStrategy)
d.InitDefaultErrorStrategy()
d.DefaultErrorStrategy = NewDefaultErrorStrategy()
return d
}

@@ -748,7 +748,7 @@ type BailErrorStrategy struct {
func NewBailErrorStrategy() *BailErrorStrategy {

this := new(BailErrorStrategy)
this.InitDefaultErrorStrategy()
this.DefaultErrorStrategy = NewDefaultErrorStrategy()

return this
}
@@ -36,7 +36,7 @@ func NewRecognitionException(message string, recognizer IRecognizer, input IntSt
// TODO may be able to use - "runtime" func Stack(buf []byte, all bool) int

t := new(RecognitionException)
t.InitRecognitionException(message, recognizer, input, ctx)
t.RecognitionException = NewRecognitionException(message, recognizer, input, ctx)

return t
}

@@ -109,7 +109,7 @@ func NewLexerNoViableAltException(lexer ILexer, input CharStream, startIndex int

this := new(LexerNoViableAltException)

this.InitRecognitionException("", lexer, input, nil)
this.RecognitionException = NewRecognitionException("", lexer, input, nil)

this.startIndex = startIndex
this.deadEndConfigs = deadEndConfigs

@@ -158,7 +158,7 @@ func NewNoViableAltException(recognizer IParser, input TokenStream, startToken *
}

this := new(NoViableAltException)
this.InitRecognitionException("", recognizer, input, ctx)
this.RecognitionException = NewRecognitionException("", recognizer, input, ctx)

// Which configurations did we try at input.Index() that couldn't Match
// input.LT(1)?//

@@ -183,7 +183,7 @@ type InputMisMatchException struct {
func NewInputMisMatchException(recognizer IParser) *InputMisMatchException {

this := new(InputMisMatchException)
this.InitRecognitionException("", recognizer, recognizer.GetInputStream(), recognizer.GetParserRuleContext())
this.RecognitionException = NewRecognitionException("", recognizer, recognizer.GetInputStream(), recognizer.GetParserRuleContext())

this.offendingToken = recognizer.getCurrentToken()

@@ -208,7 +208,7 @@ func NewFailedPredicateException(recognizer *Parser, predicate string, message s

this := new(FailedPredicateException)

this.InitRecognitionException(this.formatMessage(predicate, message), recognizer, recognizer.GetInputStream(), recognizer._ctx)
this.RecognitionException = NewRecognitionException(this.formatMessage(predicate, message), recognizer, recognizer.GetInputStream(), recognizer._ctx)

var s = recognizer.Interpreter.atn.states[recognizer.state]
var trans = s.GetTransitions()[0]
@@ -47,7 +47,7 @@ func NewLexer(input CharStream) *Lexer {

lexer := new(Lexer)

lexer.InitRecognizer()
lexer.Recognizer = NewRecognizer()
lexer.Lexer = NewLexer(input)

return lexer

@@ -66,7 +66,7 @@ func NewLexerATNSimulator(recog ILexer, atn *ATN, decisionToDFA []*DFA, sharedCo

this := new(LexerATNSimulator)

this.InitATNSimulator(atn, sharedContextCache)
this.ATNSimulator = NewATNSimulator(atn, sharedContextCache)

this.decisionToDFA = decisionToDFA
this.recog = recog

@@ -378,7 +378,7 @@ type LexerIndexedCustomAction struct {
func NewLexerIndexedCustomAction(offset int, lexerAction ILexerAction) *LexerIndexedCustomAction {

this := new(LexerIndexedCustomAction)
this.InitLexerAction(lexerAction.getActionType())
this.LexerAction = NewLexerAction(lexerAction.getActionType())

this.offset = offset
this.lexerAction = lexerAction

@@ -49,7 +49,7 @@ func NewParser(input TokenStream) *Parser {

func (p *Parser) InitParser(input TokenStream) {

p.InitRecognizer()
p.Recognizer = NewRecognizer()

// The input stream.
p._input = nil

@@ -23,14 +23,14 @@ func NewParserATNSimulator(parser IParser, atn *ATN, decisionToDFA []*DFA, share

this := new(ParserATNSimulator)

this.InitParserATNSimulator(parser, atn, decisionToDFA, sharedContextCache)
this.ParserATNSimulator = NewParserATNSimulator(parser, atn, decisionToDFA, sharedContextCache)

return this
}

func (this *ParserATNSimulator) InitParserATNSimulator(parser IParser, atn *ATN, decisionToDFA []*DFA, sharedContextCache *PredictionContextCache) {

this.InitATNSimulator(atn, sharedContextCache)
this.ATNSimulator = NewATNSimulator(atn, sharedContextCache)

this.parser = parser
this.decisionToDFA = decisionToDFA
@@ -35,7 +35,7 @@ func NewParserRuleContext(parent IParserRuleContext, invokingStateNumber int) *P

prc := new(ParserRuleContext)

prc.InitParserRuleContext(parent, invokingStateNumber)
prc.ParserRuleContext = NewParserRuleContext(parent, invokingStateNumber)

return prc

@@ -43,7 +43,7 @@ func NewParserRuleContext(parent IParserRuleContext, invokingStateNumber int) *P

func (prc *ParserRuleContext) InitParserRuleContext(parent IParserRuleContext, invokingStateNumber int) {

prc.InitRuleContext(parent, invokingStateNumber)
prc.RuleContext = NewRuleContext(parent, invokingStateNumber)

prc.RuleIndex = -1
// * If we are debugging or building a parse tree for a Visitor,

@@ -266,7 +266,7 @@ func NewInterpreterRuleContext(parent InterpreterRuleContext, invokingStateNumbe

prc := new(InterpreterRuleContext)

prc.InitParserRuleContext(parent, invokingStateNumber)
prc.ParserRuleContext = NewParserRuleContext(parent, invokingStateNumber)

prc.RuleIndex = ruleIndex

@@ -155,7 +155,7 @@ type SingletonPredictionContext struct {

func NewSingletonPredictionContext(parent IPredictionContext, returnState int) *SingletonPredictionContext {
s := new(SingletonPredictionContext)
s.InitSingletonPredictionContext(parent, returnState)
s.SingletonPredictionContext = NewSingletonPredictionContext(parent, returnState)
return s
}

@@ -248,7 +248,7 @@ func NewEmptyPredictionContext() *EmptyPredictionContext {

p := new(EmptyPredictionContext)

p.InitSingletonPredictionContext(nil, PredictionContextEMPTY_RETURN_STATE)
p.SingletonPredictionContext = NewSingletonPredictionContext(nil, PredictionContextEMPTY_RETURN_STATE)

return p
}

@@ -35,7 +35,7 @@ type Recognizer struct {

func NewRecognizer() *Recognizer {
rec := new(Recognizer)
rec.InitRecognizer()
rec.Recognizer = NewRecognizer()
return rec
}

@@ -49,7 +49,7 @@ func NewRuleContext(parent IRuleContext, invokingState int) *RuleContext {

rn := new(RuleContext)

rn.InitRuleContext(parent, invokingState)
rn.RuleContext = NewRuleContext(parent, invokingState)

return rn
}

@@ -663,7 +663,7 @@ func New<struct.name>(parser antlr4.IParser, parent antlr4.IParserRuleContext, i

var p = new(<struct.name>)

p.InitParserRuleContext( parent, invokingState )
p.ParserRuleContext = NewParserRuleContext( parent, invokingState )

p.parser = parser
p.RuleIndex = <parser.name>RULE_<struct.derivedFromName>