forked from jasder/antlr
Numerous minor fixes
This commit is contained in:
parent cc2c5eca2f
commit 3b69b21834
@@ -12,20 +12,38 @@
package antlr4

type TokenStream interface {
type IntStream interface {

    consume()
    LA(i int) int

    mark() int
    release(marker int)

    index() int
    seek(index int)
    size() int
    getSourceName() string
}

type TokenStream interface {
    IntStream

    LT(k int) *Token

    get(index int) *Token
    getTokenSource() *TokenSource
    getText() string
    getTextFromInterval(Interval) string
    getTextFromRuleContext(*RuleContext) string
    getTextFromTokens(*Token, *Token) string
}

// bt is just to keep meaningful parameter types to Parser
type BufferedTokenStream struct {
    tokenSource TokenSource
    tokens []Token
    tokenSource *TokenSource
    tokens []*Token
    index int
    fetchedEOF bool
    channel int
}

func NewBufferedTokenStream(tokenSource TokenSource) BufferedTokenStream {
func NewBufferedTokenStream(tokenSource *TokenSource) *BufferedTokenStream {

    ts := new(BufferedTokenStream)
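Aside, for orientation only (not part of this commit): a minimal sketch of how code inside this package might walk a TokenStream using the interface above. It assumes the TokenEOF constant, the tokenType field, and the text() accessor used elsewhere in this port, plus an imported "fmt".

// dumpTokens is an illustrative helper: it prints each token's text
// until the stream reports EOF. It is a sketch, not part of the runtime.
func dumpTokens(s TokenStream) {
    for {
        t := s.LT(1) // peek at the next token without consuming it
        if t.tokenType == TokenEOF {
            break
        }
        fmt.Println(t.text()) // text() is assumed from this port's Token API
        s.consume()           // advance past the token just printed
    }
}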
@@ -85,7 +103,7 @@ func (bt *BufferedTokenStream) seek(index int) {
    bt.index = bt.adjustSeekIndex(index)
}

func (bt *BufferedTokenStream) get(index int) {
func (bt *BufferedTokenStream) get(index int) *Token {
    bt.lazyInit()
    return bt.tokens[index]
}
@@ -138,7 +156,7 @@ func (bt *BufferedTokenStream) fetch(n int) int {
    }

    for i := 0; i < n; i++ {
        var t = bt.tokenSource.nextToken()
        var t *Token = bt.tokenSource.nextToken()
        t.tokenIndex = len(bt.tokens)
        bt.tokens = append(bt.tokens, t)
        if (t.tokenType == TokenEOF) {
@@ -109,15 +109,18 @@ func (la *LL1Analyzer) LOOK(s, stopState int, ctx *RuleContext) *IntervalSet {
// @param addEOF Add {@link Token//EOF} to the result if the end of the
// outermost context is reached. This parameter has no effect if {@code ctx}
// is {@code nil}.
///
func (la *LL1Analyzer) _LOOK(s, stopState int, ctx *RuleContext, look *Set, lookBusy, calledRuleStack, seeThruPreds, addEOF) {
    var c = atn.NewATNConfig({state:s, alt:0}, ctx)
    if (lookBusy.contains(c)) {

func (la *LL1Analyzer) _LOOK(s, stopState *atn.ATNState, ctx *PredictionContext, look *IntervalSet, lookBusy *Set, calledRuleStack *BitSet, seeThruPreds, addEOF bool) {

    c := atn.NewATNConfig6(s, 0, ctx)

    if !lookBusy.add(c) {
        return
    }
    lookBusy.add(c)

    if (s == stopState) {
        if (ctx ==nil) {
        if (ctx == nil) {
            look.addOne(TokenEpsilon)
            return
        } else if (ctx.isEmpty() && addEOF) {
@@ -125,61 +128,85 @@ func (la *LL1Analyzer) _LOOK(s, stopState int, ctx *RuleContext, look *Set, look
            return
        }
    }
    if (s instanceof RuleStopState ) {
        if (ctx ==nil) {

    _,ok := s.(atn.RuleStopState)

    if ok {
        if ( ctx==nil ) {
            look.addOne(TokenEpsilon)
            return
        } else if (ctx.isEmpty() && addEOF) {
            look.addOne(TokenEOF)
            return
        }
        if (ctx != PredictionContext.EMPTY) {
        if ( ctx != PredictionContextEMPTY ) {

            // run thru all possible stack tops in ctx
            for(var i=0 i<ctx.length i++) {
                var returnState = la.atn.states[ctx.getReturnState(i)]
                var removed = calledRuleStack.contains(returnState.ruleIndex)
                try {
                    calledRuleStack.remove(returnState.ruleIndex)
                    la._LOOK(returnState, stopState, ctx.getParent(i), look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
                } finally {
            for i := 0; i < ctx.length(); i++ {

                returnState := atn.states.get(ctx.getReturnState(i))
                // System.out.println("popping back to "+retState)

                removed := calledRuleStack.get(returnState.ruleIndex)

                defer func(){
                    if (removed) {
                        calledRuleStack.add(returnState.ruleIndex)
                        calledRuleStack.set(returnState.ruleIndex)
                    }
                }
                }()

                calledRuleStack.clear(returnState.ruleIndex)
                la._LOOK(returnState, stopState, ctx.getParent(i), look, lookBusy, calledRuleStack, seeThruPreds, addEOF)

            }
            return
        }
    }
    for j := 0; j < len(s.transitions); j++ {
        var t = s.transitions[j]
        if (t.constructor == RuleTransition) {
            if (calledRuleStack.contains(t.target.ruleIndex)) {

    n := len(s.transitions)

    for i:=0; i<n; i++ {
        t := s.transitions[i]

        if ( t.getClass() == RuleTransition.class ) {
            if (calledRuleStack.get(((RuleTransition)t).target.ruleIndex)) {
                continue
            }
            var newContext = SingletonPredictionContext.create(ctx, t.followState.stateNumber)

            newContext :=
                SingletonPredictionContext.create(ctx, ((RuleTransition)t).followState.stateNumber)

            try {
                calledRuleStack.add(t.target.ruleIndex)
                la._LOOK(t.target, stopState, newContext, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
            } finally {
                calledRuleStack.remove(t.target.ruleIndex)
            calledRuleStack.set(((RuleTransition)t).target.ruleIndex)
            _LOOK(t.target, stopState, newContext, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
            }
        } else if (t instanceof AbstractPredicateTransition ) {
            if (seeThruPreds) {
                la._LOOK(t.target, stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
            } else {
                look.addOne(LL1Analyzer.HIT_PRED)
            finally {
                calledRuleStack.clear(((RuleTransition)t).target.ruleIndex)
            }
        } else if( t.isEpsilon) {
            la._LOOK(t.target, stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
        } else if (t.constructor == WildcardTransition) {
            look.addRange( TokenMinUserTokenType, la.atn.maxTokenType )
        } else {
            var set = t.label
        }
        else if ( t instanceof AbstractPredicateTransition ) {
            if ( seeThruPreds ) {
                _LOOK(t.target, stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
            }
            else {
                look.add(HIT_PRED)
            }
        }
        else if ( t.isEpsilon() ) {
            _LOOK(t.target, stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
        }
        else if ( t.getClass() == WildcardTransition.class ) {
            look.addAll( IntervalSet.of(Token.MIN_USER_TOKEN_TYPE, atn.maxTokenType) )
        }
        else {
            // System.out.println("adding "+ t)
            IntervalSet set = t.label()
            if (set != nil) {
                if _, ok := t.(NotSetTransition); ok {
                    set = set.complement(TokenMinUserTokenType, la.atn.maxTokenType)
                if (t instanceof NotSetTransition) {
                    set = set.complement(IntervalSet.of(Token.MIN_USER_TOKEN_TYPE, atn.maxTokenType))
                }
                look.addSet(set)
                look.addAll(set)
            }
        }
    }
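Aside (illustration only, not from the commit): the hunk above swaps Java-style try/finally around the recursive _LOOK call for a deferred restore of the called-rule flag. A self-contained sketch of that save/clear/restore pattern, using a plain map in place of the port's BitSet:

package main

import "fmt"

// withRuleCleared mirrors try { clear(rule); body() } finally { if removed { set(rule) } }.
func withRuleCleared(called map[int]bool, ruleIndex int, body func()) {
    removed := called[ruleIndex] // remember whether the rule was marked
    if removed {
        defer func() { called[ruleIndex] = true }() // restore on the way out
    }
    delete(called, ruleIndex) // clear the mark for the nested call
    body()
}

func main() {
    called := map[int]bool{7: true}
    withRuleCleared(called, 7, func() {
        fmt.Println("inside body, rule 7 marked:", called[7]) // false
    })
    fmt.Println("after the call, rule 7 marked:", called[7]) // true again
}

One Go-specific caveat: a defer registered inside a loop, as in the hunk above, runs only when the enclosing function returns, not at the end of each iteration.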
@@ -13,16 +13,18 @@ import (
///

type TokenSource interface {
    nextToken() *Token
    getLine() int
    getCharPositionInLine() int
    getInputStream() *InputStream
    getSourceName() string
    getText(int, int) string
    nextToken() Token
    column() int
    line() int
    setTokenFactory(factory *TokenFactory)
    getTokenFactory() *TokenFactory
}

type TokenFactorySourcePair struct {
    factory TokenFactory
    inputStream InputStream
    factory *TokenFactory
    inputStream *InputStream
}

type Lexer struct {
@@ -30,7 +32,7 @@ type Lexer struct {

    _input *InputStream
    _factory *TokenFactory
    _tokenFactorySourcePair TokenFactorySourcePair
    _tokenFactorySourcePair *TokenFactorySourcePair
    _interp *atn.LexerATNSimulator
    _token int
    _tokenStartCharIndex int
@@ -46,7 +48,9 @@ type Lexer struct {

func NewLexer(input *InputStream) *Lexer {

    lexer := &Lexer{Recognizer{}}
    lexer := new(Lexer)

    lexer.initRecognizer()

    lexer._input = input
    lexer._factory = CommonTokenFactoryDEFAULT
@@ -279,6 +283,8 @@ func (l *Lexer) emitEOF() int {
    return eof
}

Object.defineProperty(Lexer.prototype, "type", {
    get : function() {
        return l.type
@@ -339,7 +345,7 @@ func (l *Lexer) getErrorDisplay(s []string) string {
}

func (l *Lexer) getErrorDisplayForChar(c rune) string {
    if (c.charCodeAt(0) == TokenEOF) {
    if (c == TokenEOF) {
        return "<EOF>"
    } else if (c == '\n') {
        return "\\n"
@@ -19,17 +19,18 @@ func NewTraceListener(parser *Parser) *TraceListener {
}

func (this *TraceListener) enterEveryRule(ctx *ParserRuleContext) {
    fmt.Println("enter " + this.parser.ruleNames[ctx.ruleIndex] + ", LT(1)=" + this.parser._input.LT(1).text)
    fmt.Println("enter " + this.parser.getRuleNames()[ctx.ruleIndex] + ", LT(1)=" + this.parser._input.LT(1).text())
}

func (this *TraceListener) visitTerminal( node *tree.TerminalNode ) {
    fmt.Println("consume " + node.symbol + " rule " + this.parser.ruleNames[this.parser._ctx.ruleIndex])
    fmt.Println("consume " + node.symbol + " rule " + this.parser.getRuleNames()[this.parser._ctx.ruleIndex])
}

func (this *TraceListener) exitEveryRule(ctx *ParserRuleContext) {
    fmt.Println("exit " + this.parser.ruleNames[ctx.ruleIndex] + ", LT(1)=" + this.parser._input.LT(1).text)
    fmt.Println("exit " + this.parser.getRuleNames()[ctx.ruleIndex] + ", LT(1)=" + this.parser._input.LT(1).text())
}

type Parser struct {
    Recognizer
    _input *TokenStream
@@ -38,15 +39,17 @@ type Parser struct {
    _ctx *ParserRuleContext
    buildParseTrees bool
    _tracer bool
    _parseListeners []tree.ParseTreeListener
    _parseListeners []*tree.ParseTreeListener
    _syntaxErrors int
}

// p.is all the parsing support code essentially most of it is error
// recovery stuff.//
func Parser(input *TokenStream) *Parser {
func NewParser(input *TokenStream) *Parser {

    p := &Parser{Recognizer{}}
    p := new(Parser)

    p.initRecognizer()

    // The input stream.
    p._input = nil
@@ -74,12 +77,10 @@ func Parser(input *TokenStream) *Parser {
    // incremented each time {@link //notifyErrorListeners} is called.
    p._syntaxErrors = 0
    p.setInputStream(input)

    return p
}

//Parser.prototype = Object.create(Recognizer.prototype)
//Parser.prototype.contructor = Parser

// p.field maps from the serialized ATN string to the deserialized {@link
// ATN} with
// bypass alternatives.
@@ -124,7 +125,7 @@ func (p *Parser) reset() {
func (p *Parser) match(ttype int) *Token {
    var t = p.getCurrentToken()
    if (t.tokenType == ttype) {
        p._errHandler.reportMatch(p.
        p._errHandler.reportMatch(p)
        p.consume()
    } else {
        t = p._errHandler.recoverInline(p)
@@ -156,7 +157,7 @@ func (p *Parser) match(ttype int) *Token {
func (p *Parser) matchWildcard() {
    var t = p.getCurrentToken()
    if (t.tokenType > 0) {
        p._errHandler.reportMatch(p.
        p._errHandler.reportMatch(p)
        p.consume()
    } else {
        t = p._errHandler.recoverInline(p)
@@ -267,12 +268,12 @@ func (p *Parser) triggerExitRuleEvent() {
}

func (p *Parser) getTokenFactory() {
    return p._input.tokenSource._factory
    return p._input.getTokenSource()._factory
}

// Tell our token source and error strategy about a Newway to create tokens.//
func (p *Parser) setTokenFactory(factory) {
    p._input.tokenSource._factory = factory
    p._input.getTokenSource()._factory = factory
}

// The ATN with bypass alternatives is expensive to create so we create it
@@ -284,7 +285,7 @@ func (p *Parser) setTokenFactory(factory) {
func (p *Parser) getATNWithBypassAlts() {
    var serializedAtn = p.getSerializedATN()
    if (serializedAtn == nil) {
        panic "The current parser does not support an ATN with bypass alternatives."
        panic("The current parser does not support an ATN with bypass alternatives.")
    }
    var result = p.bypassAltsAtnCache[serializedAtn]
    if (result == nil) {
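Aside (illustrative sketch, hypothetical names): the comment above describes caching the bypass-alternatives ATN because it is expensive to build. The check-then-build-then-store shape of such a cache, keyed by the serialized ATN, looks like this:

package main

import "fmt"

// bypassATN stands in for the deserialized ATN; the real type lives in the atn package.
type bypassATN struct{ serialized string }

var bypassAltsAtnCache = map[string]*bypassATN{}

// getATNWithBypassAltsSketch builds the ATN on first request and reuses it afterwards.
func getATNWithBypassAltsSketch(serialized string) *bypassATN {
    if cached, ok := bypassAltsAtnCache[serialized]; ok {
        return cached
    }
    a := &bypassATN{serialized: serialized} // stands in for the expensive deserialization
    bypassAltsAtnCache[serialized] = a
    return a
}

func main() {
    first := getATNWithBypassAltsSketch("grammar-atn")
    second := getATNWithBypassAltsSketch("grammar-atn")
    fmt.Println(first == second) // true: the second call hits the cache
}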
@@ -311,7 +312,7 @@ func (p *Parser) compileParseTreePattern(pattern, patternRuleIndex, lexer *Lexer

    if (lexer == nil) {
        if (p.getTokenStream() != nil) {
            var tokenSource = p.getTokenStream().tokenSource
            var tokenSource = p.getTokenStream().getTokenSource()
            if _, ok := tokenSource.(Lexer); ok {
                lexer = tokenSource
            }
@@ -320,8 +321,10 @@ func (p *Parser) compileParseTreePattern(pattern, patternRuleIndex, lexer *Lexer
    if (lexer == nil) {
        panic("Parser can't discover a lexer to use")
    }
    var m = NewParseTreePatternMatcher(lexer, p)
    return m.compile(pattern, patternRuleIndex)

    panic("NewParseTreePatternMatcher not implemented!")
    // var m = NewParseTreePatternMatcher(lexer, p)
    // return m.compile(pattern, patternRuleIndex)
}

func (p *Parser) getInputStream() *TokenStream {
@@ -350,7 +353,7 @@ func (p *Parser) getCurrentToken() *Token {
    return p._input.LT(1)
}

func (p *Parser) notifyErrorListeners(msg, offendingToken, err) {
func (p *Parser) notifyErrorListeners(msg string, offendingToken *Token, err *error.RecognitionException) {
    offendingToken = offendingToken || nil
    err = err || nil
    if (offendingToken == nil) {
@@ -359,8 +362,8 @@ func (p *Parser) notifyErrorListeners(msg, offendingToken, err) {
    p._syntaxErrors += 1
    var line = offendingToken.line
    var column = offendingToken.column
    var listener = p.getErrorListenerDispatch()
    listener.syntaxError(p. offendingToken, line, column, msg, err)
    var listener := p.getErrorListenerDispatch()
    listener.syntaxError(p, offendingToken, line, column, msg, err)
}

//
@@ -671,7 +674,7 @@ func (p *Parser) setTrace(trace *TraceListener) {
        if (p._tracer != nil) {
            p.removeParseListener(p._tracer)
        }
        p._tracer = NewTraceListener(p.
        p._tracer = NewTraceListener(p)
        p.addParseListener(p._tracer)
    }
}
@@ -1,7 +1,9 @@
package antlr4

import (
    "antlr4/error"
    "antlr4/tree"
    "reflect"
)

//* A rule invocation record for parsing.
@@ -27,46 +29,49 @@ import (
// group values such as prc aggregate. The getters/setters are there to
// satisfy the superclass interface.

//var RuleContext = require('./RuleContext').RuleContext
//var Tree = require('./tree/Tree')
//var INVALID_INTERVAL = Tree.INVALID_INTERVAL
//var TerminalNode = Tree.TerminalNode
//var TerminalNodeImpl = Tree.TerminalNodeImpl
//var ErrorNodeImpl = Tree.ErrorNodeImpl
//var Interval = require("./IntervalSet").Interval

type ParserRuleContext struct {
    RuleContext
    ruleIndex int
    children []RuleContext
    start
    stop
    exception
    start, stop *Token
    exception *error.RecognitionException
}

func NewParserRuleContext(parent, invokingStateNumber) *ParserRuleContext {
func NewParserRuleContext(parent *ParserRuleContext, invokingStateNumber int) *ParserRuleContext {

    RuleContext.call(prc, parent, invokingStateNumber)
    prc := new(ParserRuleContext)

    prc.initRuleContext(parent, invokingStateNumber)
    prc.initParserRuleContext(parent, invokingStateNumber)

    return prc

}

func (prc *ParserRuleContext) initParserRuleContext(parent *ParserRuleContext, invokingStateNumber int){

    prc.initRuleContext(parent, invokingStateNumber)

    prc.ruleIndex = -1
    // * If we are debugging or building a parse tree for a visitor,
    // we need to track all of the tokens and rule invocations associated
    // with prc rule's context. This is empty for parsing w/o tree constr.
    // operation because we don't the need to track the details about
    // how we parse prc rule.
    // /
    prc.children = nil
    prc.start = nil
    prc.stop = nil
    // The exception that forced prc rule to return. If the rule successfully
    // completed, prc is {@code nil}.
    prc.exception = nil
    // * If we are debugging or building a parse tree for a visitor,
    // we need to track all of the tokens and rule invocations associated
    // with prc rule's context. This is empty for parsing w/o tree constr.
    // operation because we don't the need to track the details about
    // how we parse prc rule.
    // /
    prc.children = nil
    prc.start = nil
    prc.stop = nil
    // The exception that forced prc rule to return. If the rule successfully
    // completed, prc is {@code nil}.
    prc.exception = nil

    return prc

}

func (prc *ParserRuleContext) copyFrom(ctx *RuleContext) {
func (prc *ParserRuleContext) copyFrom(ctx *ParserRuleContext) {
    // from RuleContext
    prc.parentCtx = ctx.parentCtx
    prc.invokingState = ctx.invokingState
@@ -83,11 +88,11 @@ func (prc *ParserRuleContext) exitRule(listener *tree.ParseTreeListener) {
}

// * Does not set parent link other add methods do that///
func (prc *ParserRuleContext) addChild(child) {
func (prc *ParserRuleContext) addChild(child *ParserRuleContext) {
    if (prc.children == nil) {
        prc.children = []
        prc.children = make([]*ParserRuleContext)
    }
    prc.children.push(child)
    prc.children = append( prc.children, child )
    return child
}
@@ -96,33 +101,36 @@ func (prc *ParserRuleContext) addChild(child) {
// generic ruleContext object.
// /
func (prc *ParserRuleContext) removeLastChild() {
    if (prc.children != nil) {
        prc.children.pop()
    if (prc.children != nil && len(prc.children) > 0) {
        prc.children = prc.children[0:len(prc.children)-1]
    }
}

func (prc *ParserRuleContext) addTokenNode(token) {
    var node = NewTerminalNodeImpl(token)
func (prc *ParserRuleContext) addTokenNode(token *Token) *tree.TerminalNodeImpl {
    var node = tree.NewTerminalNodeImpl(token)
    prc.addChild(node)
    node.parentCtx = prc
    return node
}

func (prc *ParserRuleContext) addErrorNode(badToken) {
    var node = NewErrorNodeImpl(badToken)
func (prc *ParserRuleContext) addErrorNode(badToken *Token) *tree.ErrorNodeImpl {
    var node = tree.NewErrorNodeImpl(badToken)
    prc.addChild(node)
    node.parentCtx = prc
    return node
}

func (prc *ParserRuleContext) getChild(i, type) {
    type = type || nil
    if (type == nil) {
        return len(prc.children) >= i ? prc.children[i] : nil
func (prc *ParserRuleContext) getChild(i int, childType reflect.Type) {
    if (childType == nil) {
        if (prc.children != nil && len(prc.children) >= i){
            return prc.children[i]
        } else {
            return nil
        }
    } else {
        for(var j=0 j<len(prc.children) j++) {
        for j :=0; j<len(prc.children); j++ {
            var child = prc.children[j]
            if_, ok := child.(type); ok {
            if reflect.TypeOf(child) == childType {
                if(i==0) {
                    return child
                } else {
@@ -135,11 +143,12 @@ func (prc *ParserRuleContext) getChild(i, type) {
}

func (prc *ParserRuleContext) getToken(ttype, i int) {
func (prc *ParserRuleContext) getToken(ttype int, i int) *tree.TerminalNode {

    for j :=0; j<len(prc.children); j++ {
        var child = prc.children[j]
        if _, ok := child.(TerminalNode); ok {
            if (child.symbol.type == ttype) {
        if _, ok := child.(*tree.TerminalNode); ok {
            if (child.symbol.tokenType == ttype) {
                if(i==0) {
                    return child
                } else {
@@ -151,16 +160,16 @@ func (prc *ParserRuleContext) getToken(ttype, i int) {
    return nil
}

func (prc *ParserRuleContext) getTokens(ttype int) {
func (prc *ParserRuleContext) getTokens(ttype int) []*tree.TerminalNode {
    if (prc.children== nil) {
        return []
        return make([]*tree.TerminalNode)
    } else {
        var tokens = []
        var tokens = make([]*tree.TerminalNode)
        for j:=0; j<len(prc.children); j++ {
            var child = prc.children[j]
            if _, ok := child.(TerminalNode); ok {
                if (child.symbol.type == ttype) {
                    tokens.push(child)
            if tchild, ok := child.(*tree.TerminalNode); ok {
                if (tchild.symbol.tokenType == ttype) {
                    tokens = append(tokens, tchild)
                }
            }
        }
@@ -168,23 +177,25 @@ func (prc *ParserRuleContext) getTokens(ttype int) {
    }
}

func (prc *ParserRuleContext) getTypedRuleContext(ctxType, i) {
    return prc.getChild(i, ctxType)
func (prc *ParserRuleContext) getTypedRuleContext(ctxType reflect.Type, i int) *interface{} {
    panic("getTypedRuleContexts not implemented")
    // return prc.getChild(i, ctxType)
}

func (prc *ParserRuleContext) getTypedRuleContexts(ctxType) {
    if (prc.children== nil) {
        return []
    } else {
        var contexts = []
        for(var j=0 j<len(prc.children) j++) {
            var child = prc.children[j]
            if _, ok := child.(ctxType); ok {
                contexts.push(child)
            }
        }
        return contexts
    }
func (prc *ParserRuleContext) getTypedRuleContexts(ctxType reflect.Type) []*interface{} {
    panic("getTypedRuleContexts not implemented")
    // if (prc.children== nil) {
    // return []
    // } else {
    // var contexts = []
    // for(var j=0 j<len(prc.children) j++) {
    // var child = prc.children[j]
    // if _, ok := child.(ctxType); ok {
    // contexts.push(child)
    // }
    // }
    // return contexts
    // }
}

func (prc *ParserRuleContext) getChildCount() {
|
@ -197,7 +208,7 @@ func (prc *ParserRuleContext) getChildCount() {
|
|||
|
||||
func (prc *ParserRuleContext) getSourceInterval() {
|
||||
if( prc.start == nil || prc.stop == nil) {
|
||||
return INVALID_INTERVAL
|
||||
return tree.TreeINVALID_INTERVAL
|
||||
} else {
|
||||
return NewInterval(prc.start.tokenIndex, prc.stop.tokenIndex)
|
||||
}
|
||||
|
@@ -205,11 +216,13 @@ func (prc *ParserRuleContext) getSourceInterval() {

var RuleContextEMPTY = NewParserRuleContext(nil, nil)

type InterpreterRuleContext struct {
    ParserRuleContext
    ruleIndex int
}

func InterpreterRuleContext(parent, invokingStateNumber, ruleIndex int) {
func NewInterpreterRuleContext(parent *InterpreterRuleContext, invokingStateNumber, ruleIndex int) {

    prc := new(InterpreterRuleContext)
@@ -16,10 +16,14 @@ type Recognizer struct {

func NewRecognizer() *Recognizer {
    rec := new(Recognizer)
    rec.initRecognizer()
    return rec
}

func (rec *Recognizer) initRecognizer() {
    rec._listeners = []tree.ParseTreeListener{ error.ConsoleErrorListenerINSTANCE }
    rec._interp = nil
    rec.state = -1
    return rec
}

var tokenTypeMapCache = make(map[[]string]int)
@@ -160,7 +164,7 @@ func (this *Recognizer) getTokenErrorDisplay(t *Token) string {
    return "'" + s + "'"
}

func (this *Recognizer) getErrorListenerDispatch() {
func (this *Recognizer) getErrorListenerDispatch() *error.ErrorListener {
    return error.NewProxyErrorListener(this._listeners)
}
@@ -30,13 +30,20 @@ type RuleContext struct {
    parentCtx *RuleContext
    invokingState int
    ruleIndex int
    children []RuleContext
    children []*RuleContext
}

func NewRuleContext(parent *RuleContext, invokingState int) *RuleContext {
    // tree.RuleNode.call(this)

    rn := RuleContext{tree.RuleNode{}}
    rn := &RuleContext{tree.RuleNode{}}

    rn.initRuleNode()
    rn.initRuleContext(parent, invokingState)

    return rn
}

func (rn *RuleContext) initRuleContext(parent *RuleContext, invokingState int) {

    // What context invoked this rule?
    rn.parentCtx = parent
@@ -49,8 +56,6 @@ func NewRuleContext(parent *RuleContext, invokingState int) *RuleContext {
    } else {
        rn.invokingState = invokingState
    }

    return rn
}

func (this *RuleContext) depth() {
@@ -65,13 +70,13 @@ func (this *RuleContext) depth() {

// A context is empty if there is no invoking state meaning nobody call
// current context.
func (this *RuleContext) isEmpty() {
func (this *RuleContext) isEmpty() bool {
    return this.invokingState == -1
}

// satisfy the ParseTree / SyntaxTree interface

func (this *RuleContext) getSourceInterval() {
func (this *RuleContext) getSourceInterval() *Interval {
    return tree.TreeINVALID_INTERVAL
}
@@ -112,7 +117,7 @@ func (this *RuleContext) getChildCount() {
}

func (this *RuleContext) accept(visitor *tree.ParseTreeVisitor) {
    return visitor.visitChildren(this)
    visitor.visitChildren(this)
}

//need to manage circular dependencies, so export now
@@ -128,7 +133,7 @@ func (this *RuleContext) toStringTree(ruleNames []string, recog *Recognizer) str
func (this *RuleContext) toString(ruleNames []string, stop *RuleContext) string {
    ruleNames = ruleNames || nil
    stop = stop || nil
    var p = this
    var p *RuleContext = this
    var s = "["
    for (p != nil && p != stop) {
        if (ruleNames == nil) {
@@ -23,10 +23,17 @@ type Token struct {
    line int // line=1..n of the 1st character
    column int // beginning of the line at which it occurs, 0..n-1
    _text string // text of the token.
}

func NewToken() *Token {
    return new(Token)
    // String getText();
    // int getType();
    // int getLine();
    // int getCharPositionInLine();
    // int getChannel();
    // int getTokenIndex();
    // int getStartIndex();
    // int getStopIndex();
    // TokenSource getTokenSource();
    // CharStream getInputStream();
}

const (
@@ -80,7 +87,7 @@ type CommonToken struct {
    Token
}

func NewCommonToken(source *TokenSourceInputStreamPair, tokenType int, channel, start int, stop int) *CommonToken {
func NewCommonToken(source *TokenSourceInputStreamPair, tokenType, channel, start, stop int) *CommonToken {

    t := CommonToken{Token{}}
@@ -1,4 +1,8 @@
package atn

import (
    "antlr4"
    "reflect"
)

// A tuple: (ATN state, predicted alt, syntactic, semantic context).
// The syntactic context is a graph-structured stack node whose
@@ -6,82 +10,144 @@ package atn
// chain used to arrive at the state. The semantic context is
// the tree of semantic predicates encountered before reaching
// an ATN state.
///
//

//var DecisionState = require('./ATNState').DecisionState
//var SemanticContext = require('./SemanticContext').SemanticContext

func checkParams(params, isCfg) {
    if(params==nil) {
        var result = { state:nil, alt:nil, context:nil, semanticContext:nil }
        if(isCfg) {
            result.reachesIntoOuterContext = 0
        }
        return result
    } else {
        var props = {}
        props.state = params.state || nil
        props.alt = params.alt || nil
        props.context = params.context || nil
        props.semanticContext = params.semanticContext || nil
        if(isCfg) {
            props.reachesIntoOuterContext = params.reachesIntoOuterContext || 0
            props.precedenceFilterSuppressed = params.precedenceFilterSuppressed || false
        }
        return props
    }
type ATNConfig struct {
    precedenceFilterSuppressed int
    state *ATNState
    alt int
    context *antlr4.PredictionContext
    semanticContext int
    reachesIntoOuterContext int
}

func ATNConfig(params, config) {
    this.checkContext(params, config)
    params = checkParams(params)
    config = checkParams(config, true)
    // The ATN state associated with this configuration///
    this.state = params.state!=nil ? params.state : config.state
    // What alt (or lexer rule) is predicted by this configuration///
    this.alt = params.alt!=nil ? params.alt : config.alt
    // The stack of invoking states leading to the rule/states associated
    // with this config. We track only those contexts pushed during
    // execution of the ATN simulator.
    this.context = params.context!=nil ? params.context : config.context
    this.semanticContext = params.semanticContext!=nil ? params.semanticContext :
        (config.semanticContext!=nil ? config.semanticContext : SemanticContext.NONE)
    // We cannot execute predicates dependent upon local context unless
    // we know for sure we are in the correct context. Because there is
    // no way to do this efficiently, we simply cannot evaluate
    // dependent predicates unless we are in the rule that initially
    // invokes the ATN simulator.
    //
    // closure() tracks the depth of how far we dip into the
    // outer context: depth > 0. Note that it may not be totally
    // accurate depth since I don't ever decrement. TODO: make it a boolean then
    this.reachesIntoOuterContext = config.reachesIntoOuterContext
    this.precedenceFilterSuppressed = config.precedenceFilterSuppressed
    return this
func NewATNConfig7(old *ATNConfig) *ATNConfig { // dup
    a := new(ATNConfig)
    a.state = old.state;
    a.alt = old.alt;
    a.context = old.context;
    a.semanticContext = old.semanticContext;
    a.reachesIntoOuterContext = old.reachesIntoOuterContext;
    return a
}

func (this *ATNConfig) checkContext(params, config) {
    if((params.context==nil || params.context==nil) &&
        (config==nil || config.context==nil || config.context==nil)) {
        this.context = nil
    }
func NewATNConfig6(state *ATNState, alt int, context *antlr4.PredictionContext) *ATNConfig {
    return NewATNConfig(state, alt, context, SemanticContextNONE);
}

func NewATNConfig5(state *ATNState, alt int, context *antlr4.PredictionContext, semanticContext *SemanticContext) *ATNConfig {
    a := new(ATNConfig)
    a.state = state;
    a.alt = alt;
    a.context = context;
    a.semanticContext = semanticContext;
    return a
}

func NewATNConfig4(c *ATNConfig, state *ATNState) *ATNConfig {
    return NewATNConfig(c, state, c.context, c.semanticContext);
}

func NewATNConfig3(c *ATNConfig, state *ATNState, semanticContext *SemanticContext) *ATNConfig {
    return NewATNConfig(c, state, c.context, semanticContext);
}

func NewATNConfig2(c *ATNConfig, semanticContext *SemanticContext) *ATNConfig {
    return NewATNConfig(c, c.state, c.context, semanticContext);
}

func NewATNConfig1(c *ATNConfig, state *ATNState, context *antlr4.PredictionContext) *ATNConfig {
    return NewATNConfig(c, state, context, c.semanticContext);
}

func NewATNConfig(c *ATNConfig, state *ATNState, context *antlr4.PredictionContext, semanticContext *SemanticContext) *ATNConfig {
    a := new(ATNConfig)
    a.state = state;
    a.alt = c.alt;
    a.context = context;
    a.semanticContext = semanticContext;
    a.reachesIntoOuterContext = c.reachesIntoOuterContext;
    return a
}

//
//
//func checkParams(params *ATNConfig, isCfg bool) *ATNConfigParams {
// if(params == nil) {
// var result = { state:nil, alt:nil, context:nil, semanticContext:nil }
// if(isCfg) {
// result.reachesIntoOuterContext = 0
// }
// return result
// } else {
// var props = {}
// props.state = params.state || nil
// props.alt = params.alt || nil
// props.context = params.context || nil
// props.semanticContext = params.semanticContext || nil
// if(isCfg) {
// props.reachesIntoOuterContext = params.reachesIntoOuterContext || 0
// props.precedenceFilterSuppressed = params.precedenceFilterSuppressed || false
// }
// return props
// }
//}
//
//
//func NewATNConfig(params *ATNConfig, config *ATNConfig) *ATNConfig {
//
// this := new(ATNConfig)
//
// this.checkContext(params, config)
//
// params = checkParams(params, false)
// config = checkParams(config, true)
//
// if params.state != nil {
// this.state = params.state
// } else {
// this.state = config.state
// }
//
// if params.alt != nil {
// this.alt = params.alt
// } else {
// this.alt = config.alt
// }
//
// this.context = params.context!=nil ? params.context : config.context
//
// this.semanticContext = params.semanticContext!=nil ? params.semanticContext :
// (config.semanticContext!=nil ? config.semanticContext : SemanticContext.NONE)
//
// this.reachesIntoOuterContext = config.reachesIntoOuterContext
// this.precedenceFilterSuppressed = config.precedenceFilterSuppressed
//
// return this
//}
//
//
//
//
//
//func (this *ATNConfig) checkContext(params, config) {
// if((params.context==nil || params.context==nil) &&
// (config==nil || config.context==nil || config.context==nil)) {
// this.context = nil
// }
//}

// An ATN configuration is equal to another if both have
// the same state, they predict the same alternative, and
// syntactic/semantic contexts are the same.
///
func (this *ATNConfig) equals(other) {
func (this *ATNConfig) equals(other interface{}) bool {
    if (this == other) {
        return true
    } else if (! _, ok := other.(ATNConfig); ok) {
    } else if _, ok := other.(*ATNConfig); !ok {
        return false
    } else {
        return this.state.stateNumber==other.state.stateNumber &&
            this.alt==other.alt &&
            (this.context==nil ? other.context==nil : this.context.equals(other.context)) &&
            this.semanticContext.equals(other.semanticContext) &&
            this.precedenceFilterSuppressed==other.precedenceFilterSuppressed
        return reflect.DeepEqual(this, other)
    }
}
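Aside (illustrative, not from the commit): the equals method above now falls back to reflect.DeepEqual instead of the old field-by-field comparison. A tiny self-contained example of how DeepEqual treats pointer fields, which is the behavior being relied on here:

package main

import (
    "fmt"
    "reflect"
)

type ctx struct{ id int }

type config struct {
    alt     int
    context *ctx
}

func main() {
    a := config{alt: 1, context: &ctx{id: 5}}
    b := config{alt: 1, context: &ctx{id: 5}} // different pointer, equal pointee
    fmt.Println(a.context == b.context)       // false: distinct pointers
    fmt.Println(reflect.DeepEqual(a, b))      // true: DeepEqual compares what the pointers point to
}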
@@ -107,6 +173,10 @@ func (this *ATNConfig) toString() string {
}

type LexerATNConfig struct {
    ATNConfig
}

func LexerATNConfig(params, config) {
    ATNConfig.call(this, params, config)
@@ -117,9 +187,6 @@ func LexerATNConfig(params, config) {
    return this
}

//LexerATNConfig.prototype = Object.create(ATNConfig.prototype)
//LexerATNConfig.prototype.constructor = LexerATNConfig

func (this *LexerATNConfig) hashString() {
    return "" + this.state.stateNumber + this.alt + this.context +
        this.semanticContext + (this.passedThroughNonGreedyDecision ? 1 : 0) +
@@ -10,158 +10,130 @@ import (

var TreeINVALID_INTERVAL = antlr4.NewInterval(-1, -2)

type Tree struct {

type Tree interface {
    getParent() *Tree
    getPayload() *interface{}
    getChild(i int) *Tree
    getChildCount() int
    toStringTree() string
}

func NewTree() *Tree {
    return new(Tree)
}

type SyntaxTree struct {
type SyntaxTree interface {
    Tree
    getSourceInterval() *antlr4.Interval
}

func NewSyntaxTree() *SyntaxTree{
    Tree.call(this)
    return this
}

type ParseTree struct {
type ParseTree interface {
    SyntaxTree
    // <T> T accept(ParseTreeVisitor<? extends T> visitor);
    accept(visitor *ParseTreeVisitor)
    getText() string
    toStringTree(parser *antlr4.Parser) string
}

func NewParseTree() *ParseTree{
    SyntaxTree.call(this)
    return this
}

type RuleNode struct {
type RuleNode interface {
    ParseTree
    getRuleContext() *antlr4.RuleContext
}

func NewRuleNode() *RuleNode{
    ParseTree.call(this)
    return this
}

type TerminalNode struct {
type TerminalNode interface {
    ParseTree
    getSymbol() *antlr4.Token
}

func NewTerminalNode() *TerminalNode{
    ParseTree.call(this)
    return this
}

type ErrorNode struct {
type ErrorNode interface {
    TerminalNode
}

func NewErrorNode() *ErrorNode{
    TerminalNode.call(this)
    return this
type ParseTreeVisitor interface {
    // NOTE: removed type arguments
    visit(tree *ParseTree) interface{}
    visitChildren(node *RuleNode) interface{}
    visitTerminal(node *TerminalNode) interface{}
    visitErrorNode(node *ErrorNode) interface{}
}

type ParseTreeVisitor struct {
//func (this *ParseTreeVisitor) visit(ctx) {
// if (Utils.isArray(ctx)) {
// var self = this
// return ctx.map(function(child) { return visitAtom(self, child)})
// } else {
// return visitAtom(this, ctx)
// }
//}
//
//func visitAtom(visitor, ctx) {
// if (ctx.parser == nil) { //is terminal
// return
// }
//
// var name = ctx.parser.ruleNames[ctx.ruleIndex]
// var funcName = "visit" + Utils.titleCase(name)
//
// return visitor[funcName](ctx)
//}

type ParseTreeListener interface {
    visitTerminal(node *TerminalNode)
    visitErrorNode(node *ErrorNode)
    enterEveryRule(ctx *antlr4.ParserRuleContext)
    exitEveryRule(ctx *antlr4.ParserRuleContext)
}

func NewParseTreeVisitor() *ParseTreeVisitor {
    return new(ParseTreeVisitor)
}

func (this *ParseTreeVisitor) visit(ctx) {
    if (Utils.isArray(ctx)) {
        var self = this
        return ctx.map(function(child) { return visitAtom(self, child)})
    } else {
        return visitAtom(this, ctx)
    }
}

func visitAtom(visitor, ctx) {
    if (ctx.parser == nil) { //is terminal
        return
    }

    var name = ctx.parser.ruleNames[ctx.ruleIndex]
    var funcName = "visit" + Utils.titleCase(name)

    return visitor[funcName](ctx)
}

type ParseTreeListener struct {

}

func NewParseTreeListener() *ParseTreeListener {
    return new(ParseTreeListener)
}

func (this *ParseTreeListener) visitTerminal(node) {
}

func (this *ParseTreeListener) visitErrorNode(node) {
}

func (this *ParseTreeListener) enterEveryRule(node) {
}

func (this *ParseTreeListener) exitEveryRule(node) {
}

type TerminalNodeImpl struct {
    TerminalNode
    parentCtx *antlr4.RuleContext
    symbol
    symbol *antlr4.Token
}

func TerminalNodeImpl(symbol) {
func NewTerminalNodeImpl(symbol *antlr4.Token) *TerminalNodeImpl {
    tn := &TerminalNodeImpl{TerminalNode{}}
    tn.parentCtx = nil
    tn.symbol = symbol

    tn.initTerminalNodeImpl(symbol)

    return tn
}

func (this *TerminalNodeImpl) getChild(i) {
func (this *TerminalNodeImpl) initTerminalNodeImpl(symbol *antlr4.Token) {
    this.parentCtx = nil
    this.symbol = symbol
}

func (this *TerminalNodeImpl) getChild(i int) *Tree {
    return nil
}

func (this *TerminalNodeImpl) getSymbol() {
func (this *TerminalNodeImpl) getSymbol() *antlr4.Token {
    return this.symbol
}

func (this *TerminalNodeImpl) getParent() {
func (this *TerminalNodeImpl) getParent() *Tree {
    return this.parentCtx
}

func (this *TerminalNodeImpl) getPayload() {
func (this *TerminalNodeImpl) getPayload() *antlr4.Token {
    return this.symbol
}

func (this *TerminalNodeImpl) getSourceInterval() {
func (this *TerminalNodeImpl) getSourceInterval() *antlr4.Interval {
    if (this.symbol == nil) {
        return INVALID_INTERVAL
        return TreeINVALID_INTERVAL
    }
    var tokenIndex = this.symbol.tokenIndex
    return NewInterval(tokenIndex, tokenIndex)
    return antlr4.NewInterval(tokenIndex, tokenIndex)
}

func (this *TerminalNodeImpl) getChildCount() {
    return 0
}

func (this *TerminalNodeImpl) accept(visitor) {
func (this *TerminalNodeImpl) accept(visitor *ParseTreeVisitor ) interface{} {
    return visitor.visitTerminal(this)
}

func (this *TerminalNodeImpl) getText() {
    return this.symbol.text
func (this *TerminalNodeImpl) getText() string {
    return this.symbol.text()
}

func (this *TerminalNodeImpl) toString() string {
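Aside (a sketch, assuming it sits inside this tree package): with ParseTreeListener declared as an interface above, any type supplying the four methods satisfies it, for example a listener that just counts nodes:

// terminalCounter is an illustrative ParseTreeListener that tallies
// terminal and error nodes seen during a walk.
type terminalCounter struct {
    terminals, errors int
}

func (c *terminalCounter) visitTerminal(node *TerminalNode)             { c.terminals++ }
func (c *terminalCounter) visitErrorNode(node *ErrorNode)               { c.errors++ }
func (c *terminalCounter) enterEveryRule(ctx *antlr4.ParserRuleContext) {}
func (c *terminalCounter) exitEveryRule(ctx *antlr4.ParserRuleContext)  {}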
@@ -172,45 +144,52 @@ func (this *TerminalNodeImpl) toString() string {
    }
}

// Represents a token that was consumed during resynchronization
// rather than during a valid match operation. For example,
// we will create this kind of a node during single token insertion
// and deletion as well as during "consume until error recovery set"
// upon no viable alternative exceptions.

func ErrorNodeImpl(token) {
    TerminalNodeImpl.call(this, token)
    return this
type ErrorNodeImpl struct {
    TerminalNodeImpl
}

func NewErrorNodeImpl(token *antlr4.Token) *ErrorNodeImpl {
    en := new(ErrorNodeImpl)
    en.initTerminalNodeImpl(token)
    return en
}

func (this *ErrorNodeImpl) isErrorNode() {
func (this *ErrorNodeImpl) isErrorNode() bool {
    return true
}

func (this *ErrorNodeImpl) accept(visitor) {
func (this *ErrorNodeImpl) accept( visitor *ParseTreeVisitor ) interface{} {
    return visitor.visitErrorNode(this)
}

type ParseTreeWalker struct {

}

func NewParseTreeWalker() *ParseTreeWalker{
    return this
func NewParseTreeWalker() *ParseTreeWalker {
    return new(ParseTreeWalker)
}

func (this *ParseTreeWalker) walk(listener, t) {
    var errorNode = t instanceof ErrorNode ||
        (t.isErrorNode != nil && t.isErrorNode())
    if (errorNode) {
        listener.visitErrorNode(t)
    } else if _, ok := t.(TerminalNode); ok {
        listener.visitTerminal(t)
func (this *ParseTreeWalker) walk(listener *ParseTreeListener, t *Tree) {

    if errorNode, ok := t.(*ErrorNode); ok {
        listener.visitErrorNode(errorNode)
    } else if term, ok := t.(TerminalNode); ok {
        listener.visitTerminal(term)
    } else {
        this.enterRule(listener, t)
        for i := 0 i < t.getChildCount() i++) {
        for i := 0; i < t.getChildCount(); i++ {
            var child = t.getChild(i)
            this.walk(listener, child)
        }
@@ -223,14 +202,14 @@ func (this *ParseTreeWalker) walk(listener, t) {
// {@link RuleContext}-specific event. First we trigger the generic and then
// the rule specific. We to them in reverse order upon finishing the node.
//
func (this *ParseTreeWalker) enterRule(listener, r) {
    var ctx = r.getRuleContext()
func (this *ParseTreeWalker) enterRule(listener *ParseTreeListener, r *RuleNode) {
    var ctx = r.getRuleContext().(*antlr4.ParserRuleContext)
    listener.enterEveryRule(ctx)
    ctx.enterRule(listener)
}

func (this *ParseTreeWalker) exitRule(listener, r) {
    var ctx = r.getRuleContext()
func (this *ParseTreeWalker) exitRule(listene *ParseTreeListener, r *RuleNode) {
    var ctx = r.getRuleContext().(*antlr4.ParserRuleContext)
    ctx.exitRule(listener)
    listener.exitEveryRule(ctx)
}