And now all of the changes are committed

This commit is contained in:
Peter Boyer 2015-12-22 16:30:14 -06:00
parent 9f6ea082e8
commit 859df5231c
32 changed files with 944 additions and 831 deletions

View File

@ -51,7 +51,7 @@ func NewATN(grammarType int, maxTokenType int) *ATN {
// If {@code ctx} is nil, the set of tokens will not include what can follow
// the rule surrounding {@code s}. In other words, the set will be
// restricted to tokens reachable staying within {@code s}'s rule.
func (this *ATN) nextTokensInContext(s IATNState, ctx *RuleContext) *IntervalSet {
func (this *ATN) nextTokensInContext(s IATNState, ctx IRuleContext) *IntervalSet {
var anal = NewLL1Analyzer(this)
return anal.LOOK(s, nil, ctx)
}
@ -68,7 +68,7 @@ func (this *ATN) nextTokensNoContext(s IATNState) *IntervalSet {
return s.getNextTokenWithinRule()
}
func (this *ATN) nextTokens(s IATNState, ctx *RuleContext) *IntervalSet {
func (this *ATN) nextTokens(s IATNState, ctx IRuleContext) *IntervalSet {
if ( ctx==nil ) {
return this.nextTokensNoContext(s)
} else {
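
Note: the nextTokens hunk above keeps its nil-context dispatch while widening the parameter from *RuleContext to the IRuleContext interface. A minimal, self-contained sketch of that dispatch pattern, using illustrative stand-in types rather than the runtime's actual ones:

package main

import "fmt"

// Illustrative stand-ins; not the runtime's actual types.
type IRuleContext interface {
	getRuleIndex() int
}

type ruleCtx struct{ index int }

func (r *ruleCtx) getRuleIndex() int { return r.index }

func nextTokensNoContext() string { return "follow set within the rule only" }

func nextTokensInContext(ctx IRuleContext) string {
	return fmt.Sprintf("follow set computed through invoking rule %d", ctx.getRuleIndex())
}

// nextTokens mirrors the dispatch above: a literal nil context selects the
// context-free computation.
func nextTokens(ctx IRuleContext) string {
	if ctx == nil {
		return nextTokensNoContext()
	}
	return nextTokensInContext(ctx)
}

func main() {
	fmt.Println(nextTokens(nil))
	fmt.Println(nextTokens(&ruleCtx{index: 3}))
}

One subtlety the interface brings: a nil *ruleCtx stored in an IRuleContext variable is not == nil, so callers that want the fallback must pass a literal nil.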

View File

@ -54,31 +54,30 @@ func NewATNConfig6(state IATNState, alt int, context IPredictionContext) *ATNCon
return NewATNConfig5(state, alt, context, SemanticContextNONE);
}
func NewATNConfig5(state IATNState,
alt int, context IPredictionContext, semanticContext SemanticContext) *ATNConfig {
func NewATNConfig5(state IATNState, alt int, context IPredictionContext, semanticContext SemanticContext) *ATNConfig {
a := new(ATNConfig)
a.InitATNConfig2(state, alt, context, semanticContext)
return a
}
func NewATNConfig4(c *ATNConfig, state IATNState) *ATNConfig {
return NewATNConfig(c, state, c.context, c.semanticContext);
func NewATNConfig4(c IATNConfig , state IATNState) *ATNConfig {
return NewATNConfig(c, state, c.getContext(), c.getSemanticContext());
}
func NewATNConfig3(c *ATNConfig, state IATNState, semanticContext SemanticContext) *ATNConfig {
return NewATNConfig(c, state, c.context, semanticContext);
func NewATNConfig3(c IATNConfig , state IATNState, semanticContext SemanticContext) *ATNConfig {
return NewATNConfig(c, state, c.getContext(), semanticContext);
}
func NewATNConfig2(c *ATNConfig, semanticContext SemanticContext) *ATNConfig {
return NewATNConfig(c, c.state, c.context, semanticContext);
func NewATNConfig2(c IATNConfig , semanticContext SemanticContext) *ATNConfig {
return NewATNConfig(c, c.getState(), c.getContext(), semanticContext);
}
func NewATNConfig1(c *ATNConfig, state IATNState, context IPredictionContext) *ATNConfig {
return NewATNConfig(c, state, context, c.semanticContext);
func NewATNConfig1(c IATNConfig , state IATNState, context IPredictionContext) *ATNConfig {
return NewATNConfig(c, state, context, c.getSemanticContext());
}
func NewATNConfig(c *ATNConfig, state IATNState, context IPredictionContext, semanticContext SemanticContext) *ATNConfig {
func NewATNConfig(c IATNConfig , state IATNState, context IPredictionContext, semanticContext SemanticContext) *ATNConfig {
a := new(ATNConfig)
a.InitATNConfig(c, state, context, semanticContext)
@ -224,7 +223,7 @@ func NewLexerATNConfig4(c *LexerATNConfig, state IATNState) *LexerATNConfig {
this := new(LexerATNConfig)
this.InitATNConfig(c, state, c.context, c.semanticContext)
this.InitATNConfig(c, state, c.getContext(), c.getSemanticContext())
this.lexerActionExecutor = c.lexerActionExecutor
this.passedThroughNonGreedyDecision = checkNonGreedyDecision(c, state)
return this
@ -234,7 +233,7 @@ func NewLexerATNConfig3(c *LexerATNConfig, state IATNState, lexerActionExecutor
this := new(LexerATNConfig)
this.InitATNConfig(c, state, c.context, c.semanticContext)
this.InitATNConfig(c, state, c.getContext(), c.getSemanticContext())
this.lexerActionExecutor = lexerActionExecutor
this.passedThroughNonGreedyDecision = checkNonGreedyDecision(c, state)
return this
@ -244,7 +243,7 @@ func NewLexerATNConfig2(c *LexerATNConfig, state IATNState, context IPrediction
this := new(LexerATNConfig)
this.InitATNConfig(c, state, context, c.semanticContext)
this.InitATNConfig(c, state, context, c.getSemanticContext())
this.lexerActionExecutor = c.lexerActionExecutor
this.passedThroughNonGreedyDecision = checkNonGreedyDecision(c, state)
return this
@ -255,7 +254,7 @@ func NewLexerATNConfig1( state IATNState, alt int, context IPredictionContext) *
this := new(LexerATNConfig)
// c *ATNConfig, state IATNState, context IPredictionContext, semanticContext SemanticContext
// c IATNConfig , state IATNState, context IPredictionContext, semanticContext SemanticContext
this.InitATNConfig2(state, alt, context, SemanticContextNONE)
this.lexerActionExecutor = nil
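
Note: the constructors above now take the IATNConfig interface and read state through getState()/getContext()/getSemanticContext() instead of touching struct fields directly, which lets both ATNConfig and the embedding LexerATNConfig flow through the same code path. A hedged sketch of that accessor pattern (simplified field types, illustrative names):

package main

import "fmt"

// Simplified stand-ins for the accessor interface used above.
type IATNConfig interface {
	getState() int
	getContext() string
	getSemanticContext() string
}

type ATNConfig struct {
	state           int
	context         string
	semanticContext string
}

func (c *ATNConfig) getState() int              { return c.state }
func (c *ATNConfig) getContext() string         { return c.context }
func (c *ATNConfig) getSemanticContext() string { return c.semanticContext }

// LexerATNConfig embeds ATNConfig, so it satisfies IATNConfig for free and
// can be handed to the shared constructors.
type LexerATNConfig struct {
	ATNConfig
	passedThroughNonGreedyDecision bool
}

// newATNConfigFrom copies through the interface, the way NewATNConfig2..4 do.
func newATNConfigFrom(c IATNConfig, newState int) *ATNConfig {
	return &ATNConfig{
		state:           newState,
		context:         c.getContext(),
		semanticContext: c.getSemanticContext(),
	}
}

func main() {
	base := &ATNConfig{state: 1, context: "$", semanticContext: "NONE"}
	lex := &LexerATNConfig{ATNConfig: *base}
	fmt.Println(newATNConfigFrom(base, 2).context, newATNConfigFrom(lex, 3).state)
}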

View File

@ -391,7 +391,7 @@ func (this *ATNDeserializer) generateRuleBypassTransition(atn *ATN, idx int) {
atn.addState(bypassStop)
bypassStart.endState = bypassStop
var bs = bypassStart
atn.defineDecisionState(bypassStart.DecisionState)
bypassStop.startState = bypassStart

View File

@ -1,6 +1,42 @@
package antlr4
import "strconv"
const (
// constants for serialization
ATNStateInvalidType = 0
ATNStateBASIC = 1
ATNStateRULE_START = 2
ATNStateBLOCK_START = 3
ATNStatePLUS_BLOCK_START = 4
ATNStateSTAR_BLOCK_START = 5
ATNStateTOKEN_START = 6
ATNStateRULE_STOP = 7
ATNStateBLOCK_END = 8
ATNStateSTAR_LOOP_BACK = 9
ATNStateSTAR_LOOP_ENTRY = 10
ATNStatePLUS_LOOP_BACK = 11
ATNStateLOOP_END = 12
ATNStateINVALID_STATE_NUMBER = -1
)
//var ATNState.serializationNames = [
// "INVALID",
// "BASIC",
// "RULE_START",
// "BLOCK_START",
// "PLUS_BLOCK_START",
// "STAR_BLOCK_START",
// "TOKEN_START",
// "RULE_STOP",
// "BLOCK_END",
// "STAR_LOOP_BACK",
// "STAR_LOOP_ENTRY",
// "PLUS_LOOP_BACK",
// "LOOP_END" ]
var INITIAL_NUM_TRANSITIONS = 4
type IATNState interface {
@ -24,6 +60,8 @@ type IATNState interface {
getTransitions() []ITransition
setTransitions( []ITransition )
addTransition(ITransition, int)
toString() string
}
type ATNState struct {
@ -109,40 +147,6 @@ func (as *ATNState) setNextTokenWithinRule(v *IntervalSet) {
as.nextTokenWithinRule = v
}
const (
// constants for serialization
ATNStateInvalidType = 0
ATNStateBASIC = 1
ATNStateRULE_START = 2
ATNStateBLOCK_START = 3
ATNStatePLUS_BLOCK_START = 4
ATNStateSTAR_BLOCK_START = 5
ATNStateTOKEN_START = 6
ATNStateRULE_STOP = 7
ATNStateBLOCK_END = 8
ATNStateSTAR_LOOP_BACK = 9
ATNStateSTAR_LOOP_ENTRY = 10
ATNStatePLUS_LOOP_BACK = 11
ATNStateLOOP_END = 12
ATNStateINVALID_STATE_NUMBER = -1
)
//var ATNState.serializationNames = [
// "INVALID",
// "BASIC",
// "RULE_START",
// "BLOCK_START",
// "PLUS_BLOCK_START",
// "STAR_BLOCK_START",
// "TOKEN_START",
// "RULE_STOP",
// "BLOCK_END",
// "STAR_LOOP_BACK",
// "STAR_LOOP_ENTRY",
// "PLUS_LOOP_BACK",
// "LOOP_END" ]
func (this *ATNState) toString() string {
return strconv.Itoa(this.stateNumber)
}

View File

@ -13,30 +13,6 @@
package antlr4
import "strconv"
type IntStream interface {
consume()
LA(i int) int
mark() int
release(marker int)
index() int
seek(index int)
size() int
getSourceName() string
}
type TokenStream interface {
IntStream
LT(k int) *Token
get(index int) *Token
getTokenSource() TokenSource
setTokenSource(TokenSource)
getText() string
getTextFromInterval(*Interval) string
getTextFromRuleContext(*RuleContext) string
getTextFromTokens(*Token, *Token) string
}
// bt is just to keep meaningful parameter types to Parser
type BufferedTokenStream struct {
tokenSource TokenSource

View File

@ -1 +1,7 @@
package antlr4
type CharStream interface {
IntStream
getTextFromInterval(*Interval) string
}
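
Note: the new CharStream file above uses interface embedding: CharStream includes everything in IntStream and adds interval-based text access, so a CharStream value can be passed anywhere an IntStream is expected. A toy implementation, hedged, covering only a subset of the methods for brevity (the real interfaces declare more, such as mark, release, seek, and getSourceName):

package main

import "fmt"

// Reduced, illustrative versions of the interfaces above.
type Interval struct{ start, stop int }

type IntStream interface {
	consume()
	LA(i int) int
	index() int
	size() int
}

// CharStream embeds IntStream, mirroring the file above.
type CharStream interface {
	IntStream
	getTextFromInterval(*Interval) string
}

// stringStream is a toy CharStream over a plain string.
type stringStream struct {
	data string
	pos  int
}

func (s *stringStream) consume()   { s.pos++ }
func (s *stringStream) index() int { return s.pos }
func (s *stringStream) size() int  { return len(s.data) }

func (s *stringStream) LA(i int) int {
	p := s.pos + i - 1
	if p < 0 || p >= len(s.data) {
		return -1 // EOF sentinel
	}
	return int(s.data[p])
}

func (s *stringStream) getTextFromInterval(iv *Interval) string {
	return s.data[iv.start : iv.stop+1]
}

func main() {
	var cs CharStream = &stringStream{data: "hello"}
	cs.consume()
	fmt.Println(cs.index(), string(rune(cs.LA(1))), cs.getTextFromInterval(&Interval{0, 4}))
}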

View File

@ -6,7 +6,7 @@
package antlr4
type TokenFactory interface {
create(source *TokenSourceInputStreamPair, ttype int, text string, channel, start, stop, line, column int) *Token
create(source *TokenSourceCharStreamPair, ttype int, text string, channel, start, stop, line, column int) *Token
}
type CommonTokenFactory struct {
@ -45,14 +45,14 @@ func NewCommonTokenFactory(copyText bool) *CommonTokenFactory {
//
var CommonTokenFactoryDEFAULT = NewCommonTokenFactory(false)
func (this *CommonTokenFactory) create(source *TokenSourceInputStreamPair, ttype int, text string, channel, start, stop, line, column int) *Token {
func (this *CommonTokenFactory) create(source *TokenSourceCharStreamPair, ttype int, text string, channel, start, stop, line, column int) *Token {
var t = NewCommonToken(source, ttype, channel, start, stop)
t.line = line
t.column = column
if (text != "") {
t.setText( text )
} else if (this.copyText && source.inputStream != nil) {
t.setText( source.inputStream.getText(start,stop) )
} else if (this.copyText && source.charStream != nil) {
t.setText( source.charStream.getTextFromInterval(NewInterval(start,stop)))
}
return t.Token
}

View File

@ -9,10 +9,10 @@ import (
type PredPrediction struct {
alt int
pred int
pred SemanticContext
}
func NewPredPrediction(pred, alt int) *PredPrediction {
func NewPredPrediction(pred SemanticContext, alt int) *PredPrediction {
this := new(PredPrediction)
this.alt = alt
@ -58,7 +58,7 @@ type DFAState struct {
prediction int
lexerActionExecutor *LexerActionExecutor
requiresFullContext bool
predicates []PredPrediction
predicates []*PredPrediction
}
func NewDFAState(stateNumber int, configs *ATNConfigSet) *DFAState {

View File

@ -172,7 +172,7 @@ func (this *DefaultErrorStrategy) reportError(recognizer IParser, e IRecognition
//
func (this *DefaultErrorStrategy) recover(recognizer IParser, e IRecognitionException) {
if (this.lastErrorIndex==recognizer.getInputStream().index &&
if (this.lastErrorIndex==recognizer.getInputStream().index() &&
this.lastErrorStates != nil && this.lastErrorStates.contains(recognizer.getState())) {
// uh oh, another error at same token index and previously-visited
// state in ATN must be a case where LT(1) is in the recovery
@ -180,7 +180,7 @@ func (this *DefaultErrorStrategy) recover(recognizer IParser, e IRecognitionExce
// at least to prevent an infinite loop this is a failsafe.
recognizer.consume()
}
this.lastErrorIndex = recognizer.getInputStream().index
this.lastErrorIndex = recognizer.getInputStream().index()
if (this.lastErrorStates == nil) {
this.lastErrorStates = NewIntervalSet()
}
@ -323,7 +323,7 @@ func (this *DefaultErrorStrategy) reportInputMismatch(recognizer IParser, e *Inp
// @param e the recognition exception
//
func (this *DefaultErrorStrategy) reportFailedPredicate(recognizer IParser, e *FailedPredicateException) {
var ruleName = recognizer.getRuleNames()[recognizer.getParserRuleContext().ruleIndex]
var ruleName = recognizer.getRuleNames()[recognizer.getParserRuleContext().getRuleIndex()]
var msg = "rule " + ruleName + " " + e.message
recognizer.notifyErrorListeners(msg, e.offendingToken, e)
}
@ -476,7 +476,7 @@ func (this *DefaultErrorStrategy) singleTokenInsertion(recognizer IParser) bool
var atn = recognizer.getInterpreter().atn
var currentState = atn.states[recognizer.getState()]
var next = currentState.getTransitions()[0].getTarget()
var expectingAtLL2 = atn.nextTokens(next, recognizer.getParserRuleContext().RuleContext)
var expectingAtLL2 = atn.nextTokens(next, recognizer.getParserRuleContext())
if (expectingAtLL2.contains(currentSymbolType) ){
this.reportMissingToken(recognizer)
return true
@ -691,13 +691,13 @@ func (this *DefaultErrorStrategy) getErrorRecoverySet(recognizer IParser) *Inter
var atn = recognizer.getInterpreter().atn
var ctx = recognizer.getParserRuleContext()
var recoverSet = NewIntervalSet()
for (ctx != nil && ctx.invokingState>=0) {
for (ctx != nil && ctx.getInvokingState()>=0) {
// compute what follows who invoked us
var invokingState = atn.states[ctx.invokingState]
var invokingState = atn.states[ctx.getInvokingState()]
var rt = invokingState.getTransitions()[0]
var follow = atn.nextTokens(rt.(*RuleTransition).followState, nil)
recoverSet.addSet(follow)
ctx = ctx.parentCtx
ctx = ctx.getParent().(IParserRuleContext)
}
recoverSet.removeOne(TokenEpsilon)
return recoverSet
@ -760,8 +760,8 @@ func NewBailErrorStrategy() *BailErrorStrategy {
func (this *BailErrorStrategy) recover(recognizer IParser, e IRecognitionException) {
var context = recognizer.getParserRuleContext()
for (context != nil) {
context.exception = e
context = context.parentCtx
context.setException(e)
context = context.getParent().(IParserRuleContext)
}
panic(NewParseCancellationException()) // TODO we don't emit e properly
}
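
Note: several hunks above replace direct field walks (ctx = ctx.parentCtx) with ctx = ctx.getParent().(IParserRuleContext): the parent is now reached through a getter that returns a broader tree type and is asserted back to the parser-context interface. A hedged sketch of that loop shape, with illustrative stand-in types:

package main

import "fmt"

// Illustrative stand-ins for the tree/context interfaces.
type Tree interface {
	getParent() Tree
}

type IParserRuleContext interface {
	Tree
	getInvokingState() int
}

type parserRuleContext struct {
	parent        IParserRuleContext
	invokingState int
}

func (c *parserRuleContext) getParent() Tree       { return c.parent }
func (c *parserRuleContext) getInvokingState() int { return c.invokingState }

// collectInvokingStates mirrors the getErrorRecoverySet loop: walk up via the
// getter and assert the Tree result back to IParserRuleContext.
func collectInvokingStates(ctx IParserRuleContext) []int {
	var states []int
	for ctx != nil && ctx.getInvokingState() >= 0 {
		states = append(states, ctx.getInvokingState())
		parent := ctx.getParent()
		if parent == nil {
			break // defensive; the -1 sentinel on the root usually stops the loop
		}
		ctx = parent.(IParserRuleContext)
	}
	return states
}

func main() {
	root := &parserRuleContext{parent: nil, invokingState: -1}
	mid := &parserRuleContext{parent: root, invokingState: 4}
	leaf := &parserRuleContext{parent: mid, invokingState: 9}
	fmt.Println(collectInvokingStates(leaf)) // [9 4]
}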

View File

@ -22,11 +22,11 @@ type RecognitionException struct {
offendingToken *Token
offendingState int
ctx IRuleContext
input *InputStream
input CharStream
}
func NewRecognitionException(message string, recognizer IRecognizer, input *InputStream, ctx IRuleContext) *RecognitionException {
func NewRecognitionException(message string, recognizer IRecognizer, input CharStream, ctx IRuleContext) *RecognitionException {
// todo
// Error.call(this)
@ -44,7 +44,7 @@ func NewRecognitionException(message string, recognizer IRecognizer, input *Inpu
return t
}
func (t *RecognitionException) InitRecognitionException(message string, recognizer IRecognizer, input *InputStream, ctx IRuleContext){
func (t *RecognitionException) InitRecognitionException(message string, recognizer IRecognizer, input CharStream, ctx IRuleContext){
t.message = message
t.recognizer = recognizer
@ -107,7 +107,7 @@ type LexerNoViableAltException struct {
}
func NewLexerNoViableAltException(lexer *Lexer, input *InputStream, startIndex int,
func NewLexerNoViableAltException(lexer *Lexer, input CharStream, startIndex int,
deadEndConfigs *ATNConfigSet) *LexerNoViableAltException {
this := new (LexerNoViableAltException)
@ -122,8 +122,8 @@ func NewLexerNoViableAltException(lexer *Lexer, input *InputStream, startIndex i
func (this *LexerNoViableAltException) toString() string {
var symbol = ""
if (this.startIndex >= 0 && this.startIndex < this.input.size) {
symbol = this.input.getText(this.startIndex,this.startIndex)
if (this.startIndex >= 0 && this.startIndex < this.input.size()) {
symbol = this.input.getTextFromInterval(NewInterval(this.startIndex,this.startIndex))
}
return "LexerNoViableAltException" + symbol
}
@ -135,7 +135,7 @@ type NoViableAltException struct {
startToken *Token
offendingToken *Token
ctx *ParserRuleContext
ctx IParserRuleContext
deadEndConfigs *ATNConfigSet
}
@ -145,10 +145,10 @@ type NoViableAltException struct {
// of the offending input and also knows where the parser was
// in the various paths when the error. Reported by reportNoViableAlternative()
//
func NewNoViableAltException(recognizer *Parser, input *InputStream, startToken *Token, offendingToken *Token, deadEndConfigs *ATNConfigSet, ctx *ParserRuleContext) *NoViableAltException {
func NewNoViableAltException(recognizer IParser, input CharStream, startToken *Token, offendingToken *Token, deadEndConfigs *ATNConfigSet, ctx IParserRuleContext) *NoViableAltException {
if (ctx == nil){
ctx = recognizer._ctx
ctx = recognizer.getParserRuleContext()
}
if (offendingToken == nil){

View File

@ -1,7 +1,5 @@
package antlr4
// Vacuums all input from a string and then treat it like a buffer.
type InputStream struct {
name string
index int
@ -21,10 +19,6 @@ func NewInputStream(data string) *InputStream {
return is
}
// Reset the stream so that it's in the same state it was
// when the object was created *except* the data array is not
// touched.
//
func (is *InputStream) reset() {
is.index = 0
}
@ -63,9 +57,6 @@ func (is *InputStream) mark() int {
func (is *InputStream) release(marker int) {
}
// consume() ahead until p==index can't just set p=index as we must
// update line and column. If we seek backwards, just set p
//
func (is *InputStream) seek(index int) {
if index <= is.index {
is.index = index // just jump don't update stream state (line,...)

View File

@ -1 +1,14 @@
package antlr4
type IntStream interface {
consume()
LA(int) int
mark() int
release(marker int)
index() int
seek(index int)
size() int
getSourceName() string
}

View File

@ -67,7 +67,7 @@ func (la *LL1Analyzer) getDecisionLookahead(s IATNState) []*IntervalSet {
// @return The set of tokens that can follow {@code s} in the ATN in the
// specified {@code ctx}.
///
func (la *LL1Analyzer) LOOK(s, stopState IATNState, ctx *RuleContext) *IntervalSet {
func (la *LL1Analyzer) LOOK(s, stopState IATNState, ctx IRuleContext) *IntervalSet {
var r = NewIntervalSet()
var seeThruPreds = true // ignore preds get all lookahead
var lookContext IPredictionContext

View File

@ -11,18 +11,6 @@ import (
// of speed.
///
type TokenSource interface {
nextToken() *Token
getLine() int
skip()
more()
getCharPositionInLine() int
getInputStream() *InputStream
getSourceName() string
setTokenFactory(factory TokenFactory)
getTokenFactory() TokenFactory
}
type ILexer interface {
TokenSource
IRecognizer
@ -38,9 +26,9 @@ type ILexer interface {
type Lexer struct {
Recognizer
_input *InputStream
_input CharStream
_factory TokenFactory
_tokenFactorySourcePair *TokenSourceInputStreamPair
_tokenFactorySourcePair *TokenSourceCharStreamPair
_interp *LexerATNSimulator
_token *Token
_tokenStartCharIndex int
@ -55,7 +43,7 @@ type Lexer struct {
actionType int
}
func NewLexer(input *InputStream) *Lexer {
func NewLexer(input CharStream) *Lexer {
lexer := new(Lexer)
@ -65,11 +53,11 @@ func NewLexer(input *InputStream) *Lexer {
return lexer
}
func (l *Lexer) InitLexer(input *InputStream){
func (l *Lexer) InitLexer(input CharStream){
l._input = input
l._factory = CommonTokenFactoryDEFAULT
l._tokenFactorySourcePair = &TokenSourceInputStreamPair{l, input}
l._tokenFactorySourcePair = &TokenSourceCharStreamPair{l, input}
l._interp = nil // child classes must populate l
@ -146,14 +134,12 @@ func (l *Lexer) reset() {
l._interp.reset()
}
func (l *Lexer) getInputStream() *InputStream {
func (l *Lexer) getInputStream() CharStream {
return l._input
}
func (l *Lexer) getSourceName() string {
panic("Not implemented")
return ""
// return l._input.sourceName
return l._input.getSourceName()
}
func (l *Lexer) setChannel(v int){
@ -207,7 +193,7 @@ func (l *Lexer) nextToken() *Token {
}
l._token = nil
l._channel = TokenDefaultChannel
l._tokenStartCharIndex = l._input.index
l._tokenStartCharIndex = l._input.index()
l._tokenStartColumn = l._interp.column
l._tokenStartLine = l._interp.line
l._text = nil
@ -283,16 +269,16 @@ func (l *Lexer) popMode() int {
}
func (l *Lexer) inputStream() *InputStream {
func (l *Lexer) inputStream() CharStream {
return l._input
}
func (l *Lexer) setInputStream(input *InputStream) {
func (l *Lexer) setInputStream(input CharStream) {
l._input = nil
l._tokenFactorySourcePair = &TokenSourceInputStreamPair{l, l._input}
l._tokenFactorySourcePair = &TokenSourceCharStreamPair{l, l._input}
l.reset()
l._input = input
l._tokenFactorySourcePair = &TokenSourceInputStreamPair{l, l._input}
l._tokenFactorySourcePair = &TokenSourceCharStreamPair{l, l._input}
}
// By default does not support multiple emits per nextToken invocation
@ -319,7 +305,7 @@ func (l *Lexer) emit() *Token {
func (l *Lexer) emitEOF() *Token {
cpos := l.getCharPositionInLine();
lpos := l.getLine();
var eof = l._factory.create(l._tokenFactorySourcePair, TokenEOF, "", TokenDefaultChannel, l._input.index, l._input.index - 1, lpos, cpos)
var eof = l._factory.create(l._tokenFactorySourcePair, TokenEOF, "", TokenDefaultChannel, l._input.index(), l._input.index() - 1, lpos, cpos)
l.emitToken(eof)
return eof
}
@ -342,7 +328,7 @@ func (l *Lexer) setType(t int) {
// What is the index of the current character of lookahead?///
func (l *Lexer) getCharIndex() int {
return l._input.index
return l._input.index()
}
// Return the text matched so far for the current token or any text override.
@ -378,8 +364,8 @@ func (l *Lexer) getAllTokens() []*Token {
func (l *Lexer) notifyListeners(e IRecognitionException) {
var start = l._tokenStartCharIndex
var stop = l._input.index
var text = l._input.getText(start, stop)
var stop = l._input.index()
var text = l._input.getTextFromInterval(NewInterval(start, stop))
var msg = "token recognition error at: '" + text + "'"
var listener = l.getErrorListenerDispatch()
listener.syntaxError(l, nil, l._tokenStartLine, l._tokenStartColumn, msg, e)
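
Note: a recurring edit in this file is l._input.index becoming l._input.index(): once _input is typed as the CharStream interface rather than the concrete *InputStream, the cursor is only reachable through a method. A minimal hedged sketch of what the call sites now rely on (toy types, not the runtime's InputStream):

package main

import "fmt"

// Toy interface covering just the calls the Lexer hunks make here.
type CharStream interface {
	index() int
	consume()
}

// toyStream keeps the cursor in an unexported field and publishes it via
// the index() method required by the interface.
type toyStream struct {
	data string
	pos  int
}

func (s *toyStream) index() int { return s.pos }
func (s *toyStream) consume()   { s.pos++ }

func main() {
	var input CharStream = &toyStream{data: "abc"}
	tokenStartCharIndex := input.index() // what nextToken() records
	input.consume()
	input.consume()
	stop := input.index()
	fmt.Printf("token spans [%d,%d)\n", tokenStartCharIndex, stop)
}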

View File

@ -103,7 +103,7 @@ func (this *LexerATNSimulator) copyState(simulator *LexerATNSimulator) {
this.startIndex = simulator.startIndex
}
func (this *LexerATNSimulator) match(input *InputStream, mode int) int {
func (this *LexerATNSimulator) match(input CharStream, mode int) int {
this.match_calls += 1
this.mode = mode
@ -113,7 +113,7 @@ func (this *LexerATNSimulator) match(input *InputStream, mode int) int {
input.release(mark)
}()
this.startIndex = input.index
this.startIndex = input.index()
this.prevAccept.reset()
var dfa = this.decisionToDFA[mode]
if (dfa.s0 == nil) {
@ -131,7 +131,7 @@ func (this *LexerATNSimulator) reset() {
this.mode = LexerDefaultMode
}
func (this *LexerATNSimulator) matchATN(input *InputStream) int {
func (this *LexerATNSimulator) matchATN(input CharStream) int {
var startState = this.atn.modeToStartState[this.mode]
if (LexerATNSimulatordebug) {
@ -156,7 +156,7 @@ func (this *LexerATNSimulator) matchATN(input *InputStream) int {
return predict
}
func (this *LexerATNSimulator) execATN(input *InputStream, ds0 *DFAState) int {
func (this *LexerATNSimulator) execATN(input CharStream, ds0 *DFAState) int {
if (LexerATNSimulatordebug) {
fmt.Println("start state closure=" + ds0.configs.toString())
}
@ -252,7 +252,7 @@ func (this *LexerATNSimulator) getExistingTargetState(s *DFAState, t int) *DFASt
// @return The computed target DFA state for the given input symbol
// {@code t}. If {@code t} does not lead to a valid DFA state, this method
// returns {@link //ERROR}.
func (this *LexerATNSimulator) computeTargetState(input *InputStream, s *DFAState, t int) *DFAState {
func (this *LexerATNSimulator) computeTargetState(input CharStream, s *DFAState, t int) *DFAState {
var reach = NewOrderedATNConfigSet()
// if we don't find an existing DFA state
// Fill reach starting from closure, following t transitions
@ -271,7 +271,7 @@ func (this *LexerATNSimulator) computeTargetState(input *InputStream, s *DFAStat
return this.addDFAEdge(s, t, nil, reach.ATNConfigSet)
}
func (this *LexerATNSimulator) failOrAccept(prevAccept *SimState, input *InputStream, reach *ATNConfigSet, t int) int {
func (this *LexerATNSimulator) failOrAccept(prevAccept *SimState, input CharStream, reach *ATNConfigSet, t int) int {
if (this.prevAccept.dfaState != nil) {
var lexerActionExecutor = prevAccept.dfaState.lexerActionExecutor
this.accept(input, lexerActionExecutor, this.startIndex,
@ -279,7 +279,7 @@ func (this *LexerATNSimulator) failOrAccept(prevAccept *SimState, input *InputSt
return prevAccept.dfaState.prediction
} else {
// if no accept and EOF is first char, return EOF
if (t == TokenEOF && input.index == this.startIndex) {
if (t == TokenEOF && input.index() == this.startIndex) {
return TokenEOF
}
panic(NewLexerNoViableAltException(this.recog, input, this.startIndex, reach))
@ -289,7 +289,7 @@ func (this *LexerATNSimulator) failOrAccept(prevAccept *SimState, input *InputSt
// Given a starting configuration set, figure out all ATN configurations
// we can reach upon input {@code t}. Parameter {@code reach} is a return
// parameter.
func (this *LexerATNSimulator) getReachableConfigSet(input *InputStream, closure *ATNConfigSet, reach *ATNConfigSet, t int) {
func (this *LexerATNSimulator) getReachableConfigSet(input CharStream, closure *ATNConfigSet, reach *ATNConfigSet, t int) {
// this is used to skip processing for configs which have a lower priority
// than a config that already reached an accept state for the same rule
var skipAlt = ATNINVALID_ALT_NUMBER
@ -308,7 +308,7 @@ func (this *LexerATNSimulator) getReachableConfigSet(input *InputStream, closure
if (target != nil) {
var lexerActionExecutor = cfg.(*LexerATNConfig).lexerActionExecutor
if (lexerActionExecutor != nil) {
lexerActionExecutor = lexerActionExecutor.fixOffsetBeforeMatch(input.index - this.startIndex)
lexerActionExecutor = lexerActionExecutor.fixOffsetBeforeMatch(input.index() - this.startIndex)
}
var treatEofAsEpsilon = (t == TokenEOF)
var config = NewLexerATNConfig3(cfg.(*LexerATNConfig), target, lexerActionExecutor)
@ -323,7 +323,7 @@ func (this *LexerATNSimulator) getReachableConfigSet(input *InputStream, closure
}
}
func (this *LexerATNSimulator) accept(input *InputStream, lexerActionExecutor *LexerActionExecutor, startIndex, index, line, charPos int) {
func (this *LexerATNSimulator) accept(input CharStream, lexerActionExecutor *LexerActionExecutor, startIndex, index, line, charPos int) {
if (LexerATNSimulatordebug) {
fmt.Println("ACTION %s\n", lexerActionExecutor)
}
@ -344,7 +344,7 @@ func (this *LexerATNSimulator) getReachableTarget(trans ITransition, t int) IATN
}
}
func (this *LexerATNSimulator) computeStartState(input *InputStream, p IATNState ) *OrderedATNConfigSet {
func (this *LexerATNSimulator) computeStartState(input CharStream, p IATNState ) *OrderedATNConfigSet {
var configs = NewOrderedATNConfigSet()
for i := 0; i < len(p.getTransitions()); i++ {
@ -363,7 +363,7 @@ func (this *LexerATNSimulator) computeStartState(input *InputStream, p IATNState
//
// @return {@code true} if an accept state is reached, otherwise
// {@code false}.
func (this *LexerATNSimulator) closure(input *InputStream, config *LexerATNConfig, configs *ATNConfigSet,
func (this *LexerATNSimulator) closure(input CharStream, config *LexerATNConfig, configs *ATNConfigSet,
currentAltReachedAcceptState, speculative, treatEofAsEpsilon bool) bool {
if (LexerATNSimulatordebug) {
@ -418,7 +418,7 @@ func (this *LexerATNSimulator) closure(input *InputStream, config *LexerATNConfi
}
// side-effect: can alter configs.hasSemanticContext
func (this *LexerATNSimulator) getEpsilonTarget(input *InputStream, config *LexerATNConfig, trans ITransition,
func (this *LexerATNSimulator) getEpsilonTarget(input CharStream, config *LexerATNConfig, trans ITransition,
configs *ATNConfigSet, speculative, treatEofAsEpsilon bool) *LexerATNConfig {
var cfg *LexerATNConfig
@ -513,7 +513,7 @@ func (this *LexerATNSimulator) getEpsilonTarget(input *InputStream, config *Lexe
// @return {@code true} if the specified predicate evaluates to
// {@code true}.
// /
func (this *LexerATNSimulator) evaluatePredicate(input *InputStream, ruleIndex, predIndex int, speculative bool) bool {
func (this *LexerATNSimulator) evaluatePredicate(input CharStream, ruleIndex, predIndex int, speculative bool) bool {
// assume true if no recognizer was provided
if (this.recog == nil) {
return true
@ -523,7 +523,7 @@ func (this *LexerATNSimulator) evaluatePredicate(input *InputStream, ruleIndex,
}
var savedcolumn = this.column
var savedLine = this.line
var index = input.index
var index = input.index()
var marker = input.mark()
defer func(){
@ -537,8 +537,8 @@ func (this *LexerATNSimulator) evaluatePredicate(input *InputStream, ruleIndex,
return this.recog.sempred(nil, ruleIndex, predIndex)
}
func (this *LexerATNSimulator) captureSimState(settings *SimState, input *InputStream, dfaState *DFAState) {
settings.index = input.index
func (this *LexerATNSimulator) captureSimState(settings *SimState, input CharStream, dfaState *DFAState) {
settings.index = input.index()
settings.line = this.line
settings.column = this.column
settings.dfaState = dfaState
@ -626,12 +626,12 @@ func (this *LexerATNSimulator) getDFA(mode int) *DFA {
}
// Get the text matched so far for the current token.
func (this *LexerATNSimulator) getText(input *InputStream) string {
func (this *LexerATNSimulator) getText(input CharStream) string {
// index is first lookahead char, don't include.
return input.getText(this.startIndex, input.index - 1)
return input.getTextFromInterval(NewInterval(this.startIndex, input.index() - 1))
}
func (this *LexerATNSimulator) consume(input *InputStream) {
func (this *LexerATNSimulator) consume(input CharStream) {
var curChar = input.LA(1)
if (curChar == int('\n')) {
this.line += 1

View File

@ -128,9 +128,9 @@ func (this *LexerActionExecutor) fixOffsetBeforeMatch(offset int) *LexerActionEx
// {@link IntStream//seek} to set the {@code input} position to the beginning
// of the token.
// /
func (this *LexerActionExecutor) execute(lexer *Lexer, input *InputStream, startIndex int) {
func (this *LexerActionExecutor) execute(lexer *Lexer, input CharStream, startIndex int) {
var requiresSeek = false
var stopIndex = input.index
var stopIndex = input.index()
defer func(){
if (requiresSeek) {

View File

@ -14,46 +14,51 @@ func NewTraceListener(parser *Parser) *TraceListener {
return tl
}
func (this *TraceListener) enterEveryRule(ctx *ParserRuleContext) {
fmt.Println("enter " + this.parser.getRuleNames()[ctx.ruleIndex] + ", LT(1)=" + this.parser._input.LT(1).text())
func (this *TraceListener) visitErrorNode(_ ErrorNode) {
}
func (this *TraceListener) enterEveryRule(ctx IParserRuleContext) {
fmt.Println("enter " + this.parser.getRuleNames()[ctx.getRuleIndex()] + ", LT(1)=" + this.parser._input.LT(1).text())
}
func (this *TraceListener) visitTerminal( node TerminalNode ) {
fmt.Println("consume " + fmt.Sprint(node.getSymbol()) + " rule " + this.parser.getRuleNames()[this.parser._ctx.ruleIndex])
fmt.Println("consume " + fmt.Sprint(node.getSymbol()) + " rule " + this.parser.getRuleNames()[this.parser._ctx.getRuleIndex()])
}
func (this *TraceListener) exitEveryRule(ctx *ParserRuleContext) {
fmt.Println("exit " + this.parser.getRuleNames()[ctx.ruleIndex] + ", LT(1)=" + this.parser._input.LT(1).text())
func (this *TraceListener) exitEveryRule(ctx IParserRuleContext) {
fmt.Println("exit " + this.parser.getRuleNames()[ctx.getRuleIndex()] + ", LT(1)=" + this.parser._input.LT(1).text())
}
type IParser interface {
IRecognizer
getInterpreter() *ParserATNSimulator
getInputStream() *InputStream
consume()
getInputStream() CharStream
consume() *Token
getCurrentToken() *Token
getTokenStream() TokenStream
getTokenFactory() TokenFactory
getLiteralNames() []string
getSymbolicNames() []string
getExpectedTokens() *IntervalSet
getParserRuleContext() *ParserRuleContext
getParserRuleContext() IParserRuleContext
notifyErrorListeners(msg string, offendingToken *Token, err IRecognitionException)
isExpectedToken(symbol int) bool
getPrecedence() int
getRuleInvocationStack(IParserRuleContext) []string
}
type Parser struct {
Recognizer
*Recognizer
_input TokenStream
_errHandler IErrorStrategy
_precedenceStack IntStack
_ctx *ParserRuleContext
_ctx IParserRuleContext
buildParseTrees bool
_tracer *TraceListener
_parseListeners []*ParseTreeListener
_parseListeners []ParseTreeListener
_syntaxErrors int
_interp *ParserATNSimulator
@ -189,13 +194,13 @@ func (p *Parser) matchWildcard() *Token {
return t
}
func (p *Parser) getParserRuleContext() *ParserRuleContext {
func (p *Parser) getParserRuleContext() IParserRuleContext {
return p._ctx
}
func (p *Parser) getParseListeners() []*ParseTreeListener {
func (p *Parser) getParseListeners() []ParseTreeListener {
if (p._parseListeners == nil){
return make([]*ParseTreeListener,0)
return make([]ParseTreeListener,0)
}
return p._parseListeners
}
@ -228,12 +233,12 @@ func (p *Parser) getParseListeners() []*ParseTreeListener {
//
// @panics nilPointerException if {@code} listener is {@code nil}
//
func (p *Parser) addParseListener(listener *ParseTreeListener) {
func (p *Parser) addParseListener(listener ParseTreeListener) {
if (listener == nil) {
panic("listener")
}
if (p._parseListeners == nil) {
p._parseListeners = make([]*ParseTreeListener, 0)
p._parseListeners = make([]ParseTreeListener, 0)
}
p._parseListeners = append(p._parseListeners, listener)
}
@ -245,7 +250,7 @@ func (p *Parser) addParseListener(listener *ParseTreeListener) {
// listener, p.method does nothing.</p>
// @param listener the listener to remove
//
func (p *Parser) removeParseListener(listener *ParseTreeListener) {
func (p *Parser) removeParseListener(listener ParseTreeListener) {
panic("Not implemented!")
// if (p._parseListeners != nil) {
// var idx = p._parseListeners.indexOf(listener)
@ -268,7 +273,7 @@ func (p *Parser) triggerEnterRuleEvent() {
if (p._parseListeners != nil) {
var ctx = p._ctx
for _,listener := range p._parseListeners {
(*listener).enterEveryRule(ctx)
listener.enterEveryRule(ctx)
ctx.enterRule(listener)
}
}
@ -288,7 +293,7 @@ func (p *Parser) triggerExitRuleEvent() {
for i := range p._parseListeners {
listener := p._parseListeners[l-i]
ctx.exitRule(listener)
(*listener).exitEveryRule(ctx)
listener.exitEveryRule(ctx)
}
}
}
@ -374,8 +379,8 @@ func (p *Parser) compileParseTreePattern(pattern, patternRuleIndex, lexer ILexer
// return m.compile(pattern, patternRuleIndex)
}
func (p *Parser) getInputStream() *InputStream {
return p.getTokenStream().(*InputStream)
func (p *Parser) getInputStream() CharStream {
return p.getTokenStream().(CharStream)
}
func (p *Parser) setInputStream(input TokenStream) {
@ -401,8 +406,6 @@ func (p *Parser) getCurrentToken() *Token {
}
func (p *Parser) notifyErrorListeners(msg string, offendingToken *Token, err IRecognitionException) {
offendingToken = offendingToken || nil
err = err || nil
if (offendingToken == nil) {
offendingToken = p.getCurrentToken()
}
@ -413,64 +416,46 @@ func (p *Parser) notifyErrorListeners(msg string, offendingToken *Token, err IRe
listener.syntaxError(p, offendingToken, line, column, msg, err)
}
//
// Consume and return the {@linkplain //getCurrentToken current symbol}.
//
// <p>E.g., given the following input with {@code A} being the current
// lookahead symbol, p.func moves the cursor to {@code B} and returns
// {@code A}.</p>
//
// <pre>
// A B
// ^
// </pre>
//
// If the parser is not in error recovery mode, the consumed symbol is added
// to the parse tree using {@link ParserRuleContext//addChild(Token)}, and
// {@link ParseTreeListener//visitTerminal} is called on any parse listeners.
// If the parser <em>is</em> in error recovery mode, the consumed symbol is
// added to the parse tree using
// {@link ParserRuleContext//addErrorNode(Token)}, and
// {@link ParseTreeListener//visitErrorNode} is called on any parse
// listeners.
//
func (p *Parser) consume() {
func (p *Parser) consume() *Token {
var o = p.getCurrentToken()
if (o.tokenType != TokenEOF) {
p.getInputStream().consume()
}
var hasListener = p._parseListeners != nil && len(p._parseListeners) > 0
if (p.buildParseTrees || hasListener) {
var node *ErrorNodeImpl
if (p._errHandler.inErrorRecoveryMode(p)) {
node = p._ctx.addErrorNode(o)
var node = p._ctx.addErrorNode(o)
if (p._parseListeners != nil) {
for _, l := range p._parseListeners {
l.visitErrorNode(node);
}
}
} else {
node = p._ctx.addTokenNode(o)
}
node.invokingState = p.state
if (hasListener) {
for _, l := range p._parseListeners {
l.visitTerminal(node)
node := p._ctx.addTokenNode(o);
if (p._parseListeners != nil) {
for _, l := range p._parseListeners {
l.visitTerminal(node)
}
}
}
// node.invokingState = p.state
}
return o
}
func (p *Parser) addContextToParseTree() {
// add current context to parent if we have a parent
if (p._ctx.parentCtx != nil) {
p._ctx.parentCtx.children = append(p._ctx.parentCtx.children, p._ctx)
if (p._ctx.getParent() != nil) {
p._ctx.getParent().setChildren( append(p._ctx.getParent().getChildren(), p._ctx) )
}
}
// Always called by generated parsers upon entry to a rule. Access field
// {@link //_ctx} get the current context.
func (p *Parser) enterRule(localctx *ParserRuleContext, state, ruleIndex int) {
func (p *Parser) enterRule(localctx IParserRuleContext, state, ruleIndex int) {
p.state = state
p._ctx = localctx
p._ctx.start = p._input.LT(1)
p._ctx.setStart( p._input.LT(1) )
if (p.buildParseTrees) {
p.addContextToParseTree()
}
@ -480,22 +465,22 @@ func (p *Parser) enterRule(localctx *ParserRuleContext, state, ruleIndex int) {
}
func (p *Parser) exitRule() {
p._ctx.stop = p._input.LT(-1)
p._ctx.setStop( p._input.LT(-1) )
// trigger event on _ctx, before it reverts to parent
if (p._parseListeners != nil) {
p.triggerExitRuleEvent()
}
p.state = p._ctx.invokingState
p._ctx = p._ctx.parentCtx
p.state = p._ctx.getInvokingState()
p._ctx = p._ctx.getParent().(IParserRuleContext)
}
func (p *Parser) enterOuterAlt(localctx *ParserRuleContext, altNum int) {
func (p *Parser) enterOuterAlt(localctx IParserRuleContext, altNum int) {
// if we have Newlocalctx, make sure we replace existing ctx
// that is previous child of parse tree
if (p.buildParseTrees && p._ctx != localctx) {
if (p._ctx.parentCtx != nil) {
p._ctx.parentCtx.removeLastChild()
p._ctx.parentCtx.addChild(localctx)
if (p._ctx.getParent() != nil) {
p._ctx.getParent().(IParserRuleContext).removeLastChild()
p._ctx.getParent().(IParserRuleContext).addChild(localctx)
}
}
p._ctx = localctx
@ -514,11 +499,11 @@ func (p *Parser) getPrecedence() int {
}
}
func (p *Parser) enterRecursionRule(localctx *ParserRuleContext, state, ruleIndex, precedence int) {
func (p *Parser) enterRecursionRule(localctx IParserRuleContext, state, ruleIndex, precedence int) {
p.state = state
p._precedenceStack.Push(precedence)
p._ctx = localctx
p._ctx.start = p._input.LT(1)
p._ctx.setStart( p._input.LT(1) )
if (p._parseListeners != nil) {
p.triggerEnterRuleEvent() // simulates rule entry for
// left-recursive rules
@ -528,14 +513,14 @@ func (p *Parser) enterRecursionRule(localctx *ParserRuleContext, state, ruleInde
//
// Like {@link //enterRule} but for recursive rules.
func (p *Parser) pushNewRecursionContext(localctx *ParserRuleContext, state, ruleIndex int) {
func (p *Parser) pushNewRecursionContext(localctx IParserRuleContext, state, ruleIndex int) {
var previous = p._ctx
previous.parentCtx = localctx
previous.invokingState = state
previous.stop = p._input.LT(-1)
previous.setParent( localctx )
previous.setInvokingState( state )
previous.setStart( p._input.LT(-1) )
p._ctx = localctx
p._ctx.start = previous.start
p._ctx.setStart( previous.getStart() )
if (p.buildParseTrees) {
p._ctx.addChild(previous)
}
@ -545,43 +530,43 @@ func (p *Parser) pushNewRecursionContext(localctx *ParserRuleContext, state, rul
}
}
func (p *Parser) unrollRecursionContexts(parentCtx *ParserRuleContext) {
func (p *Parser) unrollRecursionContexts(parentCtx IParserRuleContext) {
p._precedenceStack.Pop()
p._ctx.stop = p._input.LT(-1)
p._ctx.setStop( p._input.LT(-1) )
var retCtx = p._ctx // save current ctx (return value)
// unroll so _ctx is as it was before call to recursive method
if (p._parseListeners != nil) {
for (p._ctx != parentCtx) {
p.triggerExitRuleEvent()
p._ctx = p._ctx.parentCtx
p._ctx = p._ctx.getParent().(IParserRuleContext)
}
} else {
p._ctx = parentCtx
}
// hook into tree
retCtx.parentCtx = parentCtx
retCtx.setParent( parentCtx )
if (p.buildParseTrees && parentCtx != nil) {
// add return ctx into invoking rule's tree
parentCtx.addChild(retCtx)
}
}
func (p *Parser) getInvokingContext(ruleIndex int) *ParserRuleContext {
func (p *Parser) getInvokingContext(ruleIndex int) IParserRuleContext {
var ctx = p._ctx
for (ctx != nil) {
if (ctx.ruleIndex == ruleIndex) {
if (ctx.getRuleIndex() == ruleIndex) {
return ctx
}
ctx = ctx.parentCtx
ctx = ctx.getParent().(IParserRuleContext)
}
return nil
}
func (p *Parser) precpred(localctx, precedence int) {
func (p *Parser) precpred(localctx IRuleContext, precedence int) bool {
return precedence >= p._precedenceStack[ len(p._precedenceStack) -1]
}
func (p *Parser) inContext(context *ParserRuleContext) bool {
func (p *Parser) inContext(context IParserRuleContext) bool {
// TODO: useful in parser?
return false
}
@ -611,14 +596,14 @@ func (p *Parser) isExpectedToken(symbol int) bool {
if (!following.contains(TokenEpsilon)) {
return false
}
for (ctx != nil && ctx.invokingState >= 0 && following.contains(TokenEpsilon)) {
var invokingState = atn.states[ctx.invokingState]
for (ctx != nil && ctx.getInvokingState() >= 0 && following.contains(TokenEpsilon)) {
var invokingState = atn.states[ctx.getInvokingState()]
var rt = invokingState.getTransitions()[0]
following = atn.nextTokens(rt.(*RuleTransition).followState,nil)
if (following.contains(symbol)) {
return true
}
ctx = ctx.parentCtx
ctx = ctx.getParent().(IParserRuleContext)
}
if (following.contains(TokenEpsilon) && symbol == TokenEOF) {
return true
@ -645,8 +630,8 @@ func (p *Parser) getExpectedTokensWithinCurrentRule() *IntervalSet {
// Get a rule's index (i.e., {@code RULE_ruleName} field) or -1 if not found.//
func (p *Parser) getRuleIndex(ruleName string) int {
var ruleIndex = p.getRuleIndexMap()[ruleName]
if (ruleIndex != nil) {
var ruleIndex, ok = p.getRuleIndexMap()[ruleName]
if (ok) {
return ruleIndex
} else {
return -1
@ -660,20 +645,20 @@ func (p *Parser) getRuleIndex(ruleName string) int {
//
// this very useful for error messages.
func (this *Parser) getRuleInvocationStack(p *ParserRuleContext) []string {
func (this *Parser) getRuleInvocationStack(p IParserRuleContext) []string {
if (p == nil) {
p = this._ctx;
}
var stack = make([]string)
var stack = make([]string,0)
for (p != nil) {
// compute what follows who invoked us
var ruleIndex = p.ruleIndex;
var ruleIndex = p.getRuleIndex();
if (ruleIndex < 0) {
stack = append(stack, "n/a")
} else {
stack = append(stack, this.getRuleNames()[ruleIndex]);
}
p = p.parentCtx;
p = p.getParent().(IParserRuleContext);
}
return stack;
};
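
Note: two small compile-level fixes in the Parser hunks above are worth calling out: a map[string]int lookup cannot be compared to nil, so getRuleIndex now uses the comma-ok form, and make([]string) gains the length argument a slice requires (make([]string, 0)). A short illustrative sketch of both idioms:

package main

import "fmt"

// getRuleIndex mirrors the fixed lookup: the second return value reports
// whether the key was present, since an int value can never be nil.
func getRuleIndex(ruleIndexMap map[string]int, ruleName string) int {
	ruleIndex, ok := ruleIndexMap[ruleName]
	if ok {
		return ruleIndex
	}
	return -1
}

func main() {
	ruleIndexMap := map[string]int{"expr": 0, "stat": 1}
	fmt.Println(getRuleIndex(ruleIndexMap, "stat"))  // 1
	fmt.Println(getRuleIndex(ruleIndexMap, "bogus")) // -1

	// Slices need a length in make; the commit changes make([]string) to this.
	stack := make([]string, 0)
	stack = append(stack, "n/a")
	fmt.Println(stack)
}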

View File

@ -2,21 +2,24 @@ package antlr4
import (
"fmt"
"strconv"
"strings"
)
type ParserATNSimulator struct {
ATNSimulator
parser *Parser
*ATNSimulator
parser IParser
predictionMode int
_input *TokenStream
_input TokenStream
_startIndex int
_dfa *DFA
decisionToDFA []*DFA
mergeCache DoubleDict
_outerContext *ParserRuleContext
mergeCache *DoubleDict
_outerContext IParserRuleContext
}
func NewParserATNSimulator(parser *Parser, atn *ATN, decisionToDFA []*DFA, sharedContextCache *PredictionContextCache) *ParserATNSimulator {
func NewParserATNSimulator(parser IParser, atn *ATN, decisionToDFA []*DFA, sharedContextCache *PredictionContextCache) *ParserATNSimulator {
this := new(ParserATNSimulator)
@ -25,7 +28,7 @@ func NewParserATNSimulator(parser *Parser, atn *ATN, decisionToDFA []*DFA, share
return this
}
func (this *ParserATNSimulator) InitParserATNSimulator(parser *Parser, atn *ATN, decisionToDFA []*DFA, sharedContextCache *PredictionContextCache) {
func (this *ParserATNSimulator) InitParserATNSimulator(parser IParser, atn *ATN, decisionToDFA []*DFA, sharedContextCache *PredictionContextCache) {
this.InitATNSimulator(atn, sharedContextCache)
@ -58,13 +61,13 @@ var ParserATNSimulatorprototyperetry_debug = false
func (this *ParserATNSimulator) reset() {
}
func (this *ParserATNSimulator) adaptivePredict(input TokenStream, decision int, outerContext *ParserRuleContext) int {
func (this *ParserATNSimulator) adaptivePredict(input TokenStream, decision int, outerContext IParserRuleContext) int {
if (ParserATNSimulatorprototypedebug || ParserATNSimulatorprototypedebug_list_atn_decisions) {
fmt.Println("adaptivePredict decision " + decision +
fmt.Println("adaptivePredict decision " + strconv.Itoa(decision) +
" exec LA(1)==" + this.getLookaheadName(input) +
" line " + input.LT(1).line + ":" +
input.LT(1).column)
" line " + strconv.Itoa(input.LT(1).line) + ":" +
strconv.Itoa( input.LT(1).column) )
}
this._input = input
@ -73,7 +76,7 @@ func (this *ParserATNSimulator) adaptivePredict(input TokenStream, decision int,
var dfa = this.decisionToDFA[decision]
this._dfa = dfa
var m = input.mark(-1)
var m = input.mark()
var index = input.index()
defer func(){
@ -100,7 +103,7 @@ func (this *ParserATNSimulator) adaptivePredict(input TokenStream, decision int,
outerContext = RuleContextEMPTY
}
if (ParserATNSimulatorprototypedebug || ParserATNSimulatorprototypedebug_list_atn_decisions) {
fmt.Println("predictATN decision " + dfa.decision +
fmt.Println("predictATN decision " + strconv.Itoa(dfa.decision) +
" exec LA(1)==" + this.getLookaheadName(input) +
", outerContext=" + outerContext.toString(this.parser.getRuleNames(), nil))
}
@ -108,8 +111,9 @@ func (this *ParserATNSimulator) adaptivePredict(input TokenStream, decision int,
// to determine if this ATN start state is the decision for the
// closure block that determines whether a precedence rule
// should continue or complete.
//
t, ok := dfa.atnStartState.(*StarLoopEntryState)
var t2 IATNState = dfa.atnStartState
t, ok := t2.(*StarLoopEntryState)
if (!dfa.precedenceDfa && ok) {
if (t.precedenceRuleDecision) {
dfa.setPrecedenceDfa(true)
@ -126,16 +130,16 @@ func (this *ParserATNSimulator) adaptivePredict(input TokenStream, decision int,
// than simply setting DFA.s0.
//
s0_closure = this.applyPrecedenceFilter(s0_closure)
s0 = this.addDFAState(dfa, NewDFAState(nil, s0_closure))
s0 = this.addDFAState(dfa, NewDFAState(-1, s0_closure))
dfa.setPrecedenceStartState(this.parser.getPrecedence(), s0)
} else {
s0 = this.addDFAState(dfa, NewDFAState(nil, s0_closure))
s0 = this.addDFAState(dfa, NewDFAState(-1, s0_closure))
dfa.s0 = s0
}
}
var alt = this.execATN(dfa, s0, input, index, outerContext)
if (ParserATNSimulatorprototypedebug) {
fmt.Println("DFA after predictATN: " + dfa.toString(this.parser.literalNames, nil))
fmt.Println("DFA after predictATN: " + dfa.toString(this.parser.getLiteralNames(), nil))
}
return alt
@ -171,24 +175,24 @@ func (this *ParserATNSimulator) adaptivePredict(input TokenStream, decision int,
// conflict
// conflict + preds
//
func (this *ParserATNSimulator) execATN(dfa *DFA, s0 *DFAState, input TokenStream, startIndex int, outerContext *ParserRuleContext ) int {
func (this *ParserATNSimulator) execATN(dfa *DFA, s0 *DFAState, input TokenStream, startIndex int, outerContext IParserRuleContext ) int {
if (ParserATNSimulatorprototypedebug || ParserATNSimulatorprototypedebug_list_atn_decisions) {
fmt.Println("execATN decision " + dfa.decision +
fmt.Println("execATN decision " + strconv.Itoa(dfa.decision) +
" exec LA(1)==" + this.getLookaheadName(input) +
" line " + input.LT(1).line + ":" + input.LT(1).column)
" line " + strconv.Itoa(input.LT(1).line) + ":" + strconv.Itoa(input.LT(1).column))
}
var previousD = s0
if (ParserATNSimulatorprototypedebug) {
fmt.Println("s0 = " + s0)
fmt.Println("s0 = " + s0.toString())
}
var t = input.LA(1)
for(true) { // for more work
var D = this.getExistingTargetState(previousD *DFAState, t)
var D = this.getExistingTargetState(previousD, t)
if(D==nil) {
D = this.computeTargetState(dfa, previousD *DFAState, t)
D = this.computeTargetState(dfa, previousD, t)
}
if(D==ATNSimulatorERROR) {
// if any configs in previous dipped into outer context, that
@ -221,7 +225,7 @@ func (this *ParserATNSimulator) execATN(dfa *DFA, s0 *DFAState, input TokenStrea
input.seek(startIndex)
}
conflictingAlts = this.evalSemanticContext(D.predicates, outerContext, true)
if (len(conflictingAlts)==1) {
if (conflictingAlts.length()==1) {
if(ParserATNSimulatorprototypedebug) {
fmt.Println("Full LL avoided")
}
@ -234,7 +238,7 @@ func (this *ParserATNSimulator) execATN(dfa *DFA, s0 *DFAState, input TokenStrea
}
}
if (ParserATNSimulatorprototypedfa_debug) {
fmt.Println("ctx sensitive state " + outerContext +" in " + D)
fmt.Println("ctx sensitive state " + outerContext.toString(nil,nil) +" in " + D.toString())
}
var fullCtx = true
var s0_closure = this.computeStartState(dfa.atnStartState, outerContext, fullCtx)
@ -249,9 +253,9 @@ func (this *ParserATNSimulator) execATN(dfa *DFA, s0 *DFAState, input TokenStrea
var stopIndex = input.index()
input.seek(startIndex)
var alts = this.evalSemanticContext(D.predicates, outerContext, true)
if (len(alts)==0) {
if (alts.length()==0) {
panic(this.noViableAlt(input, outerContext, D.configs, startIndex))
} else if (len(alts)==1) {
} else if (alts.length()==1) {
return alts.minValue()
} else {
// report ambiguity after predicate evaluation to make sure the correct set of ambig alts is reported.
@ -304,23 +308,23 @@ func (this *ParserATNSimulator) computeTargetState(dfa *DFA, previousD *DFAState
var reach = this.computeReachSet(previousD.configs, t, false)
if(reach==nil) {
this.addDFAEdge(dfa, previousD *DFAState, t, ATNSimulatorERROR)
this.addDFAEdge(dfa, previousD, t, ATNSimulatorERROR)
return ATNSimulatorERROR
}
// create Newtarget state we'll add to DFA after it's complete
var D = NewDFAState(nil, reach)
var D = NewDFAState(-1, reach)
var predictedAlt = this.getUniqueAlt(reach)
if (ParserATNSimulatorprototypedebug) {
var altSubSets = PredictionModegetConflictingAltSubsets(reach)
fmt.Println("SLL altSubSets=" + fmt.Sprint(altSubSets) +
", previous=" + previousD.configs +
", configs=" + reach +
", predict=" + predictedAlt +
", previous=" + previousD.configs.toString() +
", configs=" + reach.toString() +
", predict=" + strconv.Itoa(predictedAlt) +
", allSubsetsConflict=" +
PredictionModeallSubsetsConflict(altSubSets) + ", conflictingAlts=" +
this.getConflictingAlts(reach))
fmt.Sprint(PredictionModeallSubsetsConflict(altSubSets)) +
", conflictingAlts=" + this.getConflictingAlts(reach).toString())
}
if (predictedAlt!=ATNINVALID_ALT_NUMBER) {
// NO CONFLICT, UNIQUELY PREDICTED ALT
@ -342,14 +346,14 @@ func (this *ParserATNSimulator) computeTargetState(dfa *DFA, previousD *DFAState
}
}
// all adds to dfa are done after we've created full D state
D = this.addDFAEdge(dfa, previousD *DFAState, t, D)
D = this.addDFAEdge(dfa, previousD, t, D)
return D
}
func (this *ParserATNSimulator) predicateDFAState(dfaState *DFAState, decisionState *DecisionState) {
// We need to test all predicates, even in DFA states that
// uniquely predict alternative.
var nalts = len(decisionState.transitions)
var nalts = len(decisionState.getTransitions())
// Update DFA so reach becomes accept state with (predicate,alt)
// pairs if preds found for conflicting alts
var altsToCollectPredsFrom = this.getConflictingAltsOrUniqueAlt(dfaState.configs)
@ -366,10 +370,10 @@ func (this *ParserATNSimulator) predicateDFAState(dfaState *DFAState, decisionSt
}
// comes back with reach.uniqueAlt set to a valid alt
func (this *ParserATNSimulator) execATNWithFullContext(dfa *DFA, D *DFAState, s0 *ATNConfigSet, input TokenStream, startIndex int, outerContext *ParserRuleContext) int{
func (this *ParserATNSimulator) execATNWithFullContext(dfa *DFA, D *DFAState, s0 *ATNConfigSet, input TokenStream, startIndex int, outerContext IParserRuleContext) int{
if (ParserATNSimulatorprototypedebug || ParserATNSimulatorprototypedebug_list_atn_decisions) {
fmt.Println("execATNWithFullContext "+s0)
fmt.Println("execATNWithFullContext "+ s0.toString())
}
var fullCtx = true
@ -403,9 +407,9 @@ func (this *ParserATNSimulator) execATNWithFullContext(dfa *DFA, D *DFAState, s0
}
var altSubSets = PredictionModegetConflictingAltSubsets(reach)
if(ParserATNSimulatorprototypedebug) {
fmt.Println("LL altSubSets=" + altSubSets + ", predict=" +
PredictionModegetUniqueAlt(altSubSets) + ", resolvesToJustOneViableAlt=" +
PredictionModeresolvesToJustOneViableAlt(altSubSets))
fmt.Println("LL altSubSets=" + fmt.Sprint(altSubSets) + ", predict=" +
strconv.Itoa(PredictionModegetUniqueAlt(altSubSets)) + ", resolvesToJustOneViableAlt=" +
fmt.Sprint(PredictionModeresolvesToJustOneViableAlt(altSubSets)))
}
reach.uniqueAlt = this.getUniqueAlt(reach)
// unique prediction?
@ -476,7 +480,7 @@ func (this *ParserATNSimulator) execATNWithFullContext(dfa *DFA, D *DFAState, s0
func (this *ParserATNSimulator) computeReachSet(closure *ATNConfigSet, t int, fullCtx bool) *ATNConfigSet {
if (ParserATNSimulatorprototypedebug) {
fmt.Println("in computeReachSet, starting closure: " + closure)
fmt.Println("in computeReachSet, starting closure: " + closure.toString())
}
if( this.mergeCache==nil) {
this.mergeCache = NewDoubleDict()
@ -500,32 +504,32 @@ func (this *ParserATNSimulator) computeReachSet(closure *ATNConfigSet, t int, fu
var c = closure.configs[i]
if(ParserATNSimulatorprototypedebug) {
fmt.Println("testing " + this.getTokenName(t) + " at " + c)
fmt.Println("testing " + this.getTokenName(t) + " at " + c.toString())
}
_, ok := c.state.(*RuleStopState)
_, ok := c.getState().(*RuleStopState)
if (ok) {
if (fullCtx || t == TokenEOF) {
if (skippedStopStates==nil) {
skippedStopStates = make([]*ATNConfig)
skippedStopStates = make([]*ATNConfig, 0)
}
skippedStopStates = append(skippedStopStates, c)
skippedStopStates = append(skippedStopStates, c.(*ATNConfig))
if(ParserATNSimulatorprototypedebug) {
fmt.Println("added " + c + " to skippedStopStates")
fmt.Println("added " + c.toString() + " to skippedStopStates")
}
}
continue
}
for j:=0; j<len(c.state.transitions); j++ {
var trans = c.state.transitions[j]
for j:=0; j<len(c.getState().getTransitions()); j++ {
var trans = c.getState().getTransitions()[j]
var target = this.getReachableTarget(trans, t)
if (target!=nil) {
var cfg = NewATNConfig4(target, c)
var cfg = NewATNConfig4(c, target)
intermediate.add(cfg, this.mergeCache)
if(ParserATNSimulatorprototypedebug) {
fmt.Println("added " + cfg + " to intermediate")
fmt.Println("added " + cfg.toString() + " to intermediate")
}
}
}
@ -633,30 +637,30 @@ func (this *ParserATNSimulator) removeAllConfigsNotInRuleStopState(configs *ATNC
for i:=0; i<len(configs.configs); i++ {
var config = configs.configs[i]
_, ok := config.state.(*RuleStopState)
_, ok := config.getState().(*RuleStopState)
if (ok) {
result.add(config, this.mergeCache)
continue
}
if (lookToEndOfRule && config.state.epsilonOnlyTransitions) {
var nextTokens = this.atn.nextTokens(config.state)
if (lookToEndOfRule && config.getState().getEpsilonOnlyTransitions()) {
var nextTokens = this.atn.nextTokens(config.getState(), nil)
if (nextTokens.contains(TokenEpsilon)) {
var endOfRuleState = this.atn.ruleToStopState[config.state.ruleIndex]
result.add(NewATNConfig4(endOfRuleState, config), this.mergeCache)
var endOfRuleState = this.atn.ruleToStopState[config.getState().getRuleIndex()]
result.add(NewATNConfig4(config, endOfRuleState), this.mergeCache)
}
}
}
return result
}
func (this *ParserATNSimulator) computeStartState(p IATNState, ctx *RuleContext, fullCtx bool) *ATNConfigSet {
func (this *ParserATNSimulator) computeStartState(p IATNState, ctx IRuleContext, fullCtx bool) *ATNConfigSet {
// always at least the implicit call to start rule
var initialContext = predictionContextFromRuleContext(this.atn, ctx)
var configs = NewATNConfigSet(fullCtx)
for i:=0; i<len(p.transitions); i++ {
var target = p.transitions[i].target
var c = NewATNConfig(target, i+1, initialContext, nil)
for i:=0; i<len(p.getTransitions()); i++ {
var target = p.getTransitions()[i].getTarget()
var c = NewATNConfig5(target, i+1, initialContext, nil)
var closureBusy = NewSet(nil,nil)
this.closure(c, configs, closureBusy, true, fullCtx, false)
}
@ -727,33 +731,33 @@ func (this *ParserATNSimulator) applyPrecedenceFilter(configs *ATNConfigSet) *AT
for i:=0; i<len(configs.configs); i++ {
config := configs.configs[i]
// handle alt 1 first
if (config.alt != 1) {
if (config.getAlt() != 1) {
continue
}
var updatedContext = config.semanticContext.evalPrecedence(this.parser, this._outerContext)
var updatedContext = config.getSemanticContext().evalPrecedence(this.parser, this._outerContext)
if (updatedContext==nil) {
// the configuration was eliminated
continue
}
statesFromAlt1[config.state.stateNumber] = config.context
if (updatedContext != config.semanticContext) {
configSet.add(NewATNConfig4(updatedContext, config), this.mergeCache)
statesFromAlt1[config.getState().getStateNumber()] = config.getContext()
if (updatedContext != config.getSemanticContext()) {
configSet.add(NewATNConfig2(config, updatedContext), this.mergeCache)
} else {
configSet.add(config, this.mergeCache)
}
}
for i:=0; i<len(configs.configs); i++ {
config := configs.configs[i]
if (config.alt == 1) {
if (config.getAlt() == 1) {
// already handled
continue
}
// In the future, this elimination step could be updated to also
// filter the prediction context for alternatives predicting alt>1
// (basically a graph subtraction algorithm).
if (!config.precedenceFilterSuppressed) {
var context = statesFromAlt1[config.state.stateNumber]
if (context!=nil && context.equals(config.context)) {
if (!config.getPrecedenceFilterSuppressed()) {
var context = statesFromAlt1[config.getState().getStateNumber()]
if (context!=nil && context.equals(config.getContext())) {
// eliminated
continue
}
@ -765,7 +769,7 @@ func (this *ParserATNSimulator) applyPrecedenceFilter(configs *ATNConfigSet) *AT
func (this *ParserATNSimulator) getReachableTarget(trans ITransition, ttype int) IATNState {
if (trans.matches(ttype, 0, this.atn.maxTokenType)) {
return trans.target
return trans.getTarget()
} else {
return nil
}
@ -776,13 +780,13 @@ func (this *ParserATNSimulator) getPredsForAmbigAlts(ambigAlts *BitSet, configs
var altToPred = make([]SemanticContext, nalts + 1)
for i:=0; i<len(configs.configs); i++ {
var c = configs.configs[i]
if(ambigAlts.contains( c.alt )) {
altToPred[c.alt] = SemanticContextorContext(altToPred[c.alt] || nil, c.semanticContext)
if(ambigAlts.contains( c.getAlt() )) {
altToPred[c.getAlt()] = SemanticContextorContext(altToPred[c.getAlt()], c.getSemanticContext())
}
}
var nPredAlts = 0
for i := 1; i< nalts+1; i++ {
var pred = altToPred[i] || nil
var pred = altToPred[i]
if (pred==nil) {
altToPred[i] = SemanticContextNONE
} else if (pred != SemanticContextNONE) {
@ -799,8 +803,8 @@ func (this *ParserATNSimulator) getPredsForAmbigAlts(ambigAlts *BitSet, configs
return altToPred
}
func (this *ParserATNSimulator) getPredicatePredictions(ambigAlts *BitSet, altToPred *[]SemanticContext) *PredPrediction {
var pairs =make([]*PredPrediction)
func (this *ParserATNSimulator) getPredicatePredictions(ambigAlts *BitSet, altToPred []SemanticContext) []*PredPrediction {
var pairs =make([]*PredPrediction, 0)
var containsPredicate = false
for i :=1; i<len(altToPred); i++ {
var pred = altToPred[i]
@ -864,7 +868,7 @@ func (this *ParserATNSimulator) getPredicatePredictions(ambigAlts *BitSet, altTo
// {@link ATN//INVALID_ALT_NUMBER} if a suitable alternative was not
// identified and {@link //adaptivePredict} should report an error instead.
//
func (this *ParserATNSimulator) getSynValidOrSemInvalidAltThatFinishedDecisionEntryRule(configs *ATNConfigSet, outerContext ParserRuleContext) int {
func (this *ParserATNSimulator) getSynValidOrSemInvalidAltThatFinishedDecisionEntryRule(configs *ATNConfigSet, outerContext IParserRuleContext) int {
var cfgs = this.splitAccordingToSemanticValidity(configs, outerContext)
var semValidConfigs = cfgs[0]
var semInvalidConfigs = cfgs[1]
@ -887,13 +891,13 @@ func (this *ParserATNSimulator) getAltThatFinishedDecisionEntryRule(configs *ATN
for i:=0; i<len(configs.configs); i++ {
var c = configs.configs[i]
_, ok := c.state.(*RuleStopState)
_, ok := c.getState().(*RuleStopState)
if (c.reachesIntoOuterContext>0 || (ok && c.context.hasEmptyPath())) {
alts.addOne(c.alt)
if (c.getReachesIntoOuterContext()>0 || (ok && c.getContext().hasEmptyPath())) {
alts.addOne(c.getAlt())
}
}
if (len(alts)==0) {
if (alts.length()==0) {
return ATNINVALID_ALT_NUMBER
} else {
return alts.first()
@ -912,14 +916,14 @@ type ATNConfigSetPair struct {
item0, item1 *ATNConfigSet
}
func (this *ParserATNSimulator) splitAccordingToSemanticValidity( configs *ATNConfigSet, outerContext *ParserRuleContext) []*ATNConfigSet {
func (this *ParserATNSimulator) splitAccordingToSemanticValidity( configs *ATNConfigSet, outerContext IParserRuleContext) []*ATNConfigSet {
var succeeded = NewATNConfigSet(configs.fullCtx)
var failed = NewATNConfigSet(configs.fullCtx)
for i:=0; i<len(configs.configs); i++ {
var c = configs.configs[i]
if (c.semanticContext != SemanticContextNONE) {
var predicateEvaluationResult = c.semanticContext.evaluate(this.parser, outerContext)
if (c.getSemanticContext() != SemanticContextNONE) {
var predicateEvaluationResult = c.getSemanticContext().evaluate(this.parser, outerContext)
if (predicateEvaluationResult) {
succeeded.add(c,nil)
} else {
@ -938,7 +942,7 @@ func (this *ParserATNSimulator) splitAccordingToSemanticValidity( configs *ATNCo
// then we stop at the first predicate that evaluates to true. This
// includes pairs with nil predicates.
//
func (this *ParserATNSimulator) evalSemanticContext(predPredictions []PredPrediction, outerContext *ParserRuleContext, complete bool) *BitSet {
func (this *ParserATNSimulator) evalSemanticContext(predPredictions []*PredPrediction, outerContext IParserRuleContext, complete bool) *BitSet {
var predictions = NewBitSet()
for i:=0; i<len(predPredictions); i++ {
var pair = predPredictions[i]
@ -951,11 +955,11 @@ func (this *ParserATNSimulator) evalSemanticContext(predPredictions []PredPredic
}
var predicateEvaluationResult = pair.pred.evaluate(this.parser, outerContext)
if (ParserATNSimulatorprototypedebug || ParserATNSimulatorprototypedfa_debug) {
fmt.Println("eval pred " + pair + "=" + predicateEvaluationResult)
fmt.Println("eval pred " + pair.toString() + "=" + fmt.Sprint(predicateEvaluationResult))
}
if (predicateEvaluationResult) {
if (ParserATNSimulatorprototypedebug || ParserATNSimulatorprototypedfa_debug) {
fmt.Println("PREDICT " + pair.alt)
fmt.Println("PREDICT " + fmt.Sprint(pair.alt))
}
predictions.add(pair.alt)
if (! complete) {
@ -973,51 +977,50 @@ func (this *ParserATNSimulator) evalSemanticContext(predPredictions []PredPredic
// ambig detection thought :(
//
func (this *ParserATNSimulator) closure(config *ATNConfig, configs *ATNConfigSet, closureBusy Set, collectPredicates, fullCtx, treatEofAsEpsilon bool) {
func (this *ParserATNSimulator) closure(config IATNConfig, configs *ATNConfigSet, closureBusy *Set, collectPredicates, fullCtx, treatEofAsEpsilon bool) {
var initialDepth = 0
this.closureCheckingStopState(config, configs, closureBusy, collectPredicates,
fullCtx, initialDepth, treatEofAsEpsilon)
}
func (this *ParserATNSimulator) closureCheckingStopState(config *ATNConfig, configs *ATNConfigSet, closureBusy Set, collectPredicates, fullCtx bool, depth int, treatEofAsEpsilon bool) {
func (this *ParserATNSimulator) closureCheckingStopState(config IATNConfig, configs *ATNConfigSet, closureBusy *Set, collectPredicates, fullCtx bool, depth int, treatEofAsEpsilon bool) {
if (ParserATNSimulatorprototypedebug) {
fmt.Println("closure(" + config.toString() + ")") //config.toString(this.parser,true) + ")")
fmt.Println("configs(" + configs.toString() + ")")
if(config.reachesIntoOuterContext>50) {
if(config.getReachesIntoOuterContext()>50) {
panic("problem")
}
}
_, ok := config.state.(*RuleStopState)
_, ok := config.getState().(*RuleStopState)
if (ok) {
// We hit rule end. If we have context info, use it
// run thru all possible stack tops in ctx
if (!config.context.isEmpty()) {
for i :=0; i<len(config.context); i++ {
if (config.context.getReturnState(i) == PredictionContextEMPTY_RETURN_STATE) {
if (!config.getContext().isEmpty()) {
for i :=0; i< config.getContext().length(); i++ {
if (config.getContext().getReturnState(i) == PredictionContextEMPTY_RETURN_STATE) {
if (fullCtx) {
configs.add(NewATNConfig1(config, config.state, PredictionContextEMPTY), this.mergeCache)
configs.add(NewATNConfig1(config, config.getState(), PredictionContextEMPTY), this.mergeCache)
continue
} else {
// we have no context info, just chase follow links (if greedy)
if (ParserATNSimulatorprototypedebug) {
fmt.Println("FALLING off rule " + this.getRuleName(config.state.ruleIndex))
fmt.Println("FALLING off rule " + this.getRuleName(config.getState().getRuleIndex()))
}
this.closure_(config, configs, closureBusy, collectPredicates,
fullCtx, depth, treatEofAsEpsilon)
this.closure_(config, configs, closureBusy, collectPredicates, fullCtx, depth, treatEofAsEpsilon)
}
continue
}
returnState := this.atn.states[config.context.getReturnState(i)]
newContext := config.context.getParent(i) // "pop" return state
returnState := this.atn.states[config.getContext().getReturnState(i)]
newContext := config.getContext().getParent(i) // "pop" return state
c := NewATNConfig(returnState,config.alt, newContext, config.semanticContext)
c := NewATNConfig5(returnState, config.getAlt(), newContext, config.getSemanticContext())
// While we have context to pop back from, we may have
// gotten that context AFTER having fallen off a rule.
// Make sure we track that we are now out of context.
c.reachesIntoOuterContext = config.reachesIntoOuterContext
c.setReachesIntoOuterContext( config.getReachesIntoOuterContext() )
this.closureCheckingStopState(c, configs, closureBusy, collectPredicates, fullCtx, depth - 1, treatEofAsEpsilon)
}
return
@ -1028,7 +1031,7 @@ func (this *ParserATNSimulator) closureCheckingStopState(config *ATNConfig, conf
} else {
// else if we have no context info, just chase follow links (if greedy)
if (ParserATNSimulatorprototypedebug) {
fmt.Println("FALLING off rule " + this.getRuleName(config.state.ruleIndex))
fmt.Println("FALLING off rule " + this.getRuleName(config.getState().getRuleIndex()))
}
}
}
@ -1036,26 +1039,26 @@ func (this *ParserATNSimulator) closureCheckingStopState(config *ATNConfig, conf
}
// Do the actual work of walking epsilon edges//
func (this *ParserATNSimulator) closure_(config *ATNConfig, configs *ATNConfigSet, closureBusy Set, collectPredicates, fullCtx bool, depth int, treatEofAsEpsilon bool) {
var p = config.state
func (this *ParserATNSimulator) closure_(config IATNConfig, configs *ATNConfigSet, closureBusy *Set, collectPredicates, fullCtx bool, depth int, treatEofAsEpsilon bool) {
var p = config.getState()
// optimization
if (! p.epsilonOnlyTransitions) {
if (! p.getEpsilonOnlyTransitions()) {
configs.add(config, this.mergeCache)
// make sure to not return here, because EOF transitions can act as
// both epsilon transitions and non-epsilon transitions.
}
for i := 0; i<len(p.transitions); i++ {
var t = p.transitions[i]
for i := 0; i<len(p.getTransitions()); i++ {
var t = p.getTransitions()[i]
_, ok := t.(*ActionTransition)
var continueCollecting = collectPredicates && !ok
var c = this.getEpsilonTarget(config, t, continueCollecting, depth == 0, fullCtx, treatEofAsEpsilon)
if (c!=nil) {
if (!t.isEpsilon && closureBusy.add(c)!=c){
if (!t.getIsEpsilon() && closureBusy.add(c)!=c){
// avoid infinite recursion for EOF* and EOF+
continue
}
var newDepth = depth
t2, ok := t.(*RuleStopState)
t2, ok := t.(*EpsilonTransition)
if (ok) {
// target fell off end of rule; mark resulting c as having dipped into outer context
// We can't get here if incoming config was rule stop and we had context
@ -1069,18 +1072,18 @@ func (this *ParserATNSimulator) closure_(config *ATNConfig, configs *ATNConfigSe
}
if (this._dfa != nil && this._dfa.precedenceDfa) {
if (t.outermostPrecedenceReturn == this._dfa.atnStartState.ruleIndex) {
if (t2.outermostPrecedenceReturn == this._dfa.atnStartState.getRuleIndex()) {
c.precedenceFilterSuppressed = true
}
}
c.reachesIntoOuterContext += 1
c.setReachesIntoOuterContext( c.getReachesIntoOuterContext() + 1 )
configs.dipsIntoOuterContext = true // TODO: can remove? only care when we add to set per middle of this method
newDepth -= 1
if (ParserATNSimulatorprototypedebug) {
fmt.Println("dips into outer ctx: " + c)
fmt.Println("dips into outer ctx: " + c.toString())
}
} else if _, ok := t.(RuleTransition); ok {
} else if _, ok := t.(*RuleTransition); ok {
// latch when newDepth goes negative - once we step out of the entry context we can't return
if (newDepth >= 0) {
newDepth += 1
@ -1099,25 +1102,25 @@ func (this *ParserATNSimulator) getRuleName( index int ) string {
}
}
func (this *ParserATNSimulator) getEpsilonTarget(config *ATNConfig, t ITransition, collectPredicates, inContext, fullCtx, treatEofAsEpsilon bool) *ATNConfig {
func (this *ParserATNSimulator) getEpsilonTarget(config IATNConfig, t ITransition, collectPredicates, inContext, fullCtx, treatEofAsEpsilon bool) *ATNConfig {
switch(t.serializationType) {
switch(t.getSerializationType()) {
case TransitionRULE:
return this.ruleTransition(config, t)
return this.ruleTransition(config, t.(*RuleTransition))
case TransitionPRECEDENCE:
return this.precedenceTransition(config, t, collectPredicates, inContext, fullCtx)
return this.precedenceTransition(config, t.(*PrecedencePredicateTransition), collectPredicates, inContext, fullCtx)
case TransitionPREDICATE:
return this.predTransition(config, t, collectPredicates, inContext, fullCtx)
return this.predTransition(config, t.(*PredicateTransition), collectPredicates, inContext, fullCtx)
case TransitionACTION:
return this.actionTransition(config, t)
return this.actionTransition(config, t.(*ActionTransition))
case TransitionEPSILON:
return NewATNConfig4( config, t.target )
return NewATNConfig4( config, t.getTarget() )
case TransitionATOM:
// EOF transitions act like epsilon transitions after the first EOF
// transition is traversed
if (treatEofAsEpsilon) {
if (t.matches(TokenEOF, 0, 1)) {
return NewATNConfig4(config, t.target)
return NewATNConfig4(config, t.getTarget())
}
}
return nil
@ -1126,7 +1129,7 @@ func (this *ParserATNSimulator) getEpsilonTarget(config *ATNConfig, t ITransitio
// transition is traversed
if (treatEofAsEpsilon) {
if (t.matches(TokenEOF, 0, 1)) {
return NewATNConfig4(config, t.target)
return NewATNConfig4(config, t.getTarget())
}
}
return nil
@ -1135,7 +1138,7 @@ func (this *ParserATNSimulator) getEpsilonTarget(config *ATNConfig, t ITransitio
// transition is traversed
if (treatEofAsEpsilon) {
if (t.matches(TokenEOF, 0, 1)) {
return NewATNConfig4(config, t.target)
return NewATNConfig4(config, t.getTarget())
}
}
return nil
@ -1144,21 +1147,21 @@ func (this *ParserATNSimulator) getEpsilonTarget(config *ATNConfig, t ITransitio
}
}
func (this *ParserATNSimulator) actionTransition(config *ATNConfig, t *ActionTransition) *ATNConfig {
func (this *ParserATNSimulator) actionTransition(config IATNConfig, t *ActionTransition) *ATNConfig {
if (ParserATNSimulatorprototypedebug) {
fmt.Println("ACTION edge " + t.ruleIndex + ":" + t.actionIndex)
fmt.Println("ACTION edge " + strconv.Itoa(t.ruleIndex) + ":" + strconv.Itoa( t.actionIndex) )
}
return NewATNConfig4(config, t.target)
return NewATNConfig4(config, t.getTarget())
}
func (this *ParserATNSimulator) precedenceTransition(config *ATNConfig,
func (this *ParserATNSimulator) precedenceTransition(config IATNConfig,
pt *PrecedencePredicateTransition, collectPredicates, inContext, fullCtx bool) *ATNConfig {
if (ParserATNSimulatorprototypedebug) {
fmt.Println("PRED (collectPredicates=" + collectPredicates + ") " +
pt.precedence + ">=_p, ctx dependent=true")
fmt.Println("PRED (collectPredicates=" + fmt.Sprint(collectPredicates) + ") " +
strconv.Itoa(pt.precedence) + ">=_p, ctx dependent=true")
if (this.parser!=nil) {
fmt.Println("context surrounding pred is " + fmt.Sprint(this.parser.getRuleInvocationStack()))
fmt.Println("context surrounding pred is " + fmt.Sprint(this.parser.getRuleInvocationStack(nil)))
}
}
var c *ATNConfig = nil
@ -1173,32 +1176,31 @@ func (this *ParserATNSimulator) precedenceTransition(config *ATNConfig,
var predSucceeds = pt.getPredicate().evaluate(this.parser, this._outerContext)
this._input.seek(currentPosition)
if (predSucceeds) {
c = NewATNConfig4(config, pt.target) // no pred context
c = NewATNConfig4(config, pt.getTarget()) // no pred context
}
} else {
newSemCtx := SemanticContextandContext(config.semanticContext, pt.getPredicate())
c = NewATNConfig3(config, pt.target, newSemCtx)
newSemCtx := SemanticContextandContext(config.getSemanticContext(), pt.getPredicate())
c = NewATNConfig3(config, pt.getTarget(), newSemCtx)
}
} else {
c = NewATNConfig4(config, pt.target)
c = NewATNConfig4(config, pt.getTarget())
}
if (ParserATNSimulatorprototypedebug) {
fmt.Println("config from pred transition=" + c)
fmt.Println("config from pred transition=" + c.toString())
}
return c
}
func (this *ParserATNSimulator) predTransition(config *ATNConfig,
pt *PredicateTransition, collectPredicates, inContext, fullCtx bool) *ATNConfig {
func (this *ParserATNSimulator) predTransition(config IATNConfig, pt *PredicateTransition, collectPredicates, inContext, fullCtx bool) *ATNConfig {
if (ParserATNSimulatorprototypedebug) {
fmt.Println("PRED (collectPredicates=" + collectPredicates + ") " + pt.ruleIndex +
":" + pt.predIndex + ", ctx dependent=" + pt.isCtxDependent)
fmt.Println("PRED (collectPredicates=" + fmt.Sprint(collectPredicates) + ") " + strconv.Itoa(pt.ruleIndex) +
":" + strconv.Itoa(pt.predIndex) + ", ctx dependent=" + fmt.Sprint(pt.isCtxDependent))
if (this.parser!=nil) {
fmt.Println("context surrounding pred is " + fmt.Sprint(this.parser.getRuleInvocationStack()))
fmt.Println("context surrounding pred is " + fmt.Sprint(this.parser.getRuleInvocationStack(nil)))
}
}
var c *ATNConfigSet = nil
var c *ATNConfig = nil
if (collectPredicates && ((pt.isCtxDependent && inContext) || ! pt.isCtxDependent)) {
if (fullCtx) {
// In full context mode, we can evaluate predicates on-the-fly
@ -1210,28 +1212,28 @@ func (this *ParserATNSimulator) predTransition(config *ATNConfig,
var predSucceeds = pt.getPredicate().evaluate(this.parser, this._outerContext)
this._input.seek(currentPosition)
if (predSucceeds) {
c = NewATNConfig4(config, pt.target) // no pred context
c = NewATNConfig4(config, pt.getTarget()) // no pred context
}
} else {
var newSemCtx = SemanticContextandContext(config.semanticContext, pt.getPredicate())
c = NewATNConfig3(config, pt.target, newSemCtx)
var newSemCtx = SemanticContextandContext(config.getSemanticContext(), pt.getPredicate())
c = NewATNConfig3(config, pt.getTarget(), newSemCtx)
}
} else {
c = NewATNConfig4(config, pt.target )
c = NewATNConfig4(config, pt.getTarget() )
}
if (ParserATNSimulatorprototypedebug) {
fmt.Println("config from pred transition=" + c)
fmt.Println("config from pred transition=" + c.toString())
}
return c
}
func (this *ParserATNSimulator) ruleTransition(config *ATNConfig, t *RuleTransition) *ATNConfig {
func (this *ParserATNSimulator) ruleTransition(config IATNConfig, t *RuleTransition) *ATNConfig {
if (ParserATNSimulatorprototypedebug) {
fmt.Println("CALL rule " + this.getRuleName(t.target.ruleIndex) + ", ctx=" + config.context)
fmt.Println("CALL rule " + this.getRuleName(t.getTarget().getRuleIndex()) + ", ctx=" + config.getContext().toString())
}
var returnState = t.followState
var newContext = SingletonPredictionContextcreate(config.context, returnState.stateNumber)
return NewATNConfig3( config, t.target, newContext )
var newContext = SingletonPredictionContextcreate(config.getContext(), returnState.getStateNumber())
return NewATNConfig1( config, t.getTarget(), newContext )
}
func (this *ParserATNSimulator) getConflictingAlts(configs *ATNConfigSet) *BitSet {
@ -1290,18 +1292,18 @@ func (this *ParserATNSimulator) getTokenName( t int ) string {
if (t==TokenEOF) {
return "EOF"
}
if( this.parser!=nil && this.parser.literalNames!=nil) {
if (t >= len(this.parser.literalNames)) {
fmt.Println("" + t + " ttype out of range: " + this.parser.literalNames)
fmt.Println("" + this.parser.getInputStream().getTokens())
if( this.parser!=nil && this.parser.getLiteralNames()!=nil) {
if (t >= len(this.parser.getLiteralNames())) {
fmt.Println(strconv.Itoa(t) + " ttype out of range: " + strings.Join(this.parser.getLiteralNames(), ","))
// fmt.Println(this.parser.getInputStream().getTokens())
} else {
return this.parser.literalNames[t] + "<" + t + ">"
return this.parser.getLiteralNames()[t] + "<" + strconv.Itoa(t) + ">"
}
}
return "" + t
return "" + strconv.Itoa(t)
}
func (this *ParserATNSimulator) getLookaheadName(input *TokenStream) string {
func (this *ParserATNSimulator) getLookaheadName(input TokenStream) string {
return this.getTokenName(input.LA(1))
}
@ -1320,8 +1322,8 @@ func (this *ParserATNSimulator) dumpDeadEndConfigs(nvae *NoViableAltException) {
//
// var c = decs[i]
// var trans = "no edges"
// if (len(c.state.transitions)>0) {
// var t = c.state.transitions[0]
// if (len(c.state.getTransitions())>0) {
// var t = c.state.getTransitions()[0]
// if t2, ok := t.(*AtomTransition); ok {
// trans = "Atom "+ this.getTokenName(t2.label)
// } else if t3, ok := t.(SetTransition); ok {
@ -1339,7 +1341,7 @@ func (this *ParserATNSimulator) dumpDeadEndConfigs(nvae *NoViableAltException) {
// }
}
func (this *ParserATNSimulator) noViableAlt(input *TokenStream, outerContext *ParserRuleContext, configs *ATNConfigSet, startIndex int) *NoViableAltException {
func (this *ParserATNSimulator) noViableAlt(input TokenStream, outerContext IParserRuleContext, configs *ATNConfigSet, startIndex int) *NoViableAltException {
return NewNoViableAltException(this.parser, input, input.get(startIndex), input.LT(1), configs, outerContext)
}
@ -1348,8 +1350,8 @@ func (this *ParserATNSimulator) getUniqueAlt(configs *ATNConfigSet) int {
for i:=0; i<len(configs.configs); i++ {
var c = configs.configs[i]
if (alt == ATNINVALID_ALT_NUMBER) {
alt = c.alt // found first alt
} else if( c.alt!=alt) {
alt = c.getAlt() // found first alt
} else if( c.getAlt()!=alt) {
return ATNINVALID_ALT_NUMBER
}
}
@ -1378,7 +1380,7 @@ func (this *ParserATNSimulator) getUniqueAlt(configs *ATNConfigSet) int {
//
func (this *ParserATNSimulator) addDFAEdge(dfa *DFA, from_ *DFAState, t int, to *DFAState) *DFAState {
if( ParserATNSimulatorprototypedebug) {
fmt.Println("EDGE " + from_ + " -> " + to + " upon " + this.getTokenName(t))
fmt.Println("EDGE " + from_.toString() + " -> " + to.toString() + " upon " + this.getTokenName(t))
}
if (to==nil) {
return nil
@ -1388,14 +1390,14 @@ func (this *ParserATNSimulator) addDFAEdge(dfa *DFA, from_ *DFAState, t int, to
return to
}
if (from_.edges==nil) {
from_.edges = make([]*DFAState)
from_.edges = make([]*DFAState, this.atn.maxTokenType+1+1)
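// the edge array is indexed by t+1 so that EOF (token type -1) maps to slot 0, hence maxTokenType+1+1 entries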
}
from_.edges[t+1] = to // connect
if (ParserATNSimulatorprototypedebug) {
var names []string
if (this.parser != nil){
names = this.parser.literalNames
names = this.parser.getLiteralNames()
}
fmt.Println("DFA=\n" + dfa.toString(names, nil))
@ -1422,18 +1424,18 @@ func (this *ParserATNSimulator) addDFAState(dfa *DFA, D *DFAState) *DFAState {
return D
}
var hash = D.hashString()
var existing = dfa.states[hash] || nil
if(existing!=nil) {
var existing, ok = dfa.getStates()[hash]
if(ok) {
return existing
}
D.stateNumber = len(dfa.states)
D.stateNumber = len(dfa.getStates())
if (! D.configs.readOnly) {
D.configs.optimizeConfigs(this)
D.configs.optimizeConfigs(this.ATNSimulator)
D.configs.setReadonly(true)
}
dfa.states[hash] = D
dfa.getStates()[hash] = D
if (ParserATNSimulatorprototypedebug) {
fmt.Println("adding NewDFA state: " + D)
fmt.Println("adding NewDFA state: " + D.toString())
}
return D
}
@ -1441,8 +1443,8 @@ func (this *ParserATNSimulator) addDFAState(dfa *DFA, D *DFAState) *DFAState {
func (this *ParserATNSimulator) reportAttemptingFullContext(dfa *DFA, conflictingAlts *BitSet, configs *ATNConfigSet, startIndex, stopIndex int) {
if (ParserATNSimulatorprototypedebug || ParserATNSimulatorprototyperetry_debug) {
var interval = NewInterval(startIndex, stopIndex + 1)
fmt.Println("reportAttemptingFullContext decision=" + dfa.decision + ":" + configs +
", input=" + this.parser.getTokenStream().getText(interval))
fmt.Println("reportAttemptingFullContext decision=" + strconv.Itoa(dfa.decision) + ":" + configs.toString() +
", input=" + this.parser.getTokenStream().getTextFromInterval(interval))
}
if (this.parser!=nil) {
this.parser.getErrorListenerDispatch().reportAttemptingFullContext(this.parser, dfa, startIndex, stopIndex, conflictingAlts, configs)
@ -1452,8 +1454,8 @@ func (this *ParserATNSimulator) reportAttemptingFullContext(dfa *DFA, conflictin
func (this *ParserATNSimulator) reportContextSensitivity(dfa *DFA, prediction int, configs *ATNConfigSet, startIndex, stopIndex int) {
if (ParserATNSimulatorprototypedebug || ParserATNSimulatorprototyperetry_debug) {
var interval = NewInterval(startIndex, stopIndex + 1)
fmt.Println("reportContextSensitivity decision=" + dfa.decision + ":" + configs +
", input=" + this.parser.getTokenStream().getText(interval))
fmt.Println("reportContextSensitivity decision=" + strconv.Itoa(dfa.decision) + ":" + configs.toString() +
", input=" + this.parser.getTokenStream().getTextFromInterval(interval))
}
if (this.parser!=nil) {
this.parser.getErrorListenerDispatch().reportContextSensitivity(this.parser, dfa, startIndex, stopIndex, prediction, configs)
@ -1465,8 +1467,8 @@ func (this *ParserATNSimulator) reportAmbiguity(dfa *DFA, D *DFAState, startInde
exact bool, ambigAlts *BitSet, configs *ATNConfigSet ) {
if (ParserATNSimulatorprototypedebug || ParserATNSimulatorprototyperetry_debug) {
var interval = NewInterval(startIndex, stopIndex + 1)
fmt.Println("reportAmbiguity " + ambigAlts + ":" + configs +
", input=" + this.parser.getTokenStream().getText(interval))
fmt.Println("reportAmbiguity " + ambigAlts.toString() + ":" + configs.toString() +
", input=" + this.parser.getTokenStream().getTextFromInterval(interval))
}
if (this.parser!=nil) {
this.parser.getErrorListenerDispatch().reportAmbiguity(this.parser, dfa, startIndex, stopIndex, exact, ambigAlts, configs)

View File

@ -1,44 +1,39 @@
package antlr4
import (
"reflect"
"reflect"
)
//* A rule invocation record for parsing.
//
// Contains all of the information about the current rule not stored in the
// RuleContext. It handles the parse tree children list, any ATN state
// tracing, and the default values available for rule invocations:
// start, stop, rule index, current alt number, current
// ATN state.
//
// Subclasses made for each rule and grammar track the parameters,
// return values, locals, and labels specific to that rule. These
// are the objects that are returned from rules.
//
// Note text is not an actual field of a rule return value; it is computed
// from start and stop using the input stream's toString() method. I
// could add a ctor to prc so that we can pass in and store the input
// stream, but I'm not sure we want to do that. The .text property would
// not be well defined anyway if the rule matches tokens from multiple
// input streams.
//
// I do not use getters for fields of objects that are used simply to
// group values such as prc aggregate. The getters/setters are there to
// satisfy the superclass interface.
type IParserRuleContext interface {
IRuleContext
setException(IRecognitionException)
addTokenNode(token *Token) *TerminalNodeImpl
addErrorNode(badToken *Token) *ErrorNodeImpl
enterRule(listener ParseTreeListener)
exitRule(listener ParseTreeListener)
setStart(*Token)
getStart() *Token
setStop(*Token)
getStop() *Token
addChild(child IRuleContext) IRuleContext
removeLastChild()
}
type ParserRuleContext struct {
*RuleContext
parentCtx *ParserRuleContext
ruleIndex int
children []RuleContext
children []ParseTree
start, stop *Token
exception IRecognitionException
}
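Aside: the doc comment above describes a rule invocation record whose text is derived from the start/stop tokens rather than stored. A minimal, self-contained sketch of that idea (the invocationRecord and tokenText names below are illustrative, not part of the runtime):

package main

import (
	"fmt"
	"strings"
)

// invocationRecord is a stripped-down stand-in for a rule context: it keeps
// only the start/stop token indexes and derives its text from the token slice.
type invocationRecord struct {
	start, stop int // inclusive token indexes into the input token stream
}

// text recomputes the matched text from the token range instead of storing it.
func (r invocationRecord) text(tokens []string) string {
	if r.start < 0 || r.stop >= len(tokens) || r.start > r.stop {
		return ""
	}
	return strings.Join(tokens[r.start:r.stop+1], " ")
}

func main() {
	tokens := []string{"x", "=", "1", "+", "2", ";"}
	expr := invocationRecord{start: 2, stop: 4}
	fmt.Println(expr.text(tokens)) // "1 + 2"
}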
func NewParserRuleContext(parent *ParserRuleContext, invokingStateNumber int) *ParserRuleContext {
func NewParserRuleContext(parent IParserRuleContext, invokingStateNumber int) *ParserRuleContext {
prc := new(ParserRuleContext)
@ -49,7 +44,7 @@ func NewParserRuleContext(parent *ParserRuleContext, invokingStateNumber int) *P
}
func (prc *ParserRuleContext) InitParserRuleContext(parent *ParserRuleContext, invokingStateNumber int){
func (prc *ParserRuleContext) InitParserRuleContext(parent IParserRuleContext, invokingStateNumber int){
prc.InitRuleContext(parent, invokingStateNumber)
@ -67,33 +62,58 @@ func (prc *ParserRuleContext) InitParserRuleContext(parent *ParserRuleContext, i
// completed, prc is {@code nil}.
prc.exception = nil
return prc
}
func (prc *ParserRuleContext) setException(e IRecognitionException) {
prc.exception = e
}
func (prc *ParserRuleContext) getParent() Tree {
return prc.parentCtx
}
func (prc *ParserRuleContext) setParent(ctx Tree) {
prc.parentCtx = ctx.(IParserRuleContext)
}
func (prc *ParserRuleContext) setChildren(cs []Tree) {
prc.children = make([]ParseTree, 0, len(cs))
for _, c := range cs {
prc.addChild(c.(IRuleContext))
}
}
func (prc *ParserRuleContext) copyFrom(ctx *ParserRuleContext) {
// from RuleContext
prc.parentCtx = ctx.parentCtx
prc.invokingState = ctx.invokingState
prc.children = nil
prc.start = ctx.start
prc.stop = ctx.stop
// from RuleContext
prc.parentCtx = ctx.parentCtx
prc.invokingState = ctx.invokingState
prc.children = nil
prc.start = ctx.start
prc.stop = ctx.stop
}
// Double dispatch methods for listeners
func (prc *ParserRuleContext) enterRule(listener *ParseTreeListener) {
func (prc *ParserRuleContext) enterRule(listener ParseTreeListener) {
}
func (prc *ParserRuleContext) exitRule(listener *ParseTreeListener) {
func (prc *ParserRuleContext) exitRule(listener ParseTreeListener) {
}
// * Does not set parent link; other add methods do that///
func (prc *ParserRuleContext) addChild(child *ParserRuleContext) {
if (prc.children == nil) {
prc.children = make([]*ParserRuleContext)
}
prc.children = append( prc.children, child )
return child
func (prc *ParserRuleContext) addTerminalNodeChild(child TerminalNode) TerminalNode {
if (prc.children == nil) {
prc.children = make([]ParseTree, 0)
}
prc.children = append( prc.children, child )
return child
}
func (prc *ParserRuleContext) addChild(child IRuleContext) IRuleContext {
if (prc.children == nil) {
prc.children = make([]ParseTree, 0)
}
prc.children = append( prc.children, child )
return child
}
// * Used by enterOuterAlt to toss out a RuleContext previously added as
@ -101,23 +121,25 @@ func (prc *ParserRuleContext) addChild(child *ParserRuleContext) {
// generic ruleContext object.
// /
func (prc *ParserRuleContext) removeLastChild() {
if (prc.children != nil && len(prc.children) > 0) {
if (prc.children != nil && len(prc.children) > 0) {
prc.children = prc.children[0:len(prc.children)-1]
}
}
}
func (prc *ParserRuleContext) addTokenNode(token *Token) *TerminalNodeImpl {
var node = NewTerminalNodeImpl(token)
prc.addChild(node)
node.parentCtx = prc
return node
var node = NewTerminalNodeImpl(token)
prc.addTerminalNodeChild(node)
node.parentCtx = prc
return node
}
func (prc *ParserRuleContext) addErrorNode(badToken *Token) *ErrorNodeImpl {
var node = NewErrorNodeImpl(badToken)
prc.addChild(node)
node.parentCtx = prc
return node
var node = NewErrorNodeImpl(badToken)
prc.addTerminalNodeChild(node)
node.parentCtx = prc
return node
}
func (prc *ParserRuleContext) getChild(i int) Tree {
@ -130,76 +152,91 @@ func (prc *ParserRuleContext) getChild(i int) Tree {
func (prc *ParserRuleContext) getChildOfType(i int, childType reflect.Type) IRuleContext {
if (childType == nil) {
return prc.getChild(i)
return prc.getChild(i).(IRuleContext)
} else {
for j :=0; j<len(prc.children); j++ {
var child = prc.children[j]
if reflect.TypeOf(child) == childType {
if(i==0) {
return child
return child.(IRuleContext)
} else {
i -= 1
}
}
}
return nil
}
}
}
func (prc *ParserRuleContext) setStart(t *Token) {
prc.start = t
}
func (prc *ParserRuleContext) getToken(ttype int, i int) *TerminalNode {
func (prc *ParserRuleContext) getStart() *Token {
return prc.start
}
func (prc *ParserRuleContext) setStop(t *Token){
prc.stop = t
}
func (prc *ParserRuleContext) getStop() *Token {
return prc.stop
}
func (prc *ParserRuleContext) getToken(ttype int, i int) TerminalNode {
for j :=0; j<len(prc.children); j++ {
var child = prc.children[j]
if _, ok := child.(*TerminalNode); ok {
if (child.symbol.tokenType == ttype) {
if c2, ok := child.(TerminalNode); ok {
if (c2.getSymbol().tokenType == ttype) {
if(i==0) {
return child
return c2
} else {
i -= 1
}
}
}
}
}
return nil
return nil
}
func (prc *ParserRuleContext) getTokens(ttype int) []*TerminalNode {
if (prc.children== nil) {
return make([]*TerminalNode)
} else {
var tokens = make([]*TerminalNode)
func (prc *ParserRuleContext) getTokens(ttype int) []TerminalNode {
if (prc.children== nil) {
return make([]TerminalNode, 0)
} else {
var tokens = make([]TerminalNode, 0)
for j:=0; j<len(prc.children); j++ {
var child = prc.children[j]
if tchild, ok := child.(*TerminalNode); ok {
if (tchild.symbol.tokenType == ttype) {
if tchild, ok := child.(TerminalNode); ok {
if (tchild.getSymbol().tokenType == ttype) {
tokens = append(tokens, tchild)
}
}
}
return tokens
}
}
}
func (prc *ParserRuleContext) getTypedRuleContext(ctxType reflect.Type, i int) *interface{} {
panic("getTypedRuleContexts not implemented")
// return prc.getChild(i, ctxType)
// return prc.getChild(i, ctxType)
}
func (prc *ParserRuleContext) getTypedRuleContexts(ctxType reflect.Type) []*interface{} {
panic("getTypedRuleContexts not implemented")
// if (prc.children== nil) {
// return []
// } else {
// var contexts = []
// for(var j=0 j<len(prc.children) j++) {
// var child = prc.children[j]
// if _, ok := child.(ctxType); ok {
// contexts.push(child)
// }
// }
// return contexts
// }
// if (prc.children== nil) {
// return []
// } else {
// var contexts = []
// for(var j=0 j<len(prc.children) j++) {
// var child = prc.children[j]
// if _, ok := child.(ctxType); ok {
// contexts.push(child)
// }
// }
// return contexts
// }
}
func (prc *ParserRuleContext) getChildCount() int {
@ -211,27 +248,30 @@ func (prc *ParserRuleContext) getChildCount() int {
}
func (prc *ParserRuleContext) getSourceInterval() *Interval {
if( prc.start == nil || prc.stop == nil) {
return TreeINVALID_INTERVAL
} else {
return NewInterval(prc.start.tokenIndex, prc.stop.tokenIndex)
}
if( prc.start == nil || prc.stop == nil) {
return TreeINVALID_INTERVAL
} else {
return NewInterval(prc.start.tokenIndex, prc.stop.tokenIndex)
}
}
var RuleContextEMPTY = NewParserRuleContext(nil, -1)
type IInterpreterRuleContext interface {
IParserRuleContext
}
type InterpreterRuleContext struct {
ParserRuleContext
ruleIndex int
}
func NewInterpreterRuleContext(parent *InterpreterRuleContext, invokingStateNumber, ruleIndex int) {
func NewInterpreterRuleContext(parent IInterpreterRuleContext, invokingStateNumber, ruleIndex int) *InterpreterRuleContext {
prc := new(InterpreterRuleContext)
prc.InitParserRuleContext( parent, invokingStateNumber )
prc.ruleIndex = ruleIndex
prc.ruleIndex = ruleIndex
return prc
return prc
}

View File

@ -2,6 +2,7 @@ package antlr4
import (
"fmt"
"strconv"
)
type IPredictionContext interface {
@ -12,6 +13,7 @@ type IPredictionContext interface {
length() int
isEmpty() bool
hasEmptyPath() bool
toString() string
}
type PredictionContext struct {
@ -64,11 +66,10 @@ var PredictionContextid = PredictionContextglobalNodeCount
// return hash
// }
// </pre>
// /
//
// This means only the {@link //EMPTY} context is in set.
func (this *PredictionContext) isEmpty() bool {
return this == PredictionContextEMPTY
return false
}
func (this *PredictionContext) hasEmptyPath() bool {
@ -79,14 +80,18 @@ func (this *PredictionContext) hashString() string {
return this.cachedHashString
}
func calculateHashString(parent *PredictionContext, returnState int) string {
return "" + fmt.Sprint(parent) + fmt.Sprint(returnState)
func calculateHashString(parent IPredictionContext, returnState int) string {
return parent.toString() + strconv.Itoa(returnState)
}
func calculateEmptyHashString() string {
return ""
}
func (this *PredictionContext) toString() string {
panic("Not implemented")
}
func (this *PredictionContext) getParent(index int) IPredictionContext {
panic("Not implemented")
}
@ -99,6 +104,7 @@ func (this *PredictionContext) getReturnState(index int) int {
panic("Not implemented")
}
// Used to cache {@link PredictionContext} objects. It's used for the shared
// context cache associated with contexts in DFA states. This cache
// can be used for both lexers and parsers.
@ -107,7 +113,7 @@ type PredictionContextCache struct {
cache map[IPredictionContext]IPredictionContext
}
func NewPredictionContextCache() {
func NewPredictionContextCache() *PredictionContextCache {
t := new(PredictionContextCache)
t.cache = make(map[IPredictionContext]IPredictionContext)
return t
@ -117,7 +123,7 @@ func NewPredictionContextCache() {
// return that one instead and do not add a new context to the cache.
// Protect shared cache from unsafe thread access.
//
func (this *PredictionContextCache) add(ctx IPredictionContext) {
func (this *PredictionContextCache) add(ctx IPredictionContext) IPredictionContext {
if (ctx == PredictionContextEMPTY) {
return PredictionContextEMPTY
}
@ -129,7 +135,7 @@ func (this *PredictionContextCache) add(ctx IPredictionContext) {
return ctx
}
func (this *PredictionContextCache) get(ctx IPredictionContext) {
func (this *PredictionContextCache) get(ctx IPredictionContext) IPredictionContext {
return this.cache[ctx]
}
@ -137,34 +143,37 @@ func (this *PredictionContextCache) length() int {
return len(this.cache)
}
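The cache comments above describe an interning map: add returns the already-cached instance when an equal context exists. A minimal sketch of that pattern, assuming value-comparable keys (the names below are illustrative only, not the runtime's):

package main

import "fmt"

// ctx is a tiny value type standing in for a prediction context.
type ctx struct{ returnState int }

// internCache returns one canonical pointer per distinct ctx value.
type internCache struct{ byValue map[ctx]*ctx }

func newInternCache() *internCache {
	return &internCache{byValue: make(map[ctx]*ctx)}
}

// add returns the previously cached instance if an equal one exists,
// otherwise stores the argument and returns it unchanged.
func (c *internCache) add(p *ctx) *ctx {
	if cached, ok := c.byValue[*p]; ok {
		return cached
	}
	c.byValue[*p] = p
	return p
}

func main() {
	c := newInternCache()
	a, b := &ctx{returnState: 7}, &ctx{returnState: 7}
	fmt.Println(c.add(a) == c.add(b)) // true: both map to one shared instance
}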
type ISingletonPredictionContext interface {
IPredictionContext
}
type SingletonPredictionContext struct {
PredictionContext
*PredictionContext
parentCtx IPredictionContext
returnState int
}
func NewSingletonPredictionContext(parent IPredictionContext, returnState int) {
func NewSingletonPredictionContext(parent IPredictionContext, returnState int) *SingletonPredictionContext {
s := new(SingletonPredictionContext)
s.InitSingletonPredictionContext(parent, returnState)
return s
}
// var hashString string
//
// if (parent != nil){
// hashString = calculateHashString(parent, returnState)
// } else {
// hashString = calculateEmptyHashString()
// }
func (s *SingletonPredictionContext) InitSingletonPredictionContext(parent IPredictionContext, returnState int) {
panic("Must initializer parent predicition context")
// PredictionContext.call(s, hashString)
if (parent != nil){
s.cachedHashString = calculateHashString(parent, returnState)
} else {
s.cachedHashString = calculateEmptyHashString()
}
s.parentCtx = parent
s.returnState = returnState
return s
}
func SingletonPredictionContextcreate(parent IPredictionContext, returnState int) *SingletonPredictionContext {
func SingletonPredictionContextcreate(parent IPredictionContext, returnState int) IPredictionContext {
if (returnState == PredictionContextEMPTY_RETURN_STATE && parent == nil) {
// someone can pass in the bits of an array ctx that mean $
return PredictionContextEMPTY
@ -216,30 +225,32 @@ func (this *SingletonPredictionContext) toString() string {
if (this.parentCtx == nil){
up = ""
} else {
up = fmt.Sprint(this.parentCtx)
up = this.parentCtx.toString()
}
if (len(up) == 0) {
if (this.returnState == PredictionContextEMPTY_RETURN_STATE) {
return "$"
} else {
return "" + this.returnState
return strconv.Itoa( this.returnState )
}
} else {
return "" + this.returnState + " " + up
return strconv.Itoa( this.returnState )+ " " + up
}
}
var PredictionContextEMPTY = NewEmptyPredictionContext()
type EmptyPredictionContext struct {
SingletonPredictionContext
*SingletonPredictionContext
}
func NewEmptyPredictionContext() *EmptyPredictionContext {
panic("Must init SingletonPredictionContext")
// SingletonPredictionContext.call(this, nil, PredictionContextEMPTY_RETURN_STATE)
p := new(EmptyPredictionContext)
p.InitSingletonPredictionContext(nil, PredictionContextEMPTY_RETURN_STATE)
return p
}
@ -263,10 +274,9 @@ func (this *EmptyPredictionContext) toString() string {
return "$"
}
var PredictionContextEMPTY = NewEmptyPredictionContext()
type ArrayPredictionContext struct {
PredictionContext
*PredictionContext
parents []IPredictionContext
returnStates []int
}
@ -279,9 +289,12 @@ func NewArrayPredictionContext(parents []IPredictionContext, returnStates []int)
c := new(ArrayPredictionContext)
panic("Must init PredictionContext")
// var hash = calculateHashString(parents, returnStates)
// PredictionContext.call(c, hash)
c.cachedHashString = ""
for i := range parents {
c.cachedHashString += calculateHashString(parents[i], returnStates[i])
}
c.parents = parents
c.returnStates = returnStates
@ -311,11 +324,11 @@ func (this *ArrayPredictionContext) equals(other IPredictionContext) bool {
return true
} else if _, ok := other.(*ArrayPredictionContext); !ok {
return false
} else if (this.hashString != other.hashString()) {
} else if (this.cachedHashString != other.hashString()) {
return false // can't be same if hash is different
} else {
otherP := other.(*ArrayPredictionContext)
return this.returnStates == otherP.returnStates && this.parents == otherP.parents
return &this.returnStates == &otherP.returnStates && &this.parents == &otherP.parents
}
}
@ -332,9 +345,9 @@ func (this *ArrayPredictionContext) toString() string {
s = s + "$"
continue
}
s = s + this.returnStates[i]
s = s + strconv.Itoa(this.returnStates[i])
if (this.parents[i] != nil) {
s = s + " " + this.parents[i]
s = s + " " + this.parents[i].toString()
} else {
s = s + "nil"
}
@ -346,24 +359,24 @@ func (this *ArrayPredictionContext) toString() string {
// Convert a {@link RuleContext} tree to a {@link PredictionContext} graph.
// Return {@link //EMPTY} if {@code outerContext} is empty or nil.
// /
func predictionContextFromRuleContext(a *ATN, outerContext *RuleContext) IPredictionContext {
func predictionContextFromRuleContext(a *ATN, outerContext IRuleContext) IPredictionContext {
if (outerContext == nil) {
outerContext = RuleContextEMPTY
}
// if we are in RuleContext of start rule, s, then PredictionContext
// is EMPTY. Nobody called us. (if we are empty, return empty)
if (outerContext.parentCtx == nil || outerContext == RuleContextEMPTY) {
if (outerContext.getParent() == nil || outerContext == RuleContextEMPTY) {
return PredictionContextEMPTY
}
// If we have a parent, convert it to a PredictionContext graph
var parent = predictionContextFromRuleContext(a, outerContext.parentCtx)
var state = a.states[outerContext.invokingState]
var parent = predictionContextFromRuleContext(a, outerContext.getParent().(IRuleContext))
var state = a.states[outerContext.getInvokingState()]
var transition = state.getTransitions()[0]
return SingletonPredictionContextcreate(parent, transition.(*RuleTransition).followState.getStateNumber())
}
func calculateListsHashString(parents []PredictionContext, returnStates []int) {
func calculateListsHashString(parents []PredictionContext, returnStates []int) string {
var s = ""
for _, p := range parents {
@ -392,21 +405,21 @@ func merge(a, b IPredictionContext, rootIsWildcard bool, mergeCache *DoubleDict)
// At least one of a or b is array
// If one is $ and rootIsWildcard, return $ as * wildcard
if (rootIsWildcard) {
if _, ok := a.(EmptyPredictionContext); ok {
if _, ok := a.(*EmptyPredictionContext); ok {
return a
}
if _, ok := b.(EmptyPredictionContext); ok {
if _, ok := b.(*EmptyPredictionContext); ok {
return b
}
}
// convert singleton so both are arrays to normalize
if _, ok := a.(SingletonPredictionContext); ok {
if _, ok := a.(*SingletonPredictionContext); ok {
a = NewArrayPredictionContext([]IPredictionContext{ a.getParent(0) }, []int{ a.getReturnState(0) })
}
if _, ok := b.(SingletonPredictionContext); ok {
if _, ok := b.(*SingletonPredictionContext); ok {
b = NewArrayPredictionContext( []IPredictionContext{ b.getParent(0) }, []int{ b.getReturnState(0) })
}
return mergeArrays(a, b, rootIsWildcard, mergeCache)
return mergeArrays(a.(*ArrayPredictionContext), b.(*ArrayPredictionContext), rootIsWildcard, mergeCache)
}
//
@ -442,20 +455,20 @@ func merge(a, b IPredictionContext, rootIsWildcard bool, mergeCache *DoubleDict)
// /
func mergeSingletons(a, b *SingletonPredictionContext, rootIsWildcard bool, mergeCache *DoubleDict) IPredictionContext {
if (mergeCache != nil) {
var previous = mergeCache.get(a, b)
var previous = mergeCache.get(a.hashString(), b.hashString())
if (previous != nil) {
return previous
return previous.(IPredictionContext)
}
previous = mergeCache.get(b, a)
previous = mergeCache.get(b.hashString(), a.hashString())
if (previous != nil) {
return previous
return previous.(IPredictionContext)
}
}
var rootMerge = mergeRoot(a, b, rootIsWildcard)
if (rootMerge != nil) {
if (mergeCache != nil) {
mergeCache.set(a, b, rootMerge)
mergeCache.set(a.hashString(), b.hashString(), rootMerge)
}
return rootMerge
}
@ -475,7 +488,7 @@ func mergeSingletons(a, b *SingletonPredictionContext, rootIsWildcard bool, merg
// New joined parent, so create a new singleton pointing to it, a'
var spc = SingletonPredictionContextcreate(parent, a.returnState)
if (mergeCache != nil) {
mergeCache.set(a, b, spc)
mergeCache.set(a.hashString(), b.hashString(), spc)
}
return spc
} else { // a != b payloads differ
@ -496,7 +509,7 @@ func mergeSingletons(a, b *SingletonPredictionContext, rootIsWildcard bool, merg
var parents = []IPredictionContext{ singleParent, singleParent }
var apc = NewArrayPredictionContext(parents, payloads)
if (mergeCache != nil) {
mergeCache.set(a, b, apc)
mergeCache.set(a.hashString(), b.hashString(), apc)
}
return apc
}
@ -512,7 +525,7 @@ func mergeSingletons(a, b *SingletonPredictionContext, rootIsWildcard bool, merg
}
var a_ = NewArrayPredictionContext(parents, payloads)
if (mergeCache != nil) {
mergeCache.set(a, b, a_)
mergeCache.set(a.hashString(), b.hashString(), a_)
}
return a_
}
@ -556,7 +569,7 @@ func mergeSingletons(a, b *SingletonPredictionContext, rootIsWildcard bool, merg
// @param rootIsWildcard {@code true} if this is a local-context merge,
// otherwise false to indicate a full-context merge
// /
func mergeRoot(a, b *SingletonPredictionContext, rootIsWildcard bool) IPredictionContext {
func mergeRoot(a, b ISingletonPredictionContext, rootIsWildcard bool) IPredictionContext {
if (rootIsWildcard) {
if (a == PredictionContextEMPTY) {
return PredictionContextEMPTY // * + b = *
@ -568,12 +581,12 @@ func mergeRoot(a, b *SingletonPredictionContext, rootIsWildcard bool) IPredictio
if (a == PredictionContextEMPTY && b == PredictionContextEMPTY) {
return PredictionContextEMPTY // $ + $ = $
} else if (a == PredictionContextEMPTY) { // $ + x = [$,x]
var payloads = []int{ b.returnState, PredictionContextEMPTY_RETURN_STATE }
var parents = []IPredictionContext{ b.parentCtx, nil }
var payloads = []int{ b.getReturnState(-1), PredictionContextEMPTY_RETURN_STATE }
var parents = []IPredictionContext{ b.getParent(-1), nil }
return NewArrayPredictionContext(parents, payloads)
} else if (b == PredictionContextEMPTY) { // x + $ = [$,x] ($ is always first if present)
var payloads = []int{ a.returnState, PredictionContextEMPTY_RETURN_STATE }
var parents = []IPredictionContext{ a.parentCtx, nil }
var payloads = []int{ a.getReturnState(-1), PredictionContextEMPTY_RETURN_STATE }
var parents = []IPredictionContext{ a.getParent(-1), nil }
return NewArrayPredictionContext(parents, payloads)
}
}
@ -602,13 +615,13 @@ func mergeRoot(a, b *SingletonPredictionContext, rootIsWildcard bool) IPredictio
// /
func mergeArrays(a, b *ArrayPredictionContext, rootIsWildcard bool, mergeCache *DoubleDict) IPredictionContext {
if (mergeCache != nil) {
var previous = mergeCache.get(a, b)
var previous = mergeCache.get(a.hashString(), b.hashString())
if (previous != nil) {
return previous
return previous.(IPredictionContext)
}
previous = mergeCache.get(b, a)
previous = mergeCache.get(b.hashString(), a.hashString())
if (previous != nil) {
return previous
return previous.(IPredictionContext)
}
}
// merge sorted payloads a + b => M
@ -670,7 +683,7 @@ func mergeArrays(a, b *ArrayPredictionContext, rootIsWildcard bool, mergeCache *
if (k == 1) { // for just one merged element, return singleton top
var a_ = SingletonPredictionContextcreate(mergedParents[0], mergedReturnStates[0])
if (mergeCache != nil) {
mergeCache.set(a, b, a_)
mergeCache.set(a.hashString(), b.hashString(), a_)
}
return a_
}
@ -684,20 +697,20 @@ func mergeArrays(a, b *ArrayPredictionContext, rootIsWildcard bool, mergeCache *
// TODO: track whether this is possible above during merge sort for speed
if (M == a) {
if (mergeCache != nil) {
mergeCache.set(a, b, a)
mergeCache.set(a.hashString(), b.hashString(), a)
}
return a
}
if (M == b) {
if (mergeCache != nil) {
mergeCache.set(a, b, b)
mergeCache.set(a.hashString(), b.hashString(), b)
}
return b
}
combineCommonParents(mergedParents)
if (mergeCache != nil) {
mergeCache.set(a, b, M)
mergeCache.set(a.hashString(), b.hashString(), M)
}
return M
}
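Throughout this file the merge cache is now keyed by the pair (a.hashString(), b.hashString()), checked in both orders before doing any work. A minimal sketch of that symmetric two-key memoization, assuming DoubleDict behaves roughly like the nested map below (types and names here are illustrative, not the runtime's):

package main

import "fmt"

// pairMemo memoizes a symmetric merge result under two string keys,
// mirroring how mergeCache.get/set are used with hashString() pairs.
type pairMemo struct{ data map[string]map[string]string }

func newPairMemo() *pairMemo { return &pairMemo{data: make(map[string]map[string]string)} }

func (m *pairMemo) get(k1, k2 string) (string, bool) {
	if inner, ok := m.data[k1]; ok {
		v, ok := inner[k2]
		return v, ok
	}
	return "", false
}

func (m *pairMemo) set(k1, k2, v string) {
	if _, ok := m.data[k1]; !ok {
		m.data[k1] = make(map[string]string)
	}
	m.data[k1][k2] = v
}

// merge looks the pair up in both orders before doing the (stubbed) work.
func merge(a, b string, memo *pairMemo) string {
	if v, ok := memo.get(a, b); ok {
		return v
	}
	if v, ok := memo.get(b, a); ok {
		return v
	}
	v := a + "+" + b // stand-in for the real merge computation
	memo.set(a, b, v)
	return v
}

func main() {
	memo := newPairMemo()
	fmt.Println(merge("x", "y", memo)) // computed: "x+y"
	fmt.Println(merge("y", "x", memo)) // served from the cache via the reversed key order
}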

View File

@ -1,6 +1,7 @@
package antlr4
import (
"strings"
"strconv"
)
//
@ -184,7 +185,7 @@ func PredictionModehasSLLConflictTerminatingPrediction( mode int, configs *ATNCo
var c = configs.configs[i]
// NewATNConfig({semanticContext:}, c)
c = NewATNConfig4(c, SemanticContextNONE)
c = NewATNConfig2(c, SemanticContextNONE)
dup.add(c, nil)
}
configs = dup
@ -207,7 +208,7 @@ func PredictionModehasSLLConflictTerminatingPrediction( mode int, configs *ATNCo
func PredictionModehasConfigInRuleStopState(configs *ATNConfigSet) bool {
for i:= 0; i< len(configs.configs); i++ {
var c = configs.configs[i]
if _, ok := c.state.(*RuleStopState); ok {
if _, ok := c.getState().(*RuleStopState); ok {
return true
}
}
@ -227,7 +228,7 @@ func PredictionModeallConfigsInRuleStopStates(configs *ATNConfigSet) bool {
for i:= 0; i < len(configs.configs); i++ {
var c = configs.configs[i]
if _, ok := c.state.(*RuleStopState); !ok {
if _, ok := c.getState().(*RuleStopState); !ok {
return false
}
}
@ -375,7 +376,7 @@ func PredictionModeallConfigsInRuleStopStates(configs *ATNConfigSet) bool {
// we need exact ambiguity detection when the sets look like
// {@code A={{1,2}}} or {@code {{1,2},{1,2}}}, etc...</p>
//
func PredictionModeresolvesToJustOneViableAlt(altsets []*BitSet) bool {
func PredictionModeresolvesToJustOneViableAlt(altsets []*BitSet) int {
return PredictionModegetSingleViableAlt(altsets)
}
@ -401,7 +402,7 @@ func PredictionModeallSubsetsConflict(altsets []*BitSet) bool {
func PredictionModehasNonConflictingAltSet(altsets []*BitSet) bool {
for i:=0; i<len(altsets); i++{
var alts = altsets[i]
if (len(alts)==1) {
if (alts.length()==1) {
return true
}
}
@ -419,7 +420,7 @@ func PredictionModehasNonConflictingAltSet(altsets []*BitSet) bool {
func PredictionModehasConflictingAltSet(altsets []*BitSet) bool {
for i:=0; i<len(altsets); i++{
var alts = altsets[i]
if (len(alts)>1) {
if (alts.length()>1) {
return true
}
}
@ -457,7 +458,7 @@ func PredictionModeallSubsetsEqual(altsets []*BitSet) bool {
//
func PredictionModegetUniqueAlt(altsets []*BitSet) int {
var all = PredictionModegetAlts(altsets)
if (len(all)==1) {
if (all.length()==1) {
return all.minValue()
} else {
return ATNINVALID_ALT_NUMBER
@ -488,21 +489,21 @@ func PredictionModegetAlts(altsets []*BitSet) *BitSet {
// alt and not pred
// </pre>
//
func PredictionModegetConflictingAltSubsets(configs *ATNConfigSet) []int {
var configToAlts = make(map[string]int)
func PredictionModegetConflictingAltSubsets(configs *ATNConfigSet) []*BitSet {
var configToAlts = make(map[string]*BitSet)
for i :=0; i < len(configs.configs); i++ {
var c = configs.configs[i]
var key = "key_" + c.state.stateNumber + "/" + c.context
var key = "key_" + strconv.Itoa(c.getState().getStateNumber()) + "/" + c.getContext().toString()
var alts = configToAlts[key]
if (alts == nil) {
alts = NewBitSet()
configToAlts[key] = alts
}
alts.(*BitSet).add(c.alt)
alts.add(c.getAlt())
}
var values = make([]int)
var values = make([]*BitSet, 0)
for k,_ := range configToAlts {
if( strings.Index( k, "key_") != 0) {
@ -525,12 +526,12 @@ func PredictionModegetStateToAltMap(configs *ATNConfigSet) *AltDict {
var m = NewAltDict()
for _, c := range configs.configs {
var alts = m.get(c.state)
var alts = m.get(c.getState().toString())
if (alts == nil) {
alts = NewBitSet()
m.put(c.state, alts)
m.put(c.getState().toString(), alts)
}
alts.(*BitSet).add(c.alt)
alts.(*BitSet).add(c.getAlt())
}
return m
}
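getStateToAltMap above groups configurations by ATN state and collects the alternatives seen for each state; the conflict checks that follow only look at those per-state alt sets. A small sketch of that grouping, using plain ints and map[int]map[int]bool in place of the runtime's AltDict and BitSet (illustrative names only):

package main

import "fmt"

// config is a stand-in for an ATN configuration: a state plus a predicted alt.
type config struct{ state, alt int }

// stateToAlts groups the alts seen for each state, the same shape of
// information the conflict checks above work from.
func stateToAlts(configs []config) map[int]map[int]bool {
	m := make(map[int]map[int]bool)
	for _, c := range configs {
		if m[c.state] == nil {
			m[c.state] = make(map[int]bool)
		}
		m[c.state][c.alt] = true
	}
	return m
}

func main() {
	cfgs := []config{{state: 3, alt: 1}, {state: 3, alt: 2}, {state: 5, alt: 1}}
	m := stateToAlts(cfgs)
	fmt.Println(len(m[3]), len(m[5])) // 2 1: state 3 is associated with two alts
}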
@ -538,7 +539,7 @@ func PredictionModegetStateToAltMap(configs *ATNConfigSet) *AltDict {
func PredictionModehasStateAssociatedWithOneAlt (configs *ATNConfigSet) bool {
var values = PredictionModegetStateToAltMap(configs).values()
for i:=0; i<len(values); i++ {
if ( len(values[i])==1) {
if ( values[i].(*BitSet).length() ==1) {
return true
}
}

View File

@ -3,18 +3,24 @@ package antlr4
import (
"fmt"
"strings"
)
"strconv"
)
type IRecognizer interface {
getState() int
getATN() *ATN
action( _localctx IRuleContext, ruleIndex, actionIndex int)
getRuleNames() []string
getErrorListenerDispatch() IErrorListener
sempred(localctx IRuleContext, ruleIndex int, actionIndex int) bool
precpred(localctx IRuleContext, precedence int) bool
}
type Recognizer struct {
_listeners []ParseTreeListener
_listeners []IErrorListener
state int
}
@ -26,12 +32,12 @@ func NewRecognizer() *Recognizer {
}
func (rec *Recognizer) InitRecognizer() {
rec._listeners = []ParseTreeListener{ ConsoleErrorListenerINSTANCE }
rec._listeners = []IErrorListener{ ConsoleErrorListenerINSTANCE }
rec.state = -1
}
var tokenTypeMapCache = make(map[[]string]int)
var ruleIndexMapCache = make(map[[]string]int)
var tokenTypeMapCache = make(map[string]int)
var ruleIndexMapCache = make(map[string]int)
func (this *Recognizer) checkVersion(toolVersion string) {
var runtimeVersion = "4.5.2"
@ -44,12 +50,12 @@ func (this *Recognizer) action( context IRuleContext, ruleIndex, actionIndex int
panic("action not implemented on Recognizer!")
}
func (this *Recognizer) addErrorListener(listener *ParseTreeListener) {
append(this._listeners, listener)
func (this *Recognizer) addErrorListener(listener IErrorListener) {
this._listeners = append(this._listeners, listener)
}
func (this *Recognizer) removeErrorListeners() {
this._listeners = make([]ParseTreeListener, 0)
this._listeners = make([]IErrorListener, 0)
}
func (this *Recognizer) getRuleNames() []string {
@ -82,7 +88,7 @@ func (this *Recognizer) getState() int {
//
// <p>Used for XPath and tree pattern compilation.</p>
//
func (this *Recognizer) getRuleIndexMap() {
func (this *Recognizer) getRuleIndexMap() map[string]int {
panic("Method not defined!")
// var ruleNames = this.getRuleNames()
// if (ruleNames==nil) {
@ -136,11 +142,10 @@ func (this *Recognizer) getTokenType(tokenName string) int {
//}
// What is the error header, normally line/character position information?//
func (this *Recognizer) getErrorHeader(e error) string {
panic("Method not defined!")
// var line = e.getOffendingToken().line
// var column = e.getOffendingToken().column
// return "line " + line + ":" + column
func (this *Recognizer) getErrorHeader(e IRecognitionException) string {
var line = e.getOffendingToken().line
var column = e.getOffendingToken().column
return "line " + strconv.Itoa(line) + ":" + strconv.Itoa(column)
}
@ -162,11 +167,11 @@ func (this *Recognizer) getTokenErrorDisplay(t *Token) string {
return "<no token>"
}
var s = t.text()
if s==nil {
if s=="" {
if (t.tokenType==TokenEOF) {
s = "<EOF>"
} else {
s = "<" + t.tokenType + ">"
s = "<" + strconv.Itoa(t.tokenType) + ">"
}
}
s = strings.Replace(s,"\t","\\t", -1)
@ -182,19 +187,10 @@ func (this *Recognizer) getErrorListenerDispatch() IErrorListener {
// subclass needs to override these if there are sempreds or actions
// that the ATN interp needs to execute
func (this *Recognizer) sempred(localctx *RuleContext, ruleIndex int, actionIndex int) bool {
func (this *Recognizer) sempred(localctx IRuleContext, ruleIndex int, actionIndex int) bool {
return true
}
func (this *Recognizer) precpred(localctx *RuleContext, precedence int) bool {
func (this *Recognizer) precpred(localctx IRuleContext, precedence int) bool {
return true
}
//Indicate that the recognizer has changed internal state that is
//consistent with the ATN state passed in. This way we always know
//where we are in the ATN as the parser goes along. The rule
//context objects form a stack that lets us see the stack of
//invoking rules. Combine this and we have complete ATN
//configuration information.
}

View File

@ -1,7 +1,8 @@
package antlr4
import (
)
"strconv"
)
// A rule context is a record of a single rule invocation. It knows
// which context invoked it, if any. If there is no parent context, then
@ -28,16 +29,19 @@ type IRuleContext interface {
RuleNode
getInvokingState()int
setInvokingState(int)
getRuleIndex()int
getChildren() []Tree
isEmpty() bool
toString([]string, IRuleContext) string
}
type RuleContext struct {
parentCtx IRuleContext
invokingState int
ruleIndex int
children []IRuleContext
children []Tree
}
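A rule context only stores a link to the context that invoked it, so values like depth() and the invocation-stack string are computed by walking the parent chain. A minimal, self-contained sketch of that chain (the node type below is illustrative, not the runtime's):

package main

import "fmt"

// node is a stripped-down rule context: just a rule name and a parent link.
type node struct {
	name   string
	parent *node
}

// depth counts how many invocations sit above this one, including itself.
func (n *node) depth() int {
	d := 0
	for p := n; p != nil; p = p.parent {
		d++
	}
	return d
}

// stack renders the invocation stack innermost-first, e.g. "[expr stat prog]".
func (n *node) stack() string {
	s := "["
	for p := n; p != nil; p = p.parent {
		s += p.name
		if p.parent != nil {
			s += " "
		}
	}
	return s + "]"
}

func main() {
	prog := &node{name: "prog"}
	stat := &node{name: "stat", parent: prog}
	expr := &node{name: "expr", parent: stat}
	fmt.Println(expr.depth(), expr.stack()) // 3 [expr stat prog]
}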
func NewRuleContext(parent IRuleContext, invokingState int) *RuleContext {
@ -49,7 +53,7 @@ func NewRuleContext(parent IRuleContext, invokingState int) *RuleContext {
return rn
}
func (rn *RuleContext) InitRuleContext(parent *RuleContext, invokingState int) {
func (rn *RuleContext) InitRuleContext(parent IRuleContext, invokingState int) {
// What context invoked this rule?
rn.parentCtx = parent
@ -64,27 +68,36 @@ func (rn *RuleContext) InitRuleContext(parent *RuleContext, invokingState int) {
}
}
func (this *RuleContext) getParent() Tree {
return this.parentCtx
func (this *RuleContext) setChildren(elems []Tree){
this.children = elems
}
func (this *RuleContext) setParent(v Tree){
this.parentCtx = v.(IRuleContext)
}
func (this *RuleContext) getInvokingState() int {
return this.invokingState
}
func (this *RuleContext) setInvokingState(t int) {
this.invokingState = t
}
func (this *RuleContext) getRuleIndex() int{
return this.ruleIndex
}
func (this *RuleContext) getChildren() []IRuleContext {
func (this *RuleContext) getChildren() []Tree {
return this.children
}
func (this *RuleContext) depth() {
func (this *RuleContext) depth() int {
var n = 0
var p = this
var p Tree = this
for (p != nil) {
p = p.parentCtx
p = p.getParent()
n += 1
}
return n
@ -123,23 +136,27 @@ func (this *RuleContext) getText() string {
} else {
var s string
for _, child := range this.children {
s += child.getText()
s += child.(IRuleContext).getText()
}
return s
}
}
func (this *RuleContext) getChild(i int) {
func (this *RuleContext) getChild(i int) Tree {
return nil
}
func (this *RuleContext) getChildCount() {
func (this *RuleContext) getParent() Tree {
return this.parentCtx
}
func (this *RuleContext) getChildCount() int {
return 0
}
func (this *RuleContext) accept(visitor *ParseTreeVisitor) {
(*visitor).visitChildren(this)
func (this *RuleContext) accept(visitor ParseTreeVisitor) interface{} {
return visitor.visitChildren(this)
}
//need to manage circular dependencies, so export now
@ -152,29 +169,29 @@ func (this *RuleContext) toStringTree(ruleNames []string, recog IRecognizer) str
return TreestoStringTree(this, ruleNames, recog)
}
func (this *RuleContext) toString(ruleNames []string, stop *RuleContext) string {
func (this *RuleContext) toString(ruleNames []string, stop IRuleContext) string {
var p *RuleContext = this
var p IRuleContext = this
var s = "["
for (p != nil && p != stop) {
if (ruleNames == nil) {
if (!p.isEmpty()) {
s += p.invokingState
s += strconv.Itoa(p.getInvokingState())
}
} else {
var ri = p.ruleIndex
var ri = p.getRuleIndex()
var ruleName string
if (ri >= 0 && ri < len(ruleNames)) {
ruleName = ruleNames[ri]
} else {
ruleName = "" + ri
ruleName = strconv.Itoa(ri)
}
s += ruleName
}
if (p.parentCtx != nil && (ruleNames != nil || !p.parentCtx.isEmpty())) {
if (p.getParent() != nil && (ruleNames != nil || !p.getParent().(IRuleContext).isEmpty())) {
s += " "
}
p = p.parentCtx
p = p.getParent().(IRuleContext)
}
s += "]"
return s
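As a rough worked example (hypothetical rule names, not taken from this commit): with ruleNames = []string{"prog", "expr", "atom"} and an atom context whose parents are an expr and then a prog context, ctx.toString(ruleNames, nil) renders the invocation chain innermost-first as "[atom expr prog]"; with ruleNames == nil it prints the non-empty invoking-state numbers instead, e.g. "[12 5]".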

View File

@ -1,7 +1,10 @@
package antlr4
import (
)
"strconv"
"fmt"
)
// A tree structure used to record the semantic context in which
// an ATN configuration is valid. It's either a single predicate,
@ -12,8 +15,8 @@ import (
//
type SemanticContext interface {
evaluate(parser IRecognizer, outerContext *RuleContext) bool
evalPrecedence(parser IRecognizer, outerContext *RuleContext) SemanticContext
evaluate(parser IRecognizer, outerContext IRuleContext) bool
evalPrecedence(parser IRecognizer, outerContext IRuleContext) SemanticContext
equals(interface{}) bool
toString() string
}
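A minimal usage sketch (the wrapper function is hypothetical and assumes the antlr4 package, using the constructors and the package-level andContext/orContext combinators from this file):

// Hypothetical illustration of combining semantic contexts.
func combineExample() {
	var p1 SemanticContext = NewPredicate(0, 1, false) // prints as {0:1}?
	var p2 SemanticContext = NewPrecedencePredicate(3) // prints as {3>=prec}?
	both := SemanticContextandContext(p1, p2)  // true only when both operands hold
	either := SemanticContextorContext(p1, p2) // true when at least one operand holds
	fmt.Println(both.toString(), either.toString())
}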
@ -52,8 +55,6 @@ func SemanticContextorContext(a, b SemanticContext) SemanticContext {
}
type Predicate struct {
ruleIndex int
predIndex int
@ -74,9 +75,13 @@ func NewPredicate(ruleIndex, predIndex int, isCtxDependent bool) *Predicate {
var SemanticContextNONE SemanticContext = NewPredicate(-1,-1,false)
func (this *Predicate) evaluate(parser IRecognizer, outerContext *RuleContext) SemanticContext {
func (this *Predicate) evalPrecedence(parser IRecognizer, outerContext IRuleContext) SemanticContext {
return this
}
var localctx *RuleContext = nil
func (this *Predicate) evaluate(parser IRecognizer, outerContext IRuleContext) bool {
var localctx IRuleContext = nil
if (this.isCtxDependent){
localctx = outerContext
@ -85,8 +90,8 @@ func (this *Predicate) evaluate(parser IRecognizer, outerContext *RuleContext) S
return parser.sempred(localctx, this.ruleIndex, this.predIndex)
}
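For orientation, the ruleIndex/predIndex pair is resolved by the recognizer's sempred hook. A rough sketch of the shape such a hook can take; the parser type and predicate are invented for illustration and are not generated by this commit:

// Hypothetical parser with one grammar predicate.
type MyParser struct {
	allowFeatureX bool
}

// predIndex selects the predicate; context-dependent predicates receive the
// real localctx from evaluate() above, the others receive nil.
func (p *MyParser) sempred(localctx IRuleContext, ruleIndex, predIndex int) bool {
	switch predIndex {
	case 0:
		return p.allowFeatureX // e.g. a {allowFeatureX}? predicate in the grammar
	default:
		return true
	}
}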
func (this *Predicate) hashString() {
return "" + this.ruleIndex + "/" + this.predIndex + "/" + this.isCtxDependent
func (this *Predicate) hashString() string {
return strconv.Itoa(this.ruleIndex) + "/" + strconv.Itoa(this.predIndex) + "/" + fmt.Sprint(this.isCtxDependent)
}
func (this *Predicate) equals(other interface{}) bool {
@ -102,12 +107,10 @@ func (this *Predicate) equals(other interface{}) bool {
}
func (this *Predicate) toString() string {
return "{" + this.ruleIndex + ":" + this.predIndex + "}?"
return "{" + strconv.Itoa(this.ruleIndex) + ":" + strconv.Itoa(this.predIndex) + "}?"
}
type PrecedencePredicate struct {
SemanticContext
precedence int
}
@ -119,11 +122,11 @@ func NewPrecedencePredicate(precedence int) *PrecedencePredicate {
return this
}
func (this *PrecedencePredicate) evaluate(parser IRecognizer, outerContext *RuleContext) SemanticContext {
func (this *PrecedencePredicate) evaluate(parser IRecognizer, outerContext IRuleContext) bool {
return parser.precpred(outerContext, this.precedence)
}
func (this *PrecedencePredicate) evalPrecedence(parser IRecognizer, outerContext *RuleContext) SemanticContext {
func (this *PrecedencePredicate) evalPrecedence(parser IRecognizer, outerContext IRuleContext) SemanticContext {
if (parser.precpred(outerContext, this.precedence)) {
return SemanticContextNONE
} else {
@ -131,11 +134,11 @@ func (this *PrecedencePredicate) evalPrecedence(parser IRecognizer, outerContext
}
}
func (this *PrecedencePredicate) compareTo(other *PrecedencePredicate) bool {
func (this *PrecedencePredicate) compareTo(other *PrecedencePredicate) int {
return this.precedence - other.precedence
}
func (this *PrecedencePredicate) hashString() {
func (this *PrecedencePredicate) hashString() string {
return "31"
}
@ -150,12 +153,12 @@ func (this *PrecedencePredicate) equals(other interface{}) bool {
}
func (this *PrecedencePredicate) toString() string {
return "{"+this.precedence+">=prec}?"
return "{"+strconv.Itoa(this.precedence)+">=prec}?"
}
func PrecedencePredicatefilterPrecedencePredicates(set *Set) []*PrecedencePredicate {
var result = make([]*PrecedencePredicate)
var result = make([]*PrecedencePredicate, 0)
for _,v := range set.values() {
if c2, ok := v.(*PrecedencePredicate); ok {
@ -167,12 +170,10 @@ func PrecedencePredicatefilterPrecedencePredicates(set *Set) []*PrecedencePredic
}
// A semantic context which is true whenever none of the contained contexts
// is false.
// is false.
type AND struct {
SemanticContext
opnds []SemanticContext
}
@ -208,8 +209,16 @@ func NewAND(a, b SemanticContext) *AND {
operands.add(reduced)
}
vs := operands.values()
opnds := make([]SemanticContext, len(vs))
for i, v := range vs {
opnds[i] = v.(SemanticContext)
}
this := new(AND)
this.opnds = operands.values()
this.opnds = opnds
return this
}
@ -219,12 +228,17 @@ func (this *AND) equals(other interface{}) bool {
} else if _, ok := other.(*AND); !ok {
return false
} else {
return this.opnds == other.(*AND).opnds
if len(this.opnds) != len(other.(*AND).opnds) {
return false
}
for i, v := range other.(*AND).opnds {
if !this.opnds[i].equals(v) {
return false
}
}
return true
}
}
func (this *AND) hashString() {
return "" + this.opnds + "/AND"
func (this *AND) hashString() string {
return fmt.Sprint(this.opnds) + "/AND"
}
//
// {@inheritDoc}
@ -233,7 +247,7 @@ func (this *AND) hashString() {
// The evaluation of predicates by this context is short-circuiting, but
// unordered.</p>
//
func (this *AND) evaluate(parser IRecognizer, outerContext *RuleContext) SemanticContext {
func (this *AND) evaluate(parser IRecognizer, outerContext IRuleContext) bool {
for i := 0; i < len(this.opnds); i++ {
if (!this.opnds[i].evaluate(parser, outerContext)) {
return false
@ -242,14 +256,14 @@ func (this *AND) evaluate(parser IRecognizer, outerContext *RuleContext) Semanti
return true
}
func (this *AND) evalPrecedence(parser IRecognizer, outerContext *RuleContext) SemanticContext {
func (this *AND) evalPrecedence(parser IRecognizer, outerContext IRuleContext) SemanticContext {
var differs = false
var operands = make([]SemanticContext)
var operands = make([]SemanticContext, 0)
for i := 0; i < len(this.opnds); i++ {
var context = this.opnds[i]
var evaluated = context.evalPrecedence(parser, outerContext)
differs |= (evaluated != context)
differs = differs || (evaluated != context)
if (evaluated == nil) {
// The AND context is false if any element is false
return nil
@ -271,7 +285,7 @@ func (this *AND) evalPrecedence(parser IRecognizer, outerContext *RuleContext) S
for _,o := range operands {
if (result == nil){
result = 0
result = o
} else {
result = SemanticContextandContext(result, o)
}
@ -300,8 +314,6 @@ func (this *AND) toString() string {
//
type OR struct {
SemanticContext
opnds []SemanticContext
}
@ -336,8 +348,15 @@ func NewOR(a, b SemanticContext) *OR {
operands.add(reduced)
}
vs := operands.values()
opnds := make([]SemanticContext, len(vs))
for i, v := range vs {
opnds[i] = v.(SemanticContext)
}
this := new(OR)
this.opnds = operands.values()
this.opnds = opnds
return this
}
@ -348,19 +367,24 @@ func (this *OR) equals(other interface{}) bool {
} else if _, ok := other.(*OR); !ok {
return false
} else {
return this.opnds == other.(*OR).opnds
if len(this.opnds) != len(other.(*OR).opnds) {
return false
}
for i, v := range other.(*OR).opnds {
if !this.opnds[i].equals(v) {
return false
}
}
return true
}
}
func (this *OR) hashString() {
return "" + this.opnds + "/OR"
func (this *OR) hashString() string {
return fmt.Sprint(this.opnds) + "/OR"
}
// <p>
// The evaluation of predicates by this context is short-circuiting, but
// unordered.</p>
//
func (this *OR) evaluate(parser IRecognizer, outerContext *RuleContext) SemanticContext {
func (this *OR) evaluate(parser IRecognizer, outerContext IRuleContext) bool {
for i := 0; i < len(this.opnds); i++ {
if (this.opnds[i].evaluate(parser, outerContext)) {
return true
@ -369,13 +393,13 @@ func (this *OR) evaluate(parser IRecognizer, outerContext *RuleContext) Semantic
return false
}
func (this *OR) evalPrecedence(parser IRecognizer, outerContext *RuleContext) SemanticContext {
func (this *OR) evalPrecedence(parser IRecognizer, outerContext IRuleContext) SemanticContext {
var differs = false
var operands = make([]SemanticContext)
var operands = make([]SemanticContext, 0)
for i := 0; i < len(this.opnds); i++ {
var context = this.opnds[i]
var evaluated = context.evalPrecedence(parser, outerContext)
differs |= (evaluated != context)
differs = differs || (evaluated != context)
if (evaluated == SemanticContextNONE) {
// The OR context is true if any element is true
return SemanticContextNONE

View File

@ -2,11 +2,12 @@ package antlr4
import (
"strings"
"strconv"
)
type TokenSourceInputStreamPair struct {
type TokenSourceCharStreamPair struct {
tokenSource TokenSource
inputStream *InputStream
charStream CharStream
}
// A token has properties: text, type, line, character position in the line
@ -14,7 +15,7 @@ type TokenSourceInputStreamPair struct {
// we obtained this token.
type Token struct {
source *TokenSourceInputStreamPair
source *TokenSourceCharStreamPair
tokenType int // token type of the token
channel int // The parser ignores everything not on DEFAULT_CHANNEL
start int // optional return -1 if not implemented.
@ -69,17 +70,17 @@ func (this *Token) getTokenSource() TokenSource {
return this.source.tokenSource
}
func (this *Token) getInputStream() *InputStream {
return this.source.inputStream
func (this *Token) getInputStream() CharStream {
return this.source.charStream
}
type CommonToken struct {
*Token
}
func NewCommonToken(source *TokenSourceInputStreamPair, tokenType, channel, start, stop int) *CommonToken {
func NewCommonToken(source *TokenSourceCharStreamPair, tokenType, channel, start, stop int) *CommonToken {
t := CommonToken{Token{}}
t := new(CommonToken)
t.Token = new(Token)
t.source = source
t.tokenType = -1
@ -87,9 +88,9 @@ func NewCommonToken(source *TokenSourceInputStreamPair, tokenType, channel, star
t.start = start
t.stop = stop
t.tokenIndex = -1
if (t.source[0] != nil) {
t.line = source.tokenSource.line()
t.column = source.tokenSource.column()
if (t.source.tokenSource != nil) {
t.line = source.tokenSource.getLine()
t.column = source.tokenSource.getCharPositionInLine()
} else {
t.column = -1
}
@ -113,27 +114,27 @@ func NewCommonToken(source *TokenSourceInputStreamPair, tokenType, channel, star
//
// @param oldToken The token to copy.
//
func (ct *CommonToken) clone() {
func (ct *CommonToken) clone() *CommonToken {
var t = NewCommonToken(ct.source, ct.tokenType, ct.channel, ct.start,
ct.stop)
t.tokenIndex = ct.tokenIndex
t.line = ct.line
t.column = ct.column
t.text = ct.text
t._text = ct.text()
return t
}
func (this *CommonToken) text() string {
if (this._text != nil) {
if (this._text != "") {
return this._text
}
var input = this.getInputStream()
if (input == nil) {
return nil
return ""
}
var n = input.size
var n = input.size()
if (this.start < n && this.stop < n) {
return input.getText(this.start, this.stop)
return input.getTextFromInterval(NewInterval(this.start, this.stop))
} else {
return "<EOF>"
}
@ -144,8 +145,8 @@ func (this *CommonToken) setText(text string) {
}
func (this *CommonToken) toString() string {
var txt = this.text
if (txt != nil) {
var txt = this.text()
if (txt != "") {
txt = strings.Replace(txt, "\n", "", -1)
txt = strings.Replace(txt, "\r", "", -1)
txt = strings.Replace(txt, "\t", "", -1)
@ -155,14 +156,14 @@ func (this *CommonToken) toString() string {
var ch string;
if (this.channel > 0){
ch = ",channel=" + this.channel
ch = ",channel=" + strconv.Itoa(this.channel)
} else {
ch = ""
}
return "[@" + this.tokenIndex + "," + this.start + ":" + this.stop + "='" +
txt + "',<" + this.tokenType + ">" +
ch + "," + this.line + ":" + this.column + "]"
return "[@" + strconv.Itoa(this.tokenIndex) + "," + strconv.Itoa(this.start) + ":" + strconv.Itoa(this.stop) + "='" +
txt + "',<" + strconv.Itoa(this.tokenType) + ">" +
ch + "," + strconv.Itoa(this.line) + ":" + strconv.Itoa(this.column) + "]"
}
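Given the formatting above, a token renders as, for example, [@5,12:14='foo',<8>,2:3] for a hypothetical token with index 5, character range 12..14, text "foo", type 8, on line 2 at column 3; a token on a non-default channel inserts ",channel=N" right after the <type> part.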

View File

@ -1 +1,16 @@
package antlr4
type TokenSource interface {
nextToken() *Token
skip()
more()
getLine() int
getCharPositionInLine() int
getInputStream() CharStream
getSourceName() string
setTokenFactory(factory TokenFactory)
getTokenFactory() TokenFactory
}

View File

@ -1 +1,16 @@
package antlr4
type TokenStream interface {
IntStream
LT(k int) *Token
get(index int) *Token
getTokenSource() TokenSource
setTokenSource(TokenSource)
getText() string
getTextFromInterval(*Interval) string
getTextFromRuleContext(IRuleContext) string
getTextFromTokens(*Token, *Token) string
}

View File

@ -1,6 +1,7 @@
package antlr4
import (
"fmt"
"strconv"
)
// atom, set, epsilon, action, predicate, rule transitions.
@ -126,7 +127,7 @@ var TransitionserializationNames = []string{
// TODO: make all transitions sets? no, should remove set edges
type AtomTransition struct {
Transition
*Transition
label_ int
label *IntervalSet
}
@ -154,11 +155,11 @@ func (t *AtomTransition) matches( symbol, minVocabSymbol, maxVocabSymbol int )
}
func (t *AtomTransition) toString() string {
return t.label_
return strconv.Itoa(t.label_)
}
type RuleTransition struct {
Transition
*Transition
followState IATNState
ruleIndex, precedence int
@ -186,7 +187,7 @@ func (t *RuleTransition) matches(symbol, minVocabSymbol, maxVocabSymbol int) bo
type EpsilonTransition struct {
Transition
*Transition
isEpsilon bool
outermostPrecedenceReturn int
@ -213,7 +214,7 @@ func (t *EpsilonTransition) toString() string {
}
type RangeTransition struct {
Transition
*Transition
start, stop int
}
@ -246,7 +247,7 @@ func (t *RangeTransition) toString() string {
}
type AbstractPredicateTransition struct {
Transition
*Transition
}
func NewAbstractPredicateTransition ( target IATNState ) *AbstractPredicateTransition {
@ -258,7 +259,7 @@ func NewAbstractPredicateTransition ( target IATNState ) *AbstractPredicateTrans
}
type PredicateTransition struct {
Transition
*Transition
isCtxDependent bool
ruleIndex, predIndex int
@ -287,11 +288,11 @@ func (t *PredicateTransition) getPredicate() *Predicate {
}
func (t *PredicateTransition) toString() string {
return "pred_" + t.ruleIndex + ":" + t.predIndex
return "pred_" + strconv.Itoa(t.ruleIndex) + ":" + strconv.Itoa(t.predIndex)
}
type ActionTransition struct {
Transition
*Transition
isCtxDependent bool
ruleIndex, actionIndex, predIndex int
@ -317,12 +318,12 @@ func (t *ActionTransition) matches(symbol, minVocabSymbol, maxVocabSymbol int)
}
func (t *ActionTransition) toString() string {
return "action_" + t.ruleIndex + ":" + t.actionIndex
return "action_" + strconv.Itoa(t.ruleIndex) + ":" + strconv.Itoa(t.actionIndex)
}
type SetTransition struct {
Transition
*Transition
}
func NewSetTransition ( target IATNState, set *IntervalSet ) *SetTransition {
@ -365,7 +366,7 @@ func NewNotSetTransition ( target IATNState, set *IntervalSet) *NotSetTransition
t := new(NotSetTransition)
t.InitTransition( target )
t.InitSetTransition( target )
t.InitSetTransition( set )
t.serializationType = TransitionNOT_SET
@ -378,11 +379,11 @@ func (t *NotSetTransition) matches(symbol, minVocabSymbol, maxVocabSymbol int)
}
func (t *NotSetTransition) toString() string {
return '~' + t.label.toString()
return "~" + t.label.toString()
}
type WildcardTransition struct {
Transition
*Transition
}
func NewWildcardTransition ( target IATNState ) *WildcardTransition {
@ -403,7 +404,7 @@ func (t *WildcardTransition) toString() string {
}
type PrecedencePredicateTransition struct {
Transition
*Transition
precedence int
}

View File

@ -9,9 +9,12 @@ var TreeINVALID_INTERVAL = NewInterval(-1, -2)
type Tree interface {
getParent() Tree
setParent(Tree)
getPayload() interface{}
getChild(i int) Tree
getChildCount() int
getChildren() []Tree
setChildren([]Tree)
// toStringTree() string
}
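A minimal sketch of how these methods compose; the traversal helper is hypothetical and relies only on the interface above:

// Pre-order walk over any Tree.
func preOrder(t Tree, visit func(Tree)) {
	if t == nil {
		return
	}
	visit(t)
	for i := 0; i < t.getChildCount(); i++ {
		preOrder(t.getChild(i), visit)
	}
}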
@ -25,7 +28,7 @@ type ParseTree interface {
SyntaxTree
// <T> T accept(ParseTreeVisitor<? extends T> visitor);
accept(visitor *ParseTreeVisitor)
accept(visitor ParseTreeVisitor) interface{}
getText() string
// toStringTree([]string, IRecognizer) string
}
@ -48,13 +51,14 @@ type ErrorNode interface {
type ParseTreeVisitor interface {
// NOTE: removed type arguments
visit(tree *ParseTree) interface{}
visitChildren(node *RuleNode) interface{}
visitTerminal(node *TerminalNode) interface{}
visitErrorNode(node *ErrorNode) interface{}
visit(tree ParseTree) interface{}
visitChildren(node RuleNode) interface{}
visitTerminal(node TerminalNode) interface{}
visitErrorNode(node ErrorNode) interface{}
}
//func (this *ParseTreeVisitor) visit(ctx) {
// TODO
//func (this ParseTreeVisitor) visit(ctx) {
// if (Utils.isArray(ctx)) {
// var self = this
// return ctx.map(function(child) { return visitAtom(self, child)})
@ -75,19 +79,20 @@ type ParseTreeVisitor interface {
//}
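A hypothetical no-op visitor that satisfies the interface above (it has to live in the antlr4 package, since the method names are unexported); concrete visitors would embed it and override only the cases they care about:

type baseVisitor struct{}

func (v *baseVisitor) visit(tree ParseTree) interface{}            { return tree.accept(v) }
func (v *baseVisitor) visitChildren(node RuleNode) interface{}     { return nil }
func (v *baseVisitor) visitTerminal(node TerminalNode) interface{} { return nil }
func (v *baseVisitor) visitErrorNode(node ErrorNode) interface{}   { return nil }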
type ParseTreeListener interface {
visitTerminal(node *TerminalNode)
visitErrorNode(node *ErrorNode)
enterEveryRule(ctx *ParserRuleContext)
exitEveryRule(ctx *ParserRuleContext)
visitTerminal(node TerminalNode)
visitErrorNode(node ErrorNode)
enterEveryRule(ctx IParserRuleContext)
exitEveryRule(ctx IParserRuleContext)
}
type TerminalNodeImpl struct {
parentCtx *RuleContext
parentCtx IRuleContext
symbol *Token
}
func NewTerminalNodeImpl(symbol *Token) *TerminalNodeImpl {
tn := &TerminalNodeImpl{TerminalNode{}}
tn := new(TerminalNodeImpl)
tn.InitTerminalNodeImpl(symbol)
@ -99,19 +104,32 @@ func (this *TerminalNodeImpl) InitTerminalNodeImpl(symbol *Token) {
this.symbol = symbol
}
func (this *TerminalNodeImpl) getChild(i int) *Tree {
func (this *TerminalNodeImpl) getChild(i int) Tree {
return nil
}
func (this *TerminalNodeImpl) getChildren() []Tree {
return nil
}
func (this *TerminalNodeImpl) setChildren(t []Tree) {
panic("Cannot set children on terminal node")
}
func (this *TerminalNodeImpl) getSymbol() *Token {
return this.symbol
}
func (this *TerminalNodeImpl) getParent() *Tree {
func (this *TerminalNodeImpl) getParent() Tree {
return this.parentCtx
}
func (this *TerminalNodeImpl) getPayload() *Token {
func (this *TerminalNodeImpl) setParent(t Tree) {
this.parentCtx = t.(IRuleContext)
}
func (this *TerminalNodeImpl) getPayload() interface{} {
return this.symbol
}
@ -123,12 +141,12 @@ func (this *TerminalNodeImpl) getSourceInterval() *Interval {
return NewInterval(tokenIndex, tokenIndex)
}
func (this *TerminalNodeImpl) getChildCount() {
func (this *TerminalNodeImpl) getChildCount() int {
return 0
}
func (this *TerminalNodeImpl) accept(visitor *ParseTreeVisitor ) interface{} {
return (*visitor).visitTerminal(this)
func (this *TerminalNodeImpl) accept(visitor ParseTreeVisitor ) interface{} {
return visitor.visitTerminal(this)
}
func (this *TerminalNodeImpl) getText() string {
@ -139,13 +157,11 @@ func (this *TerminalNodeImpl) toString() string {
if (this.symbol.tokenType == TokenEOF) {
return "<EOF>"
} else {
return this.symbol.text
return this.symbol.text()
}
}
// Represents a token that was consumed during resynchronization
// rather than during a valid match operation. For example,
// we will create this kind of a node during single token insertion
@ -153,7 +169,7 @@ func (this *TerminalNodeImpl) toString() string {
// upon no viable alternative exceptions.
type ErrorNodeImpl struct {
TerminalNodeImpl
*TerminalNodeImpl
}
func NewErrorNodeImpl(token *Token) *ErrorNodeImpl {
@ -166,12 +182,11 @@ func (this *ErrorNodeImpl) isErrorNode() bool {
return true
}
func (this *ErrorNodeImpl) accept( visitor *ParseTreeVisitor ) interface{} {
return (*visitor).visitErrorNode(this)
func (this *ErrorNodeImpl) accept( visitor ParseTreeVisitor ) interface{} {
return visitor.visitErrorNode(this)
}
type ParseTreeWalker struct {
}
@ -180,19 +195,19 @@ func NewParseTreeWalker() *ParseTreeWalker {
return new(ParseTreeWalker)
}
func (this *ParseTreeWalker) walk(listener *ParseTreeListener, t *Tree) {
func (this *ParseTreeWalker) walk(listener ParseTreeListener, t Tree) {
if errorNode, ok := t.(*ErrorNode); ok {
(*listener).visitErrorNode(errorNode)
if errorNode, ok := t.(ErrorNode); ok {
listener.visitErrorNode(errorNode)
} else if term, ok := t.(TerminalNode); ok {
(*listener).visitTerminal(term)
listener.visitTerminal(term)
} else {
this.enterRule(listener, t)
for i := 0; i < len(t.children); i++ {
this.enterRule(listener, t.(RuleNode))
for i := 0; i < t.getChildCount(); i++ {
var child = t.getChild(i)
this.walk(listener, child)
}
this.exitRule(listener, t)
this.exitRule(listener, t.(RuleNode))
}
}
//
@ -201,14 +216,14 @@ func (this *ParseTreeWalker) walk(listener *ParseTreeListener, t *Tree) {
// {@link RuleContext}-specific event. First we trigger the generic and then
// the rule specific. We do them in reverse order upon finishing the node.
//
func (this *ParseTreeWalker) enterRule(listener *ParseTreeListener, r *RuleNode) {
var ctx = r.getRuleContext().(*ParserRuleContext)
(*listener).enterEveryRule(ctx)
func (this *ParseTreeWalker) enterRule(listener ParseTreeListener, r RuleNode) {
var ctx = r.getRuleContext().(IParserRuleContext)
listener.enterEveryRule(ctx)
ctx.enterRule(listener)
}
func (this *ParseTreeWalker) exitRule(listene *ParseTreeListener, r *RuleNode) {
var ctx = r.getRuleContext().(*ParserRuleContext)
func (this *ParseTreeWalker) exitRule(listener ParseTreeListener, r RuleNode) {
var ctx = r.getRuleContext().(IParserRuleContext)
ctx.exitRule(listener)
listener.exitEveryRule(ctx)
}
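As a usage sketch, any value implementing ParseTreeListener can be driven over a tree by the walker above; the counting listener here is hypothetical, not part of this commit:

// Counts how many rule nodes the walk enters.
type ruleCounter struct {
	rules int
}

func (c *ruleCounter) visitTerminal(node TerminalNode)       {}
func (c *ruleCounter) visitErrorNode(node ErrorNode)         {}
func (c *ruleCounter) enterEveryRule(ctx IParserRuleContext) { c.rules++ }
func (c *ruleCounter) exitEveryRule(ctx IParserRuleContext)  {}

// driven with: NewParseTreeWalker().walk(&ruleCounter{}, tree)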

View File

@ -6,7 +6,7 @@ import "fmt"
// Print out a whole tree in LISP form. {@link //getNodeText} is used on the
// node payloads to get the text for the nodes. Detect
// parse trees and extract data appropriately.
func TreestoStringTree(tree *Tree, ruleNames []string, recog *Parser) string {
func TreestoStringTree(tree Tree, ruleNames []string, recog IRecognizer) string {
if(recog!=nil) {
ruleNames = recog.getRuleNames()
@ -19,33 +19,33 @@ func TreestoStringTree(tree *Tree, ruleNames []string, recog *Parser) string {
if(c==0) {
return s
}
var res = "(" + s + ' '
var res = "(" + s + " "
if(c>0) {
s = TreestoStringTree(tree.getChild(0), ruleNames, nil)
res += s
}
for i :=1; i<c; i++ {
s = TreestoStringTree(tree.getChild(i), ruleNames, nil)
res += (' ' + s)
res += (" " + s)
}
res += ")"
return res
}
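For instance (hypothetical rule and token names): a rule node expr with leaf children a, +, b prints as (expr a + b), and nested rule nodes add their own parentheses, e.g. (expr (atom a) + (atom b)).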
func TreesgetNodeText(t *Tree, ruleNames []string, recog *Parser) string {
func TreesgetNodeText(t Tree, ruleNames []string, recog *Parser) string {
if(recog!=nil) {
ruleNames = recog.getRuleNames()
}
if(ruleNames!=nil) {
if t2, ok := t.(*RuleNode); ok {
return ruleNames[t2.getRuleContext().ruleIndex]
} else if t2, ok := t.(*ErrorNode); ok {
return fmt.Printf(t2)
} else if t2, ok := t.(*TerminalNode); ok {
if t2, ok := t.(RuleNode); ok {
return ruleNames[t2.getRuleContext().getRuleIndex()]
} else if t2, ok := t.(ErrorNode); ok {
return fmt.Sprint(t2)
} else if t2, ok := t.(TerminalNode); ok {
if(t2.getSymbol()!=nil) {
return t2.getSymbol().text
return t2.getSymbol().text()
}
}
}
@ -53,16 +53,16 @@ func TreesgetNodeText(t *Tree, ruleNames []string, recog *Parser) string {
// no recog for rule names
var payload = t.getPayload()
if p2, ok := payload.(*Token); ok {
return p2.text
return p2.text()
}
return fmt.Printf(t.getPayload())
return fmt.Sprint(t.getPayload())
}
// Return ordered list of all children of this node
func TreesgetChildren(t *Tree) []*Tree {
var list = make([]*Tree)
func TreesgetChildren(t Tree) []Tree {
var list = make([]Tree, 0)
for i := 0;i< t.getChildCount();i++ {
list = append(list, t.getChild(i))
}
@ -72,56 +72,56 @@ func TreesgetChildren(t *Tree) []*Tree {
// Return a list of all ancestors of this node. The first node of
// list is the root and the last is the parent of this node.
//
func TreesgetAncestors(t *Tree) []*Tree {
var ancestors = make([]*Tree)
func TreesgetAncestors(t Tree) []Tree {
var ancestors = make([]Tree, 0)
t = t.getParent()
for(t!=nil) {
f := make([]*Tree, t)
f := []Tree { t }
ancestors = append(f, ancestors...)
t = t.getParent()
}
return ancestors
}
func TreesfindAllTokenNodes(t *ParseTree, ttype int) []*ParseTree {
func TreesfindAllTokenNodes(t ParseTree, ttype int) []ParseTree {
return TreesfindAllNodes(t, ttype, true)
}
func TreesfindAllRuleNodes(t *ParseTree, ruleIndex int) []*ParseTree {
func TreesfindAllRuleNodes(t ParseTree, ruleIndex int) []ParseTree {
return TreesfindAllNodes(t, ruleIndex, false)
}
func TreesfindAllNodes(t *ParseTree, index int, findTokens bool) {
var nodes = make([]*ParseTree)
func TreesfindAllNodes(t ParseTree, index int, findTokens bool) []ParseTree {
var nodes = make([]ParseTree, 0)
Trees_findAllNodes(t, index, findTokens, &nodes)
return nodes
}
func Trees_findAllNodes(t *ParseTree, index int, findTokens bool, nodes []*ParseTree) {
func Trees_findAllNodes(t ParseTree, index int, findTokens bool, nodes *[]ParseTree) {
// check this node (the root) first
t2, ok := t.(*TerminalNode)
t3, ok2 := t.(*ParserRuleContext)
t2, ok := t.(TerminalNode)
t3, ok2 := t.(IParserRuleContext)
if findTokens && ok {
if(t2.getSymbol().tokenType==index) {
*nodes = append(*nodes, t2)
}
} else if(!findTokens && ok2) {
if(t3.ruleIndex==index) {
if(t3.getRuleIndex()==index) {
*nodes = append(*nodes, t3)
}
}
// check children
for i := 0;i<t.getChildCount(); i++ {
Trees_findAllNodes(t.getChild(i), index, findTokens, nodes)
Trees_findAllNodes(t.getChild(i).(ParseTree), index, findTokens, nodes)
}
}
func Treesdescendants(t *ParseTree) []*ParseTree {
var nodes = make([]*ParseTree, t)
func Treesdescendants(t ParseTree) []ParseTree {
var nodes = []ParseTree{ t }
for i := 0; i<t.getChildCount(); i++ {
nodes = append(nodes, Treesdescendants(t.getChild(i))...)
nodes = append(nodes, Treesdescendants(t.getChild(i).(ParseTree))...)
}
return nodes
}

View File

@ -6,6 +6,8 @@ import (
"strings"
"hash/fnv"
// "regexp"
"bytes"
"encoding/gob"
)
func intMin(a,b int) int {
@ -43,21 +45,17 @@ func (s *IntStack) Push(e int) {
}
func arrayToString(a []interface{}) string{
return fmt.Sprintf( a )
return fmt.Sprint( a )
}
func hashCode(s string) int {
func hashCode(s string) string {
h := fnv.New32a()
h.Write([]byte((s)))
return h.Sum32()
return fmt.Sprint(h.Sum32())
}
type Set struct {
data map[int][]interface{}
data map[string][]interface{}
hashFunction func(interface{}) string
equalsFunction func(interface{},interface{}) bool
}
@ -66,7 +64,7 @@ func NewSet(hashFunction func(interface{}) string, equalsFunction func(interface
s := new(Set)
s.data = make(map[string]interface{})
s.data = make( map[string][]interface{})
if (hashFunction == nil){
s.hashFunction = standardHashFunction
@ -84,13 +82,24 @@ func NewSet(hashFunction func(interface{}) string, equalsFunction func(interface
}
func standardEqualsFunction(a interface{}, b interface{}) bool {
return a == b
return standardHashFunction(a) == standardHashFunction(b)
}
func getBytes(key interface{}) ([]byte, error) {
var buf bytes.Buffer
enc := gob.NewEncoder(&buf)
err := enc.Encode(key)
if err != nil {
return nil, err
}
return buf.Bytes(), nil
}
func standardHashFunction(a interface{}) string {
h := fnv.New32a()
h.Write([]byte((a)))
return h.Sum32()
v,_ := getBytes(a)
h.Write(v)
return fmt.Sprint(h.Sum32())
}
func (this *Set) length() int {
@ -149,7 +158,7 @@ func (this *Set) values() []interface{} {
}
func (this *Set) toString() string {
return arrayToString(this.values())
return fmt.Sprint(this.data)
}
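A small usage sketch for the Set above; the wrapper function is hypothetical, and it assumes the add method that other parts of this commit call on Set (e.g. operands.add in SemanticContext.go). Passing nil for both functions selects the gob-based standardHashFunction and hash-based equality:

func setExample() {
	s := NewSet(nil, nil)
	s.add("a")
	s.add("b")
	fmt.Println(s.length(), s.values())
}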
@ -159,7 +168,7 @@ type BitSet struct {
func NewBitSet() *BitSet {
b := new(BitSet)
b.data = new(map[int]bool)
b.data = make(map[int]bool)
return b
}
@ -186,7 +195,7 @@ func (this *BitSet) contains(value int) bool {
}
func (this *BitSet) values() []int {
ks := make([]interface{}, len(this.data))
ks := make([]int, len(this.data))
i := 0
for k,_ := range this.data {
ks[i] = k
@ -207,19 +216,23 @@ func (this *BitSet) minValue() int {
return min
}
// TODO this may not work the same as the JavaScript version
func (this *BitSet) hashString() {
h := fnv.New32a()
h.Write([]byte(this.data))
return h.Sum32()
}
func (this *BitSet) equals(other interface{}) bool {
otherBitSet, ok := other.(BitSet); !ok
otherBitSet, ok := other.(*BitSet)
if !ok {
return false
}
return this.hashString()==otherBitSet.hashString()
if len(this.data) != len(otherBitSet.data){
return false
}
for k,v := range this.data {
if otherBitSet.data[k] != v {
return false
}
}
return true
}
func (this *BitSet) length() int {
@ -227,12 +240,10 @@ func (this *BitSet) length() int {
}
func (this *BitSet) toString() string {
return "{" + strings.Join(this.values(), ", ") + "}"
return fmt.Sprint(this.data)
}
type AltDict struct {
data map[string]interface{}
}
@ -276,7 +287,7 @@ func NewDoubleDict() *DoubleDict {
}
func (this *DoubleDict) get(a string, b string) interface{} {
var d = this.data[a] || nil
var d = this.data[a]
if (d == nil){
return nil
@ -318,7 +329,7 @@ func TitleCase(str string) string {
// return re.ReplaceAllStringFunc(str, func(s string) {
// return strings.ToUpper(s[0:1]) + s[1:2]
// })
return nil
return ""
}

View File

@ -93,6 +93,7 @@ var <superClass> = require('./<superClass>').<superClass> // TODO
type <parser.name> struct {
<superClass; null="Parser">
_interp *ParserATNSimulator
ruleNames []string
literalNames []string
@ -652,7 +653,8 @@ StructDecl(struct,ctorAttrs,attrs,getters,dispatchMethods,interfaces,extensionMe
superClass={ParserRuleContext}) ::= <<
type <struct.name> struct {
ParserRuleContext
*ParserRuleContext
parent *ParserRuleContext
parser *Parser
ruleIndex