Lint: Remove underscores from names

Will Faught 2016-05-21 01:02:49 -07:00
parent 71dba8cb91
commit b4da149732
15 changed files with 345 additions and 349 deletions
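The commit applies golint's "don't use underscores in Go names" check across the runtime: leading underscores are dropped (_input becomes input, _states becomes states), SCREAMING_SNAKE names become mixed caps (LexerATNSimulatorMIN_DFA_EDGE becomes LexerATNSimulatorMinDFAEdge), and _type, which cannot simply drop its underscore because type is a Go keyword, becomes thetype. A minimal sketch of the pattern follows; it is illustrative only, not code from this commit:

package main

import "fmt"

// Before (flagged by golint):      After (this commit's style):
//   var MIN_DFA_EDGE = 0       ->    var MinDFAEdge = 0
//   _input CharStream          ->    input CharStream
//   _type int                  ->    thetype int ("type" is a keyword)
var MinDFAEdge = 0

type lexer struct {
    thetype int
}

func main() {
    l := lexer{thetype: 7}
    fmt.Println(MinDFAEdge, l.thetype)
}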

View File

@@ -614,7 +614,7 @@ func (a *ATNDeserializer) edgeFactory(atn *ATN, typeIndex, src, trg, arg1, arg2,
 		return NewActionTransition(target, arg1, arg2, arg3 != 0)
 	case TransitionSET:
 		return NewSetTransition(target, sets[arg1])
-	case TransitionNOT_SET:
+	case TransitionNOTSET:
 		return NewNotSetTransition(target, sets[arg1])
 	case TransitionWILDCARD:
 		return NewWildcardTransition(target)

View File

@@ -261,7 +261,7 @@ func (c *CommonTokenStream) getHiddenTokensToRight(tokenIndex, channel int) []To
 		panic(strconv.Itoa(tokenIndex) + " not in 0.." + strconv.Itoa(len(c.tokens)-1))
 	}
 	var nextOnChannel = c.NextTokenOnChannel(tokenIndex+1, LexerDefaultTokenChannel)
-	var from_ = tokenIndex + 1
+	var from = tokenIndex + 1
 	// if none onchannel to right, nextOnChannel=-1 so set to = last token
 	var to int
 	if nextOnChannel == -1 {
@@ -269,7 +269,7 @@ func (c *CommonTokenStream) getHiddenTokensToRight(tokenIndex, channel int) []To
 	} else {
 		to = nextOnChannel
 	}
-	return c.filterForChannel(from_, to, channel)
+	return c.filterForChannel(from, to, channel)
 }

 // Collect all tokens on specified channel to the left of
@@ -285,9 +285,9 @@ func (c *CommonTokenStream) getHiddenTokensToLeft(tokenIndex, channel int) []Tok
 		return nil
 	}
 	// if none on channel to left, prevOnChannel=-1 then from=0
-	var from_ = prevOnChannel + 1
+	var from = prevOnChannel + 1
 	var to = tokenIndex - 1
-	return c.filterForChannel(from_, to, channel)
+	return c.filterForChannel(from, to, channel)
 }

 func (c *CommonTokenStream) filterForChannel(left, right, channel int) []Token {

View File

@@ -5,7 +5,7 @@ import "sort"
 type DFA struct {
 	atnStartState DecisionState
 	decision int
-	_states map[string]*DFAState
+	states map[string]*DFAState
 	s0 *DFAState
 	precedenceDfa bool
 }
@@ -19,7 +19,7 @@ func NewDFA(atnStartState DecisionState, decision int) *DFA {
 	d.decision = decision
 	// A set of all DFA states. Use {@link Map} so we can get old state back
 	// ({@link Set} only allows you to see if it's there).
-	d._states = make(map[string]*DFAState)
+	d.states = make(map[string]*DFAState)
 	d.s0 = nil
 	// {@code true} if d DFA is for a precedence decision otherwise,
 	// {@code false}. This is the backing field for {@link //isPrecedenceDfa},
@@ -98,7 +98,7 @@ func (d *DFA) setPrecedenceStartState(precedence int, startState *DFAState) {
 func (d *DFA) setPrecedenceDfa(precedenceDfa bool) {
 	if d.precedenceDfa != precedenceDfa {
-		d._states = make(map[string]*DFAState)
+		d.states = make(map[string]*DFAState)
 		if precedenceDfa {
 			var precedenceState = NewDFAState(-1, NewBaseATNConfigSet(false))
 			precedenceState.edges = make([]*DFAState, 0)
@@ -113,7 +113,7 @@ func (d *DFA) setPrecedenceDfa(precedenceDfa bool) {
 }

 func (d *DFA) GetStates() map[string]*DFAState {
-	return d._states
+	return d.states
 }

 type DFAStateList []*DFAState
@@ -126,9 +126,9 @@ func (a DFAStateList) Less(i, j int) bool { return a[i].stateNumber < a[j].state
 func (d *DFA) sortedStates() []*DFAState {
 	// extract the values
-	vs := make([]*DFAState, len(d._states))
+	vs := make([]*DFAState, len(d.states))
 	i := 0
-	for _, v := range d._states {
+	for _, v := range d.states {
 		vs[i] = v
 		i++
 	}

View File

@@ -31,16 +31,16 @@ type BaseLexer struct {
 	TokenStartColumn int
 	ActionType int

-	_input CharStream
-	_factory TokenFactory
-	_tokenFactorySourcePair *TokenSourceCharStreamPair
-	_token Token
-	_hitEOF bool
-	_channel int
-	_type int
-	_modeStack IntStack
-	_mode int
-	_text string
+	input CharStream
+	factory TokenFactory
+	tokenFactorySourcePair *TokenSourceCharStreamPair
+	token Token
+	hitEOF bool
+	channel int
+	thetype int
+	modeStack IntStack
+	mode int
+	text string
 }

 func NewBaseLexer(input CharStream) *BaseLexer {
@@ -49,9 +49,9 @@ func NewBaseLexer(input CharStream) *BaseLexer {
 	lexer.BaseRecognizer = NewBaseRecognizer()

-	lexer._input = input
-	lexer._factory = CommonTokenFactoryDEFAULT
-	lexer._tokenFactorySourcePair = &TokenSourceCharStreamPair{lexer, input}
+	lexer.input = input
+	lexer.factory = CommonTokenFactoryDEFAULT
+	lexer.tokenFactorySourcePair = &TokenSourceCharStreamPair{lexer, input}

 	lexer.Interpreter = nil // child classes must populate it
@@ -62,7 +62,7 @@ func NewBaseLexer(input CharStream) *BaseLexer {
 	// emissions, then set l to the last token to be Matched or
 	// something nonnil so that the auto token emit mechanism will not
 	// emit another token.
-	lexer._token = nil
+	lexer.token = nil

 	// What character index in the stream did the current token start at?
 	// Needed, for example, to get the text for current token. Set at
@@ -77,21 +77,21 @@ func NewBaseLexer(input CharStream) *BaseLexer {
 	// Once we see EOF on char stream, next token will be EOF.
 	// If you have DONE : EOF then you see DONE EOF.
-	lexer._hitEOF = false
+	lexer.hitEOF = false

 	// The channel number for the current token///
-	lexer._channel = TokenDefaultChannel
+	lexer.channel = TokenDefaultChannel

 	// The token type for the current token///
-	lexer._type = TokenInvalidType
+	lexer.thetype = TokenInvalidType

-	lexer._modeStack = make([]int, 0)
-	lexer._mode = LexerDefaultMode
+	lexer.modeStack = make([]int, 0)
+	lexer.mode = LexerDefaultMode

 	// You can set the text for the current token to override what is in
 	// the input char buffer. Use setText() or can set l instance var.
 	// /
-	lexer._text = ""
+	lexer.text = ""

 	return lexer
 }
@@ -111,20 +111,20 @@ const (
 func (b *BaseLexer) reset() {
 	// wack Lexer state variables
-	if b._input != nil {
-		b._input.Seek(0) // rewind the input
+	if b.input != nil {
+		b.input.Seek(0) // rewind the input
 	}
-	b._token = nil
-	b._type = TokenInvalidType
-	b._channel = TokenDefaultChannel
+	b.token = nil
+	b.thetype = TokenInvalidType
+	b.channel = TokenDefaultChannel
 	b.TokenStartCharIndex = -1
 	b.TokenStartColumn = -1
 	b.TokenStartLine = -1
-	b._text = ""
-	b._hitEOF = false
-	b._mode = LexerDefaultMode
-	b._modeStack = make([]int, 0)
+	b.text = ""
+	b.hitEOF = false
+	b.mode = LexerDefaultMode
+	b.modeStack = make([]int, 0)

 	b.Interpreter.reset()
 }
@@ -134,7 +134,7 @@ func (b *BaseLexer) GetInterpreter() *LexerATNSimulator {
 }

 func (b *BaseLexer) GetInputStream() CharStream {
-	return b._input
+	return b.input
 }

 func (b *BaseLexer) GetSourceName() string {
@@ -142,15 +142,15 @@ func (b *BaseLexer) GetSourceName() string {
 }

 func (b *BaseLexer) setChannel(v int) {
-	b._channel = v
+	b.channel = v
 }

 func (b *BaseLexer) GetTokenFactory() TokenFactory {
-	return b._factory
+	return b.factory
 }

 func (b *BaseLexer) setTokenFactory(f TokenFactory) {
-	b._factory = f
+	b.factory = f
 }

 func (b *BaseLexer) safeMatch() (ret int) {
@@ -166,53 +166,53 @@ func (b *BaseLexer) safeMatch() (ret int) {
 		}
 	}()

-	return b.Interpreter.Match(b._input, b._mode)
+	return b.Interpreter.Match(b.input, b.mode)
 }

 // Return a token from l source i.e., Match a token on the char stream.
 func (b *BaseLexer) NextToken() Token {
-	if b._input == nil {
+	if b.input == nil {
 		panic("NextToken requires a non-nil input stream.")
 	}

-	var tokenStartMarker = b._input.Mark()
+	var tokenStartMarker = b.input.Mark()

 	// previously in finally block
 	defer func() {
 		// make sure we release marker after Match or
 		// unbuffered char stream will keep buffering
-		b._input.Release(tokenStartMarker)
+		b.input.Release(tokenStartMarker)
 	}()

 	for true {
-		if b._hitEOF {
+		if b.hitEOF {
 			b.emitEOF()
-			return b._token
+			return b.token
 		}
-		b._token = nil
-		b._channel = TokenDefaultChannel
-		b.TokenStartCharIndex = b._input.Index()
+		b.token = nil
+		b.channel = TokenDefaultChannel
+		b.TokenStartCharIndex = b.input.Index()
 		b.TokenStartColumn = b.Interpreter.column
 		b.TokenStartLine = b.Interpreter.line
-		b._text = ""
+		b.text = ""
 		var continueOuter = false
 		for true {
-			b._type = TokenInvalidType
+			b.thetype = TokenInvalidType
 			var ttype = LexerSkip
 			ttype = b.safeMatch()
-			if b._input.LA(1) == TokenEOF {
-				b._hitEOF = true
+			if b.input.LA(1) == TokenEOF {
+				b.hitEOF = true
 			}
-			if b._type == TokenInvalidType {
-				b._type = ttype
+			if b.thetype == TokenInvalidType {
+				b.thetype = ttype
 			}
-			if b._type == LexerSkip {
+			if b.thetype == LexerSkip {
 				continueOuter = true
 				break
 			}
-			if b._type != LexerMore {
+			if b.thetype != LexerMore {
 				break
 			}
 			if PortDebug {
@@ -226,10 +226,10 @@ func (b *BaseLexer) NextToken() Token {
 		if continueOuter {
 			continue
 		}
-		if b._token == nil {
+		if b.token == nil {
 			b.emit()
 		}
-		return b._token
+		return b.token
 	}

 	return nil
@@ -237,52 +237,48 @@ func (b *BaseLexer) NextToken() Token {
 // Instruct the lexer to Skip creating a token for current lexer rule
 // and look for another token. NextToken() knows to keep looking when
-// a lexer rule finishes with token set to SKIP_TOKEN. Recall that
+// a lexer rule finishes with token set to SKIPTOKEN. Recall that
 // if token==nil at end of any token rule, it creates one for you
 // and emits it.
 // /
 func (b *BaseLexer) Skip() {
-	b._type = LexerSkip
+	b.thetype = LexerSkip
 }

 func (b *BaseLexer) More() {
-	b._type = LexerMore
+	b.thetype = LexerMore
 }
-
-func (b *BaseLexer) mode(m int) {
-	b._mode = m
-}

 func (b *BaseLexer) pushMode(m int) {
 	if LexerATNSimulatorDebug {
 		fmt.Println("pushMode " + strconv.Itoa(m))
 	}
-	b._modeStack.Push(b._mode)
-	b.mode(m)
+	b.modeStack.Push(b.mode)
+	b.mode = m
 }

 func (b *BaseLexer) popMode() int {
-	if len(b._modeStack) == 0 {
+	if len(b.modeStack) == 0 {
 		panic("Empty Stack")
 	}
 	if LexerATNSimulatorDebug {
-		fmt.Println("popMode back to " + fmt.Sprint(b._modeStack[0:len(b._modeStack)-1]))
+		fmt.Println("popMode back to " + fmt.Sprint(b.modeStack[0:len(b.modeStack)-1]))
 	}
-	i, _ := b._modeStack.Pop()
-	b.mode(i)
-	return b._mode
+	i, _ := b.modeStack.Pop()
+	b.mode = i
+	return b.mode
 }

 func (b *BaseLexer) inputStream() CharStream {
-	return b._input
+	return b.input
 }

 func (b *BaseLexer) setInputStream(input CharStream) {
-	b._input = nil
-	b._tokenFactorySourcePair = &TokenSourceCharStreamPair{b, b._input}
+	b.input = nil
+	b.tokenFactorySourcePair = &TokenSourceCharStreamPair{b, b.input}
 	b.reset()
-	b._input = input
-	b._tokenFactorySourcePair = &TokenSourceCharStreamPair{b, b._input}
+	b.input = input
+	b.tokenFactorySourcePair = &TokenSourceCharStreamPair{b, b.input}
 }

 // By default does not support multiple emits per NextToken invocation
@@ -291,7 +287,7 @@ func (b *BaseLexer) setInputStream(input CharStream) {
 // rather than a single variable as l implementation does).
 // /
 func (b *BaseLexer) emitToken(token Token) {
-	b._token = token
+	b.token = token
 }

 // The standard method called to automatically emit a token at the
@@ -304,7 +300,7 @@ func (b *BaseLexer) emit() Token {
 	if PortDebug {
 		fmt.Println("emit")
 	}
-	var t = b._factory.Create(b._tokenFactorySourcePair, b._type, b._text, b._channel, b.TokenStartCharIndex, b.getCharIndex()-1, b.TokenStartLine, b.TokenStartColumn)
+	var t = b.factory.Create(b.tokenFactorySourcePair, b.thetype, b.text, b.channel, b.TokenStartCharIndex, b.getCharIndex()-1, b.TokenStartLine, b.TokenStartColumn)
 	b.emitToken(t)
 	return t
 }
@@ -315,7 +311,7 @@ func (b *BaseLexer) emitEOF() Token {
 	if PortDebug {
 		fmt.Println("emitEOF")
 	}
-	var eof = b._factory.Create(b._tokenFactorySourcePair, TokenEOF, "", TokenDefaultChannel, b._input.Index(), b._input.Index()-1, lpos, cpos)
+	var eof = b.factory.Create(b.tokenFactorySourcePair, TokenEOF, "", TokenDefaultChannel, b.input.Index(), b.input.Index()-1, lpos, cpos)
 	b.emitToken(eof)
 	return eof
 }
@@ -329,30 +325,30 @@ func (b *BaseLexer) GetLine() int {
 }

 func (b *BaseLexer) getType() int {
-	return b._type
+	return b.thetype
 }

 func (b *BaseLexer) setType(t int) {
-	b._type = t
+	b.thetype = t
 }

 // What is the index of the current character of lookahead?///
 func (b *BaseLexer) getCharIndex() int {
-	return b._input.Index()
+	return b.input.Index()
 }

 // Return the text Matched so far for the current token or any text override.
 //Set the complete text of l token it wipes any previous changes to the text.
 func (b *BaseLexer) GetText() string {
-	if b._text != "" {
-		return b._text
+	if b.text != "" {
+		return b.text
 	}
-	return b.Interpreter.GetText(b._input)
+	return b.Interpreter.GetText(b.input)
 }

 func (b *BaseLexer) SetText(text string) {
-	b._text = text
+	b.text = text
 }

 func (b *BaseLexer) GetATN() *ATN {
@@ -380,8 +376,8 @@ func (b *BaseLexer) getAllTokens() []Token {
 func (b *BaseLexer) notifyListeners(e RecognitionException) {
 	var start = b.TokenStartCharIndex
-	var stop = b._input.Index()
-	var text = b._input.GetTextFromInterval(NewInterval(start, stop))
+	var stop = b.input.Index()
+	var text = b.input.GetTextFromInterval(NewInterval(start, stop))
 	var msg = "token recognition error at: '" + text + "'"
 	var listener = b.GetErrorListenerDispatch()
 	listener.SyntaxError(b, nil, b.TokenStartLine, b.TokenStartColumn, msg, e)
@@ -411,13 +407,13 @@ func (b *BaseLexer) getCharErrorDisplay(c rune) string {
 // to do sophisticated error recovery if you are in a fragment rule.
 // /
 func (b *BaseLexer) Recover(re RecognitionException) {
-	if b._input.LA(1) != TokenEOF {
+	if b.input.LA(1) != TokenEOF {
 		if _, ok := re.(*LexerNoViableAltException); ok {
 			// Skip a char and try again
-			b.Interpreter.consume(b._input)
+			b.Interpreter.consume(b.input)
 		} else {
 			// TODO: Do we lose character or line position information?
-			b._input.Consume()
+			b.input.Consume()
 		}
 	}
 }
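One rename in this file is more than cosmetic: once the _mode field becomes mode, the mode(m int) setter has to be deleted, because a Go struct type cannot have a field and a method with the same name; call sites switch from b.mode(m) to direct assignment, as pushMode and popMode above now do. A minimal sketch of the conflict, illustrative rather than code from the runtime:

package main

type lexer struct {
    mode int
}

// Declaring this setter alongside the field above fails to compile:
// Go rejects a struct with both a field and a method named mode.
// func (l *lexer) mode(m int) { l.mode = m }

func main() {
    l := &lexer{}
    l.mode = 1 // assign the field directly instead of calling a setter
    _ = l
}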

View File

@@ -86,22 +86,22 @@ func (l *LexerSkipAction) String() string {
 type LexerTypeAction struct {
 	*BaseLexerAction
-	_type int
+	thetype int
 }

-func NewLexerTypeAction(_type int) *LexerTypeAction {
+func NewLexerTypeAction(thetype int) *LexerTypeAction {
 	l := new(LexerTypeAction)
 	l.BaseLexerAction = NewBaseLexerAction(LexerActionTypeType)
-	l._type = _type
+	l.thetype = thetype
 	return l
 }

 func (l *LexerTypeAction) execute(lexer Lexer) {
-	lexer.setType(l._type)
+	lexer.setType(l.thetype)
 }

 func (l *LexerTypeAction) Hash() string {
-	return strconv.Itoa(l.actionType) + strconv.Itoa(l._type)
+	return strconv.Itoa(l.actionType) + strconv.Itoa(l.thetype)
 }

 func (l *LexerTypeAction) equals(other LexerAction) bool {
@@ -110,12 +110,12 @@ func (l *LexerTypeAction) equals(other LexerAction) bool {
 	} else if _, ok := other.(*LexerTypeAction); !ok {
 		return false
 	} else {
-		return l._type == other.(*LexerTypeAction)._type
+		return l.thetype == other.(*LexerTypeAction).thetype
 	}
 }

 func (l *LexerTypeAction) String() string {
-	return "actionType(" + strconv.Itoa(l._type) + ")"
+	return "actionType(" + strconv.Itoa(l.thetype) + ")"
 }

 // Implements the {@code pushMode} lexer action by calling

View File

@@ -59,7 +59,7 @@ type LexerATNSimulator struct {
 	column int
 	mode int
 	prevAccept *SimState
-	Match_calls int
+	MatchCalls int
 }

 func NewLexerATNSimulator(recog Lexer, atn *ATN, decisionToDFA []*DFA, sharedContextCache *PredictionContextCache) *LexerATNSimulator {
@@ -91,10 +91,10 @@ func NewLexerATNSimulator(recog Lexer, atn *ATN, decisionToDFA []*DFA, sharedCon
 var LexerATNSimulatorDebug = false
 var LexerATNSimulatorDFADebug = false

-var LexerATNSimulatorMIN_DFA_EDGE = 0
-var LexerATNSimulatorMAX_DFA_EDGE = 127 // forces unicode to stay in ATN
+var LexerATNSimulatorMinDFAEdge = 0
+var LexerATNSimulatorMaxDFAEdge = 127 // forces unicode to stay in ATN

-var LexerATNSimulatorMatch_calls = 0
+var LexerATNSimulatorMatchCalls = 0

 func (l *LexerATNSimulator) copyState(simulator *LexerATNSimulator) {
 	l.column = simulator.column
@@ -109,7 +109,7 @@ func (l *LexerATNSimulator) Match(input CharStream, mode int) int {
 		fmt.Println("Match")
 	}

-	l.Match_calls++
+	l.MatchCalls++
 	l.mode = mode
 	var mark = input.Mark()
@@ -153,12 +153,12 @@ func (l *LexerATNSimulator) MatchATN(input CharStream) int {
 	if LexerATNSimulatorDebug {
 		fmt.Println("MatchATN mode " + strconv.Itoa(l.mode) + " start: " + startState.String())
 	}
-	var old_mode = l.mode
-	var s0_closure = l.computeStartState(input, startState)
-	var suppressEdge = s0_closure.hasSemanticContext
-	s0_closure.hasSemanticContext = false
+	var oldMode = l.mode
+	var s0Closure = l.computeStartState(input, startState)
+	var suppressEdge = s0Closure.hasSemanticContext
+	s0Closure.hasSemanticContext = false

-	var next = l.addDFAState(s0_closure)
+	var next = l.addDFAState(s0Closure)

 	if !suppressEdge {
 		l.DecisionToDFA[l.mode].s0 = next
@@ -167,7 +167,7 @@ func (l *LexerATNSimulator) MatchATN(input CharStream) int {
 	var predict = l.execATN(input, next)

 	if LexerATNSimulatorDebug {
-		fmt.Println("DFA after MatchATN: " + l.DecisionToDFA[old_mode].ToLexerString())
+		fmt.Println("DFA after MatchATN: " + l.DecisionToDFA[oldMode].ToLexerString())
 	}
 	return predict
 }
@@ -251,11 +251,11 @@ func (l *LexerATNSimulator) execATN(input CharStream, ds0 *DFAState) int {
 // {@code t}, or {@code nil} if the target state for l edge is not
 // already cached
 func (l *LexerATNSimulator) getExistingTargetState(s *DFAState, t int) *DFAState {
-	if s.edges == nil || t < LexerATNSimulatorMIN_DFA_EDGE || t > LexerATNSimulatorMAX_DFA_EDGE {
+	if s.edges == nil || t < LexerATNSimulatorMinDFAEdge || t > LexerATNSimulatorMaxDFAEdge {
 		return nil
 	}

-	var target = s.edges[t-LexerATNSimulatorMIN_DFA_EDGE]
+	var target = s.edges[t-LexerATNSimulatorMinDFAEdge]
 	if target == nil {
 		target = nil
 	}
@@ -431,7 +431,7 @@ func (l *LexerATNSimulator) closure(input CharStream, config *LexerATNConfig, co
 	}
 	if config.context != nil && !config.context.isEmpty() {
 		for i := 0; i < config.context.length(); i++ {
-			if config.context.getReturnState(i) != BasePredictionContextEMPTY_RETURN_STATE {
+			if config.context.getReturnState(i) != BasePredictionContextEmptyReturnState {
 				var newContext = config.context.GetParent(i) // "pop" return state
 				var returnState = l.atn.states[config.context.getReturnState(i)]
 				cfg := NewLexerATNConfig2(config, returnState, newContext)
@@ -585,7 +585,7 @@ func (l *LexerATNSimulator) captureSimState(settings *SimState, input CharStream
 	settings.dfaState = dfaState
 }

-func (l *LexerATNSimulator) addDFAEdge(from_ *DFAState, tk int, to *DFAState, cfgs ATNConfigSet) *DFAState {
+func (l *LexerATNSimulator) addDFAEdge(from *DFAState, tk int, to *DFAState, cfgs ATNConfigSet) *DFAState {
 	if to == nil && cfgs != nil {
 		// leading to l call, ATNConfigSet.hasSemanticContext is used as a
 		// marker indicating dynamic predicate evaluation makes l edge
@@ -608,18 +608,18 @@ func (l *LexerATNSimulator) addDFAEdge(from_ *DFAState, tk int, to *DFAState, cf
 		}
 	}
 	// add the edge
-	if tk < LexerATNSimulatorMIN_DFA_EDGE || tk > LexerATNSimulatorMAX_DFA_EDGE {
+	if tk < LexerATNSimulatorMinDFAEdge || tk > LexerATNSimulatorMaxDFAEdge {
 		// Only track edges within the DFA bounds
 		return to
 	}
 	if LexerATNSimulatorDebug {
-		fmt.Println("EDGE " + from_.String() + " -> " + to.String() + " upon " + strconv.Itoa(tk))
+		fmt.Println("EDGE " + from.String() + " -> " + to.String() + " upon " + strconv.Itoa(tk))
 	}
-	if from_.edges == nil {
+	if from.edges == nil {
 		// make room for tokens 1..n and -1 masquerading as index 0
-		from_.edges = make([]*DFAState, LexerATNSimulatorMAX_DFA_EDGE-LexerATNSimulatorMIN_DFA_EDGE+1)
+		from.edges = make([]*DFAState, LexerATNSimulatorMaxDFAEdge-LexerATNSimulatorMinDFAEdge+1)
 	}
-	from_.edges[tk-LexerATNSimulatorMIN_DFA_EDGE] = to // connect
+	from.edges[tk-LexerATNSimulatorMinDFAEdge] = to // connect
 	return to
 }

View File

@@ -18,7 +18,7 @@ func NewLL1Analyzer(atn *ATN) *LL1Analyzer {
 // a predicate during analysis if {@code seeThruPreds==false}.
 ///
 const (
-	LL1AnalyzerHIT_PRED = TokenInvalidType
+	LL1AnalyzerHitPred = TokenInvalidType
 )

 //*
@@ -40,10 +40,10 @@ func (la *LL1Analyzer) getDecisionLookahead(s ATNState) []*IntervalSet {
 		look[alt] = NewIntervalSet()
 		var lookBusy = NewSet(nil, nil)
 		var seeThruPreds = false // fail to get lookahead upon pred
-		la._look(s.GetTransitions()[alt].getTarget(), nil, BasePredictionContextEMPTY, look[alt], lookBusy, NewBitSet(), seeThruPreds, false)
+		la.look1(s.GetTransitions()[alt].getTarget(), nil, BasePredictionContextEMPTY, look[alt], lookBusy, NewBitSet(), seeThruPreds, false)
 		// Wipe out lookahead for la alternative if we found nothing
 		// or we had a predicate when we !seeThruPreds
-		if look[alt].length() == 0 || look[alt].contains(LL1AnalyzerHIT_PRED) {
+		if look[alt].length() == 0 || look[alt].contains(LL1AnalyzerHitPred) {
 			look[alt] = nil
 		}
 	}
@@ -85,7 +85,7 @@ func (la *LL1Analyzer) look(s, stopState ATNState, ctx RuleContext) *IntervalSet
 		fmt.Println(seeThruPreds)
 		fmt.Println("=====")
 	}
-	la._look(s, stopState, lookContext, r, NewSet(nil, nil), NewBitSet(), seeThruPreds, true)
+	la.look1(s, stopState, lookContext, r, NewSet(nil, nil), NewBitSet(), seeThruPreds, true)
 	if PortDebug {
 		fmt.Println(r)
 	}
@@ -116,13 +116,13 @@ func (la *LL1Analyzer) look(s, stopState ATNState, ctx RuleContext) *IntervalSet
 // {@code NewBitSet()} for la argument.
 // @param seeThruPreds {@code true} to true semantic predicates as
 // implicitly {@code true} and "see through them", otherwise {@code false}
-// to treat semantic predicates as opaque and add {@link //HIT_PRED} to the
+// to treat semantic predicates as opaque and add {@link //HitPred} to the
 // result if one is encountered.
 // @param addEOF Add {@link Token//EOF} to the result if the end of the
 // outermost context is reached. This parameter has no effect if {@code ctx}
 // is {@code nil}.

-func (la *LL1Analyzer) __look(s, stopState ATNState, ctx PredictionContext, look *IntervalSet, lookBusy *Set, calledRuleStack *BitSet, seeThruPreds, addEOF bool, i int) {
+func (la *LL1Analyzer) look2(s, stopState ATNState, ctx PredictionContext, look *IntervalSet, lookBusy *Set, calledRuleStack *BitSet, seeThruPreds, addEOF bool, i int) {

 	returnState := la.atn.states[ctx.getReturnState(i)]
@@ -135,11 +135,11 @@ func (la *LL1Analyzer) __look(s, stopState ATNState, ctx PredictionContext, look
 	}()

 	calledRuleStack.remove(returnState.GetRuleIndex())
-	la._look(returnState, stopState, ctx.GetParent(i), look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
+	la.look1(returnState, stopState, ctx.GetParent(i), look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
 }

-func (la *LL1Analyzer) _look(s, stopState ATNState, ctx PredictionContext, look *IntervalSet, lookBusy *Set, calledRuleStack *BitSet, seeThruPreds, addEOF bool) {
+func (la *LL1Analyzer) look1(s, stopState ATNState, ctx PredictionContext, look *IntervalSet, lookBusy *Set, calledRuleStack *BitSet, seeThruPreds, addEOF bool) {

 	c := NewBaseATNConfig6(s, 0, ctx)
@@ -182,7 +182,7 @@ func (la *LL1Analyzer) _look(s, stopState ATNState, ctx PredictionContext, look
 		for i := 0; i < ctx.length(); i++ {
 			returnState := la.atn.states[ctx.getReturnState(i)]
-			la.__look(returnState, stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF, i)
+			la.look2(returnState, stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF, i)
 		}
 		return
@@ -205,7 +205,7 @@ func (la *LL1Analyzer) _look(s, stopState ATNState, ctx PredictionContext, look
 			newContext := SingletonBasePredictionContextCreate(ctx, t1.followState.GetStateNumber())
-			la.___look(stopState, newContext, look, lookBusy, calledRuleStack, seeThruPreds, addEOF, t1)
+			la.look3(stopState, newContext, look, lookBusy, calledRuleStack, seeThruPreds, addEOF, t1)
 			if PortDebug {
 				fmt.Println(look)
@@ -216,15 +216,15 @@ func (la *LL1Analyzer) _look(s, stopState ATNState, ctx PredictionContext, look
 				fmt.Println("DEBUG 9")
 			}
 			if seeThruPreds {
-				la._look(t2.getTarget(), stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
+				la.look1(t2.getTarget(), stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
 			} else {
-				look.addOne(LL1AnalyzerHIT_PRED)
+				look.addOne(LL1AnalyzerHitPred)
 			}
 		} else if t.getIsEpsilon() {
 			if PortDebug {
 				fmt.Println("DEBUG 10")
 			}
-			la._look(t.getTarget(), stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
+			la.look1(t.getTarget(), stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
 		} else if _, ok := t.(*WildcardTransition); ok {
 			if PortDebug {
 				fmt.Println("DEBUG 11")
@@ -248,7 +248,7 @@ func (la *LL1Analyzer) _look(s, stopState ATNState, ctx PredictionContext, look
 	}
 }

-func (la *LL1Analyzer) ___look(stopState ATNState, ctx PredictionContext, look *IntervalSet, lookBusy *Set, calledRuleStack *BitSet, seeThruPreds, addEOF bool, t1 *RuleTransition) {
+func (la *LL1Analyzer) look3(stopState ATNState, ctx PredictionContext, look *IntervalSet, lookBusy *Set, calledRuleStack *BitSet, seeThruPreds, addEOF bool, t1 *RuleTransition) {

 	newContext := SingletonBasePredictionContextCreate(ctx, t1.followState.GetStateNumber())
@@ -257,6 +257,6 @@ func (la *LL1Analyzer) ___look(stopState ATNState, ctx PredictionContext, look *
 	}()

 	calledRuleStack.add(t1.getTarget().GetRuleIndex())
-	la._look(t1.getTarget(), stopState, newContext, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
+	la.look1(t1.getTarget(), stopState, newContext, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
 }
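The _look/__look/___look trio, previously distinguished only by underscore count, becomes look1/look2/look3. Go has no function overloading, so helper variants with different signatures must carry distinct names; a hypothetical sketch of the constraint, not code from the runtime:

package main

import "fmt"

// Two functions in one package cannot share a name in Go, so variants
// are told apart by numbered names rather than by parameter lists.
func look1(state string)          { fmt.Println("by state:", state) }
func look2(state string, idx int) { fmt.Println("by state and index:", state, idx) }

func main() {
    look1("q0")
    look2("q0", 3)
}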

View File

@@ -34,13 +34,13 @@ type BaseParser struct {
 	Interpreter *ParserATNSimulator
 	BuildParseTrees bool

-	_input TokenStream
-	_errHandler ErrorStrategy
-	_precedenceStack IntStack
-	_ctx ParserRuleContext
-	_tracer *TraceListener
-	_parseListeners []ParseTreeListener
+	input TokenStream
+	errHandler ErrorStrategy
+	precedenceStack IntStack
+	ctx ParserRuleContext
+	tracer *TraceListener
+	parseListeners []ParseTreeListener

 	_SyntaxErrors int
 }
@@ -53,15 +53,15 @@ func NewBaseParser(input TokenStream) *BaseParser {
 	p.BaseRecognizer = NewBaseRecognizer()

 	// The input stream.
-	p._input = nil
+	p.input = nil
 	// The error handling strategy for the parser. The default value is a new
 	// instance of {@link DefaultErrorStrategy}.
-	p._errHandler = NewDefaultErrorStrategy()
-	p._precedenceStack = make([]int, 0)
-	p._precedenceStack.Push(0)
+	p.errHandler = NewDefaultErrorStrategy()
+	p.precedenceStack = make([]int, 0)
+	p.precedenceStack.Push(0)
 	// The {@link ParserRuleContext} object for the currently executing rule.
 	// p.is always non-nil during the parsing process.
-	p._ctx = nil
+	p.ctx = nil
 	// Specifies whether or not the parser should construct a parse tree during
 	// the parsing process. The default value is {@code true}.
 	p.BuildParseTrees = true
@@ -70,10 +70,10 @@ func NewBaseParser(input TokenStream) *BaseParser {
 	// later call to {@link //setTrace}{@code (false)}. The listener itself is
 	// implemented as a parser listener so p.field is not directly used by
 	// other parser methods.
-	p._tracer = nil
+	p.tracer = nil
 	// The list of {@link ParseTreeListener} listeners registered to receive
 	// events during the parse.
-	p._parseListeners = nil
+	p.parseListeners = nil
 	// The number of syntax errors Reported during parsing. p.value is
 	// incremented each time {@link //NotifyErrorListeners} is called.
 	p._SyntaxErrors = 0
@@ -92,26 +92,26 @@ var bypassAltsAtnCache = make(map[string]int)

 // reset the parser's state//
 func (p *BaseParser) reset() {
-	if p._input != nil {
-		p._input.Seek(0)
+	if p.input != nil {
+		p.input.Seek(0)
 	}
-	p._errHandler.reset(p)
-	p._ctx = nil
+	p.errHandler.reset(p)
+	p.ctx = nil
 	p._SyntaxErrors = 0
 	p.SetTrace(nil)
-	p._precedenceStack = make([]int, 0)
-	p._precedenceStack.Push(0)
+	p.precedenceStack = make([]int, 0)
+	p.precedenceStack.Push(0)
 	if p.Interpreter != nil {
 		p.Interpreter.reset()
 	}
 }

 func (p *BaseParser) GetErrorHandler() ErrorStrategy {
-	return p._errHandler
+	return p.errHandler
 }

 func (p *BaseParser) SetErrorHandler(e ErrorStrategy) {
-	p._errHandler = e
+	p.errHandler = e
 }

 // Match current input symbol against {@code ttype}. If the symbol type
@@ -143,15 +143,15 @@ func (p *BaseParser) Match(ttype int) Token {
 	}

 	if t.GetTokenType() == ttype {
-		p._errHandler.ReportMatch(p)
+		p.errHandler.ReportMatch(p)
 		p.Consume()
 	} else {
-		t = p._errHandler.RecoverInline(p)
+		t = p.errHandler.RecoverInline(p)
 		if p.BuildParseTrees && t.GetTokenIndex() == -1 {
 			// we must have conjured up a Newtoken during single token
 			// insertion
 			// if it's not the current symbol
-			p._ctx.AddErrorNode(t)
+			p.ctx.AddErrorNode(t)
 		}
 	}
@@ -181,33 +181,33 @@ func (p *BaseParser) Match(ttype int) Token {
 func (p *BaseParser) MatchWildcard() Token {
 	var t = p.GetCurrentToken()
 	if t.GetTokenType() > 0 {
-		p._errHandler.ReportMatch(p)
+		p.errHandler.ReportMatch(p)
 		p.Consume()
 	} else {
-		t = p._errHandler.RecoverInline(p)
+		t = p.errHandler.RecoverInline(p)
 		if p.BuildParseTrees && t.GetTokenIndex() == -1 {
 			// we must have conjured up a Newtoken during single token
 			// insertion
 			// if it's not the current symbol
-			p._ctx.AddErrorNode(t)
+			p.ctx.AddErrorNode(t)
 		}
 	}
 	return t
 }

 func (p *BaseParser) GetParserRuleContext() ParserRuleContext {
-	return p._ctx
+	return p.ctx
 }

 func (p *BaseParser) SetParserRuleContext(v ParserRuleContext) {
-	p._ctx = v
+	p.ctx = v
 }

 func (p *BaseParser) GetParseListeners() []ParseTreeListener {
-	if p._parseListeners == nil {
+	if p.parseListeners == nil {
 		return make([]ParseTreeListener, 0)
 	}
-	return p._parseListeners
+	return p.parseListeners
 }

 // Registers {@code listener} to receive events during the parsing process.
@@ -242,10 +242,10 @@ func (p *BaseParser) AddParseListener(listener ParseTreeListener) {
 	if listener == nil {
 		panic("listener")
 	}
-	if p._parseListeners == nil {
-		p._parseListeners = make([]ParseTreeListener, 0)
+	if p.parseListeners == nil {
+		p.parseListeners = make([]ParseTreeListener, 0)
 	}
-	p._parseListeners = append(p._parseListeners, listener)
+	p.parseListeners = append(p.parseListeners, listener)
 }

 //
@@ -257,10 +257,10 @@ func (p *BaseParser) AddParseListener(listener ParseTreeListener) {
 //
 func (p *BaseParser) RemoveParseListener(listener ParseTreeListener) {
-	if p._parseListeners != nil {
+	if p.parseListeners != nil {
 		idx := -1
-		for i, v := range p._parseListeners {
+		for i, v := range p.parseListeners {
 			if v == listener {
 				idx = i
 				break
@@ -272,24 +272,24 @@ func (p *BaseParser) RemoveParseListener(listener ParseTreeListener) {
 		}

 		// remove the listener from the slice
-		p._parseListeners = append(p._parseListeners[0:idx], p._parseListeners[idx+1:]...)
+		p.parseListeners = append(p.parseListeners[0:idx], p.parseListeners[idx+1:]...)

-		if len(p._parseListeners) == 0 {
-			p._parseListeners = nil
+		if len(p.parseListeners) == 0 {
+			p.parseListeners = nil
 		}
 	}
 }

 // Remove all parse listeners.
 func (p *BaseParser) removeParseListeners() {
-	p._parseListeners = nil
+	p.parseListeners = nil
 }

 // Notify any parse listeners of an enter rule event.
 func (p *BaseParser) TriggerEnterRuleEvent() {
-	if p._parseListeners != nil {
-		var ctx = p._ctx
-		for _, listener := range p._parseListeners {
+	if p.parseListeners != nil {
+		var ctx = p.ctx
+		for _, listener := range p.parseListeners {
 			listener.EnterEveryRule(ctx)
 			ctx.EnterRule(listener)
 		}
@@ -302,13 +302,13 @@ func (p *BaseParser) TriggerEnterRuleEvent() {
 // @see //addParseListener
 //
 func (p *BaseParser) TriggerExitRuleEvent() {
-	if p._parseListeners != nil {
+	if p.parseListeners != nil {
 		// reverse order walk of listeners
-		ctx := p._ctx
-		l := len(p._parseListeners) - 1
-		for i := range p._parseListeners {
-			listener := p._parseListeners[l-i]
+		ctx := p.ctx
+		l := len(p.parseListeners) - 1
+		for i := range p.parseListeners {
+			listener := p.parseListeners[l-i]
 			ctx.ExitRule(listener)
 			listener.ExitEveryRule(ctx)
 		}
@@ -324,12 +324,12 @@ func (p *BaseParser) GetATN() *ATN {
 }

 func (p *BaseParser) GetTokenFactory() TokenFactory {
-	return p._input.GetTokenSource().GetTokenFactory()
+	return p.input.GetTokenSource().GetTokenFactory()
 }

 // Tell our token source and error strategy about a Newway to create tokens.//
 func (p *BaseParser) setTokenFactory(factory TokenFactory) {
-	p._input.GetTokenSource().setTokenFactory(factory)
+	p.input.GetTokenSource().setTokenFactory(factory)
 }

 // The ATN with bypass alternatives is expensive to create so we create it
@@ -397,21 +397,21 @@ func (p *BaseParser) SetInputStream(input TokenStream) {
 }

 func (p *BaseParser) GetTokenStream() TokenStream {
-	return p._input
+	return p.input
 }

 // Set the token stream and reset the parser.//
 func (p *BaseParser) SetTokenStream(input TokenStream) {
-	p._input = nil
+	p.input = nil
 	p.reset()
-	p._input = input
+	p.input = input
 }

 // Match needs to return the current input symbol, which gets put
 // into the label for the associated token ref e.g., x=ID.
 //
 func (p *BaseParser) GetCurrentToken() Token {
-	return p._input.LT(1)
+	return p.input.LT(1)
 }

 func (p *BaseParser) NotifyErrorListeners(msg string, offendingToken Token, err RecognitionException) {
@@ -436,20 +436,20 @@ func (p *BaseParser) Consume() Token {
 			fmt.Println("Done consuming")
 		}
 	}
-	var hasListener = p._parseListeners != nil && len(p._parseListeners) > 0
+	var hasListener = p.parseListeners != nil && len(p.parseListeners) > 0
 	if p.BuildParseTrees || hasListener {
-		if p._errHandler.inErrorRecoveryMode(p) {
-			var node = p._ctx.AddErrorNode(o)
-			if p._parseListeners != nil {
-				for _, l := range p._parseListeners {
+		if p.errHandler.inErrorRecoveryMode(p) {
+			var node = p.ctx.AddErrorNode(o)
+			if p.parseListeners != nil {
+				for _, l := range p.parseListeners {
 					l.VisitErrorNode(node)
 				}
 			}
 		} else {
-			node := p._ctx.AddTokenNode(o)
-			if p._parseListeners != nil {
-				for _, l := range p._parseListeners {
+			node := p.ctx.AddTokenNode(o)
+			if p.parseListeners != nil {
+				for _, l := range p.parseListeners {
 					l.VisitTerminal(node)
 				}
 			}
@@ -462,47 +462,47 @@ func (p *BaseParser) Consume() Token {
 func (p *BaseParser) addContextToParseTree() {
 	// add current context to parent if we have a parent
-	if p._ctx.GetParent() != nil {
-		p._ctx.GetParent().(ParserRuleContext).AddChild(p._ctx)
+	if p.ctx.GetParent() != nil {
+		p.ctx.GetParent().(ParserRuleContext).AddChild(p.ctx)
 	}
 }

 func (p *BaseParser) EnterRule(localctx ParserRuleContext, state, ruleIndex int) {
 	p.SetState(state)
-	p._ctx = localctx
-	p._ctx.SetStart(p._input.LT(1))
+	p.ctx = localctx
+	p.ctx.SetStart(p.input.LT(1))
 	if p.BuildParseTrees {
 		p.addContextToParseTree()
 	}
-	if p._parseListeners != nil {
+	if p.parseListeners != nil {
 		p.TriggerEnterRuleEvent()
 	}
 }

 func (p *BaseParser) ExitRule() {
-	p._ctx.SetStop(p._input.LT(-1))
-	// trigger event on _ctx, before it reverts to parent
-	if p._parseListeners != nil {
+	p.ctx.SetStop(p.input.LT(-1))
+	// trigger event on ctx, before it reverts to parent
+	if p.parseListeners != nil {
 		p.TriggerExitRuleEvent()
 	}
-	p.SetState(p._ctx.GetInvokingState())
-	if p._ctx.GetParent() != nil {
-		p._ctx = p._ctx.GetParent().(ParserRuleContext)
+	p.SetState(p.ctx.GetInvokingState())
+	if p.ctx.GetParent() != nil {
+		p.ctx = p.ctx.GetParent().(ParserRuleContext)
 	} else {
-		p._ctx = nil
+		p.ctx = nil
 	}
 }

 func (p *BaseParser) EnterOuterAlt(localctx ParserRuleContext, altNum int) {
 	// if we have Newlocalctx, make sure we replace existing ctx
 	// that is previous child of parse tree
-	if p.BuildParseTrees && p._ctx != localctx {
-		if p._ctx.GetParent() != nil {
-			p._ctx.GetParent().(ParserRuleContext).RemoveLastChild()
-			p._ctx.GetParent().(ParserRuleContext).AddChild(localctx)
+	if p.BuildParseTrees && p.ctx != localctx {
+		if p.ctx.GetParent() != nil {
+			p.ctx.GetParent().(ParserRuleContext).RemoveLastChild()
+			p.ctx.GetParent().(ParserRuleContext).AddChild(localctx)
 		}
 	}
-	p._ctx = localctx
+	p.ctx = localctx
 }

 // Get the precedence level for the top-most precedence rule.
@@ -511,19 +511,19 @@ func (p *BaseParser) EnterOuterAlt(localctx ParserRuleContext, altNum int) {
 // the parser context is not nested within a precedence rule.
 func (p *BaseParser) GetPrecedence() int {
-	if len(p._precedenceStack) == 0 {
+	if len(p.precedenceStack) == 0 {
 		return -1
 	}
-	return p._precedenceStack[len(p._precedenceStack)-1]
+	return p.precedenceStack[len(p.precedenceStack)-1]
 }

 func (p *BaseParser) EnterRecursionRule(localctx ParserRuleContext, state, ruleIndex, precedence int) {
 	p.SetState(state)
-	p._precedenceStack.Push(precedence)
-	p._ctx = localctx
-	p._ctx.SetStart(p._input.LT(1))
-	if p._parseListeners != nil {
+	p.precedenceStack.Push(precedence)
+	p.ctx = localctx
+	p.ctx.SetStart(p.input.LT(1))
+	if p.parseListeners != nil {
 		p.TriggerEnterRuleEvent() // simulates rule entry for
 		// left-recursive rules
 	}
@@ -533,34 +533,34 @@ func (p *BaseParser) EnterRecursionRule(localctx ParserRuleContext, state, ruleI
 // Like {@link //EnterRule} but for recursive rules.
 func (p *BaseParser) PushNewRecursionContext(localctx ParserRuleContext, state, ruleIndex int) {
-	var previous = p._ctx
+	var previous = p.ctx
 	previous.SetParent(localctx)
 	previous.SetInvokingState(state)
-	previous.SetStop(p._input.LT(-1))
+	previous.SetStop(p.input.LT(-1))

-	p._ctx = localctx
-	p._ctx.SetStart(previous.GetStart())
+	p.ctx = localctx
+	p.ctx.SetStart(previous.GetStart())
 	if p.BuildParseTrees {
-		p._ctx.AddChild(previous)
+		p.ctx.AddChild(previous)
 	}
-	if p._parseListeners != nil {
+	if p.parseListeners != nil {
 		p.TriggerEnterRuleEvent() // simulates rule entry for
 		// left-recursive rules
 	}
 }

 func (p *BaseParser) UnrollRecursionContexts(parentCtx ParserRuleContext) {
-	p._precedenceStack.Pop()
-	p._ctx.SetStop(p._input.LT(-1))
-	var retCtx = p._ctx // save current ctx (return value)
-	// unroll so _ctx is as it was before call to recursive method
-	if p._parseListeners != nil {
-		for p._ctx != parentCtx {
+	p.precedenceStack.Pop()
+	p.ctx.SetStop(p.input.LT(-1))
+	var retCtx = p.ctx // save current ctx (return value)
+	// unroll so ctx is as it was before call to recursive method
+	if p.parseListeners != nil {
+		for p.ctx != parentCtx {
 			p.TriggerExitRuleEvent()
-			p._ctx = p._ctx.GetParent().(ParserRuleContext)
+			p.ctx = p.ctx.GetParent().(ParserRuleContext)
 		}
 	} else {
-		p._ctx = parentCtx
+		p.ctx = parentCtx
 	}
 	// hook into tree
 	retCtx.SetParent(parentCtx)
@@ -571,7 +571,7 @@ func (p *BaseParser) UnrollRecursionContexts(parentCtx ParserRuleContext) {
 }

 func (p *BaseParser) GetInvokingContext(ruleIndex int) ParserRuleContext {
-	var ctx = p._ctx
+	var ctx = p.ctx
 	for ctx != nil {
 		if ctx.GetRuleIndex() == ruleIndex {
 			return ctx
@@ -582,7 +582,7 @@ func (p *BaseParser) GetInvokingContext(ruleIndex int) ParserRuleContext {
 }

 func (p *BaseParser) Precpred(localctx RuleContext, precedence int) bool {
-	return precedence >= p._precedenceStack[len(p._precedenceStack)-1]
+	return precedence >= p.precedenceStack[len(p.precedenceStack)-1]
 }

 func (p *BaseParser) inContext(context ParserRuleContext) bool {
@@ -606,7 +606,7 @@ func (p *BaseParser) inContext(context ParserRuleContext) bool {
 func (p *BaseParser) IsExpectedToken(symbol int) bool {
 	var atn = p.Interpreter.atn
-	var ctx = p._ctx
+	var ctx = p.ctx
 	var s = atn.states[p.state]
 	var following = atn.NextTokens(s, nil)
 	if following.contains(symbol) {
@@ -638,7 +638,7 @@ func (p *BaseParser) IsExpectedToken(symbol int) bool {
 // @see ATN//getExpectedTokens(int, RuleContext)
 //
 func (p *BaseParser) GetExpectedTokens() *IntervalSet {
-	return p.Interpreter.atn.getExpectedTokens(p.state, p._ctx)
+	return p.Interpreter.atn.getExpectedTokens(p.state, p.ctx)
 }

 func (p *BaseParser) GetExpectedTokensWithinCurrentRule() *IntervalSet {
@@ -666,7 +666,7 @@ func (p *BaseParser) GetRuleIndex(ruleName string) int {
 func (p *BaseParser) GetRuleInvocationStack(c ParserRuleContext) []string {
 	if c == nil {
-		c = p._ctx
+		c = p.ctx
 	}
 	var stack = make([]string, 0)
 	for c != nil {
@@ -718,13 +718,13 @@ func (p *BaseParser) GetSourceName() string {
 //
 func (p *BaseParser) SetTrace(trace *TraceListener) {
 	if trace == nil {
-		p.RemoveParseListener(p._tracer)
-		p._tracer = nil
+		p.RemoveParseListener(p.tracer)
+		p.tracer = nil
 	} else {
-		if p._tracer != nil {
-			p.RemoveParseListener(p._tracer)
+		if p.tracer != nil {
+			p.RemoveParseListener(p.tracer)
 		}
-		p._tracer = NewTraceListener(p)
-		p.AddParseListener(p._tracer)
+		p.tracer = NewTraceListener(p)
+		p.AddParseListener(p.tracer)
 	}
 }

View File

@ -11,12 +11,12 @@ type ParserATNSimulator struct {
parser Parser parser Parser
predictionMode int predictionMode int
_input TokenStream input TokenStream
_startIndex int startIndex int
_dfa *DFA dfa *DFA
DecisionToDFA []*DFA DecisionToDFA []*DFA
mergeCache *DoubleDict mergeCache *DoubleDict
_outerContext ParserRuleContext outerContext ParserRuleContext
} }
func NewParserATNSimulator(parser Parser, atn *ATN, decisionToDFA []*DFA, sharedContextCache *PredictionContextCache) *ParserATNSimulator { func NewParserATNSimulator(parser Parser, atn *ATN, decisionToDFA []*DFA, sharedContextCache *PredictionContextCache) *ParserATNSimulator {
@ -30,10 +30,10 @@ func NewParserATNSimulator(parser Parser, atn *ATN, decisionToDFA []*DFA, shared
// SLL, LL, or LL + exact ambig detection?// // SLL, LL, or LL + exact ambig detection?//
p.predictionMode = PredictionModeLL p.predictionMode = PredictionModeLL
// LAME globals to avoid parameters!!!!! I need these down deep in predTransition // LAME globals to avoid parameters!!!!! I need these down deep in predTransition
p._input = nil p.input = nil
p._startIndex = 0 p.startIndex = 0
p._outerContext = nil p.outerContext = nil
p._dfa = nil p.dfa = nil
// Each prediction operation uses a cache for merge of prediction contexts. // Each prediction operation uses a cache for merge of prediction contexts.
// Don't keep around as it wastes huge amounts of memory. DoubleKeyMap // Don't keep around as it wastes huge amounts of memory. DoubleKeyMap
// isn't Synchronized but we're ok since two threads shouldn't reuse same // isn't Synchronized but we're ok since two threads shouldn't reuse same
@ -77,17 +77,17 @@ func (p *ParserATNSimulator) AdaptivePredict(input TokenStream, decision int, ou
strconv.Itoa(input.LT(1).GetColumn())) strconv.Itoa(input.LT(1).GetColumn()))
} }
p._input = input p.input = input
p._startIndex = input.Index() p.startIndex = input.Index()
p._outerContext = outerContext p.outerContext = outerContext
var dfa = p.DecisionToDFA[decision] var dfa = p.DecisionToDFA[decision]
p._dfa = dfa p.dfa = dfa
var m = input.Mark() var m = input.Mark()
var index = input.Index() var index = input.Index()
defer func() { defer func() {
p._dfa = nil p.dfa = nil
p.mergeCache = nil // wack cache after each prediction p.mergeCache = nil // wack cache after each prediction
input.Seek(index) input.Seek(index)
input.Release(m) input.Release(m)
@ -127,7 +127,7 @@ func (p *ParserATNSimulator) AdaptivePredict(input TokenStream, decision int, ou
} }
} }
var fullCtx = false var fullCtx = false
var s0_closure = p.computeStartState(dfa.atnStartState, RuleContextEmpty, fullCtx) var s0Closure = p.computeStartState(dfa.atnStartState, RuleContextEmpty, fullCtx)
if dfa.precedenceDfa { if dfa.precedenceDfa {
// If p is a precedence DFA, we use applyPrecedenceFilter // If p is a precedence DFA, we use applyPrecedenceFilter
@ -136,11 +136,11 @@ func (p *ParserATNSimulator) AdaptivePredict(input TokenStream, decision int, ou
// appropriate start state for the precedence level rather // appropriate start state for the precedence level rather
// than simply setting DFA.s0. // than simply setting DFA.s0.
// //
s0_closure = p.applyPrecedenceFilter(s0_closure) s0Closure = p.applyPrecedenceFilter(s0Closure)
s0 = p.addDFAState(dfa, NewDFAState(-1, s0_closure)) s0 = p.addDFAState(dfa, NewDFAState(-1, s0Closure))
dfa.setPrecedenceStartState(p.parser.GetPrecedence(), s0) dfa.setPrecedenceStartState(p.parser.GetPrecedence(), s0)
} else { } else {
s0 = p.addDFAState(dfa, NewDFAState(-1, s0_closure)) s0 = p.addDFAState(dfa, NewDFAState(-1, s0Closure))
dfa.s0 = s0 dfa.s0 = s0
} }
} }
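
setPrecedenceStartState above caches a separate DFA start state per precedence level instead of a single DFA.s0. A sketch of that cache shape, assuming a simple map keyed by precedence (the runtime's actual storage may differ):

package main

import "fmt"

// state is a stand-in for *DFAState.
type state struct{ id int }

type precDFA struct {
	starts map[int]*state // precedence level -> start state
}

// startFor returns the cached start state for a precedence level,
// computing and recording it on first use.
func (d *precDFA) startFor(prec int, compute func() *state) *state {
	if s, ok := d.starts[prec]; ok {
		return s
	}
	s := compute()
	d.starts[prec] = s
	return s
}

func main() {
	d := &precDFA{starts: map[int]*state{}}
	a := d.startFor(2, func() *state { return &state{id: 1} })
	b := d.startFor(2, func() *state { return &state{id: 99} })
	fmt.Println(a == b) // true: second lookup reuses the cached state
}
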
@@ -248,9 +248,9 @@ func (p *ParserATNSimulator) execATN(dfa *DFA, s0 *DFAState, input TokenStream,
fmt.Println("ctx sensitive state " + outerContext.String(nil, nil) + " in " + D.String()) fmt.Println("ctx sensitive state " + outerContext.String(nil, nil) + " in " + D.String())
} }
var fullCtx = true var fullCtx = true
var s0_closure = p.computeStartState(dfa.atnStartState, outerContext, fullCtx) var s0Closure = p.computeStartState(dfa.atnStartState, outerContext, fullCtx)
p.ReportAttemptingFullContext(dfa, conflictingAlts, D.configs, startIndex, input.Index()) p.ReportAttemptingFullContext(dfa, conflictingAlts, D.configs, startIndex, input.Index())
alt := p.execATNWithFullContext(dfa, D, s0_closure, input, startIndex, outerContext) alt := p.execATNWithFullContext(dfa, D, s0Closure, input, startIndex, outerContext)
return alt return alt
} }
if D.isAcceptState { if D.isAcceptState {
@@ -742,7 +742,7 @@ func (p *ParserATNSimulator) applyPrecedenceFilter(configs ATNConfigSet) ATNConf
if config.GetAlt() != 1 { if config.GetAlt() != 1 {
continue continue
} }
var updatedContext = config.GetSemanticContext().evalPrecedence(p.parser, p._outerContext) var updatedContext = config.GetSemanticContext().evalPrecedence(p.parser, p.outerContext)
if updatedContext == nil { if updatedContext == nil {
// the configuration was eliminated // the configuration was eliminated
continue continue
@@ -998,7 +998,7 @@ func (p *ParserATNSimulator) closureCheckingStopState(config ATNConfig, configs
// run thru all possible stack tops in ctx // run thru all possible stack tops in ctx
if !config.GetContext().isEmpty() { if !config.GetContext().isEmpty() {
for i := 0; i < config.GetContext().length(); i++ { for i := 0; i < config.GetContext().length(); i++ {
if config.GetContext().getReturnState(i) == BasePredictionContextEMPTY_RETURN_STATE { if config.GetContext().getReturnState(i) == BasePredictionContextEmptyReturnState {
if fullCtx { if fullCtx {
configs.Add(NewBaseATNConfig1(config, config.GetState(), BasePredictionContextEMPTY), p.mergeCache) configs.Add(NewBaseATNConfig1(config, config.GetState(), BasePredictionContextEMPTY), p.mergeCache)
continue continue
@@ -1010,7 +1010,7 @@ func (p *ParserATNSimulator) closureCheckingStopState(config ATNConfig, configs
} }
fmt.Println("FALLING off rule " + p.getRuleName(config.GetState().GetRuleIndex())) fmt.Println("FALLING off rule " + p.getRuleName(config.GetState().GetRuleIndex()))
} }
p.closure_(config, configs, closureBusy, collectPredicates, fullCtx, depth, treatEOFAsEpsilon) p.closureWork(config, configs, closureBusy, collectPredicates, fullCtx, depth, treatEOFAsEpsilon)
} }
continue continue
} }
@@ -1039,13 +1039,13 @@ func (p *ParserATNSimulator) closureCheckingStopState(config ATNConfig, configs
} }
} }
} }
p.closure_(config, configs, closureBusy, collectPredicates, fullCtx, depth, treatEOFAsEpsilon) p.closureWork(config, configs, closureBusy, collectPredicates, fullCtx, depth, treatEOFAsEpsilon)
} }
// Do the actual work of walking epsilon edges// // Do the actual work of walking epsilon edges//
func (p *ParserATNSimulator) closure_(config ATNConfig, configs ATNConfigSet, closureBusy *Set, collectPredicates, fullCtx bool, depth int, treatEOFAsEpsilon bool) { func (p *ParserATNSimulator) closureWork(config ATNConfig, configs ATNConfigSet, closureBusy *Set, collectPredicates, fullCtx bool, depth int, treatEOFAsEpsilon bool) {
if PortDebug { if PortDebug {
fmt.Println("closure_") fmt.Println("closureWork")
} }
var state = config.GetState() var state = config.GetState()
// optimization // optimization
@@ -1094,11 +1094,11 @@ func (p *ParserATNSimulator) closure_(config ATNConfig, configs ATNConfigSet, cl
} }
} }
if p._dfa != nil && p._dfa.precedenceDfa { if p.dfa != nil && p.dfa.precedenceDfa {
if PortDebug { if PortDebug {
fmt.Println("DEBUG 4") fmt.Println("DEBUG 4")
} }
if t.(*EpsilonTransition).outermostPrecedenceReturn == p._dfa.atnStartState.GetRuleIndex() { if t.(*EpsilonTransition).outermostPrecedenceReturn == p.dfa.atnStartState.GetRuleIndex() {
c.setPrecedenceFilterSuppressed(true) c.setPrecedenceFilterSuppressed(true)
} }
} }
@@ -1197,10 +1197,10 @@ func (p *ParserATNSimulator) precedenceTransition(config ATNConfig,
// during closure, which dramatically reduces the size of // during closure, which dramatically reduces the size of
// the config sets. It also obviates the need to test predicates // the config sets. It also obviates the need to test predicates
// later during conflict resolution. // later during conflict resolution.
var currentPosition = p._input.Index() var currentPosition = p.input.Index()
p._input.Seek(p._startIndex) p.input.Seek(p.startIndex)
var predSucceeds = pt.getPredicate().evaluate(p.parser, p._outerContext) var predSucceeds = pt.getPredicate().evaluate(p.parser, p.outerContext)
p._input.Seek(currentPosition) p.input.Seek(currentPosition)
if predSucceeds { if predSucceeds {
c = NewBaseATNConfig4(config, pt.getTarget()) // no pred context c = NewBaseATNConfig4(config, pt.getTarget()) // no pred context
} }
@@ -1233,10 +1233,10 @@ func (p *ParserATNSimulator) predTransition(config ATNConfig, pt *PredicateTrans
// during closure, which dramatically reduces the size of // during closure, which dramatically reduces the size of
// the config sets. It also obviates the need to test predicates // the config sets. It also obviates the need to test predicates
// later during conflict resolution. // later during conflict resolution.
var currentPosition = p._input.Index() var currentPosition = p.input.Index()
p._input.Seek(p._startIndex) p.input.Seek(p.startIndex)
var predSucceeds = pt.getPredicate().evaluate(p.parser, p._outerContext) var predSucceeds = pt.getPredicate().evaluate(p.parser, p.outerContext)
p._input.Seek(currentPosition) p.input.Seek(currentPosition)
if predSucceeds { if predSucceeds {
c = NewBaseATNConfig4(config, pt.getTarget()) // no pred context c = NewBaseATNConfig4(config, pt.getTarget()) // no pred context
} }
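
Both precedenceTransition and predTransition above evaluate predicates as if the parser were still at the decision start: save the current index, seek back to startIndex, evaluate, restore. A minimal sketch of that evaluate-at-start helper (the names are hypothetical):

package main

import "fmt"

type input struct{ pos int }

func (in *input) Index() int { return in.pos }
func (in *input) Seek(i int) { in.pos = i }

// evalAtStart rewinds to the decision start before running the
// predicate, then puts the stream back, as both transitions above do.
func evalAtStart(in *input, startIndex int, pred func() bool) bool {
	cur := in.Index()
	in.Seek(startIndex)
	ok := pred()
	in.Seek(cur)
	return ok
}

func main() {
	in := &input{pos: 7}
	ok := evalAtStart(in, 2, func() bool { return in.Index() == 2 })
	fmt.Println(ok, in.pos) // true 7: predicate saw index 2, position restored
}
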
@@ -1410,21 +1410,21 @@ func (p *ParserATNSimulator) getUniqueAlt(configs ATNConfigSet) int {
// otherwise p method returns the result of calling {@link //addDFAState} // otherwise p method returns the result of calling {@link //addDFAState}
// on {@code to} // on {@code to}
// //
func (p *ParserATNSimulator) addDFAEdge(dfa *DFA, from_ *DFAState, t int, to *DFAState) *DFAState { func (p *ParserATNSimulator) addDFAEdge(dfa *DFA, from *DFAState, t int, to *DFAState) *DFAState {
if ParserATNSimulatorDebug { if ParserATNSimulatorDebug {
fmt.Println("EDGE " + from_.String() + " -> " + to.String() + " upon " + p.GetTokenName(t)) fmt.Println("EDGE " + from.String() + " -> " + to.String() + " upon " + p.GetTokenName(t))
} }
if to == nil { if to == nil {
return nil return nil
} }
to = p.addDFAState(dfa, to) // used existing if possible not incoming to = p.addDFAState(dfa, to) // used existing if possible not incoming
if from_ == nil || t < -1 || t > p.atn.maxTokenType { if from == nil || t < -1 || t > p.atn.maxTokenType {
return to return to
} }
if from_.edges == nil { if from.edges == nil {
from_.edges = make([]*DFAState, p.atn.maxTokenType+1+1) from.edges = make([]*DFAState, p.atn.maxTokenType+1+1)
} }
from_.edges[t+1] = to // connect from.edges[t+1] = to // connect
if ParserATNSimulatorDebug { if ParserATNSimulatorDebug {
var names []string var names []string
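
addDFAEdge stores outgoing edges in a slice indexed by t+1 so that token type -1 (EOF) lands in slot 0, which is why the slice is sized maxTokenType+1+1. A small sketch of that offset trick:

package main

import "fmt"

const maxTokenType = 5

type dfaState struct {
	id    int
	edges []*dfaState
}

// connect mirrors from.edges[t+1] = to in the diff: slot 0 is reserved
// for t == -1 (EOF), so the slice needs maxTokenType+2 entries.
func connect(from, to *dfaState, t int) {
	if from.edges == nil {
		from.edges = make([]*dfaState, maxTokenType+1+1)
	}
	from.edges[t+1] = to
}

func main() {
	a, b := &dfaState{id: 1}, &dfaState{id: 2}
	connect(a, b, -1)          // EOF edge
	fmt.Println(a.edges[0].id) // 2
}
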

View File

@@ -32,12 +32,12 @@ func NewBasePredictionContext(cachedHashString string) *BasePredictionContext {
// {@code//+x =//}. // {@code//+x =//}.
// / // /
const ( const (
BasePredictionContextEMPTY_RETURN_STATE = 0x7FFFFFFF BasePredictionContextEmptyReturnState = 0x7FFFFFFF
) )
// Represents {@code $} in an array in full context mode, when {@code $} // Represents {@code $} in an array in full context mode, when {@code $}
// doesn't mean wildcard: {@code $ + x = [$,x]}. Here, // doesn't mean wildcard: {@code $ + x = [$,x]}. Here,
// {@code $} = {@link //EMPTY_RETURN_STATE}. // {@code $} = {@link //EmptyReturnState}.
// / // /
var BasePredictionContextglobalNodeCount = 1 var BasePredictionContextglobalNodeCount = 1
@@ -151,7 +151,7 @@ func NewBaseSingletonPredictionContext(parent PredictionContext, returnState int
} }
func SingletonBasePredictionContextCreate(parent PredictionContext, returnState int) PredictionContext { func SingletonBasePredictionContextCreate(parent PredictionContext, returnState int) PredictionContext {
if returnState == BasePredictionContextEMPTY_RETURN_STATE && parent == nil { if returnState == BasePredictionContextEmptyReturnState && parent == nil {
// someone can pass in the bits of an array ctx that mean $ // someone can pass in the bits of an array ctx that mean $
return BasePredictionContextEMPTY return BasePredictionContextEMPTY
} }
@@ -172,7 +172,7 @@ func (b *BaseSingletonPredictionContext) getReturnState(index int) int {
} }
func (b *BaseSingletonPredictionContext) hasEmptyPath() bool { func (b *BaseSingletonPredictionContext) hasEmptyPath() bool {
return b.returnState == BasePredictionContextEMPTY_RETURN_STATE return b.returnState == BasePredictionContextEmptyReturnState
} }
func (b *BaseSingletonPredictionContext) equals(other PredictionContext) bool { func (b *BaseSingletonPredictionContext) equals(other PredictionContext) bool {
@@ -209,7 +209,7 @@ func (b *BaseSingletonPredictionContext) String() string {
} }
if len(up) == 0 { if len(up) == 0 {
if b.returnState == BasePredictionContextEMPTY_RETURN_STATE { if b.returnState == BasePredictionContextEmptyReturnState {
return "$" return "$"
} }
@@ -229,7 +229,7 @@ func NewEmptyPredictionContext() *EmptyPredictionContext {
p := new(EmptyPredictionContext) p := new(EmptyPredictionContext)
p.BaseSingletonPredictionContext = NewBaseSingletonPredictionContext(nil, BasePredictionContextEMPTY_RETURN_STATE) p.BaseSingletonPredictionContext = NewBaseSingletonPredictionContext(nil, BasePredictionContextEmptyReturnState)
return p return p
} }
@@ -265,7 +265,7 @@ func NewArrayPredictionContext(parents []PredictionContext, returnStates []int)
// Parent can be nil only if full ctx mode and we make an array // Parent can be nil only if full ctx mode and we make an array
// from {@link //EMPTY} and non-empty. We merge {@link //EMPTY} by using // from {@link //EMPTY} and non-empty. We merge {@link //EMPTY} by using
// nil parent and // nil parent and
// returnState == {@link //EMPTY_RETURN_STATE}. // returnState == {@link //EmptyReturnState}.
c := new(ArrayPredictionContext) c := new(ArrayPredictionContext)
c.BasePredictionContext = NewBasePredictionContext("") c.BasePredictionContext = NewBasePredictionContext("")
@@ -285,13 +285,13 @@ func (a *ArrayPredictionContext) GetReturnStates() []int {
} }
func (a *ArrayPredictionContext) hasEmptyPath() bool { func (a *ArrayPredictionContext) hasEmptyPath() bool {
return a.getReturnState(a.length()-1) == BasePredictionContextEMPTY_RETURN_STATE return a.getReturnState(a.length()-1) == BasePredictionContextEmptyReturnState
} }
func (a *ArrayPredictionContext) isEmpty() bool { func (a *ArrayPredictionContext) isEmpty() bool {
// since EMPTY_RETURN_STATE can only appear in the last position, we // since EmptyReturnState can only appear in the last position, we
// don't need to verify that size==1 // don't need to verify that size==1
return a.returnStates[0] == BasePredictionContextEMPTY_RETURN_STATE return a.returnStates[0] == BasePredictionContextEmptyReturnState
} }
func (a *ArrayPredictionContext) length() int { func (a *ArrayPredictionContext) length() int {
@@ -327,7 +327,7 @@ func (a *ArrayPredictionContext) String() string {
if i > 0 { if i > 0 {
s = s + ", " s = s + ", "
} }
if a.returnStates[i] == BasePredictionContextEMPTY_RETURN_STATE { if a.returnStates[i] == BasePredictionContextEmptyReturnState {
s = s + "$" s = s + "$"
continue continue
} }
@@ -567,11 +567,11 @@ func mergeRoot(a, b SingletonPredictionContext, rootIsWildcard bool) PredictionC
if a == BasePredictionContextEMPTY && b == BasePredictionContextEMPTY { if a == BasePredictionContextEMPTY && b == BasePredictionContextEMPTY {
return BasePredictionContextEMPTY // $ + $ = $ return BasePredictionContextEMPTY // $ + $ = $
} else if a == BasePredictionContextEMPTY { // $ + x = [$,x] } else if a == BasePredictionContextEMPTY { // $ + x = [$,x]
var payloads = []int{b.getReturnState(-1), BasePredictionContextEMPTY_RETURN_STATE} var payloads = []int{b.getReturnState(-1), BasePredictionContextEmptyReturnState}
var parents = []PredictionContext{b.GetParent(-1), nil} var parents = []PredictionContext{b.GetParent(-1), nil}
return NewArrayPredictionContext(parents, payloads) return NewArrayPredictionContext(parents, payloads)
} else if b == BasePredictionContextEMPTY { // x + $ = [$,x] ($ is always first if present) } else if b == BasePredictionContextEMPTY { // x + $ = [$,x] ($ is always first if present)
var payloads = []int{a.getReturnState(-1), BasePredictionContextEMPTY_RETURN_STATE} var payloads = []int{a.getReturnState(-1), BasePredictionContextEmptyReturnState}
var parents = []PredictionContext{a.GetParent(-1), nil} var parents = []PredictionContext{a.GetParent(-1), nil}
return NewArrayPredictionContext(parents, payloads) return NewArrayPredictionContext(parents, payloads)
} }
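
mergeRoot's $ + x = [$,x] branch builds a two-entry array context whose last slot is the empty-return-state sentinel paired with a nil parent. A sketch of that shape, with a stripped-down ctx type standing in for PredictionContext:

package main

import "fmt"

// emptyReturnState plays the role of BasePredictionContextEmptyReturnState.
const emptyReturnState = 0x7FFFFFFF

type ctx struct {
	parents      []*ctx
	returnStates []int
}

// mergeWithEmpty sketches $ + x = [$,x]: the non-empty side keeps its
// parent and return state, and $ contributes a nil parent plus the
// sentinel, which always sorts last.
func mergeWithEmpty(x *ctx) *ctx {
	return &ctx{
		parents:      []*ctx{x.parents[0], nil},
		returnStates: []int{x.returnStates[0], emptyReturnState},
	}
}

func main() {
	x := &ctx{parents: []*ctx{nil}, returnStates: []int{42}}
	m := mergeWithEmpty(x)
	fmt.Println(m.returnStates) // [42 2147483647]
}
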
@@ -619,32 +619,32 @@ func mergeArrays(a, b *ArrayPredictionContext, rootIsWildcard bool, mergeCache *
var mergedParents = make([]PredictionContext, 0) var mergedParents = make([]PredictionContext, 0)
// walk and merge to yield mergedParents, mergedReturnStates // walk and merge to yield mergedParents, mergedReturnStates
for i < len(a.returnStates) && j < len(b.returnStates) { for i < len(a.returnStates) && j < len(b.returnStates) {
var a_parent = a.parents[i] var aParent = a.parents[i]
var b_parent = b.parents[j] var bParent = b.parents[j]
if a.returnStates[i] == b.returnStates[j] { if a.returnStates[i] == b.returnStates[j] {
// same payload (stack tops are equal), must yield merged singleton // same payload (stack tops are equal), must yield merged singleton
var payload = a.returnStates[i] var payload = a.returnStates[i]
// $+$ = $ // $+$ = $
var bothDollars = payload == BasePredictionContextEMPTY_RETURN_STATE && a_parent == nil && b_parent == nil var bothDollars = payload == BasePredictionContextEmptyReturnState && aParent == nil && bParent == nil
var ax_ax = (a_parent != nil && b_parent != nil && a_parent == b_parent) // ax+ax var axAX = (aParent != nil && bParent != nil && aParent == bParent) // ax+ax
// -> // ->
// ax // ax
if bothDollars || ax_ax { if bothDollars || axAX {
mergedParents[k] = a_parent // choose left mergedParents[k] = aParent // choose left
mergedReturnStates[k] = payload mergedReturnStates[k] = payload
} else { // ax+ay -> a'[x,y] } else { // ax+ay -> a'[x,y]
var mergedParent = merge(a_parent, b_parent, rootIsWildcard, mergeCache) var mergedParent = merge(aParent, bParent, rootIsWildcard, mergeCache)
mergedParents[k] = mergedParent mergedParents[k] = mergedParent
mergedReturnStates[k] = payload mergedReturnStates[k] = payload
} }
i++ // hop over left one as usual i++ // hop over left one as usual
j++ // but also Skip one in right side since we merge j++ // but also Skip one in right side since we merge
} else if a.returnStates[i] < b.returnStates[j] { // copy a[i] to M } else if a.returnStates[i] < b.returnStates[j] { // copy a[i] to M
mergedParents[k] = a_parent mergedParents[k] = aParent
mergedReturnStates[k] = a.returnStates[i] mergedReturnStates[k] = a.returnStates[i]
i++ i++
} else { // b > a, copy b[j] to M } else { // b > a, copy b[j] to M
mergedParents[k] = b_parent mergedParents[k] = bParent
mergedReturnStates[k] = b.returnStates[j] mergedReturnStates[k] = b.returnStates[j]
j++ j++
} }
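
mergeArrays above is a classic two-pointer merge over the sorted returnStates arrays: equal payloads collapse into one merged entry, otherwise the smaller key is copied over. A sketch of the walk with plain ints (the real code also merges the parallel parents arrays):

package main

import "fmt"

// mergeSorted sketches the two-pointer walk in mergeArrays: equal keys
// yield one entry and advance both sides, otherwise the smaller key is
// copied and only its side advances.
func mergeSorted(a, b []int) []int {
	merged := make([]int, 0, len(a)+len(b))
	i, j := 0, 0
	for i < len(a) && j < len(b) {
		switch {
		case a[i] == b[j]: // same payload: yield one merged entry
			merged = append(merged, a[i])
			i++
			j++
		case a[i] < b[j]: // copy a[i] to M
			merged = append(merged, a[i])
			i++
		default: // b < a, copy b[j] to M
			merged = append(merged, b[j])
			j++
		}
	}
	merged = append(merged, a[i:]...) // copy whichever tail remains
	merged = append(merged, b[j:]...)
	return merged
}

func main() {
	fmt.Println(mergeSorted([]int{1, 3, 5}, []int{3, 4})) // [1 3 4 5]
}
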

View File

@@ -25,7 +25,7 @@ type Recognizer interface {
} }
type BaseRecognizer struct { type BaseRecognizer struct {
_listeners []ErrorListener listeners []ErrorListener
state int state int
RuleNames []string RuleNames []string
@@ -36,7 +36,7 @@ type BaseRecognizer struct {
func NewBaseRecognizer() *BaseRecognizer { func NewBaseRecognizer() *BaseRecognizer {
rec := new(BaseRecognizer) rec := new(BaseRecognizer)
rec._listeners = []ErrorListener{ConsoleErrorListenerINSTANCE} rec.listeners = []ErrorListener{ConsoleErrorListenerINSTANCE}
rec.state = -1 rec.state = -1
return rec return rec
} }
@@ -56,11 +56,11 @@ func (b *BaseRecognizer) Action(context RuleContext, ruleIndex, actionIndex int)
} }
func (b *BaseRecognizer) AddErrorListener(listener ErrorListener) { func (b *BaseRecognizer) AddErrorListener(listener ErrorListener) {
b._listeners = append(b._listeners, listener) b.listeners = append(b.listeners, listener)
} }
func (b *BaseRecognizer) RemoveErrorListeners() { func (b *BaseRecognizer) RemoveErrorListeners() {
b._listeners = make([]ErrorListener, 0) b.listeners = make([]ErrorListener, 0)
} }
func (b *BaseRecognizer) GetRuleNames() []string { func (b *BaseRecognizer) GetRuleNames() []string {
@@ -203,7 +203,7 @@ func (b *BaseRecognizer) GetTokenErrorDisplay(t Token) string {
} }
func (b *BaseRecognizer) GetErrorListenerDispatch() ErrorListener { func (b *BaseRecognizer) GetErrorListenerDispatch() ErrorListener {
return NewProxyErrorListener(b._listeners) return NewProxyErrorListener(b.listeners)
} }
// subclass needs to override these if there are sempreds or actions // subclass needs to override these if there are sempreds or actions
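
GetErrorListenerDispatch above wraps the listener slice in a proxy so one error report fans out to every registered listener. A minimal sketch of that fan-out, with a pared-down errorListener interface in place of the runtime's ErrorListener:

package main

import "fmt"

type errorListener interface{ SyntaxError(msg string) }

type consoleListener struct{}

func (consoleListener) SyntaxError(msg string) { fmt.Println("error:", msg) }

// proxyListener forwards each call to every delegate, as the
// NewProxyErrorListener dispatch above does.
type proxyListener struct{ delegates []errorListener }

func (p proxyListener) SyntaxError(msg string) {
	for _, d := range p.delegates {
		d.SyntaxError(msg)
	}
}

func main() {
	listeners := []errorListener{consoleListener{}, consoleListener{}}
	dispatch := proxyListener{delegates: listeners}
	dispatch.SyntaxError("missing ';'") // printed once per listener
}
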

View File

@@ -42,7 +42,7 @@ type BaseToken struct {
tokenIndex int // from 0..n-1 of the token object in the input stream tokenIndex int // from 0..n-1 of the token object in the input stream
line int // line=1..n of the 1st character line int // line=1..n of the 1st character
column int // beginning of the line at which it occurs, 0..n-1 column int // beginning of the line at which it occurs, 0..n-1
_text string // text of the token. text string // text of the token.
readOnly bool readOnly bool
} }
@@ -160,13 +160,13 @@ func (c *CommonToken) clone() *CommonToken {
t.tokenIndex = c.GetTokenIndex() t.tokenIndex = c.GetTokenIndex()
t.line = c.GetLine() t.line = c.GetLine()
t.column = c.GetColumn() t.column = c.GetColumn()
t._text = c.GetText() t.text = c.GetText()
return t return t
} }
func (c *CommonToken) GetText() string { func (c *CommonToken) GetText() string {
if c._text != "" { if c.text != "" {
return c._text return c.text
} }
var input = c.GetInputStream() var input = c.GetInputStream()
if input == nil { if input == nil {
@@ -180,7 +180,7 @@ func (c *CommonToken) GetText() string {
} }
func (c *CommonToken) SetText(text string) { func (c *CommonToken) SetText(text string) {
c._text = text c.text = text
} }
func (c *CommonToken) String() string { func (c *CommonToken) String() string {
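
CommonToken's GetText treats the text field as an explicit override: whatever SetText stored wins, otherwise the text is sliced out of the underlying input on demand. A sketch of that lazy lookup, assuming a plain string as the input stream:

package main

import "fmt"

// token sketches CommonToken's text handling: text is an override,
// input plus start/stop is the fallback source.
type token struct {
	text        string
	input       string
	start, stop int
}

func (t *token) GetText() string {
	if t.text != "" {
		return t.text // explicit override wins
	}
	if t.input == "" {
		return "<no input>"
	}
	return t.input[t.start : t.stop+1] // slice from the input on demand
}

func (t *token) SetText(s string) { t.text = s }

func main() {
	t := &token{input: "var x = 1", start: 0, stop: 2}
	fmt.Println(t.GetText()) // "var"
	t.SetText("VAR")
	fmt.Println(t.GetText()) // "VAR"
}
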

View File

@@ -16,13 +16,13 @@ func (t *TraceListener) VisitErrorNode(_ ErrorNode) {
} }
func (t *TraceListener) EnterEveryRule(ctx ParserRuleContext) { func (t *TraceListener) EnterEveryRule(ctx ParserRuleContext) {
fmt.Println("enter " + t.parser.GetRuleNames()[ctx.GetRuleIndex()] + ", LT(1)=" + t.parser._input.LT(1).GetText()) fmt.Println("enter " + t.parser.GetRuleNames()[ctx.GetRuleIndex()] + ", LT(1)=" + t.parser.input.LT(1).GetText())
} }
func (t *TraceListener) VisitTerminal(node TerminalNode) { func (t *TraceListener) VisitTerminal(node TerminalNode) {
fmt.Println("consume " + fmt.Sprint(node.GetSymbol()) + " rule " + t.parser.GetRuleNames()[t.parser._ctx.GetRuleIndex()]) fmt.Println("consume " + fmt.Sprint(node.GetSymbol()) + " rule " + t.parser.GetRuleNames()[t.parser.ctx.GetRuleIndex()])
} }
func (t *TraceListener) ExitEveryRule(ctx ParserRuleContext) { func (t *TraceListener) ExitEveryRule(ctx ParserRuleContext) {
fmt.Println("exit " + t.parser.GetRuleNames()[ctx.GetRuleIndex()] + ", LT(1)=" + t.parser._input.LT(1).GetText()) fmt.Println("exit " + t.parser.GetRuleNames()[ctx.GetRuleIndex()] + ", LT(1)=" + t.parser.input.LT(1).GetText())
} }
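
The TraceListener callbacks above just print rule entry and exit plus the next token. A sketch of those hooks, with ruleNames and a lookahead closure as stand-ins for the parser's state:

package main

import "fmt"

type tracer struct {
	ruleNames []string
	lookahead func() string // stands in for parser.input.LT(1).GetText()
}

func (t tracer) EnterEveryRule(ruleIndex int) {
	fmt.Println("enter " + t.ruleNames[ruleIndex] + ", LT(1)=" + t.lookahead())
}

func (t tracer) ExitEveryRule(ruleIndex int) {
	fmt.Println("exit " + t.ruleNames[ruleIndex] + ", LT(1)=" + t.lookahead())
}

func main() {
	tr := tracer{ruleNames: []string{"expr"}, lookahead: func() string { return "+" }}
	tr.EnterEveryRule(0)
	tr.ExitEveryRule(0)
}
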

View File

@@ -26,8 +26,8 @@ type Transition interface {
type BaseTransition struct { type BaseTransition struct {
target ATNState target ATNState
isEpsilon bool isEpsilon bool
label_ int label int
label *IntervalSet intervalSet *IntervalSet
serializationType int serializationType int
} }
@@ -42,7 +42,7 @@ func NewBaseTransition(target ATNState) *BaseTransition {
t.target = target t.target = target
// Are we epsilon, action, sempred? // Are we epsilon, action, sempred?
t.isEpsilon = false t.isEpsilon = false
t.label = nil t.intervalSet = nil
return t return t
} }
@@ -60,7 +60,7 @@ func (t *BaseTransition) getIsEpsilon() bool {
} }
func (t *BaseTransition) getLabel() *IntervalSet { func (t *BaseTransition) getLabel() *IntervalSet {
return t.label return t.intervalSet
} }
func (t *BaseTransition) getSerializationType() int { func (t *BaseTransition) getSerializationType() int {
@@ -79,7 +79,7 @@ const (
TransitionATOM = 5 TransitionATOM = 5
TransitionACTION = 6 TransitionACTION = 6
TransitionSET = 7 // ~(A|B) or ~atom, wildcard, which convert to next 2 TransitionSET = 7 // ~(A|B) or ~atom, wildcard, which convert to next 2
TransitionNOT_SET = 8 TransitionNOTSET = 8
TransitionWILDCARD = 9 TransitionWILDCARD = 9
TransitionPRECEDENCE = 10 TransitionPRECEDENCE = 10
) )
@@ -117,7 +117,7 @@ var TransitionserializationNames = []string{
// TransitionATOM, // TransitionATOM,
// TransitionACTION, // TransitionACTION,
// TransitionSET, // TransitionSET,
// TransitionNOT_SET, // TransitionNOTSET,
// TransitionWILDCARD, // TransitionWILDCARD,
// TransitionPRECEDENCE // TransitionPRECEDENCE
//} //}
@@ -127,13 +127,13 @@ type AtomTransition struct {
*BaseTransition *BaseTransition
} }
func NewAtomTransition(target ATNState, label int) *AtomTransition { func NewAtomTransition(target ATNState, intervalSet int) *AtomTransition {
t := new(AtomTransition) t := new(AtomTransition)
t.BaseTransition = NewBaseTransition(target) t.BaseTransition = NewBaseTransition(target)
t.label_ = label // The token type or character value or, signifies special label. t.label = intervalSet // The token type or character value or, signifies special intervalSet.
t.label = t.makeLabel() t.intervalSet = t.makeLabel()
t.serializationType = TransitionATOM t.serializationType = TransitionATOM
return t return t
@@ -141,16 +141,16 @@ func NewAtomTransition(target ATNState, label int) *AtomTransition {
func (t *AtomTransition) makeLabel() *IntervalSet { func (t *AtomTransition) makeLabel() *IntervalSet {
var s = NewIntervalSet() var s = NewIntervalSet()
s.addOne(t.label_) s.addOne(t.label)
return s return s
} }
func (t *AtomTransition) Matches(symbol, minVocabSymbol, maxVocabSymbol int) bool { func (t *AtomTransition) Matches(symbol, minVocabSymbol, maxVocabSymbol int) bool {
return t.label_ == symbol return t.label == symbol
} }
func (t *AtomTransition) String() string { func (t *AtomTransition) String() string {
return strconv.Itoa(t.label_) return strconv.Itoa(t.label)
} }
type RuleTransition struct { type RuleTransition struct {
@@ -217,7 +217,7 @@ func NewRangeTransition(target ATNState, start, stop int) *RangeTransition {
t.serializationType = TransitionRANGE t.serializationType = TransitionRANGE
t.start = start t.start = start
t.stop = stop t.stop = stop
t.label = t.makeLabel() t.intervalSet = t.makeLabel()
return t return t
} }
@@ -325,21 +325,21 @@ func NewSetTransition(target ATNState, set *IntervalSet) *SetTransition {
t.serializationType = TransitionSET t.serializationType = TransitionSET
if set != nil { if set != nil {
t.label = set t.intervalSet = set
} else { } else {
t.label = NewIntervalSet() t.intervalSet = NewIntervalSet()
t.label.addOne(TokenInvalidType) t.intervalSet.addOne(TokenInvalidType)
} }
return t return t
} }
func (t *SetTransition) Matches(symbol, minVocabSymbol, maxVocabSymbol int) bool { func (t *SetTransition) Matches(symbol, minVocabSymbol, maxVocabSymbol int) bool {
return t.label.contains(symbol) return t.intervalSet.contains(symbol)
} }
func (t *SetTransition) String() string { func (t *SetTransition) String() string {
return t.label.String() return t.intervalSet.String()
} }
type NotSetTransition struct { type NotSetTransition struct {
@@ -352,17 +352,17 @@ func NewNotSetTransition(target ATNState, set *IntervalSet) *NotSetTransition {
t.SetTransition = NewSetTransition(target, set) t.SetTransition = NewSetTransition(target, set)
t.serializationType = TransitionNOT_SET t.serializationType = TransitionNOTSET
return t return t
} }
func (t *NotSetTransition) Matches(symbol, minVocabSymbol, maxVocabSymbol int) bool { func (t *NotSetTransition) Matches(symbol, minVocabSymbol, maxVocabSymbol int) bool {
return symbol >= minVocabSymbol && symbol <= maxVocabSymbol && !t.label.contains(symbol) return symbol >= minVocabSymbol && symbol <= maxVocabSymbol && !t.intervalSet.contains(symbol)
} }
func (t *NotSetTransition) String() string { func (t *NotSetTransition) String() string {
return "~" + t.label.String() return "~" + t.intervalSet.String()
} }
type WildcardTransition struct { type WildcardTransition struct {
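
The Matches pair above encodes set membership and its complement: a SetTransition matches symbols in its interval set, while a NotSetTransition matches in-vocabulary symbols that are not in it. A sketch with a map standing in for *IntervalSet:

package main

import "fmt"

type intervalSet map[int]bool

// setMatches mirrors SetTransition.Matches: plain membership.
func setMatches(set intervalSet, symbol int) bool {
	return set[symbol]
}

// notSetMatches mirrors NotSetTransition.Matches: the symbol must be
// inside the vocabulary range and outside the set.
func notSetMatches(set intervalSet, symbol, minVocab, maxVocab int) bool {
	return symbol >= minVocab && symbol <= maxVocab && !set[symbol]
}

func main() {
	set := intervalSet{3: true, 7: true}
	fmt.Println(setMatches(set, 3))            // true
	fmt.Println(notSetMatches(set, 3, 1, 10))  // false: in the set
	fmt.Println(notSetMatches(set, 5, 1, 10))  // true
	fmt.Println(notSetMatches(set, 42, 1, 10)) // false: outside the vocabulary
}
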

View File

@@ -93,11 +93,11 @@ func TreesfindAllRuleNodes(t ParseTree, ruleIndex int) []ParseTree {
func TreesfindAllNodes(t ParseTree, index int, findTokens bool) []ParseTree { func TreesfindAllNodes(t ParseTree, index int, findTokens bool) []ParseTree {
var nodes = make([]ParseTree, 0) var nodes = make([]ParseTree, 0)
Trees_findAllNodes(t, index, findTokens, nodes) TreesFindAllNodes(t, index, findTokens, nodes)
return nodes return nodes
} }
func Trees_findAllNodes(t ParseTree, index int, findTokens bool, nodes []ParseTree) { func TreesFindAllNodes(t ParseTree, index int, findTokens bool, nodes []ParseTree) {
// check this node (the root) first // check this node (the root) first
t2, ok := t.(TerminalNode) t2, ok := t.(TerminalNode)
@@ -114,7 +114,7 @@ func Trees_findAllNodes(t ParseTree, index int, findTokens bool, nodes []ParseTr
} }
// check children // check children
for i := 0; i < t.GetChildCount(); i++ { for i := 0; i < t.GetChildCount(); i++ {
Trees_findAllNodes(t.GetChild(i).(ParseTree), index, findTokens, nodes) TreesFindAllNodes(t.GetChild(i).(ParseTree), index, findTokens, nodes)
} }
} }
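
TreesFindAllNodes collects matching nodes by checking the root first and then recursing over the children. A sketch of the same walk, done by returning the grown slice, with a minimal node type in place of ParseTree:

package main

import "fmt"

type node struct {
	index    int
	children []*node
}

// findAllNodes checks this node (the root) first, then the children,
// accumulating matches in the returned slice.
func findAllNodes(t *node, index int, nodes []*node) []*node {
	if t.index == index {
		nodes = append(nodes, t)
	}
	for _, c := range t.children {
		nodes = findAllNodes(c, index, nodes)
	}
	return nodes
}

func main() {
	root := &node{index: 1, children: []*node{
		{index: 2}, {index: 1, children: []*node{{index: 1}}},
	}}
	fmt.Println(len(findAllNodes(root, 1, nil))) // 3
}
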