Lint: Remove underscores from names

Will Faught 2016-05-21 01:02:49 -07:00
parent 71dba8cb91
commit b4da149732
15 changed files with 345 additions and 349 deletions
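The renames below apply standard Go style: golint flags underscores in identifiers and prefers MixedCaps, and a leading or trailing underscore has no effect on visibility, which is governed solely by the case of the first letter. A minimal sketch of the rule being enforced (names here are illustrative stand-ins, not taken from the diff):

package main

import "fmt"

// golint reports roughly: "don't use underscores in Go names;
// const TransitionNOT_SET should be TransitionNOTSET".
const TransitionNOTSET = 8 // was TransitionNOT_SET

type lexer struct {
	hitEOF bool // was _hitEOF; the lowercase initial alone keeps it unexported
}

func main() {
	fmt.Println(TransitionNOTSET, lexer{hitEOF: true}.hitEOF)
}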

View File

@ -614,7 +614,7 @@ func (a *ATNDeserializer) edgeFactory(atn *ATN, typeIndex, src, trg, arg1, arg2,
return NewActionTransition(target, arg1, arg2, arg3 != 0)
case TransitionSET:
return NewSetTransition(target, sets[arg1])
case TransitionNOT_SET:
case TransitionNOTSET:
return NewNotSetTransition(target, sets[arg1])
case TransitionWILDCARD:
return NewWildcardTransition(target)

View File

@ -261,7 +261,7 @@ func (c *CommonTokenStream) getHiddenTokensToRight(tokenIndex, channel int) []To
panic(strconv.Itoa(tokenIndex) + " not in 0.." + strconv.Itoa(len(c.tokens)-1))
}
var nextOnChannel = c.NextTokenOnChannel(tokenIndex+1, LexerDefaultTokenChannel)
var from_ = tokenIndex + 1
var from = tokenIndex + 1
// if none onchannel to right, nextOnChannel=-1 so set to = last token
var to int
if nextOnChannel == -1 {
@ -269,7 +269,7 @@ func (c *CommonTokenStream) getHiddenTokensToRight(tokenIndex, channel int) []To
} else {
to = nextOnChannel
}
return c.filterForChannel(from_, to, channel)
return c.filterForChannel(from, to, channel)
}
// Collect all tokens on specified channel to the left of
@ -285,9 +285,9 @@ func (c *CommonTokenStream) getHiddenTokensToLeft(tokenIndex, channel int) []Tok
return nil
}
// if none on channel to left, prevOnChannel=-1 then from=0
var from_ = prevOnChannel + 1
var from = prevOnChannel + 1
var to = tokenIndex - 1
return c.filterForChannel(from_, to, channel)
return c.filterForChannel(from, to, channel)
}
func (c *CommonTokenStream) filterForChannel(left, right, channel int) []Token {
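The trailing underscore in from_ reads like a carryover from the Python runtime, where from is a reserved word; in Go it is an ordinary identifier, so the underscore can simply be dropped. A small sketch (hypothetical function, not part of the runtime):

package main

import "fmt"

// from works fine as a parameter name in Go; it is not reserved.
func span(from, to int) []int {
	out := make([]int, 0, to-from)
	for i := from; i < to; i++ {
		out = append(out, i)
	}
	return out
}

func main() {
	fmt.Println(span(2, 5)) // [2 3 4]
}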

View File

@ -5,7 +5,7 @@ import "sort"
type DFA struct {
atnStartState DecisionState
decision int
_states map[string]*DFAState
states map[string]*DFAState
s0 *DFAState
precedenceDfa bool
}
@ -19,7 +19,7 @@ func NewDFA(atnStartState DecisionState, decision int) *DFA {
d.decision = decision
// A set of all DFA states. Use {@link Map} so we can get old state back
// ({@link Set} only allows you to see if it's there).
d._states = make(map[string]*DFAState)
d.states = make(map[string]*DFAState)
d.s0 = nil
// {@code true} if d DFA is for a precedence decision otherwise,
// {@code false}. This is the backing field for {@link //isPrecedenceDfa},
@ -98,7 +98,7 @@ func (d *DFA) setPrecedenceStartState(precedence int, startState *DFAState) {
func (d *DFA) setPrecedenceDfa(precedenceDfa bool) {
if d.precedenceDfa != precedenceDfa {
d._states = make(map[string]*DFAState)
d.states = make(map[string]*DFAState)
if precedenceDfa {
var precedenceState = NewDFAState(-1, NewBaseATNConfigSet(false))
precedenceState.edges = make([]*DFAState, 0)
@ -113,7 +113,7 @@ func (d *DFA) setPrecedenceDfa(precedenceDfa bool) {
}
func (d *DFA) GetStates() map[string]*DFAState {
return d._states
return d.states
}
type DFAStateList []*DFAState
@ -126,9 +126,9 @@ func (a DFAStateList) Less(i, j int) bool { return a[i].stateNumber < a[j].state
func (d *DFA) sortedStates() []*DFAState {
// extract the values
vs := make([]*DFAState, len(d._states))
vs := make([]*DFAState, len(d.states))
i := 0
for _, v := range d._states {
for _, v := range d.states {
vs[i] = v
i++
}
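Renaming _states to states changes nothing about visibility: the lowercase initial already keeps the field unexported. The pattern sortedStates uses above, collecting map values into a slice before sorting, looks like this in isolation (illustrative types; the real code sorts *DFAState values with a custom Less):

package main

import (
	"fmt"
	"sort"
)

func main() {
	states := map[string]int{"b": 2, "a": 1, "c": 3}
	vs := make([]int, 0, len(states))
	for _, v := range states { // extract the values
		vs = append(vs, v)
	}
	sort.Ints(vs)
	fmt.Println(vs) // [1 2 3]
}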

View File

@ -31,16 +31,16 @@ type BaseLexer struct {
TokenStartColumn int
ActionType int
_input CharStream
_factory TokenFactory
_tokenFactorySourcePair *TokenSourceCharStreamPair
_token Token
_hitEOF bool
_channel int
_type int
_modeStack IntStack
_mode int
_text string
input CharStream
factory TokenFactory
tokenFactorySourcePair *TokenSourceCharStreamPair
token Token
hitEOF bool
channel int
thetype int
modeStack IntStack
mode int
text string
}
func NewBaseLexer(input CharStream) *BaseLexer {
@ -49,9 +49,9 @@ func NewBaseLexer(input CharStream) *BaseLexer {
lexer.BaseRecognizer = NewBaseRecognizer()
lexer._input = input
lexer._factory = CommonTokenFactoryDEFAULT
lexer._tokenFactorySourcePair = &TokenSourceCharStreamPair{lexer, input}
lexer.input = input
lexer.factory = CommonTokenFactoryDEFAULT
lexer.tokenFactorySourcePair = &TokenSourceCharStreamPair{lexer, input}
lexer.Interpreter = nil // child classes must populate it
@ -62,7 +62,7 @@ func NewBaseLexer(input CharStream) *BaseLexer {
// emissions, then set l to the last token to be Matched or
// something nonnil so that the auto token emit mechanism will not
// emit another token.
lexer._token = nil
lexer.token = nil
// What character index in the stream did the current token start at?
// Needed, for example, to get the text for current token. Set at
@ -77,21 +77,21 @@ func NewBaseLexer(input CharStream) *BaseLexer {
// Once we see EOF on char stream, next token will be EOF.
// If you have DONE : EOF then you see DONE EOF.
lexer._hitEOF = false
lexer.hitEOF = false
// The channel number for the current token///
lexer._channel = TokenDefaultChannel
lexer.channel = TokenDefaultChannel
// The token type for the current token///
lexer._type = TokenInvalidType
lexer.thetype = TokenInvalidType
lexer._modeStack = make([]int, 0)
lexer._mode = LexerDefaultMode
lexer.modeStack = make([]int, 0)
lexer.mode = LexerDefaultMode
// You can set the text for the current token to override what is in
// the input char buffer. Use setText() or can set l instance var.
// /
lexer._text = ""
lexer.text = ""
return lexer
}
@ -111,20 +111,20 @@ const (
func (b *BaseLexer) reset() {
// wack Lexer state variables
if b._input != nil {
b._input.Seek(0) // rewind the input
if b.input != nil {
b.input.Seek(0) // rewind the input
}
b._token = nil
b._type = TokenInvalidType
b._channel = TokenDefaultChannel
b.token = nil
b.thetype = TokenInvalidType
b.channel = TokenDefaultChannel
b.TokenStartCharIndex = -1
b.TokenStartColumn = -1
b.TokenStartLine = -1
b._text = ""
b.text = ""
b._hitEOF = false
b._mode = LexerDefaultMode
b._modeStack = make([]int, 0)
b.hitEOF = false
b.mode = LexerDefaultMode
b.modeStack = make([]int, 0)
b.Interpreter.reset()
}
@ -134,7 +134,7 @@ func (b *BaseLexer) GetInterpreter() *LexerATNSimulator {
}
func (b *BaseLexer) GetInputStream() CharStream {
return b._input
return b.input
}
func (b *BaseLexer) GetSourceName() string {
@ -142,15 +142,15 @@ func (b *BaseLexer) GetSourceName() string {
}
func (b *BaseLexer) setChannel(v int) {
b._channel = v
b.channel = v
}
func (b *BaseLexer) GetTokenFactory() TokenFactory {
return b._factory
return b.factory
}
func (b *BaseLexer) setTokenFactory(f TokenFactory) {
b._factory = f
b.factory = f
}
func (b *BaseLexer) safeMatch() (ret int) {
@ -166,53 +166,53 @@ func (b *BaseLexer) safeMatch() (ret int) {
}
}()
return b.Interpreter.Match(b._input, b._mode)
return b.Interpreter.Match(b.input, b.mode)
}
// Return a token from l source i.e., Match a token on the char stream.
func (b *BaseLexer) NextToken() Token {
if b._input == nil {
if b.input == nil {
panic("NextToken requires a non-nil input stream.")
}
var tokenStartMarker = b._input.Mark()
var tokenStartMarker = b.input.Mark()
// previously in finally block
defer func() {
// make sure we release marker after Match or
// unbuffered char stream will keep buffering
b._input.Release(tokenStartMarker)
b.input.Release(tokenStartMarker)
}()
for true {
if b._hitEOF {
if b.hitEOF {
b.emitEOF()
return b._token
return b.token
}
b._token = nil
b._channel = TokenDefaultChannel
b.TokenStartCharIndex = b._input.Index()
b.token = nil
b.channel = TokenDefaultChannel
b.TokenStartCharIndex = b.input.Index()
b.TokenStartColumn = b.Interpreter.column
b.TokenStartLine = b.Interpreter.line
b._text = ""
b.text = ""
var continueOuter = false
for true {
b._type = TokenInvalidType
b.thetype = TokenInvalidType
var ttype = LexerSkip
ttype = b.safeMatch()
if b._input.LA(1) == TokenEOF {
b._hitEOF = true
if b.input.LA(1) == TokenEOF {
b.hitEOF = true
}
if b._type == TokenInvalidType {
b._type = ttype
if b.thetype == TokenInvalidType {
b.thetype = ttype
}
if b._type == LexerSkip {
if b.thetype == LexerSkip {
continueOuter = true
break
}
if b._type != LexerMore {
if b.thetype != LexerMore {
break
}
if PortDebug {
@ -226,10 +226,10 @@ func (b *BaseLexer) NextToken() Token {
if continueOuter {
continue
}
if b._token == nil {
if b.token == nil {
b.emit()
}
return b._token
return b.token
}
return nil
@ -237,52 +237,48 @@ func (b *BaseLexer) NextToken() Token {
// Instruct the lexer to Skip creating a token for current lexer rule
// and look for another token. NextToken() knows to keep looking when
// a lexer rule finishes with token set to SKIP_TOKEN. Recall that
// a lexer rule finishes with token set to SKIPTOKEN. Recall that
// if token==nil at end of any token rule, it creates one for you
// and emits it.
// /
func (b *BaseLexer) Skip() {
b._type = LexerSkip
b.thetype = LexerSkip
}
func (b *BaseLexer) More() {
b._type = LexerMore
}
func (b *BaseLexer) mode(m int) {
b._mode = m
b.thetype = LexerMore
}
func (b *BaseLexer) pushMode(m int) {
if LexerATNSimulatorDebug {
fmt.Println("pushMode " + strconv.Itoa(m))
}
b._modeStack.Push(b._mode)
b.mode(m)
b.modeStack.Push(b.mode)
b.mode = m
}
func (b *BaseLexer) popMode() int {
if len(b._modeStack) == 0 {
if len(b.modeStack) == 0 {
panic("Empty Stack")
}
if LexerATNSimulatorDebug {
fmt.Println("popMode back to " + fmt.Sprint(b._modeStack[0:len(b._modeStack)-1]))
fmt.Println("popMode back to " + fmt.Sprint(b.modeStack[0:len(b.modeStack)-1]))
}
i, _ := b._modeStack.Pop()
b.mode(i)
return b._mode
i, _ := b.modeStack.Pop()
b.mode = i
return b.mode
}
func (b *BaseLexer) inputStream() CharStream {
return b._input
return b.input
}
func (b *BaseLexer) setInputStream(input CharStream) {
b._input = nil
b._tokenFactorySourcePair = &TokenSourceCharStreamPair{b, b._input}
b.input = nil
b.tokenFactorySourcePair = &TokenSourceCharStreamPair{b, b.input}
b.reset()
b._input = input
b._tokenFactorySourcePair = &TokenSourceCharStreamPair{b, b._input}
b.input = input
b.tokenFactorySourcePair = &TokenSourceCharStreamPair{b, b.input}
}
// By default does not support multiple emits per NextToken invocation
@ -291,7 +287,7 @@ func (b *BaseLexer) setInputStream(input CharStream) {
// rather than a single variable as l implementation does).
// /
func (b *BaseLexer) emitToken(token Token) {
b._token = token
b.token = token
}
// The standard method called to automatically emit a token at the
@ -304,7 +300,7 @@ func (b *BaseLexer) emit() Token {
if PortDebug {
fmt.Println("emit")
}
var t = b._factory.Create(b._tokenFactorySourcePair, b._type, b._text, b._channel, b.TokenStartCharIndex, b.getCharIndex()-1, b.TokenStartLine, b.TokenStartColumn)
var t = b.factory.Create(b.tokenFactorySourcePair, b.thetype, b.text, b.channel, b.TokenStartCharIndex, b.getCharIndex()-1, b.TokenStartLine, b.TokenStartColumn)
b.emitToken(t)
return t
}
@ -315,7 +311,7 @@ func (b *BaseLexer) emitEOF() Token {
if PortDebug {
fmt.Println("emitEOF")
}
var eof = b._factory.Create(b._tokenFactorySourcePair, TokenEOF, "", TokenDefaultChannel, b._input.Index(), b._input.Index()-1, lpos, cpos)
var eof = b.factory.Create(b.tokenFactorySourcePair, TokenEOF, "", TokenDefaultChannel, b.input.Index(), b.input.Index()-1, lpos, cpos)
b.emitToken(eof)
return eof
}
@ -329,30 +325,30 @@ func (b *BaseLexer) GetLine() int {
}
func (b *BaseLexer) getType() int {
return b._type
return b.thetype
}
func (b *BaseLexer) setType(t int) {
b._type = t
b.thetype = t
}
// What is the index of the current character of lookahead?///
func (b *BaseLexer) getCharIndex() int {
return b._input.Index()
return b.input.Index()
}
// Return the text Matched so far for the current token or any text override.
//Set the complete text of l token it wipes any previous changes to the text.
func (b *BaseLexer) GetText() string {
if b._text != "" {
return b._text
if b.text != "" {
return b.text
}
return b.Interpreter.GetText(b._input)
return b.Interpreter.GetText(b.input)
}
func (b *BaseLexer) SetText(text string) {
b._text = text
b.text = text
}
func (b *BaseLexer) GetATN() *ATN {
@ -380,8 +376,8 @@ func (b *BaseLexer) getAllTokens() []Token {
func (b *BaseLexer) notifyListeners(e RecognitionException) {
var start = b.TokenStartCharIndex
var stop = b._input.Index()
var text = b._input.GetTextFromInterval(NewInterval(start, stop))
var stop = b.input.Index()
var text = b.input.GetTextFromInterval(NewInterval(start, stop))
var msg = "token recognition error at: '" + text + "'"
var listener = b.GetErrorListenerDispatch()
listener.SyntaxError(b, nil, b.TokenStartLine, b.TokenStartColumn, msg, e)
@ -411,13 +407,13 @@ func (b *BaseLexer) getCharErrorDisplay(c rune) string {
// to do sophisticated error recovery if you are in a fragment rule.
// /
func (b *BaseLexer) Recover(re RecognitionException) {
if b._input.LA(1) != TokenEOF {
if b.input.LA(1) != TokenEOF {
if _, ok := re.(*LexerNoViableAltException); ok {
// Skip a char and try again
b.Interpreter.consume(b._input)
b.Interpreter.consume(b.input)
} else {
// TODO: Do we lose character or line position information?
b._input.Consume()
b.input.Consume()
}
}
}
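One field could not lose its underscore cleanly: type is a Go keyword, so _type becomes thetype rather than type (typ is another common workaround). A minimal illustration (hypothetical struct):

package main

import "fmt"

type token struct {
	// type int  // would not compile: "type" is a keyword
	thetype int // the commit's choice; "typ" is a common alternative
}

func main() {
	fmt.Println(token{thetype: 1}.thetype)
}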

View File

@ -86,22 +86,22 @@ func (l *LexerSkipAction) String() string {
type LexerTypeAction struct {
*BaseLexerAction
_type int
thetype int
}
func NewLexerTypeAction(_type int) *LexerTypeAction {
func NewLexerTypeAction(thetype int) *LexerTypeAction {
l := new(LexerTypeAction)
l.BaseLexerAction = NewBaseLexerAction(LexerActionTypeType)
l._type = _type
l.thetype = thetype
return l
}
func (l *LexerTypeAction) execute(lexer Lexer) {
lexer.setType(l._type)
lexer.setType(l.thetype)
}
func (l *LexerTypeAction) Hash() string {
return strconv.Itoa(l.actionType) + strconv.Itoa(l._type)
return strconv.Itoa(l.actionType) + strconv.Itoa(l.thetype)
}
func (l *LexerTypeAction) equals(other LexerAction) bool {
@ -110,12 +110,12 @@ func (l *LexerTypeAction) equals(other LexerAction) bool {
} else if _, ok := other.(*LexerTypeAction); !ok {
return false
} else {
return l._type == other.(*LexerTypeAction)._type
return l.thetype == other.(*LexerTypeAction).thetype
}
}
func (l *LexerTypeAction) String() string {
return "actionType(" + strconv.Itoa(l._type) + ")"
return "actionType(" + strconv.Itoa(l.thetype) + ")"
}
// Implements the {@code pushMode} lexer action by calling

View File

@ -59,7 +59,7 @@ type LexerATNSimulator struct {
column int
mode int
prevAccept *SimState
Match_calls int
MatchCalls int
}
func NewLexerATNSimulator(recog Lexer, atn *ATN, decisionToDFA []*DFA, sharedContextCache *PredictionContextCache) *LexerATNSimulator {
@ -91,10 +91,10 @@ func NewLexerATNSimulator(recog Lexer, atn *ATN, decisionToDFA []*DFA, sharedCon
var LexerATNSimulatorDebug = false
var LexerATNSimulatorDFADebug = false
var LexerATNSimulatorMIN_DFA_EDGE = 0
var LexerATNSimulatorMAX_DFA_EDGE = 127 // forces unicode to stay in ATN
var LexerATNSimulatorMinDFAEdge = 0
var LexerATNSimulatorMaxDFAEdge = 127 // forces unicode to stay in ATN
var LexerATNSimulatorMatch_calls = 0
var LexerATNSimulatorMatchCalls = 0
func (l *LexerATNSimulator) copyState(simulator *LexerATNSimulator) {
l.column = simulator.column
@ -109,7 +109,7 @@ func (l *LexerATNSimulator) Match(input CharStream, mode int) int {
fmt.Println("Match")
}
l.Match_calls++
l.MatchCalls++
l.mode = mode
var mark = input.Mark()
@ -153,12 +153,12 @@ func (l *LexerATNSimulator) MatchATN(input CharStream) int {
if LexerATNSimulatorDebug {
fmt.Println("MatchATN mode " + strconv.Itoa(l.mode) + " start: " + startState.String())
}
var old_mode = l.mode
var s0_closure = l.computeStartState(input, startState)
var suppressEdge = s0_closure.hasSemanticContext
s0_closure.hasSemanticContext = false
var oldMode = l.mode
var s0Closure = l.computeStartState(input, startState)
var suppressEdge = s0Closure.hasSemanticContext
s0Closure.hasSemanticContext = false
var next = l.addDFAState(s0_closure)
var next = l.addDFAState(s0Closure)
if !suppressEdge {
l.DecisionToDFA[l.mode].s0 = next
@ -167,7 +167,7 @@ func (l *LexerATNSimulator) MatchATN(input CharStream) int {
var predict = l.execATN(input, next)
if LexerATNSimulatorDebug {
fmt.Println("DFA after MatchATN: " + l.DecisionToDFA[old_mode].ToLexerString())
fmt.Println("DFA after MatchATN: " + l.DecisionToDFA[oldMode].ToLexerString())
}
return predict
}
@ -251,11 +251,11 @@ func (l *LexerATNSimulator) execATN(input CharStream, ds0 *DFAState) int {
// {@code t}, or {@code nil} if the target state for l edge is not
// already cached
func (l *LexerATNSimulator) getExistingTargetState(s *DFAState, t int) *DFAState {
if s.edges == nil || t < LexerATNSimulatorMIN_DFA_EDGE || t > LexerATNSimulatorMAX_DFA_EDGE {
if s.edges == nil || t < LexerATNSimulatorMinDFAEdge || t > LexerATNSimulatorMaxDFAEdge {
return nil
}
var target = s.edges[t-LexerATNSimulatorMIN_DFA_EDGE]
var target = s.edges[t-LexerATNSimulatorMinDFAEdge]
if target == nil {
target = nil
}
@ -431,7 +431,7 @@ func (l *LexerATNSimulator) closure(input CharStream, config *LexerATNConfig, co
}
if config.context != nil && !config.context.isEmpty() {
for i := 0; i < config.context.length(); i++ {
if config.context.getReturnState(i) != BasePredictionContextEMPTY_RETURN_STATE {
if config.context.getReturnState(i) != BasePredictionContextEmptyReturnState {
var newContext = config.context.GetParent(i) // "pop" return state
var returnState = l.atn.states[config.context.getReturnState(i)]
cfg := NewLexerATNConfig2(config, returnState, newContext)
@ -585,7 +585,7 @@ func (l *LexerATNSimulator) captureSimState(settings *SimState, input CharStream
settings.dfaState = dfaState
}
func (l *LexerATNSimulator) addDFAEdge(from_ *DFAState, tk int, to *DFAState, cfgs ATNConfigSet) *DFAState {
func (l *LexerATNSimulator) addDFAEdge(from *DFAState, tk int, to *DFAState, cfgs ATNConfigSet) *DFAState {
if to == nil && cfgs != nil {
// leading to l call, ATNConfigSet.hasSemanticContext is used as a
// marker indicating dynamic predicate evaluation makes l edge
@ -608,18 +608,18 @@ func (l *LexerATNSimulator) addDFAEdge(from_ *DFAState, tk int, to *DFAState, cf
}
}
// add the edge
if tk < LexerATNSimulatorMIN_DFA_EDGE || tk > LexerATNSimulatorMAX_DFA_EDGE {
if tk < LexerATNSimulatorMinDFAEdge || tk > LexerATNSimulatorMaxDFAEdge {
// Only track edges within the DFA bounds
return to
}
if LexerATNSimulatorDebug {
fmt.Println("EDGE " + from_.String() + " -> " + to.String() + " upon " + strconv.Itoa(tk))
fmt.Println("EDGE " + from.String() + " -> " + to.String() + " upon " + strconv.Itoa(tk))
}
if from_.edges == nil {
if from.edges == nil {
// make room for tokens 1..n and -1 masquerading as index 0
from_.edges = make([]*DFAState, LexerATNSimulatorMAX_DFA_EDGE-LexerATNSimulatorMIN_DFA_EDGE+1)
from.edges = make([]*DFAState, LexerATNSimulatorMaxDFAEdge-LexerATNSimulatorMinDFAEdge+1)
}
from_.edges[tk-LexerATNSimulatorMIN_DFA_EDGE] = to // connect
from.edges[tk-LexerATNSimulatorMinDFAEdge] = to // connect
return to
}
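The constant renames here also follow Go's initialism convention: acronyms such as DFA stay in one case, so MIN_DFA_EDGE becomes MinDFAEdge rather than MinDfaEdge. Illustrative constants only:

package main

import "fmt"

const (
	MinDFAEdge = 0
	MaxDFAEdge = 127 // forces unicode to stay in ATN, per the source comment
)

func main() {
	fmt.Println(MaxDFAEdge - MinDFAEdge + 1) // size of a per-state edge table
}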

View File

@ -18,7 +18,7 @@ func NewLL1Analyzer(atn *ATN) *LL1Analyzer {
// a predicate during analysis if {@code seeThruPreds==false}.
///
const (
LL1AnalyzerHIT_PRED = TokenInvalidType
LL1AnalyzerHitPred = TokenInvalidType
)
//*
@ -40,10 +40,10 @@ func (la *LL1Analyzer) getDecisionLookahead(s ATNState) []*IntervalSet {
look[alt] = NewIntervalSet()
var lookBusy = NewSet(nil, nil)
var seeThruPreds = false // fail to get lookahead upon pred
la._look(s.GetTransitions()[alt].getTarget(), nil, BasePredictionContextEMPTY, look[alt], lookBusy, NewBitSet(), seeThruPreds, false)
la.look1(s.GetTransitions()[alt].getTarget(), nil, BasePredictionContextEMPTY, look[alt], lookBusy, NewBitSet(), seeThruPreds, false)
// Wipe out lookahead for la alternative if we found nothing
// or we had a predicate when we !seeThruPreds
if look[alt].length() == 0 || look[alt].contains(LL1AnalyzerHIT_PRED) {
if look[alt].length() == 0 || look[alt].contains(LL1AnalyzerHitPred) {
look[alt] = nil
}
}
@ -85,7 +85,7 @@ func (la *LL1Analyzer) look(s, stopState ATNState, ctx RuleContext) *IntervalSet
fmt.Println(seeThruPreds)
fmt.Println("=====")
}
la._look(s, stopState, lookContext, r, NewSet(nil, nil), NewBitSet(), seeThruPreds, true)
la.look1(s, stopState, lookContext, r, NewSet(nil, nil), NewBitSet(), seeThruPreds, true)
if PortDebug {
fmt.Println(r)
}
@ -116,13 +116,13 @@ func (la *LL1Analyzer) look(s, stopState ATNState, ctx RuleContext) *IntervalSet
// {@code NewBitSet()} for la argument.
// @param seeThruPreds {@code true} to true semantic predicates as
// implicitly {@code true} and "see through them", otherwise {@code false}
// to treat semantic predicates as opaque and add {@link //HIT_PRED} to the
// to treat semantic predicates as opaque and add {@link //HitPred} to the
// result if one is encountered.
// @param addEOF Add {@link Token//EOF} to the result if the end of the
// outermost context is reached. This parameter has no effect if {@code ctx}
// is {@code nil}.
func (la *LL1Analyzer) __look(s, stopState ATNState, ctx PredictionContext, look *IntervalSet, lookBusy *Set, calledRuleStack *BitSet, seeThruPreds, addEOF bool, i int) {
func (la *LL1Analyzer) look2(s, stopState ATNState, ctx PredictionContext, look *IntervalSet, lookBusy *Set, calledRuleStack *BitSet, seeThruPreds, addEOF bool, i int) {
returnState := la.atn.states[ctx.getReturnState(i)]
@ -135,11 +135,11 @@ func (la *LL1Analyzer) __look(s, stopState ATNState, ctx PredictionContext, look
}()
calledRuleStack.remove(returnState.GetRuleIndex())
la._look(returnState, stopState, ctx.GetParent(i), look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
la.look1(returnState, stopState, ctx.GetParent(i), look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
}
func (la *LL1Analyzer) _look(s, stopState ATNState, ctx PredictionContext, look *IntervalSet, lookBusy *Set, calledRuleStack *BitSet, seeThruPreds, addEOF bool) {
func (la *LL1Analyzer) look1(s, stopState ATNState, ctx PredictionContext, look *IntervalSet, lookBusy *Set, calledRuleStack *BitSet, seeThruPreds, addEOF bool) {
c := NewBaseATNConfig6(s, 0, ctx)
@ -182,7 +182,7 @@ func (la *LL1Analyzer) _look(s, stopState ATNState, ctx PredictionContext, look
for i := 0; i < ctx.length(); i++ {
returnState := la.atn.states[ctx.getReturnState(i)]
la.__look(returnState, stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF, i)
la.look2(returnState, stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF, i)
}
return
@ -205,7 +205,7 @@ func (la *LL1Analyzer) _look(s, stopState ATNState, ctx PredictionContext, look
newContext := SingletonBasePredictionContextCreate(ctx, t1.followState.GetStateNumber())
la.___look(stopState, newContext, look, lookBusy, calledRuleStack, seeThruPreds, addEOF, t1)
la.look3(stopState, newContext, look, lookBusy, calledRuleStack, seeThruPreds, addEOF, t1)
if PortDebug {
fmt.Println(look)
@ -216,15 +216,15 @@ func (la *LL1Analyzer) _look(s, stopState ATNState, ctx PredictionContext, look
fmt.Println("DEBUG 9")
}
if seeThruPreds {
la._look(t2.getTarget(), stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
la.look1(t2.getTarget(), stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
} else {
look.addOne(LL1AnalyzerHIT_PRED)
look.addOne(LL1AnalyzerHitPred)
}
} else if t.getIsEpsilon() {
if PortDebug {
fmt.Println("DEBUG 10")
}
la._look(t.getTarget(), stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
la.look1(t.getTarget(), stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
} else if _, ok := t.(*WildcardTransition); ok {
if PortDebug {
fmt.Println("DEBUG 11")
@ -248,7 +248,7 @@ func (la *LL1Analyzer) _look(s, stopState ATNState, ctx PredictionContext, look
}
}
func (la *LL1Analyzer) ___look(stopState ATNState, ctx PredictionContext, look *IntervalSet, lookBusy *Set, calledRuleStack *BitSet, seeThruPreds, addEOF bool, t1 *RuleTransition) {
func (la *LL1Analyzer) look3(stopState ATNState, ctx PredictionContext, look *IntervalSet, lookBusy *Set, calledRuleStack *BitSet, seeThruPreds, addEOF bool, t1 *RuleTransition) {
newContext := SingletonBasePredictionContextCreate(ctx, t1.followState.GetStateNumber())
@ -257,6 +257,6 @@ func (la *LL1Analyzer) ___look(stopState ATNState, ctx PredictionContext, look *
}()
calledRuleStack.add(t1.getTarget().GetRuleIndex())
la._look(t1.getTarget(), stopState, newContext, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
la.look1(t1.getTarget(), stopState, newContext, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
}
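Go has no method overloading, so the mutually recursive helpers that were distinguished only by leading underscores (_look, __look, ___look) need genuinely distinct names; the commit numbers them look1, look2, look3. A toy sketch of such mutually recursive helpers (hypothetical bodies, nothing from the analyzer itself):

package main

import "fmt"

func look1(depth int) {
	if depth == 0 {
		return
	}
	fmt.Println("look1 at depth", depth)
	look2(depth - 1)
}

func look2(depth int) {
	look1(depth) // helpers may call back into each other
}

func main() {
	look1(2)
}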

View File

@ -34,13 +34,13 @@ type BaseParser struct {
Interpreter *ParserATNSimulator
BuildParseTrees bool
_input TokenStream
_errHandler ErrorStrategy
_precedenceStack IntStack
_ctx ParserRuleContext
input TokenStream
errHandler ErrorStrategy
precedenceStack IntStack
ctx ParserRuleContext
_tracer *TraceListener
_parseListeners []ParseTreeListener
tracer *TraceListener
parseListeners []ParseTreeListener
_SyntaxErrors int
}
@ -53,15 +53,15 @@ func NewBaseParser(input TokenStream) *BaseParser {
p.BaseRecognizer = NewBaseRecognizer()
// The input stream.
p._input = nil
p.input = nil
// The error handling strategy for the parser. The default value is a new
// instance of {@link DefaultErrorStrategy}.
p._errHandler = NewDefaultErrorStrategy()
p._precedenceStack = make([]int, 0)
p._precedenceStack.Push(0)
p.errHandler = NewDefaultErrorStrategy()
p.precedenceStack = make([]int, 0)
p.precedenceStack.Push(0)
// The {@link ParserRuleContext} object for the currently executing rule.
// p.is always non-nil during the parsing process.
p._ctx = nil
p.ctx = nil
// Specifies whether or not the parser should construct a parse tree during
// the parsing process. The default value is {@code true}.
p.BuildParseTrees = true
@ -70,10 +70,10 @@ func NewBaseParser(input TokenStream) *BaseParser {
// later call to {@link //setTrace}{@code (false)}. The listener itself is
// implemented as a parser listener so p.field is not directly used by
// other parser methods.
p._tracer = nil
p.tracer = nil
// The list of {@link ParseTreeListener} listeners registered to receive
// events during the parse.
p._parseListeners = nil
p.parseListeners = nil
// The number of syntax errors Reported during parsing. p.value is
// incremented each time {@link //NotifyErrorListeners} is called.
p._SyntaxErrors = 0
@ -92,26 +92,26 @@ var bypassAltsAtnCache = make(map[string]int)
// reset the parser's state//
func (p *BaseParser) reset() {
if p._input != nil {
p._input.Seek(0)
if p.input != nil {
p.input.Seek(0)
}
p._errHandler.reset(p)
p._ctx = nil
p.errHandler.reset(p)
p.ctx = nil
p._SyntaxErrors = 0
p.SetTrace(nil)
p._precedenceStack = make([]int, 0)
p._precedenceStack.Push(0)
p.precedenceStack = make([]int, 0)
p.precedenceStack.Push(0)
if p.Interpreter != nil {
p.Interpreter.reset()
}
}
func (p *BaseParser) GetErrorHandler() ErrorStrategy {
return p._errHandler
return p.errHandler
}
func (p *BaseParser) SetErrorHandler(e ErrorStrategy) {
p._errHandler = e
p.errHandler = e
}
// Match current input symbol against {@code ttype}. If the symbol type
@ -143,15 +143,15 @@ func (p *BaseParser) Match(ttype int) Token {
}
if t.GetTokenType() == ttype {
p._errHandler.ReportMatch(p)
p.errHandler.ReportMatch(p)
p.Consume()
} else {
t = p._errHandler.RecoverInline(p)
t = p.errHandler.RecoverInline(p)
if p.BuildParseTrees && t.GetTokenIndex() == -1 {
// we must have conjured up a Newtoken during single token
// insertion
// if it's not the current symbol
p._ctx.AddErrorNode(t)
p.ctx.AddErrorNode(t)
}
}
@ -181,33 +181,33 @@ func (p *BaseParser) Match(ttype int) Token {
func (p *BaseParser) MatchWildcard() Token {
var t = p.GetCurrentToken()
if t.GetTokenType() > 0 {
p._errHandler.ReportMatch(p)
p.errHandler.ReportMatch(p)
p.Consume()
} else {
t = p._errHandler.RecoverInline(p)
t = p.errHandler.RecoverInline(p)
if p.BuildParseTrees && t.GetTokenIndex() == -1 {
// we must have conjured up a Newtoken during single token
// insertion
// if it's not the current symbol
p._ctx.AddErrorNode(t)
p.ctx.AddErrorNode(t)
}
}
return t
}
func (p *BaseParser) GetParserRuleContext() ParserRuleContext {
return p._ctx
return p.ctx
}
func (p *BaseParser) SetParserRuleContext(v ParserRuleContext) {
p._ctx = v
p.ctx = v
}
func (p *BaseParser) GetParseListeners() []ParseTreeListener {
if p._parseListeners == nil {
if p.parseListeners == nil {
return make([]ParseTreeListener, 0)
}
return p._parseListeners
return p.parseListeners
}
// Registers {@code listener} to receive events during the parsing process.
@ -242,10 +242,10 @@ func (p *BaseParser) AddParseListener(listener ParseTreeListener) {
if listener == nil {
panic("listener")
}
if p._parseListeners == nil {
p._parseListeners = make([]ParseTreeListener, 0)
if p.parseListeners == nil {
p.parseListeners = make([]ParseTreeListener, 0)
}
p._parseListeners = append(p._parseListeners, listener)
p.parseListeners = append(p.parseListeners, listener)
}
//
@ -257,10 +257,10 @@ func (p *BaseParser) AddParseListener(listener ParseTreeListener) {
//
func (p *BaseParser) RemoveParseListener(listener ParseTreeListener) {
if p._parseListeners != nil {
if p.parseListeners != nil {
idx := -1
for i, v := range p._parseListeners {
for i, v := range p.parseListeners {
if v == listener {
idx = i
break
@ -272,24 +272,24 @@ func (p *BaseParser) RemoveParseListener(listener ParseTreeListener) {
}
// remove the listener from the slice
p._parseListeners = append(p._parseListeners[0:idx], p._parseListeners[idx+1:]...)
p.parseListeners = append(p.parseListeners[0:idx], p.parseListeners[idx+1:]...)
if len(p._parseListeners) == 0 {
p._parseListeners = nil
if len(p.parseListeners) == 0 {
p.parseListeners = nil
}
}
}
// Remove all parse listeners.
func (p *BaseParser) removeParseListeners() {
p._parseListeners = nil
p.parseListeners = nil
}
// Notify any parse listeners of an enter rule event.
func (p *BaseParser) TriggerEnterRuleEvent() {
if p._parseListeners != nil {
var ctx = p._ctx
for _, listener := range p._parseListeners {
if p.parseListeners != nil {
var ctx = p.ctx
for _, listener := range p.parseListeners {
listener.EnterEveryRule(ctx)
ctx.EnterRule(listener)
}
@ -302,13 +302,13 @@ func (p *BaseParser) TriggerEnterRuleEvent() {
// @see //addParseListener
//
func (p *BaseParser) TriggerExitRuleEvent() {
if p._parseListeners != nil {
if p.parseListeners != nil {
// reverse order walk of listeners
ctx := p._ctx
l := len(p._parseListeners) - 1
ctx := p.ctx
l := len(p.parseListeners) - 1
for i := range p._parseListeners {
listener := p._parseListeners[l-i]
for i := range p.parseListeners {
listener := p.parseListeners[l-i]
ctx.ExitRule(listener)
listener.ExitEveryRule(ctx)
}
@ -324,12 +324,12 @@ func (p *BaseParser) GetATN() *ATN {
}
func (p *BaseParser) GetTokenFactory() TokenFactory {
return p._input.GetTokenSource().GetTokenFactory()
return p.input.GetTokenSource().GetTokenFactory()
}
// Tell our token source and error strategy about a Newway to create tokens.//
func (p *BaseParser) setTokenFactory(factory TokenFactory) {
p._input.GetTokenSource().setTokenFactory(factory)
p.input.GetTokenSource().setTokenFactory(factory)
}
// The ATN with bypass alternatives is expensive to create so we create it
@ -397,21 +397,21 @@ func (p *BaseParser) SetInputStream(input TokenStream) {
}
func (p *BaseParser) GetTokenStream() TokenStream {
return p._input
return p.input
}
// Set the token stream and reset the parser.//
func (p *BaseParser) SetTokenStream(input TokenStream) {
p._input = nil
p.input = nil
p.reset()
p._input = input
p.input = input
}
// Match needs to return the current input symbol, which gets put
// into the label for the associated token ref e.g., x=ID.
//
func (p *BaseParser) GetCurrentToken() Token {
return p._input.LT(1)
return p.input.LT(1)
}
func (p *BaseParser) NotifyErrorListeners(msg string, offendingToken Token, err RecognitionException) {
@ -436,20 +436,20 @@ func (p *BaseParser) Consume() Token {
fmt.Println("Done consuming")
}
}
var hasListener = p._parseListeners != nil && len(p._parseListeners) > 0
var hasListener = p.parseListeners != nil && len(p.parseListeners) > 0
if p.BuildParseTrees || hasListener {
if p._errHandler.inErrorRecoveryMode(p) {
var node = p._ctx.AddErrorNode(o)
if p._parseListeners != nil {
for _, l := range p._parseListeners {
if p.errHandler.inErrorRecoveryMode(p) {
var node = p.ctx.AddErrorNode(o)
if p.parseListeners != nil {
for _, l := range p.parseListeners {
l.VisitErrorNode(node)
}
}
} else {
node := p._ctx.AddTokenNode(o)
if p._parseListeners != nil {
for _, l := range p._parseListeners {
node := p.ctx.AddTokenNode(o)
if p.parseListeners != nil {
for _, l := range p.parseListeners {
l.VisitTerminal(node)
}
}
@ -462,47 +462,47 @@ func (p *BaseParser) Consume() Token {
func (p *BaseParser) addContextToParseTree() {
// add current context to parent if we have a parent
if p._ctx.GetParent() != nil {
p._ctx.GetParent().(ParserRuleContext).AddChild(p._ctx)
if p.ctx.GetParent() != nil {
p.ctx.GetParent().(ParserRuleContext).AddChild(p.ctx)
}
}
func (p *BaseParser) EnterRule(localctx ParserRuleContext, state, ruleIndex int) {
p.SetState(state)
p._ctx = localctx
p._ctx.SetStart(p._input.LT(1))
p.ctx = localctx
p.ctx.SetStart(p.input.LT(1))
if p.BuildParseTrees {
p.addContextToParseTree()
}
if p._parseListeners != nil {
if p.parseListeners != nil {
p.TriggerEnterRuleEvent()
}
}
func (p *BaseParser) ExitRule() {
p._ctx.SetStop(p._input.LT(-1))
// trigger event on _ctx, before it reverts to parent
if p._parseListeners != nil {
p.ctx.SetStop(p.input.LT(-1))
// trigger event on ctx, before it reverts to parent
if p.parseListeners != nil {
p.TriggerExitRuleEvent()
}
p.SetState(p._ctx.GetInvokingState())
if p._ctx.GetParent() != nil {
p._ctx = p._ctx.GetParent().(ParserRuleContext)
p.SetState(p.ctx.GetInvokingState())
if p.ctx.GetParent() != nil {
p.ctx = p.ctx.GetParent().(ParserRuleContext)
} else {
p._ctx = nil
p.ctx = nil
}
}
func (p *BaseParser) EnterOuterAlt(localctx ParserRuleContext, altNum int) {
// if we have Newlocalctx, make sure we replace existing ctx
// that is previous child of parse tree
if p.BuildParseTrees && p._ctx != localctx {
if p._ctx.GetParent() != nil {
p._ctx.GetParent().(ParserRuleContext).RemoveLastChild()
p._ctx.GetParent().(ParserRuleContext).AddChild(localctx)
if p.BuildParseTrees && p.ctx != localctx {
if p.ctx.GetParent() != nil {
p.ctx.GetParent().(ParserRuleContext).RemoveLastChild()
p.ctx.GetParent().(ParserRuleContext).AddChild(localctx)
}
}
p._ctx = localctx
p.ctx = localctx
}
// Get the precedence level for the top-most precedence rule.
@ -511,19 +511,19 @@ func (p *BaseParser) EnterOuterAlt(localctx ParserRuleContext, altNum int) {
// the parser context is not nested within a precedence rule.
func (p *BaseParser) GetPrecedence() int {
if len(p._precedenceStack) == 0 {
if len(p.precedenceStack) == 0 {
return -1
}
return p._precedenceStack[len(p._precedenceStack)-1]
return p.precedenceStack[len(p.precedenceStack)-1]
}
func (p *BaseParser) EnterRecursionRule(localctx ParserRuleContext, state, ruleIndex, precedence int) {
p.SetState(state)
p._precedenceStack.Push(precedence)
p._ctx = localctx
p._ctx.SetStart(p._input.LT(1))
if p._parseListeners != nil {
p.precedenceStack.Push(precedence)
p.ctx = localctx
p.ctx.SetStart(p.input.LT(1))
if p.parseListeners != nil {
p.TriggerEnterRuleEvent() // simulates rule entry for
// left-recursive rules
}
@ -533,34 +533,34 @@ func (p *BaseParser) EnterRecursionRule(localctx ParserRuleContext, state, ruleI
// Like {@link //EnterRule} but for recursive rules.
func (p *BaseParser) PushNewRecursionContext(localctx ParserRuleContext, state, ruleIndex int) {
var previous = p._ctx
var previous = p.ctx
previous.SetParent(localctx)
previous.SetInvokingState(state)
previous.SetStop(p._input.LT(-1))
previous.SetStop(p.input.LT(-1))
p._ctx = localctx
p._ctx.SetStart(previous.GetStart())
p.ctx = localctx
p.ctx.SetStart(previous.GetStart())
if p.BuildParseTrees {
p._ctx.AddChild(previous)
p.ctx.AddChild(previous)
}
if p._parseListeners != nil {
if p.parseListeners != nil {
p.TriggerEnterRuleEvent() // simulates rule entry for
// left-recursive rules
}
}
func (p *BaseParser) UnrollRecursionContexts(parentCtx ParserRuleContext) {
p._precedenceStack.Pop()
p._ctx.SetStop(p._input.LT(-1))
var retCtx = p._ctx // save current ctx (return value)
// unroll so _ctx is as it was before call to recursive method
if p._parseListeners != nil {
for p._ctx != parentCtx {
p.precedenceStack.Pop()
p.ctx.SetStop(p.input.LT(-1))
var retCtx = p.ctx // save current ctx (return value)
// unroll so ctx is as it was before call to recursive method
if p.parseListeners != nil {
for p.ctx != parentCtx {
p.TriggerExitRuleEvent()
p._ctx = p._ctx.GetParent().(ParserRuleContext)
p.ctx = p.ctx.GetParent().(ParserRuleContext)
}
} else {
p._ctx = parentCtx
p.ctx = parentCtx
}
// hook into tree
retCtx.SetParent(parentCtx)
@ -571,7 +571,7 @@ func (p *BaseParser) UnrollRecursionContexts(parentCtx ParserRuleContext) {
}
func (p *BaseParser) GetInvokingContext(ruleIndex int) ParserRuleContext {
var ctx = p._ctx
var ctx = p.ctx
for ctx != nil {
if ctx.GetRuleIndex() == ruleIndex {
return ctx
@ -582,7 +582,7 @@ func (p *BaseParser) GetInvokingContext(ruleIndex int) ParserRuleContext {
}
func (p *BaseParser) Precpred(localctx RuleContext, precedence int) bool {
return precedence >= p._precedenceStack[len(p._precedenceStack)-1]
return precedence >= p.precedenceStack[len(p.precedenceStack)-1]
}
func (p *BaseParser) inContext(context ParserRuleContext) bool {
@ -606,7 +606,7 @@ func (p *BaseParser) inContext(context ParserRuleContext) bool {
func (p *BaseParser) IsExpectedToken(symbol int) bool {
var atn = p.Interpreter.atn
var ctx = p._ctx
var ctx = p.ctx
var s = atn.states[p.state]
var following = atn.NextTokens(s, nil)
if following.contains(symbol) {
@ -638,7 +638,7 @@ func (p *BaseParser) IsExpectedToken(symbol int) bool {
// @see ATN//getExpectedTokens(int, RuleContext)
//
func (p *BaseParser) GetExpectedTokens() *IntervalSet {
return p.Interpreter.atn.getExpectedTokens(p.state, p._ctx)
return p.Interpreter.atn.getExpectedTokens(p.state, p.ctx)
}
func (p *BaseParser) GetExpectedTokensWithinCurrentRule() *IntervalSet {
@ -666,7 +666,7 @@ func (p *BaseParser) GetRuleIndex(ruleName string) int {
func (p *BaseParser) GetRuleInvocationStack(c ParserRuleContext) []string {
if c == nil {
c = p._ctx
c = p.ctx
}
var stack = make([]string, 0)
for c != nil {
@ -718,13 +718,13 @@ func (p *BaseParser) GetSourceName() string {
//
func (p *BaseParser) SetTrace(trace *TraceListener) {
if trace == nil {
p.RemoveParseListener(p._tracer)
p._tracer = nil
p.RemoveParseListener(p.tracer)
p.tracer = nil
} else {
if p._tracer != nil {
p.RemoveParseListener(p._tracer)
if p.tracer != nil {
p.RemoveParseListener(p.tracer)
}
p._tracer = NewTraceListener(p)
p.AddParseListener(p._tracer)
p.tracer = NewTraceListener(p)
p.AddParseListener(p.tracer)
}
}
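TriggerExitRuleEvent above walks the registered listeners in reverse so exit events fire in the opposite order of registration. The same reverse walk in isolation (strings standing in for ParseTreeListener):

package main

import "fmt"

func main() {
	listeners := []string{"first", "second", "third"}
	l := len(listeners) - 1
	for i := range listeners {
		fmt.Println("exit ->", listeners[l-i]) // third, second, first
	}
}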

View File

@ -11,12 +11,12 @@ type ParserATNSimulator struct {
parser Parser
predictionMode int
_input TokenStream
_startIndex int
_dfa *DFA
input TokenStream
startIndex int
dfa *DFA
DecisionToDFA []*DFA
mergeCache *DoubleDict
_outerContext ParserRuleContext
outerContext ParserRuleContext
}
func NewParserATNSimulator(parser Parser, atn *ATN, decisionToDFA []*DFA, sharedContextCache *PredictionContextCache) *ParserATNSimulator {
@ -30,10 +30,10 @@ func NewParserATNSimulator(parser Parser, atn *ATN, decisionToDFA []*DFA, shared
// SLL, LL, or LL + exact ambig detection?//
p.predictionMode = PredictionModeLL
// LAME globals to avoid parameters!!!!! I need these down deep in predTransition
p._input = nil
p._startIndex = 0
p._outerContext = nil
p._dfa = nil
p.input = nil
p.startIndex = 0
p.outerContext = nil
p.dfa = nil
// Each prediction operation uses a cache for merge of prediction contexts.
// Don't keep around as it wastes huge amounts of memory. DoubleKeyMap
// isn't Synchronized but we're ok since two threads shouldn't reuse same
@ -77,17 +77,17 @@ func (p *ParserATNSimulator) AdaptivePredict(input TokenStream, decision int, ou
strconv.Itoa(input.LT(1).GetColumn()))
}
p._input = input
p._startIndex = input.Index()
p._outerContext = outerContext
p.input = input
p.startIndex = input.Index()
p.outerContext = outerContext
var dfa = p.DecisionToDFA[decision]
p._dfa = dfa
p.dfa = dfa
var m = input.Mark()
var index = input.Index()
defer func() {
p._dfa = nil
p.dfa = nil
p.mergeCache = nil // wack cache after each prediction
input.Seek(index)
input.Release(m)
@ -127,7 +127,7 @@ func (p *ParserATNSimulator) AdaptivePredict(input TokenStream, decision int, ou
}
}
var fullCtx = false
var s0_closure = p.computeStartState(dfa.atnStartState, RuleContextEmpty, fullCtx)
var s0Closure = p.computeStartState(dfa.atnStartState, RuleContextEmpty, fullCtx)
if dfa.precedenceDfa {
// If p is a precedence DFA, we use applyPrecedenceFilter
@ -136,11 +136,11 @@ func (p *ParserATNSimulator) AdaptivePredict(input TokenStream, decision int, ou
// appropriate start state for the precedence level rather
// than simply setting DFA.s0.
//
s0_closure = p.applyPrecedenceFilter(s0_closure)
s0 = p.addDFAState(dfa, NewDFAState(-1, s0_closure))
s0Closure = p.applyPrecedenceFilter(s0Closure)
s0 = p.addDFAState(dfa, NewDFAState(-1, s0Closure))
dfa.setPrecedenceStartState(p.parser.GetPrecedence(), s0)
} else {
s0 = p.addDFAState(dfa, NewDFAState(-1, s0_closure))
s0 = p.addDFAState(dfa, NewDFAState(-1, s0Closure))
dfa.s0 = s0
}
}
@ -248,9 +248,9 @@ func (p *ParserATNSimulator) execATN(dfa *DFA, s0 *DFAState, input TokenStream,
fmt.Println("ctx sensitive state " + outerContext.String(nil, nil) + " in " + D.String())
}
var fullCtx = true
var s0_closure = p.computeStartState(dfa.atnStartState, outerContext, fullCtx)
var s0Closure = p.computeStartState(dfa.atnStartState, outerContext, fullCtx)
p.ReportAttemptingFullContext(dfa, conflictingAlts, D.configs, startIndex, input.Index())
alt := p.execATNWithFullContext(dfa, D, s0_closure, input, startIndex, outerContext)
alt := p.execATNWithFullContext(dfa, D, s0Closure, input, startIndex, outerContext)
return alt
}
if D.isAcceptState {
@ -742,7 +742,7 @@ func (p *ParserATNSimulator) applyPrecedenceFilter(configs ATNConfigSet) ATNConf
if config.GetAlt() != 1 {
continue
}
var updatedContext = config.GetSemanticContext().evalPrecedence(p.parser, p._outerContext)
var updatedContext = config.GetSemanticContext().evalPrecedence(p.parser, p.outerContext)
if updatedContext == nil {
// the configuration was eliminated
continue
@ -998,7 +998,7 @@ func (p *ParserATNSimulator) closureCheckingStopState(config ATNConfig, configs
// run thru all possible stack tops in ctx
if !config.GetContext().isEmpty() {
for i := 0; i < config.GetContext().length(); i++ {
if config.GetContext().getReturnState(i) == BasePredictionContextEMPTY_RETURN_STATE {
if config.GetContext().getReturnState(i) == BasePredictionContextEmptyReturnState {
if fullCtx {
configs.Add(NewBaseATNConfig1(config, config.GetState(), BasePredictionContextEMPTY), p.mergeCache)
continue
@ -1010,7 +1010,7 @@ func (p *ParserATNSimulator) closureCheckingStopState(config ATNConfig, configs
}
fmt.Println("FALLING off rule " + p.getRuleName(config.GetState().GetRuleIndex()))
}
p.closure_(config, configs, closureBusy, collectPredicates, fullCtx, depth, treatEOFAsEpsilon)
p.closureWork(config, configs, closureBusy, collectPredicates, fullCtx, depth, treatEOFAsEpsilon)
}
continue
}
@ -1039,13 +1039,13 @@ func (p *ParserATNSimulator) closureCheckingStopState(config ATNConfig, configs
}
}
}
p.closure_(config, configs, closureBusy, collectPredicates, fullCtx, depth, treatEOFAsEpsilon)
p.closureWork(config, configs, closureBusy, collectPredicates, fullCtx, depth, treatEOFAsEpsilon)
}
// Do the actual work of walking epsilon edges//
func (p *ParserATNSimulator) closure_(config ATNConfig, configs ATNConfigSet, closureBusy *Set, collectPredicates, fullCtx bool, depth int, treatEOFAsEpsilon bool) {
func (p *ParserATNSimulator) closureWork(config ATNConfig, configs ATNConfigSet, closureBusy *Set, collectPredicates, fullCtx bool, depth int, treatEOFAsEpsilon bool) {
if PortDebug {
fmt.Println("closure_")
fmt.Println("closureWork")
}
var state = config.GetState()
// optimization
@ -1094,11 +1094,11 @@ func (p *ParserATNSimulator) closure_(config ATNConfig, configs ATNConfigSet, cl
}
}
if p._dfa != nil && p._dfa.precedenceDfa {
if p.dfa != nil && p.dfa.precedenceDfa {
if PortDebug {
fmt.Println("DEBUG 4")
}
if t.(*EpsilonTransition).outermostPrecedenceReturn == p._dfa.atnStartState.GetRuleIndex() {
if t.(*EpsilonTransition).outermostPrecedenceReturn == p.dfa.atnStartState.GetRuleIndex() {
c.setPrecedenceFilterSuppressed(true)
}
}
@ -1197,10 +1197,10 @@ func (p *ParserATNSimulator) precedenceTransition(config ATNConfig,
// during closure, which dramatically reduces the size of
// the config sets. It also obviates the need to test predicates
// later during conflict resolution.
var currentPosition = p._input.Index()
p._input.Seek(p._startIndex)
var predSucceeds = pt.getPredicate().evaluate(p.parser, p._outerContext)
p._input.Seek(currentPosition)
var currentPosition = p.input.Index()
p.input.Seek(p.startIndex)
var predSucceeds = pt.getPredicate().evaluate(p.parser, p.outerContext)
p.input.Seek(currentPosition)
if predSucceeds {
c = NewBaseATNConfig4(config, pt.getTarget()) // no pred context
}
@ -1233,10 +1233,10 @@ func (p *ParserATNSimulator) predTransition(config ATNConfig, pt *PredicateTrans
// during closure, which dramatically reduces the size of
// the config sets. It also obviates the need to test predicates
// later during conflict resolution.
var currentPosition = p._input.Index()
p._input.Seek(p._startIndex)
var predSucceeds = pt.getPredicate().evaluate(p.parser, p._outerContext)
p._input.Seek(currentPosition)
var currentPosition = p.input.Index()
p.input.Seek(p.startIndex)
var predSucceeds = pt.getPredicate().evaluate(p.parser, p.outerContext)
p.input.Seek(currentPosition)
if predSucceeds {
c = NewBaseATNConfig4(config, pt.getTarget()) // no pred context
}
@ -1410,21 +1410,21 @@ func (p *ParserATNSimulator) getUniqueAlt(configs ATNConfigSet) int {
// otherwise p method returns the result of calling {@link //addDFAState}
// on {@code to}
//
func (p *ParserATNSimulator) addDFAEdge(dfa *DFA, from_ *DFAState, t int, to *DFAState) *DFAState {
func (p *ParserATNSimulator) addDFAEdge(dfa *DFA, from *DFAState, t int, to *DFAState) *DFAState {
if ParserATNSimulatorDebug {
fmt.Println("EDGE " + from_.String() + " -> " + to.String() + " upon " + p.GetTokenName(t))
fmt.Println("EDGE " + from.String() + " -> " + to.String() + " upon " + p.GetTokenName(t))
}
if to == nil {
return nil
}
to = p.addDFAState(dfa, to) // used existing if possible not incoming
if from_ == nil || t < -1 || t > p.atn.maxTokenType {
if from == nil || t < -1 || t > p.atn.maxTokenType {
return to
}
if from_.edges == nil {
from_.edges = make([]*DFAState, p.atn.maxTokenType+1+1)
if from.edges == nil {
from.edges = make([]*DFAState, p.atn.maxTokenType+1+1)
}
from_.edges[t+1] = to // connect
from.edges[t+1] = to // connect
if ParserATNSimulatorDebug {
var names []string
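AdaptivePredict above clears p.dfa and the merge cache in a defer, so the per-prediction state is reset on every exit path, including panics. A minimal sketch of that pattern (hypothetical type):

package main

import "fmt"

type sim struct{ dfa *int }

func (s *sim) predict() int {
	d := 7
	s.dfa = &d
	defer func() {
		s.dfa = nil // clear per-prediction state on every exit path
	}()
	return *s.dfa
}

func main() {
	s := &sim{}
	fmt.Println(s.predict(), s.dfa == nil) // 7 true
}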

View File

@ -32,12 +32,12 @@ func NewBasePredictionContext(cachedHashString string) *BasePredictionContext {
// {@code//+x =//}.
// /
const (
BasePredictionContextEMPTY_RETURN_STATE = 0x7FFFFFFF
BasePredictionContextEmptyReturnState = 0x7FFFFFFF
)
// Represents {@code $} in an array in full context mode, when {@code $}
// doesn't mean wildcard: {@code $ + x = [$,x]}. Here,
// {@code $} = {@link //EMPTY_RETURN_STATE}.
// {@code $} = {@link //EmptyReturnState}.
// /
var BasePredictionContextglobalNodeCount = 1
@ -151,7 +151,7 @@ func NewBaseSingletonPredictionContext(parent PredictionContext, returnState int
}
func SingletonBasePredictionContextCreate(parent PredictionContext, returnState int) PredictionContext {
if returnState == BasePredictionContextEMPTY_RETURN_STATE && parent == nil {
if returnState == BasePredictionContextEmptyReturnState && parent == nil {
// someone can pass in the bits of an array ctx that mean $
return BasePredictionContextEMPTY
}
@ -172,7 +172,7 @@ func (b *BaseSingletonPredictionContext) getReturnState(index int) int {
}
func (b *BaseSingletonPredictionContext) hasEmptyPath() bool {
return b.returnState == BasePredictionContextEMPTY_RETURN_STATE
return b.returnState == BasePredictionContextEmptyReturnState
}
func (b *BaseSingletonPredictionContext) equals(other PredictionContext) bool {
@ -209,7 +209,7 @@ func (b *BaseSingletonPredictionContext) String() string {
}
if len(up) == 0 {
if b.returnState == BasePredictionContextEMPTY_RETURN_STATE {
if b.returnState == BasePredictionContextEmptyReturnState {
return "$"
}
@ -229,7 +229,7 @@ func NewEmptyPredictionContext() *EmptyPredictionContext {
p := new(EmptyPredictionContext)
p.BaseSingletonPredictionContext = NewBaseSingletonPredictionContext(nil, BasePredictionContextEMPTY_RETURN_STATE)
p.BaseSingletonPredictionContext = NewBaseSingletonPredictionContext(nil, BasePredictionContextEmptyReturnState)
return p
}
@ -265,7 +265,7 @@ func NewArrayPredictionContext(parents []PredictionContext, returnStates []int)
// Parent can be nil only if full ctx mode and we make an array
// from {@link //EMPTY} and non-empty. We merge {@link //EMPTY} by using
// nil parent and
// returnState == {@link //EMPTY_RETURN_STATE}.
// returnState == {@link //EmptyReturnState}.
c := new(ArrayPredictionContext)
c.BasePredictionContext = NewBasePredictionContext("")
@ -285,13 +285,13 @@ func (a *ArrayPredictionContext) GetReturnStates() []int {
}
func (a *ArrayPredictionContext) hasEmptyPath() bool {
return a.getReturnState(a.length()-1) == BasePredictionContextEMPTY_RETURN_STATE
return a.getReturnState(a.length()-1) == BasePredictionContextEmptyReturnState
}
func (a *ArrayPredictionContext) isEmpty() bool {
// since EMPTY_RETURN_STATE can only appear in the last position, we
// since EmptyReturnState can only appear in the last position, we
// don't need to verify that size==1
return a.returnStates[0] == BasePredictionContextEMPTY_RETURN_STATE
return a.returnStates[0] == BasePredictionContextEmptyReturnState
}
func (a *ArrayPredictionContext) length() int {
@ -327,7 +327,7 @@ func (a *ArrayPredictionContext) String() string {
if i > 0 {
s = s + ", "
}
if a.returnStates[i] == BasePredictionContextEMPTY_RETURN_STATE {
if a.returnStates[i] == BasePredictionContextEmptyReturnState {
s = s + "$"
continue
}
@ -567,11 +567,11 @@ func mergeRoot(a, b SingletonPredictionContext, rootIsWildcard bool) PredictionC
if a == BasePredictionContextEMPTY && b == BasePredictionContextEMPTY {
return BasePredictionContextEMPTY // $ + $ = $
} else if a == BasePredictionContextEMPTY { // $ + x = [$,x]
var payloads = []int{b.getReturnState(-1), BasePredictionContextEMPTY_RETURN_STATE}
var payloads = []int{b.getReturnState(-1), BasePredictionContextEmptyReturnState}
var parents = []PredictionContext{b.GetParent(-1), nil}
return NewArrayPredictionContext(parents, payloads)
} else if b == BasePredictionContextEMPTY { // x + $ = [$,x] ($ is always first if present)
var payloads = []int{a.getReturnState(-1), BasePredictionContextEMPTY_RETURN_STATE}
var payloads = []int{a.getReturnState(-1), BasePredictionContextEmptyReturnState}
var parents = []PredictionContext{a.GetParent(-1), nil}
return NewArrayPredictionContext(parents, payloads)
}
@ -619,32 +619,32 @@ func mergeArrays(a, b *ArrayPredictionContext, rootIsWildcard bool, mergeCache *
var mergedParents = make([]PredictionContext, 0)
// walk and merge to yield mergedParents, mergedReturnStates
for i < len(a.returnStates) && j < len(b.returnStates) {
var a_parent = a.parents[i]
var b_parent = b.parents[j]
var aParent = a.parents[i]
var bParent = b.parents[j]
if a.returnStates[i] == b.returnStates[j] {
// same payload (stack tops are equal), must yield merged singleton
var payload = a.returnStates[i]
// $+$ = $
var bothDollars = payload == BasePredictionContextEMPTY_RETURN_STATE && a_parent == nil && b_parent == nil
var ax_ax = (a_parent != nil && b_parent != nil && a_parent == b_parent) // ax+ax
var bothDollars = payload == BasePredictionContextEmptyReturnState && aParent == nil && bParent == nil
var axAX = (aParent != nil && bParent != nil && aParent == bParent) // ax+ax
// ->
// ax
if bothDollars || ax_ax {
mergedParents[k] = a_parent // choose left
if bothDollars || axAX {
mergedParents[k] = aParent // choose left
mergedReturnStates[k] = payload
} else { // ax+ay -> a'[x,y]
var mergedParent = merge(a_parent, b_parent, rootIsWildcard, mergeCache)
var mergedParent = merge(aParent, bParent, rootIsWildcard, mergeCache)
mergedParents[k] = mergedParent
mergedReturnStates[k] = payload
}
i++ // hop over left one as usual
j++ // but also Skip one in right side since we merge
} else if a.returnStates[i] < b.returnStates[j] { // copy a[i] to M
mergedParents[k] = a_parent
mergedParents[k] = aParent
mergedReturnStates[k] = a.returnStates[i]
i++
} else { // b > a, copy b[j] to M
mergedParents[k] = b_parent
mergedParents[k] = bParent
mergedReturnStates[k] = b.returnStates[j]
j++
}

View File

@ -25,7 +25,7 @@ type Recognizer interface {
}
type BaseRecognizer struct {
_listeners []ErrorListener
listeners []ErrorListener
state int
RuleNames []string
@ -36,7 +36,7 @@ type BaseRecognizer struct {
func NewBaseRecognizer() *BaseRecognizer {
rec := new(BaseRecognizer)
rec._listeners = []ErrorListener{ConsoleErrorListenerINSTANCE}
rec.listeners = []ErrorListener{ConsoleErrorListenerINSTANCE}
rec.state = -1
return rec
}
@ -56,11 +56,11 @@ func (b *BaseRecognizer) Action(context RuleContext, ruleIndex, actionIndex int)
}
func (b *BaseRecognizer) AddErrorListener(listener ErrorListener) {
b._listeners = append(b._listeners, listener)
b.listeners = append(b.listeners, listener)
}
func (b *BaseRecognizer) RemoveErrorListeners() {
b._listeners = make([]ErrorListener, 0)
b.listeners = make([]ErrorListener, 0)
}
func (b *BaseRecognizer) GetRuleNames() []string {
@ -203,7 +203,7 @@ func (b *BaseRecognizer) GetTokenErrorDisplay(t Token) string {
}
func (b *BaseRecognizer) GetErrorListenerDispatch() ErrorListener {
return NewProxyErrorListener(b._listeners)
return NewProxyErrorListener(b.listeners)
}
// subclass needs to override these if there are sempreds or actions
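The listener registry is a plain slice: AddErrorListener appends and RemoveErrorListeners swaps in a fresh empty slice. In isolation (strings standing in for ErrorListener):

package main

import "fmt"

type recognizer struct{ listeners []string }

func (r *recognizer) AddErrorListener(l string) { r.listeners = append(r.listeners, l) }
func (r *recognizer) RemoveErrorListeners()     { r.listeners = make([]string, 0) }

func main() {
	r := &recognizer{}
	r.AddErrorListener("console")
	fmt.Println(len(r.listeners)) // 1
	r.RemoveErrorListeners()
	fmt.Println(len(r.listeners)) // 0
}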

View File

@ -42,7 +42,7 @@ type BaseToken struct {
tokenIndex int // from 0..n-1 of the token object in the input stream
line int // line=1..n of the 1st character
column int // beginning of the line at which it occurs, 0..n-1
_text string // text of the token.
text string // text of the token.
readOnly bool
}
@ -160,13 +160,13 @@ func (c *CommonToken) clone() *CommonToken {
t.tokenIndex = c.GetTokenIndex()
t.line = c.GetLine()
t.column = c.GetColumn()
t._text = c.GetText()
t.text = c.GetText()
return t
}
func (c *CommonToken) GetText() string {
if c._text != "" {
return c._text
if c.text != "" {
return c.text
}
var input = c.GetInputStream()
if input == nil {
@ -180,7 +180,7 @@ func (c *CommonToken) GetText() string {
}
func (c *CommonToken) SetText(text string) {
c._text = text
c.text = text
}
func (c *CommonToken) String() string {
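GetText above uses the empty string as a sentinel meaning "no override", falling back to the input stream otherwise. A sketch of that sentinel pattern (the stream lookup is faked with a closure):

package main

import "fmt"

type token struct{ text string }

func (t token) GetText(fromStream func() string) string {
	if t.text != "" {
		return t.text // explicit override wins
	}
	return fromStream()
}

func main() {
	stream := func() string { return "from stream" }
	fmt.Println(token{}.GetText(stream))                 // from stream
	fmt.Println(token{text: "override"}.GetText(stream)) // override
}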

View File

@ -16,13 +16,13 @@ func (t *TraceListener) VisitErrorNode(_ ErrorNode) {
}
func (t *TraceListener) EnterEveryRule(ctx ParserRuleContext) {
fmt.Println("enter " + t.parser.GetRuleNames()[ctx.GetRuleIndex()] + ", LT(1)=" + t.parser._input.LT(1).GetText())
fmt.Println("enter " + t.parser.GetRuleNames()[ctx.GetRuleIndex()] + ", LT(1)=" + t.parser.input.LT(1).GetText())
}
func (t *TraceListener) VisitTerminal(node TerminalNode) {
fmt.Println("consume " + fmt.Sprint(node.GetSymbol()) + " rule " + t.parser.GetRuleNames()[t.parser._ctx.GetRuleIndex()])
fmt.Println("consume " + fmt.Sprint(node.GetSymbol()) + " rule " + t.parser.GetRuleNames()[t.parser.ctx.GetRuleIndex()])
}
func (t *TraceListener) ExitEveryRule(ctx ParserRuleContext) {
fmt.Println("exit " + t.parser.GetRuleNames()[ctx.GetRuleIndex()] + ", LT(1)=" + t.parser._input.LT(1).GetText())
fmt.Println("exit " + t.parser.GetRuleNames()[ctx.GetRuleIndex()] + ", LT(1)=" + t.parser.input.LT(1).GetText())
}

View File

@ -26,8 +26,8 @@ type Transition interface {
type BaseTransition struct {
target ATNState
isEpsilon bool
label_ int
label *IntervalSet
label int
intervalSet *IntervalSet
serializationType int
}
@ -42,7 +42,7 @@ func NewBaseTransition(target ATNState) *BaseTransition {
t.target = target
// Are we epsilon, action, sempred?
t.isEpsilon = false
t.label = nil
t.intervalSet = nil
return t
}
@ -60,7 +60,7 @@ func (t *BaseTransition) getIsEpsilon() bool {
}
func (t *BaseTransition) getLabel() *IntervalSet {
return t.label
return t.intervalSet
}
func (t *BaseTransition) getSerializationType() int {
@ -79,7 +79,7 @@ const (
TransitionATOM = 5
TransitionACTION = 6
TransitionSET = 7 // ~(A|B) or ~atom, wildcard, which convert to next 2
TransitionNOT_SET = 8
TransitionNOTSET = 8
TransitionWILDCARD = 9
TransitionPRECEDENCE = 10
)
@ -117,7 +117,7 @@ var TransitionserializationNames = []string{
// TransitionATOM,
// TransitionACTION,
// TransitionSET,
// TransitionNOT_SET,
// TransitionNOTSET,
// TransitionWILDCARD,
// TransitionPRECEDENCE
//}
@ -127,13 +127,13 @@ type AtomTransition struct {
*BaseTransition
}
func NewAtomTransition(target ATNState, label int) *AtomTransition {
func NewAtomTransition(target ATNState, intervalSet int) *AtomTransition {
t := new(AtomTransition)
t.BaseTransition = NewBaseTransition(target)
t.label_ = label // The token type or character value or, signifies special label.
t.label = t.makeLabel()
t.label = intervalSet // The token type or character value or, signifies special intervalSet.
t.intervalSet = t.makeLabel()
t.serializationType = TransitionATOM
return t
@ -141,16 +141,16 @@ func NewAtomTransition(target ATNState, label int) *AtomTransition {
func (t *AtomTransition) makeLabel() *IntervalSet {
var s = NewIntervalSet()
s.addOne(t.label_)
s.addOne(t.label)
return s
}
func (t *AtomTransition) Matches(symbol, minVocabSymbol, maxVocabSymbol int) bool {
return t.label_ == symbol
return t.label == symbol
}
func (t *AtomTransition) String() string {
return strconv.Itoa(t.label_)
return strconv.Itoa(t.label)
}
type RuleTransition struct {
@ -217,7 +217,7 @@ func NewRangeTransition(target ATNState, start, stop int) *RangeTransition {
t.serializationType = TransitionRANGE
t.start = start
t.stop = stop
t.label = t.makeLabel()
t.intervalSet = t.makeLabel()
return t
}
@ -325,21 +325,21 @@ func NewSetTransition(target ATNState, set *IntervalSet) *SetTransition {
t.serializationType = TransitionSET
if set != nil {
t.label = set
t.intervalSet = set
} else {
t.label = NewIntervalSet()
t.label.addOne(TokenInvalidType)
t.intervalSet = NewIntervalSet()
t.intervalSet.addOne(TokenInvalidType)
}
return t
}
func (t *SetTransition) Matches(symbol, minVocabSymbol, maxVocabSymbol int) bool {
return t.label.contains(symbol)
return t.intervalSet.contains(symbol)
}
func (t *SetTransition) String() string {
return t.label.String()
return t.intervalSet.String()
}
type NotSetTransition struct {
@ -352,17 +352,17 @@ func NewNotSetTransition(target ATNState, set *IntervalSet) *NotSetTransition {
t.SetTransition = NewSetTransition(target, set)
t.serializationType = TransitionNOT_SET
t.serializationType = TransitionNOTSET
return t
}
func (t *NotSetTransition) Matches(symbol, minVocabSymbol, maxVocabSymbol int) bool {
return symbol >= minVocabSymbol && symbol <= maxVocabSymbol && !t.label.contains(symbol)
return symbol >= minVocabSymbol && symbol <= maxVocabSymbol && !t.intervalSet.contains(symbol)
}
func (t *NotSetTransition) String() string {
return "~" + t.label.String()
return "~" + t.intervalSet.String()
}
type WildcardTransition struct {
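This file shows why one rename ripples into another: BaseTransition held both label_ (an int) and label (an *IntervalSet), so once label_ loses its underscore, the set-valued field must move aside, becoming intervalSet. Two struct fields cannot share a name:

package main

import "fmt"

type transition struct {
	label       int
	intervalSet []int // stand-in for *IntervalSet
	// label []int  // would not compile: duplicate field name label
}

func main() {
	t := transition{label: 5, intervalSet: []int{5}}
	fmt.Println(t.label, t.intervalSet)
}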

View File

@ -93,11 +93,11 @@ func TreesfindAllRuleNodes(t ParseTree, ruleIndex int) []ParseTree {
func TreesfindAllNodes(t ParseTree, index int, findTokens bool) []ParseTree {
var nodes = make([]ParseTree, 0)
Trees_findAllNodes(t, index, findTokens, nodes)
TreesFindAllNodes(t, index, findTokens, nodes)
return nodes
}
func Trees_findAllNodes(t ParseTree, index int, findTokens bool, nodes []ParseTree) {
func TreesFindAllNodes(t ParseTree, index int, findTokens bool, nodes []ParseTree) {
// check this node (the root) first
t2, ok := t.(TerminalNode)
@ -114,7 +114,7 @@ func Trees_findAllNodes(t ParseTree, index int, findTokens bool, nodes []ParseTr
}
// check children
for i := 0; i < t.GetChildCount(); i++ {
Trees_findAllNodes(t.GetChild(i).(ParseTree), index, findTokens, nodes)
TreesFindAllNodes(t.GetChild(i).(ParseTree), index, findTokens, nodes)
}
}
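A final note on Trees_findAllNodes versus TreesFindAllNodes: Go visibility depends only on the first letter, so the function was exported before the rename and remains so after it; a lowercase initial would have been the way to hide it. Illustrated with stand-ins (the real functions take ParseTree arguments):

package main

import "fmt"

func TreesFindAllNodes() string { return "exported" }
func treesFindAllNodes() string { return "unexported (package-private)" }

func main() {
	fmt.Println(TreesFindAllNodes(), "/", treesFindAllNodes())
}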