Remove PortDebug

Peter Boyer 2016-11-04 13:55:39 -04:00
parent 07f7dae78f
commit aa568711d9
15 changed files with 0 additions and 352 deletions
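The change is purely mechanical: each touched file loses its if PortDebug { fmt.Println(...) } guards, and atn.go drops the PortDebug constant itself. Below is a minimal, self-contained sketch of the pattern being deleted; the enclosing function and file are hypothetical, only the guard shape is taken from the diff.

package main

import "fmt"

// PortDebug prints debug information to standard out when true. TODO: Remove.
const PortDebug = false

func fetchToken() int {
	// This is the guard removed at every call site: because PortDebug is a
	// constant false, the branch is dead code that the compiler strips, but
	// it still clutters the ported sources.
	if PortDebug {
		fmt.Println("fetch loop")
	}
	return 0
}

func main() {
	_ = fetchToken()
}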

View File

@ -2,8 +2,6 @@ package antlr
import "fmt"
// PortDebug prints debug information to standard out when true. TODO: Remove.
const PortDebug = false
var ATNInvalidAltNumber int
@ -61,18 +59,9 @@ func (a *ATN) NextTokensInContext(s ATNState, ctx RuleContext) *IntervalSet {
// rule.
func (a *ATN) NextTokensNoContext(s ATNState) *IntervalSet {
if s.GetNextTokenWithinRule() != nil {
if PortDebug {
fmt.Println("DEBUG A")
}
return s.GetNextTokenWithinRule()
}
if PortDebug {
fmt.Println("DEBUG 2")
fmt.Println(a.NextTokensInContext(s, nil))
}
s.SetNextTokenWithinRule(a.NextTokensInContext(s, nil))
s.GetNextTokenWithinRule().readOnly = true

View File

@ -32,10 +32,6 @@ func NewCommonTokenFactory(copyText bool) *CommonTokenFactory {
var CommonTokenFactoryDEFAULT = NewCommonTokenFactory(false)
func (c *CommonTokenFactory) Create(source *TokenSourceCharStreamPair, ttype int, text string, channel, start, stop, line, column int) Token {
if PortDebug {
fmt.Println("Token factory creating: " + text)
}
t := NewCommonToken(source, ttype, channel, start, stop)
t.line = line
@ -51,12 +47,7 @@ func (c *CommonTokenFactory) Create(source *TokenSourceCharStreamPair, ttype int
}
func (c *CommonTokenFactory) createThin(ttype int, text string) Token {
if PortDebug {
fmt.Println("Token factory creating: " + text)
}
t := NewCommonToken(nil, ttype, TokenDefaultChannel, -1, -1)
t.SetText(text)
return t

View File

@ -92,19 +92,11 @@ func (c *CommonTokenStream) Consume() {
SkipEOFCheck = false
}
if PortDebug {
fmt.Println("Consume 1")
}
if !SkipEOFCheck && c.LA(1) == TokenEOF {
panic("cannot consume EOF")
}
if c.Sync(c.index + 1) {
if PortDebug {
fmt.Println("Consume 2")
}
c.index = c.adjustSeekIndex(c.index + 1)
}
}
@ -116,11 +108,6 @@ func (c *CommonTokenStream) Sync(i int) bool {
if n > 0 {
fetched := c.fetch(n)
if PortDebug {
fmt.Println("Sync done")
}
return fetched >= n
}
@ -137,10 +124,6 @@ func (c *CommonTokenStream) fetch(n int) int {
for i := 0; i < n; i++ {
t := c.tokenSource.NextToken()
if PortDebug {
fmt.Println("fetch loop")
}
t.SetTokenIndex(len(c.tokens))
c.tokens = append(c.tokens, t)
@ -151,10 +134,6 @@ func (c *CommonTokenStream) fetch(n int) int {
}
}
if PortDebug {
fmt.Println("fetch done")
}
return n
}

View File

@ -25,27 +25,15 @@ func NewDefaultErrorListener() *DefaultErrorListener {
}
func (d *DefaultErrorListener) SyntaxError(recognizer Recognizer, offendingSymbol interface{}, line, column int, msg string, e RecognitionException) {
if PortDebug {
fmt.Println("SyntaxError!")
}
}
func (d *DefaultErrorListener) ReportAmbiguity(recognizer Parser, dfa *DFA, startIndex, stopIndex int, exact bool, ambigAlts *BitSet, configs ATNConfigSet) {
if PortDebug {
fmt.Println("ReportAmbiguity!")
}
}
func (d *DefaultErrorListener) ReportAttemptingFullContext(recognizer Parser, dfa *DFA, startIndex, stopIndex int, conflictingAlts *BitSet, configs ATNConfigSet) {
if PortDebug {
fmt.Println("ReportAttemptingFullContext!")
}
}
func (d *DefaultErrorListener) ReportContextSensitivity(recognizer Parser, dfa *DFA, startIndex, stopIndex, prediction int, configs ATNConfigSet) {
if PortDebug {
fmt.Println("ReportContextSensitivity!")
}
}
type ConsoleErrorListener struct {

View File

@ -209,37 +209,18 @@ func (d *DefaultErrorStrategy) Sync(recognizer Parser) {
return
}
if PortDebug {
fmt.Println("STATE" + strconv.Itoa(recognizer.GetState()))
}
s := recognizer.GetInterpreter().atn.states[recognizer.GetState()]
la := recognizer.GetTokenStream().LA(1)
if PortDebug {
fmt.Println("LA" + strconv.Itoa(la))
}
// try cheaper subset first might get lucky. seems to shave a wee bit off
if la == TokenEOF || recognizer.GetATN().NextTokens(s, nil).contains(la) {
if PortDebug {
fmt.Println("OK1")
}
return
}
// Return but don't end recovery. only do that upon valid token Match
if recognizer.IsExpectedToken(la) {
if PortDebug {
fmt.Println("OK2")
}
return
}
if PortDebug {
fmt.Println("LA" + strconv.Itoa(la))
fmt.Println(recognizer.GetATN().NextTokens(s, nil))
}
switch s.GetStateType() {
case ATNStateBlockStart, ATNStateStarBlockStart, ATNStatePlusBlockStart, ATNStateStarLoopEntry:
// Report error and recover if possible
@ -550,9 +531,6 @@ func (d *DefaultErrorStrategy) GetMissingSymbol(recognizer Parser) Token {
tf := recognizer.GetTokenFactory()
if PortDebug {
fmt.Println("Missing symbol error")
}
return tf.Create(current.GetSource(), expectedTokenType, tokenText, TokenDefaultChannel, -1, -1, current.GetLine(), current.GetColumn())
}

View File

@ -29,9 +29,6 @@ func NewFileStream(fileName string) *FileStream {
fs.filename = fileName
s := string(buf.Bytes())
if PortDebug {
fmt.Println(s)
}
fs.InputStream = NewInputStream(s)
return fs

View File

@ -68,9 +68,6 @@ func (is *InputStream) Mark() int {
}
func (is *InputStream) Release(marker int) {
if PortDebug {
fmt.Println("RELEASING")
}
}
func (is *InputStream) Seek(index int) {

View File

@ -68,9 +68,6 @@ func (i *IntervalSet) addRange(l, h int) {
}
func (i *IntervalSet) addInterval(v *Interval) {
if PortDebug {
fmt.Println("addInterval" + v.String())
}
if i.intervals == nil {
i.intervals = make([]*Interval, 0)
i.intervals = append(i.intervals, v)
@ -98,13 +95,7 @@ func (i *IntervalSet) addInterval(v *Interval) {
}
func (i *IntervalSet) addSet(other *IntervalSet) *IntervalSet {
if PortDebug {
fmt.Println("addSet")
}
if other.intervals != nil {
if PortDebug {
fmt.Println(len(other.intervals))
}
for k := 0; k < len(other.intervals); k++ {
i2 := other.intervals[k]
i.addInterval(NewInterval(i2.start, i2.stop))

View File

@ -162,9 +162,6 @@ func (b *BaseLexer) safeMatch() (ret int) {
defer func() {
if e := recover(); e != nil {
if re, ok := e.(RecognitionException); ok {
if PortDebug {
fmt.Println("RecognitionException")
}
b.notifyListeners(re) // Report error
b.Recover(re)
ret = LexerSkip // default
@ -207,12 +204,6 @@ func (b *BaseLexer) NextToken() Token {
ttype := LexerSkip
ttype = b.safeMatch()
if PortDebug {
fmt.Println("ttype", ttype)
}
if PortDebug {
fmt.Println("curType", b.thetype)
}
if b.input.LA(1) == TokenEOF {
b.hitEOF = true
@ -222,25 +213,13 @@ func (b *BaseLexer) NextToken() Token {
}
if b.thetype == LexerSkip {
continueOuter = true
if PortDebug {
fmt.Println("skip")
}
break
}
if b.thetype != LexerMore {
if PortDebug {
fmt.Println("no more")
}
break
}
if PortDebug {
fmt.Println("lex inner loop")
}
}
if PortDebug {
fmt.Println("lex loop")
}
if continueOuter {
continue
}
@ -264,9 +243,6 @@ func (b *BaseLexer) Skip() {
}
func (b *BaseLexer) More() {
if PortDebug {
fmt.Println("more")
}
b.thetype = LexerMore
}
@ -322,9 +298,6 @@ func (b *BaseLexer) EmitToken(token Token) {
// custom Token objects or provide a Newfactory.
// /
func (b *BaseLexer) Emit() Token {
if PortDebug {
fmt.Println("emit base lexer")
}
t := b.factory.Create(b.tokenFactorySourcePair, b.thetype, b.text, b.channel, b.TokenStartCharIndex, b.GetCharIndex()-1, b.TokenStartLine, b.TokenStartColumn)
b.EmitToken(t)
return t
@ -333,9 +306,6 @@ func (b *BaseLexer) Emit() Token {
func (b *BaseLexer) EmitEOF() Token {
cpos := b.GetCharPositionInLine()
lpos := b.GetLine()
if PortDebug {
fmt.Println("emitEOF")
}
eof := b.factory.Create(b.tokenFactorySourcePair, TokenEOF, "", TokenDefaultChannel, b.input.Index(), b.input.Index()-1, lpos, cpos)
b.EmitToken(eof)
return eof
@ -384,17 +354,11 @@ func (b *BaseLexer) GetATN() *ATN {
// Forces load of all tokens. Does not include EOF token.
// /
func (b *BaseLexer) getAllTokens() []Token {
if PortDebug {
fmt.Println("getAllTokens")
}
vl := b.Virt
tokens := make([]Token, 0)
t := vl.NextToken()
for t.GetTokenType() != TokenEOF {
tokens = append(tokens, t)
if PortDebug {
fmt.Println("getAllTokens")
}
t = vl.NextToken()
}
return tokens

View File

@ -133,10 +133,6 @@ func (l *LexerActionExecutor) fixOffsetBeforeMatch(offset int) *LexerActionExecu
// of the token.
// /
func (l *LexerActionExecutor) execute(lexer Lexer, input CharStream, startIndex int) {
if PortDebug {
fmt.Println("execute")
fmt.Println("len(lexerActions)", len(l.lexerActions))
}
requiresSeek := false
stopIndex := input.Index()
@ -149,24 +145,14 @@ func (l *LexerActionExecutor) execute(lexer Lexer, input CharStream, startIndex
for i := 0; i < len(l.lexerActions); i++ {
lexerAction := l.lexerActions[i]
if la, ok := lexerAction.(*LexerIndexedCustomAction); ok {
if PortDebug {
fmt.Printf("LexerIndexedCustomAction")
}
offset := la.offset
input.Seek(startIndex + offset)
lexerAction = la.lexerAction
requiresSeek = (startIndex + offset) != stopIndex
} else if lexerAction.getIsPositionDependent() {
if PortDebug {
fmt.Printf("posDep")
}
input.Seek(stopIndex)
requiresSeek = false
}
if PortDebug {
fmt.Println("exec")
fmt.Println(lexerAction)
}
lexerAction.execute(lexer)
}
}

View File

@ -73,18 +73,11 @@ func (l *LexerATNSimulator) copyState(simulator *LexerATNSimulator) {
}
func (l *LexerATNSimulator) Match(input CharStream, mode int) int {
if PortDebug {
fmt.Println("Match")
}
l.MatchCalls++
l.mode = mode
mark := input.Mark()
defer func() {
if PortDebug {
fmt.Println("FINALLY")
}
input.Release(mark)
}()
@ -94,17 +87,9 @@ func (l *LexerATNSimulator) Match(input CharStream, mode int) int {
dfa := l.decisionToDFA[mode]
if dfa.s0 == nil {
if PortDebug {
fmt.Println("MatchATN")
}
return l.MatchATN(input)
}
if PortDebug {
fmt.Println("execATN")
fmt.Println("mode", mode, len(l.decisionToDFA[0].s0.edges))
}
return l.execATN(input, dfa.s0)
}
@ -153,15 +138,6 @@ func (l *LexerATNSimulator) execATN(input CharStream, ds0 *DFAState) int {
t := input.LA(1)
s := ds0 // s is current/from DFA state
if PortDebug {
fs,ok := input.(*FileStream)
if ok {
fmt.Println("enter execATN", t, len(s.edges), fs.index, fs.size)
} else {
fmt.Println("enter execATN", t, len(s.edges))
}
}
for { // while more work
if LexerATNSimulatorDebug {
fmt.Println("execATN loop starting closure: " + s.configs.String())
@ -185,10 +161,6 @@ func (l *LexerATNSimulator) execATN(input CharStream, ds0 *DFAState) int {
// A character will take us back to an existing DFA state
// that already has lots of edges out of it. e.g., .* in comments.
target := l.getExistingTargetState(s, t)
if PortDebug {
fmt.Println(t)
fmt.Println(target != nil)
}
if target == nil {
target = l.computeTargetState(input, s, t)
// print("Computed:" + str(target))
@ -201,9 +173,6 @@ func (l *LexerATNSimulator) execATN(input CharStream, ds0 *DFAState) int {
// position accurately reflect the state of the interpreter at the
// end of the token.
if t != TokenEOF {
if PortDebug {
fmt.Println("consume", t, TokenEOF)
}
l.Consume(input)
}
if target.isAcceptState {
@ -216,9 +185,6 @@ func (l *LexerATNSimulator) execATN(input CharStream, ds0 *DFAState) int {
s = target // flip current DFA target becomes Newsrc/from state
}
if PortDebug {
fmt.Println("DONE WITH execATN loop")
}
return l.failOrAccept(l.prevAccept, input, s.configs, t)
}
@ -237,9 +203,6 @@ func (l *LexerATNSimulator) getExistingTargetState(s *DFAState, t int) *DFAState
}
target := s.edges[t-LexerATNSimulatorMinDFAEdge]
if PortDebug {
fmt.Println("len edges", len(s.edges), t, t-LexerATNSimulatorMinDFAEdge)
}
if LexerATNSimulatorDebug && target != nil {
fmt.Println("reuse state " + strconv.Itoa(s.stateNumber) + " edge to " + strconv.Itoa(target.stateNumber))
@ -279,15 +242,8 @@ func (l *LexerATNSimulator) computeTargetState(input CharStream, s *DFAState, t
func (l *LexerATNSimulator) failOrAccept(prevAccept *SimState, input CharStream, reach ATNConfigSet, t int) int {
if l.prevAccept.dfaState != nil {
if PortDebug {
fmt.Println(prevAccept.dfaState)
}
lexerActionExecutor := prevAccept.dfaState.lexerActionExecutor
l.accept(input, lexerActionExecutor, l.startIndex, prevAccept.index, prevAccept.line, prevAccept.column)
if PortDebug {
fmt.Println(prevAccept.dfaState.prediction)
}
return prevAccept.dfaState.prediction
}
@ -307,11 +263,6 @@ func (l *LexerATNSimulator) getReachableConfigSet(input CharStream, closure ATNC
// than a config that already reached an accept state for the same rule
SkipAlt := ATNInvalidAltNumber
if PortDebug {
fmt.Println("getReachableConfigSet")
fmt.Println("CLOSURE SIZE" + strconv.Itoa(len(closure.GetItems())))
}
for _, cfg := range closure.GetItems() {
currentAltReachedAcceptState := (cfg.GetAlt() == SkipAlt)
if currentAltReachedAcceptState && cfg.(*LexerATNConfig).passedThroughNonGreedyDecision {
@ -365,11 +316,6 @@ func (l *LexerATNSimulator) getReachableTarget(trans Transition, t int) ATNState
}
func (l *LexerATNSimulator) computeStartState(input CharStream, p ATNState) *OrderedATNConfigSet {
if PortDebug {
fmt.Println("Num transitions" + strconv.Itoa(len(p.GetTransitions())))
}
configs := NewOrderedATNConfigSet()
for i := 0; i < len(p.GetTransitions()); i++ {
target := p.GetTransitions()[i].getTarget()
@ -560,9 +506,6 @@ func (l *LexerATNSimulator) evaluatePredicate(input CharStream, ruleIndex, predI
input.Release(marker)
}()
if PortDebug {
fmt.Println("evalPred")
}
l.Consume(input)
return l.recog.Sempred(nil, ruleIndex, predIndex)
}
@ -661,9 +604,6 @@ func (l *LexerATNSimulator) GetText(input CharStream) string {
}
func (l *LexerATNSimulator) Consume(input CharStream) {
if PortDebug {
fmt.Println("consume", input.Index(), input.Size())
}
curChar := input.LA(1)
if curChar == int('\n') {
l.Line++
@ -683,9 +623,6 @@ func (l *LexerATNSimulator) GetLine() int {
}
func (l *LexerATNSimulator) GetTokenName(tt int) string {
if PortDebug {
fmt.Println(tt)
}
if tt == -1 {
return "EOF"
}

View File

@ -75,20 +75,7 @@ func (la *LL1Analyzer) Look(s, stopState ATNState, ctx RuleContext) *IntervalSet
if ctx != nil {
lookContext = predictionContextFromRuleContext(s.GetATN(), ctx)
}
if PortDebug {
fmt.Println("DEBUG 5")
// fmt.Println("DEBUG" + lookContext.String())
fmt.Println(s)
fmt.Println(stopState)
fmt.Println(lookContext)
fmt.Println(r)
fmt.Println(seeThruPreds)
fmt.Println("=====")
}
la.look1(s, stopState, lookContext, r, NewSet(nil, nil), NewBitSet(), seeThruPreds, true)
if PortDebug {
fmt.Println(r)
}
return r
}
@ -150,9 +137,6 @@ func (la *LL1Analyzer) look1(s, stopState ATNState, ctx PredictionContext, look
lookBusy.add(c)
if s == stopState {
if PortDebug {
fmt.Println("DEBUG 6")
}
if ctx == nil {
look.addOne(TokenEpsilon)
return
@ -174,16 +158,10 @@ func (la *LL1Analyzer) look1(s, stopState ATNState, ctx PredictionContext, look
}
if ctx != BasePredictionContextEMPTY {
if PortDebug {
fmt.Println("DEBUG 7")
}
// run thru all possible stack tops in ctx
for i := 0; i < ctx.length(); i++ {
returnState := la.atn.states[ctx.getReturnState(i)]
la.look2(returnState, stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF, i)
}
return
}
@ -195,49 +173,24 @@ func (la *LL1Analyzer) look1(s, stopState ATNState, ctx PredictionContext, look
t := s.GetTransitions()[i]
if t1, ok := t.(*RuleTransition); ok {
if PortDebug {
fmt.Println("DEBUG 8")
}
if calledRuleStack.contains(t1.getTarget().GetRuleIndex()) {
continue
}
newContext := SingletonBasePredictionContextCreate(ctx, t1.followState.GetStateNumber())
la.look3(stopState, newContext, look, lookBusy, calledRuleStack, seeThruPreds, addEOF, t1)
if PortDebug {
fmt.Println(look)
}
} else if t2, ok := t.(AbstractPredicateTransition); ok {
if PortDebug {
fmt.Println("DEBUG 9")
}
if seeThruPreds {
la.look1(t2.getTarget(), stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
} else {
look.addOne(LL1AnalyzerHitPred)
}
} else if t.getIsEpsilon() {
if PortDebug {
fmt.Println("DEBUG 10")
}
la.look1(t.getTarget(), stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
} else if _, ok := t.(*WildcardTransition); ok {
if PortDebug {
fmt.Println("DEBUG 11")
}
look.addRange(TokenMinUserTokenType, la.atn.maxTokenType)
} else {
if PortDebug {
fmt.Println("DEBUG 12")
}
set := t.getLabel()
if PortDebug {
fmt.Println(set)
}
if set != nil {
if _, ok := t.(*NotSetTransition); ok {
set = set.complement(TokenMinUserTokenType, la.atn.maxTokenType)

View File

@ -133,15 +133,8 @@ func (p *BaseParser) SetErrorHandler(e ErrorStrategy) {
func (p *BaseParser) Match(ttype int) Token {
if PortDebug {
fmt.Println("get current token")
}
t := p.GetCurrentToken()
if PortDebug {
fmt.Println("TOKEN IS " + t.GetText())
}
if t.GetTokenType() == ttype {
p.errHandler.ReportMatch(p)
p.Consume()
@ -155,10 +148,6 @@ func (p *BaseParser) Match(ttype int) Token {
}
}
if PortDebug {
fmt.Println("match done")
}
return t
}
@ -428,13 +417,7 @@ func (p *BaseParser) NotifyErrorListeners(msg string, offendingToken Token, err
func (p *BaseParser) Consume() Token {
o := p.GetCurrentToken()
if o.GetTokenType() != TokenEOF {
if PortDebug {
fmt.Println("Consuming")
}
p.GetInputStream().Consume()
if PortDebug {
fmt.Println("Done consuming")
}
}
hasListener := p.parseListeners != nil && len(p.parseListeners) > 0
if p.BuildParseTrees || hasListener {

View File

@ -65,13 +65,7 @@ func (p *ParserATNSimulator) reset() {
}
func (p *ParserATNSimulator) AdaptivePredict(input TokenStream, decision int, outerContext ParserRuleContext) int {
if PortDebug {
fmt.Println("Adaptive predict")
}
if ParserATNSimulatorDebug || ParserATNSimulatorListATNDecisions {
fmt.Println("AdaptivePredict decision " + strconv.Itoa(decision) +
" exec LA(1)==" + p.getLookaheadName(input) +
" line " + strconv.Itoa(input.LT(1).GetLine()) + ":" +
@ -137,13 +131,7 @@ func (p *ParserATNSimulator) AdaptivePredict(input TokenStream, decision int, ou
// appropriate start state for the precedence level rather
// than simply setting DFA.s0.
//
if PortDebug {
fmt.Println("precfilter", s0Closure)
}
s0Closure = p.applyPrecedenceFilter(s0Closure)
if PortDebug {
fmt.Println("precfilter", s0Closure)
}
s0 = p.addDFAState(dfa, NewDFAState(-1, s0Closure))
dfa.setPrecedenceStartState(p.parser.GetPrecedence(), s0)
} else {
@ -667,10 +655,6 @@ func (p *ParserATNSimulator) removeAllConfigsNotInRuleStopState(configs ATNConfi
}
func (p *ParserATNSimulator) computeStartState(a ATNState, ctx RuleContext, fullCtx bool) ATNConfigSet {
if PortDebug {
fmt.Println("computeStartState")
}
// always at least the implicit call to start rule
initialContext := predictionContextFromRuleContext(p.atn, ctx)
configs := NewBaseATNConfigSet(fullCtx)
@ -744,18 +728,9 @@ func (p *ParserATNSimulator) applyPrecedenceFilter(configs ATNConfigSet) ATNConf
statesFromAlt1 := make(map[int]PredictionContext)
configSet := NewBaseATNConfigSet(configs.FullContext())
if PortDebug {
fmt.Println("len", len(configs.GetItems()))
for _, config := range configs.GetItems() {
fmt.Println(config.getPrecedenceFilterSuppressed())
}
}
for _, config := range configs.GetItems() {
// handle alt 1 first
if config.GetAlt() != 1 {
if PortDebug {
fmt.Println("getalt1")
}
continue
}
updatedContext := config.GetSemanticContext().evalPrecedence(p.parser, p.outerContext)
@ -765,14 +740,8 @@ func (p *ParserATNSimulator) applyPrecedenceFilter(configs ATNConfigSet) ATNConf
}
statesFromAlt1[config.GetState().GetStateNumber()] = config.GetContext()
if updatedContext != config.GetSemanticContext() {
if PortDebug {
fmt.Println("add1")
}
configSet.Add(NewBaseATNConfig2(config, updatedContext), p.mergeCache)
} else {
if PortDebug {
fmt.Println("add2")
}
configSet.Add(config, p.mergeCache)
}
}
@ -780,27 +749,18 @@ func (p *ParserATNSimulator) applyPrecedenceFilter(configs ATNConfigSet) ATNConf
if config.GetAlt() == 1 {
// already handled
if PortDebug {
fmt.Println("getalt2")
}
continue
}
// In the future, p elimination step could be updated to also
// filter the prediction context for alternatives predicting alt>1
// (basically a graph subtraction algorithm).
if !config.getPrecedenceFilterSuppressed() {
if PortDebug {
fmt.Println("!getPrecedenceFilterSuppressed")
}
context := statesFromAlt1[config.GetState().GetStateNumber()]
if context != nil && context.equals(config.GetContext()) {
// eliminated
continue
}
}
if PortDebug {
fmt.Println("add3", config.getPrecedenceFilterSuppressed())
}
configSet.Add(config, p.mergeCache)
}
return configSet
@ -1037,9 +997,6 @@ func (p *ParserATNSimulator) closureCheckingStopState(config ATNConfig, configs
} else {
// we have no context info, just chase follow links (if greedy)
if ParserATNSimulatorDebug {
if PortDebug {
fmt.Println("DEBUG B")
}
fmt.Println("FALLING off rule " + p.getRuleName(config.GetState().GetRuleIndex()))
}
p.closureWork(config, configs, closureBusy, collectPredicates, fullCtx, depth, treatEOFAsEpsilon)
@ -1064,9 +1021,6 @@ func (p *ParserATNSimulator) closureCheckingStopState(config ATNConfig, configs
} else {
// else if we have no context info, just chase follow links (if greedy)
if ParserATNSimulatorDebug {
if PortDebug {
fmt.Println("DEBUG 2")
}
fmt.Println("FALLING off rule " + p.getRuleName(config.GetState().GetRuleIndex()))
}
}
@ -1076,9 +1030,6 @@ func (p *ParserATNSimulator) closureCheckingStopState(config ATNConfig, configs
// Do the actual work of walking epsilon edges//
func (p *ParserATNSimulator) closureWork(config ATNConfig, configs ATNConfigSet, closureBusy *Set, collectPredicates, fullCtx bool, depth int, treatEOFAsEpsilon bool) {
if PortDebug {
fmt.Println("closureWork")
}
state := config.GetState()
// optimization
if !state.GetEpsilonOnlyTransitions() {
@ -1092,9 +1043,6 @@ func (p *ParserATNSimulator) closureWork(config ATNConfig, configs ATNConfigSet,
continueCollecting := collectPredicates && !ok
c := p.getEpsilonTarget(config, t, continueCollecting, depth == 0, fullCtx, treatEOFAsEpsilon)
if ci, ok := c.(*BaseATNConfig); ok && ci != nil {
if PortDebug {
fmt.Println("DEBUG 1 ok")
}
if !t.getIsEpsilon() && closureBusy.add(c) != c {
// avoid infinite recursion for EOF* and EOF+
continue
@ -1103,10 +1051,6 @@ func (p *ParserATNSimulator) closureWork(config ATNConfig, configs ATNConfigSet,
if _, ok := config.GetState().(*RuleStopState); ok {
if PortDebug {
fmt.Println("DEBUG 2")
fmt.Println(closureBusy.String())
}
// target fell off end of rule mark resulting c as having dipped into outer context
// We can't get here if incoming config was rule stop and we had context
// track how far we dip into outer context. Might
@ -1114,26 +1058,12 @@ func (p *ParserATNSimulator) closureWork(config ATNConfig, configs ATNConfigSet,
// preds if p is > 0.
if closureBusy.add(c) != c {
if PortDebug {
fmt.Println("DEBUG 3", i, len(state.GetTransitions()))
}
// avoid infinite recursion for right-recursive rules
continue
} else {
if PortDebug {
fmt.Println(c)
fmt.Println(closureBusy)
}
}
if p.dfa != nil && p.dfa.precedenceDfa {
if PortDebug {
fmt.Println("DEBUG 4")
}
if t.(*EpsilonTransition).outermostPrecedenceReturn == p.dfa.atnStartState.GetRuleIndex() {
if PortDebug {
fmt.Println("setPrecedenceFilterSuppressed")
}
c.setPrecedenceFilterSuppressed(true)
}
}
@ -1150,15 +1080,9 @@ func (p *ParserATNSimulator) closureWork(config ATNConfig, configs ATNConfigSet,
newDepth++
}
}
if PortDebug {
fmt.Println("closureCheckingStopState")
}
p.closureCheckingStopState(c, configs, closureBusy, continueCollecting, fullCtx, newDepth, treatEOFAsEpsilon)
}
}
if PortDebug {
fmt.Println("closureWork done")
}
}
func (p *ParserATNSimulator) getRuleName(index int) string {
@ -1356,11 +1280,6 @@ func (p *ParserATNSimulator) getConflictingAltsOrUniqueAlt(configs ATNConfigSet)
}
func (p *ParserATNSimulator) GetTokenName(t int) string {
if PortDebug {
fmt.Println("Get token name")
}
if t == TokenEOF {
return "EOF"
}

View File

@ -84,10 +84,6 @@ func (b *BaseRecognizer) GetState() int {
}
func (b *BaseRecognizer) SetState(v int) {
if PortDebug {
fmt.Println("SETTING STATE " + strconv.Itoa(v) + " from " + strconv.Itoa(b.state))
}
b.state = v
}